Merge "Add support for PSC of serving cell."
diff --git a/api/current.xml b/api/current.xml
index 76b1c9a..e66e529 100644
--- a/api/current.xml
+++ b/api/current.xml
@@ -19481,17 +19481,6 @@
<parameter name="tab" type="android.app.ActionBar.Tab">
</parameter>
</method>
-<method name="finishContextMode"
- return="void"
- abstract="true"
- native="false"
- synchronized="false"
- static="false"
- final="false"
- deprecated="not deprecated"
- visibility="public"
->
-</method>
<method name="getCustomNavigationView"
return="android.view.View"
abstract="true"
@@ -19783,19 +19772,6 @@
<parameter name="title" type="java.lang.CharSequence">
</parameter>
</method>
-<method name="startContextMode"
- return="void"
- abstract="true"
- native="false"
- synchronized="false"
- static="false"
- final="false"
- deprecated="not deprecated"
- visibility="public"
->
-<parameter name="callback" type="android.app.ActionBar.ContextModeCallback">
-</parameter>
-</method>
<field name="DISPLAY_HIDE_HOME"
type="int"
transient="false"
@@ -19863,194 +19839,6 @@
>
</field>
</class>
-<class name="ActionBar.ContextMode"
- extends="java.lang.Object"
- abstract="true"
- static="true"
- final="false"
- deprecated="not deprecated"
- visibility="public"
->
-<constructor name="ActionBar.ContextMode"
- type="android.app.ActionBar.ContextMode"
- static="false"
- final="false"
- deprecated="not deprecated"
- visibility="public"
->
-</constructor>
-<method name="finish"
- return="void"
- abstract="true"
- native="false"
- synchronized="false"
- static="false"
- final="false"
- deprecated="not deprecated"
- visibility="public"
->
-</method>
-<method name="getCustomView"
- return="android.view.View"
- abstract="true"
- native="false"
- synchronized="false"
- static="false"
- final="false"
- deprecated="not deprecated"
- visibility="public"
->
-</method>
-<method name="getMenu"
- return="android.view.Menu"
- abstract="true"
- native="false"
- synchronized="false"
- static="false"
- final="false"
- deprecated="not deprecated"
- visibility="public"
->
-</method>
-<method name="getSubtitle"
- return="java.lang.CharSequence"
- abstract="true"
- native="false"
- synchronized="false"
- static="false"
- final="false"
- deprecated="not deprecated"
- visibility="public"
->
-</method>
-<method name="getTitle"
- return="java.lang.CharSequence"
- abstract="true"
- native="false"
- synchronized="false"
- static="false"
- final="false"
- deprecated="not deprecated"
- visibility="public"
->
-</method>
-<method name="invalidate"
- return="void"
- abstract="true"
- native="false"
- synchronized="false"
- static="false"
- final="false"
- deprecated="not deprecated"
- visibility="public"
->
-</method>
-<method name="setCustomView"
- return="void"
- abstract="true"
- native="false"
- synchronized="false"
- static="false"
- final="false"
- deprecated="not deprecated"
- visibility="public"
->
-<parameter name="view" type="android.view.View">
-</parameter>
-</method>
-<method name="setSubtitle"
- return="void"
- abstract="true"
- native="false"
- synchronized="false"
- static="false"
- final="false"
- deprecated="not deprecated"
- visibility="public"
->
-<parameter name="subtitle" type="java.lang.CharSequence">
-</parameter>
-</method>
-<method name="setTitle"
- return="void"
- abstract="true"
- native="false"
- synchronized="false"
- static="false"
- final="false"
- deprecated="not deprecated"
- visibility="public"
->
-<parameter name="title" type="java.lang.CharSequence">
-</parameter>
-</method>
-</class>
-<interface name="ActionBar.ContextModeCallback"
- abstract="true"
- static="true"
- final="false"
- deprecated="not deprecated"
- visibility="public"
->
-<method name="onContextItemClicked"
- return="boolean"
- abstract="true"
- native="false"
- synchronized="false"
- static="false"
- final="false"
- deprecated="not deprecated"
- visibility="public"
->
-<parameter name="mode" type="android.app.ActionBar.ContextMode">
-</parameter>
-<parameter name="item" type="android.view.MenuItem">
-</parameter>
-</method>
-<method name="onCreateContextMode"
- return="boolean"
- abstract="true"
- native="false"
- synchronized="false"
- static="false"
- final="false"
- deprecated="not deprecated"
- visibility="public"
->
-<parameter name="mode" type="android.app.ActionBar.ContextMode">
-</parameter>
-<parameter name="menu" type="android.view.Menu">
-</parameter>
-</method>
-<method name="onDestroyContextMode"
- return="void"
- abstract="true"
- native="false"
- synchronized="false"
- static="false"
- final="false"
- deprecated="not deprecated"
- visibility="public"
->
-<parameter name="mode" type="android.app.ActionBar.ContextMode">
-</parameter>
-</method>
-<method name="onPrepareContextMode"
- return="boolean"
- abstract="true"
- native="false"
- synchronized="false"
- static="false"
- final="false"
- deprecated="not deprecated"
- visibility="public"
->
-<parameter name="mode" type="android.app.ActionBar.ContextMode">
-</parameter>
-<parameter name="menu" type="android.view.Menu">
-</parameter>
-</method>
-</interface>
<interface name="ActionBar.NavigationCallback"
abstract="true"
static="true"
@@ -24691,6 +24479,194 @@
>
</method>
</class>
+<class name="ContextualMode"
+ extends="java.lang.Object"
+ abstract="true"
+ static="false"
+ final="false"
+ deprecated="not deprecated"
+ visibility="public"
+>
+<constructor name="ContextualMode"
+ type="android.app.ContextualMode"
+ static="false"
+ final="false"
+ deprecated="not deprecated"
+ visibility="public"
+>
+</constructor>
+<method name="finish"
+ return="void"
+ abstract="true"
+ native="false"
+ synchronized="false"
+ static="false"
+ final="false"
+ deprecated="not deprecated"
+ visibility="public"
+>
+</method>
+<method name="getCustomView"
+ return="android.view.View"
+ abstract="true"
+ native="false"
+ synchronized="false"
+ static="false"
+ final="false"
+ deprecated="not deprecated"
+ visibility="public"
+>
+</method>
+<method name="getMenu"
+ return="android.view.Menu"
+ abstract="true"
+ native="false"
+ synchronized="false"
+ static="false"
+ final="false"
+ deprecated="not deprecated"
+ visibility="public"
+>
+</method>
+<method name="getSubtitle"
+ return="java.lang.CharSequence"
+ abstract="true"
+ native="false"
+ synchronized="false"
+ static="false"
+ final="false"
+ deprecated="not deprecated"
+ visibility="public"
+>
+</method>
+<method name="getTitle"
+ return="java.lang.CharSequence"
+ abstract="true"
+ native="false"
+ synchronized="false"
+ static="false"
+ final="false"
+ deprecated="not deprecated"
+ visibility="public"
+>
+</method>
+<method name="invalidate"
+ return="void"
+ abstract="true"
+ native="false"
+ synchronized="false"
+ static="false"
+ final="false"
+ deprecated="not deprecated"
+ visibility="public"
+>
+</method>
+<method name="setCustomView"
+ return="void"
+ abstract="true"
+ native="false"
+ synchronized="false"
+ static="false"
+ final="false"
+ deprecated="not deprecated"
+ visibility="public"
+>
+<parameter name="view" type="android.view.View">
+</parameter>
+</method>
+<method name="setSubtitle"
+ return="void"
+ abstract="true"
+ native="false"
+ synchronized="false"
+ static="false"
+ final="false"
+ deprecated="not deprecated"
+ visibility="public"
+>
+<parameter name="subtitle" type="java.lang.CharSequence">
+</parameter>
+</method>
+<method name="setTitle"
+ return="void"
+ abstract="true"
+ native="false"
+ synchronized="false"
+ static="false"
+ final="false"
+ deprecated="not deprecated"
+ visibility="public"
+>
+<parameter name="title" type="java.lang.CharSequence">
+</parameter>
+</method>
+</class>
+<interface name="ContextualMode.Callback"
+ abstract="true"
+ static="true"
+ final="false"
+ deprecated="not deprecated"
+ visibility="public"
+>
+<method name="onContextItemClicked"
+ return="boolean"
+ abstract="true"
+ native="false"
+ synchronized="false"
+ static="false"
+ final="false"
+ deprecated="not deprecated"
+ visibility="public"
+>
+<parameter name="mode" type="android.app.ContextualMode">
+</parameter>
+<parameter name="item" type="android.view.MenuItem">
+</parameter>
+</method>
+<method name="onCreateContextMode"
+ return="boolean"
+ abstract="true"
+ native="false"
+ synchronized="false"
+ static="false"
+ final="false"
+ deprecated="not deprecated"
+ visibility="public"
+>
+<parameter name="mode" type="android.app.ContextualMode">
+</parameter>
+<parameter name="menu" type="android.view.Menu">
+</parameter>
+</method>
+<method name="onDestroyContextMode"
+ return="void"
+ abstract="true"
+ native="false"
+ synchronized="false"
+ static="false"
+ final="false"
+ deprecated="not deprecated"
+ visibility="public"
+>
+<parameter name="mode" type="android.app.ContextualMode">
+</parameter>
+</method>
+<method name="onPrepareContextMode"
+ return="boolean"
+ abstract="true"
+ native="false"
+ synchronized="false"
+ static="false"
+ final="false"
+ deprecated="not deprecated"
+ visibility="public"
+>
+<parameter name="mode" type="android.app.ContextualMode">
+</parameter>
+<parameter name="menu" type="android.view.Menu">
+</parameter>
+</method>
+</interface>
<class name="DatePickerDialog"
extends="android.app.AlertDialog"
abstract="false"
@@ -40109,6 +40085,17 @@
visibility="public"
>
</method>
+<method name="finishContextualMode"
+ return="void"
+ abstract="false"
+ native="false"
+ synchronized="false"
+ static="false"
+ final="false"
+ deprecated="not deprecated"
+ visibility="public"
+>
+</method>
<method name="getApplicationContext"
return="android.content.Context"
abstract="true"
@@ -40823,6 +40810,19 @@
<parameter name="intent" type="android.content.Intent">
</parameter>
</method>
+<method name="startContextualMode"
+ return="android.app.ContextualMode"
+ abstract="false"
+ native="false"
+ synchronized="false"
+ static="false"
+ final="false"
+ deprecated="not deprecated"
+ visibility="public"
+>
+<parameter name="callback" type="android.app.ContextualMode.Callback">
+</parameter>
+</method>
<method name="startInstrumentation"
return="boolean"
abstract="true"
@@ -82198,7 +82198,7 @@
type="float"
transient="false"
volatile="false"
- value="0.001f"
+ value="0.0010f"
static="true"
final="true"
deprecated="not deprecated"
@@ -226630,7 +226630,7 @@
deprecated="not deprecated"
visibility="public"
>
-<parameter name="arg0" type="T">
+<parameter name="t" type="T">
</parameter>
</method>
</interface>
diff --git a/core/java/android/app/ActionBar.java b/core/java/android/app/ActionBar.java
index 3cd2b9e..e1124a1 100644
--- a/core/java/android/app/ActionBar.java
+++ b/core/java/android/app/ActionBar.java
@@ -17,8 +17,6 @@
package android.app;
import android.graphics.drawable.Drawable;
-import android.view.Menu;
-import android.view.MenuItem;
import android.view.View;
import android.widget.SpinnerAdapter;
@@ -221,20 +219,6 @@
public abstract int getDisplayOptions();
/**
- * Start a context mode controlled by <code>callback</code>.
- * The {@link ContextModeCallback} will receive lifecycle events for the duration
- * of the context mode.
- *
- * @param callback Callback handler that will manage this context mode.
- */
- public abstract void startContextMode(ContextModeCallback callback);
-
- /**
- * Finish the current context mode.
- */
- public abstract void finishContextMode();
-
- /**
* Set the action bar into tabbed navigation mode.
*
* @see #addTab(Tab)
@@ -312,139 +296,6 @@
public abstract void selectTabAt(int position);
/**
- * Represents a contextual mode of the Action Bar. Context modes can be used for
- * modal interactions with activity content and replace the normal Action Bar until finished.
- * Examples of good contextual modes include selection modes, search, content editing, etc.
- */
- public static abstract class ContextMode {
- /**
- * Set the title of the context mode. This method will have no visible effect if
- * a custom view has been set.
- *
- * @param title Title string to set
- *
- * @see #setCustomView(View)
- */
- public abstract void setTitle(CharSequence title);
-
- /**
- * Set the subtitle of the context mode. This method will have no visible effect if
- * a custom view has been set.
- *
- * @param subtitle Subtitle string to set
- *
- * @see #setCustomView(View)
- */
- public abstract void setSubtitle(CharSequence subtitle);
-
- /**
- * Set a custom view for this context mode. The custom view will take the place of
- * the title and subtitle. Useful for things like search boxes.
- *
- * @param view Custom view to use in place of the title/subtitle.
- *
- * @see #setTitle(CharSequence)
- * @see #setSubtitle(CharSequence)
- */
- public abstract void setCustomView(View view);
-
- /**
- * Invalidate the context mode and refresh menu content. The context mode's
- * {@link ContextModeCallback} will have its
- * {@link ContextModeCallback#onPrepareContextMode(ContextMode, Menu)} method called.
- * If it returns true the menu will be scanned for updated content and any relevant changes
- * will be reflected to the user.
- */
- public abstract void invalidate();
-
- /**
- * Finish and close this context mode. The context mode's {@link ContextModeCallback} will
- * have its {@link ContextModeCallback#onDestroyContextMode(ContextMode)} method called.
- */
- public abstract void finish();
-
- /**
- * Returns the menu of actions that this context mode presents.
- * @return The context mode's menu.
- */
- public abstract Menu getMenu();
-
- /**
- * Returns the current title of this context mode.
- * @return Title text
- */
- public abstract CharSequence getTitle();
-
- /**
- * Returns the current subtitle of this context mode.
- * @return Subtitle text
- */
- public abstract CharSequence getSubtitle();
-
- /**
- * Returns the current custom view for this context mode.
- * @return The current custom view
- */
- public abstract View getCustomView();
- }
-
- /**
- * Callback interface for ActionBar context modes. Supplied to
- * {@link ActionBar#startContextMode(ContextModeCallback)}, a ContextModeCallback
- * configures and handles events raised by a user's interaction with a context mode.
- *
- * <p>A context mode's lifecycle is as follows:
- * <ul>
- * <li>{@link ContextModeCallback#onCreateContextMode(ContextMode, Menu)} once on initial
- * creation</li>
- * <li>{@link ContextModeCallback#onPrepareContextMode(ContextMode, Menu)} after creation
- * and any time the {@link ContextMode} is invalidated</li>
- * <li>{@link ContextModeCallback#onContextItemClicked(ContextMode, MenuItem)} any time a
- * contextual action button is clicked</li>
- * <li>{@link ContextModeCallback#onDestroyContextMode(ContextMode)} when the context mode
- * is closed</li>
- * </ul>
- */
- public interface ContextModeCallback {
- /**
- * Called when a context mode is first created. The menu supplied will be used to generate
- * action buttons for the context mode.
- *
- * @param mode ContextMode being created
- * @param menu Menu used to populate contextual action buttons
- * @return true if the context mode should be created, false if entering this context mode
- * should be aborted.
- */
- public boolean onCreateContextMode(ContextMode mode, Menu menu);
-
- /**
- * Called to refresh a context mode's action menu whenever it is invalidated.
- *
- * @param mode ContextMode being prepared
- * @param menu Menu used to populate contextual action buttons
- * @return true if the menu or context mode was updated, false otherwise.
- */
- public boolean onPrepareContextMode(ContextMode mode, Menu menu);
-
- /**
- * Called to report a user click on a contextual action button.
- *
- * @param mode The current ContextMode
- * @param item The item that was clicked
- * @return true if this callback handled the event, false if the standard MenuItem
- * invocation should continue.
- */
- public boolean onContextItemClicked(ContextMode mode, MenuItem item);
-
- /**
- * Called when a context mode is about to be exited and destroyed.
- *
- * @param mode The current ContextMode being destroyed
- */
- public void onDestroyContextMode(ContextMode mode);
- }
-
- /**
* Callback interface for ActionBar navigation events.
*/
public interface NavigationCallback {
diff --git a/core/java/android/app/Activity.java b/core/java/android/app/Activity.java
index 1cdd423..3298208 100644
--- a/core/java/android/app/Activity.java
+++ b/core/java/android/app/Activity.java
@@ -662,7 +662,7 @@
/*package*/ boolean mWindowAdded = false;
/*package*/ boolean mVisibleFromServer = false;
/*package*/ boolean mVisibleFromClient = true;
- /*package*/ ActionBar mActionBar = null;
+ /*package*/ ActionBarImpl mActionBar = null;
private CharSequence mTitle;
private int mTitleColor = 0;
@@ -4253,4 +4253,19 @@
}
}
}
+
+ @Override
+ public ContextualMode startContextualMode(ContextualMode.Callback callback) {
+ if (mActionBar == null) {
+ return null;
+ }
+ return mActionBar.startContextualMode(callback);
+ }
+
+ @Override
+ public void finishContextualMode() {
+ if (mActionBar != null) {
+ mActionBar.finishContextualMode();
+ }
+ }
}
diff --git a/core/java/android/app/ApplicationErrorReport.java b/core/java/android/app/ApplicationErrorReport.java
index 6981cd6..48cbd46 100644
--- a/core/java/android/app/ApplicationErrorReport.java
+++ b/core/java/android/app/ApplicationErrorReport.java
@@ -179,7 +179,7 @@
/**
* Return activity in receiverPackage that handles ACTION_APP_ERROR.
*
- * @param pm PackageManager isntance
+ * @param pm PackageManager instance
* @param errorPackage package which caused the error
* @param receiverPackage candidate package to receive the error
* @return activity component within receiverPackage which handles
diff --git a/core/java/android/app/ContextualMode.java b/core/java/android/app/ContextualMode.java
new file mode 100644
index 0000000..d187dc0
--- /dev/null
+++ b/core/java/android/app/ContextualMode.java
@@ -0,0 +1,155 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.app;
+
+import android.content.Context;
+import android.view.Menu;
+import android.view.MenuItem;
+import android.view.View;
+
+/**
+ * Represents a contextual mode of the user interface. Contextual modes can be used for
+ * modal interactions with content and replace parts of the normal UI until finished.
+ * Examples of good contextual modes include selection modes, search, content editing, etc.
+ */
+public abstract class ContextualMode {
+ /**
+ * Set the title of the contextual mode. This method will have no visible effect if
+ * a custom view has been set.
+ *
+ * @param title Title string to set
+ *
+ * @see #setCustomView(View)
+ */
+ public abstract void setTitle(CharSequence title);
+
+ /**
+ * Set the subtitle of the contextual mode. This method will have no visible effect if
+ * a custom view has been set.
+ *
+ * @param subtitle Subtitle string to set
+ *
+ * @see #setCustomView(View)
+ */
+ public abstract void setSubtitle(CharSequence subtitle);
+
+ /**
+ * Set a custom view for this contextual mode. The custom view will take the place of
+ * the title and subtitle. Useful for things like search boxes.
+ *
+ * @param view Custom view to use in place of the title/subtitle.
+ *
+ * @see #setTitle(CharSequence)
+ * @see #setSubtitle(CharSequence)
+ */
+ public abstract void setCustomView(View view);
+
+ /**
+ * Invalidate the contextual mode and refresh menu content. The contextual mode's
+ * {@link ContextualMode.Callback} will have its
+ * {@link Callback#onPrepareContextMode(ContextualMode, Menu)} method called.
+ * If it returns true the menu will be scanned for updated content and any relevant changes
+ * will be reflected to the user.
+ */
+ public abstract void invalidate();
+
+ /**
+ * Finish and close this context mode. The context mode's {@link ContextualMode.Callback} will
+ * have its {@link Callback#onDestroyContextMode(ContextualMode)} method called.
+ */
+ public abstract void finish();
+
+ /**
+ * Returns the menu of actions that this contextual mode presents.
+ * @return The contextual mode's menu.
+ */
+ public abstract Menu getMenu();
+
+ /**
+ * Returns the current title of this contextual mode.
+ * @return Title text
+ */
+ public abstract CharSequence getTitle();
+
+ /**
+ * Returns the current subtitle of this contextual mode.
+ * @return Subtitle text
+ */
+ public abstract CharSequence getSubtitle();
+
+ /**
+ * Returns the current custom view for this contextual mode.
+ * @return The current custom view
+ */
+ public abstract View getCustomView();
+
+ /**
+ * Callback interface for contextual modes. Supplied to
+ * {@link Context#startContextualMode(Callback)}, a Callback
+ * configures and handles events raised by a user's interaction with a context mode.
+ *
+ * <p>A context mode's lifecycle is as follows:
+ * <ul>
+ * <li>{@link Callback#onCreateContextMode(ContextualMode, Menu)} once on initial
+ * creation</li>
+ * <li>{@link Callback#onPrepareContextMode(ContextualMode, Menu)} after creation
+ * and any time the {@link ContextualMode} is invalidated</li>
+ * <li>{@link Callback#onContextItemClicked(ContextualMode, MenuItem)} any time a
+ * contextual action button is clicked</li>
+ * <li>{@link Callback#onDestroyContextMode(ContextualMode)} when the context mode
+ * is closed</li>
+ * </ul>
+ */
+ public interface Callback {
+ /**
+ * Called when a context mode is first created. The menu supplied will be used to generate
+ * action buttons for the context mode.
+ *
+     * @param mode ContextualMode being created
+ * @param menu Menu used to populate contextual action buttons
+ * @return true if the context mode should be created, false if entering this context mode
+ * should be aborted.
+ */
+ public boolean onCreateContextMode(ContextualMode mode, Menu menu);
+
+ /**
+ * Called to refresh a context mode's action menu whenever it is invalidated.
+ *
+     * @param mode ContextualMode being prepared
+ * @param menu Menu used to populate contextual action buttons
+ * @return true if the menu or context mode was updated, false otherwise.
+ */
+ public boolean onPrepareContextMode(ContextualMode mode, Menu menu);
+
+ /**
+ * Called to report a user click on a contextual action button.
+ *
+     * @param mode The current ContextualMode
+ * @param item The item that was clicked
+ * @return true if this callback handled the event, false if the standard MenuItem
+ * invocation should continue.
+ */
+ public boolean onContextItemClicked(ContextualMode mode, MenuItem item);
+
+ /**
+ * Called when a context mode is about to be exited and destroyed.
+ *
+     * @param mode The current ContextualMode being destroyed
+ */
+ public void onDestroyContextMode(ContextualMode mode);
+ }
+}
\ No newline at end of file
diff --git a/core/java/android/bluetooth/ScoSocket.java b/core/java/android/bluetooth/ScoSocket.java
deleted file mode 100644
index b65a99a..0000000
--- a/core/java/android/bluetooth/ScoSocket.java
+++ /dev/null
@@ -1,206 +0,0 @@
-/*
- * Copyright (C) 2008 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.bluetooth;
-
-import android.os.Handler;
-import android.os.Message;
-import android.os.PowerManager;
-import android.os.PowerManager.WakeLock;
-import android.util.Log;
-
-/**
- * The Android Bluetooth API is not finalized, and *will* change. Use at your
- * own risk.
- *
- * Simple SCO Socket.
- * Currently in Android, there is no support for sending data over a SCO
- * socket - this is managed by the hardware link to the Bluetooth Chip. This
- * class is instead intended for management of the SCO socket lifetime,
- * and is tailored for use with the headset / handsfree profiles.
- * @hide
- */
-public class ScoSocket {
- private static final String TAG = "ScoSocket";
- private static final boolean DBG = true;
- private static final boolean VDBG = false; // even more logging
-
- public static final int STATE_READY = 1; // Ready for use. No threads or sockets
- public static final int STATE_ACCEPT = 2; // accept() thread running
- public static final int STATE_CONNECTING = 3; // connect() thread running
- public static final int STATE_CONNECTED = 4; // connected, waiting for close()
- public static final int STATE_CLOSED = 5; // was connected, now closed.
-
- private int mState;
- private int mNativeData;
- private Handler mHandler;
- private int mAcceptedCode;
- private int mConnectedCode;
- private int mClosedCode;
-
- private WakeLock mWakeLock; // held while in STATE_CONNECTING
-
- static {
- classInitNative();
- }
- private native static void classInitNative();
-
- public ScoSocket(PowerManager pm, Handler handler, int acceptedCode, int connectedCode,
- int closedCode) {
- initNative();
- mState = STATE_READY;
- mHandler = handler;
- mAcceptedCode = acceptedCode;
- mConnectedCode = connectedCode;
- mClosedCode = closedCode;
- mWakeLock = pm.newWakeLock(PowerManager.PARTIAL_WAKE_LOCK, "ScoSocket");
- mWakeLock.setReferenceCounted(false);
- if (VDBG) log(this + " SCO OBJECT CTOR");
- }
- private native void initNative();
-
- protected void finalize() throws Throwable {
- try {
- if (VDBG) log(this + " SCO OBJECT DTOR");
- destroyNative();
- releaseWakeLockNow();
- } finally {
- super.finalize();
- }
- }
- private native void destroyNative();
-
- /** Connect this SCO socket to the given BT address.
- * Does not block.
- */
- public synchronized boolean connect(String address, String name) {
- if (DBG) log("connect() " + this);
- if (mState != STATE_READY) {
- if (DBG) log("connect(): Bad state");
- return false;
- }
- acquireWakeLock();
- if (connectNative(address, name)) {
- mState = STATE_CONNECTING;
- return true;
- } else {
- mState = STATE_CLOSED;
- releaseWakeLockNow();
- return false;
- }
- }
- private native boolean connectNative(String address, String name);
-
- /** Accept incoming SCO connections.
- * Does not block.
- */
- public synchronized boolean accept() {
- if (VDBG) log("accept() " + this);
- if (mState != STATE_READY) {
- if (DBG) log("Bad state");
- return false;
- }
- if (acceptNative()) {
- mState = STATE_ACCEPT;
- return true;
- } else {
- mState = STATE_CLOSED;
- return false;
- }
- }
- private native boolean acceptNative();
-
- public synchronized void close() {
- if (DBG) log(this + " SCO OBJECT close() mState = " + mState);
- acquireWakeLock();
- mState = STATE_CLOSED;
- closeNative();
- releaseWakeLock();
- }
- private native void closeNative();
-
- public synchronized int getState() {
- return mState;
- }
-
- private synchronized void onConnected(int result) {
- if (VDBG) log(this + " onConnected() mState = " + mState + " " + this);
- if (mState != STATE_CONNECTING) {
- if (DBG) log("Strange state, closing " + mState + " " + this);
- return;
- }
- if (result >= 0) {
- mState = STATE_CONNECTED;
- } else {
- mState = STATE_CLOSED;
- }
- mHandler.obtainMessage(mConnectedCode, mState, -1, this).sendToTarget();
- releaseWakeLockNow();
- }
-
- private synchronized void onAccepted(int result) {
- if (VDBG) log("onAccepted() " + this);
- if (mState != STATE_ACCEPT) {
- if (DBG) log("Strange state " + this);
- return;
- }
- if (result >= 0) {
- mState = STATE_CONNECTED;
- } else {
- mState = STATE_CLOSED;
- }
- mHandler.obtainMessage(mAcceptedCode, mState, -1, this).sendToTarget();
- }
-
- private synchronized void onClosed() {
- if (DBG) log("onClosed() " + this);
- if (mState != STATE_CLOSED) {
- mState = STATE_CLOSED;
- mHandler.obtainMessage(mClosedCode, mState, -1, this).sendToTarget();
- releaseWakeLock();
- }
- }
-
- private void acquireWakeLock() {
- if (!mWakeLock.isHeld()) {
- mWakeLock.acquire();
- if (VDBG) log("mWakeLock.acquire() " + this);
- }
- }
-
- private void releaseWakeLock() {
- if (mWakeLock.isHeld()) {
- // Keep apps processor awake for a further 2 seconds.
- // This is a hack to resolve issue http://b/1616263 - in which
- // we are left in a 80 mA power state when remotely terminating a
- // call while connected to BT headset "HTC BH S100 " with A2DP and
- // HFP profiles.
- if (VDBG) log("mWakeLock.release() in 2 sec" + this);
- mWakeLock.acquire(2000);
- }
- }
-
- private void releaseWakeLockNow() {
- if (mWakeLock.isHeld()) {
- if (VDBG) log("mWakeLock.release() now" + this);
- mWakeLock.release();
- }
- }
-
- private void log(String msg) {
- Log.d(TAG, msg);
- }
-}
diff --git a/core/java/android/content/Context.java b/core/java/android/content/Context.java
index b7e4b17..aceeed6 100644
--- a/core/java/android/content/Context.java
+++ b/core/java/android/content/Context.java
@@ -16,6 +16,7 @@
package android.content;
+import android.app.ContextualMode;
import android.content.pm.ApplicationInfo;
import android.content.pm.PackageManager;
import android.content.res.AssetManager;
@@ -26,6 +27,7 @@
import android.database.sqlite.SQLiteDatabase.CursorFactory;
import android.graphics.Bitmap;
import android.graphics.drawable.Drawable;
+import android.media.MediaScannerConnection.OnScanCompletedListener;
import android.net.Uri;
import android.os.Bundle;
import android.os.Handler;
@@ -1981,4 +1983,25 @@
public boolean isRestricted() {
return false;
}
+
+ /**
+ * Start a contextual mode controlled by <code>callback</code>.
+ * The {@link ContextualMode.Callback} will receive lifecycle events for the duration
+ * of the contextual mode. There can only be one contextual mode active at a time.
+ * Starting a new contextual mode while one is already active will finish the old
+ * contextual mode.
+ *
+     * @param callback Callback handler that will manage this contextual mode.
+ * @return The new contextual mode started by this call, or <code>null</code>
+ * if the mode was not started.
+ */
+ public ContextualMode startContextualMode(ContextualMode.Callback callback) {
+ return null;
+ }
+
+ /**
+ * Finish the current contextual mode if present.
+ */
+ public void finishContextualMode() {
+ }
}
diff --git a/core/java/android/content/res/Resources.java b/core/java/android/content/res/Resources.java
index f90982a..ba1b3a9 100755
--- a/core/java/android/content/res/Resources.java
+++ b/core/java/android/content/res/Resources.java
@@ -1345,8 +1345,8 @@
keyboardHidden, mConfiguration.navigation, width, height,
mConfiguration.screenLayout, mConfiguration.uiMode, sSdkVersion);
- drawableCacheClear(mDrawableCache, configChanges);
- drawableCacheClear(mColorDrawableCache, configChanges);
+ clearDrawableCache(mDrawableCache, configChanges);
+ clearDrawableCache(mColorDrawableCache, configChanges);
mColorStateListCache.clear();
@@ -1360,7 +1360,7 @@
}
}
- private void drawableCacheClear(
+ private void clearDrawableCache(
LongSparseArray<WeakReference<ConstantState>> cache,
int configChanges) {
int N = cache.size();
@@ -1821,7 +1821,7 @@
//Log.i(TAG, "Returning cached drawable @ #" +
// Integer.toHexString(((Integer)key).intValue())
// + " in " + this + ": " + entry);
- return entry.newDrawable();
+ return entry.newDrawable(this);
}
else { // our entry has been purged
drawableCache.delete(key);
diff --git a/core/java/android/database/sqlite/SQLiteDatabase.java b/core/java/android/database/sqlite/SQLiteDatabase.java
index c0226f8..d058858 100644
--- a/core/java/android/database/sqlite/SQLiteDatabase.java
+++ b/core/java/android/database/sqlite/SQLiteDatabase.java
@@ -400,6 +400,7 @@
* @see #unlock()
*/
/* package */ void lock() {
+ verifyDbIsOpen();
if (!mLockingEnabled) return;
mLock.lock();
if (SQLiteDebug.DEBUG_LOCK_TIME_TRACKING) {
@@ -420,6 +421,7 @@
* @see #unlockForced()
*/
private void lockForced() {
+ verifyDbIsOpen();
mLock.lock();
if (SQLiteDebug.DEBUG_LOCK_TIME_TRACKING) {
if (mLock.getHoldCount() == 1) {
@@ -952,7 +954,10 @@
//STOPSHIP - uncomment the following line
//sqliteDatabase.setJournalMode(path, "TRUNCATE");
// STOPSHIP remove the following lines
- sqliteDatabase.enableWriteAheadLogging();
+ if (!path.equalsIgnoreCase(MEMORY_DB_PATH)) {
+ sqliteDatabase.enableWriteAheadLogging();
+ }
+ // END STOPSHIP
// add this database to the list of databases opened in this process
ActiveDatabases.addActiveDatabase(sqliteDatabase);
@@ -2406,14 +2411,18 @@
}
/**
- * package visibility only for testing purposes
+ * This method disables the features enabled by {@link #enableWriteAheadLogging()}.
+ * @hide
*/
- /* package */ synchronized void disableWriteAheadLogging() {
- if (mConnectionPool == null) {
- return;
+ public void disableWriteAheadLogging() {
+ synchronized (this) {
+ if (mConnectionPool == null) {
+ return;
+ }
+ mConnectionPool.close();
+ mConnectionPool = null;
+ setJournalMode(mPath, "TRUNCATE");
}
- mConnectionPool.close();
- mConnectionPool = null;
}
/**
diff --git a/core/java/android/hardware/Camera.java b/core/java/android/hardware/Camera.java
index 1100886..19e578f 100644
--- a/core/java/android/hardware/Camera.java
+++ b/core/java/android/hardware/Camera.java
@@ -32,23 +32,79 @@
import android.os.Message;
/**
- * The Camera class is used to connect/disconnect with the camera service,
- * set capture settings, start/stop preview, snap a picture, and retrieve
- * frames for encoding for video.
- * <p>There is no default constructor for this class. Use {@link #open()} to
- * get a Camera object.</p>
+ * The Camera class is used to set image capture settings, start/stop preview,
+ * snap pictures, and retrieve frames for encoding for video. This class is a
+ * client for the Camera service, which manages the actual camera hardware.
*
- * <p>In order to use the device camera, you must declare the
+ * <p>To access the device camera, you must declare the
* {@link android.Manifest.permission#CAMERA} permission in your Android
* Manifest. Also be sure to include the
* <a href="{@docRoot}guide/topics/manifest/uses-feature-element.html"><uses-feature></a>
- * manifest element in order to declare camera features used by your application.
+ * manifest element to declare camera features used by your application.
* For example, if you use the camera and auto-focus feature, your Manifest
* should include the following:</p>
* <pre> <uses-permission android:name="android.permission.CAMERA" />
* <uses-feature android:name="android.hardware.camera" />
* <uses-feature android:name="android.hardware.camera.autofocus" /></pre>
*
+ * <p>To take pictures with this class, use the following steps:</p>
+ *
+ * <ol>
+ * <li>Obtain an instance of Camera from {@link #open(int)}.
+ *
+ * <li>Get existing (default) settings with {@link #getParameters()}.
+ *
+ * <li>If necessary, modify the returned {@link Camera.Parameters} object and call
+ * {@link #setParameters(Camera.Parameters)}.
+ *
+ * <li>If desired, call {@link #setDisplayOrientation(int)}.
+ *
+ * <li><b>Important</b>: Pass a fully initialized {@link SurfaceHolder} to
+ * {@link #setPreviewDisplay(SurfaceHolder)}. Without a surface, the camera
+ * will be unable to start the preview.
+ *
+ * <li><b>Important</b>: Call {@link #startPreview()} to start updating the
+ * preview surface. Preview must be started before you can take a picture.
+ *
+ * <li>When you want, call {@link #takePicture(Camera.ShutterCallback,
+ * Camera.PictureCallback, Camera.PictureCallback, Camera.PictureCallback)} to
+ * capture a photo. Wait for the callbacks to provide the actual image data.
+ *
+ * <li>After taking a picture, preview display will have stopped. To take more
+ * photos, call {@link #startPreview()} again first.
+ *
+ * <li>Call {@link #stopPreview()} to stop updating the preview surface.
+ *
+ * <li><b>Important:</b> Call {@link #release()} to release the camera for
+ * use by other applications. Applications should release the camera
+ * immediately in {@link android.app.Activity#onPause()} (and re-{@link #open()}
+ * it in {@link android.app.Activity#onResume()}).
+ * </ol>
+ *
+ * <p>To quickly switch to video recording mode, use these steps:</p>
+ *
+ * <ol>
+ * <li>Obtain and initialize a Camera and start preview as described above.
+ *
+ * <li>Call {@link #unlock()} to allow the media process to access the camera.
+ *
+ * <li>Pass the camera to {@link android.media.MediaRecorder#setCamera(Camera)}.
+ * See {@link android.media.MediaRecorder} for information about video recording.
+ *
+ * <li>When finished recording, call {@link #reconnect()} to re-acquire
+ * and re-lock the camera.
+ *
+ * <li>If desired, restart preview and take more photos or videos.
+ *
+ * <li>Call {@link #stopPreview()} and {@link #release()} as described above.
+ * </ol>
+ *
+ * <p>This class is not thread-safe, and is meant for use from one event thread.
+ * Most long-running operations (preview, focus, photo capture, etc) happen
+ * asynchronously and invoke callbacks as necessary. Callbacks will be invoked
+ * on the event thread {@link #open(int)} was called from. This class's methods
+ * must never be called from multiple threads at once.</p>
+ *
* <p class="caution"><strong>Caution:</strong> Different Android-powered devices
* may have different hardware specifications, such as megapixel ratings and
* auto-focus capabilities. In order for your application to be compatible with
@@ -84,12 +140,12 @@
private boolean mWithBuffer;
/**
- * Returns the number of Cameras available.
+ * Returns the number of physical cameras available on this device.
*/
public native static int getNumberOfCameras();
/**
- * Returns the information about the camera.
+ * Returns the information about a particular camera.
* If {@link #getNumberOfCameras()} returns N, the valid id is 0 to N-1.
*/
public native static void getCameraInfo(int cameraId, CameraInfo cameraInfo);
@@ -123,9 +179,30 @@
};
/**
- * Returns a new Camera object.
- * If {@link #getNumberOfCameras()} returns N, the valid id is 0 to N-1.
- * The id 0 is the default camera.
+ * Creates a new Camera object to access a particular hardware camera.
+ *
+ * <p>You must call {@link #release()} when you are done using the camera,
+ * otherwise it will remain locked and be unavailable to other applications.
+ *
+ * <p>Your application should only have one Camera object active at a time
+ * for a particular hardware camera.
+ *
+ * <p>Callbacks from other methods are delivered to the event loop of the
+ * thread which called open(). If this thread has no event loop, then
+ * callbacks are delivered to the main application event loop. If there
+ * is no main application event loop, callbacks are not delivered.
+ *
+ * <p class="caution"><b>Caution:</b> On some devices, this method may
+ * take a long time to complete. It is best to call this method from a
+ * worker thread (possibly using {@link android.os.AsyncTask}) to avoid
+ * blocking the main application UI thread.
+ *
+ * @param cameraId the hardware camera to access, between 0 and
+ * {@link #getNumberOfCameras()}-1. Use {@link #CAMERA_ID_DEFAULT}
+ * to access the default camera.
+ * @return a new Camera object, connected, locked and ready for use.
+ * @throws RuntimeException if connection to the camera service fails (for
+ * example, if the camera is in use by another process).
*/
public static Camera open(int cameraId) {
return new Camera(cameraId);
@@ -137,7 +214,8 @@
public static int CAMERA_ID_DEFAULT = 0;
/**
- * Returns a new Camera object. This returns the default camera.
+ * Equivalent to Camera.open(Camera.CAMERA_ID_DEFAULT).
+ * Creates a new Camera object to access the default camera.
*/
public static Camera open() {
return new Camera(CAMERA_ID_DEFAULT);
@@ -173,56 +251,83 @@
/**
* Disconnects and releases the Camera object resources.
- * <p>It is recommended that you call this as soon as you're done with the
- * Camera object.</p>
+ *
+ * <p>You must call this as soon as you're done with the Camera object.</p>
*/
public final void release() {
native_release();
}
/**
- * Reconnect to the camera after passing it to MediaRecorder. To save
- * setup/teardown time, a client of Camera can pass an initialized Camera
- * object to a MediaRecorder to use for video recording. Once the
- * MediaRecorder is done with the Camera, this method can be used to
- * re-establish a connection with the camera hardware. NOTE: The Camera
- * object must first be unlocked by the process that owns it before it
- * can be connected to another process.
+ * Unlocks the camera to allow another process to access it.
+ * Normally, the camera is locked to the process with an active Camera
+ * object until {@link #release()} is called. To allow rapid handoff
+ * between processes, you can call this method to release the camera
+ * temporarily for another process to use; once the other process is done
+ * you can call {@link #reconnect()} to reclaim the camera.
*
- * @throws IOException if the method fails.
- */
- public native final void reconnect() throws IOException;
-
- /**
- * Lock the camera to prevent other processes from accessing it. To save
- * setup/teardown time, a client of Camera can pass an initialized Camera
- * object to another process. This method is used to re-lock the Camera
- * object prevent other processes from accessing it. By default, the
- * Camera object is locked. Locking it again from the same process will
- * have no effect. Attempting to lock it from another process if it has
- * not been unlocked will fail.
+ * <p>This must be done before calling
+ * {@link android.media.MediaRecorder#setCamera(Camera)}.
*
- * @throws RuntimeException if the method fails.
- */
- public native final void lock();
-
- /**
- * Unlock the camera to allow another process to access it. To save
- * setup/teardown time, a client of Camera can pass an initialized Camera
- * object to another process. This method is used to unlock the Camera
- * object before handing off the Camera object to the other process.
+ * <p>If you are not recording video, you probably do not need this method.
*
- * @throws RuntimeException if the method fails.
+ * @throws RuntimeException if the camera cannot be unlocked.
*/
public native final void unlock();
/**
- * Sets the SurfaceHolder to be used for a picture preview. If the surface
- * changed since the last call, the screen will blank. Nothing happens
- * if the same surface is re-set.
+ * Re-locks the camera to prevent other processes from accessing it.
+ * Camera objects are locked by default unless {@link #unlock()} is
+ * called. Normally {@link #reconnect()} is used instead.
*
- * @param holder the SurfaceHolder upon which to place the picture preview
- * @throws IOException if the method fails.
+ * <p>If you are not recording video, you probably do not need this method.
+ *
+ * @throws RuntimeException if the camera cannot be re-locked (for
+ * example, if the camera is still in use by another process).
+ */
+ public native final void lock();
+
+ /**
+ * Reconnects to the camera service after another process used it.
+ * After {@link #unlock()} is called, another process may use the
+ * camera; when the process is done, you must reconnect to the camera,
+ * which will re-acquire the lock and allow you to continue using the
+ * camera.
+ *
+ * <p>This must be done after {@link android.media.MediaRecorder} is
+ * done recording if {@link android.media.MediaRecorder#setCamera(Camera)}
+ * was used.
+ *
+ * <p>If you are not recording video, you probably do not need this method.
+ *
+ * @throws IOException if a connection cannot be re-established (for
+ * example, if the camera is still in use by another process).
+ */
+ public native final void reconnect() throws IOException;
+
+ /**
+ * Sets the {@link Surface} to be used for live preview.
+ * A surface is necessary for preview, and preview is necessary to take
+ * pictures. The same surface can be re-set without harm.
+ *
+ * <p>The {@link SurfaceHolder} must already contain a surface when this
+ * method is called. If you are using {@link android.view.SurfaceView},
+ * you will need to register a {@link SurfaceHolder.Callback} with
+ * {@link SurfaceHolder#addCallback(SurfaceHolder.Callback)} and wait for
+ * {@link SurfaceHolder.Callback#surfaceCreated(SurfaceHolder)} before
+ * calling setPreviewDisplay() or starting preview.
+ *
+ * <p>This method must be called before {@link #startPreview()}. The
+ * one exception is that if the preview surface is not set (or set to null)
+ * before startPreview() is called, then this method may be called once
+ * with a non-null parameter to set the preview surface. (This allows
+ * camera setup and surface creation to happen in parallel, saving time.)
+ * The preview surface may not otherwise change while preview is running.
+ *
+ * @param holder containing the Surface on which to place the preview,
+ * or null to remove the preview surface
+ * @throws IOException if the method fails (for example, if the surface
+ * is unavailable or unsuitable).
*/
public final void setPreviewDisplay(SurfaceHolder holder) throws IOException {
if (holder != null) {
@@ -235,31 +340,47 @@
private native final void setPreviewDisplay(Surface surface);
/**
- * Used to get a copy of each preview frame.
+ * Callback interface used to deliver copies of preview frames as
+ * they are displayed.
+ *
+ * @see #setPreviewCallback(Camera.PreviewCallback)
+ * @see #setOneShotPreviewCallback(Camera.PreviewCallback)
+ * @see #setPreviewCallbackWithBuffer(Camera.PreviewCallback)
+ * @see #startPreview()
*/
public interface PreviewCallback
{
/**
- * The callback that delivers the preview frames.
+ * Called as preview frames are displayed. This callback is invoked
+ * on the event thread {@link #open(int)} was called from.
*
- * @param data The contents of the preview frame in the format defined
+ * @param data the contents of the preview frame in the format defined
* by {@link android.graphics.ImageFormat}, which can be queried
* with {@link android.hardware.Camera.Parameters#getPreviewFormat()}.
* If {@link android.hardware.Camera.Parameters#setPreviewFormat(int)}
* is never called, the default will be the YCbCr_420_SP
* (NV21) format.
- * @param camera The Camera service object.
+ * @param camera the Camera service object.
*/
void onPreviewFrame(byte[] data, Camera camera);
};
/**
- * Start drawing preview frames to the surface.
+ * Starts capturing and drawing preview frames to the screen.
+ * Preview will not actually start until a surface is supplied with
+ * {@link #setPreviewDisplay(SurfaceHolder)}.
+ *
+ * <p>If {@link #setPreviewCallback(Camera.PreviewCallback)},
+ * {@link #setOneShotPreviewCallback(Camera.PreviewCallback)}, or
+ * {@link #setPreviewCallbackWithBuffer(Camera.PreviewCallback)} were
+ * called, {@link Camera.PreviewCallback#onPreviewFrame(byte[], Camera)}
+ * will be called when preview data becomes available.
*/
public native final void startPreview();
/**
- * Stop drawing preview frames to the surface.
+ * Stops capturing and drawing preview frames to the surface, and
+ * resets the camera for a future call to {@link #startPreview()}.
*/
public native final void stopPreview();
@@ -272,11 +393,13 @@
public native final boolean previewEnabled();
/**
- * Can be called at any time to instruct the camera to use a callback for
- * each preview frame in addition to displaying it.
+ * Installs a callback to be invoked for every preview frame in addition
+ * to displaying them on the screen. The callback will be repeatedly called
+ * for as long as preview is active. This method can be called at any time,
+ * even while preview is live. Any other preview callbacks are overridden.
*
- * @param cb A callback object that receives a copy of each preview frame.
- * Pass null to stop receiving callbacks at any time.
+ * @param cb a callback object that receives a copy of each preview frame,
+ * or null to stop receiving callbacks.
*/
public final void setPreviewCallback(PreviewCallback cb) {
mPreviewCallback = cb;
@@ -288,10 +411,13 @@
}
/**
- * Installs a callback to retrieve a single preview frame, after which the
- * callback is cleared.
+ * Installs a callback to be invoked for the next preview frame in addition
+ * to displaying it on the screen. After one invocation, the callback is
+ * cleared. This method can be called any time, even when preview is live.
+ * Any other preview callbacks are overridden.
*
- * @param cb A callback object that receives a copy of the preview frame.
+ * @param cb a callback object that receives a copy of the next preview frame,
+ * or null to stop receiving callbacks.
*/
public final void setOneShotPreviewCallback(PreviewCallback cb) {
mPreviewCallback = cb;
@@ -303,17 +429,24 @@
private native final void setHasPreviewCallback(boolean installed, boolean manualBuffer);
/**
- * Installs a callback which will get called as long as there are buffers in the
- * preview buffer queue, which minimizes dynamic allocation of preview buffers.
+ * Installs a callback to be invoked for every preview frame, using buffers
+ * supplied with {@link #addCallbackBuffer(byte[])}, in addition to
+ * displaying them on the screen. The callback will be repeatedly called
+ * for as long as preview is active and buffers are available.
+ * Any other preview callbacks are overridden.
*
- * Apps must call addCallbackBuffer to explicitly register the buffers to use, or no callbacks
- * will be received. addCallbackBuffer may be safely called before or after
- * a call to setPreviewCallbackWithBuffer with a non-null callback parameter.
+ * <p>The purpose of this method is to improve preview efficiency and frame
+ * rate by allowing preview frame memory reuse. You must call
+ * {@link #addCallbackBuffer(byte[])} at some point -- before or after
+ * calling this method -- or no callbacks will be received.
*
- * The buffer queue will be cleared upon any calls to setOneShotPreviewCallback,
- * setPreviewCallback, or to this method with a null callback parameter.
+ * The buffer queue will be cleared if this method is called with a null
+ * callback, {@link #setPreviewCallback(Camera.PreviewCallback)} is called,
+ * or {@link #setOneShotPreviewCallback(Camera.PreviewCallback)} is called.
*
- * @param cb A callback object that receives a copy of the preview frame. A null value will clear the queue.
+ * @param cb a callback object that receives a copy of the preview frame,
+ * or null to stop receiving callbacks and clear the buffer queue.
+ * @see #addCallbackBuffer(byte[])
*/
public final void setPreviewCallbackWithBuffer(PreviewCallback cb) {
mPreviewCallback = cb;
@@ -325,21 +458,27 @@
/**
* Adds a pre-allocated buffer to the preview callback buffer queue.
* Applications can add one or more buffers to the queue. When a preview
- * frame arrives and there is still available buffer, buffer will be filled
- * and it is removed from the queue. Then preview callback is invoked with
- * the buffer. If a frame arrives and there is no buffer left, the frame is
- * discarded. Applications should add the buffers back when they finish the
- * processing.
+ * frame arrives and there is still at least one available buffer, the
+ * buffer will be used and removed from the queue. Then preview callback is
+ * invoked with the buffer. If a frame arrives and there is no buffer left,
+ * the frame is discarded. Applications should add buffers back when they
+ * finish processing the data in them.
*
- * The image format of the callback buffer can be read from {@link
- * android.hardware.Camera.Parameters#getPreviewFormat()}. bitsPerPixel can
- * be read from {@link android.graphics.ImageFormat#getBitsPerPixel(int)}.
- * Preview width and height can be determined from getPreviewSize.
+ * <p>The size of the buffer is determined by multiplying the preview
+ * image width, height, and bytes per pixel. The width and height can be
+ * read from {@link Camera.Parameters#getPreviewSize()}. Bytes per pixel
+ * can be computed from
+ * {@link android.graphics.ImageFormat#getBitsPerPixel(int)} / 8,
+ * using the image format from {@link Camera.Parameters#getPreviewFormat()}.
*
- * Alternatively, a buffer from a previous callback may be passed in or used
- * to determine the size of new preview frame buffers.
+ * <p>This method is only necessary when
+ * {@link #setPreviewCallbackWithBuffer(PreviewCallback)} is used. When
+ * {@link #setPreviewCallback(PreviewCallback)} or
+ * {@link #setOneShotPreviewCallback(PreviewCallback)} are used, buffers
+ * are automatically allocated.
*
- * @param callbackBuffer The buffer to register. Size should be width * height * bitsPerPixel / 8.
+ * @param callbackBuffer the buffer to add to the queue.
+ * The size should be width * height * bits_per_pixel / 8.
* @see #setPreviewCallbackWithBuffer(PreviewCallback)
*/
public native final void addCallbackBuffer(byte[] callbackBuffer);
@@ -438,7 +577,8 @@
}
/**
- * Handles the callback for the camera auto focus.
+ * Callback interface used to notify on completion of camera auto focus.
+ *
* <p>Devices that do not support auto-focus will receive a "fake"
* callback to this interface. If your application needs auto-focus and
* should not be installed on devices <em>without</em> auto-focus, you must
@@ -446,13 +586,15 @@
* {@code android.hardware.camera.autofocus} feature, in the
* <a href="{@docRoot}guide/topics/manifest/uses-feature-element.html"><uses-feature></a>
* manifest element.</p>
+ *
+ * @see #autoFocus(AutoFocusCallback)
*/
public interface AutoFocusCallback
{
/**
- * Callback for the camera auto focus. If the camera does not support
- * auto-focus and autoFocus is called, onAutoFocus will be called
- * immediately with success.
+ * Called when the camera auto focus completes. If the camera does not
+ * support auto-focus and autoFocus is called, onAutoFocus will be
+ * called immediately with success.
*
* @param success true if focus was successful, false if otherwise
* @param camera the Camera service object
@@ -461,23 +603,28 @@
};
/**
- * Starts auto-focus function and registers a callback function to run when
- * camera is focused. Only valid after startPreview() has been called.
- * Applications should call {@link
- * android.hardware.Camera.Parameters#getFocusMode()} to determine if this
- * method should be called. If the camera does not support auto-focus, it is
- * a no-op and {@link AutoFocusCallback#onAutoFocus(boolean, Camera)}
+ * Starts camera auto-focus and registers a callback function to run when
+ * the camera is focused. This method is only valid when preview is active
+ * (between {@link #startPreview()} and before {@link #stopPreview()}).
+ *
+ * <p>Callers should check
+ * {@link android.hardware.Camera.Parameters#getFocusMode()} to determine if
+ * this method should be called. If the camera does not support auto-focus,
+ * it is a no-op and {@link AutoFocusCallback#onAutoFocus(boolean, Camera)}
* callback will be called immediately.
+ *
* <p>If your application should not be installed
* on devices without auto-focus, you must declare that your application
* uses auto-focus with the
* <a href="{@docRoot}guide/topics/manifest/uses-feature-element.html"><uses-feature></a>
* manifest element.</p>
+ *
* <p>If the current flash mode is not
* {@link android.hardware.Camera.Parameters#FLASH_MODE_OFF}, flash may be
- * fired during auto-focus depending on the driver.<p>
+ * fired during auto-focus, depending on the driver and camera hardware.</p>
*
* @param cb the callback to run
+ * @see #cancelAutoFocus()
*/
public final void autoFocus(AutoFocusCallback cb)
{
@@ -487,10 +634,12 @@
private native final void native_autoFocus();
/**
- * Cancels auto-focus function. If the auto-focus is still in progress,
- * this function will cancel it. Whether the auto-focus is in progress
- * or not, this function will return the focus position to the default.
+ * Cancels any auto-focus function in progress.
+ * Whether or not auto-focus is currently in progress,
+ * this function will return the focus position to the default.
* If the camera does not support auto-focus, this is a no-op.
+ *
+ * @see #autoFocus(Camera.AutoFocusCallback)
*/
public final void cancelAutoFocus()
{
@@ -500,23 +649,32 @@
private native final void native_cancelAutoFocus();
/**
- * An interface which contains a callback for the shutter closing after taking a picture.
+ * Callback interface used to signal the moment of actual image capture.
+ *
+ * @see #takePicture(ShutterCallback, PictureCallback, PictureCallback, PictureCallback)
*/
public interface ShutterCallback
{
/**
- * Can be used to play a shutter sound as soon as the image has been captured, but before
- * the data is available.
+ * Called as near as possible to the moment when a photo is captured
+ * from the sensor. This is a good opportunity to play a shutter sound
+ * or give other feedback of camera operation. This may be some time
+ * after the photo was triggered, but some time before the actual data
+ * is available.
*/
void onShutter();
}
/**
- * Handles the callback for when a picture is taken.
+ * Callback interface used to supply image data from a photo capture.
+ *
+ * @see #takePicture(ShutterCallback, PictureCallback, PictureCallback, PictureCallback)
*/
public interface PictureCallback {
/**
- * Callback for when a picture is taken.
+ * Called when image data is available after a picture is taken.
+ * The format of the data depends on the context of the callback
+ * and {@link Camera.Parameters} settings.
*
* @param data a byte array of the picture data
* @param camera the Camera service object
@@ -525,24 +683,9 @@
};
/**
- * Triggers an asynchronous image capture. The camera service will initiate
- * a series of callbacks to the application as the image capture progresses.
- * The shutter callback occurs after the image is captured. This can be used
- * to trigger a sound to let the user know that image has been captured. The
- * raw callback occurs when the raw image data is available (NOTE: the data
- * may be null if the hardware does not have enough memory to make a copy).
- * The jpeg callback occurs when the compressed image is available. If the
- * application does not need a particular callback, a null can be passed
- * instead of a callback method.
+ * Equivalent to takePicture(shutter, raw, null, jpeg).
*
- * This method is only valid after {@link #startPreview()} has been called.
- * This method will stop the preview. Applications should not call {@link
- * #stopPreview()} before this. After jpeg callback is received,
- * applications can call {@link #startPreview()} to restart the preview.
- *
- * @param shutter callback after the image is captured, may be null
- * @param raw callback with raw image data, may be null
- * @param jpeg callback with jpeg image data, may be null
+ * @see #takePicture(ShutterCallback, PictureCallback, PictureCallback, PictureCallback)
*/
public final void takePicture(ShutterCallback shutter, PictureCallback raw,
PictureCallback jpeg) {
@@ -563,15 +706,18 @@
* application does not need a particular callback, a null can be passed
* instead of a callback method.
*
- * This method is only valid after {@link #startPreview()} has been called.
- * This method will stop the preview. Applications should not call {@link
- * #stopPreview()} before this. After jpeg callback is received,
- * applications can call {@link #startPreview()} to restart the preview.
+ * <p>This method is only valid when preview is active (after
+ * {@link #startPreview()}). Preview will be stopped after the image is
+ * taken; callers must call {@link #startPreview()} again if they want to
+ * re-start preview or take more pictures.
*
- * @param shutter callback after the image is captured, may be null
- * @param raw callback with raw image data, may be null
+ * <p>After calling this method, you must not call {@link #startPreview()}
+ * or take another picture until the JPEG callback has returned.
+ *
+ * @param shutter the callback for image capture moment, or null
+ * @param raw the callback for raw (uncompressed) image data, or null
* @param postview callback with postview image data, may be null
- * @param jpeg callback with jpeg image data, may be null
+ * @param jpeg the callback for JPEG image data, or null
*/
public final void takePicture(ShutterCallback shutter, PictureCallback raw,
PictureCallback postview, PictureCallback jpeg) {
@@ -583,26 +729,29 @@
}
/**
- * Zooms to the requested value smoothly. Driver will notify {@link
+ * Zooms to the requested value smoothly. The driver will notify {@link
* OnZoomChangeListener} of the zoom value and whether zoom is stopped at
* the time. For example, suppose the current zoom is 0 and startSmoothZoom
- * is called with value 3. Method onZoomChange will be called three times
- * with zoom value 1, 2, and 3. The applications can call {@link
- * #stopSmoothZoom} to stop the zoom earlier. The applications should not
- * call startSmoothZoom again or change the zoom value before zoom stops. If
- * the passing zoom value equals to the current zoom value, no zoom callback
- * will be generated. This method is supported if {@link
- * android.hardware.Camera.Parameters#isSmoothZoomSupported} is true.
+ * is called with value 3. The
+ * {@link Camera.OnZoomChangeListener#onZoomChange(int, boolean, Camera)}
+ * method will be called three times with zoom values 1, 2, and 3.
+ * Applications can call {@link #stopSmoothZoom} to stop the zoom earlier.
+ * Applications should not call startSmoothZoom again or change the zoom
+ * value before zoom stops. If the supplied zoom value is equal to the current
+ * zoom value, no zoom callback will be generated. This method is supported
+ * if {@link android.hardware.Camera.Parameters#isSmoothZoomSupported}
+ * returns true.
*
* @param value zoom value. The valid range is 0 to {@link
* android.hardware.Camera.Parameters#getMaxZoom}.
* @throws IllegalArgumentException if the zoom value is invalid.
* @throws RuntimeException if the method fails.
+ * @see #setZoomChangeListener(OnZoomChangeListener)
*/
public native final void startSmoothZoom(int value);
/**
- * Stops the smooth zoom. The applications should wait for the {@link
+ * Stops the smooth zoom. Applications should wait for the {@link
* OnZoomChangeListener} to know when the zoom is actually stopped. This
* method is supported if {@link
* android.hardware.Camera.Parameters#isSmoothZoomSupported} is true.
@@ -649,20 +798,21 @@
public native final void setDisplayOrientation(int degrees);
/**
- * Interface for a callback to be invoked when zoom value changes.
+ * Callback interface for zoom changes during a smooth zoom operation.
+ *
+ * @see #setZoomChangeListener(OnZoomChangeListener)
+ * @see #startSmoothZoom(int)
*/
public interface OnZoomChangeListener
{
/**
- * Called when the zoom value has changed.
+ * Called when the zoom value has changed during a smooth zoom.
*
* @param zoomValue the current zoom value. In smooth zoom mode, camera
* calls this for every new zoom value.
* @param stopped whether smooth zoom is stopped. If the value is true,
* this is the last zoom update for the application.
- *
* @param camera the Camera service object
- * @see #startSmoothZoom(int)
*/
void onZoomChange(int zoomValue, boolean stopped, Camera camera);
};
@@ -679,15 +829,25 @@
mZoomListener = listener;
}
- // These match the enum in include/ui/Camera.h
- /** Unspecified camerar error. @see #ErrorCallback */
+ // Error codes match the enum in include/ui/Camera.h
+
+ /**
+ * Unspecified camera error.
+ * @see Camera.ErrorCallback
+ */
public static final int CAMERA_ERROR_UNKNOWN = 1;
- /** Media server died. In this case, the application must release the
- * Camera object and instantiate a new one. @see #ErrorCallback */
+
+ /**
+ * Media server died. In this case, the application must release the
+ * Camera object and instantiate a new one.
+ * @see Camera.ErrorCallback
+ */
public static final int CAMERA_ERROR_SERVER_DIED = 100;
/**
- * Handles the camera error callback.
+ * Callback interface for camera error notification.
+ *
+ * @see #setErrorCallback(ErrorCallback)
*/
public interface ErrorCallback
{
@@ -705,7 +865,7 @@
/**
* Registers a callback to be invoked when an error occurs.
- * @param cb the callback to run
+ * @param cb The callback to run
*/
public final void setErrorCallback(ErrorCallback cb)
{
@@ -716,16 +876,21 @@
private native final String native_getParameters();
/**
- * Sets the Parameters for pictures from this Camera service.
+ * Changes the settings for this Camera service.
*
* @param params the Parameters to use for this Camera service
+ * @see #getParameters()
*/
public void setParameters(Parameters params) {
native_setParameters(params.flatten());
}
/**
- * Returns the picture Parameters for this Camera service.
+ * Returns the current settings for this Camera service.
+ * If modifications are made to the returned Parameters, they must be passed
+ * to {@link #setParameters(Camera.Parameters)} to take effect.
+ *
+ * @see #setParameters(Camera.Parameters)
*/
public Parameters getParameters() {
Parameters p = new Parameters();
@@ -735,7 +900,7 @@
}
/**
- * Handles the picture size (dimensions).
+ * Image size (width and height dimensions).
*/
public class Size {
/**
@@ -774,18 +939,21 @@
};
/**
- * Handles the parameters for pictures created by a Camera service.
+ * Camera service settings.
*
* <p>To make camera parameters take effect, applications have to call
- * Camera.setParameters. For example, after setWhiteBalance is called, white
- * balance is not changed until Camera.setParameters() is called.
+ * {@link Camera#setParameters(Camera.Parameters)}. For example, after
+ * {@link Camera.Parameters#setWhiteBalance} is called, white balance is not
+ * actually changed until {@link Camera#setParameters(Camera.Parameters)}
+ * is called with the changed parameters object.
*
* <p>Different devices may have different camera capabilities, such as
* picture size or flash modes. The application should query the camera
* capabilities before setting parameters. For example, the application
- * should call getSupportedColorEffects before calling setEffect. If the
- * camera does not support color effects, getSupportedColorEffects will
- * return null.
+ * should call {@link Camera.Parameters#getSupportedColorEffects()} before
+ * calling {@link Camera.Parameters#setColorEffect(String)}. If the
+ * camera does not support color effects,
+ * {@link Camera.Parameters#getSupportedColorEffects()} will return null.
*/
public class Parameters {
// Parameter keys to communicate with the camera driver.
diff --git a/core/java/android/os/BatteryStats.java b/core/java/android/os/BatteryStats.java
index e3f3b87..0afd6cd 100644
--- a/core/java/android/os/BatteryStats.java
+++ b/core/java/android/os/BatteryStats.java
@@ -1237,10 +1237,9 @@
linePrefix);
if (!linePrefix.equals(": ")) {
sb.append(" realtime");
- } else {
- sb.append(": (nothing executed)");
+ // Only print out wake locks that were held
+ pw.println(sb.toString());
}
- pw.println(sb.toString());
}
}
}
@@ -1453,11 +1452,10 @@
"window", which, linePrefix);
if (!linePrefix.equals(": ")) {
sb.append(" realtime");
- } else {
- sb.append(": (nothing executed)");
+ // Only print out wake locks that were held
+ pw.println(sb.toString());
+ uidActivity = true;
}
- pw.println(sb.toString());
- uidActivity = true;
}
}
diff --git a/core/java/android/os/Debug.java b/core/java/android/os/Debug.java
index d23b161..a58e70b 100644
--- a/core/java/android/os/Debug.java
+++ b/core/java/android/os/Debug.java
@@ -94,7 +94,8 @@
/**
* Default trace file path and file
*/
- private static final String DEFAULT_TRACE_PATH_PREFIX = "/sdcard/";
+ private static final String DEFAULT_TRACE_PATH_PREFIX =
+ Environment.getExternalStorageDirectory().getPath() + "/";
private static final String DEFAULT_TRACE_BODY = "dmtrace";
private static final String DEFAULT_TRACE_EXTENSION = ".trace";
private static final String DEFAULT_TRACE_FILE_PATH =
@@ -127,7 +128,7 @@
public int otherPrivateDirty;
/** The shared dirty pages used by everything else. */
public int otherSharedDirty;
-
+
public MemoryInfo() {
}
@@ -137,21 +138,21 @@
public int getTotalPss() {
return dalvikPss + nativePss + otherPss;
}
-
+
/**
* Return total private dirty memory usage in kB.
*/
public int getTotalPrivateDirty() {
return dalvikPrivateDirty + nativePrivateDirty + otherPrivateDirty;
}
-
+
/**
* Return total shared dirty memory usage in kB.
*/
public int getTotalSharedDirty() {
return dalvikSharedDirty + nativeSharedDirty + otherSharedDirty;
}
-
+
public int describeContents() {
return 0;
}
@@ -179,7 +180,7 @@
otherPrivateDirty = source.readInt();
otherSharedDirty = source.readInt();
}
-
+
public static final Creator<MemoryInfo> CREATOR = new Creator<MemoryInfo>() {
public MemoryInfo createFromParcel(Parcel source) {
return new MemoryInfo(source);
@@ -460,7 +461,7 @@
* Like startMethodTracing(String, int, int), but taking an already-opened
* FileDescriptor in which the trace is written. The file name is also
* supplied simply for logging. Makes a dup of the file descriptor.
- *
+ *
* Not exposed in the SDK unless we are really comfortable with supporting
* this and find it would be useful.
* @hide
@@ -1090,7 +1091,7 @@
* static {
* // Sets all the fields
* Debug.setFieldsOn(MyDebugVars.class);
- *
+ *
* // Sets only the fields annotated with @Debug.DebugProperty
* // Debug.setFieldsOn(MyDebugVars.class, true);
* }
diff --git a/core/java/android/os/Parcel.java b/core/java/android/os/Parcel.java
index b9c9565..31f8719 100644
--- a/core/java/android/os/Parcel.java
+++ b/core/java/android/os/Parcel.java
@@ -440,6 +440,12 @@
/**
* Write a FileDescriptor into the parcel at the current dataPosition(),
* growing dataCapacity() if needed.
+ *
+ * <p class="caution">The file descriptor will not be closed, which may
+ * result in file descriptor leaks when objects are returned from Binder
+ * calls. Use {@link ParcelFileDescriptor#writeToParcel} instead, which
+ * accepts contextual flags and will close the original file descriptor
+ * if {@link Parcelable#PARCELABLE_WRITE_RETURN_VALUE} is set.</p>
*/
public final native void writeFileDescriptor(FileDescriptor val);
@@ -1003,7 +1009,7 @@
/**
* Flatten a generic object in to a parcel. The given Object value may
* currently be one of the following types:
- *
+ *
* <ul>
* <li> null
* <li> String
@@ -1026,7 +1032,7 @@
* <li> Parcelable[]
* <li> CharSequence (as supported by {@link TextUtils#writeToParcel}).
* <li> List (as supported by {@link #writeList}).
- * <li> {@link SparseArray} (as supported by {@link #writeSparseArray}).
+ * <li> {@link SparseArray} (as supported by {@link #writeSparseArray(SparseArray)}).
* <li> {@link IBinder}
* <li> Any object that implements Serializable (but see
* {@link #writeSerializable} for caveats). Note that all of the
@@ -1035,6 +1041,13 @@
* approach is much less efficient and should be avoided whenever
* possible.
* </ul>
+ *
+ * <p class="caution">{@link Parcelable} objects are written with
+ * {@link Parcelable#writeToParcel} using contextual flags of 0. When
+ * serializing objects containing {@link ParcelFileDescriptor}s,
+ * this may result in file descriptor leaks when they are returned from
+ * Binder calls (where {@link Parcelable#PARCELABLE_WRITE_RETURN_VALUE}
+ * should be used).</p>
*/
public final void writeValue(Object v) {
if (v == null) {
@@ -1123,7 +1136,7 @@
/**
* Flatten the name of the class of the Parcelable and its contents
* into the parcel.
- *
+ *
* @param p The Parcelable object to be written.
* @param parcelableFlags Contextual flags as per
* {@link Parcelable#writeToParcel(Parcel, int) Parcelable.writeToParcel()}.
diff --git a/core/java/android/os/ParcelFileDescriptor.java b/core/java/android/os/ParcelFileDescriptor.java
index d26f066..9d213b3 100644
--- a/core/java/android/os/ParcelFileDescriptor.java
+++ b/core/java/android/os/ParcelFileDescriptor.java
@@ -250,6 +250,11 @@
return Parcelable.CONTENTS_FILE_DESCRIPTOR;
}
+ /**
+ * {@inheritDoc}
+ * If {@link Parcelable#PARCELABLE_WRITE_RETURN_VALUE} is set in flags,
+ * the file descriptor will be closed after a copy is written to the Parcel.
+ */
public void writeToParcel(Parcel out, int flags) {
out.writeFileDescriptor(mFileDescriptor);
if ((flags&PARCELABLE_WRITE_RETURN_VALUE) != 0 && !mClosed) {
diff --git a/core/java/android/provider/Telephony.java b/core/java/android/provider/Telephony.java
index bf9e854..d271e93 100644
--- a/core/java/android/provider/Telephony.java
+++ b/core/java/android/provider/Telephony.java
@@ -25,6 +25,7 @@
import android.database.Cursor;
import android.database.sqlite.SqliteWrapper;
import android.net.Uri;
+import android.os.Environment;
import android.telephony.SmsMessage;
import android.text.TextUtils;
import android.util.Config;
@@ -1526,7 +1527,8 @@
* which streams the captured image to the uri. Internally we write the media content
* to this file. It's named '.temp.jpg' so Gallery won't pick it up.
*/
- public static final String SCRAP_FILE_PATH = "/sdcard/mms/scrapSpace/.temp.jpg";
+ public static final String SCRAP_FILE_PATH =
+ Environment.getExternalStorageDirectory().getPath() + "/mms/scrapSpace/.temp.jpg";
}
public static final class Intents {
diff --git a/core/java/android/server/BluetoothService.java b/core/java/android/server/BluetoothService.java
index 8bee174..d7cd933 100644
--- a/core/java/android/server/BluetoothService.java
+++ b/core/java/android/server/BluetoothService.java
@@ -676,6 +676,10 @@
removeProfileState(address);
}
+ // HID is handled by BluetoothService, other profiles
+ // will be handled by their respective services.
+ setInitialInputDevicePriority(mAdapter.getRemoteDevice(address), state);
+
if (DBG) log(address + " bond state " + oldState + " -> " + state + " (" +
reason + ")");
Intent intent = new Intent(BluetoothDevice.ACTION_BOND_STATE_CHANGED);
@@ -1347,6 +1351,19 @@
handleInputDeviceStateChange(device, state);
}
+ private void setInitialInputDevicePriority(BluetoothDevice device, int state) {
+ switch (state) {
+ case BluetoothDevice.BOND_BONDED:
+ if (getInputDevicePriority(device) == BluetoothInputDevice.PRIORITY_UNDEFINED) {
+ setInputDevicePriority(device, BluetoothInputDevice.PRIORITY_ON);
+ }
+ break;
+ case BluetoothDevice.BOND_NONE:
+ setInputDevicePriority(device, BluetoothInputDevice.PRIORITY_UNDEFINED);
+ break;
+ }
+ }
+
/*package*/ boolean isRemoteDeviceInCache(String address) {
return (mDeviceProperties.get(address) != null);
}
diff --git a/core/java/android/view/GLES20Canvas.java b/core/java/android/view/GLES20Canvas.java
index b811b7b..1424638 100644
--- a/core/java/android/view/GLES20Canvas.java
+++ b/core/java/android/view/GLES20Canvas.java
@@ -32,6 +32,11 @@
import android.graphics.Region;
import android.graphics.Shader;
import android.graphics.SweepGradient;
+import android.graphics.TemporaryBuffer;
+import android.text.GraphicsOperations;
+import android.text.SpannableString;
+import android.text.SpannedString;
+import android.text.TextUtils;
import javax.microedition.khronos.opengles.GL;
@@ -574,22 +579,46 @@
@Override
public void drawText(char[] text, int index, int count, float x, float y, Paint paint) {
- // TODO: Implement
+ if ((index | count | (index + count) | (text.length - index - count)) < 0) {
+ throw new IndexOutOfBoundsException();
+ }
+ nDrawText(mRenderer, text, index, count, x, y, paint.mBidiFlags, paint.mNativePaint);
}
+
+ private native void nDrawText(int renderer, char[] text, int index, int count, float x, float y,
+ int bidiFlags, int paint);
@Override
public void drawText(CharSequence text, int start, int end, float x, float y, Paint paint) {
- // TODO: Implement
+ if (text instanceof String || text instanceof SpannedString ||
+ text instanceof SpannableString) {
+ nDrawText(mRenderer, text.toString(), start, end, x, y, paint.mBidiFlags,
+ paint.mNativePaint);
+ } else if (text instanceof GraphicsOperations) {
+ ((GraphicsOperations) text).drawText(this, start, end, x, y,
+ paint);
+ } else {
+ char[] buf = TemporaryBuffer.obtain(end - start);
+ TextUtils.getChars(text, start, end, buf, 0);
+ nDrawText(mRenderer, buf, 0, end - start, x, y, paint.mBidiFlags, paint.mNativePaint);
+ TemporaryBuffer.recycle(buf);
+ }
}
@Override
public void drawText(String text, int start, int end, float x, float y, Paint paint) {
- // TODO: Implement
+ if ((start | end | (end - start) | (text.length() - end)) < 0) {
+ throw new IndexOutOfBoundsException();
+ }
+ nDrawText(mRenderer, text, start, end, x, y, paint.mBidiFlags, paint.mNativePaint);
}
+ private native void nDrawText(int renderer, String text, int start, int end, float x, float y,
+ int bidiFlags, int paint);
+
@Override
public void drawText(String text, float x, float y, Paint paint) {
- drawText(text, 0, text.length(), x, y, paint);
+ nDrawText(mRenderer, text, 0, text.length(), x, y, paint.mBidiFlags, paint.mNativePaint);
}
@Override
diff --git a/core/java/com/android/internal/app/ActionBarImpl.java b/core/java/com/android/internal/app/ActionBarImpl.java
index f37021b..ec6d2be 100644
--- a/core/java/com/android/internal/app/ActionBarImpl.java
+++ b/core/java/com/android/internal/app/ActionBarImpl.java
@@ -16,13 +16,16 @@
package com.android.internal.app;
-import com.android.internal.view.menu.ActionMenu;
-import com.android.internal.view.menu.ActionMenuItem;
+import com.android.internal.view.menu.MenuBuilder;
+import com.android.internal.view.menu.MenuItemImpl;
+import com.android.internal.view.menu.MenuPopupHelper;
+import com.android.internal.view.menu.SubMenuBuilder;
import com.android.internal.widget.ActionBarContextView;
import com.android.internal.widget.ActionBarView;
import android.app.ActionBar;
import android.app.Activity;
+import android.app.ContextualMode;
import android.app.Fragment;
import android.app.FragmentTransaction;
import android.graphics.drawable.Drawable;
@@ -194,11 +197,8 @@
return mActionView.getDisplayOptions();
}
- @Override
- public void startContextMode(ContextModeCallback callback) {
- if (mContextMode != null) {
- mContextMode.finish();
- }
+ public ContextualMode startContextualMode(ContextualMode.Callback callback) {
+ finishContextualMode();
// Don't wait for the close context mode animation to finish.
if (mClosingContext) {
@@ -207,20 +207,22 @@
mCloseContext.run();
}
- mContextMode = new ContextMode(callback);
- if (callback.onCreateContextMode(mContextMode, mContextMode.getMenu())) {
- mContextMode.invalidate();
- mUpperContextView.initForMode(mContextMode);
+ ContextMode mode = new ContextMode(callback);
+ if (callback.onCreateContextMode(mode, mode.getMenu())) {
+ mode.invalidate();
+ mUpperContextView.initForMode(mode);
mAnimatorView.setDisplayedChild(CONTEXT_VIEW);
if (mLowerContextView != null) {
// TODO animate this
mLowerContextView.setVisibility(View.VISIBLE);
}
+ mContextMode = mode;
+ return mode;
}
+ return null;
}
- @Override
- public void finishContextMode() {
+ public void finishContextualMode() {
if (mContextMode != null) {
mContextMode.finish();
}
@@ -336,14 +338,15 @@
/**
* @hide
*/
- public class ContextMode extends ActionBar.ContextMode {
- private ContextModeCallback mCallback;
- private ActionMenu mMenu;
+ public class ContextMode extends ContextualMode implements MenuBuilder.Callback {
+ private ContextualMode.Callback mCallback;
+ private MenuBuilder mMenu;
private WeakReference<View> mCustomView;
- public ContextMode(ContextModeCallback callback) {
+ public ContextMode(ContextualMode.Callback callback) {
mCallback = callback;
- mMenu = new ActionMenu(mActionView.getContext());
+ mMenu = new MenuBuilder(mActionView.getContext());
+ mMenu.setCallback(this);
}
@Override
@@ -405,12 +408,27 @@
return mCustomView != null ? mCustomView.get() : null;
}
- public void dispatchOnContextItemClicked(MenuItem item) {
- ActionMenuItem actionItem = (ActionMenuItem) item;
- if (!actionItem.invoke()) {
- mCallback.onContextItemClicked(this, item);
+ public boolean onMenuItemSelected(MenuBuilder menu, MenuItem item) {
+ return mCallback.onContextItemClicked(this, item);
+ }
+
+ public void onCloseMenu(MenuBuilder menu, boolean allMenusAreClosing) {
+ }
+
+ public boolean onSubMenuSelected(SubMenuBuilder subMenu) {
+ if (!subMenu.hasVisibleItems()) {
+ return true;
}
- }
+
+ new MenuPopupHelper(mActivity, subMenu).show();
+ return true;
+ }
+
+ public void onCloseSubMenu(SubMenuBuilder menu) {
+ }
+
+ public void onMenuModeChange(MenuBuilder menu) {
+ }
}
/**
diff --git a/core/java/com/android/internal/view/menu/ActionMenuView.java b/core/java/com/android/internal/view/menu/ActionMenuView.java
index 7024a27..e281536 100644
--- a/core/java/com/android/internal/view/menu/ActionMenuView.java
+++ b/core/java/com/android/internal/view/menu/ActionMenuView.java
@@ -71,6 +71,10 @@
return mReserveOverflow;
}
+ public void setOverflowReserved(boolean reserveOverflow) {
+ mReserveOverflow = reserveOverflow;
+ }
+
@Override
protected boolean checkLayoutParams(ViewGroup.LayoutParams p) {
if (p instanceof LayoutParams) {
diff --git a/core/java/com/android/internal/widget/ActionBarContextView.java b/core/java/com/android/internal/widget/ActionBarContextView.java
index cd9832f..08405a3 100644
--- a/core/java/com/android/internal/widget/ActionBarContextView.java
+++ b/core/java/com/android/internal/widget/ActionBarContextView.java
@@ -16,20 +16,17 @@
package com.android.internal.widget;
import com.android.internal.R;
-import com.android.internal.app.ActionBarImpl;
+import com.android.internal.view.menu.ActionMenuView;
+import com.android.internal.view.menu.MenuBuilder;
-import android.app.ActionBar;
+import android.app.ContextualMode;
import android.content.Context;
import android.content.res.TypedArray;
import android.graphics.drawable.Drawable;
import android.util.AttributeSet;
import android.view.LayoutInflater;
-import android.view.Menu;
-import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
-import android.view.View.MeasureSpec;
-import android.view.ViewGroup.LayoutParams;
import android.widget.ImageButton;
import android.widget.LinearLayout;
import android.widget.TextView;
@@ -43,6 +40,7 @@
private int mItemPadding;
private int mItemMargin;
+ private int mActionSpacing;
private int mContentHeight;
private CharSequence mTitle;
@@ -54,6 +52,7 @@
private TextView mTitleView;
private TextView mSubtitleView;
private Drawable mCloseDrawable;
+ private ActionMenuView mMenuView;
public ActionBarContextView(Context context) {
this(context, null, 0);
@@ -136,49 +135,30 @@
}
}
- public void initForMode(final ActionBar.ContextMode mode) {
- final ActionBarImpl.ContextMode implMode = (ActionBarImpl.ContextMode) mode;
-
+ public void initForMode(final ContextualMode mode) {
if (mCloseButton == null) {
mCloseButton = new ImageButton(getContext());
mCloseButton.setImageDrawable(mCloseDrawable);
mCloseButton.setBackgroundDrawable(null);
- mCloseButton.setOnClickListener(new OnClickListener() {
- public void onClick(View v) {
- mode.finish();
- }
- });
}
+ mCloseButton.setOnClickListener(new OnClickListener() {
+ public void onClick(View v) {
+ mode.finish();
+ }
+ });
addView(mCloseButton);
- final Context context = getContext();
- final Menu menu = mode.getMenu();
- final int itemCount = menu.size();
- for (int i = 0; i < itemCount; i++) {
- final MenuItem item = menu.getItem(i);
- final ImageButton button = new ImageButton(context, null,
- com.android.internal.R.attr.actionButtonStyle);
- button.setClickable(true);
- button.setFocusable(true);
- button.setImageDrawable(item.getIcon());
- button.setId(item.getItemId());
- button.setVisibility(item.isVisible() ? VISIBLE : GONE);
- button.setEnabled(item.isEnabled());
-
- button.setOnClickListener(new OnClickListener() {
- public void onClick(View v) {
- implMode.dispatchOnContextItemClicked(item);
- }
- });
-
- addView(button);
- }
- requestLayout();
+ final MenuBuilder menu = (MenuBuilder) mode.getMenu();
+ mMenuView = (ActionMenuView) menu.getMenuView(MenuBuilder.TYPE_ACTION_BUTTON, this);
+ mMenuView.setOverflowReserved(true);
+ mMenuView.updateChildren(false);
+ addView(mMenuView);
}
public void closeMode() {
removeAllViews();
mCustomView = null;
+ mMenuView = null;
}
@Override
@@ -266,15 +246,10 @@
}
x = r - l - getPaddingRight();
-
- final int childCount = getChildCount();
- for (int i = 0; i < childCount; i++) {
- final View child = getChildAt(i);
- if (child == mCloseButton || child == mTitleLayout || child == mCustomView) {
- continue;
- }
- x -= positionChildInverse(child, x, y, contentHeight) + itemMargin;
+ if (mMenuView != null) {
+ x -= positionChildInverse(mMenuView, x + mActionSpacing, y, contentHeight)
+ - mActionSpacing;
}
}
diff --git a/core/jni/Android.mk b/core/jni/Android.mk
index c1921aa..fea5ae3 100644
--- a/core/jni/Android.mk
+++ b/core/jni/Android.mk
@@ -124,7 +124,6 @@
android_bluetooth_common.cpp \
android_bluetooth_BluetoothAudioGateway.cpp \
android_bluetooth_BluetoothSocket.cpp \
- android_bluetooth_ScoSocket.cpp \
android_server_BluetoothService.cpp \
android_server_BluetoothEventLoop.cpp \
android_server_BluetoothA2dpService.cpp \
diff --git a/core/jni/AndroidRuntime.cpp b/core/jni/AndroidRuntime.cpp
index d70f64f..3b91710 100644
--- a/core/jni/AndroidRuntime.cpp
+++ b/core/jni/AndroidRuntime.cpp
@@ -152,7 +152,6 @@
extern int register_android_bluetooth_HeadsetBase(JNIEnv* env);
extern int register_android_bluetooth_BluetoothAudioGateway(JNIEnv* env);
extern int register_android_bluetooth_BluetoothSocket(JNIEnv *env);
-extern int register_android_bluetooth_ScoSocket(JNIEnv *env);
extern int register_android_server_BluetoothService(JNIEnv* env);
extern int register_android_server_BluetoothEventLoop(JNIEnv *env);
extern int register_android_server_BluetoothA2dpService(JNIEnv* env);
@@ -1286,7 +1285,6 @@
REG_JNI(register_android_bluetooth_HeadsetBase),
REG_JNI(register_android_bluetooth_BluetoothAudioGateway),
REG_JNI(register_android_bluetooth_BluetoothSocket),
- REG_JNI(register_android_bluetooth_ScoSocket),
REG_JNI(register_android_server_BluetoothService),
REG_JNI(register_android_server_BluetoothEventLoop),
REG_JNI(register_android_server_BluetoothA2dpService),
diff --git a/core/jni/android/graphics/Typeface.cpp b/core/jni/android/graphics/Typeface.cpp
index 7c7bfeb..1fe72e6 100644
--- a/core/jni/android/graphics/Typeface.cpp
+++ b/core/jni/android/graphics/Typeface.cpp
@@ -130,7 +130,13 @@
return NULL;
}
- return SkTypeface::CreateFromStream(new AssetStream(asset, true));
+ SkStream* stream = new AssetStream(asset, true);
+ SkTypeface* face = SkTypeface::CreateFromStream(stream);
+ // SkTypeFace::CreateFromStream calls ref() on the stream, so we
+ // need to unref it here or it won't be freed later on
+ stream->unref();
+
+ return face;
}
static SkTypeface* Typeface_createFromFile(JNIEnv* env, jobject, jstring jpath) {
diff --git a/core/jni/android_bluetooth_ScoSocket.cpp b/core/jni/android_bluetooth_ScoSocket.cpp
deleted file mode 100644
index 94e4409..0000000
--- a/core/jni/android_bluetooth_ScoSocket.cpp
+++ /dev/null
@@ -1,689 +0,0 @@
-/*
-** Copyright 2008, The Android Open Source Project
-**
-** Licensed under the Apache License, Version 2.0 (the "License");
-** you may not use this file except in compliance with the License.
-** You may obtain a copy of the License at
-**
-** http://www.apache.org/licenses/LICENSE-2.0
-**
-** Unless required by applicable law or agreed to in writing, software
-** distributed under the License is distributed on an "AS IS" BASIS,
-** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-** See the License for the specific language governing permissions and
-** limitations under the License.
-*/
-
-#define LOG_TAG "bluetooth_ScoSocket.cpp"
-
-#include "android_bluetooth_common.h"
-#include "android_runtime/AndroidRuntime.h"
-#include "JNIHelp.h"
-#include "jni.h"
-#include "utils/Log.h"
-#include "utils/misc.h"
-
-#include <stdio.h>
-#include <string.h>
-#include <stdlib.h>
-#include <errno.h>
-#include <unistd.h>
-#include <pthread.h>
-#include <sys/socket.h>
-#include <sys/types.h>
-#include <sys/uio.h>
-#include <sys/poll.h>
-
-#ifdef HAVE_BLUETOOTH
-#include <bluetooth/bluetooth.h>
-#include <bluetooth/sco.h>
-#include <bluetooth/hci.h>
-
-#define MAX_LINE 255
-
-/*
- * Defines the module strings used in the blacklist file.
- * These are used by consumers of the blacklist file to see if the line is
- * used by that module.
- */
-#define SCO_BLACKLIST_MODULE_NAME "scoSocket"
-
-
-/* Define the type strings used in the blacklist file. */
-#define BLACKLIST_BY_NAME "name"
-#define BLACKLIST_BY_PARTIAL_NAME "partial_name"
-#define BLACKLIST_BY_OUI "vendor_oui"
-
-#endif
-
-/* Ideally, blocking I/O on a SCO socket would return when another thread
- * calls close(). However it does not right now, in fact close() on a SCO
- * socket has strange behavior (returns a bogus value) when other threads
- * are performing blocking I/O on that socket. So, to workaround, we always
- * call close() from the same thread that does blocking I/O. This requires the
- * use of a socketpair to signal the blocking I/O to abort.
- *
- * Unfortunately I don't know a way to abort connect() yet, but at least this
- * times out after the BT page timeout (10 seconds currently), so the thread
- * will die eventually. The fact that the thread can outlive
- * the Java object forces us to use a mutex in destoryNative().
- *
- * The JNI API is entirely async.
- *
- * Also note this class deals only with SCO connections, not with data
- * transmission.
- */
-namespace android {
-#ifdef HAVE_BLUETOOTH
-
-static JavaVM *jvm;
-static jfieldID field_mNativeData;
-static jmethodID method_onAccepted;
-static jmethodID method_onConnected;
-static jmethodID method_onClosed;
-
-struct thread_data_t;
-static void *work_thread(void *arg);
-static int connect_work(const char *address, uint16_t sco_pkt_type);
-static int accept_work(int signal_sk);
-static void wait_for_close(int sk, int signal_sk);
-static void closeNative(JNIEnv *env, jobject object);
-
-static void parseBlacklist(void);
-static uint16_t getScoType(char *address, const char *name);
-
-#define COMPARE_STRING(key, s) (!strncmp(key, s, strlen(s)))
-
-/* Blacklist data */
-typedef struct scoBlacklist {
- int fieldType;
- char *value;
- uint16_t scoType;
- struct scoBlacklist *next;
-} scoBlacklist_t;
-
-#define BL_TYPE_NAME 1 // Field type is name string
-
-static scoBlacklist_t *blacklist = NULL;
-
-/* shared native data - protected by mutex */
-typedef struct {
- pthread_mutex_t mutex;
- int signal_sk; // socket to signal blocked I/O to unblock
- jobject object; // JNI global ref to the Java object
- thread_data_t *thread_data; // pointer to thread local data
- // max 1 thread per sco socket
-} native_data_t;
-
-/* thread local data */
-struct thread_data_t {
- native_data_t *nat;
- bool is_accept; // accept (listening) or connect (outgoing) thread
- int signal_sk; // socket for thread to listen for unblock signal
- char address[BTADDR_SIZE]; // BT addres as string
- uint16_t sco_pkt_type; // SCO packet types supported
-};
-
-static inline native_data_t * get_native_data(JNIEnv *env, jobject object) {
- return (native_data_t *)(env->GetIntField(object, field_mNativeData));
-}
-
-static uint16_t str2scoType (char *key) {
- LOGV("%s: key = %s", __FUNCTION__, key);
- if (COMPARE_STRING(key, "ESCO_HV1"))
- return ESCO_HV1;
- if (COMPARE_STRING(key, "ESCO_HV2"))
- return ESCO_HV2;
- if (COMPARE_STRING(key, "ESCO_HV3"))
- return ESCO_HV3;
- if (COMPARE_STRING(key, "ESCO_EV3"))
- return ESCO_EV3;
- if (COMPARE_STRING(key, "ESCO_EV4"))
- return ESCO_EV4;
- if (COMPARE_STRING(key, "ESCO_EV5"))
- return ESCO_EV5;
- if (COMPARE_STRING(key, "ESCO_2EV3"))
- return ESCO_2EV3;
- if (COMPARE_STRING(key, "ESCO_3EV3"))
- return ESCO_3EV3;
- if (COMPARE_STRING(key, "ESCO_2EV5"))
- return ESCO_2EV5;
- if (COMPARE_STRING(key, "ESCO_3EV5"))
- return ESCO_3EV5;
- if (COMPARE_STRING(key, "SCO_ESCO_MASK"))
- return SCO_ESCO_MASK;
- if (COMPARE_STRING(key, "EDR_ESCO_MASK"))
- return EDR_ESCO_MASK;
- if (COMPARE_STRING(key, "ALL_ESCO_MASK"))
- return ALL_ESCO_MASK;
- LOGE("Unknown SCO Type (%s) skipping",key);
- return 0;
-}
-
-static void parseBlacklist(void) {
- const char *filename = "/etc/bluetooth/blacklist.conf";
- char line[MAX_LINE];
- scoBlacklist_t *list = NULL;
- scoBlacklist_t *newelem;
-
- LOGV(__FUNCTION__);
-
- /* Open file */
- FILE *fp = fopen(filename, "r");
- if(!fp) {
- LOGE("Error(%s)opening blacklist file", strerror(errno));
- return;
- }
-
- while (fgets(line, MAX_LINE, fp) != NULL) {
- if ((COMPARE_STRING(line, "//")) || (!strcmp(line, "")))
- continue;
- char *module = strtok(line,":");
- if (COMPARE_STRING(module, SCO_BLACKLIST_MODULE_NAME)) {
- newelem = (scoBlacklist_t *)calloc(1, sizeof(scoBlacklist_t));
- if (newelem == NULL) {
- LOGE("%s: out of memory!", __FUNCTION__);
- return;
- }
- // parse line
- char *type = strtok(NULL, ",");
- char *valueList = strtok(NULL, ",");
- char *paramList = strtok(NULL, ",");
- if (COMPARE_STRING(type, BLACKLIST_BY_NAME)) {
- // Extract Name from Value list
- newelem->fieldType = BL_TYPE_NAME;
- newelem->value = (char *)calloc(1, strlen(valueList));
- if (newelem->value == NULL) {
- LOGE("%s: out of memory!", __FUNCTION__);
- continue;
- }
- valueList++; // Skip open quote
- strncpy(newelem->value, valueList, strlen(valueList) - 1);
-
- // Get Sco Settings from Parameters
- char *param = strtok(paramList, ";");
- uint16_t scoTypes = 0;
- while (param != NULL) {
- uint16_t sco;
- if (param[0] == '-') {
- param++;
- sco = str2scoType(param);
- if (sco != 0)
- scoTypes &= ~sco;
- } else if (param[0] == '+') {
- param++;
- sco = str2scoType(param);
- if (sco != 0)
- scoTypes |= sco;
- } else if (param[0] == '=') {
- param++;
- sco = str2scoType(param);
- if (sco != 0)
- scoTypes = sco;
- } else {
- LOGE("Invalid SCO type must be =, + or -");
- }
- param = strtok(NULL, ";");
- }
- newelem->scoType = scoTypes;
- } else {
- LOGE("Unknown SCO type entry in Blacklist file");
- continue;
- }
- if (list) {
- list->next = newelem;
- list = newelem;
- } else {
- blacklist = list = newelem;
- }
- LOGI("Entry name = %s ScoTypes = 0x%x", newelem->value,
- newelem->scoType);
- }
- }
- fclose(fp);
- return;
-}
-static uint16_t getScoType(char *address, const char *name) {
- uint16_t ret = 0;
- scoBlacklist_t *list = blacklist;
-
- while (list != NULL) {
- if (list->fieldType == BL_TYPE_NAME) {
- if (COMPARE_STRING(name, list->value)) {
- ret = list->scoType;
- break;
- }
- }
- list = list->next;
- }
- LOGI("%s %s - 0x%x", __FUNCTION__, name, ret);
- return ret;
-}
-#endif
-
-static void classInitNative(JNIEnv* env, jclass clazz) {
- LOGV(__FUNCTION__);
-#ifdef HAVE_BLUETOOTH
- if (env->GetJavaVM(&jvm) < 0) {
- LOGE("Could not get handle to the VM");
- }
- field_mNativeData = get_field(env, clazz, "mNativeData", "I");
- method_onAccepted = env->GetMethodID(clazz, "onAccepted", "(I)V");
- method_onConnected = env->GetMethodID(clazz, "onConnected", "(I)V");
- method_onClosed = env->GetMethodID(clazz, "onClosed", "()V");
-
- /* Read the blacklist file in here */
- parseBlacklist();
-#endif
-}
-
-/* Returns false if a serious error occured */
-static jboolean initNative(JNIEnv* env, jobject object) {
- LOGV(__FUNCTION__);
-#ifdef HAVE_BLUETOOTH
-
- native_data_t *nat = (native_data_t *) calloc(1, sizeof(native_data_t));
- if (nat == NULL) {
- LOGE("%s: out of memory!", __FUNCTION__);
- return JNI_FALSE;
- }
-
- pthread_mutex_init(&nat->mutex, NULL);
- env->SetIntField(object, field_mNativeData, (jint)nat);
- nat->signal_sk = -1;
- nat->object = NULL;
- nat->thread_data = NULL;
-
-#endif
- return JNI_TRUE;
-}
-
-static void destroyNative(JNIEnv* env, jobject object) {
- LOGV(__FUNCTION__);
-#ifdef HAVE_BLUETOOTH
- native_data_t *nat = get_native_data(env, object);
-
- closeNative(env, object);
-
- pthread_mutex_lock(&nat->mutex);
- if (nat->thread_data != NULL) {
- nat->thread_data->nat = NULL;
- }
- pthread_mutex_unlock(&nat->mutex);
- pthread_mutex_destroy(&nat->mutex);
-
- free(nat);
-#endif
-}
-
-static jboolean acceptNative(JNIEnv *env, jobject object) {
- LOGV(__FUNCTION__);
-#ifdef HAVE_BLUETOOTH
- native_data_t *nat = get_native_data(env, object);
- int signal_sks[2];
- pthread_t thread;
- struct thread_data_t *data = NULL;
-
- pthread_mutex_lock(&nat->mutex);
- if (nat->signal_sk != -1) {
- pthread_mutex_unlock(&nat->mutex);
- return JNI_FALSE;
- }
-
- // setup socketpair to pass messages between threads
- if (socketpair(AF_UNIX, SOCK_STREAM, 0, signal_sks) < 0) {
- LOGE("%s: socketpair() failed: %s", __FUNCTION__, strerror(errno));
- pthread_mutex_unlock(&nat->mutex);
- return JNI_FALSE;
- }
- nat->signal_sk = signal_sks[0];
- nat->object = env->NewGlobalRef(object);
-
- data = (thread_data_t *)calloc(1, sizeof(thread_data_t));
- if (data == NULL) {
- LOGE("%s: out of memory", __FUNCTION__);
- pthread_mutex_unlock(&nat->mutex);
- return JNI_FALSE;
- }
- nat->thread_data = data;
- pthread_mutex_unlock(&nat->mutex);
-
- data->signal_sk = signal_sks[1];
- data->nat = nat;
- data->is_accept = true;
-
- if (pthread_create(&thread, NULL, &work_thread, (void *)data) < 0) {
- LOGE("%s: pthread_create() failed: %s", __FUNCTION__, strerror(errno));
- return JNI_FALSE;
- }
- return JNI_TRUE;
-
-#endif
- return JNI_FALSE;
-}
-
-static jboolean connectNative(JNIEnv *env, jobject object, jstring address,
- jstring name) {
-
- LOGV(__FUNCTION__);
-#ifdef HAVE_BLUETOOTH
- native_data_t *nat = get_native_data(env, object);
- int signal_sks[2];
- pthread_t thread;
- struct thread_data_t *data;
- const char *c_address;
- const char *c_name;
-
- pthread_mutex_lock(&nat->mutex);
- if (nat->signal_sk != -1) {
- pthread_mutex_unlock(&nat->mutex);
- return JNI_FALSE;
- }
-
- // setup socketpair to pass messages between threads
- if (socketpair(AF_UNIX, SOCK_STREAM, 0, signal_sks) < 0) {
- LOGE("%s: socketpair() failed: %s\n", __FUNCTION__, strerror(errno));
- pthread_mutex_unlock(&nat->mutex);
- return JNI_FALSE;
- }
- nat->signal_sk = signal_sks[0];
- nat->object = env->NewGlobalRef(object);
-
- data = (thread_data_t *)calloc(1, sizeof(thread_data_t));
- if (data == NULL) {
- LOGE("%s: out of memory", __FUNCTION__);
- pthread_mutex_unlock(&nat->mutex);
- return JNI_FALSE;
- }
- pthread_mutex_unlock(&nat->mutex);
-
- data->signal_sk = signal_sks[1];
- data->nat = nat;
- c_address = env->GetStringUTFChars(address, NULL);
- strlcpy(data->address, c_address, BTADDR_SIZE);
- env->ReleaseStringUTFChars(address, c_address);
- data->is_accept = false;
-
- if (name == NULL) {
- LOGE("%s: Null pointer passed in for device name", __FUNCTION__);
- data->sco_pkt_type = 0;
- } else {
- c_name = env->GetStringUTFChars(name, NULL);
- /* See if this device is in the black list */
- data->sco_pkt_type = getScoType(data->address, c_name);
- env->ReleaseStringUTFChars(name, c_name);
- }
- if (pthread_create(&thread, NULL, &work_thread, (void *)data) < 0) {
- LOGE("%s: pthread_create() failed: %s", __FUNCTION__, strerror(errno));
- return JNI_FALSE;
- }
- return JNI_TRUE;
-
-#endif
- return JNI_FALSE;
-}
-
-static void closeNative(JNIEnv *env, jobject object) {
- LOGV(__FUNCTION__);
-#ifdef HAVE_BLUETOOTH
- native_data_t *nat = get_native_data(env, object);
- int signal_sk;
-
- pthread_mutex_lock(&nat->mutex);
- signal_sk = nat->signal_sk;
- nat->signal_sk = -1;
- env->DeleteGlobalRef(nat->object);
- nat->object = NULL;
- pthread_mutex_unlock(&nat->mutex);
-
- if (signal_sk >= 0) {
- LOGV("%s: signal_sk = %d", __FUNCTION__, signal_sk);
- unsigned char dummy;
- write(signal_sk, &dummy, sizeof(dummy));
- close(signal_sk);
- }
-#endif
-}
-
-#ifdef HAVE_BLUETOOTH
-/* thread entry point */
-static void *work_thread(void *arg) {
- JNIEnv* env;
- thread_data_t *data = (thread_data_t *)arg;
- int sk;
-
- LOGV(__FUNCTION__);
- if (jvm->AttachCurrentThread(&env, NULL) != JNI_OK) {
- LOGE("%s: AttachCurrentThread() failed", __FUNCTION__);
- return NULL;
- }
-
- /* connect the SCO socket */
- if (data->is_accept) {
- LOGV("SCO OBJECT %p ACCEPT #####", data->nat->object);
- sk = accept_work(data->signal_sk);
- LOGV("SCO OBJECT %p END ACCEPT *****", data->nat->object);
- } else {
- sk = connect_work(data->address, data->sco_pkt_type);
- }
-
- /* callback with connection result */
- if (data->nat == NULL) {
- LOGV("%s: object destroyed!", __FUNCTION__);
- goto done;
- }
- pthread_mutex_lock(&data->nat->mutex);
- if (data->nat->object == NULL) {
- pthread_mutex_unlock(&data->nat->mutex);
- LOGV("%s: callback cancelled", __FUNCTION__);
- goto done;
- }
- if (data->is_accept) {
- env->CallVoidMethod(data->nat->object, method_onAccepted, sk);
- } else {
- env->CallVoidMethod(data->nat->object, method_onConnected, sk);
- }
- pthread_mutex_unlock(&data->nat->mutex);
-
- if (sk < 0) {
- goto done;
- }
-
- LOGV("SCO OBJECT %p %d CONNECTED +++ (%s)", data->nat->object, sk,
- data->is_accept ? "in" : "out");
-
- /* wait for the socket to close */
- LOGV("wait_for_close()...");
- wait_for_close(sk, data->signal_sk);
- LOGV("wait_for_close() returned");
-
- /* callback with close result */
- if (data->nat == NULL) {
- LOGV("%s: object destroyed!", __FUNCTION__);
- goto done;
- }
- pthread_mutex_lock(&data->nat->mutex);
- if (data->nat->object == NULL) {
- LOGV("%s: callback cancelled", __FUNCTION__);
- } else {
- env->CallVoidMethod(data->nat->object, method_onClosed);
- }
- pthread_mutex_unlock(&data->nat->mutex);
-
-done:
- if (sk >= 0) {
- close(sk);
- LOGV("SCO OBJECT %p %d CLOSED --- (%s)", data->nat->object, sk, data->is_accept ? "in" : "out");
- }
- if (data->signal_sk >= 0) {
- close(data->signal_sk);
- }
- LOGV("SCO socket closed");
-
- if (data->nat != NULL) {
- pthread_mutex_lock(&data->nat->mutex);
- env->DeleteGlobalRef(data->nat->object);
- data->nat->object = NULL;
- data->nat->thread_data = NULL;
- pthread_mutex_unlock(&data->nat->mutex);
- }
-
- free(data);
- if (jvm->DetachCurrentThread() != JNI_OK) {
- LOGE("%s: DetachCurrentThread() failed", __FUNCTION__);
- }
-
- LOGV("work_thread() done");
- return NULL;
-}
-
-static int accept_work(int signal_sk) {
- LOGV(__FUNCTION__);
- int sk;
- int nsk;
- int addr_sz;
- int max_fd;
- fd_set fds;
- struct sockaddr_sco addr;
-
- sk = socket(PF_BLUETOOTH, SOCK_SEQPACKET, BTPROTO_SCO);
- if (sk < 0) {
- LOGE("%s socket() failed: %s", __FUNCTION__, strerror(errno));
- return -1;
- }
-
- memset(&addr, 0, sizeof(addr));
- addr.sco_family = AF_BLUETOOTH;
- memcpy(&addr.sco_bdaddr, BDADDR_ANY, sizeof(bdaddr_t));
- if (bind(sk, (struct sockaddr *) &addr, sizeof(addr)) < 0) {
- LOGE("%s bind() failed: %s", __FUNCTION__, strerror(errno));
- goto error;
- }
-
- if (listen(sk, 1)) {
- LOGE("%s: listen() failed: %s", __FUNCTION__, strerror(errno));
- goto error;
- }
-
- memset(&addr, 0, sizeof(addr));
- addr_sz = sizeof(addr);
-
- FD_ZERO(&fds);
- FD_SET(sk, &fds);
- FD_SET(signal_sk, &fds);
-
- max_fd = (sk > signal_sk) ? sk : signal_sk;
- LOGI("Listening SCO socket...");
- while (select(max_fd + 1, &fds, NULL, NULL, NULL) < 0) {
- if (errno != EINTR) {
- LOGE("%s: select() failed: %s", __FUNCTION__, strerror(errno));
- goto error;
- }
- LOGV("%s: select() EINTR, retrying", __FUNCTION__);
- }
- LOGV("select() returned");
- if (FD_ISSET(signal_sk, &fds)) {
- // signal to cancel listening
- LOGV("cancelled listening socket, closing");
- goto error;
- }
- if (!FD_ISSET(sk, &fds)) {
- LOGE("error: select() returned >= 0 with no fds set");
- goto error;
- }
-
- nsk = accept(sk, (struct sockaddr *)&addr, &addr_sz);
- if (nsk < 0) {
- LOGE("%s: accept() failed: %s", __FUNCTION__, strerror(errno));
- goto error;
- }
- LOGI("Connected SCO socket (incoming)");
- close(sk); // The listening socket
-
- return nsk;
-
-error:
- close(sk);
-
- return -1;
-}
-
-static int connect_work(const char *address, uint16_t sco_pkt_type) {
- LOGV(__FUNCTION__);
- struct sockaddr_sco addr;
- int sk = -1;
-
- sk = socket(PF_BLUETOOTH, SOCK_SEQPACKET, BTPROTO_SCO);
- if (sk < 0) {
- LOGE("%s: socket() failed: %s", __FUNCTION__, strerror(errno));
- return -1;
- }
-
- /* Bind to local address */
- memset(&addr, 0, sizeof(addr));
- addr.sco_family = AF_BLUETOOTH;
- memcpy(&addr.sco_bdaddr, BDADDR_ANY, sizeof(bdaddr_t));
- if (bind(sk, (struct sockaddr *) &addr, sizeof(addr)) < 0) {
- LOGE("%s: bind() failed: %s", __FUNCTION__, strerror(errno));
- goto error;
- }
-
- memset(&addr, 0, sizeof(addr));
- addr.sco_family = AF_BLUETOOTH;
- get_bdaddr(address, &addr.sco_bdaddr);
- addr.sco_pkt_type = sco_pkt_type;
- LOGI("Connecting to socket");
- while (connect(sk, (struct sockaddr *)&addr, sizeof(addr)) < 0) {
- if (errno != EINTR) {
- LOGE("%s: connect() failed: %s", __FUNCTION__, strerror(errno));
- goto error;
- }
- LOGV("%s: connect() EINTR, retrying", __FUNCTION__);
- }
- LOGI("SCO socket connected (outgoing)");
-
- return sk;
-
-error:
- if (sk >= 0) close(sk);
- return -1;
-}
-
-static void wait_for_close(int sk, int signal_sk) {
- LOGV(__FUNCTION__);
- pollfd p[2];
-
- memset(p, 0, 2 * sizeof(pollfd));
- p[0].fd = sk;
- p[1].fd = signal_sk;
- p[1].events = POLLIN | POLLPRI;
-
- LOGV("poll...");
-
- while (poll(p, 2, -1) < 0) { // blocks
- if (errno != EINTR) {
- LOGE("%s: poll() failed: %s", __FUNCTION__, strerror(errno));
- break;
- }
- LOGV("%s: poll() EINTR, retrying", __FUNCTION__);
- }
-
- LOGV("poll() returned");
-}
-#endif
-
-static JNINativeMethod sMethods[] = {
- {"classInitNative", "()V", (void*)classInitNative},
- {"initNative", "()V", (void *)initNative},
- {"destroyNative", "()V", (void *)destroyNative},
- {"connectNative", "(Ljava/lang/String;Ljava/lang/String;)Z", (void *)connectNative},
- {"acceptNative", "()Z", (void *)acceptNative},
- {"closeNative", "()V", (void *)closeNative},
-};
-
-int register_android_bluetooth_ScoSocket(JNIEnv *env) {
- return AndroidRuntime::registerNativeMethods(env,
- "android/bluetooth/ScoSocket", sMethods, NELEM(sMethods));
-}
-
-} /* namespace android */
diff --git a/core/jni/android_view_GLES20Canvas.cpp b/core/jni/android_view_GLES20Canvas.cpp
index d177e1a..fa4d23c 100644
--- a/core/jni/android_view_GLES20Canvas.cpp
+++ b/core/jni/android_view_GLES20Canvas.cpp
@@ -14,6 +14,8 @@
* limitations under the License.
*/
+#define LOG_TAG "OpenGLRenderer"
+
#include "jni.h"
#include <nativehelper/JNIHelp.h>
#include <android_runtime/AndroidRuntime.h>
@@ -24,6 +26,7 @@
#include <SkMatrix.h>
#include <SkPaint.h>
#include <SkRegion.h>
+#include <SkScalerContext.h>
#include <SkXfermode.h>
#include <OpenGLRenderer.h>
@@ -207,7 +210,6 @@
renderer->drawPatch(bitmap, patch, left, top, right, bottom, paint);
- // TODO: make sure that 0 is correct for the flags
env->ReleaseByteArrayElements(chunks, storage, 0);
}
@@ -246,6 +248,26 @@
}
// ----------------------------------------------------------------------------
+// Text
+// ----------------------------------------------------------------------------
+
+static void android_view_GLES20Canvas_drawTextArray(JNIEnv* env, jobject canvas,
+ OpenGLRenderer* renderer, jcharArray text, int index, int count,
+ jfloat x, jfloat y, int flags, SkPaint* paint) {
+ jchar* textArray = env->GetCharArrayElements(text, NULL);
+ // TODO: draw from textArray + index
+ env->ReleaseCharArrayElements(text, textArray, JNI_ABORT);
+}
+
+static void android_view_GLES20Canvas_drawText(JNIEnv* env, jobject canvas,
+ OpenGLRenderer* renderer, jstring text, int start, int end,
+ jfloat x, jfloat y, int flags, SkPaint* paint) {
+ const jchar* textArray = env->GetStringChars(text, NULL);
+ // TODO: draw from textArray + start
+ env->ReleaseStringChars(text, textArray);
+}
+
+// ----------------------------------------------------------------------------
// JNI Glue
// ----------------------------------------------------------------------------
@@ -288,6 +310,10 @@
{ "nSetupBitmapShader", "(IIIIII)V", (void*) android_view_GLES20Canvas_setupBitmapShader },
{ "nSetupLinearShader", "(IIIIIIII)V", (void*) android_view_GLES20Canvas_setupLinearShader },
+ { "nDrawText", "(I[CIIFFII)V", (void*) android_view_GLES20Canvas_drawTextArray },
+ { "nDrawText", "(ILjava/lang/String;IIFFII)V",
+ (void*) android_view_GLES20Canvas_drawText },
+
{ "nGetClipBounds", "(ILandroid/graphics/Rect;)Z",
(void*) android_view_GLES20Canvas_getClipBounds },
};
diff --git a/core/jni/android_view_HardwareRenderer.cpp b/core/jni/android_view_HardwareRenderer.cpp
index abd788b..6d20c9d 100644
--- a/core/jni/android_view_HardwareRenderer.cpp
+++ b/core/jni/android_view_HardwareRenderer.cpp
@@ -34,13 +34,11 @@
const char* const kClassPathName = "android/view/HardwareRenderer";
static JNINativeMethod gMethods[] = {
- { "nativeAbandonGlCaches", "()V",
- (void*)android_view_HardwareRenderer_abandonGlCaches },
+ { "nativeAbandonGlCaches", "()V", (void*)android_view_HardwareRenderer_abandonGlCaches },
};
int register_android_view_HardwareRenderer(JNIEnv* env) {
- return AndroidRuntime::registerNativeMethods(env,
- kClassPathName, gMethods, NELEM(gMethods));
+ return AndroidRuntime::registerNativeMethods(env, kClassPathName, gMethods, NELEM(gMethods));
}
};
diff --git a/core/res/res/values/config.xml b/core/res/res/values/config.xml
index 98b0a28..b37d887 100644
--- a/core/res/res/values/config.xml
+++ b/core/res/res/values/config.xml
@@ -201,6 +201,9 @@
<!-- Indicate whether the SD card is accessible without removing the battery. -->
<bool name="config_batterySdCardAccessibility">false</bool>
+ <!-- Indicate whether the device has USB host support. -->
+ <bool name="config_hasUsbHostSupport">false</bool>
+
<!-- Vibrator pattern for feedback about a long screen/key press -->
<integer-array name="config_longPressVibePattern">
<item>0</item>
diff --git a/docs/html/guide/topics/resources/drawable-resource.jd b/docs/html/guide/topics/resources/drawable-resource.jd
index 1e4cca7..1c3cc4d 100644
--- a/docs/html/guide/topics/resources/drawable-resource.jd
+++ b/docs/html/guide/topics/resources/drawable-resource.jd
@@ -12,9 +12,10 @@
</div>
</div>
-<p>A drawable resource is a general concept for a graphic that you
-can retrieve with {@link android.content.res.Resources#getDrawable(int)}
-and draw on the screen. There are several different types of drawables:</p>
+<p>A drawable resource is a general concept for a graphic that can be drawn to the screen and which
+you can retrieve with APIs such as {@link android.content.res.Resources#getDrawable(int)} or apply
+to another XML resource with attributes such as {@code android:drawable} and {@code android:icon}.
+There are several different types of drawables:</p>
<dl>
<dt><a href="#Bitmap">Bitmap File</a><dt>
<dd>A bitmap graphic file ({@code .png}, {@code .jpg}, or {@code .gif}).
@@ -51,6 +52,12 @@
<p>Also see the <a href="animation-resource.html">Animation Resource</a> document for how to
create an {@link android.graphics.drawable.AnimationDrawable}.</p>
+<p class="note"><strong>Note:</strong> A <a
+href="{@docRoot}guide/topics/resources/more-resources.html#Color">color resource</a> can also be
+used as a drawable in XML. For example, when creating a <a href="#StateList">state list
+drawable</a>, you can reference a color resource for the {@code android:drawable} attribute ({@code
+android:drawable="@color/green"}).</p>
+
diff --git a/docs/html/guide/topics/resources/more-resources.jd b/docs/html/guide/topics/resources/more-resources.jd
index 22abbb2..1f03446 100644
--- a/docs/html/guide/topics/resources/more-resources.jd
+++ b/docs/html/guide/topics/resources/more-resources.jd
@@ -114,8 +114,9 @@
<h2 id="Color">Color</h2>
<p>A color value defined in XML.
-The color is specified with an RGB value and alpha channel. You can use color resource
-any place that accepts a hexadecimal color value.</p>
+The color is specified with an RGB value and alpha channel. You can use a color resource
+any place that accepts a hexadecimal color value. You can also use a color resource when a
+drawable resource is expected in XML (for example, {@code android:drawable="@color/green"}).</p>
<p>The value always begins with a pound (#) character and then followed by the
Alpha-Red-Green-Blue information in one of the following formats:</p>
diff --git a/graphics/java/android/graphics/Paint.java b/graphics/java/android/graphics/Paint.java
index 9b4d3a8..3d63aa6 100644
--- a/graphics/java/android/graphics/Paint.java
+++ b/graphics/java/android/graphics/Paint.java
@@ -43,7 +43,10 @@
private boolean mHasCompatScaling;
private float mCompatScaling;
private float mInvCompatScaling;
- /* package */ int mBidiFlags = BIDI_DEFAULT_LTR;
+ /**
+ * @hide
+ */
+ public int mBidiFlags = BIDI_DEFAULT_LTR;
private static final Style[] sStyleArray = {
Style.FILL, Style.STROKE, Style.FILL_AND_STROKE
diff --git a/graphics/java/android/graphics/TemporaryBuffer.java b/graphics/java/android/graphics/TemporaryBuffer.java
index 1d7fe01..c5b8143 100644
--- a/graphics/java/android/graphics/TemporaryBuffer.java
+++ b/graphics/java/android/graphics/TemporaryBuffer.java
@@ -18,9 +18,11 @@
import com.android.internal.util.ArrayUtils;
-/* package */ class TemporaryBuffer
-{
- /* package */ static char[] obtain(int len) {
+/**
+ * @hide
+ */
+public class TemporaryBuffer {
+ public static char[] obtain(int len) {
char[] buf;
synchronized (TemporaryBuffer.class) {
@@ -28,15 +30,15 @@
sTemp = null;
}
- if (buf == null || buf.length < len)
+ if (buf == null || buf.length < len) {
buf = new char[ArrayUtils.idealCharArraySize(len)];
+ }
return buf;
}
- /* package */ static void recycle(char[] temp) {
- if (temp.length > 1000)
- return;
+ public static void recycle(char[] temp) {
+ if (temp.length > 1000) return;
synchronized (TemporaryBuffer.class) {
sTemp = temp;
diff --git a/include/media/AudioSystem.h b/include/media/AudioSystem.h
index 194f23a..9fd905f 100644
--- a/include/media/AudioSystem.h
+++ b/include/media/AudioSystem.h
@@ -168,6 +168,15 @@
TX_DISABLE = 0
};
+ // special audio session values
+ enum audio_sessions {
+ SESSION_OUTPUT_STAGE = -1, // session for effects attached to a particular output stream
+ // (value must be less than 0)
+ SESSION_OUTPUT_MIX = 0, // session for effects applied to output mix. These effects can
+ // be moved by audio policy manager to another output stream
+ // (value must be 0)
+ };
+
/* These are static methods to control the system-wide AudioFlinger
* only privileged processes can have access to them
*/
@@ -353,8 +362,12 @@
uint32_t format = FORMAT_DEFAULT,
uint32_t channels = CHANNEL_OUT_STEREO,
output_flags flags = OUTPUT_FLAG_INDIRECT);
- static status_t startOutput(audio_io_handle_t output, AudioSystem::stream_type stream);
- static status_t stopOutput(audio_io_handle_t output, AudioSystem::stream_type stream);
+ static status_t startOutput(audio_io_handle_t output,
+ AudioSystem::stream_type stream,
+ int session = 0);
+ static status_t stopOutput(audio_io_handle_t output,
+ AudioSystem::stream_type stream,
+ int session = 0);
static void releaseOutput(audio_io_handle_t output);
static audio_io_handle_t getInput(int inputSource,
uint32_t samplingRate = 0,
@@ -370,6 +383,16 @@
static status_t setStreamVolumeIndex(stream_type stream, int index);
static status_t getStreamVolumeIndex(stream_type stream, int *index);
+ static uint32_t getStrategyForStream(stream_type stream);
+
+ static audio_io_handle_t getOutputForEffect(effect_descriptor_t *desc);
+ static status_t registerEffect(effect_descriptor_t *desc,
+ audio_io_handle_t output,
+ uint32_t strategy,
+ int session,
+ int id);
+ static status_t unregisterEffect(int id);
+
static const sp<IAudioPolicyService>& get_audio_policy_service();
// ----------------------------------------------------------------------------
diff --git a/include/media/IAudioFlinger.h b/include/media/IAudioFlinger.h
index 5814fd6..70e505e 100644
--- a/include/media/IAudioFlinger.h
+++ b/include/media/IAudioFlinger.h
@@ -161,6 +161,8 @@
status_t *status,
int *id,
int *enabled) = 0;
+
+ virtual status_t moveEffects(int session, int srcOutput, int dstOutput) = 0;
};
diff --git a/include/media/IAudioPolicyService.h b/include/media/IAudioPolicyService.h
index 4804bbd..49eee59 100644
--- a/include/media/IAudioPolicyService.h
+++ b/include/media/IAudioPolicyService.h
@@ -53,8 +53,12 @@
uint32_t format = AudioSystem::FORMAT_DEFAULT,
uint32_t channels = 0,
AudioSystem::output_flags flags = AudioSystem::OUTPUT_FLAG_INDIRECT) = 0;
- virtual status_t startOutput(audio_io_handle_t output, AudioSystem::stream_type stream) = 0;
- virtual status_t stopOutput(audio_io_handle_t output, AudioSystem::stream_type stream) = 0;
+ virtual status_t startOutput(audio_io_handle_t output,
+ AudioSystem::stream_type stream,
+ int session = 0) = 0;
+ virtual status_t stopOutput(audio_io_handle_t output,
+ AudioSystem::stream_type stream,
+ int session = 0) = 0;
virtual void releaseOutput(audio_io_handle_t output) = 0;
virtual audio_io_handle_t getInput(int inputSource,
uint32_t samplingRate = 0,
@@ -69,6 +73,14 @@
int indexMax) = 0;
virtual status_t setStreamVolumeIndex(AudioSystem::stream_type stream, int index) = 0;
virtual status_t getStreamVolumeIndex(AudioSystem::stream_type stream, int *index) = 0;
+ virtual uint32_t getStrategyForStream(AudioSystem::stream_type stream) = 0;
+ virtual audio_io_handle_t getOutputForEffect(effect_descriptor_t *desc) = 0;
+ virtual status_t registerEffect(effect_descriptor_t *desc,
+ audio_io_handle_t output,
+ uint32_t strategy,
+ int session,
+ int id) = 0;
+ virtual status_t unregisterEffect(int id) = 0;
};
diff --git a/include/media/stagefright/AudioPlayer.h b/include/media/stagefright/AudioPlayer.h
index 9af5871..9a09586 100644
--- a/include/media/stagefright/AudioPlayer.h
+++ b/include/media/stagefright/AudioPlayer.h
@@ -86,6 +86,10 @@
bool mStarted;
+ bool mIsFirstBuffer;
+ status_t mFirstBufferResult;
+ MediaBuffer *mFirstBuffer;
+
sp<MediaPlayerBase::AudioSink> mAudioSink;
static void AudioCallback(int event, void *user, void *info);
diff --git a/include/media/stagefright/CameraSource.h b/include/media/stagefright/CameraSource.h
index fd30ba58..ed5f09f 100644
--- a/include/media/stagefright/CameraSource.h
+++ b/include/media/stagefright/CameraSource.h
@@ -22,7 +22,6 @@
#include <media/stagefright/MediaSource.h>
#include <utils/List.h>
#include <utils/RefBase.h>
-#include <utils/threads.h>
namespace android {
@@ -35,10 +34,6 @@
static CameraSource *Create();
static CameraSource *CreateFromCamera(const sp<Camera> &camera);
- void enableTimeLapseMode(
- int64_t timeBetweenTimeLapseFrameCaptureUs, int32_t videoFrameRate);
- void disableTimeLapseMode();
-
virtual ~CameraSource();
virtual status_t start(MetaData *params = NULL);
@@ -51,12 +46,34 @@
virtual void signalBufferReturned(MediaBuffer* buffer);
-private:
- friend class CameraSourceListener;
-
+protected:
sp<Camera> mCamera;
sp<MetaData> mMeta;
+ int64_t mStartTimeUs;
+ int32_t mNumFramesReceived;
+ int64_t mLastFrameTimestampUs;
+ bool mStarted;
+
+ CameraSource(const sp<Camera> &camera);
+
+ virtual void startCameraRecording();
+ virtual void stopCameraRecording();
+ virtual void releaseRecordingFrame(const sp<IMemory>& frame);
+
+ // Returns true if need to skip the current frame.
+ // Called from dataCallbackTimestamp.
+ virtual bool skipCurrentFrame(int64_t timestampUs) {return false;}
+
+ // Callback called when still camera raw data is available.
+ virtual void dataCallback(int32_t msgType, const sp<IMemory> &data) {}
+
+ virtual void dataCallbackTimestamp(int64_t timestampUs, int32_t msgType,
+ const sp<IMemory> &data);
+
+private:
+ friend class CameraSourceListener;
+
Mutex mLock;
Condition mFrameAvailableCondition;
Condition mFrameCompleteCondition;
@@ -64,29 +81,12 @@
List<sp<IMemory> > mFramesBeingEncoded;
List<int64_t> mFrameTimes;
- int64_t mStartTimeUs;
int64_t mFirstFrameTimeUs;
- int64_t mLastFrameTimestampUs;
- int32_t mNumFramesReceived;
int32_t mNumFramesEncoded;
int32_t mNumFramesDropped;
int32_t mNumGlitches;
int64_t mGlitchDurationThresholdUs;
bool mCollectStats;
- bool mStarted;
-
- // Time between capture of two frames during time lapse recording
- // Negative value indicates that timelapse is disabled.
- int64_t mTimeBetweenTimeLapseFrameCaptureUs;
- // Time between two frames in final video (1/frameRate)
- int64_t mTimeBetweenTimeLapseVideoFramesUs;
- // Real timestamp of the last encoded time lapse frame
- int64_t mLastTimeLapseFrameRealTimestampUs;
-
- CameraSource(const sp<Camera> &camera);
-
- void dataCallbackTimestamp(
- int64_t timestampUs, int32_t msgType, const sp<IMemory> &data);
void releaseQueuedFrames();
void releaseOneRecordingFrame(const sp<IMemory>& frame);
diff --git a/include/media/stagefright/CameraSourceTimeLapse.h b/include/media/stagefright/CameraSourceTimeLapse.h
new file mode 100644
index 0000000..f153f09
--- /dev/null
+++ b/include/media/stagefright/CameraSourceTimeLapse.h
@@ -0,0 +1,142 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef CAMERA_SOURCE_TIME_LAPSE_H_
+
+#define CAMERA_SOURCE_TIME_LAPSE_H_
+
+#include <pthread.h>
+
+#include <utils/RefBase.h>
+#include <utils/threads.h>
+
+namespace android {
+
+class ICamera;
+class IMemory;
+class Camera;
+
+class CameraSourceTimeLapse : public CameraSource {
+public:
+ static CameraSourceTimeLapse *Create(bool useStillCameraForTimeLapse,
+ int64_t timeBetweenTimeLapseFrameCaptureUs,
+ int32_t videoFrameRate);
+
+ static CameraSourceTimeLapse *CreateFromCamera(const sp<Camera> &camera,
+ bool useStillCameraForTimeLapse,
+ int64_t timeBetweenTimeLapseFrameCaptureUs,
+ int32_t videoFrameRate);
+
+ virtual ~CameraSourceTimeLapse();
+
+private:
+ // If true, will use still camera takePicture() for time lapse frames
+ // If false, will use the videocamera frames instead.
+ bool mUseStillCameraForTimeLapse;
+
+ // Time between capture of two frames during time lapse recording
+ // Negative value indicates that timelapse is disabled.
+ int64_t mTimeBetweenTimeLapseFrameCaptureUs;
+
+ // Time between two frames in final video (1/frameRate)
+ int64_t mTimeBetweenTimeLapseVideoFramesUs;
+
+ // Real timestamp of the last encoded time lapse frame
+ int64_t mLastTimeLapseFrameRealTimestampUs;
+
+ // Thread id of thread which takes still picture and sleeps in a loop.
+ pthread_t mThreadTimeLapse;
+
+ // Variable set in dataCallbackTimestamp() to help skipCurrentFrame()
+ // to know if current frame needs to be skipped.
+ bool mSkipCurrentFrame;
+
+ // True if camera is in preview mode and ready for takePicture().
+ bool mCameraIdle;
+
+ CameraSourceTimeLapse(const sp<Camera> &camera,
+ bool useStillCameraForTimeLapse,
+ int64_t timeBetweenTimeLapseFrameCaptureUs,
+ int32_t videoFrameRate);
+
+ // For still camera case starts a thread which calls camera's takePicture()
+ // in a loop. For video camera case, just starts the camera's video recording.
+ virtual void startCameraRecording();
+
+ // For still camera case joins the thread created in startCameraRecording().
+ // For video camera case, just stops the camera's video recording.
+ virtual void stopCameraRecording();
+
+ // For still camera case don't need to do anything as memory is locally
+ // allocated with refcounting.
+ // For video camera case just tell the camera to release the frame.
+ virtual void releaseRecordingFrame(const sp<IMemory>& frame);
+
+ // mSkipCurrentFrame is set to true in dataCallbackTimestamp() if the current
+ // frame needs to be skipped and this function just returns the value of mSkipCurrentFrame.
+ virtual bool skipCurrentFrame(int64_t timestampUs);
+
+ // Handles the callback to handle raw frame data from the still camera.
+ // Creates a copy of the frame data as the camera can reuse the frame memory
+ // once this callback returns. The function also sets a new timstamp corresponding
+ // to one frame time ahead of the last encoded frame's time stamp. It then
+ // calls dataCallbackTimestamp() of the base class with the copied data and the
+ // modified timestamp, which will think that it recieved the frame from a video
+ // camera and proceed as usual.
+ virtual void dataCallback(int32_t msgType, const sp<IMemory> &data);
+
+ // In the video camera case calls skipFrameAndModifyTimeStamp() to modify
+ // timestamp and set mSkipCurrentFrame.
+ // Then it calls the base CameraSource::dataCallbackTimestamp()
+ virtual void dataCallbackTimestamp(int64_t timestampUs, int32_t msgType,
+ const sp<IMemory> &data);
+
+ // When video camera is used for time lapse capture, returns true
+ // until enough time has passed for the next time lapse frame. When
+ // the frame needs to be encoded, it returns false and also modifies
+ // the time stamp to be one frame time ahead of the last encoded
+ // frame's time stamp.
+ bool skipFrameAndModifyTimeStamp(int64_t *timestampUs);
+
+ // Wrapper to enter threadTimeLapseEntry()
+ static void *ThreadTimeLapseWrapper(void *me);
+
+ // Runs a loop which sleeps until a still picture is required
+ // and then calls mCamera->takePicture() to take the still picture.
+ // Used only in the case mUseStillCameraForTimeLapse = true.
+ void threadTimeLapseEntry();
+
+ // Wrapper to enter threadStartPreview()
+ static void *ThreadStartPreviewWrapper(void *me);
+
+ // Starts the camera's preview.
+ void threadStartPreview();
+
+ // Starts thread ThreadStartPreviewWrapper() for restarting preview.
+ // Needs to be done in a thread so that dataCallback() which calls this function
+ // can return, and the camera can know that takePicture() is done.
+ void restartPreview();
+
+ // Creates a copy of source_data into a new memory of final type MemoryBase.
+ sp<IMemory> createIMemoryCopy(const sp<IMemory> &source_data);
+
+ CameraSourceTimeLapse(const CameraSourceTimeLapse &);
+ CameraSourceTimeLapse &operator=(const CameraSourceTimeLapse &);
+};
+
+} // namespace android
+
+#endif // CAMERA_SOURCE_TIME_LAPSE_H_
diff --git a/include/media/stagefright/MPEG4Writer.h b/include/media/stagefright/MPEG4Writer.h
index 232583a..d0c2dca 100644
--- a/include/media/stagefright/MPEG4Writer.h
+++ b/include/media/stagefright/MPEG4Writer.h
@@ -97,6 +97,7 @@
inline size_t write(const void *ptr, size_t size, size_t nmemb, FILE* stream);
bool exceedsFileSizeLimit();
bool exceedsFileDurationLimit();
+ void trackProgressStatus(const Track* track, int64_t timeUs, status_t err = OK);
MPEG4Writer(const MPEG4Writer &);
MPEG4Writer &operator=(const MPEG4Writer &);
diff --git a/libs/hwui/OpenGLRenderer.cpp b/libs/hwui/OpenGLRenderer.cpp
index e39385a..1fa76d2 100644
--- a/libs/hwui/OpenGLRenderer.cpp
+++ b/libs/hwui/OpenGLRenderer.cpp
@@ -138,8 +138,8 @@
mCurrentProgram = mDrawTextureProgram;
mShader = kShaderNone;
- mShaderTileX = SkShader::kClamp_TileMode;
- mShaderTileY = SkShader::kClamp_TileMode;
+ mShaderTileX = GL_CLAMP_TO_EDGE;
+ mShaderTileY = GL_CLAMP_TO_EDGE;
mShaderMatrix = NULL;
mShaderBitmap = NULL;
@@ -535,8 +535,8 @@
mShader = OpenGLRenderer::kShaderNone;
mShaderKey = NULL;
mShaderBlend = false;
- mShaderTileX = SkShader::kClamp_TileMode;
- mShaderTileY = SkShader::kClamp_TileMode;
+ mShaderTileX = GL_CLAMP_TO_EDGE;
+ mShaderTileY = GL_CLAMP_TO_EDGE;
}
void OpenGLRenderer::setupBitmapShader(SkBitmap* bitmap, SkShader::TileMode tileX,
@@ -544,8 +544,8 @@
mShader = OpenGLRenderer::kShaderBitmap;
mShaderBlend = hasAlpha;
mShaderBitmap = bitmap;
- mShaderTileX = tileX;
- mShaderTileY = tileY;
+ mShaderTileX = gTileModes[tileX];
+ mShaderTileY = gTileModes[tileY];
mShaderMatrix = matrix;
}
@@ -556,8 +556,8 @@
mShader = OpenGLRenderer::kShaderLinearGradient;
mShaderKey = shader;
mShaderBlend = hasAlpha;
- mShaderTileX = tileMode;
- mShaderTileY = tileMode;
+ mShaderTileX = gTileModes[tileMode];
+ mShaderTileY = gTileModes[tileMode];
mShaderMatrix = matrix;
mShaderBounds = bounds;
mShaderColors = colors;
@@ -623,8 +623,18 @@
float alpha, SkXfermode::Mode mode) {
Texture* texture = mGradientCache.get(mShaderKey);
if (!texture) {
+ SkShader::TileMode tileMode = SkShader::kClamp_TileMode;
+ switch (mShaderTileX) {
+ case GL_REPEAT:
+ tileMode = SkShader::kRepeat_TileMode;
+ break;
+ case GL_MIRRORED_REPEAT:
+ tileMode = SkShader::kMirror_TileMode;
+ break;
+ }
+
texture = mGradientCache.addLinearGradient(mShaderKey, mShaderBounds, mShaderColors,
- mShaderPositions, mShaderCount, mShaderTileX);
+ mShaderPositions, mShaderCount, tileMode);
}
mModelView.loadTranslate(left, top, 0.0f);
@@ -634,14 +644,7 @@
mDrawLinearGradientProgram->set(mOrthoMatrix, mModelView, mSnapshot->transform);
chooseBlending(mShaderBlend || alpha < 1.0f, mode);
-
- if (texture->id != mLastTexture) {
- glBindTexture(GL_TEXTURE_2D, texture->id);
- mLastTexture = texture->id;
- }
- // TODO: Don't set the texture parameters every time
- glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, gTileModes[mShaderTileX]);
- glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, gTileModes[mShaderTileX]);
+ bindTexture(texture->id, mShaderTileX, mShaderTileY);
Rect start(mShaderBounds[0], mShaderBounds[1], mShaderBounds[2], mShaderBounds[3]);
if (mShaderMatrix) {
@@ -736,14 +739,7 @@
mDrawTextureProgram->set(mOrthoMatrix, mModelView, mSnapshot->transform);
chooseBlending(blend || alpha < 1.0f, mode);
-
- if (texture != mLastTexture) {
- glBindTexture(GL_TEXTURE_2D, texture);
- mLastTexture = texture;
- }
- // TODO: Don't set the texture parameters every time
- glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, gTileModes[mShaderTileX]);
- glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, gTileModes[mShaderTileY]);
+ bindTexture(texture, mShaderTileX, mShaderTileY);
// Always premultiplied
//glUniform4f(mDrawTextureProgram->color, alpha, alpha, alpha, alpha);
@@ -826,5 +822,15 @@
}
}
+void OpenGLRenderer::bindTexture(GLuint texture, GLenum wrapS, GLenum wrapT) {
+ if (texture != mLastTexture) {
+ glBindTexture(GL_TEXTURE_2D, texture);
+ mLastTexture = texture;
+ }
+ // TODO: Don't set the texture parameters every time
+ glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, wrapS);
+ glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, wrapT);
+}
+
}; // namespace uirenderer
}; // namespace android
diff --git a/libs/hwui/OpenGLRenderer.h b/libs/hwui/OpenGLRenderer.h
index dd7999d..9dc2a43 100644
--- a/libs/hwui/OpenGLRenderer.h
+++ b/libs/hwui/OpenGLRenderer.h
@@ -287,6 +287,11 @@
inline void getAlphaAndMode(const SkPaint* paint, int* alpha, SkXfermode::Mode* mode);
/**
+ * Binds the specified texture with the specified wrap modes.
+ */
+ inline void bindTexture(GLuint texture, GLenum wrapS, GLenum wrapT);
+
+ /**
* Enable or disable blending as necessary. This function sets the appropriate
* blend function based on the specified xfermode.
*/
@@ -341,8 +346,8 @@
ShaderType mShader;
SkShader* mShaderKey;
bool mShaderBlend;
- SkShader::TileMode mShaderTileX;
- SkShader::TileMode mShaderTileY;
+ GLenum mShaderTileX;
+ GLenum mShaderTileY;
SkMatrix* mShaderMatrix;
// Bitmaps
SkBitmap* mShaderBitmap;
diff --git a/libs/rs/java/ImageProcessing/Android.mk b/libs/rs/java/ImageProcessing/Android.mk
index f7ff378..7fa30d0 100644
--- a/libs/rs/java/ImageProcessing/Android.mk
+++ b/libs/rs/java/ImageProcessing/Android.mk
@@ -21,7 +21,8 @@
LOCAL_MODULE_TAGS := optional
-LOCAL_SRC_FILES := $(call all-java-files-under, src)
+LOCAL_SRC_FILES := $(call all-java-files-under, src) \
+ $(call all-renderscript-files-under, src)
#LOCAL_STATIC_JAVA_LIBRARIES := android.renderscript
LOCAL_PACKAGE_NAME := ImageProcessing
diff --git a/libs/rs/java/ImageProcessing/res/raw/horizontal_blur_bc.bc b/libs/rs/java/ImageProcessing/res/raw/horizontal_blur_bc.bc
deleted file mode 100644
index 5920f3a..0000000
--- a/libs/rs/java/ImageProcessing/res/raw/horizontal_blur_bc.bc
+++ /dev/null
Binary files differ
diff --git a/libs/rs/java/ImageProcessing/res/raw/threshold.rs b/libs/rs/java/ImageProcessing/res/raw/threshold.rs
deleted file mode 100644
index aa6b6fa..0000000
--- a/libs/rs/java/ImageProcessing/res/raw/threshold.rs
+++ /dev/null
@@ -1,176 +0,0 @@
-#pragma version(1)
-
-#include "../../../../scriptc/rs_types.rsh"
-#include "../../../../scriptc/rs_math.rsh"
-
-#include "ip.rsh"
-
-int height;
-int width;
-int radius;
-
-uchar4 * InPixel;
-uchar4 * OutPixel;
-uchar4 * ScratchPixel;
-
-float inBlack;
-float outBlack;
-float inWhite;
-float outWhite;
-float gamma;
-
-float saturation;
-
-static float inWMinInB;
-static float outWMinOutB;
-static float overInWMinInB;
-
-#pragma rs export_var(height, width, radius, InPixel, OutPixel, ScratchPixel, inBlack, outBlack, inWhite, outWhite, gamma, saturation, InPixel, OutPixel, ScratchPixel, vBlurScript, hBlurScript)
-#pragma rs export_func(filter, filterBenchmark);
-
-rs_script vBlurScript;
-rs_script hBlurScript;
-
-
-// Store our coefficients here
-static float gaussian[MAX_RADIUS * 2 + 1];
-static rs_matrix3x3 colorMat;
-
-static void computeColorMatrix() {
- // Saturation
- // Linear weights
- //float rWeight = 0.3086f;
- //float gWeight = 0.6094f;
- //float bWeight = 0.0820f;
-
- // Gamma 2.2 weights (we haven't converted our image to linear space yet for perf reasons)
- float rWeight = 0.299f;
- float gWeight = 0.587f;
- float bWeight = 0.114f;
-
- float oneMinusS = 1.0f - saturation;
-
- rsMatrixSet(&colorMat, 0, 0, oneMinusS * rWeight + saturation);
- rsMatrixSet(&colorMat, 0, 1, oneMinusS * rWeight);
- rsMatrixSet(&colorMat, 0, 2, oneMinusS * rWeight);
- rsMatrixSet(&colorMat, 1, 0, oneMinusS * gWeight);
- rsMatrixSet(&colorMat, 1, 1, oneMinusS * gWeight + saturation);
- rsMatrixSet(&colorMat, 1, 2, oneMinusS * gWeight);
- rsMatrixSet(&colorMat, 2, 0, oneMinusS * bWeight);
- rsMatrixSet(&colorMat, 2, 1, oneMinusS * bWeight);
- rsMatrixSet(&colorMat, 2, 2, oneMinusS * bWeight + saturation);
-
- inWMinInB = inWhite - inBlack;
- outWMinOutB = outWhite - outBlack;
- overInWMinInB = 1.f / inWMinInB;
-}
-
-static void computeGaussianWeights() {
- // Compute gaussian weights for the blur
- // e is the euler's number
- float e = 2.718281828459045f;
- float pi = 3.1415926535897932f;
- // g(x) = ( 1 / sqrt( 2 * pi ) * sigma) * e ^ ( -x^2 / 2 * sigma^2 )
- // x is of the form [-radius .. 0 .. radius]
- // and sigma varies with radius.
- // Based on some experimental radius values and sigma's
- // we approximately fit sigma = f(radius) as
- // sigma = radius * 0.4 + 0.6
- // The larger the radius gets, the more our gaussian blur
- // will resemble a box blur since with large sigma
- // the gaussian curve begins to lose its shape
- float sigma = 0.4f * (float)radius + 0.6f;
-
- // Now compute the coefficints
- // We will store some redundant values to save some math during
- // the blur calculations
- // precompute some values
- float coeff1 = 1.0f / (sqrt( 2.0f * pi ) * sigma);
- float coeff2 = - 1.0f / (2.0f * sigma * sigma);
-
- float normalizeFactor = 0.0f;
- float floatR = 0.0f;
- int r;
- for(r = -radius; r <= radius; r ++) {
- floatR = (float)r;
- gaussian[r + radius] = coeff1 * pow(e, floatR * floatR * coeff2);
- normalizeFactor += gaussian[r + radius];
- }
-
- //Now we need to normalize the weights because all our coefficients need to add up to one
- normalizeFactor = 1.0f / normalizeFactor;
- for(r = -radius; r <= radius; r ++) {
- floatR = (float)r;
- gaussian[r + radius] *= normalizeFactor;
- }
-}
-
-static void processNoBlur() {
- float inWMinInB = inWhite - inBlack;
- float outWMinOutB = outWhite - outBlack;
- float4 currentPixel = 0;
-
- for(int h = 0; h < height; h ++) {
- uchar4 *input = InPixel + h*width;
- uchar4 *output = OutPixel + h*width;
-
- for(int w = 0; w < width; w ++) {
- //currentPixel.xyz = convert_float3(input.xyz);
- currentPixel.x = (float)(input->x);
- currentPixel.y = (float)(input->y);
- currentPixel.z = (float)(input->z);
-
- float3 temp = rsMatrixMultiply(&colorMat, currentPixel.xyz);
- temp = (clamp(temp, 0.f, 255.f) - inBlack) * overInWMinInB;
- temp = pow(temp, (float3)gamma);
- currentPixel.xyz = clamp(temp * outWMinOutB + outBlack, 0.f, 255.f);
-
- //output.xyz = convert_uchar3(currentPixel.xyz);
- output->x = (uint8_t)currentPixel.x;
- output->y = (uint8_t)currentPixel.y;
- output->z = (uint8_t)currentPixel.z;
- output->w = input->w;
-
- input++;
- output++;
- }
- }
-}
-
-static void blur() {
- computeGaussianWeights();
-
- FilterStruct fs;
- fs.gaussian = gaussian;
- fs.width = width;
- fs.height = height;
- fs.radius = radius;
-
- fs.ain = rsGetAllocation(InPixel);
- rsForEach(hBlurScript, fs.ain, rsGetAllocation(ScratchPixel), &fs);
-
- fs.ain = rsGetAllocation(ScratchPixel);
- rsForEach(vBlurScript, fs.ain, rsGetAllocation(OutPixel), &fs);
-}
-
-void filter() {
- RS_DEBUG(radius);
-
- computeColorMatrix();
-
- if(radius > 0) {
- blur();
- }
- processNoBlur();
-
- int count = 0;
- rsSendToClient(&count, 1, 4, 0);
-}
-
-void filterBenchmark() {
- blur();
-
- int count = 0;
- rsSendToClient(&count, 1, 4, 0);
-}
-
diff --git a/libs/rs/java/ImageProcessing/res/raw/threshold_bc.bc b/libs/rs/java/ImageProcessing/res/raw/threshold_bc.bc
deleted file mode 100644
index 2b5d254..0000000
--- a/libs/rs/java/ImageProcessing/res/raw/threshold_bc.bc
+++ /dev/null
Binary files differ
diff --git a/libs/rs/java/ImageProcessing/res/raw/vertical_blur_bc.bc b/libs/rs/java/ImageProcessing/res/raw/vertical_blur_bc.bc
deleted file mode 100644
index be5d0e4..0000000
--- a/libs/rs/java/ImageProcessing/res/raw/vertical_blur_bc.bc
+++ /dev/null
Binary files differ
diff --git a/libs/rs/java/ImageProcessing/src/com/android/rs/image/ImageProcessingActivity.java b/libs/rs/java/ImageProcessing/src/com/android/rs/image/ImageProcessingActivity.java
index 0ed1185..606bfa8 100644
--- a/libs/rs/java/ImageProcessing/src/com/android/rs/image/ImageProcessingActivity.java
+++ b/libs/rs/java/ImageProcessing/src/com/android/rs/image/ImageProcessingActivity.java
@@ -44,6 +44,7 @@
private ScriptC_Threshold mScript;
private ScriptC_Vertical_blur mScriptVBlur;
private ScriptC_Horizontal_blur mScriptHBlur;
+ private ScriptC_Levels mScriptLevels;
private int mRadius = 0;
private SeekBar mRadiusSeekBar;
@@ -260,29 +261,29 @@
}
else if(seekBar == mInBlackSeekBar) {
mInBlack = (float)progress;
- mScript.set_inBlack(mInBlack);
+ mScriptLevels.invoke_setLevels(mInBlack, mOutBlack, mInWhite, mOutWhite);
}
else if(seekBar == mOutBlackSeekBar) {
mOutBlack = (float)progress;
- mScript.set_outBlack(mOutBlack);
+ mScriptLevels.invoke_setLevels(mInBlack, mOutBlack, mInWhite, mOutWhite);
}
else if(seekBar == mInWhiteSeekBar) {
mInWhite = (float)progress + 127.0f;
- mScript.set_inWhite(mInWhite);
+ mScriptLevels.invoke_setLevels(mInBlack, mOutBlack, mInWhite, mOutWhite);
}
else if(seekBar == mOutWhiteSeekBar) {
mOutWhite = (float)progress + 127.0f;
- mScript.set_outWhite(mOutWhite);
+ mScriptLevels.invoke_setLevels(mInBlack, mOutBlack, mInWhite, mOutWhite);
}
else if(seekBar == mGammaSeekBar) {
mGamma = (float)progress/100.0f;
mGamma = Math.max(mGamma, 0.1f);
mGamma = 1.0f / mGamma;
- mScript.set_gamma(mGamma);
+ mScriptLevels.invoke_setGamma(mGamma);
}
else if(seekBar == mSaturationSeekBar) {
mSaturation = (float)progress / 50.0f;
- mScript.set_saturation(mSaturation);
+ mScriptLevels.invoke_setSaturation(mSaturation);
}
long t = java.lang.System.currentTimeMillis();
@@ -375,20 +376,18 @@
mOutPixelsAllocation = Allocation.createBitmapRef(mRS, mBitmapOut);
mScratchPixelsAllocation = Allocation.createBitmapRef(mRS, mBitmapScratch);
- mScriptVBlur = new ScriptC_Vertical_blur(mRS, getResources(), R.raw.vertical_blur_bc, false);
- mScriptHBlur = new ScriptC_Horizontal_blur(mRS, getResources(), R.raw.horizontal_blur_bc, false);
+ mScriptVBlur = new ScriptC_Vertical_blur(mRS, getResources(), R.raw.vertical_blur, false);
+ mScriptHBlur = new ScriptC_Horizontal_blur(mRS, getResources(), R.raw.horizontal_blur, false);
+ mScriptLevels = new ScriptC_Levels(mRS, getResources(), R.raw.levels, false);
- mScript = new ScriptC_Threshold(mRS, getResources(), R.raw.threshold_bc, false);
+ mScript = new ScriptC_Threshold(mRS, getResources(), R.raw.threshold, false);
mScript.set_width(mBitmapIn.getWidth());
mScript.set_height(mBitmapIn.getHeight());
mScript.set_radius(mRadius);
- mScript.set_inBlack(mInBlack);
- mScript.set_outBlack(mOutBlack);
- mScript.set_inWhite(mInWhite);
- mScript.set_outWhite(mOutWhite);
- mScript.set_gamma(mGamma);
- mScript.set_saturation(mSaturation);
+ mScriptLevels.invoke_setLevels(mInBlack, mOutBlack, mInWhite, mOutWhite);
+ mScriptLevels.invoke_setGamma(mGamma);
+ mScriptLevels.invoke_setSaturation(mSaturation);
mScript.bind_InPixel(mInPixelsAllocation);
mScript.bind_OutPixel(mOutPixelsAllocation);
@@ -396,6 +395,7 @@
mScript.set_vBlurScript(mScriptVBlur);
mScript.set_hBlurScript(mScriptHBlur);
+ mScript.set_levelsScript(mScriptLevels);
}
private Bitmap loadBitmap(int resource) {
diff --git a/libs/rs/java/ImageProcessing/src/com/android/rs/image/ScriptC_Horizontal_blur.java b/libs/rs/java/ImageProcessing/src/com/android/rs/image/ScriptC_Horizontal_blur.java
deleted file mode 100644
index c447b9b..0000000
--- a/libs/rs/java/ImageProcessing/src/com/android/rs/image/ScriptC_Horizontal_blur.java
+++ /dev/null
@@ -1,30 +0,0 @@
-/*
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.android.rs.image;
-
-import android.renderscript.*;
-import android.content.res.Resources;
-import android.util.Log;
-
-public class ScriptC_Horizontal_blur extends ScriptC {
- // Constructor
- public ScriptC_Horizontal_blur(RenderScript rs, Resources resources, int id, boolean isRoot) {
- super(rs, resources, id, isRoot);
- }
-
-}
-
diff --git a/libs/rs/java/ImageProcessing/src/com/android/rs/image/ScriptC_Threshold.java b/libs/rs/java/ImageProcessing/src/com/android/rs/image/ScriptC_Threshold.java
deleted file mode 100644
index c23dca1..0000000
--- a/libs/rs/java/ImageProcessing/src/com/android/rs/image/ScriptC_Threshold.java
+++ /dev/null
@@ -1,197 +0,0 @@
-/*
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.android.rs.image;
-
-import android.renderscript.*;
-import android.content.res.Resources;
-import android.util.Log;
-
-public class ScriptC_Threshold extends ScriptC {
- // Constructor
- public ScriptC_Threshold(RenderScript rs, Resources resources, int id, boolean isRoot) {
- super(rs, resources, id, isRoot);
- }
-
- private final static int mExportVarIdx_height = 0;
- private int mExportVar_height;
- public void set_height(int v) {
- mExportVar_height = v;
- setVar(mExportVarIdx_height, v);
- }
-
- public int get_height() {
- return mExportVar_height;
- }
-
- private final static int mExportVarIdx_width = 1;
- private int mExportVar_width;
- public void set_width(int v) {
- mExportVar_width = v;
- setVar(mExportVarIdx_width, v);
- }
-
- public int get_width() {
- return mExportVar_width;
- }
-
- private final static int mExportVarIdx_radius = 2;
- private int mExportVar_radius;
- public void set_radius(int v) {
- mExportVar_radius = v;
- setVar(mExportVarIdx_radius, v);
- }
-
- public int get_radius() {
- return mExportVar_radius;
- }
-
- private final static int mExportVarIdx_InPixel = 3;
- private Allocation mExportVar_InPixel;
- public void bind_InPixel(Allocation v) {
- mExportVar_InPixel = v;
- if(v == null) bindAllocation(null, mExportVarIdx_InPixel);
- else bindAllocation(v, mExportVarIdx_InPixel);
- }
-
- public Allocation get_InPixel() {
- return mExportVar_InPixel;
- }
-
- private final static int mExportVarIdx_OutPixel = 4;
- private Allocation mExportVar_OutPixel;
- public void bind_OutPixel(Allocation v) {
- mExportVar_OutPixel = v;
- if(v == null) bindAllocation(null, mExportVarIdx_OutPixel);
- else bindAllocation(v, mExportVarIdx_OutPixel);
- }
-
- public Allocation get_OutPixel() {
- return mExportVar_OutPixel;
- }
-
- private final static int mExportVarIdx_ScratchPixel = 5;
- private Allocation mExportVar_ScratchPixel;
- public void bind_ScratchPixel(Allocation v) {
- mExportVar_ScratchPixel = v;
- if(v == null) bindAllocation(null, mExportVarIdx_ScratchPixel);
- else bindAllocation(v, mExportVarIdx_ScratchPixel);
- }
-
- public Allocation get_ScratchPixel() {
- return mExportVar_ScratchPixel;
- }
-
- private final static int mExportVarIdx_inBlack = 6;
- private float mExportVar_inBlack;
- public void set_inBlack(float v) {
- mExportVar_inBlack = v;
- setVar(mExportVarIdx_inBlack, v);
- }
-
- public float get_inBlack() {
- return mExportVar_inBlack;
- }
-
- private final static int mExportVarIdx_outBlack = 7;
- private float mExportVar_outBlack;
- public void set_outBlack(float v) {
- mExportVar_outBlack = v;
- setVar(mExportVarIdx_outBlack, v);
- }
-
- public float get_outBlack() {
- return mExportVar_outBlack;
- }
-
- private final static int mExportVarIdx_inWhite = 8;
- private float mExportVar_inWhite;
- public void set_inWhite(float v) {
- mExportVar_inWhite = v;
- setVar(mExportVarIdx_inWhite, v);
- }
-
- public float get_inWhite() {
- return mExportVar_inWhite;
- }
-
- private final static int mExportVarIdx_outWhite = 9;
- private float mExportVar_outWhite;
- public void set_outWhite(float v) {
- mExportVar_outWhite = v;
- setVar(mExportVarIdx_outWhite, v);
- }
-
- public float get_outWhite() {
- return mExportVar_outWhite;
- }
-
- private final static int mExportVarIdx_gamma = 10;
- private float mExportVar_gamma;
- public void set_gamma(float v) {
- mExportVar_gamma = v;
- setVar(mExportVarIdx_gamma, v);
- }
-
- public float get_gamma() {
- return mExportVar_gamma;
- }
-
- private final static int mExportVarIdx_saturation = 11;
- private float mExportVar_saturation;
- public void set_saturation(float v) {
- mExportVar_saturation = v;
- setVar(mExportVarIdx_saturation, v);
- }
-
- public float get_saturation() {
- return mExportVar_saturation;
- }
-
- private final static int mExportVarIdx_vBlurScript = 12;
- private Script mExportVar_vBlurScript;
- public void set_vBlurScript(Script v) {
- mExportVar_vBlurScript = v;
- setVar(mExportVarIdx_vBlurScript, (v == null) ? 0 : v.getID());
- }
-
- public Script get_vBlurScript() {
- return mExportVar_vBlurScript;
- }
-
- private final static int mExportVarIdx_hBlurScript = 13;
- private Script mExportVar_hBlurScript;
- public void set_hBlurScript(Script v) {
- mExportVar_hBlurScript = v;
- setVar(mExportVarIdx_hBlurScript, (v == null) ? 0 : v.getID());
- }
-
- public Script get_hBlurScript() {
- return mExportVar_hBlurScript;
- }
-
- private final static int mExportFuncIdx_filter = 0;
- public void invoke_filter() {
- invoke(mExportFuncIdx_filter);
- }
-
- private final static int mExportFuncIdx_filterBenchmark = 1;
- public void invoke_filterBenchmark() {
- invoke(mExportFuncIdx_filterBenchmark);
- }
-
-}
-
diff --git a/libs/rs/java/ImageProcessing/src/com/android/rs/image/ScriptC_Vertical_blur.java b/libs/rs/java/ImageProcessing/src/com/android/rs/image/ScriptC_Vertical_blur.java
deleted file mode 100644
index cee74d9..0000000
--- a/libs/rs/java/ImageProcessing/src/com/android/rs/image/ScriptC_Vertical_blur.java
+++ /dev/null
@@ -1,30 +0,0 @@
-/*
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.android.rs.image;
-
-import android.renderscript.*;
-import android.content.res.Resources;
-import android.util.Log;
-
-public class ScriptC_Vertical_blur extends ScriptC {
- // Constructor
- public ScriptC_Vertical_blur(RenderScript rs, Resources resources, int id, boolean isRoot) {
- super(rs, resources, id, isRoot);
- }
-
-}
-
diff --git a/libs/rs/java/ImageProcessing/res/raw/horizontal_blur.rs b/libs/rs/java/ImageProcessing/src/com/android/rs/image/horizontal_blur.rs
similarity index 92%
rename from libs/rs/java/ImageProcessing/res/raw/horizontal_blur.rs
rename to libs/rs/java/ImageProcessing/src/com/android/rs/image/horizontal_blur.rs
index 10815fb..b580393 100644
--- a/libs/rs/java/ImageProcessing/res/raw/horizontal_blur.rs
+++ b/libs/rs/java/ImageProcessing/src/com/android/rs/image/horizontal_blur.rs
@@ -1,7 +1,7 @@
#pragma version(1)
-#include "../../../../scriptc/rs_types.rsh"
-#include "../../../../scriptc/rs_math.rsh"
+#include "rs_types.rsh"
+#include "rs_math.rsh"
#include "ip.rsh"
diff --git a/libs/rs/java/ImageProcessing/res/raw/ip.rsh b/libs/rs/java/ImageProcessing/src/com/android/rs/image/ip.rsh
similarity index 88%
rename from libs/rs/java/ImageProcessing/res/raw/ip.rsh
rename to libs/rs/java/ImageProcessing/src/com/android/rs/image/ip.rsh
index dea92c3..34213f5 100644
--- a/libs/rs/java/ImageProcessing/res/raw/ip.rsh
+++ b/libs/rs/java/ImageProcessing/src/com/android/rs/image/ip.rsh
@@ -6,8 +6,6 @@
rs_allocation ain;
float *gaussian; //[MAX_RADIUS * 2 + 1];
- rs_matrix3x3 colorMat;
-
int height;
int width;
int radius;
diff --git a/libs/rs/java/ImageProcessing/src/com/android/rs/image/levels.rs b/libs/rs/java/ImageProcessing/src/com/android/rs/image/levels.rs
new file mode 100644
index 0000000..b0db8a3
--- /dev/null
+++ b/libs/rs/java/ImageProcessing/src/com/android/rs/image/levels.rs
@@ -0,0 +1,88 @@
+#pragma version(1)
+
+#include "rs_types.rsh"
+#include "rs_math.rsh"
+
+#include "ip.rsh"
+
+
+static float inBlack;
+static float outBlack;
+static float inWhite;
+static float outWhite;
+static float3 gamma;
+static float saturation;
+
+static float inWMinInB;
+static float outWMinOutB;
+static float overInWMinInB;
+static rs_matrix3x3 colorMat;
+
+//#pragma rs export_var(height, width, radius, InPixel, OutPixel, ScratchPixel, inBlack, outBlack, inWhite, outWhite, gamma, saturation, InPixel, OutPixel, ScratchPixel, vBlurScript, hBlurScript)
+#pragma rs export_func(setLevels, setSaturation, setGamma);
+
+void setLevels(float iBlk, float oBlk, float iWht, float oWht) {
+ inBlack = iBlk;
+ outBlack = oBlk;
+ inWhite = iWht;
+ outWhite = oWht;
+
+ inWMinInB = inWhite - inBlack;
+ outWMinOutB = outWhite - outBlack;
+ overInWMinInB = 1.f / inWMinInB;
+}
+
+void setSaturation(float sat) {
+ saturation = sat;
+
+ // Saturation
+ // Linear weights
+ //float rWeight = 0.3086f;
+ //float gWeight = 0.6094f;
+ //float bWeight = 0.0820f;
+
+ // Gamma 2.2 weights (we haven't converted our image to linear space yet for perf reasons)
+ float rWeight = 0.299f;
+ float gWeight = 0.587f;
+ float bWeight = 0.114f;
+
+ float oneMinusS = 1.0f - saturation;
+ rsMatrixSet(&colorMat, 0, 0, oneMinusS * rWeight + saturation);
+ rsMatrixSet(&colorMat, 0, 1, oneMinusS * rWeight);
+ rsMatrixSet(&colorMat, 0, 2, oneMinusS * rWeight);
+ rsMatrixSet(&colorMat, 1, 0, oneMinusS * gWeight);
+ rsMatrixSet(&colorMat, 1, 1, oneMinusS * gWeight + saturation);
+ rsMatrixSet(&colorMat, 1, 2, oneMinusS * gWeight);
+ rsMatrixSet(&colorMat, 2, 0, oneMinusS * bWeight);
+ rsMatrixSet(&colorMat, 2, 1, oneMinusS * bWeight);
+ rsMatrixSet(&colorMat, 2, 2, oneMinusS * bWeight + saturation);
+}
+
+void setGamma(float g) {
+ gamma = (float3)g;
+}
+
+
+void root(const void *v_in, void *v_out, const void *usrData, uint32_t x, uint32_t y) {
+ const uchar4 *input = v_in;
+ uchar4 *output = v_out;
+
+ float4 currentPixel = 0;
+
+ //currentPixel.xyz = convert_float3(input.xyz);
+ currentPixel.x = (float)(input->x);
+ currentPixel.y = (float)(input->y);
+ currentPixel.z = (float)(input->z);
+
+ float3 temp = rsMatrixMultiply(&colorMat, currentPixel.xyz);
+ temp = (clamp(temp, 0.f, 255.f) - inBlack) * overInWMinInB;
+ temp = pow(temp, (float3)gamma);
+ currentPixel.xyz = clamp(temp * outWMinOutB + outBlack, 0.f, 255.f);
+
+ //output.xyz = convert_uchar3(currentPixel.xyz);
+ output->x = (uint8_t)currentPixel.x;
+ output->y = (uint8_t)currentPixel.y;
+ output->z = (uint8_t)currentPixel.z;
+ output->w = input->w;
+}
+
diff --git a/libs/rs/java/ImageProcessing/src/com/android/rs/image/threshold.rs b/libs/rs/java/ImageProcessing/src/com/android/rs/image/threshold.rs
new file mode 100644
index 0000000..3cd43c7
--- /dev/null
+++ b/libs/rs/java/ImageProcessing/src/com/android/rs/image/threshold.rs
@@ -0,0 +1,105 @@
+#pragma version(1)
+
+#include "rs_types.rsh"
+#include "rs_math.rsh"
+
+#include "ip.rsh"
+
+int height;
+int width;
+int radius;
+
+uchar4 * InPixel;
+uchar4 * OutPixel;
+uchar4 * ScratchPixel;
+
+#pragma rs export_var(height, width, radius, InPixel, OutPixel, ScratchPixel, vBlurScript, hBlurScript, levelsScript)
+#pragma rs export_func(filter, filterBenchmark);
+
+rs_script vBlurScript;
+rs_script hBlurScript;
+rs_script levelsScript;
+
+
+// Store our coefficients here
+static float gaussian[MAX_RADIUS * 2 + 1];
+
+
+static void computeGaussianWeights() {
+ // Compute gaussian weights for the blur
+ // e is the euler's number
+ float e = 2.718281828459045f;
+ float pi = 3.1415926535897932f;
+ // g(x) = ( 1 / sqrt( 2 * pi ) * sigma) * e ^ ( -x^2 / 2 * sigma^2 )
+ // x is of the form [-radius .. 0 .. radius]
+ // and sigma varies with radius.
+ // Based on some experimental radius values and sigma's
+ // we approximately fit sigma = f(radius) as
+ // sigma = radius * 0.4 + 0.6
+ // The larger the radius gets, the more our gaussian blur
+ // will resemble a box blur since with large sigma
+ // the gaussian curve begins to lose its shape
+ float sigma = 0.4f * (float)radius + 0.6f;
+
+ // Now compute the coefficints
+ // We will store some redundant values to save some math during
+ // the blur calculations
+ // precompute some values
+ float coeff1 = 1.0f / (sqrt( 2.0f * pi ) * sigma);
+ float coeff2 = - 1.0f / (2.0f * sigma * sigma);
+
+ float normalizeFactor = 0.0f;
+ float floatR = 0.0f;
+ int r;
+ for(r = -radius; r <= radius; r ++) {
+ floatR = (float)r;
+ gaussian[r + radius] = coeff1 * pow(e, floatR * floatR * coeff2);
+ normalizeFactor += gaussian[r + radius];
+ }
+
+ //Now we need to normalize the weights because all our coefficients need to add up to one
+ normalizeFactor = 1.0f / normalizeFactor;
+ for(r = -radius; r <= radius; r ++) {
+ floatR = (float)r;
+ gaussian[r + radius] *= normalizeFactor;
+ }
+}
+
+
+static void blur() {
+ computeGaussianWeights();
+
+ FilterStruct fs;
+ fs.gaussian = gaussian;
+ fs.width = width;
+ fs.height = height;
+ fs.radius = radius;
+
+ fs.ain = rsGetAllocation(InPixel);
+ rsForEach(hBlurScript, fs.ain, rsGetAllocation(ScratchPixel), &fs);
+
+ fs.ain = rsGetAllocation(ScratchPixel);
+ rsForEach(vBlurScript, fs.ain, rsGetAllocation(OutPixel), &fs);
+}
+
+void filter() {
+ //RS_DEBUG(radius);
+
+ if(radius > 0) {
+ blur();
+ rsForEach(levelsScript, rsGetAllocation(OutPixel), rsGetAllocation(OutPixel), 0);
+ } else {
+ rsForEach(levelsScript, rsGetAllocation(InPixel), rsGetAllocation(OutPixel), 0);
+ }
+
+ int count = 0;
+ rsSendToClient(&count, 1, 4, 0);
+}
+
+void filterBenchmark() {
+ blur();
+
+ int count = 0;
+ rsSendToClient(&count, 1, 4, 0);
+}
+
diff --git a/libs/rs/java/ImageProcessing/res/raw/vertical_blur.rs b/libs/rs/java/ImageProcessing/src/com/android/rs/image/vertical_blur.rs
similarity index 93%
rename from libs/rs/java/ImageProcessing/res/raw/vertical_blur.rs
rename to libs/rs/java/ImageProcessing/src/com/android/rs/image/vertical_blur.rs
index f5f2d69..23387ef 100644
--- a/libs/rs/java/ImageProcessing/res/raw/vertical_blur.rs
+++ b/libs/rs/java/ImageProcessing/src/com/android/rs/image/vertical_blur.rs
@@ -1,7 +1,7 @@
#pragma version(1)
-#include "../../../../scriptc/rs_types.rsh"
-#include "../../../../scriptc/rs_math.rsh"
+#include "rs_types.rsh"
+#include "rs_math.rsh"
#include "ip.rsh"
diff --git a/libs/rs/rsContext.cpp b/libs/rs/rsContext.cpp
index 629b481..e897d00 100644
--- a/libs/rs/rsContext.cpp
+++ b/libs/rs/rsContext.cpp
@@ -361,7 +361,7 @@
Context *rsc = static_cast<Context *>(vrsc);
uint32_t idx = (uint32_t)android_atomic_inc(&rsc->mWorkers.mLaunchCount);
- LOGE("helperThreadProc 1 %p idx=%i", rsc, idx);
+ LOGV("RS helperThread starting %p idx=%i", rsc, idx);
rsc->mWorkers.mLaunchSignals[idx].init();
rsc->mWorkers.mNativeThreadId[idx] = gettid();
@@ -376,13 +376,13 @@
while(rsc->mRunning) {
rsc->mWorkers.mLaunchSignals[idx].wait();
if (rsc->mWorkers.mLaunchCallback) {
- LOGE("helperThreadProc 4");
rsc->mWorkers.mLaunchCallback(rsc->mWorkers.mLaunchData, idx);
}
- LOGE("helperThreadProc 5");
android_atomic_dec(&rsc->mWorkers.mRunningCount);
rsc->mWorkers.mCompleteSignal.set();
}
+
+ LOGV("RS helperThread exiting %p idx=%i", rsc, idx);
return NULL;
}
@@ -479,6 +479,10 @@
LOGE("Failed to start rs context thread.");
return;
}
+ while(!mRunning) {
+ usleep(100);
+ }
+
mWorkers.mRunningCount = 0;
mWorkers.mLaunchCount = 0;
for (uint32_t ct=0; ct < mWorkers.mCount; ct++) {
@@ -490,9 +494,6 @@
}
}
- while(!mRunning) {
- usleep(100);
- }
pthread_attr_destroy(&threadAttr);
}
diff --git a/libs/rs/rsContext.h b/libs/rs/rsContext.h
index 98ad3a4..b8fffbf 100644
--- a/libs/rs/rsContext.h
+++ b/libs/rs/rsContext.h
@@ -174,6 +174,7 @@
bool ext_OES_texture_npot() const {return mGL.OES_texture_npot;}
void launchThreads(WorkerCallback_t cbk, void *data);
+ uint32_t getWorkerPoolSize() const {return (uint32_t)mWorkers.mRunningCount;}
protected:
Device *mDev;
diff --git a/libs/rs/rsScriptC.cpp b/libs/rs/rsScriptC.cpp
index 9693b16e..7c7b037 100644
--- a/libs/rs/rsScriptC.cpp
+++ b/libs/rs/rsScriptC.cpp
@@ -171,7 +171,6 @@
static void wc_xy(void *usr, uint32_t idx)
{
MTLaunchStruct *mtls = (MTLaunchStruct *)usr;
- LOGE("usr %p, idx %i", usr, idx);
while (1) {
uint32_t slice = (uint32_t)android_atomic_inc(&mtls->mSliceNum);
@@ -279,32 +278,32 @@
}
- {
- LOGE("launch 1");
+ if ((rsc->getWorkerPoolSize() > 1) &&
+ ((mtls.dimY * mtls.dimZ * mtls.dimArray) > 1)) {
+
+ //LOGE("launch 1");
rsc->launchThreads(wc_xy, &mtls);
- LOGE("launch 2");
- }
+ //LOGE("launch 2");
+ } else {
+ for (uint32_t ar = mtls.arrayStart; ar < mtls.arrayEnd; ar++) {
+ for (uint32_t z = mtls.zStart; z < mtls.zEnd; z++) {
+ for (uint32_t y = mtls.yStart; y < mtls.yEnd; y++) {
+ uint32_t offset = mtls.dimX * mtls.dimY * mtls.dimZ * ar +
+ mtls.dimX * mtls.dimY * z +
+ mtls.dimX * y;
+ uint8_t *xPtrOut = mtls.ptrOut + (mtls.eStrideOut * offset);
+ const uint8_t *xPtrIn = mtls.ptrIn + (mtls.eStrideIn * offset);
-/*
- for (uint32_t ar = arrayStart; ar < arrayEnd; ar++) {
- for (uint32_t z = zStart; z < zEnd; z++) {
- for (uint32_t y = yStart; y < yEnd; y++) {
- uint32_t offset = dimX * dimY * dimZ * ar +
- dimX * dimY * z +
- dimX * y;
- uint8_t *xPtrOut = ptrOut + (eStrideOut * offset);
- const uint8_t *xPtrIn = ptrIn + (eStrideIn * offset);
-
- for (uint32_t x = xStart; x < xEnd; x++) {
- ((rs_t)mProgram.mRoot) (xPtrIn, xPtrOut, usr, x, y, z, ar);
- xPtrIn += eStrideIn;
- xPtrOut += eStrideOut;
+ for (uint32_t x = mtls.xStart; x < mtls.xEnd; x++) {
+ ((rs_t)mProgram.mRoot) (xPtrIn, xPtrOut, usr, x, y, z, ar);
+ xPtrIn += mtls.eStrideIn;
+ xPtrOut += mtls.eStrideOut;
+ }
}
}
}
-
}
-*/
+
setTLS(oldTLS);
}
@@ -391,9 +390,16 @@
bccGetExportFuncs(s->mBccScript, NULL, s->mEnviroment.mInvokeFunctionCount, (BCCvoid **) s->mEnviroment.mInvokeFunctions);
}
- s->mEnviroment.mFieldAddress = (void **)calloc(100, sizeof(void *));
- bccGetExportVars(s->mBccScript, (BCCsizei *)&s->mEnviroment.mFieldCount,
- 100, s->mEnviroment.mFieldAddress);
+ bccGetExportVars(s->mBccScript, (BCCsizei*) &s->mEnviroment.mFieldCount, 0, NULL);
+ if(s->mEnviroment.mFieldCount <= 0)
+ s->mEnviroment.mFieldAddress = NULL;
+ else {
+ s->mEnviroment.mFieldAddress = (void **) calloc(s->mEnviroment.mFieldCount, sizeof(void *));
+ bccGetExportVars(s->mBccScript, NULL, s->mEnviroment.mFieldCount, (BCCvoid **) s->mEnviroment.mFieldAddress);
+ }
+ //for (int ct2=0; ct2 < s->mEnviroment.mFieldCount; ct2++ ) {
+ //LOGE("Script field %i = %p", ct2, s->mEnviroment.mFieldAddress[ct2]);
+ //}
s->mEnviroment.mFragment.set(rsc->getDefaultProgramFragment());
s->mEnviroment.mVertex.set(rsc->getDefaultProgramVertex());
diff --git a/libs/rs/rsType.cpp b/libs/rs/rsType.cpp
index 52e0d52..79cfd41 100644
--- a/libs/rs/rsType.cpp
+++ b/libs/rs/rsType.cpp
@@ -145,6 +145,10 @@
void Type::makeGLComponents()
{
+ if(getElement()->getFieldCount() >= RS_MAX_ATTRIBS) {
+ return;
+ }
+
uint32_t userNum = 0;
for (uint32_t ct=0; ct < getElement()->getFieldCount(); ct++) {
diff --git a/media/libeffects/factory/EffectsFactory.c b/media/libeffects/factory/EffectsFactory.c
index edd6184..0be280c 100644
--- a/media/libeffects/factory/EffectsFactory.c
+++ b/media/libeffects/factory/EffectsFactory.c
@@ -31,7 +31,7 @@
static list_elem_t *gCurEffect; // current effect in enumeration process
static uint32_t gCurEffectIdx; // current effect index in enumeration process
-static const char * const gEffectLibPath = "/system/lib/soundfx"; // path to built-in effect libraries
+const char * const gEffectLibPath = "/system/lib/soundfx"; // path to built-in effect libraries
static int gInitDone; // true is global initialization has been preformed
static int gNextLibId; // used by loadLibrary() to allocate unique library handles
static int gCanQueryEffect; // indicates that call to EffectQueryEffect() is valid, i.e. that the list of effects
diff --git a/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp b/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp
index 3bbcf55..9e39e79 100644
--- a/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp
+++ b/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp
@@ -18,7 +18,7 @@
#define LOG_TAG "Bundle"
#define ARRAY_SIZE(array) (sizeof array / sizeof array[0])
#define LVM_BUNDLE // Include all the bundle code
-#define LOG_NDEBUG 0
+//#define LOG_NDEBUG 0
#include <cutils/log.h>
#include <assert.h>
diff --git a/media/libmedia/AudioSystem.cpp b/media/libmedia/AudioSystem.cpp
index 372a927..9c2a8ba 100644
--- a/media/libmedia/AudioSystem.cpp
+++ b/media/libmedia/AudioSystem.cpp
@@ -590,18 +590,22 @@
return output;
}
-status_t AudioSystem::startOutput(audio_io_handle_t output, AudioSystem::stream_type stream)
+status_t AudioSystem::startOutput(audio_io_handle_t output,
+ AudioSystem::stream_type stream,
+ int session)
{
const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
- return aps->startOutput(output, stream);
+ return aps->startOutput(output, stream, session);
}
-status_t AudioSystem::stopOutput(audio_io_handle_t output, AudioSystem::stream_type stream)
+status_t AudioSystem::stopOutput(audio_io_handle_t output,
+ AudioSystem::stream_type stream,
+ int session)
{
const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
- return aps->stopOutput(output, stream);
+ return aps->stopOutput(output, stream, session);
}
void AudioSystem::releaseOutput(audio_io_handle_t output)
@@ -666,6 +670,38 @@
return aps->getStreamVolumeIndex(stream, index);
}
+uint32_t AudioSystem::getStrategyForStream(AudioSystem::stream_type stream)
+{
+ const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ if (aps == 0) return 0;
+ return aps->getStrategyForStream(stream);
+}
+
+audio_io_handle_t AudioSystem::getOutputForEffect(effect_descriptor_t *desc)
+{
+ const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ if (aps == 0) return PERMISSION_DENIED;
+ return aps->getOutputForEffect(desc);
+}
+
+status_t AudioSystem::registerEffect(effect_descriptor_t *desc,
+ audio_io_handle_t output,
+ uint32_t strategy,
+ int session,
+ int id)
+{
+ const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ if (aps == 0) return PERMISSION_DENIED;
+ return aps->registerEffect(desc, output, strategy, session, id);
+}
+
+status_t AudioSystem::unregisterEffect(int id)
+{
+ const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ if (aps == 0) return PERMISSION_DENIED;
+ return aps->unregisterEffect(id);
+}
+
// ---------------------------------------------------------------------------
void AudioSystem::AudioPolicyServiceClient::binderDied(const wp<IBinder>& who) {
diff --git a/media/libmedia/IAudioFlinger.cpp b/media/libmedia/IAudioFlinger.cpp
index 7d6a5d3..3a89e25 100644
--- a/media/libmedia/IAudioFlinger.cpp
+++ b/media/libmedia/IAudioFlinger.cpp
@@ -69,7 +69,8 @@
QUERY_NUM_EFFECTS,
QUERY_EFFECT,
GET_EFFECT_DESCRIPTOR,
- CREATE_EFFECT
+ CREATE_EFFECT,
+ MOVE_EFFECTS
};
class BpAudioFlinger : public BpInterface<IAudioFlinger>
@@ -676,6 +677,17 @@
return effect;
}
+
+ virtual status_t moveEffects(int session, int srcOutput, int dstOutput)
+ {
+ Parcel data, reply;
+ data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
+ data.writeInt32(session);
+ data.writeInt32(srcOutput);
+ data.writeInt32(dstOutput);
+ remote()->transact(MOVE_EFFECTS, data, &reply);
+ return reply.readInt32();
+ }
};
IMPLEMENT_META_INTERFACE(AudioFlinger, "android.media.IAudioFlinger");
@@ -1024,6 +1036,14 @@
reply->write(&desc, sizeof(effect_descriptor_t));
return NO_ERROR;
} break;
+ case MOVE_EFFECTS: {
+ CHECK_INTERFACE(IAudioFlinger, data, reply);
+ int session = data.readInt32();
+ int srcOutput = data.readInt32();
+ int dstOutput = data.readInt32();
+ reply->writeInt32(moveEffects(session, srcOutput, dstOutput));
+ return NO_ERROR;
+ } break;
default:
return BBinder::onTransact(code, data, reply, flags);
}
diff --git a/media/libmedia/IAudioPolicyService.cpp b/media/libmedia/IAudioPolicyService.cpp
index 18dd173..950c213 100644
--- a/media/libmedia/IAudioPolicyService.cpp
+++ b/media/libmedia/IAudioPolicyService.cpp
@@ -44,7 +44,11 @@
RELEASE_INPUT,
INIT_STREAM_VOLUME,
SET_STREAM_VOLUME,
- GET_STREAM_VOLUME
+ GET_STREAM_VOLUME,
+ GET_STRATEGY_FOR_STREAM,
+ GET_OUTPUT_FOR_EFFECT,
+ REGISTER_EFFECT,
+ UNREGISTER_EFFECT
};
class BpAudioPolicyService : public BpInterface<IAudioPolicyService>
@@ -137,22 +141,28 @@
return static_cast <audio_io_handle_t> (reply.readInt32());
}
- virtual status_t startOutput(audio_io_handle_t output, AudioSystem::stream_type stream)
+ virtual status_t startOutput(audio_io_handle_t output,
+ AudioSystem::stream_type stream,
+ int session)
{
Parcel data, reply;
data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
data.writeInt32(output);
data.writeInt32(stream);
+ data.writeInt32(session);
remote()->transact(START_OUTPUT, data, &reply);
return static_cast <status_t> (reply.readInt32());
}
- virtual status_t stopOutput(audio_io_handle_t output, AudioSystem::stream_type stream)
+ virtual status_t stopOutput(audio_io_handle_t output,
+ AudioSystem::stream_type stream,
+ int session)
{
Parcel data, reply;
data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
data.writeInt32(output);
data.writeInt32(stream);
+ data.writeInt32(session);
remote()->transact(STOP_OUTPUT, data, &reply);
return static_cast <status_t> (reply.readInt32());
}
@@ -242,6 +252,51 @@
if (index) *index = lIndex;
return static_cast <status_t> (reply.readInt32());
}
+
+ virtual uint32_t getStrategyForStream(AudioSystem::stream_type stream)
+ {
+ Parcel data, reply;
+ data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
+ data.writeInt32(static_cast <uint32_t>(stream));
+ remote()->transact(GET_STRATEGY_FOR_STREAM, data, &reply);
+ return reply.readInt32();
+ }
+
+ virtual audio_io_handle_t getOutputForEffect(effect_descriptor_t *desc)
+ {
+ Parcel data, reply;
+ data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
+ data.write(desc, sizeof(effect_descriptor_t));
+ remote()->transact(GET_OUTPUT_FOR_EFFECT, data, &reply);
+ return static_cast <audio_io_handle_t> (reply.readInt32());
+ }
+
+ virtual status_t registerEffect(effect_descriptor_t *desc,
+ audio_io_handle_t output,
+ uint32_t strategy,
+ int session,
+ int id)
+ {
+ Parcel data, reply;
+ data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
+ data.write(desc, sizeof(effect_descriptor_t));
+ data.writeInt32(output);
+ data.writeInt32(strategy);
+ data.writeInt32(session);
+ data.writeInt32(id);
+ remote()->transact(REGISTER_EFFECT, data, &reply);
+ return static_cast <status_t> (reply.readInt32());
+ }
+
+ virtual status_t unregisterEffect(int id)
+ {
+ Parcel data, reply;
+ data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
+ data.writeInt32(id);
+ remote()->transact(UNREGISTER_EFFECT, data, &reply);
+ return static_cast <status_t> (reply.readInt32());
+ }
+
};
IMPLEMENT_META_INTERFACE(AudioPolicyService, "android.media.IAudioPolicyService");
@@ -255,18 +310,24 @@
switch(code) {
case SET_DEVICE_CONNECTION_STATE: {
CHECK_INTERFACE(IAudioPolicyService, data, reply);
- AudioSystem::audio_devices device = static_cast <AudioSystem::audio_devices>(data.readInt32());
- AudioSystem::device_connection_state state = static_cast <AudioSystem::device_connection_state>(data.readInt32());
+ AudioSystem::audio_devices device =
+ static_cast <AudioSystem::audio_devices>(data.readInt32());
+ AudioSystem::device_connection_state state =
+ static_cast <AudioSystem::device_connection_state>(data.readInt32());
const char *device_address = data.readCString();
- reply->writeInt32(static_cast <uint32_t>(setDeviceConnectionState(device, state, device_address)));
+ reply->writeInt32(static_cast<uint32_t> (setDeviceConnectionState(device,
+ state,
+ device_address)));
return NO_ERROR;
} break;
case GET_DEVICE_CONNECTION_STATE: {
CHECK_INTERFACE(IAudioPolicyService, data, reply);
- AudioSystem::audio_devices device = static_cast <AudioSystem::audio_devices>(data.readInt32());
+ AudioSystem::audio_devices device =
+ static_cast<AudioSystem::audio_devices> (data.readInt32());
const char *device_address = data.readCString();
- reply->writeInt32(static_cast <uint32_t>(getDeviceConnectionState(device, device_address)));
+ reply->writeInt32(static_cast<uint32_t> (getDeviceConnectionState(device,
+ device_address)));
return NO_ERROR;
} break;
@@ -287,7 +348,8 @@
case SET_FORCE_USE: {
CHECK_INTERFACE(IAudioPolicyService, data, reply);
AudioSystem::force_use usage = static_cast <AudioSystem::force_use>(data.readInt32());
- AudioSystem::forced_config config = static_cast <AudioSystem::forced_config>(data.readInt32());
+ AudioSystem::forced_config config =
+ static_cast <AudioSystem::forced_config>(data.readInt32());
reply->writeInt32(static_cast <uint32_t>(setForceUse(usage, config)));
return NO_ERROR;
} break;
@@ -301,11 +363,13 @@
case GET_OUTPUT: {
CHECK_INTERFACE(IAudioPolicyService, data, reply);
- AudioSystem::stream_type stream = static_cast <AudioSystem::stream_type>(data.readInt32());
+ AudioSystem::stream_type stream =
+ static_cast <AudioSystem::stream_type>(data.readInt32());
uint32_t samplingRate = data.readInt32();
uint32_t format = data.readInt32();
uint32_t channels = data.readInt32();
- AudioSystem::output_flags flags = static_cast <AudioSystem::output_flags>(data.readInt32());
+ AudioSystem::output_flags flags =
+ static_cast <AudioSystem::output_flags>(data.readInt32());
audio_io_handle_t output = getOutput(stream,
samplingRate,
@@ -320,7 +384,10 @@
CHECK_INTERFACE(IAudioPolicyService, data, reply);
audio_io_handle_t output = static_cast <audio_io_handle_t>(data.readInt32());
uint32_t stream = data.readInt32();
- reply->writeInt32(static_cast <uint32_t>(startOutput(output, (AudioSystem::stream_type)stream)));
+ int session = data.readInt32();
+ reply->writeInt32(static_cast <uint32_t>(startOutput(output,
+ (AudioSystem::stream_type)stream,
+ session)));
return NO_ERROR;
} break;
@@ -328,7 +395,10 @@
CHECK_INTERFACE(IAudioPolicyService, data, reply);
audio_io_handle_t output = static_cast <audio_io_handle_t>(data.readInt32());
uint32_t stream = data.readInt32();
- reply->writeInt32(static_cast <uint32_t>(stopOutput(output, (AudioSystem::stream_type)stream)));
+ int session = data.readInt32();
+ reply->writeInt32(static_cast <uint32_t>(stopOutput(output,
+ (AudioSystem::stream_type)stream,
+ session)));
return NO_ERROR;
} break;
@@ -345,7 +415,8 @@
uint32_t samplingRate = data.readInt32();
uint32_t format = data.readInt32();
uint32_t channels = data.readInt32();
- AudioSystem::audio_in_acoustics acoustics = static_cast <AudioSystem::audio_in_acoustics>(data.readInt32());
+ AudioSystem::audio_in_acoustics acoustics =
+ static_cast <AudioSystem::audio_in_acoustics>(data.readInt32());
audio_io_handle_t input = getInput(inputSource,
samplingRate,
format,
@@ -378,7 +449,8 @@
case INIT_STREAM_VOLUME: {
CHECK_INTERFACE(IAudioPolicyService, data, reply);
- AudioSystem::stream_type stream = static_cast <AudioSystem::stream_type>(data.readInt32());
+ AudioSystem::stream_type stream =
+ static_cast <AudioSystem::stream_type>(data.readInt32());
int indexMin = data.readInt32();
int indexMax = data.readInt32();
reply->writeInt32(static_cast <uint32_t>(initStreamVolume(stream, indexMin,indexMax)));
@@ -387,7 +459,8 @@
case SET_STREAM_VOLUME: {
CHECK_INTERFACE(IAudioPolicyService, data, reply);
- AudioSystem::stream_type stream = static_cast <AudioSystem::stream_type>(data.readInt32());
+ AudioSystem::stream_type stream =
+ static_cast <AudioSystem::stream_type>(data.readInt32());
int index = data.readInt32();
reply->writeInt32(static_cast <uint32_t>(setStreamVolumeIndex(stream, index)));
return NO_ERROR;
@@ -395,7 +468,8 @@
case GET_STREAM_VOLUME: {
CHECK_INTERFACE(IAudioPolicyService, data, reply);
- AudioSystem::stream_type stream = static_cast <AudioSystem::stream_type>(data.readInt32());
+ AudioSystem::stream_type stream =
+ static_cast <AudioSystem::stream_type>(data.readInt32());
int index;
status_t status = getStreamVolumeIndex(stream, &index);
reply->writeInt32(index);
@@ -403,6 +477,46 @@
return NO_ERROR;
} break;
+ case GET_STRATEGY_FOR_STREAM: {
+ CHECK_INTERFACE(IAudioPolicyService, data, reply);
+ AudioSystem::stream_type stream =
+ static_cast <AudioSystem::stream_type>(data.readInt32());
+ reply->writeInt32(getStrategyForStream(stream));
+ return NO_ERROR;
+ } break;
+
+ case GET_OUTPUT_FOR_EFFECT: {
+ CHECK_INTERFACE(IAudioPolicyService, data, reply);
+ effect_descriptor_t desc;
+ data.read(&desc, sizeof(effect_descriptor_t));
+ audio_io_handle_t output = getOutputForEffect(&desc);
+ reply->writeInt32(static_cast <int>(output));
+ return NO_ERROR;
+ } break;
+
+ case REGISTER_EFFECT: {
+ CHECK_INTERFACE(IAudioPolicyService, data, reply);
+ effect_descriptor_t desc;
+ data.read(&desc, sizeof(effect_descriptor_t));
+ audio_io_handle_t output = data.readInt32();
+ uint32_t strategy = data.readInt32();
+ int session = data.readInt32();
+ int id = data.readInt32();
+ reply->writeInt32(static_cast <int32_t>(registerEffect(&desc,
+ output,
+ strategy,
+ session,
+ id)));
+ return NO_ERROR;
+ } break;
+
+ case UNREGISTER_EFFECT: {
+ CHECK_INTERFACE(IAudioPolicyService, data, reply);
+ int id = data.readInt32();
+ reply->writeInt32(static_cast <int32_t>(unregisterEffect(id)));
+ return NO_ERROR;
+ } break;
+
default:
return BBinder::onTransact(code, data, reply, flags);
}
diff --git a/media/libmediaplayerservice/StagefrightRecorder.cpp b/media/libmediaplayerservice/StagefrightRecorder.cpp
index 24b0e7b..3dbcfd5 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.cpp
+++ b/media/libmediaplayerservice/StagefrightRecorder.cpp
@@ -24,6 +24,7 @@
#include <media/stagefright/AudioSource.h>
#include <media/stagefright/AMRWriter.h>
#include <media/stagefright/CameraSource.h>
+#include <media/stagefright/CameraSourceTimeLapse.h>
#include <media/stagefright/MPEG4Writer.h>
#include <media/stagefright/MediaDebug.h>
#include <media/stagefright/MediaDefs.h>
@@ -406,16 +407,6 @@
return OK;
}
-status_t StagefrightRecorder::setParamTrackFrameStatus(int32_t nFrames) {
- LOGV("setParamTrackFrameStatus: %d", nFrames);
- if (nFrames <= 0) {
- LOGE("Invalid number of frames to track: %d", nFrames);
- return BAD_VALUE;
- }
- mTrackEveryNumberOfFrames = nFrames;
- return OK;
-}
-
status_t StagefrightRecorder::setParamTrackTimeStatus(int64_t timeDurationUs) {
LOGV("setParamTrackTimeStatus: %lld", timeDurationUs);
if (timeDurationUs < 20000) { // Infeasible if shorter than 20 ms?
@@ -510,11 +501,6 @@
if (safe_strtoi32(value.string(), &use64BitOffset)) {
return setParam64BitFileOffset(use64BitOffset != 0);
}
- } else if (key == "param-track-frame-status") {
- int32_t nFrames;
- if (safe_strtoi32(value.string(), &nFrames)) {
- return setParamTrackFrameStatus(nFrames);
- }
} else if (key == "param-track-time-status") {
int64_t timeDurationUs;
if (safe_strtoi64(value.string(), &timeDurationUs)) {
@@ -895,11 +881,10 @@
status_t err = setupCameraSource();
if (err != OK) return err;
- sp<CameraSource> cameraSource = CameraSource::CreateFromCamera(mCamera);
+ sp<CameraSource> cameraSource = (mCaptureTimeLapse) ?
+ CameraSourceTimeLapse::CreateFromCamera(mCamera, true, 3E6, mFrameRate):
+ CameraSource::CreateFromCamera(mCamera);
CHECK(cameraSource != NULL);
- if(mCaptureTimeLapse) {
- cameraSource->enableTimeLapseMode(1E6, mFrameRate);
- }
sp<MetaData> enc_meta = new MetaData;
enc_meta->setInt32(kKeyBitRate, mVideoBitRate);
@@ -949,9 +934,11 @@
OMXClient client;
CHECK_EQ(client.connect(), OK);
+ uint32_t encoder_flags = (mCaptureTimeLapse) ? OMXCodec::kPreferSoftwareCodecs : 0;
sp<MediaSource> encoder = OMXCodec::Create(
client.interface(), enc_meta,
- true /* createEncoder */, cameraSource);
+ true /* createEncoder */, cameraSource,
+ NULL, encoder_flags);
if (encoder == NULL) {
return UNKNOWN_ERROR;
}
@@ -1014,9 +1001,6 @@
meta->setInt32(kKeyBitRate, totalBitRate);
meta->setInt32(kKey64BitFileOffset, mUse64BitFileOffset);
meta->setInt32(kKeyTimeScale, mMovieTimeScale);
- if (mTrackEveryNumberOfFrames > 0) {
- meta->setInt32(kKeyTrackFrameStatus, mTrackEveryNumberOfFrames);
- }
if (mTrackEveryTimeDurationUs > 0) {
meta->setInt64(kKeyTrackTimeStatus, mTrackEveryTimeDurationUs);
}
@@ -1095,7 +1079,6 @@
mVideoEncoderLevel = -1;
mMaxFileDurationUs = 0;
mMaxFileSizeBytes = 0;
- mTrackEveryNumberOfFrames = 0;
mTrackEveryTimeDurationUs = 0;
mCaptureTimeLapse = false;
mEncoderProfiles = MediaProfiles::getInstance();
@@ -1140,8 +1123,6 @@
result.append(buffer);
snprintf(buffer, SIZE, " Interleave duration (us): %d\n", mInterleaveDurationUs);
result.append(buffer);
- snprintf(buffer, SIZE, " Progress notification: %d frames\n", mTrackEveryNumberOfFrames);
- result.append(buffer);
snprintf(buffer, SIZE, " Progress notification: %lld us\n", mTrackEveryTimeDurationUs);
result.append(buffer);
snprintf(buffer, SIZE, " Audio\n");
diff --git a/media/libmediaplayerservice/StagefrightRecorder.h b/media/libmediaplayerservice/StagefrightRecorder.h
index f51d7f8..232fc0e 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.h
+++ b/media/libmediaplayerservice/StagefrightRecorder.h
@@ -90,7 +90,6 @@
int32_t mAudioTimeScale;
int64_t mMaxFileSizeBytes;
int64_t mMaxFileDurationUs;
- int32_t mTrackEveryNumberOfFrames;
int64_t mTrackEveryTimeDurationUs;
bool mCaptureTimeLapse;
@@ -122,7 +121,6 @@
status_t setParamVideoCameraId(int32_t cameraId);
status_t setParamVideoTimeScale(int32_t timeScale);
status_t setParamTrackTimeStatus(int64_t timeDurationUs);
- status_t setParamTrackFrameStatus(int32_t nFrames);
status_t setParamInterleaveDuration(int32_t durationUs);
status_t setParam64BitFileOffset(bool use64BitFileOffset);
status_t setParamMaxFileDurationUs(int64_t timeUs);
diff --git a/media/libstagefright/Android.mk b/media/libstagefright/Android.mk
index 89bfc1f..bf5643d 100644
--- a/media/libstagefright/Android.mk
+++ b/media/libstagefright/Android.mk
@@ -10,6 +10,7 @@
AudioSource.cpp \
AwesomePlayer.cpp \
CameraSource.cpp \
+ CameraSourceTimeLapse.cpp \
DataSource.cpp \
ESDS.cpp \
FileSource.cpp \
diff --git a/media/libstagefright/AudioPlayer.cpp b/media/libstagefright/AudioPlayer.cpp
index b79ba13..b7bde6b 100644
--- a/media/libstagefright/AudioPlayer.cpp
+++ b/media/libstagefright/AudioPlayer.cpp
@@ -41,6 +41,9 @@
mReachedEOS(false),
mFinalStatus(OK),
mStarted(false),
+ mIsFirstBuffer(false),
+ mFirstBufferResult(OK),
+ mFirstBuffer(NULL),
mAudioSink(audioSink) {
}
@@ -68,6 +71,24 @@
}
}
+ // We allow an optional INFO_FORMAT_CHANGED at the very beginning
+ // of playback, if there is one, getFormat below will retrieve the
+ // updated format, if there isn't, we'll stash away the valid buffer
+ // of data to be used on the first audio callback.
+
+ CHECK(mFirstBuffer == NULL);
+
+ mFirstBufferResult = mSource->read(&mFirstBuffer);
+ if (mFirstBufferResult == INFO_FORMAT_CHANGED) {
+ LOGV("INFO_FORMAT_CHANGED!!!");
+
+ CHECK(mFirstBuffer == NULL);
+ mFirstBufferResult = OK;
+ mIsFirstBuffer = false;
+ } else {
+ mIsFirstBuffer = true;
+ }
+
sp<MetaData> format = mSource->getFormat();
const char *mime;
bool success = format->findCString(kKeyMIMEType, &mime);
@@ -87,6 +108,11 @@
DEFAULT_AUDIOSINK_BUFFERCOUNT,
&AudioPlayer::AudioSinkCallback, this);
if (err != OK) {
+ if (mFirstBuffer != NULL) {
+ mFirstBuffer->release();
+ mFirstBuffer = NULL;
+ }
+
if (!sourceAlreadyStarted) {
mSource->stop();
}
@@ -110,6 +136,11 @@
delete mAudioTrack;
mAudioTrack = NULL;
+ if (mFirstBuffer != NULL) {
+ mFirstBuffer->release();
+ mFirstBuffer = NULL;
+ }
+
if (!sourceAlreadyStarted) {
mSource->stop();
}
@@ -163,6 +194,12 @@
// Make sure to release any buffer we hold onto so that the
// source is able to stop().
+
+ if (mFirstBuffer != NULL) {
+ mFirstBuffer->release();
+ mFirstBuffer = NULL;
+ }
+
if (mInputBuffer != NULL) {
LOGV("AudioPlayer releasing input buffer.");
@@ -247,6 +284,14 @@
Mutex::Autolock autoLock(mLock);
if (mSeeking) {
+ if (mIsFirstBuffer) {
+ if (mFirstBuffer != NULL) {
+ mFirstBuffer->release();
+ mFirstBuffer = NULL;
+ }
+ mIsFirstBuffer = false;
+ }
+
options.setSeekTo(mSeekTimeUs);
if (mInputBuffer != NULL) {
@@ -259,7 +304,17 @@
}
if (mInputBuffer == NULL) {
- status_t err = mSource->read(&mInputBuffer, &options);
+ status_t err;
+
+ if (mIsFirstBuffer) {
+ mInputBuffer = mFirstBuffer;
+ mFirstBuffer = NULL;
+ err = mFirstBufferResult;
+
+ mIsFirstBuffer = false;
+ } else {
+ err = mSource->read(&mInputBuffer, &options);
+ }
CHECK((err == OK && mInputBuffer != NULL)
|| (err != OK && mInputBuffer == NULL));
diff --git a/media/libstagefright/CameraSource.cpp b/media/libstagefright/CameraSource.cpp
index bb53d97..aa0893c 100644
--- a/media/libstagefright/CameraSource.cpp
+++ b/media/libstagefright/CameraSource.cpp
@@ -65,6 +65,11 @@
void CameraSourceListener::postData(int32_t msgType, const sp<IMemory> &dataPtr) {
LOGV("postData(%d, ptr:%p, size:%d)",
msgType, dataPtr->pointer(), dataPtr->size());
+
+ sp<CameraSource> source = mSource.promote();
+ if (source.get() != NULL) {
+ source->dataCallback(msgType, dataPtr);
+ }
}
void CameraSourceListener::postDataTimestamp(
@@ -116,33 +121,17 @@
return new CameraSource(camera);
}
-void CameraSource::enableTimeLapseMode(
- int64_t timeBetweenTimeLapseFrameCaptureUs, int32_t videoFrameRate) {
- LOGV("starting time lapse mode");
- mTimeBetweenTimeLapseFrameCaptureUs = timeBetweenTimeLapseFrameCaptureUs;
- mTimeBetweenTimeLapseVideoFramesUs = (1E6/videoFrameRate);
-}
-
-void CameraSource::disableTimeLapseMode() {
- LOGV("stopping time lapse mode");
- mTimeBetweenTimeLapseFrameCaptureUs = -1;
- mTimeBetweenTimeLapseVideoFramesUs = 0;
-}
-
CameraSource::CameraSource(const sp<Camera> &camera)
: mCamera(camera),
- mFirstFrameTimeUs(0),
- mLastFrameTimestampUs(0),
mNumFramesReceived(0),
+ mLastFrameTimestampUs(0),
+ mStarted(false),
+ mFirstFrameTimeUs(0),
mNumFramesEncoded(0),
mNumFramesDropped(0),
mNumGlitches(0),
mGlitchDurationThresholdUs(200000),
- mCollectStats(false),
- mStarted(false),
- mTimeBetweenTimeLapseFrameCaptureUs(-1),
- mTimeBetweenTimeLapseVideoFramesUs(0),
- mLastTimeLapseFrameRealTimestampUs(0) {
+ mCollectStats(false) {
int64_t token = IPCThreadState::self()->clearCallingIdentity();
String8 s = mCamera->getParameters();
@@ -177,7 +166,6 @@
mMeta->setInt32(kKeyHeight, height);
mMeta->setInt32(kKeyStride, stride);
mMeta->setInt32(kKeySliceHeight, sliceHeight);
-
}
CameraSource::~CameraSource() {
@@ -186,6 +174,10 @@
}
}
+void CameraSource::startCameraRecording() {
+ CHECK_EQ(OK, mCamera->startRecording());
+}
+
status_t CameraSource::start(MetaData *meta) {
CHECK(!mStarted);
@@ -203,13 +195,17 @@
int64_t token = IPCThreadState::self()->clearCallingIdentity();
mCamera->setListener(new CameraSourceListener(this));
- CHECK_EQ(OK, mCamera->startRecording());
+ startCameraRecording();
IPCThreadState::self()->restoreCallingIdentity(token);
mStarted = true;
return OK;
}
+void CameraSource::stopCameraRecording() {
+ mCamera->stopRecording();
+}
+
status_t CameraSource::stop() {
LOGV("stop");
Mutex::Autolock autoLock(mLock);
@@ -218,7 +214,7 @@
int64_t token = IPCThreadState::self()->clearCallingIdentity();
mCamera->setListener(NULL);
- mCamera->stopRecording();
+ stopCameraRecording();
releaseQueuedFrames();
while (!mFramesBeingEncoded.empty()) {
LOGI("Waiting for outstanding frames being encoded: %d",
@@ -238,11 +234,15 @@
return OK;
}
+void CameraSource::releaseRecordingFrame(const sp<IMemory>& frame) {
+ mCamera->releaseRecordingFrame(frame);
+}
+
void CameraSource::releaseQueuedFrames() {
List<sp<IMemory> >::iterator it;
while (!mFramesReceived.empty()) {
it = mFramesReceived.begin();
- mCamera->releaseRecordingFrame(*it);
+ releaseRecordingFrame(*it);
mFramesReceived.erase(it);
++mNumFramesDropped;
}
@@ -254,7 +254,7 @@
void CameraSource::releaseOneRecordingFrame(const sp<IMemory>& frame) {
int64_t token = IPCThreadState::self()->clearCallingIdentity();
- mCamera->releaseRecordingFrame(frame);
+ releaseRecordingFrame(frame);
IPCThreadState::self()->restoreCallingIdentity(token);
}
@@ -263,7 +263,6 @@
for (List<sp<IMemory> >::iterator it = mFramesBeingEncoded.begin();
it != mFramesBeingEncoded.end(); ++it) {
if ((*it)->pointer() == buffer->data()) {
-
releaseOneRecordingFrame((*it));
mFramesBeingEncoded.erase(it);
++mNumFramesEncoded;
@@ -332,33 +331,11 @@
++mNumGlitches;
}
- // time lapse
- if(mTimeBetweenTimeLapseFrameCaptureUs >= 0) {
- if(mLastTimeLapseFrameRealTimestampUs == 0) {
- // First time lapse frame. Initialize mLastTimeLapseFrameRealTimestampUs
- // to current time (timestampUs) and save frame data.
- LOGV("dataCallbackTimestamp timelapse: initial frame");
-
- mLastTimeLapseFrameRealTimestampUs = timestampUs;
- } else if (timestampUs <
- (mLastTimeLapseFrameRealTimestampUs + mTimeBetweenTimeLapseFrameCaptureUs)) {
- // Skip all frames from last encoded frame until
- // sufficient time (mTimeBetweenTimeLapseFrameCaptureUs) has passed.
- // Tell the camera to release its recording frame and return.
- LOGV("dataCallbackTimestamp timelapse: skipping intermediate frame");
-
- releaseOneRecordingFrame(data);
- return;
- } else {
- // Desired frame has arrived after mTimeBetweenTimeLapseFrameCaptureUs time:
- // - Reset mLastTimeLapseFrameRealTimestampUs to current time.
- // - Artificially modify timestampUs to be one frame time (1/framerate) ahead
- // of the last encoded frame's time stamp.
- LOGV("dataCallbackTimestamp timelapse: got timelapse frame");
-
- mLastTimeLapseFrameRealTimestampUs = timestampUs;
- timestampUs = mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs;
- }
+ // May need to skip frame or modify timestamp. Currently implemented
+ // by the subclass CameraSourceTimeLapse.
+ if(skipCurrentFrame(timestampUs)) {
+ releaseOneRecordingFrame(data);
+ return;
}
mLastFrameTimestampUs = timestampUs;
diff --git a/media/libstagefright/CameraSourceTimeLapse.cpp b/media/libstagefright/CameraSourceTimeLapse.cpp
new file mode 100644
index 0000000..30ed143
--- /dev/null
+++ b/media/libstagefright/CameraSourceTimeLapse.cpp
@@ -0,0 +1,255 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "CameraSourceTimeLapse"
+
+#include <binder/IPCThreadState.h>
+#include <binder/MemoryBase.h>
+#include <binder/MemoryHeapBase.h>
+#include <media/stagefright/CameraSource.h>
+#include <media/stagefright/CameraSourceTimeLapse.h>
+#include <media/stagefright/MediaDebug.h>
+#include <media/stagefright/MetaData.h>
+#include <camera/Camera.h>
+#include <camera/CameraParameters.h>
+#include <utils/String8.h>
+
+namespace android {
+
+// static
+CameraSourceTimeLapse *CameraSourceTimeLapse::Create(bool useStillCameraForTimeLapse,
+ int64_t timeBetweenTimeLapseFrameCaptureUs,
+ int32_t videoFrameRate) {
+ sp<Camera> camera = Camera::connect(0);
+
+ if (camera.get() == NULL) {
+ return NULL;
+ }
+
+ return new CameraSourceTimeLapse(camera, useStillCameraForTimeLapse,
+ timeBetweenTimeLapseFrameCaptureUs, videoFrameRate);
+}
+
+// static
+CameraSourceTimeLapse *CameraSourceTimeLapse::CreateFromCamera(const sp<Camera> &camera,
+ bool useStillCameraForTimeLapse,
+ int64_t timeBetweenTimeLapseFrameCaptureUs,
+ int32_t videoFrameRate) {
+ if (camera.get() == NULL) {
+ return NULL;
+ }
+
+ return new CameraSourceTimeLapse(camera, useStillCameraForTimeLapse,
+ timeBetweenTimeLapseFrameCaptureUs, videoFrameRate);
+}
+
+CameraSourceTimeLapse::CameraSourceTimeLapse(const sp<Camera> &camera,
+ bool useStillCameraForTimeLapse,
+ int64_t timeBetweenTimeLapseFrameCaptureUs,
+ int32_t videoFrameRate)
+ : CameraSource(camera),
+ mUseStillCameraForTimeLapse(useStillCameraForTimeLapse),
+ mTimeBetweenTimeLapseFrameCaptureUs(timeBetweenTimeLapseFrameCaptureUs),
+ mTimeBetweenTimeLapseVideoFramesUs(1E6/videoFrameRate),
+ mLastTimeLapseFrameRealTimestampUs(0),
+ mSkipCurrentFrame(false) {
+
+ LOGV("starting time lapse mode");
+ if(mUseStillCameraForTimeLapse) {
+ // Currently hardcoded the picture size. Will need to choose
+ // automatically or pass in from the app.
+ int32_t width, height;
+ width = 1024;
+ height = 768;
+ mMeta->setInt32(kKeyWidth, width);
+ mMeta->setInt32(kKeyHeight, height);
+ }
+}
+
+CameraSourceTimeLapse::~CameraSourceTimeLapse() {
+}
+
+// static
+void *CameraSourceTimeLapse::ThreadTimeLapseWrapper(void *me) {
+ CameraSourceTimeLapse *source = static_cast<CameraSourceTimeLapse *>(me);
+ source->threadTimeLapseEntry();
+ return NULL;
+}
+
+void CameraSourceTimeLapse::threadTimeLapseEntry() {
+ while(mStarted) {
+ if(mCameraIdle) {
+ LOGV("threadTimeLapseEntry: taking picture");
+ CHECK_EQ(OK, mCamera->takePicture());
+ mCameraIdle = false;
+ sleep(mTimeBetweenTimeLapseFrameCaptureUs/1E6);
+ } else {
+ LOGV("threadTimeLapseEntry: camera busy with old takePicture. Sleeping a little.");
+ sleep(.01);
+ }
+ }
+}
+
+void CameraSourceTimeLapse::startCameraRecording() {
+ if(mUseStillCameraForTimeLapse) {
+ LOGV("start time lapse recording using still camera");
+
+ int32_t width;
+ int32_t height;
+ mMeta->findInt32(kKeyWidth, &width);
+ mMeta->findInt32(kKeyHeight, &height);
+
+ int64_t token = IPCThreadState::self()->clearCallingIdentity();
+ String8 s = mCamera->getParameters();
+ IPCThreadState::self()->restoreCallingIdentity(token);
+
+ CameraParameters params(s);
+ params.setPictureSize(width, height);
+ mCamera->setParameters(params.flatten());
+ mCameraIdle = true;
+
+ // create a thread which takes pictures in a loop
+ pthread_attr_t attr;
+ pthread_attr_init(&attr);
+ pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE);
+
+ pthread_create(&mThreadTimeLapse, &attr, ThreadTimeLapseWrapper, this);
+ pthread_attr_destroy(&attr);
+ } else {
+ LOGV("start time lapse recording using video camera");
+ CHECK_EQ(OK, mCamera->startRecording());
+ }
+}
+
+void CameraSourceTimeLapse::stopCameraRecording() {
+ if(mUseStillCameraForTimeLapse) {
+ void *dummy;
+ pthread_join(mThreadTimeLapse, &dummy);
+ } else {
+ mCamera->stopRecording();
+ }
+}
+
+void CameraSourceTimeLapse::releaseRecordingFrame(const sp<IMemory>& frame) {
+ if(!mUseStillCameraForTimeLapse) {
+ mCamera->releaseRecordingFrame(frame);
+ }
+}
+
+sp<IMemory> CameraSourceTimeLapse::createIMemoryCopy(const sp<IMemory> &source_data) {
+ size_t source_size = source_data->size();
+ void* source_pointer = source_data->pointer();
+
+ sp<MemoryHeapBase> newMemoryHeap = new MemoryHeapBase(source_size);
+ sp<MemoryBase> newMemory = new MemoryBase(newMemoryHeap, 0, source_size);
+ memcpy(newMemory->pointer(), source_pointer, source_size);
+ return newMemory;
+}
+
+// static
+void *CameraSourceTimeLapse::ThreadStartPreviewWrapper(void *me) {
+ CameraSourceTimeLapse *source = static_cast<CameraSourceTimeLapse *>(me);
+ source->threadStartPreview();
+ return NULL;
+}
+
+void CameraSourceTimeLapse::threadStartPreview() {
+ CHECK_EQ(OK, mCamera->startPreview());
+ mCameraIdle = true;
+}
+
+void CameraSourceTimeLapse::restartPreview() {
+ // Start this in a different thread, so that the dataCallback can return
+ LOGV("restartPreview");
+ pthread_attr_t attr;
+ pthread_attr_init(&attr);
+ pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_DETACHED);
+
+ pthread_t threadPreview;
+ pthread_create(&threadPreview, &attr, ThreadStartPreviewWrapper, this);
+ pthread_attr_destroy(&attr);
+}
+
+void CameraSourceTimeLapse::dataCallback(int32_t msgType, const sp<IMemory> &data) {
+ if(msgType == CAMERA_MSG_COMPRESSED_IMAGE) {
+ // takePicture will complete after this callback, so restart preview.
+ restartPreview();
+ }
+ if(msgType != CAMERA_MSG_RAW_IMAGE) {
+ return;
+ }
+
+ LOGV("dataCallback for timelapse still frame");
+ CHECK_EQ(true, mUseStillCameraForTimeLapse);
+
+ int64_t timestampUs;
+ if (mNumFramesReceived == 0) {
+ timestampUs = mStartTimeUs;
+ } else {
+ timestampUs = mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs;
+ }
+ sp<IMemory> dataCopy = createIMemoryCopy(data);
+ dataCallbackTimestamp(timestampUs, msgType, dataCopy);
+}
+
+bool CameraSourceTimeLapse::skipCurrentFrame(int64_t timestampUs) {
+ if(mSkipCurrentFrame) {
+ mSkipCurrentFrame = false;
+ return true;
+ } else {
+ return false;
+ }
+}
+
+bool CameraSourceTimeLapse::skipFrameAndModifyTimeStamp(int64_t *timestampUs) {
+ if(!mUseStillCameraForTimeLapse) {
+ if(mLastTimeLapseFrameRealTimestampUs == 0) {
+ // First time lapse frame. Initialize mLastTimeLapseFrameRealTimestampUs
+ // to current time (timestampUs) and save frame data.
+ LOGV("dataCallbackTimestamp timelapse: initial frame");
+
+ mLastTimeLapseFrameRealTimestampUs = *timestampUs;
+ } else if (*timestampUs <
+ (mLastTimeLapseFrameRealTimestampUs + mTimeBetweenTimeLapseFrameCaptureUs)) {
+ // Skip all frames from last encoded frame until
+ // sufficient time (mTimeBetweenTimeLapseFrameCaptureUs) has passed.
+ // Tell the camera to release its recording frame and return.
+ LOGV("dataCallbackTimestamp timelapse: skipping intermediate frame");
+ return true;
+ } else {
+ // Desired frame has arrived after mTimeBetweenTimeLapseFrameCaptureUs time:
+ // - Reset mLastTimeLapseFrameRealTimestampUs to current time.
+ // - Artificially modify timestampUs to be one frame time (1/framerate) ahead
+ // of the last encoded frame's time stamp.
+ LOGV("dataCallbackTimestamp timelapse: got timelapse frame");
+
+ mLastTimeLapseFrameRealTimestampUs = *timestampUs;
+ *timestampUs = mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs;
+ }
+ }
+ return false;
+}
+
+void CameraSourceTimeLapse::dataCallbackTimestamp(int64_t timestampUs, int32_t msgType,
+ const sp<IMemory> &data) {
+ if(!mUseStillCameraForTimeLapse) {
+ mSkipCurrentFrame = skipFrameAndModifyTimeStamp(&timestampUs);
+ }
+ CameraSource::dataCallbackTimestamp(timestampUs, msgType, data);
+}
+
+} // namespace android
diff --git a/media/libstagefright/MPEG4Extractor.cpp b/media/libstagefright/MPEG4Extractor.cpp
index 65d0146..0c2f1e6 100644
--- a/media/libstagefright/MPEG4Extractor.cpp
+++ b/media/libstagefright/MPEG4Extractor.cpp
@@ -1287,11 +1287,6 @@
uint32_t freqIndex = (csd[0] & 7) << 1 | (csd[1] >> 7);
int32_t sampleRate = 0;
int32_t numChannels = 0;
- uint8_t offset = 0;
- static uint32_t kSamplingRate[] = {
- 96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050,
- 16000, 12000, 11025, 8000, 7350
- };
if (freqIndex == 15) {
if (csd_size < 5) {
return ERROR_MALFORMED;
@@ -1303,8 +1298,11 @@
| (csd[4] >> 7);
numChannels = (csd[4] >> 3) & 15;
- offset = 4;
} else {
+ static uint32_t kSamplingRate[] = {
+ 96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050,
+ 16000, 12000, 11025, 8000, 7350
+ };
if (freqIndex == 13 || freqIndex == 14) {
return ERROR_MALFORMED;
@@ -1312,66 +1310,6 @@
sampleRate = kSamplingRate[freqIndex];
numChannels = (csd[1] >> 3) & 15;
- offset = 1;
- }
-
- uint8_t sbrPresentFlag = -1;
- uint8_t extensionAudioObjectType = 0;
- if (objectType == 5) {
- extensionAudioObjectType = objectType;
- sbrPresentFlag = 1;
- freqIndex = ((csd[offset] & 7) << 1) | (csd[offset + 1] >> 7);
- offset += 1;
- if (freqIndex == 15) {
- sampleRate = (((csd[offset] & 0x7f) << 17)
- | (csd[offset + 1] << 9)
- | (csd[offset + 2] << 1)
- | (csd[offset + 3] >> 7));
- offset += 3;
- }
- objectType = csd[offset] >> 3;
- }
-
- if (((objectType >= 1 && objectType <= 4) ||
- (objectType >= 6 && objectType <= 7) ||
- (objectType == 17) ||
- (objectType >= 19 || objectType <= 23)) &&
- (0x00 == (csd[offset] & 7)) &&
- numChannels != 0) {
-
- // XXX: We are not handling coreCoderDelay,
- // program_config_element(),
- // extensionFlag, scalable profile, etc.
- if (objectType != 6 && objectType != 20) {
- if (objectType != 5 && csd_size - offset >= 2) {
- uint32_t syncExtensionType =
- (csd[offset + 1] << 3) | (csd[offset + 2] >> 5);
- if (syncExtensionType == 0x2b7) {
- extensionAudioObjectType =
- csd[offset + 2] & 0x1F;
- if (extensionAudioObjectType == 0x05) {
- if (csd_size - offset < 3) {
- return ERROR_MALFORMED;
- }
- uint8_t sbrPresentFlag = csd[offset + 3] & 0x80;
- if (sbrPresentFlag) {
- freqIndex = (csd[offset + 3] & 0x78) >> 3;
- if (freqIndex == 15) {
- if (csd_size - offset < 6) {
- return ERROR_MALFORMED;
- }
- sampleRate = (csd[offset + 3] & 0x07) << 21
- | csd[offset + 4] << 13
- | csd[offset + 5] << 5
- | csd[offset + 6] >> 3;
- } else {
- sampleRate = kSamplingRate[freqIndex];
- }
- }
- }
- }
- }
- }
}
if (numChannels == 0) {
diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp
index b7388bb..c4a25bc 100644
--- a/media/libstagefright/MPEG4Writer.cpp
+++ b/media/libstagefright/MPEG4Writer.cpp
@@ -111,7 +111,6 @@
int64_t mStartTimestampUs;
int64_t mPreviousTrackTimeUs;
int64_t mTrackEveryTimeDurationUs;
- int32_t mTrackEveryNumberOfFrames;
static void *ThreadWrapper(void *me);
void threadEntry();
@@ -121,7 +120,7 @@
void writeOneChunk(bool isAvc);
// Track authoring progress status
- void trackProgressStatus(int32_t nFrames, int64_t timeUs);
+ void trackProgressStatus(int64_t timeUs, status_t err = OK);
void initTrackingProgressStatus(MetaData *params);
// Utilities for collecting statistical data
@@ -742,7 +741,6 @@
mPreviousTrackTimeUs = -1;
mTrackingProgressStatus = false;
mTrackEveryTimeDurationUs = 0;
- mTrackEveryNumberOfFrames = 0;
{
int64_t timeUs;
if (params && params->findInt64(kKeyTrackTimeStatus, &timeUs)) {
@@ -751,14 +749,6 @@
mTrackingProgressStatus = true;
}
}
- {
- int32_t nFrames;
- if (params && params->findInt32(kKeyTrackFrameStatus, &nFrames)) {
- LOGV("Receive request to track progress status for every %d frames", nFrames);
- mTrackEveryNumberOfFrames = nFrames;
- mTrackingProgressStatus = true;
- }
- }
}
status_t MPEG4Writer::Track::start(MetaData *params) {
@@ -1164,7 +1154,7 @@
if (mPreviousTrackTimeUs <= 0) {
mPreviousTrackTimeUs = mStartTimestampUs;
}
- trackProgressStatus(mSampleInfos.size(), timestampUs);
+ trackProgressStatus(timestampUs);
}
if (mOwner->numTracks() == 1) {
off_t offset = is_avc? mOwner->addLengthPrefixedSample_l(copy)
@@ -1207,7 +1197,7 @@
if (mSampleInfos.empty()) {
err = UNKNOWN_ERROR;
}
- mOwner->notify(MEDIA_RECORDER_EVENT_INFO, MEDIA_RECORDER_INFO_COMPLETION_STATUS, err);
+ mOwner->trackProgressStatus(this, -1, err);
// Last chunk
if (mOwner->numTracks() == 1) {
@@ -1237,26 +1227,61 @@
logStatisticalData(is_audio);
}
-void MPEG4Writer::Track::trackProgressStatus(int32_t nFrames, int64_t timeUs) {
- LOGV("trackProgressStatus: %d frames and %lld us", nFrames, timeUs);
- if (mTrackEveryNumberOfFrames > 0 &&
- nFrames % mTrackEveryNumberOfFrames == 0) {
- LOGV("Fire frame tracking progress status at frame %d", nFrames);
- mOwner->notify(MEDIA_RECORDER_EVENT_INFO,
- MEDIA_RECORDER_INFO_PROGRESS_FRAME_STATUS,
- nFrames);
- }
-
+void MPEG4Writer::Track::trackProgressStatus(int64_t timeUs, status_t err) {
+ LOGV("trackProgressStatus: %lld us", timeUs);
if (mTrackEveryTimeDurationUs > 0 &&
timeUs - mPreviousTrackTimeUs >= mTrackEveryTimeDurationUs) {
LOGV("Fire time tracking progress status at %lld us", timeUs);
- mOwner->notify(MEDIA_RECORDER_EVENT_INFO,
- MEDIA_RECORDER_INFO_PROGRESS_TIME_STATUS,
- timeUs / 1000);
+ mOwner->trackProgressStatus(this, timeUs - mPreviousTrackTimeUs, err);
mPreviousTrackTimeUs = timeUs;
}
}
+void MPEG4Writer::trackProgressStatus(
+ const MPEG4Writer::Track* track, int64_t timeUs, status_t err) {
+ Mutex::Autolock lock(mLock);
+ int32_t nTracks = mTracks.size();
+ CHECK(nTracks >= 1);
+ CHECK(nTracks < 64); // Arbitrary number
+
+ int32_t trackNum = 0;
+#if 0
+ // In the worst case, we can put the trackNum
+ // along with MEDIA_RECORDER_INFO_COMPLETION_STATUS
+ // to report the progress.
+ for (List<Track *>::iterator it = mTracks.begin();
+ it != mTracks.end(); ++it, ++trackNum) {
+ if (track == (*it)) {
+ break;
+ }
+ }
+#endif
+ CHECK(trackNum < nTracks);
+ trackNum <<= 16;
+
+ // Error notification
+ // Do not consider ERROR_END_OF_STREAM an error
+ if (err != OK && err != ERROR_END_OF_STREAM) {
+ notify(MEDIA_RECORDER_EVENT_ERROR,
+ trackNum | MEDIA_RECORDER_ERROR_UNKNOWN,
+ err);
+ return;
+ }
+
+ if (timeUs == -1) {
+ // Send completion notification
+ notify(MEDIA_RECORDER_EVENT_INFO,
+ trackNum | MEDIA_RECORDER_INFO_COMPLETION_STATUS,
+ err);
+ return;
+ } else {
+ // Send progress status
+ notify(MEDIA_RECORDER_EVENT_INFO,
+ trackNum | MEDIA_RECORDER_INFO_PROGRESS_TIME_STATUS,
+ timeUs / 1000);
+ }
+}
+
void MPEG4Writer::Track::findMinAvgMaxSampleDurationMs(
int32_t *min, int32_t *avg, int32_t *max) {
CHECK(!mSampleInfos.empty());
diff --git a/media/libstagefright/codecs/m4v_h263/enc/M4vH263Encoder.cpp b/media/libstagefright/codecs/m4v_h263/enc/M4vH263Encoder.cpp
index e375250..5002442 100644
--- a/media/libstagefright/codecs/m4v_h263/enc/M4vH263Encoder.cpp
+++ b/media/libstagefright/codecs/m4v_h263/enc/M4vH263Encoder.cpp
@@ -267,7 +267,6 @@
status_t M4vH263Encoder::read(
MediaBuffer **out, const ReadOptions *options) {
- CHECK(!options);
*out = NULL;
MediaBuffer *outputBuffer;
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index b38a5c8..b88e69d 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -63,6 +63,8 @@
// ----------------------------------------------------------------------------
+extern const char * const gEffectLibPath;
+
namespace android {
static const char* kDeadlockedString = "AudioFlinger may be deadlocked\n";
@@ -127,8 +129,7 @@
AudioFlinger::AudioFlinger()
: BnAudioFlinger(),
- mAudioHardware(0), mMasterVolume(1.0f), mMasterMute(false), mNextUniqueId(1),
- mTotalEffectsCpuLoad(0), mTotalEffectsMemory(0)
+ mAudioHardware(0), mMasterVolume(1.0f), mMasterMute(false), mNextUniqueId(1)
{
mHardwareStatus = AUDIO_HW_IDLE;
@@ -321,13 +322,19 @@
mClients.add(pid, client);
}
- // If no audio session id is provided, create one here
- // TODO: enforce same stream type for all tracks in same audio session?
- // TODO: prevent same audio session on different output threads
LOGV("createTrack() sessionId: %d", (sessionId == NULL) ? -2 : *sessionId);
- if (sessionId != NULL && *sessionId != 0) {
+ if (sessionId != NULL && *sessionId != AudioSystem::SESSION_OUTPUT_MIX) {
+ // prevent same audio session on different output threads
+ for (size_t i = 0; i < mPlaybackThreads.size(); i++) {
+ if (mPlaybackThreads.keyAt(i) != output &&
+ mPlaybackThreads.valueAt(i)->hasAudioSession(*sessionId)) {
+ lStatus = BAD_VALUE;
+ goto Exit;
+ }
+ }
lSessionId = *sessionId;
} else {
+ // if no audio session id is provided, create one here
lSessionId = nextUniqueId();
if (sessionId != NULL) {
*sessionId = lSessionId;
@@ -1141,6 +1148,23 @@
{ // scope for mLock
Mutex::Autolock _l(mLock);
+
+ // all tracks in same audio session must share the same routing strategy otherwise
+ // conflicts will happen when tracks are moved from one output to another by audio policy
+ // manager
+ uint32_t strategy =
+ AudioSystem::getStrategyForStream((AudioSystem::stream_type)streamType);
+ for (size_t i = 0; i < mTracks.size(); ++i) {
+ sp<Track> t = mTracks[i];
+ if (t != 0) {
+ if (sessionId == t->sessionId() &&
+ strategy != AudioSystem::getStrategyForStream((AudioSystem::stream_type)t->type())) {
+ lStatus = BAD_VALUE;
+ goto Exit;
+ }
+ }
+ }
+
track = new Track(this, client, streamType, sampleRate, format,
channelCount, frameCount, sharedBuffer, sessionId);
if (track->getCblk() == NULL || track->name() < 0) {
@@ -1153,6 +1177,7 @@
if (chain != 0) {
LOGV("createTrack_l() setting main buffer %p", chain->inBuffer());
track->setMainBuffer(chain->inBuffer());
+ chain->setStrategy(AudioSystem::getStrategyForStream((AudioSystem::stream_type)track->type()));
}
}
lStatus = NO_ERROR;
@@ -1344,7 +1369,16 @@
mMixBuffer = new int16_t[mFrameCount * 2];
memset(mMixBuffer, 0, mFrameCount * 2 * sizeof(int16_t));
- //TODO handle effects reconfig
+ // force reconfiguration of effect chains and engines to take new buffer size and audio
+ // parameters into account
+ // Note that mLock is not held when readOutputParameters() is called from the constructor
+ // but in this case nothing is done below as no audio sessions have effect yet so it doesn't
+ // matter.
+ // create a copy of mEffectChains as calling moveEffectChain_l() can reorder some effect chains
+ Vector< sp<EffectChain> > effectChains = mEffectChains;
+ for (size_t i = 0; i < effectChains.size(); i ++) {
+ mAudioFlinger->moveEffectChain_l(effectChains[i]->sessionId(), this, this);
+ }
}
status_t AudioFlinger::PlaybackThread::getRenderPosition(uint32_t *halFrames, uint32_t *dspFrames)
@@ -1369,7 +1403,8 @@
for (size_t i = 0; i < mTracks.size(); ++i) {
sp<Track> track = mTracks[i];
- if (sessionId == track->sessionId()) {
+ if (sessionId == track->sessionId() &&
+ !(track->mCblk->flags & CBLK_INVALID_MSK)) {
return true;
}
}
@@ -1377,6 +1412,23 @@
return false;
}
+uint32_t AudioFlinger::PlaybackThread::getStrategyForSession_l(int sessionId)
+{
+ // session AudioSystem::SESSION_OUTPUT_MIX is placed in same strategy as MUSIC stream so that
+ // it is moved to correct output by audio policy manager when A2DP is connected or disconnected
+ if (sessionId == AudioSystem::SESSION_OUTPUT_MIX) {
+ return AudioSystem::getStrategyForStream(AudioSystem::MUSIC);
+ }
+ for (size_t i = 0; i < mTracks.size(); i++) {
+ sp<Track> track = mTracks[i];
+ if (sessionId == track->sessionId() &&
+ !(track->mCblk->flags & CBLK_INVALID_MSK)) {
+ return AudioSystem::getStrategyForStream((AudioSystem::stream_type) track->type());
+ }
+ }
+ return AudioSystem::getStrategyForStream(AudioSystem::MUSIC);
+}
+
sp<AudioFlinger::EffectChain> AudioFlinger::PlaybackThread::getEffectChain(int sessionId)
{
Mutex::Autolock _l(mLock);
@@ -1503,8 +1555,7 @@
// prevent any changes in effect chain list and in each effect chain
// during mixing and effect process as the audio buffers could be deleted
// or modified if an effect is created or deleted
- lockEffectChains_l();
- effectChains = mEffectChains;
+ lockEffectChains_l(effectChains);
}
if (LIKELY(mixerStatus == MIXER_TRACKS_READY)) {
@@ -1540,7 +1591,7 @@
effectChains[i]->process_l();
}
// enable changes in effect chain
- unlockEffectChains();
+ unlockEffectChains(effectChains);
#ifdef LVMX
int audioOutputType = LifeVibes::getMixerType(mId, mType);
if (LifeVibes::audioOutputTypeIsLifeVibes(audioOutputType)) {
@@ -1571,7 +1622,7 @@
mStandby = false;
} else {
// enable changes in effect chain
- unlockEffectChains();
+ unlockEffectChains(effectChains);
usleep(sleepTime);
}
@@ -1625,7 +1676,7 @@
}
#endif
// Delegate master volume control to effect in output mix effect chain if needed
- sp<EffectChain> chain = getEffectChain_l(0);
+ sp<EffectChain> chain = getEffectChain_l(AudioSystem::SESSION_OUTPUT_MIX);
if (chain != 0) {
uint32_t v = (uint32_t)(masterVolume * (1 << 24));
chain->setVolume_l(&v, &v);
@@ -1814,8 +1865,10 @@
void AudioFlinger::MixerThread::invalidateTracks(int streamType)
{
- LOGV ("MixerThread::invalidateTracks() mixer %p, streamType %d, mTracks.size %d", this, streamType, mTracks.size());
+ LOGV ("MixerThread::invalidateTracks() mixer %p, streamType %d, mTracks.size %d",
+ this, streamType, mTracks.size());
Mutex::Autolock _l(mLock);
+
size_t size = mTracks.size();
for (size_t i = 0; i < size; i++) {
sp<Track> t = mTracks[i];
@@ -2070,7 +2123,6 @@
// hardware resources as soon as possible
nsecs_t standbyDelay = microseconds(activeSleepTime*2);
-
while (!exitPending())
{
bool rampVolume;
@@ -2246,7 +2298,8 @@
if (UNLIKELY(trackToRemove != 0)) {
mActiveTracks.remove(trackToRemove);
if (!effectChains.isEmpty()) {
- LOGV("stopping track on chain %p for session Id: %d", effectChains[0].get(), trackToRemove->sessionId());
+ LOGV("stopping track on chain %p for session Id: %d", effectChains[0].get(),
+ trackToRemove->sessionId());
effectChains[0]->stopTrack();
}
if (trackToRemove->isTerminated()) {
@@ -2255,7 +2308,7 @@
}
}
- lockEffectChains_l();
+ lockEffectChains_l(effectChains);
}
if (LIKELY(mixerStatus == MIXER_TRACKS_READY)) {
@@ -2301,7 +2354,7 @@
for (size_t i = 0; i < effectChains.size(); i ++) {
effectChains[i]->process_l();
}
- unlockEffectChains();
+ unlockEffectChains(effectChains);
mLastWriteTime = systemTime();
mInWrite = true;
@@ -2312,7 +2365,7 @@
mInWrite = false;
mStandby = false;
} else {
- unlockEffectChains();
+ unlockEffectChains(effectChains);
usleep(sleepTime);
}
@@ -2505,8 +2558,7 @@
// prevent any changes in effect chain list and in each effect chain
// during mixing and effect process as the audio buffers could be deleted
// or modified if an effect is created or deleted
- lockEffectChains_l();
- effectChains = mEffectChains;
+ lockEffectChains_l(effectChains);
}
if (LIKELY(mixerStatus == MIXER_TRACKS_READY)) {
@@ -2547,7 +2599,7 @@
effectChains[i]->process_l();
}
// enable changes in effect chain
- unlockEffectChains();
+ unlockEffectChains(effectChains);
standbyTime = systemTime() + kStandbyTimeInNsecs;
for (size_t i = 0; i < outputTracks.size(); i++) {
@@ -2557,7 +2609,7 @@
mBytesWritten += mixBufferSize;
} else {
// enable changes in effect chain
- unlockEffectChains();
+ unlockEffectChains(effectChains);
usleep(sleepTime);
}
@@ -2859,7 +2911,9 @@
if (thread != 0) {
if (!isOutputTrack()) {
if (mState == ACTIVE || mState == RESUMING) {
- AudioSystem::stopOutput(thread->id(), (AudioSystem::stream_type)mStreamType);
+ AudioSystem::stopOutput(thread->id(),
+ (AudioSystem::stream_type)mStreamType,
+ mSessionId);
}
AudioSystem::releaseOutput(thread->id());
}
@@ -2966,7 +3020,9 @@
if (!isOutputTrack() && state != ACTIVE && state != RESUMING) {
thread->mLock.unlock();
- status = AudioSystem::startOutput(thread->id(), (AudioSystem::stream_type)mStreamType);
+ status = AudioSystem::startOutput(thread->id(),
+ (AudioSystem::stream_type)mStreamType,
+ mSessionId);
thread->mLock.lock();
}
if (status == NO_ERROR) {
@@ -2999,7 +3055,9 @@
}
if (!isOutputTrack() && (state == ACTIVE || state == RESUMING)) {
thread->mLock.unlock();
- AudioSystem::stopOutput(thread->id(), (AudioSystem::stream_type)mStreamType);
+ AudioSystem::stopOutput(thread->id(),
+ (AudioSystem::stream_type)mStreamType,
+ mSessionId);
thread->mLock.lock();
}
}
@@ -3016,7 +3074,9 @@
LOGV("ACTIVE/RESUMING => PAUSING (%d) on thread %p", mName, thread.get());
if (!isOutputTrack()) {
thread->mLock.unlock();
- AudioSystem::stopOutput(thread->id(), (AudioSystem::stream_type)mStreamType);
+ AudioSystem::stopOutput(thread->id(),
+ (AudioSystem::stream_type)mStreamType,
+ mSessionId);
thread->mLock.lock();
}
}
@@ -3585,7 +3645,7 @@
}
// If no audio session id is provided, create one here
- if (sessionId != NULL && *sessionId != 0) {
+ if (sessionId != NULL && *sessionId != AudioSystem::SESSION_OUTPUT_MIX) {
lSessionId = *sessionId;
} else {
lSessionId = nextUniqueId();
@@ -4416,8 +4476,8 @@
thread->type() != PlaybackThread::DIRECT) {
MixerThread *srcThread = (MixerThread *)thread;
srcThread->invalidateTracks(stream);
- }
}
+ }
return NO_ERROR;
}
@@ -4472,12 +4532,26 @@
status_t AudioFlinger::loadEffectLibrary(const char *libPath, int *handle)
{
+ // check calling permissions
+ if (!settingsAllowed()) {
+ return PERMISSION_DENIED;
+ }
+ // only allow libraries loaded from /system/lib/soundfx for now
+ if (strncmp(gEffectLibPath, libPath, strlen(gEffectLibPath)) != 0) {
+ return PERMISSION_DENIED;
+ }
+
Mutex::Autolock _l(mLock);
return EffectLoadLibrary(libPath, handle);
}
status_t AudioFlinger::unloadEffectLibrary(int handle)
{
+ // check calling permissions
+ if (!settingsAllowed()) {
+ return PERMISSION_DENIED;
+ }
+
Mutex::Autolock _l(mLock);
return EffectUnloadLibrary(handle);
}
@@ -4522,7 +4596,8 @@
sp<Client> client;
wp<Client> wclient;
- LOGV("createEffect pid %d, client %p, priority %d, sessionId %d, output %d", pid, effectClient.get(), priority, sessionId, output);
+ LOGV("createEffect pid %d, client %p, priority %d, sessionId %d, output %d",
+ pid, effectClient.get(), priority, sessionId, output);
if (pDesc == NULL) {
lStatus = BAD_VALUE;
@@ -4577,7 +4652,7 @@
// an auxiliary version of this effect type is available
found = true;
memcpy(&d, &desc, sizeof(effect_descriptor_t));
- if (sessionId != 0 ||
+ if (sessionId != AudioSystem::SESSION_OUTPUT_MIX ||
(desc.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_AUXILIARY) {
break;
}
@@ -4590,22 +4665,23 @@
}
// For same effect type, chose auxiliary version over insert version if
// connect to output mix (Compliance to OpenSL ES)
- if (sessionId == 0 &&
+ if (sessionId == AudioSystem::SESSION_OUTPUT_MIX &&
(d.flags & EFFECT_FLAG_TYPE_MASK) != EFFECT_FLAG_TYPE_AUXILIARY) {
memcpy(&desc, &d, sizeof(effect_descriptor_t));
}
}
// Do not allow auxiliary effects on a session different from 0 (output mix)
- if (sessionId != 0 &&
+ if (sessionId != AudioSystem::SESSION_OUTPUT_MIX &&
(desc.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_AUXILIARY) {
lStatus = INVALID_OPERATION;
goto Exit;
}
- // Session -1 is reserved for output stage effects that can only be created
- // by audio policy manager (running in same process)
- if (sessionId == -1 && getpid() != IPCThreadState::self()->getCallingPid()) {
+ // Session AudioSystem::SESSION_OUTPUT_STAGE is reserved for output stage effects
+ // that can only be created by audio policy manager (running in same process)
+ if (sessionId == AudioSystem::SESSION_OUTPUT_STAGE &&
+ getpid() != IPCThreadState::self()->getCallingPid()) {
lStatus = INVALID_OPERATION;
goto Exit;
}
@@ -4617,13 +4693,14 @@
// output threads.
// TODO: allow attachment of effect to inputs
if (output == 0) {
- if (sessionId <= 0) {
- // default to first output
- // TODO: define criteria to choose output when not specified. Or
- // receive output from audio policy manager
- if (mPlaybackThreads.size() != 0) {
- output = mPlaybackThreads.keyAt(0);
- }
+ if (sessionId == AudioSystem::SESSION_OUTPUT_STAGE) {
+ // output must be specified by AudioPolicyManager when using session
+ // AudioSystem::SESSION_OUTPUT_STAGE
+ lStatus = BAD_VALUE;
+ goto Exit;
+ } else if (sessionId == AudioSystem::SESSION_OUTPUT_MIX) {
+ output = AudioSystem::getOutputForEffect(&desc);
+ LOGV("createEffect() got output %d for effect %s", output, desc.name);
} else {
// look for the thread where the specified audio session is present
for (size_t i = 0; i < mPlaybackThreads.size(); i++) {
@@ -4636,7 +4713,7 @@
}
PlaybackThread *thread = checkPlaybackThread_l(output);
if (thread == NULL) {
- LOGE("unknown output thread");
+ LOGE("createEffect() unknown output thread");
lStatus = BAD_VALUE;
goto Exit;
}
@@ -4651,7 +4728,8 @@
}
// create effect on selected output trhead
- handle = thread->createEffect_l(client, effectClient, priority, sessionId, &desc, enabled, &lStatus);
+ handle = thread->createEffect_l(client, effectClient, priority, sessionId,
+ &desc, enabled, &lStatus);
if (handle != 0 && id != NULL) {
*id = handle->id();
}
@@ -4664,31 +4742,64 @@
return handle;
}
-status_t AudioFlinger::registerEffectResource_l(effect_descriptor_t *desc) {
- if (mTotalEffectsCpuLoad + desc->cpuLoad > MAX_EFFECTS_CPU_LOAD) {
- LOGW("registerEffectResource() CPU Load limit exceeded for Fx %s, CPU %f MIPS",
- desc->name, (float)desc->cpuLoad/10);
- return INVALID_OPERATION;
+status_t AudioFlinger::moveEffects(int session, int srcOutput, int dstOutput)
+{
+ LOGV("moveEffects() session %d, srcOutput %d, dstOutput %d",
+ session, srcOutput, dstOutput);
+ Mutex::Autolock _l(mLock);
+ if (srcOutput == dstOutput) {
+ LOGW("moveEffects() same dst and src outputs %d", dstOutput);
+ return NO_ERROR;
}
- if (mTotalEffectsMemory + desc->memoryUsage > MAX_EFFECTS_MEMORY) {
- LOGW("registerEffectResource() memory limit exceeded for Fx %s, Memory %d KB",
- desc->name, desc->memoryUsage);
- return INVALID_OPERATION;
+ PlaybackThread *srcThread = checkPlaybackThread_l(srcOutput);
+ if (srcThread == NULL) {
+ LOGW("moveEffects() bad srcOutput %d", srcOutput);
+ return BAD_VALUE;
}
- mTotalEffectsCpuLoad += desc->cpuLoad;
- mTotalEffectsMemory += desc->memoryUsage;
- LOGV("registerEffectResource_l() effect %s, CPU %d, memory %d",
- desc->name, desc->cpuLoad, desc->memoryUsage);
- LOGV(" total CPU %d, total memory %d", mTotalEffectsCpuLoad, mTotalEffectsMemory);
+ PlaybackThread *dstThread = checkPlaybackThread_l(dstOutput);
+ if (dstThread == NULL) {
+ LOGW("moveEffects() bad dstOutput %d", dstOutput);
+ return BAD_VALUE;
+ }
+
+ Mutex::Autolock _dl(dstThread->mLock);
+ Mutex::Autolock _sl(srcThread->mLock);
+ moveEffectChain_l(session, srcThread, dstThread);
+
return NO_ERROR;
}
-void AudioFlinger::unregisterEffectResource_l(effect_descriptor_t *desc) {
- mTotalEffectsCpuLoad -= desc->cpuLoad;
- mTotalEffectsMemory -= desc->memoryUsage;
- LOGV("unregisterEffectResource_l() effect %s, CPU %d, memory %d",
- desc->name, desc->cpuLoad, desc->memoryUsage);
- LOGV(" total CPU %d, total memory %d", mTotalEffectsCpuLoad, mTotalEffectsMemory);
+// moveEffectChain_l must be called with both srcThread and dstThread mLocks held
+status_t AudioFlinger::moveEffectChain_l(int session,
+ AudioFlinger::PlaybackThread *srcThread,
+ AudioFlinger::PlaybackThread *dstThread)
+{
+ LOGV("moveEffectChain_l() session %d from thread %p to thread %p",
+ session, srcThread, dstThread);
+
+ sp<EffectChain> chain = srcThread->getEffectChain_l(session);
+ if (chain == 0) {
+ LOGW("moveEffectChain_l() effect chain for session %d not on source thread %p",
+ session, srcThread);
+ return INVALID_OPERATION;
+ }
+
+ // remove chain first. This is useful only if reconfiguring effect chain on same output thread,
+ // so that a new chain is created with correct parameters when first effect is added. This is
+ // otherwise unnecessary as removeEffect_l() will remove the chain when last effect is
+ // removed.
+ srcThread->removeEffectChain_l(chain);
+
+ // transfer all effects one by one so that new effect chain is created on new thread with
+ // correct buffer sizes and audio parameters and effect engines reconfigured accordingly
+ sp<EffectModule> effect = chain->getEffectFromId_l(0);
+ while (effect != 0) {
+ srcThread->removeEffect_l(effect);
+ dstThread->addEffect_l(effect);
+ effect = chain->getEffectFromId_l(0);
+ }
+
+ return NO_ERROR;
}
// PlaybackThread::createEffect_l() must be called with AudioFlinger::mLock held
@@ -4707,6 +4818,7 @@
status_t lStatus;
sp<Track> track;
sp<EffectChain> chain;
+ bool chainCreated = false;
bool effectCreated = false;
bool effectRegistered = false;
@@ -4718,16 +4830,18 @@
// Do not allow auxiliary effect on session other than 0
if ((desc->flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_AUXILIARY &&
- sessionId != 0) {
- LOGW("createEffect_l() Cannot add auxiliary effect %s to session %d", desc->name, sessionId);
+ sessionId != AudioSystem::SESSION_OUTPUT_MIX) {
+ LOGW("createEffect_l() Cannot add auxiliary effect %s to session %d",
+ desc->name, sessionId);
lStatus = BAD_VALUE;
goto Exit;
}
// Do not allow effects with session ID 0 on direct output or duplicating threads
// TODO: add rule for hw accelerated effects on direct outputs with non PCM format
- if (sessionId == 0 && mType != MIXER) {
- LOGW("createEffect_l() Cannot add auxiliary effect %s to session %d", desc->name, sessionId);
+ if (sessionId == AudioSystem::SESSION_OUTPUT_MIX && mType != MIXER) {
+ LOGW("createEffect_l() Cannot add auxiliary effect %s to session %d",
+ desc->name, sessionId);
lStatus = BAD_VALUE;
goto Exit;
}
@@ -4744,6 +4858,8 @@
LOGV("createEffect_l() new effect chain for session %d", sessionId);
chain = new EffectChain(this, sessionId);
addEffectChain_l(chain);
+ chain->setStrategy(getStrategyForSession_l(sessionId));
+ chainCreated = true;
} else {
effect = chain->getEffectFromDesc_l(desc);
}
@@ -4751,14 +4867,15 @@
LOGV("createEffect_l() got effect %p on chain %p", effect == 0 ? 0 : effect.get(), chain.get());
if (effect == 0) {
+ int id = mAudioFlinger->nextUniqueId();
// Check CPU and memory usage
- lStatus = mAudioFlinger->registerEffectResource_l(desc);
+ lStatus = AudioSystem::registerEffect(desc, mId, chain->strategy(), sessionId, id);
if (lStatus != NO_ERROR) {
goto Exit;
}
effectRegistered = true;
// create a new effect module if none present in the chain
- effect = new EffectModule(this, chain, desc, mAudioFlinger->nextUniqueId(), sessionId);
+ effect = new EffectModule(this, chain, desc, id, sessionId);
lStatus = effect->status();
if (lStatus != NO_ERROR) {
goto Exit;
@@ -4782,14 +4899,15 @@
Exit:
if (lStatus != NO_ERROR && lStatus != ALREADY_EXISTS) {
+ Mutex::Autolock _l(mLock);
if (effectCreated) {
- Mutex::Autolock _l(mLock);
- if (chain->removeEffect_l(effect) == 0) {
- removeEffectChain_l(chain);
- }
+ chain->removeEffect_l(effect);
}
if (effectRegistered) {
- mAudioFlinger->unregisterEffectResource_l(desc);
+ AudioSystem::unregisterEffect(effect->id());
+ }
+ if (chainCreated) {
+ removeEffectChain_l(chain);
}
handle.clear();
}
@@ -4800,26 +4918,71 @@
return handle;
}
-void AudioFlinger::PlaybackThread::disconnectEffect(const sp< EffectModule>& effect,
- const wp<EffectHandle>& handle) {
+// PlaybackThread::addEffect_l() must be called with AudioFlinger::mLock and
+// PlaybackThread::mLock held
+status_t AudioFlinger::PlaybackThread::addEffect_l(const sp<EffectModule>& effect)
+{
+ // check for existing effect chain with the requested audio session
+ int sessionId = effect->sessionId();
+ sp<EffectChain> chain = getEffectChain_l(sessionId);
+ bool chainCreated = false;
+
+ if (chain == 0) {
+ // create a new chain for this session
+ LOGV("addEffect_l() new effect chain for session %d", sessionId);
+ chain = new EffectChain(this, sessionId);
+ addEffectChain_l(chain);
+ chain->setStrategy(getStrategyForSession_l(sessionId));
+ chainCreated = true;
+ }
+ LOGV("addEffect_l() %p chain %p effect %p", this, chain.get(), effect.get());
+
+ if (chain->getEffectFromId_l(effect->id()) != 0) {
+ LOGW("addEffect_l() %p effect %s already present in chain %p",
+ this, effect->desc().name, chain.get());
+ return BAD_VALUE;
+ }
+
+ status_t status = chain->addEffect_l(effect);
+ if (status != NO_ERROR) {
+ if (chainCreated) {
+ removeEffectChain_l(chain);
+ }
+ return status;
+ }
+
+ effect->setDevice(mDevice);
+ effect->setMode(mAudioFlinger->getMode());
+ return NO_ERROR;
+}
+
+void AudioFlinger::PlaybackThread::removeEffect_l(const sp<EffectModule>& effect) {
+
+ LOGV("removeEffect_l() %p effect %p", this, effect.get());
effect_descriptor_t desc = effect->desc();
+ if ((desc.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_AUXILIARY) {
+ detachAuxEffect_l(effect->id());
+ }
+
+ sp<EffectChain> chain = effect->chain().promote();
+ if (chain != 0) {
+ // remove effect chain if removing last effect
+ if (chain->removeEffect_l(effect) == 0) {
+ removeEffectChain_l(chain);
+ }
+ } else {
+ LOGW("removeEffect_l() %p cannot promote chain for effect %p", this, effect.get());
+ }
+}
+
+void AudioFlinger::PlaybackThread::disconnectEffect(const sp<EffectModule>& effect,
+ const wp<EffectHandle>& handle) {
Mutex::Autolock _l(mLock);
+ LOGV("disconnectEffect() %p effect %p", this, effect.get());
// delete the effect module if removing last handle on it
if (effect->removeHandle(handle) == 0) {
- if ((desc.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_AUXILIARY) {
- detachAuxEffect_l(effect->id());
- }
- sp<EffectChain> chain = effect->chain().promote();
- if (chain != 0) {
- // remove effect chain if remove last effect
- if (chain->removeEffect_l(effect) == 0) {
- removeEffectChain_l(chain);
- }
- }
- mLock.unlock();
- mAudioFlinger->mLock.lock();
- mAudioFlinger->unregisterEffectResource_l(&desc);
- mAudioFlinger->mLock.unlock();
+ removeEffect_l(effect);
+ AudioSystem::unregisterEffect(effect->id());
}
}
@@ -4863,13 +5026,16 @@
chain->setInBuffer(buffer, ownsBuffer);
chain->setOutBuffer(mMixBuffer);
- // Effect chain for session -1 is inserted at end of effect chains list
- // in order to be processed last as it contains output stage effects
- // Effect chain for session 0 is inserted before session -1 to be processed
+ // Effect chain for session AudioSystem::SESSION_OUTPUT_STAGE is inserted at end of effect
+ // chains list in order to be processed last as it contains output stage effects
+ // Effect chain for session AudioSystem::SESSION_OUTPUT_MIX is inserted before
+ // session AudioSystem::SESSION_OUTPUT_STAGE to be processed
// after track specific effects and before output stage
- // Effect chain for session other than 0 is inserted at beginning of effect
- // chains list to be processed before output mix effects. Relative order between
- // sessions other than 0 is not important
+ // It is therefore mandatory that AudioSystem::SESSION_OUTPUT_MIX == 0 and
+ // that AudioSystem::SESSION_OUTPUT_STAGE < AudioSystem::SESSION_OUTPUT_MIX
+ // Effect chains for other sessions are inserted at beginning of effect
+ // chains list to be processed before output mix effects. Relative order between other
+ // sessions is not important
size_t size = mEffectChains.size();
size_t i = 0;
for (i = 0; i < size; i++) {
@@ -4896,26 +5062,30 @@
track->setMainBuffer(mMixBuffer);
}
}
+ break;
}
}
return mEffectChains.size();
}
-void AudioFlinger::PlaybackThread::lockEffectChains_l()
+void AudioFlinger::PlaybackThread::lockEffectChains_l(
+ Vector<sp <AudioFlinger::EffectChain> >& effectChains)
{
+ effectChains = mEffectChains;
for (size_t i = 0; i < mEffectChains.size(); i++) {
mEffectChains[i]->lock();
}
}
-void AudioFlinger::PlaybackThread::unlockEffectChains()
+void AudioFlinger::PlaybackThread::unlockEffectChains(
+ Vector<sp <AudioFlinger::EffectChain> >& effectChains)
{
- Mutex::Autolock _l(mLock);
- for (size_t i = 0; i < mEffectChains.size(); i++) {
- mEffectChains[i]->unlock();
+ for (size_t i = 0; i < effectChains.size(); i++) {
+ effectChains[i]->unlock();
}
}
+
sp<AudioFlinger::EffectModule> AudioFlinger::PlaybackThread::getEffect_l(int sessionId, int effectId)
{
sp<EffectModule> effect;
@@ -4927,21 +5097,23 @@
return effect;
}
-status_t AudioFlinger::PlaybackThread::attachAuxEffect(const sp<AudioFlinger::PlaybackThread::Track> track, int EffectId)
+status_t AudioFlinger::PlaybackThread::attachAuxEffect(
+ const sp<AudioFlinger::PlaybackThread::Track> track, int EffectId)
{
Mutex::Autolock _l(mLock);
return attachAuxEffect_l(track, EffectId);
}
-status_t AudioFlinger::PlaybackThread::attachAuxEffect_l(const sp<AudioFlinger::PlaybackThread::Track> track, int EffectId)
+status_t AudioFlinger::PlaybackThread::attachAuxEffect_l(
+ const sp<AudioFlinger::PlaybackThread::Track> track, int EffectId)
{
status_t status = NO_ERROR;
if (EffectId == 0) {
track->setAuxBuffer(0, NULL);
} else {
- // Auxiliary effects are always in audio session 0
- sp<EffectModule> effect = getEffect_l(0, EffectId);
+ // Auxiliary effects are always in audio session AudioSystem::SESSION_OUTPUT_MIX
+ sp<EffectModule> effect = getEffect_l(AudioSystem::SESSION_OUTPUT_MIX, EffectId);
if (effect != 0) {
if ((effect->desc().flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_AUXILIARY) {
track->setAuxBuffer(EffectId, (int32_t *)effect->inBuffer());
@@ -5137,7 +5309,7 @@
if ((mDescriptor.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_AUXILIARY) {
AudioMixer::ditherAndClamp(mConfig.inputCfg.buffer.s32,
mConfig.inputCfg.buffer.s32,
- mConfig.inputCfg.buffer.frameCount);
+ mConfig.inputCfg.buffer.frameCount/2);
}
// do the actual processing in the effect engine
@@ -5214,7 +5386,8 @@
mConfig.outputCfg.bufferProvider.releaseBuffer = NULL;
mConfig.inputCfg.accessMode = EFFECT_BUFFER_ACCESS_READ;
// Insert effect:
- // - in session 0 or -1, always overwrites output buffer: input buffer == output buffer
+ // - in session AudioSystem::SESSION_OUTPUT_MIX or AudioSystem::SESSION_OUTPUT_STAGE,
+ // always overwrites output buffer: input buffer == output buffer
// - in other sessions:
// last effect in the chain accumulates in output buffer: input buffer != output buffer
// other effect: overwrites output buffer: input buffer == output buffer
@@ -5231,6 +5404,9 @@
mConfig.inputCfg.buffer.frameCount = thread->frameCount();
mConfig.outputCfg.buffer.frameCount = mConfig.inputCfg.buffer.frameCount;
+ LOGV("configure() %p thread %p buffer %p framecount %d",
+ this, thread.get(), mConfig.inputCfg.buffer.raw, mConfig.inputCfg.buffer.frameCount);
+
status_t cmdStatus;
int size = sizeof(int);
status_t status = (*mEffectInterface)->command(mEffectInterface, EFFECT_CMD_CONFIGURE, sizeof(effect_config_t), &mConfig, &size, &cmdStatus);
@@ -5753,7 +5929,7 @@
mVolumeCtrlIdx(-1), mLeftVolume(0), mRightVolume(0),
mNewLeftVolume(0), mNewRightVolume(0)
{
-
+ mStrategy = AudioSystem::getStrategyForStream(AudioSystem::MUSIC);
}
AudioFlinger::EffectChain::~EffectChain()
@@ -5786,7 +5962,8 @@
size_t size = mEffects.size();
for (size_t i = 0; i < size; i++) {
- if (mEffects[i]->id() == id) {
+ // by convention, return first effect if id provided is 0 (0 is never a valid id)
+ if (id == 0 || mEffects[i]->id() == id) {
effect = mEffects[i];
break;
}
@@ -5816,21 +5993,24 @@
}
// addEffect_l() must be called with PlaybackThread::mLock held
-status_t AudioFlinger::EffectChain::addEffect_l(sp<EffectModule>& effect)
+status_t AudioFlinger::EffectChain::addEffect_l(const sp<EffectModule>& effect)
{
effect_descriptor_t desc = effect->desc();
uint32_t insertPref = desc.flags & EFFECT_FLAG_INSERT_MASK;
Mutex::Autolock _l(mLock);
+ effect->setChain(this);
+ sp<ThreadBase> thread = mThread.promote();
+ if (thread == 0) {
+ return NO_INIT;
+ }
+ effect->setThread(thread);
if ((desc.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_AUXILIARY) {
// Auxiliary effects are inserted at the beginning of mEffects vector as
// they are processed first and accumulated in chain input buffer
mEffects.insertAt(effect, 0);
- sp<ThreadBase> thread = mThread.promote();
- if (thread == 0) {
- return NO_INIT;
- }
+
// the input buffer for auxiliary effect contains mono samples in
// 32 bit format. This is to avoid saturation in AudoMixer
// accumulation stage. Saturation is done in EffectModule::process() before
diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h
index 99816f9..a8c9a92 100644
--- a/services/audioflinger/AudioFlinger.h
+++ b/services/audioflinger/AudioFlinger.h
@@ -168,8 +168,7 @@
int *id,
int *enabled);
- status_t registerEffectResource_l(effect_descriptor_t *desc);
- void unregisterEffectResource_l(effect_descriptor_t *desc);
+ virtual status_t moveEffects(int session, int srcOutput, int dstOutput);
enum hardware_call_state {
AUDIO_HW_IDLE = 0,
@@ -619,15 +618,22 @@
sp<EffectChain> getEffectChain_l(int sessionId);
status_t addEffectChain_l(const sp<EffectChain>& chain);
size_t removeEffectChain_l(const sp<EffectChain>& chain);
- void lockEffectChains_l();
- void unlockEffectChains();
+ void lockEffectChains_l(Vector<sp <EffectChain> >& effectChains);
+ void unlockEffectChains(Vector<sp <EffectChain> >& effectChains);
sp<AudioFlinger::EffectModule> getEffect_l(int sessionId, int effectId);
void detachAuxEffect_l(int effectId);
- status_t attachAuxEffect(const sp<AudioFlinger::PlaybackThread::Track> track, int EffectId);
- status_t attachAuxEffect_l(const sp<AudioFlinger::PlaybackThread::Track> track, int EffectId);
+ status_t attachAuxEffect(const sp<AudioFlinger::PlaybackThread::Track> track,
+ int EffectId);
+ status_t attachAuxEffect_l(const sp<AudioFlinger::PlaybackThread::Track> track,
+ int EffectId);
void setMode(uint32_t mode);
+ status_t addEffect_l(const sp< EffectModule>& effect);
+ void removeEffect_l(const sp< EffectModule>& effect);
+
+ uint32_t getStrategyForSession_l(int sessionId);
+
struct stream_type_t {
stream_type_t()
: volume(1.0f),
@@ -690,7 +696,10 @@
class MixerThread : public PlaybackThread {
public:
- MixerThread (const sp<AudioFlinger>& audioFlinger, AudioStreamOut* output, int id, uint32_t device);
+ MixerThread (const sp<AudioFlinger>& audioFlinger,
+ AudioStreamOut* output,
+ int id,
+ uint32_t device);
virtual ~MixerThread();
// Thread virtuals
@@ -701,7 +710,8 @@
virtual status_t dumpInternals(int fd, const Vector<String16>& args);
protected:
- uint32_t prepareTracks_l(const SortedVector< wp<Track> >& activeTracks, Vector< sp<Track> > *tracksToRemove);
+ uint32_t prepareTracks_l(const SortedVector< wp<Track> >& activeTracks,
+ Vector< sp<Track> > *tracksToRemove);
virtual int getTrackName_l();
virtual void deleteTrackName_l(int name);
virtual uint32_t activeSleepTimeUs();
@@ -764,6 +774,9 @@
void audioConfigChanged_l(int event, int ioHandle, void *param2);
int nextUniqueId();
+ status_t moveEffectChain_l(int session,
+ AudioFlinger::PlaybackThread *srcThread,
+ AudioFlinger::PlaybackThread *dstThread);
friend class AudioBuffer;
@@ -931,6 +944,9 @@
uint32_t status() {
return mStatus;
}
+ int sessionId() {
+ return mSessionId;
+ }
status_t setEnabled(bool enabled);
bool isEnabled();
@@ -938,6 +954,8 @@
int16_t *inBuffer() { return mConfig.inputCfg.buffer.s16; }
void setOutBuffer(int16_t *buffer) { mConfig.outputCfg.buffer.s16 = buffer; }
int16_t *outBuffer() { return mConfig.outputCfg.buffer.s16; }
+ void setChain(const wp<EffectChain>& chain) { mChain = chain; }
+ void setThread(const wp<ThreadBase>& thread) { mThread = thread; }
status_t addHandle(sp<EffectHandle>& handle);
void disconnect(const wp<EffectHandle>& handle);
@@ -1061,19 +1079,19 @@
mLock.unlock();
}
- status_t addEffect_l(sp<EffectModule>& handle);
+ status_t addEffect_l(const sp<EffectModule>& handle);
size_t removeEffect_l(const sp<EffectModule>& handle);
int sessionId() {
return mSessionId;
}
+
sp<EffectModule> getEffectFromDesc_l(effect_descriptor_t *descriptor);
sp<EffectModule> getEffectFromId_l(int id);
bool setVolume_l(uint32_t *left, uint32_t *right);
void setDevice_l(uint32_t device);
void setMode_l(uint32_t mode);
-
void setInBuffer(int16_t *buffer, bool ownsBuffer = false) {
mInBuffer = buffer;
mOwnInBuffer = ownsBuffer;
@@ -1092,6 +1110,10 @@
void stopTrack() {mActiveTrackCnt--;}
int activeTracks() { return mActiveTrackCnt;}
+ uint32_t strategy() { return mStrategy; }
+ void setStrategy(uint32_t strategy)
+ { mStrategy = strategy; }
+
status_t dump(int fd, const Vector<String16>& args);
protected:
@@ -1112,7 +1134,7 @@
uint32_t mRightVolume; // previous volume on right channel
uint32_t mNewLeftVolume; // new volume on left channel
uint32_t mNewRightVolume; // new volume on right channel
-
+ uint32_t mStrategy; // strategy for this effect chain
};
friend class RecordThread;
@@ -1142,12 +1164,6 @@
#endif
uint32_t mMode;
- // Maximum CPU load allocated to audio effects in 0.1 MIPS (ARMv5TE, 0 WS memory) units
- static const uint32_t MAX_EFFECTS_CPU_LOAD = 1000;
- // Maximum memory allocated to audio effects in KB
- static const uint32_t MAX_EFFECTS_MEMORY = 512;
- uint32_t mTotalEffectsCpuLoad; // current CPU load used by effects
- uint32_t mTotalEffectsMemory; // current memory used by effects
};
// ----------------------------------------------------------------------------
diff --git a/services/audioflinger/AudioPolicyManagerBase.cpp b/services/audioflinger/AudioPolicyManagerBase.cpp
index 549d661..4614c8d 100644
--- a/services/audioflinger/AudioPolicyManagerBase.cpp
+++ b/services/audioflinger/AudioPolicyManagerBase.cpp
@@ -538,9 +538,11 @@
return output;
}
-status_t AudioPolicyManagerBase::startOutput(audio_io_handle_t output, AudioSystem::stream_type stream)
+status_t AudioPolicyManagerBase::startOutput(audio_io_handle_t output,
+ AudioSystem::stream_type stream,
+ int session)
{
- LOGV("startOutput() output %d, stream %d", output, stream);
+ LOGV("startOutput() output %d, stream %d, session %d", output, stream, session);
ssize_t index = mOutputs.indexOfKey(output);
if (index < 0) {
LOGW("startOutput() unknow output %d", output);
@@ -574,9 +576,11 @@
return NO_ERROR;
}
-status_t AudioPolicyManagerBase::stopOutput(audio_io_handle_t output, AudioSystem::stream_type stream)
+status_t AudioPolicyManagerBase::stopOutput(audio_io_handle_t output,
+ AudioSystem::stream_type stream,
+ int session)
{
- LOGV("stopOutput() output %d, stream %d", output, stream);
+ LOGV("stopOutput() output %d, stream %d, session %d", output, stream, session);
ssize_t index = mOutputs.indexOfKey(output);
if (index < 0) {
LOGW("stopOutput() unknow output %d", output);
@@ -602,8 +606,12 @@
setOutputDevice(output, getNewDevice(output));
#ifdef WITH_A2DP
- if (mA2dpOutput != 0 && !a2dpUsedForSonification() && strategy == STRATEGY_SONIFICATION) {
- setStrategyMute(STRATEGY_MEDIA, false, mA2dpOutput, mOutputs.valueFor(mHardwareOutput)->mLatency*2);
+ if (mA2dpOutput != 0 && !a2dpUsedForSonification() &&
+ strategy == STRATEGY_SONIFICATION) {
+ setStrategyMute(STRATEGY_MEDIA,
+ false,
+ mA2dpOutput,
+ mOutputs.valueFor(mHardwareOutput)->mLatency*2);
}
#endif
if (output != mHardwareOutput) {
@@ -826,6 +834,85 @@
return NO_ERROR;
}
+audio_io_handle_t AudioPolicyManagerBase::getOutputForEffect(effect_descriptor_t *desc)
+{
+ LOGV("getOutputForEffect()");
+ // apply simple rule where global effects are attached to the same output as MUSIC streams
+ return getOutput(AudioSystem::MUSIC);
+}
+
+status_t AudioPolicyManagerBase::registerEffect(effect_descriptor_t *desc,
+ audio_io_handle_t output,
+ uint32_t strategy,
+ int session,
+ int id)
+{
+ ssize_t index = mOutputs.indexOfKey(output);
+ if (index < 0) {
+ LOGW("registerEffect() unknown output %d", output);
+ return INVALID_OPERATION;
+ }
+
+ if (mTotalEffectsCpuLoad + desc->cpuLoad > getMaxEffectsCpuLoad()) {
+ LOGW("registerEffect() CPU Load limit exceeded for Fx %s, CPU %f MIPS",
+ desc->name, (float)desc->cpuLoad/10);
+ return INVALID_OPERATION;
+ }
+ if (mTotalEffectsMemory + desc->memoryUsage > getMaxEffectsMemory()) {
+ LOGW("registerEffect() memory limit exceeded for Fx %s, Memory %d KB",
+ desc->name, desc->memoryUsage);
+ return INVALID_OPERATION;
+ }
+ mTotalEffectsCpuLoad += desc->cpuLoad;
+ mTotalEffectsMemory += desc->memoryUsage;
+ LOGV("registerEffect() effect %s, output %d, strategy %d session %d id %d",
+ desc->name, output, strategy, session, id);
+
+ LOGV("registerEffect() CPU %d, memory %d", desc->cpuLoad, desc->memoryUsage);
+ LOGV(" total CPU %d, total memory %d", mTotalEffectsCpuLoad, mTotalEffectsMemory);
+
+ EffectDescriptor *pDesc = new EffectDescriptor();
+ memcpy (&pDesc->mDesc, desc, sizeof(effect_descriptor_t));
+ pDesc->mOutput = output;
+ pDesc->mStrategy = (routing_strategy)strategy;
+ pDesc->mSession = session;
+ mEffects.add(id, pDesc);
+
+ return NO_ERROR;
+}
+
+status_t AudioPolicyManagerBase::unregisterEffect(int id)
+{
+ ssize_t index = mEffects.indexOfKey(id);
+ if (index < 0) {
+ LOGW("unregisterEffect() unknown effect ID %d", id);
+ return INVALID_OPERATION;
+ }
+
+ EffectDescriptor *pDesc = mEffects.valueAt(index);
+
+ if (mTotalEffectsCpuLoad < pDesc->mDesc.cpuLoad) {
+ LOGW("unregisterEffect() CPU load %d too high for total %d",
+ pDesc->mDesc.cpuLoad, mTotalEffectsCpuLoad);
+ pDesc->mDesc.cpuLoad = mTotalEffectsCpuLoad;
+ }
+ mTotalEffectsCpuLoad -= pDesc->mDesc.cpuLoad;
+ if (mTotalEffectsMemory < pDesc->mDesc.memoryUsage) {
+ LOGW("unregisterEffect() memory %d too big for total %d",
+ pDesc->mDesc.memoryUsage, mTotalEffectsMemory);
+ pDesc->mDesc.memoryUsage = mTotalEffectsMemory;
+ }
+ mTotalEffectsMemory -= pDesc->mDesc.memoryUsage;
+ LOGV("unregisterEffect() effect %s, ID %d, CPU %d, memory %d",
+ pDesc->mDesc.name, id, pDesc->mDesc.cpuLoad, pDesc->mDesc.memoryUsage);
+ LOGV(" total CPU %d, total memory %d", mTotalEffectsCpuLoad, mTotalEffectsMemory);
+
+ mEffects.removeItem(id);
+ delete pDesc;
+
+ return NO_ERROR;
+}
+
status_t AudioPolicyManagerBase::dump(int fd)
{
const size_t SIZE = 256;
@@ -890,6 +977,19 @@
write(fd, buffer, strlen(buffer));
}
+ snprintf(buffer, SIZE, "\nTotal Effects CPU: %f MIPS, Total Effects memory: %d KB\n",
+ (float)mTotalEffectsCpuLoad/10, mTotalEffectsMemory);
+ write(fd, buffer, strlen(buffer));
+
+ snprintf(buffer, SIZE, "Registered effects:\n");
+ write(fd, buffer, strlen(buffer));
+ for (size_t i = 0; i < mEffects.size(); i++) {
+ snprintf(buffer, SIZE, "- Effect %d dump:\n", mEffects.keyAt(i));
+ write(fd, buffer, strlen(buffer));
+ mEffects.valueAt(i)->dump(fd);
+ }
+
+
return NO_ERROR;
}
@@ -903,7 +1003,7 @@
Thread(false),
#endif //AUDIO_POLICY_TEST
mPhoneState(AudioSystem::MODE_NORMAL), mRingerMode(0), mMusicStopTime(0), mLimitRingtoneVolume(false),
- mLastVoiceVolume(-1.0f)
+ mLastVoiceVolume(-1.0f), mTotalEffectsCpuLoad(0), mTotalEffectsMemory(0)
{
mpClientInterface = clientInterface;
@@ -939,6 +1039,7 @@
} else {
addOutput(mHardwareOutput, outputDesc);
setOutputDevice(mHardwareOutput, (uint32_t)AudioSystem::DEVICE_OUT_SPEAKER, true);
+ //TODO: configure audio effect output stage here
}
updateDeviceForStrategy();
@@ -1153,6 +1254,9 @@
if (mA2dpOutput) {
// add A2DP output descriptor
addOutput(mA2dpOutput, outputDesc);
+
+ //TODO: configure audio effect output stage here
+
// set initial stream volume for A2DP device
applyStreamVolumes(mA2dpOutput, device);
if (a2dpUsedForSonification()) {
@@ -1270,6 +1374,7 @@
AudioParameter param;
param.add(String8("closing"), String8("true"));
mpClientInterface->setParameters(mA2dpOutput, param.toString());
+
mpClientInterface->closeOutput(mA2dpOutput);
delete mOutputs.valueFor(mA2dpOutput);
mOutputs.removeItem(mA2dpOutput);
@@ -1283,48 +1388,54 @@
uint32_t curDevice = getDeviceForStrategy(strategy, false);
bool a2dpWasUsed = AudioSystem::isA2dpDevice((AudioSystem::audio_devices)(prevDevice & ~AudioSystem::DEVICE_OUT_SPEAKER));
bool a2dpIsUsed = AudioSystem::isA2dpDevice((AudioSystem::audio_devices)(curDevice & ~AudioSystem::DEVICE_OUT_SPEAKER));
- AudioOutputDescriptor *hwOutputDesc = mOutputs.valueFor(mHardwareOutput);
- AudioOutputDescriptor *a2dpOutputDesc;
+ audio_io_handle_t srcOutput = 0;
+ audio_io_handle_t dstOutput = 0;
if (a2dpWasUsed && !a2dpIsUsed) {
bool dupUsed = a2dpUsedForSonification() && a2dpWasUsed && (AudioSystem::popCount(prevDevice) == 2);
-
+ dstOutput = mHardwareOutput;
if (dupUsed) {
- LOGV("checkOutputForStrategy() moving strategy %d to duplicated", strategy);
- a2dpOutputDesc = mOutputs.valueFor(mDuplicatedOutput);
+ LOGV("checkOutputForStrategy() moving strategy %d from duplicated", strategy);
+ srcOutput = mDuplicatedOutput;
} else {
- LOGV("checkOutputForStrategy() moving strategy %d to a2dp", strategy);
- a2dpOutputDesc = mOutputs.valueFor(mA2dpOutput);
+ LOGV("checkOutputForStrategy() moving strategy %d from a2dp", strategy);
+ srcOutput = mA2dpOutput;
}
- for (int i = 0; i < (int)AudioSystem::NUM_STREAM_TYPES; i++) {
- if (getStrategy((AudioSystem::stream_type)i) == strategy) {
- mpClientInterface->setStreamOutput((AudioSystem::stream_type)i, mHardwareOutput);
- }
- }
// do not change newDevice if it was already set before this call by a previous call to
// getNewDevice() or checkOutputForStrategy() for a strategy with higher priority
- if (newDevice == 0 && hwOutputDesc->isUsedByStrategy(strategy)) {
+ if (newDevice == 0 && mOutputs.valueFor(mHardwareOutput)->isUsedByStrategy(strategy)) {
newDevice = getDeviceForStrategy(strategy, false);
}
}
if (a2dpIsUsed && !a2dpWasUsed) {
bool dupUsed = a2dpUsedForSonification() && a2dpIsUsed && (AudioSystem::popCount(curDevice) == 2);
- audio_io_handle_t a2dpOutput;
-
+ srcOutput = mHardwareOutput;
if (dupUsed) {
- LOGV("checkOutputForStrategy() moving strategy %d from duplicated", strategy);
- a2dpOutputDesc = mOutputs.valueFor(mDuplicatedOutput);
- a2dpOutput = mDuplicatedOutput;
+ LOGV("checkOutputForStrategy() moving strategy %d to duplicated", strategy);
+ dstOutput = mDuplicatedOutput;
} else {
- LOGV("checkOutputForStrategy() moving strategy %d from a2dp", strategy);
- a2dpOutputDesc = mOutputs.valueFor(mA2dpOutput);
- a2dpOutput = mA2dpOutput;
+ LOGV("checkOutputForStrategy() moving strategy %d to a2dp", strategy);
+ dstOutput = mA2dpOutput;
}
+ }
+ if (srcOutput != 0 && dstOutput != 0) {
+ // Move effects associated to this strategy from previous output to new output
+ for (size_t i = 0; i < mEffects.size(); i++) {
+ EffectDescriptor *desc = mEffects.valueAt(i);
+ if (desc->mSession != AudioSystem::SESSION_OUTPUT_STAGE &&
+ desc->mStrategy == strategy &&
+ desc->mOutput == srcOutput) {
+ LOGV("checkOutputForStrategy() moving effect %d to output %d", mEffects.keyAt(i), dstOutput);
+ mpClientInterface->moveEffects(desc->mSession, srcOutput, dstOutput);
+ desc->mOutput = dstOutput;
+ }
+ }
+ // Move tracks associated to this strategy from previous output to new output
for (int i = 0; i < (int)AudioSystem::NUM_STREAM_TYPES; i++) {
if (getStrategy((AudioSystem::stream_type)i) == strategy) {
- mpClientInterface->setStreamOutput((AudioSystem::stream_type)i, a2dpOutput);
+ mpClientInterface->setStreamOutput((AudioSystem::stream_type)i, dstOutput);
}
}
}
@@ -1372,8 +1483,12 @@
return device;
}
-AudioPolicyManagerBase::routing_strategy AudioPolicyManagerBase::getStrategy(AudioSystem::stream_type stream)
-{
+uint32_t AudioPolicyManagerBase::getStrategyForStream(AudioSystem::stream_type stream) {
+ return (uint32_t)getStrategy(stream);
+}
+
+AudioPolicyManagerBase::routing_strategy AudioPolicyManagerBase::getStrategy(
+ AudioSystem::stream_type stream) {
// stream to strategy mapping
switch (stream) {
case AudioSystem::VOICE_CALL:
@@ -1846,6 +1961,16 @@
(format !=0 && !AudioSystem::isLinearPCM(format)));
}
+uint32_t AudioPolicyManagerBase::getMaxEffectsCpuLoad()
+{
+ return MAX_EFFECTS_CPU_LOAD;
+}
+
+uint32_t AudioPolicyManagerBase::getMaxEffectsMemory()
+{
+ return MAX_EFFECTS_MEMORY;
+}
+
// --- AudioOutputDescriptor class implementation
AudioPolicyManagerBase::AudioOutputDescriptor::AudioOutputDescriptor()
@@ -1979,5 +2104,27 @@
mCanBeMuted);
}
+// --- EffectDescriptor class implementation
+
+status_t AudioPolicyManagerBase::EffectDescriptor::dump(int fd)
+{
+ const size_t SIZE = 256;
+ char buffer[SIZE];
+ String8 result;
+
+ snprintf(buffer, SIZE, " Output: %d\n", mOutput);
+ result.append(buffer);
+ snprintf(buffer, SIZE, " Strategy: %d\n", mStrategy);
+ result.append(buffer);
+ snprintf(buffer, SIZE, " Session: %d\n", mSession);
+ result.append(buffer);
+ snprintf(buffer, SIZE, " Name: %s\n", mDesc.name);
+ result.append(buffer);
+ write(fd, result.string(), result.size());
+
+ return NO_ERROR;
+}
+
+
}; // namespace android
diff --git a/services/audioflinger/AudioPolicyService.cpp b/services/audioflinger/AudioPolicyService.cpp
index bb3905c..f24e08e 100644
--- a/services/audioflinger/AudioPolicyService.cpp
+++ b/services/audioflinger/AudioPolicyService.cpp
@@ -119,7 +119,8 @@
if (!AudioSystem::isOutputDevice(device) && !AudioSystem::isInputDevice(device)) {
return BAD_VALUE;
}
- if (state != AudioSystem::DEVICE_STATE_AVAILABLE && state != AudioSystem::DEVICE_STATE_UNAVAILABLE) {
+ if (state != AudioSystem::DEVICE_STATE_AVAILABLE &&
+ state != AudioSystem::DEVICE_STATE_UNAVAILABLE) {
return BAD_VALUE;
}
@@ -128,8 +129,9 @@
return mpPolicyManager->setDeviceConnectionState(device, state, device_address);
}
-AudioSystem::device_connection_state AudioPolicyService::getDeviceConnectionState(AudioSystem::audio_devices device,
- const char *device_address)
+AudioSystem::device_connection_state AudioPolicyService::getDeviceConnectionState(
+ AudioSystem::audio_devices device,
+ const char *device_address)
{
if (mpPolicyManager == NULL) {
return AudioSystem::DEVICE_STATE_UNAVAILABLE;
@@ -175,7 +177,8 @@
return NO_ERROR;
}
-status_t AudioPolicyService::setForceUse(AudioSystem::force_use usage, AudioSystem::forced_config config)
+status_t AudioPolicyService::setForceUse(AudioSystem::force_use usage,
+ AudioSystem::forced_config config)
{
if (mpPolicyManager == NULL) {
return NO_INIT;
@@ -223,24 +226,28 @@
return mpPolicyManager->getOutput(stream, samplingRate, format, channels, flags);
}
-status_t AudioPolicyService::startOutput(audio_io_handle_t output, AudioSystem::stream_type stream)
+status_t AudioPolicyService::startOutput(audio_io_handle_t output,
+ AudioSystem::stream_type stream,
+ int session)
{
if (mpPolicyManager == NULL) {
return NO_INIT;
}
LOGV("startOutput() tid %d", gettid());
Mutex::Autolock _l(mLock);
- return mpPolicyManager->startOutput(output, stream);
+ return mpPolicyManager->startOutput(output, stream, session);
}
-status_t AudioPolicyService::stopOutput(audio_io_handle_t output, AudioSystem::stream_type stream)
+status_t AudioPolicyService::stopOutput(audio_io_handle_t output,
+ AudioSystem::stream_type stream,
+ int session)
{
if (mpPolicyManager == NULL) {
return NO_INIT;
}
LOGV("stopOutput() tid %d", gettid());
Mutex::Autolock _l(mLock);
- return mpPolicyManager->stopOutput(output, stream);
+ return mpPolicyManager->stopOutput(output, stream, session);
}
void AudioPolicyService::releaseOutput(audio_io_handle_t output)
@@ -339,8 +346,46 @@
return mpPolicyManager->getStreamVolumeIndex(stream, index);
}
+uint32_t AudioPolicyService::getStrategyForStream(AudioSystem::stream_type stream)
+{
+ if (mpPolicyManager == NULL) {
+ return 0;
+ }
+ return mpPolicyManager->getStrategyForStream(stream);
+}
+
+audio_io_handle_t AudioPolicyService::getOutputForEffect(effect_descriptor_t *desc)
+{
+ if (mpPolicyManager == NULL) {
+ return NO_INIT;
+ }
+ Mutex::Autolock _l(mLock);
+ return mpPolicyManager->getOutputForEffect(desc);
+}
+
+status_t AudioPolicyService::registerEffect(effect_descriptor_t *desc,
+ audio_io_handle_t output,
+ uint32_t strategy,
+ int session,
+ int id)
+{
+ if (mpPolicyManager == NULL) {
+ return NO_INIT;
+ }
+ return mpPolicyManager->registerEffect(desc, output, strategy, session, id);
+}
+
+status_t AudioPolicyService::unregisterEffect(int id)
+{
+ if (mpPolicyManager == NULL) {
+ return NO_INIT;
+ }
+ return mpPolicyManager->unregisterEffect(id);
+}
+
void AudioPolicyService::binderDied(const wp<IBinder>& who) {
- LOGW("binderDied() %p, tid %d, calling tid %d", who.unsafe_get(), gettid(), IPCThreadState::self()->getCallingPid());
+ LOGW("binderDied() %p, tid %d, calling tid %d", who.unsafe_get(), gettid(),
+ IPCThreadState::self()->getCallingPid());
}
static bool tryLock(Mutex& mutex)
@@ -447,10 +492,16 @@
return 0;
}
- return af->openOutput(pDevices, pSamplingRate, (uint32_t *)pFormat, pChannels, pLatencyMs, flags);
+ return af->openOutput(pDevices,
+ pSamplingRate,
+ (uint32_t *)pFormat,
+ pChannels,
+ pLatencyMs,
+ flags);
}
-audio_io_handle_t AudioPolicyService::openDuplicateOutput(audio_io_handle_t output1, audio_io_handle_t output2)
+audio_io_handle_t AudioPolicyService::openDuplicateOutput(audio_io_handle_t output1,
+ audio_io_handle_t output2)
{
sp<IAudioFlinger> af = AudioSystem::get_audio_flinger();
if (af == 0) {
@@ -514,12 +565,16 @@
return af->closeInput(input);
}
-status_t AudioPolicyService::setStreamVolume(AudioSystem::stream_type stream, float volume, audio_io_handle_t output, int delayMs)
+status_t AudioPolicyService::setStreamVolume(AudioSystem::stream_type stream,
+ float volume,
+ audio_io_handle_t output,
+ int delayMs)
{
return mAudioCommandThread->volumeCommand((int)stream, volume, (int)output, delayMs);
}
-status_t AudioPolicyService::setStreamOutput(AudioSystem::stream_type stream, audio_io_handle_t output)
+status_t AudioPolicyService::setStreamOutput(AudioSystem::stream_type stream,
+ audio_io_handle_t output)
{
sp<IAudioFlinger> af = AudioSystem::get_audio_flinger();
if (af == 0) return PERMISSION_DENIED;
@@ -527,8 +582,18 @@
return af->setStreamOutput(stream, output);
}
+status_t AudioPolicyService::moveEffects(int session, audio_io_handle_t srcOutput,
+ audio_io_handle_t dstOutput)
+{
+ sp<IAudioFlinger> af = AudioSystem::get_audio_flinger();
+ if (af == 0) return PERMISSION_DENIED;
-void AudioPolicyService::setParameters(audio_io_handle_t ioHandle, const String8& keyValuePairs, int delayMs)
+ return af->moveEffects(session, (int)srcOutput, (int)dstOutput);
+}
+
+void AudioPolicyService::setParameters(audio_io_handle_t ioHandle,
+ const String8& keyValuePairs,
+ int delayMs)
{
mAudioCommandThread->parametersCommand((int)ioHandle, keyValuePairs, delayMs);
}
@@ -539,7 +604,8 @@
return result;
}
-status_t AudioPolicyService::startTone(ToneGenerator::tone_type tone, AudioSystem::stream_type stream)
+status_t AudioPolicyService::startTone(ToneGenerator::tone_type tone,
+ AudioSystem::stream_type stream)
{
mTonePlaybackThread->startToneCommand(tone, stream);
return NO_ERROR;
@@ -623,8 +689,11 @@
}break;
case SET_VOLUME: {
VolumeData *data = (VolumeData *)command->mParam;
- LOGV("AudioCommandThread() processing set volume stream %d, volume %f, output %d", data->mStream, data->mVolume, data->mIO);
- command->mStatus = AudioSystem::setStreamVolume(data->mStream, data->mVolume, data->mIO);
+ LOGV("AudioCommandThread() processing set volume stream %d, \
+ volume %f, output %d", data->mStream, data->mVolume, data->mIO);
+ command->mStatus = AudioSystem::setStreamVolume(data->mStream,
+ data->mVolume,
+ data->mIO);
if (command->mWaitStatus) {
command->mCond.signal();
mWaitWorkCV.wait(mLock);
@@ -633,7 +702,8 @@
}break;
case SET_PARAMETERS: {
ParametersData *data = (ParametersData *)command->mParam;
- LOGV("AudioCommandThread() processing set parameters string %s, io %d", data->mKeyValuePairs.string(), data->mIO);
+ LOGV("AudioCommandThread() processing set parameters string %s, io %d",
+ data->mKeyValuePairs.string(), data->mIO);
command->mStatus = AudioSystem::setParameters(data->mIO, data->mKeyValuePairs);
if (command->mWaitStatus) {
command->mCond.signal();
@@ -643,7 +713,8 @@
}break;
case SET_VOICE_VOLUME: {
VoiceVolumeData *data = (VoiceVolumeData *)command->mParam;
- LOGV("AudioCommandThread() processing set voice volume volume %f", data->mVolume);
+ LOGV("AudioCommandThread() processing set voice volume volume %f",
+ data->mVolume);
command->mStatus = AudioSystem::setVoiceVolume(data->mVolume);
if (command->mWaitStatus) {
command->mCond.signal();
@@ -734,7 +805,10 @@
mWaitWorkCV.signal();
}
-status_t AudioPolicyService::AudioCommandThread::volumeCommand(int stream, float volume, int output, int delayMs)
+status_t AudioPolicyService::AudioCommandThread::volumeCommand(int stream,
+ float volume,
+ int output,
+ int delayMs)
{
status_t status = NO_ERROR;
@@ -752,7 +826,8 @@
}
Mutex::Autolock _l(mLock);
insertCommand_l(command, delayMs);
- LOGV("AudioCommandThread() adding set volume stream %d, volume %f, output %d", stream, volume, output);
+ LOGV("AudioCommandThread() adding set volume stream %d, volume %f, output %d",
+ stream, volume, output);
mWaitWorkCV.signal();
if (command->mWaitStatus) {
command->mCond.wait(mLock);
@@ -762,7 +837,9 @@
return status;
}
-status_t AudioPolicyService::AudioCommandThread::parametersCommand(int ioHandle, const String8& keyValuePairs, int delayMs)
+status_t AudioPolicyService::AudioCommandThread::parametersCommand(int ioHandle,
+ const String8& keyValuePairs,
+ int delayMs)
{
status_t status = NO_ERROR;
@@ -779,7 +856,8 @@
}
Mutex::Autolock _l(mLock);
insertCommand_l(command, delayMs);
- LOGV("AudioCommandThread() adding set parameter string %s, io %d ,delay %d", keyValuePairs.string(), ioHandle, delayMs);
+ LOGV("AudioCommandThread() adding set parameter string %s, io %d ,delay %d",
+ keyValuePairs.string(), ioHandle, delayMs);
mWaitWorkCV.signal();
if (command->mWaitStatus) {
command->mCond.wait(mLock);
@@ -840,7 +918,8 @@
ParametersData *data = (ParametersData *)command->mParam;
ParametersData *data2 = (ParametersData *)command2->mParam;
if (data->mIO != data2->mIO) break;
- LOGV("Comparing parameter command %s to new command %s", data2->mKeyValuePairs.string(), data->mKeyValuePairs.string());
+ LOGV("Comparing parameter command %s to new command %s",
+ data2->mKeyValuePairs.string(), data->mKeyValuePairs.string());
AudioParameter param = AudioParameter(data->mKeyValuePairs);
AudioParameter param2 = AudioParameter(data2->mKeyValuePairs);
for (size_t j = 0; j < param.size(); j++) {
@@ -872,7 +951,8 @@
VolumeData *data2 = (VolumeData *)command2->mParam;
if (data->mIO != data2->mIO) break;
if (data->mStream != data2->mStream) break;
- LOGV("Filtering out volume command on output %d for stream %d", data->mIO, data->mStream);
+ LOGV("Filtering out volume command on output %d for stream %d",
+ data->mIO, data->mStream);
removedCommands.add(command2);
} break;
case START_TONE:
@@ -896,7 +976,8 @@
removedCommands.clear();
// insert command at the right place according to its time stamp
- LOGV("inserting command: %d at index %d, num commands %d", command->mCommand, (int)i+1, mAudioCommands.size());
+ LOGV("inserting command: %d at index %d, num commands %d",
+ command->mCommand, (int)i+1, mAudioCommands.size());
mAudioCommands.insertAt(command, i + 1);
}
diff --git a/services/audioflinger/AudioPolicyService.h b/services/audioflinger/AudioPolicyService.h
index a13d0bd..558f455 100644
--- a/services/audioflinger/AudioPolicyService.h
+++ b/services/audioflinger/AudioPolicyService.h
@@ -28,7 +28,8 @@
// ----------------------------------------------------------------------------
-class AudioPolicyService: public BnAudioPolicyService, public AudioPolicyClientInterface, public IBinder::DeathRecipient
+class AudioPolicyService: public BnAudioPolicyService, public AudioPolicyClientInterface,
+ public IBinder::DeathRecipient
{
public:
@@ -43,8 +44,9 @@
virtual status_t setDeviceConnectionState(AudioSystem::audio_devices device,
AudioSystem::device_connection_state state,
const char *device_address);
- virtual AudioSystem::device_connection_state getDeviceConnectionState(AudioSystem::audio_devices device,
- const char *device_address);
+ virtual AudioSystem::device_connection_state getDeviceConnectionState(
+ AudioSystem::audio_devices device,
+ const char *device_address);
virtual status_t setPhoneState(int state);
virtual status_t setRingerMode(uint32_t mode, uint32_t mask);
virtual status_t setForceUse(AudioSystem::force_use usage, AudioSystem::forced_config config);
@@ -53,15 +55,21 @@
uint32_t samplingRate = 0,
uint32_t format = AudioSystem::FORMAT_DEFAULT,
uint32_t channels = 0,
- AudioSystem::output_flags flags = AudioSystem::OUTPUT_FLAG_INDIRECT);
- virtual status_t startOutput(audio_io_handle_t output, AudioSystem::stream_type stream);
- virtual status_t stopOutput(audio_io_handle_t output, AudioSystem::stream_type stream);
+ AudioSystem::output_flags flags =
+ AudioSystem::OUTPUT_FLAG_INDIRECT);
+ virtual status_t startOutput(audio_io_handle_t output,
+ AudioSystem::stream_type stream,
+ int session = 0);
+ virtual status_t stopOutput(audio_io_handle_t output,
+ AudioSystem::stream_type stream,
+ int session = 0);
virtual void releaseOutput(audio_io_handle_t output);
virtual audio_io_handle_t getInput(int inputSource,
uint32_t samplingRate = 0,
uint32_t format = AudioSystem::FORMAT_DEFAULT,
uint32_t channels = 0,
- AudioSystem::audio_in_acoustics acoustics = (AudioSystem::audio_in_acoustics)0);
+ AudioSystem::audio_in_acoustics acoustics =
+ (AudioSystem::audio_in_acoustics)0);
virtual status_t startInput(audio_io_handle_t input);
virtual status_t stopInput(audio_io_handle_t input);
virtual void releaseInput(audio_io_handle_t input);
@@ -71,6 +79,16 @@
virtual status_t setStreamVolumeIndex(AudioSystem::stream_type stream, int index);
virtual status_t getStreamVolumeIndex(AudioSystem::stream_type stream, int *index);
+ virtual uint32_t getStrategyForStream(AudioSystem::stream_type stream);
+
+ virtual audio_io_handle_t getOutputForEffect(effect_descriptor_t *desc);
+ virtual status_t registerEffect(effect_descriptor_t *desc,
+ audio_io_handle_t output,
+ uint32_t strategy,
+ int session,
+ int id);
+ virtual status_t unregisterEffect(int id);
+
virtual status_t onTransact(
uint32_t code,
const Parcel& data,
@@ -89,7 +107,8 @@
uint32_t *pChannels,
uint32_t *pLatencyMs,
AudioSystem::output_flags flags);
- virtual audio_io_handle_t openDuplicateOutput(audio_io_handle_t output1, audio_io_handle_t output2);
+ virtual audio_io_handle_t openDuplicateOutput(audio_io_handle_t output1,
+ audio_io_handle_t output2);
virtual status_t closeOutput(audio_io_handle_t output);
virtual status_t suspendOutput(audio_io_handle_t output);
virtual status_t restoreOutput(audio_io_handle_t output);
@@ -99,13 +118,21 @@
uint32_t *pChannels,
uint32_t acoustics);
virtual status_t closeInput(audio_io_handle_t input);
- virtual status_t setStreamVolume(AudioSystem::stream_type stream, float volume, audio_io_handle_t output, int delayMs = 0);
+ virtual status_t setStreamVolume(AudioSystem::stream_type stream,
+ float volume,
+ audio_io_handle_t output,
+ int delayMs = 0);
virtual status_t setStreamOutput(AudioSystem::stream_type stream, audio_io_handle_t output);
- virtual void setParameters(audio_io_handle_t ioHandle, const String8& keyValuePairs, int delayMs = 0);
+ virtual void setParameters(audio_io_handle_t ioHandle,
+ const String8& keyValuePairs,
+ int delayMs = 0);
virtual String8 getParameters(audio_io_handle_t ioHandle, const String8& keys);
virtual status_t startTone(ToneGenerator::tone_type tone, AudioSystem::stream_type stream);
virtual status_t stopTone();
virtual status_t setVoiceVolume(float volume, int delayMs = 0);
+ virtual status_t moveEffects(int session,
+ audio_io_handle_t srcOutput,
+ audio_io_handle_t dstOutput);
private:
AudioPolicyService();
diff --git a/services/java/com/android/server/am/ActivityManagerService.java b/services/java/com/android/server/am/ActivityManagerService.java
index 20bcbf3..a8df71d 100644
--- a/services/java/com/android/server/am/ActivityManagerService.java
+++ b/services/java/com/android/server/am/ActivityManagerService.java
@@ -58,6 +58,7 @@
import android.content.ComponentName;
import android.content.ContentResolver;
import android.content.Context;
+import android.content.DialogInterface;
import android.content.Intent;
import android.content.IntentFilter;
import android.content.IIntentReceiver;
@@ -1077,7 +1078,7 @@
d.setCancelable(false);
d.setTitle("System UIDs Inconsistent");
d.setMessage("UIDs on the system are inconsistent, you need to wipe your data partition or your device will be unstable.");
- d.setButton("I'm Feeling Lucky",
+ d.setButton(DialogInterface.BUTTON_POSITIVE, "I'm Feeling Lucky",
mHandler.obtainMessage(IM_FEELING_LUCKY_MSG));
mUidAlert = d;
d.show();
diff --git a/services/java/com/android/server/am/AppWaitingForDebuggerDialog.java b/services/java/com/android/server/am/AppWaitingForDebuggerDialog.java
index 8e9818d..9fb48b3 100644
--- a/services/java/com/android/server/am/AppWaitingForDebuggerDialog.java
+++ b/services/java/com/android/server/am/AppWaitingForDebuggerDialog.java
@@ -17,6 +17,7 @@
package com.android.server.am;
import android.content.Context;
+import android.content.DialogInterface;
import android.os.Handler;
import android.os.Message;
@@ -49,7 +50,7 @@
text.append(" is waiting for the debugger to attach.");
setMessage(text.toString());
- setButton("Force Close", mHandler.obtainMessage(1, app));
+ setButton(DialogInterface.BUTTON_POSITIVE, "Force Close", mHandler.obtainMessage(1, app));
setTitle("Waiting For Debugger");
getWindow().setTitle("Waiting For Debugger: " + app.info.processName);
}
diff --git a/services/java/com/android/server/am/FactoryErrorDialog.java b/services/java/com/android/server/am/FactoryErrorDialog.java
index 2e25474..b19bb5ca 100644
--- a/services/java/com/android/server/am/FactoryErrorDialog.java
+++ b/services/java/com/android/server/am/FactoryErrorDialog.java
@@ -17,6 +17,7 @@
package com.android.server.am;
import android.content.Context;
+import android.content.DialogInterface;
import android.os.Handler;
import android.os.Message;
@@ -26,7 +27,8 @@
setCancelable(false);
setTitle(context.getText(com.android.internal.R.string.factorytest_failed));
setMessage(msg);
- setButton(context.getText(com.android.internal.R.string.factorytest_reboot),
+ setButton(DialogInterface.BUTTON_POSITIVE,
+ context.getText(com.android.internal.R.string.factorytest_reboot),
mHandler.obtainMessage(0));
getWindow().setTitle("Factory Error");
}
diff --git a/tests/DumpRenderTree2/AndroidManifest.xml b/tests/DumpRenderTree2/AndroidManifest.xml
index 3454316..a5199e7 100644
--- a/tests/DumpRenderTree2/AndroidManifest.xml
+++ b/tests/DumpRenderTree2/AndroidManifest.xml
@@ -30,6 +30,7 @@
</activity>
</application>
+ <uses-permission android:name="android.permission.INTERNET" />
<uses-permission android:name="android.permission.WRITE_SDCARD" />
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
</manifest>
\ No newline at end of file
diff --git a/tests/DumpRenderTree2/src/com/android/dumprendertree2/AbstractResult.java b/tests/DumpRenderTree2/src/com/android/dumprendertree2/AbstractResult.java
index f545840..880a5cb 100644
--- a/tests/DumpRenderTree2/src/com/android/dumprendertree2/AbstractResult.java
+++ b/tests/DumpRenderTree2/src/com/android/dumprendertree2/AbstractResult.java
@@ -16,6 +16,9 @@
package com.android.dumprendertree2;
+import android.os.Message;
+import android.webkit.WebView;
+
/**
* A class that represent a result of the test. It is responsible for returning the result's
* raw data and generating its own diff in HTML format.
@@ -47,12 +50,39 @@
}
/**
- * Returns result's raw data that can be written to the disk.
+ * Makes the result object obtain the result of the test from the webview
+ * and store it in the format that suits itself bests. This method is asynchronous.
+ * The message passed as a parameter is a message that should be sent to its target
+ * when the result finishes obtaining the result.
+ *
+ * @param webview
+ * @param resultObtainedMsg
+ */
+ public abstract void obtainActualResult(WebView webview, Message resultObtainedMsg);
+
+ public abstract void setExpectedImageResult(byte[] expectedResult);
+
+ public abstract void setExpectedTextResult(String expectedResult);
+
+ /**
+ * Returns result's image data that can be written to the disk. It can be null
+ * if there is an error of some sort or for example the test times out.
+ *
+ * <p> Some tests will not provide data (like text tests)
*
* @return
- * results raw data
+ * results image data
*/
- public abstract byte[] getData();
+ public abstract byte[] getActualImageResult();
+
+ /**
+ * Returns result's text data. It can be null
+ * if there is an error of some sort or for example the test times out.
+ *
+ * @return
+ * results text data
+ */
+ public abstract String getActualTextResult();
/**
* Returns the code of this result.
@@ -60,7 +90,7 @@
* @return
* the code of this result
*/
- public abstract ResultCode getCode();
+ public abstract ResultCode getResultCode();
/**
* Return the type of the result data.
diff --git a/tests/DumpRenderTree2/src/com/android/dumprendertree2/FsUtils.java b/tests/DumpRenderTree2/src/com/android/dumprendertree2/FsUtils.java
index 68daaf0..212c187 100644
--- a/tests/DumpRenderTree2/src/com/android/dumprendertree2/FsUtils.java
+++ b/tests/DumpRenderTree2/src/com/android/dumprendertree2/FsUtils.java
@@ -19,6 +19,7 @@
import android.util.Log;
import java.io.File;
+import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
@@ -48,4 +49,30 @@
Log.e(LOG_TAG + "::writeDataToStorage", e.getMessage());
}
}
-}
+
+ public static byte[] readDataFromStorage(File file) {
+ if (!file.exists()) {
+ Log.d(LOG_TAG + "::readDataFromStorage", "File does not exist: "
+ + file.getAbsolutePath());
+ return null;
+ }
+
+ byte[] bytes = null;
+ try {
+ FileInputStream fis = null;
+ try {
+ fis = new FileInputStream(file);
+ bytes = new byte[(int) file.length()];
+ fis.read(bytes);
+ } finally {
+ if (fis != null) {
+ fis.close();
+ }
+ }
+ } catch (IOException e) {
+ Log.e(LOG_TAG + "::readDataFromStorage", e.getMessage());
+ }
+
+ return bytes;
+ }
+}
\ No newline at end of file
diff --git a/tests/DumpRenderTree2/src/com/android/dumprendertree2/LayoutTest.java b/tests/DumpRenderTree2/src/com/android/dumprendertree2/LayoutTest.java
index 605afab..aa505b7 100644
--- a/tests/DumpRenderTree2/src/com/android/dumprendertree2/LayoutTest.java
+++ b/tests/DumpRenderTree2/src/com/android/dumprendertree2/LayoutTest.java
@@ -16,7 +16,19 @@
package com.android.dumprendertree2;
+import android.app.Activity;
+import android.net.Uri;
import android.os.Handler;
+import android.os.Message;
+import android.webkit.JsPromptResult;
+import android.webkit.JsResult;
+import android.webkit.WebChromeClient;
+import android.webkit.WebSettings;
+import android.webkit.WebView;
+import android.webkit.WebViewClient;
+import android.webkit.WebStorage.QuotaUpdater;
+
+import java.io.File;
/**
* A class that represents a single layout test. It is responsible for running the test,
@@ -24,18 +36,134 @@
*/
public class LayoutTest {
+ private static final String LOG_TAG = "LayoutTest";
+
+ public static final int MSG_ACTUAL_RESULT_OBTAINED = 0;
+
private String mRelativePath;
- private Handler mCallbackHandler;
+ private String mTestsRootDirPath;
+ private String mUrl;
+ private boolean mOnTestFinishedCalled;
+ private Message mTestFinishedMsg;
private AbstractResult mResult;
- public LayoutTest(String relativePath, Handler callbackHandler) {
+ private WebView mWebView;
+ private Activity mActivity;
+
+ private final Handler mResultHandler = new Handler() {
+ @Override
+ public void handleMessage(Message msg) {
+ if (msg.what == MSG_ACTUAL_RESULT_OBTAINED) {
+ mResult.setExpectedTextResult(LayoutTestsRunnerThread.getExpectedTextResult(mRelativePath));
+ mResult.setExpectedImageResult(LayoutTestsRunnerThread.getExpectedImageResult(mRelativePath));
+ mTestFinishedMsg.sendToTarget();
+ }
+ }
+ };
+
+ private WebViewClient mWebViewClient = new WebViewClient() {
+ @Override
+ public void onPageFinished(WebView view, String url) {
+ /** Some tests fire up many page loads, we don't want to detect them */
+ if (!url.equals(mUrl)) {
+ return;
+ }
+
+ onTestFinished();
+ }
+ };
+
+ private WebChromeClient mWebChromeClient = new WebChromeClient() {
+ @Override
+ public void onExceededDatabaseQuota(String url, String databaseIdentifier,
+ long currentQuota, long estimatedSize, long totalUsedQuota,
+ QuotaUpdater quotaUpdater) {
+ /** TODO: This should be recorded as part of the text result */
+ quotaUpdater.updateQuota(currentQuota + 5 * 1024 * 1024);
+ }
+
+ @Override
+ public boolean onJsAlert(WebView view, String url, String message, JsResult result) {
+ /** TODO: Alerts should be recorded as part of text result */
+ result.confirm();
+ return true;
+ }
+
+ @Override
+ public boolean onJsConfirm(WebView view, String url, String message, JsResult result) {
+ /** TODO: Alerts should be recorded as part of text result */
+ result.confirm();
+ return true;
+ }
+
+ @Override
+ public boolean onJsPrompt(WebView view, String url, String message, String defaultValue,
+ JsPromptResult result) {
+ /** TODO: Alerts should be recorded as part of text result */
+ result.confirm();
+ return true;
+ }
+
+ };
+
+ public LayoutTest(String relativePath, String testsRootDirPath, Message testFinishedMsg,
+ LayoutTestsRunner activity) {
mRelativePath = relativePath;
- mCallbackHandler = callbackHandler;
+ mTestsRootDirPath = testsRootDirPath;
+ mTestFinishedMsg = testFinishedMsg;
+ mActivity = activity;
}
public void run() {
- /** TODO: This is just a stub! */
- mCallbackHandler.obtainMessage(LayoutTestsRunnerThread.MSG_TEST_FINISHED).sendToTarget();
+ mWebView = new WebView(mActivity);
+ mActivity.setContentView(mWebView);
+
+ setupWebView();
+
+ /** TODO: Add timeout msg */
+ mUrl = Uri.fromFile(new File(mTestsRootDirPath, mRelativePath)).toString();
+ mWebView.loadUrl(mUrl);
+ }
+
+ private void onTestFinished() {
+ if (mOnTestFinishedCalled) {
+ return;
+ }
+
+ mOnTestFinishedCalled = true;
+
+ /**
+ * If the result has not been set by the time the test finishes we create
+ * a default type of result.
+ */
+ if (mResult == null) {
+ /** TODO: Default type should be RenderTreeResult. We don't support it now. */
+ mResult = new TextResult(mRelativePath);
+ }
+
+ /** TODO: Implement waitUntilDone */
+
+ mResult.obtainActualResult(mWebView,
+ mResultHandler.obtainMessage(MSG_ACTUAL_RESULT_OBTAINED));
+ }
+
+ private void setupWebView() {
+ WebSettings webViewSettings = mWebView.getSettings();
+ webViewSettings.setAppCacheEnabled(true);
+ webViewSettings.setAppCachePath(mActivity.getApplicationContext().getCacheDir().getPath());
+ webViewSettings.setAppCacheMaxSize(Long.MAX_VALUE);
+ webViewSettings.setJavaScriptEnabled(true);
+ webViewSettings.setJavaScriptCanOpenWindowsAutomatically(true);
+ webViewSettings.setSupportMultipleWindows(true);
+ webViewSettings.setLayoutAlgorithm(WebSettings.LayoutAlgorithm.NORMAL);
+ webViewSettings.setDatabaseEnabled(true);
+ webViewSettings.setDatabasePath(mActivity.getDir("databases", 0).getAbsolutePath());
+ webViewSettings.setDomStorageEnabled(true);
+ webViewSettings.setWorkersEnabled(false);
+ webViewSettings.setXSSAuditorEnabled(false);
+
+ mWebView.setWebViewClient(mWebViewClient);
+ mWebView.setWebChromeClient(mWebChromeClient);
}
public AbstractResult getResult() {
diff --git a/tests/DumpRenderTree2/src/com/android/dumprendertree2/LayoutTestsRunner.java b/tests/DumpRenderTree2/src/com/android/dumprendertree2/LayoutTestsRunner.java
index c18abcc..4421aba 100644
--- a/tests/DumpRenderTree2/src/com/android/dumprendertree2/LayoutTestsRunner.java
+++ b/tests/DumpRenderTree2/src/com/android/dumprendertree2/LayoutTestsRunner.java
@@ -85,6 +85,10 @@
}
String path = intent.getStringExtra(EXTRA_TEST_PATH);
- new LayoutTestsRunnerThread(path, mHandler).start();
+ new LayoutTestsRunnerThread(path, this).start();
+ }
+
+ public Handler getHandler() {
+ return mHandler;
}
}
\ No newline at end of file
diff --git a/tests/DumpRenderTree2/src/com/android/dumprendertree2/LayoutTestsRunnerThread.java b/tests/DumpRenderTree2/src/com/android/dumprendertree2/LayoutTestsRunnerThread.java
index d75579e..8c72e3b 100644
--- a/tests/DumpRenderTree2/src/com/android/dumprendertree2/LayoutTestsRunnerThread.java
+++ b/tests/DumpRenderTree2/src/com/android/dumprendertree2/LayoutTestsRunnerThread.java
@@ -54,6 +54,15 @@
File.separator + "android" +
File.separator + "LayoutTests-results";
+ /** TODO: Make it a setting */
+ private static final String EXPECTED_RESULT_SECONDARY_LOCATION_RELATIVE_DIR_PREFIX =
+ "platform" + File.separator +
+ "android-v8" + File.separator;
+
+ /** TODO: Make these settings */
+ private static final String TEXT_RESULT_EXTENSION = "txt";
+ private static final String IMAGE_RESULT_EXTENSION = "png";
+
/** A list containing relative paths of tests to run */
private LinkedList<String> mTestsList = new LinkedList<String>();
@@ -72,8 +81,7 @@
*/
private String mRelativePath;
- /** A handler obtained from UI thread to handle messages concerning updating the display */
- private Handler mUiDisplayHandler;
+ private LayoutTestsRunner mActivity;
private LayoutTest mCurrentTest;
private String mCurrentTestPath;
@@ -87,10 +95,10 @@
* @param path
* @param uiDisplayHandler
*/
- public LayoutTestsRunnerThread(String path, Handler uiDisplayHandler) {
+ public LayoutTestsRunnerThread(String path, LayoutTestsRunner activity) {
mFileFilter = new FileFilter(TESTS_ROOT_DIR_PATH);
mRelativePath = path;
- mUiDisplayHandler = uiDisplayHandler;
+ mActivity = activity;
/** This creates a handler that runs on the thread that _created_ this thread */
mHandlerOnUiThread = new Handler() {
@@ -111,6 +119,9 @@
mSummarizer = new Summarizer(mFileFilter, RESULTS_ROOT_DIR_PATH);
+ /** A handler obtained from UI thread to handle messages concerning updating the display */
+ final Handler uiDisplayHandler = mActivity.getHandler();
+
/** Creates a new handler in _this_ thread */
mHandler = new Handler() {
@Override
@@ -118,7 +129,7 @@
switch (msg.what) {
case MSG_TEST_FINISHED:
onTestFinished(mCurrentTest);
- mUiDisplayHandler.obtainMessage(LayoutTestsRunner.MSG_UPDATE_PROGRESS,
+ uiDisplayHandler.obtainMessage(LayoutTestsRunner.MSG_UPDATE_PROGRESS,
mCurrentTestCount, mTotalTestCount).sendToTarget();
runNextTest();
break;
@@ -135,9 +146,9 @@
/** Populate the tests' list accordingly */
if (file.isDirectory()) {
- mUiDisplayHandler.sendEmptyMessage(LayoutTestsRunner.MSG_SHOW_PROGRESS_DIALOG);
+ uiDisplayHandler.sendEmptyMessage(LayoutTestsRunner.MSG_SHOW_PROGRESS_DIALOG);
preloadTests(mRelativePath);
- mUiDisplayHandler.sendEmptyMessage(LayoutTestsRunner.MSG_DISMISS_PROGRESS_DIALOG);
+ uiDisplayHandler.sendEmptyMessage(LayoutTestsRunner.MSG_DISMISS_PROGRESS_DIALOG);
} else {
mTestsList.addLast(mRelativePath);
mTotalTestCount = 1;
@@ -194,7 +205,8 @@
mCurrentTestCount++;
mCurrentTestPath = mTestsList.removeFirst();
- mCurrentTest = new LayoutTest(mCurrentTestPath, mHandler);
+ mCurrentTest = new LayoutTest(mCurrentTestPath, TESTS_ROOT_DIR_PATH,
+ mHandler.obtainMessage(MSG_TEST_FINISHED), mActivity);
/**
* This will run the test on UI thread. The reason why we need to run the test
@@ -221,22 +233,30 @@
}
private void dumpResultData(AbstractResult result, String testPath) {
- String resultPath = null;
+ dumpActualTextResult(result, testPath);
+ dumpActualImageResult(result, testPath);
+ }
- switch (result.getType()) {
- case TEXT:
- resultPath = FileFilter.setPathEnding(testPath, "-actual.txt");
- break;
-
- case PIXEL:
- /** TODO: Check if it is for sure *.bmp */
- resultPath = FileFilter.setPathEnding(testPath, "-actual.bmp");
- break;
+ private void dumpActualTextResult(AbstractResult result, String testPath) {
+ String actualTextResult = result.getActualTextResult();
+ if (actualTextResult == null) {
+ return;
}
- /** Dump the result */
+ String resultPath = FileFilter.setPathEnding(testPath, "-actual." + TEXT_RESULT_EXTENSION);
FsUtils.writeDataToStorage(new File(RESULTS_ROOT_DIR_PATH, resultPath),
- result.getData(), false);
+ actualTextResult.getBytes(), false);
+ }
+
+ private void dumpActualImageResult(AbstractResult result, String testPath) {
+ byte[] actualImageResult = result.getActualImageResult();
+ if (actualImageResult == null) {
+ return;
+ }
+
+ String resultPath = FileFilter.setPathEnding(testPath, "-actual." + IMAGE_RESULT_EXTENSION);
+ FsUtils.writeDataToStorage(new File(RESULTS_ROOT_DIR_PATH, resultPath),
+ actualImageResult, false);
}
private void onFinishedTests() {
@@ -249,4 +269,24 @@
* - zip results
* - run more tests before zipping */
}
-}
+
+ public static String getExpectedTextResult(String relativePath) {
+ return new String(getExpectedResult(relativePath, TEXT_RESULT_EXTENSION));
+ }
+
+ public static byte[] getExpectedImageResult(String relativePath) {
+ return getExpectedResult(relativePath, IMAGE_RESULT_EXTENSION);
+ }
+
+ private static byte[] getExpectedResult(String relativePath, String extension) {
+ relativePath = FileFilter.setPathEnding(relativePath, "-expected." + extension);
+
+ byte[] bytes = FsUtils.readDataFromStorage(new File(TESTS_ROOT_DIR_PATH, relativePath));
+ if (bytes == null) {
+ relativePath = EXPECTED_RESULT_SECONDARY_LOCATION_RELATIVE_DIR_PREFIX + relativePath;
+ bytes = FsUtils.readDataFromStorage(new File(TESTS_ROOT_DIR_PATH, relativePath));
+ }
+
+ return bytes;
+ }
+}
\ No newline at end of file
diff --git a/tests/DumpRenderTree2/src/com/android/dumprendertree2/Summarizer.java b/tests/DumpRenderTree2/src/com/android/dumprendertree2/Summarizer.java
index 9b28fcd..4d15bb5 100644
--- a/tests/DumpRenderTree2/src/com/android/dumprendertree2/Summarizer.java
+++ b/tests/DumpRenderTree2/src/com/android/dumprendertree2/Summarizer.java
@@ -34,21 +34,70 @@
private static final String LOG_TAG = "Summarizer";
private static final String CSS =
- "body {font-family: Verdana;} a {font-size: 12px; color: black; } h3" +
- "{ font-size: 20px; padding: 0; margin: 0; margin-bottom: 10px; } " +
- ".space { margin-top:30px; } table.diff_both, table.diff_both tr, " +
- "table.diff_both td { border: 0; padding: 0; margin: 0; } " +
- "table.diff_both { width: 600px; } table.diff_both td.dleft, " +
- "table.diff_both td.dright { border: 0; width: 50%; } " +
- "table.diff " + "table.diff_both caption { text-align: left; margin-bottom: 3px;}" +
- "{ border:1px solid black; border-collapse: collapse; width: 100%; } " +
- "table.diff tr { vertical-align: top; border-bottom: 1px dashed black; " +
- "border-top: 1px dashed black; font-size: 15px; } table.diff td.linecount " +
- "{ border-right: 1px solid; background-color: #aaa; width: 20px; text-align: " +
- "right; padding-right: 1px; padding-top: 2px; padding-bottom: 2px; } " +
- "table.diff td.line { padding-left: 3px; padding-top: 2px; " +
- "padding-bottom: 2px; } span.eql { background-color: #f3f3f3;} " +
- "span.del { background-color: #ff8888; } span.ins { background-color: #88ff88; }";
+ "* {" +
+ " font-family: Verdana;" +
+ " border: 0;" +
+ " margin: 0;" +
+ " padding: 0;}" +
+ "body {" +
+ " margin: 10px;}" +
+ "a {" +
+ " font-size: 12px;" +
+ " color: black;}" +
+ "h1 {" +
+ " font-size: 33px;" +
+ " margin: 4px 0 4px 0;}" +
+ "h2 {" +
+ " font-size:22px;" +
+ " margin: 20px 0 3px 0;}" +
+ "h3 {" +
+ " font-size: 20px;" +
+ " margin-bottom: 6px;}" +
+ "table.visual_diff {" +
+ " border-bottom: 0px solid;" +
+ " border-collapse: collapse;" +
+ " width: 100%;" +
+ " margin-bottom: 3px;}" +
+ "table.visual_diff tr.headers td {" +
+ " border-bottom: 1px solid;" +
+ " border-top: 0;" +
+ " padding-bottom: 3px;}" +
+ "table.visual_diff tr.results td {" +
+ " border-top: 1px dashed;" +
+ " border-right: 1px solid;" +
+ " font-size: 15px;" +
+ " vertical-align: top;}" +
+ "table.visual_diff tr.results td.line_count {" +
+ " background-color:#aaa;" +
+ " min-width:20px;" +
+ " text-align: right;" +
+ " border-right: 1px solid;" +
+ " border-left: 1px solid;" +
+ " padding: 2px 1px 2px 0px;}" +
+ "table.visual_diff tr.results td.line {" +
+ " padding: 2px 0px 2px 4px;" +
+ " border-right: 1px solid;" +
+ " width: 49.8%;}" +
+ "table.visual_diff tr.footers td {" +
+ " border-top: 1px solid;" +
+ " border-bottom: 0;}" +
+ "table.visual_diff tr td.space {" +
+ " border: 0;" +
+ " width: 0.4%}" +
+ "div.space {" +
+ " margin-top:30px;}" +
+ "span.eql {" +
+ " background-color: #f3f3f3;}" +
+ "span.del {" +
+ " background-color: #ff8888; }" +
+ "span.ins {" +
+ " background-color: #88ff88; }" +
+ "span.fail {" +
+ " color: red;}" +
+ "span.pass {" +
+ " color: green;}" +
+ "span.time_out {" +
+ " color: orange;}";
private static final String HTML_DIFF_BEGINNING = "<html><head><style type=\"text/css\">" +
CSS + "</style></head><body>";
private static final String HTML_DIFF_ENDING = "</body></html>";
@@ -116,7 +165,7 @@
return;
}
- AbstractResult.ResultCode resultCode = result.getCode();
+ AbstractResult.ResultCode resultCode = result.getResultCode();
/** Add the test to correct collection according to its result code */
if (mFileFilter.isIgnoreRes(testPath)) {
@@ -149,9 +198,9 @@
StringBuilder html = new StringBuilder();
html.append(HTML_DIFF_BEGINNING);
Set<String> results;
- html.append("<h1>Tests that were _not_ ignored</h1>");
+ html.append("<h1>NOT ignored</h1>");
appendResultsMap(mResults, html);
- html.append("<h1>Tests that _were_ ignored</h1>");
+ html.append("<h1>Ignored</h1>");
appendResultsMap(mResultsIgnored, html);
html.append(HTML_DIFF_ENDING);
FsUtils.writeDataToStorage(new File(mResultsRootDirPath, HTML_DIFF_INDEX_RELATIVE_PATH),
@@ -179,4 +228,4 @@
}
}
}
-}
+}
\ No newline at end of file
diff --git a/tests/DumpRenderTree2/src/com/android/dumprendertree2/TextResult.java b/tests/DumpRenderTree2/src/com/android/dumprendertree2/TextResult.java
new file mode 100644
index 0000000..e3dcef0
--- /dev/null
+++ b/tests/DumpRenderTree2/src/com/android/dumprendertree2/TextResult.java
@@ -0,0 +1,153 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.dumprendertree2;
+
+import android.os.Handler;
+import android.os.Message;
+import android.webkit.WebView;
+
+/**
+ * A result object for which the expected output is text. It does not have an image
+ * expected result.
+ *
+ * <p>Created if layoutTestController.dumpAsText() was called.
+ */
+public class TextResult extends AbstractResult {
+
+ private static final int MSG_DOCUMENT_AS_TEXT = 0;
+
+ private String mExpectedResult;
+ private String mActualResult;
+ private String mRelativePath;
+ private ResultCode mResultCode;
+ private Message mResultObtainedMsg;
+
+ private Handler mHandler = new Handler() {
+ @Override
+ public void handleMessage(Message msg) {
+ if (msg.what == MSG_DOCUMENT_AS_TEXT) {
+ mActualResult = (String) msg.obj;
+ mResultObtainedMsg.sendToTarget();
+ }
+ }
+ };
+
+ public TextResult(String relativePath) {
+ mRelativePath = relativePath;
+ }
+
+ @Override
+ public ResultCode getResultCode() {
+ if (mResultCode != null) {
+ return mResultCode;
+ }
+
+ if (mExpectedResult == null) {
+ mResultCode = AbstractResult.ResultCode.FAIL_NO_EXPECTED_RESULT;
+ } else if (!mExpectedResult.equals(mActualResult)) {
+ mResultCode = AbstractResult.ResultCode.FAIL_RESULT_DIFFERS;
+ } else {
+ mResultCode = AbstractResult.ResultCode.PASS;
+ }
+
+ return mResultCode;
+ }
+
+ @Override
+ public byte[] getActualImageResult() {
+ return null;
+ }
+
+ @Override
+ public String getActualTextResult() {
+ return mActualResult;
+ }
+
+ @Override
+ public void setExpectedImageResult(byte[] expectedResult) {
+ /** This method is not applicable to this type of result */
+ }
+
+ @Override
+ public void setExpectedTextResult(String expectedResult) {
+ mExpectedResult = expectedResult;
+ }
+
+ @Override
+ public String getDiffAsHtml() {
+ /** TODO: just a stub
+ * Probably needs rethinking anyway - just one table would be much better
+ * This will require some changes in Summarizer in CSS as well */
+ StringBuilder html = new StringBuilder();
+ html.append("<h3>");
+ html.append(mRelativePath);
+ html.append("</h3>");
+ html.append("<table class=\"visual_diff\">");
+
+ html.append("<tr class=\"headers\">");
+ html.append("<td colspan=\"2\">Expected result:</td>");
+ html.append("<td class=\"space\"></td>");
+ html.append("<td colspan=\"2\">Actual result:</td>");
+ html.append("</tr>");
+
+ html.append("<tr class=\"results\">");
+ html.append("<td class=\"line_count\">1:</td>");
+ html.append("<td class=\"line\">");
+ if (mExpectedResult == null) {
+ html.append("NULL");
+ } else {
+ html.append(mExpectedResult.replace("\n", "<br />"));
+ }
+ html.append("</td>");
+ html.append("<td class=\"space\"></td>");
+ html.append("<td class=\"line_count\">1:</td>");
+ html.append("<td class=\"line\">");
+ if (mActualResult == null) {
+ html.append("NULL");
+ } else {
+ html.append(mActualResult.replace("\n", "<br />"));
+ }
+ html.append("</td>");
+ html.append("</tr>");
+
+ html.append("<tr class=\"footers\">");
+ html.append("<td colspan=\"2\"></td>");
+ html.append("<td class=\"space\"></td>");
+ html.append("<td colspan=\"2\"></td>");
+ html.append("</tr>");
+
+ html.append("</table>");
+
+ return html.toString();
+ }
+
+ @Override
+ public TestType getType() {
+ return TestType.TEXT;
+ }
+
+ @Override
+ public void obtainActualResult(WebView webview, Message resultObtainedMsg) {
+ mResultObtainedMsg = resultObtainedMsg;
+ Message msg = mHandler.obtainMessage(MSG_DOCUMENT_AS_TEXT);
+
+ /** TODO: mDumpTopFrameAsText and mDumpChildFramesAsText */
+ msg.arg1 = 1;
+ msg.arg2 = 0;
+ webview.documentAsText(msg);
+ }
+}
\ No newline at end of file