Merge "Basic implementation of input device properties."
diff --git a/cmds/servicemanager/service_manager.c b/cmds/servicemanager/service_manager.c
index cacfe14..7fa9a39 100644
--- a/cmds/servicemanager/service_manager.c
+++ b/cmds/servicemanager/service_manager.c
@@ -361,6 +361,7 @@
 
     selinux_enabled = is_selinux_enabled();
     sehandle = selinux_android_service_context_handle();
+    selinux_status_open(true);
 
     if (selinux_enabled > 0) {
         if (sehandle == NULL) {
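+
The new selinux_status_open(true) call maps the kernel's SELinux status page (falling back to netlink when the page is unavailable), which lets servicemanager notice policy reloads without a polling socket. Below is a minimal sketch of how that status page is typically consumed; the libselinux calls are real APIs, but wiring them into servicemanager this way is an assumption for illustration, not part of this patch.

    #include <selinux/avc.h>      /* selinux_status_updated() */
    #include <selinux/label.h>    /* selabel_close() */
    #include <selinux/android.h>  /* selinux_android_service_context_handle() */

    /* Hedged sketch: re-acquire the service context handle after a policy reload.
     * selinux_status_updated() returns > 0 once the status page reports a change.
     * 'sehandle' is the file-scope handle already defined in service_manager.c. */
    static void refresh_sehandle_if_needed(void)
    {
        if (selinux_status_updated() > 0) {
            struct selabel_handle *new_handle = selinux_android_service_context_handle();
            if (new_handle) {
                selabel_close(sehandle);
                sehandle = new_handle;
            }
        }
    }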
diff --git a/include/binder/IBatteryStats.h b/include/binder/IBatteryStats.h
index 7ddac57..5f38186 100644
--- a/include/binder/IBatteryStats.h
+++ b/include/binder/IBatteryStats.h
@@ -36,6 +36,12 @@
     virtual void noteStopAudio(int uid) = 0;
     virtual void noteResetVideo() = 0;
     virtual void noteResetAudio() = 0;
+    virtual void noteFlashlightOn(int uid) = 0;
+    virtual void noteFlashlightOff(int uid) = 0;
+    virtual void noteStartCamera(int uid) = 0;
+    virtual void noteStopCamera(int uid) = 0;
+    virtual void noteResetCamera() = 0;
+    virtual void noteResetFlashlight() = 0;
 
     enum {
         NOTE_START_SENSOR_TRANSACTION = IBinder::FIRST_CALL_TRANSACTION,
@@ -46,6 +52,12 @@
         NOTE_STOP_AUDIO_TRANSACTION,
         NOTE_RESET_VIDEO_TRANSACTION,
         NOTE_RESET_AUDIO_TRANSACTION,
+        NOTE_FLASHLIGHT_ON_TRANSACTION,
+        NOTE_FLASHLIGHT_OFF_TRANSACTION,
+        NOTE_START_CAMERA_TRANSACTION,
+        NOTE_STOP_CAMERA_TRANSACTION,
+        NOTE_RESET_CAMERA_TRANSACTION,
+        NOTE_RESET_FLASHLIGHT_TRANSACTION
     };
 };
 
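The six new callbacks mirror the existing video/audio pattern: the per-UID on/off pairs are balanced by the caller, and the reset methods clear any accumulated state when the reporting service restarts. A hedged client-side sketch (not part of this change) follows; the "batterystats" service name is an assumption based on how the Java BatteryStatsService registers itself.

    #include <binder/IBatteryStats.h>
    #include <binder/IServiceManager.h>
    #include <utils/String16.h>

    using namespace android;

    // Hedged sketch: report flashlight usage on behalf of a client UID.
    static void reportFlashlight(int clientUid, bool on) {
        sp<IBatteryStats> stats = interface_cast<IBatteryStats>(
                defaultServiceManager()->checkService(String16("batterystats")));
        if (stats == NULL) {
            return;  // battery stats service not (yet) available
        }
        if (on) {
            stats->noteFlashlightOn(clientUid);
        } else {
            stats->noteFlashlightOff(clientUid);
        }
    }
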
diff --git a/include/media/hardware/HardwareAPI.h b/include/media/hardware/HardwareAPI.h
index d5f42be..a682ab7 100644
--- a/include/media/hardware/HardwareAPI.h
+++ b/include/media/hardware/HardwareAPI.h
@@ -52,9 +52,9 @@
     OMX_BOOL enable;
 };
 
-// A pointer to this struct is passed to OMX_SetParameter() when the extension
-// index "OMX.google.android.index.storeMetaDataInBuffers"
-// is given.
+// A pointer to this struct is passed to OMX_SetParameter() when the extension index
+// "OMX.google.android.index.storeMetaDataInBuffers" or
+// "OMX.google.android.index.storeANWBufferInMetadata" is given.
 //
 // When meta data is stored in the video buffers passed between OMX clients
 // and OMX components, interpretation of the buffer data is up to the
@@ -62,19 +62,33 @@
 // some information helpful for the receiver to locate the actual data.
 // The buffer receiver thus needs to know how to interpret what is stored
 // in these buffers, with mechanisms pre-determined externally. How to
-// interpret the meta data is outside of the scope of this method.
+// interpret the meta data is outside of the scope of this parameter.
 //
-// Currently, this is specifically used to pass meta data from video source
-// (camera component, for instance) to video encoder to avoid memcpying of
-// input video frame data. To do this, bStoreMetaData is set to OMX_TRUE.
-// If bStoreMetaData is set to false, real YUV frame data will be stored
-// in the buffers. In addition, if no OMX_SetParameter() call is made
-// with the corresponding extension index, real YUV data is stored
-// in the buffers.
+// Currently, this is used to pass meta data from a video source (camera component, for instance)
+// to the video encoder to avoid memcpying of input video frame data, as well as to pass dynamic
+// output buffers to the video decoder. To do this, bStoreMetaData is set to OMX_TRUE.
 //
-// For video decoder output port, the metadata buffer layout is defined below.
+// If bStoreMetaData is set to false, real YUV frame data will be stored in input buffers, and
+// the output buffers contain either real YUV frame data, or are themselves native handles as
+// directed by enable/use-android-native-buffer parameter settings.
+// In addition, if no OMX_SetParameter() call is made on a port with the corresponding extension
+// index, the component should not assume that the client is not using metadata mode for the port.
 //
-// Metadata buffers are registered with the component using UseBuffer calls.
+// If the component supports this using the "OMX.google.android.index.storeANWBufferInMetadata"
+// extension and bStoreMetaData is set to OMX_TRUE, data is passed using the VideoNativeMetadata
+// layout as defined below. Each buffer will be accompanied by a fence. The fence must signal
+// before the buffer can be used (e.g. read from or written into). When returning such a buffer to
+// the client, the component must provide a new fence that must signal before the returned buffer
+// can be used (e.g. read from or written into). The component owns the incoming fenceFd and must
+// close it once the fence has signaled. The client owns and must close the returned fence fd.
+//
+// If the component supports this using the "OMX.google.android.index.storeMetaDataInBuffers"
+// extension and bStoreMetaData is set to OMX_TRUE, data is passed using the VideoGrallocMetadata
+// layout defined below. Camera input can also be passed as "CameraSource", the layout of which
+// is vendor dependent.
+//
+// Metadata buffers are registered with the component using UseBuffer calls, or can be allocated
+// by the component for encoder-metadata-output buffers.
 struct StoreMetaDataInBuffersParams {
     OMX_U32 nSize;
     OMX_VERSIONTYPE nVersion;
@@ -84,9 +98,21 @@
 
 // Meta data buffer layout used to transport output frames to the decoder for
 // dynamic buffer handling.
+struct VideoGrallocMetadata {
+    MetadataBufferType eType;               // must be kMetadataBufferTypeGrallocSource
+    buffer_handle_t hHandle;
+};
+
+// Legacy name for VideoGrallocMetadata struct.
 struct VideoDecoderOutputMetaData {
-  MetadataBufferType eType;
-  buffer_handle_t pHandle;
+    MetadataBufferType eType;               // must be kMetadataBufferTypeGrallocSource
+    buffer_handle_t pHandle;
+};
+
+struct VideoNativeMetadata {
+    MetadataBufferType eType;               // must be kMetadataBufferTypeANWBuffer
+    struct ANativeWindowBuffer* pBuffer;
+    int nFenceFd;                           // -1 if unused
 };
 
 // A pointer to this struct is passed to OMX_SetParameter() when the extension
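+
A hedged sketch of the fence handling described in the comment above, as a component might apply it when consuming a VideoNativeMetadata buffer; sync_wait() is the libsync helper, and the one-second timeout is an arbitrary choice for illustration only.

    #include <sync/sync.h>      // sync_wait()
    #include <unistd.h>         // close()
    #include <utils/Errors.h>
    #include <media/hardware/HardwareAPI.h>

    using namespace android;

    // Hedged sketch: wait on and retire the incoming fence before touching pBuffer.
    static status_t prepareNativeMetadataBuffer(VideoNativeMetadata *meta) {
        if (meta->eType != kMetadataBufferTypeANWBuffer) {
            return BAD_VALUE;
        }
        if (meta->nFenceFd >= 0) {
            int err = sync_wait(meta->nFenceFd, 1000 /* ms, arbitrary for this sketch */);
            close(meta->nFenceFd);   // the component owns the incoming fence fd
            meta->nFenceFd = -1;
            if (err < 0) {
                return UNKNOWN_ERROR;
            }
        }
        // meta->pBuffer may now be read from or written into. Before returning the
        // buffer, nFenceFd would be set to a new fence (or left at -1 if none is needed).
        return OK;
    }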
diff --git a/include/media/hardware/MetadataBufferType.h b/include/media/hardware/MetadataBufferType.h
index 5876c40..b765203 100644
--- a/include/media/hardware/MetadataBufferType.h
+++ b/include/media/hardware/MetadataBufferType.h
@@ -77,28 +77,43 @@
      * GRalloc buffer. The encoder needs to interpret this GRalloc handle
      * and encode the frames.
      * --------------------------------------------------------------
-     * |  kMetadataBufferTypeGrallocSource | sizeof(buffer_handle_t) |
+     * |  kMetadataBufferTypeGrallocSource | buffer_handle_t buffer |
      * --------------------------------------------------------------
+     *
+     * See the VideoGrallocMetadata structure.
      */
     kMetadataBufferTypeGrallocSource = 1,
 
     /*
      * kMetadataBufferTypeGraphicBuffer is used to indicate that
      * the payload of the metadata buffers can be interpreted as
-     * a GraphicBuffer.  It is only to be used by software encoders.
-     * In this case, the metadata that the encoder receives
-     * will have a byte stream that consists of two parts:
+     * an ANativeWindowBuffer, and that a fence is provided.
+     *
+     * In this case, the metadata will have a byte stream that consists of three parts:
      * 1. First, there is an integer indicating that the metadata
-     * contains a GraphicBuffer (kMetadataBufferTypeGraphicBuffer)
-     * 2. This is followed by the pointer to the GraphicBuffer that
-     * is to be encoded.  Encoder must not create a sp<> from this
-     * graphic buffer, or free it, as it does not actually own this
-     * buffer.
-     * --------------------------------------------------------------
-     * |  kMetadataBufferTypeGraphicBuffer | sizeof(GraphicBuffer *) |
-     * --------------------------------------------------------------
+     * contains an ANativeWindowBuffer (kMetadataBufferTypeANWBuffer)
+     * 2. This is followed by the pointer to the ANativeWindowBuffer.
+     * The codec must not free this buffer, as it does not actually own it.
+     * 3. Finally, there is an integer containing a fence file descriptor.
+     * The codec must wait on the fence before encoding or decoding into this
+     * buffer. When the buffer is returned, the codec must replace this file descriptor
+     * with a new fence that will be waited on before the buffer is replaced
+     * (encoder) or read (decoder).
+     * ---------------------------------
+     * |  kMetadataBufferTypeANWBuffer |
+     * ---------------------------------
+     * |  ANativeWindowBuffer *buffer  |
+     * ---------------------------------
+     * |  int fenceFd                  |
+     * ---------------------------------
+     *
+     * See the VideoNativeMetadata structure.
      */
-    kMetadataBufferTypeGraphicBuffer = 2,
+    kMetadataBufferTypeANWBuffer = 2,
+
+    /* This value is used by the framework, but is never used inside a metadata buffer. */
+    kMetadataBufferTypeInvalid = -1,
+
 
     // Add more here...
 
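A hedged sketch tying these type tags to the structs declared in HardwareAPI.h: a receiver inspects the leading MetadataBufferType before deciding how to interpret the rest of the payload. The helper name is made up for illustration.

    #include <media/hardware/HardwareAPI.h>
    #include <media/hardware/MetadataBufferType.h>

    using namespace android;

    // Hedged sketch: dispatch on the metadata type tag before touching the payload.
    static void inspectMetadataBuffer(void *data) {
        switch (*(MetadataBufferType *)data) {
            case kMetadataBufferTypeGrallocSource: {
                VideoGrallocMetadata *meta = (VideoGrallocMetadata *)data;
                // meta->hHandle is a gralloc buffer_handle_t
                (void)meta;
                break;
            }
            case kMetadataBufferTypeANWBuffer: {
                VideoNativeMetadata *meta = (VideoNativeMetadata *)data;
                // wait on meta->nFenceFd (if >= 0) before using meta->pBuffer
                (void)meta;
                break;
            }
            default:
                // kMetadataBufferTypeCameraSource, kMetadataBufferTypeInvalid, etc.
                break;
        }
    }
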
diff --git a/libs/binder/IBatteryStats.cpp b/libs/binder/IBatteryStats.cpp
index 8f3b7b4..e32c628 100644
--- a/libs/binder/IBatteryStats.cpp
+++ b/libs/binder/IBatteryStats.cpp
@@ -89,6 +89,47 @@
         data.writeInterfaceToken(IBatteryStats::getInterfaceDescriptor());
         remote()->transact(NOTE_RESET_AUDIO_TRANSACTION, data, &reply);
     }
+
+    virtual void noteFlashlightOn(int uid) {
+        Parcel data, reply;
+        data.writeInterfaceToken(IBatteryStats::getInterfaceDescriptor());
+        data.writeInt32(uid);
+        remote()->transact(NOTE_FLASHLIGHT_ON_TRANSACTION, data, &reply);
+    }
+
+    virtual void noteFlashlightOff(int uid) {
+        Parcel data, reply;
+        data.writeInterfaceToken(IBatteryStats::getInterfaceDescriptor());
+        data.writeInt32(uid);
+        remote()->transact(NOTE_FLASHLIGHT_OFF_TRANSACTION, data, &reply);
+    }
+
+    virtual void noteStartCamera(int uid) {
+        Parcel data, reply;
+        data.writeInterfaceToken(IBatteryStats::getInterfaceDescriptor());
+        data.writeInt32(uid);
+        remote()->transact(NOTE_START_CAMERA_TRANSACTION, data, &reply);
+    }
+
+    virtual void noteStopCamera(int uid) {
+        Parcel data, reply;
+        data.writeInterfaceToken(IBatteryStats::getInterfaceDescriptor());
+        data.writeInt32(uid);
+        remote()->transact(NOTE_STOP_CAMERA_TRANSACTION, data, &reply);
+    }
+
+    virtual void noteResetCamera() {
+        Parcel data, reply;
+        data.writeInterfaceToken(IBatteryStats::getInterfaceDescriptor());
+        remote()->transact(NOTE_RESET_CAMERA_TRANSACTION, data, &reply);
+    }
+
+    virtual void noteResetFlashlight() {
+        Parcel data, reply;
+        data.writeInterfaceToken(IBatteryStats::getInterfaceDescriptor());
+        remote()->transact(NOTE_RESET_FLASHLIGHT_TRANSACTION, data, &reply);
+    }
+
 };
 
 IMPLEMENT_META_INTERFACE(BatteryStats, "com.android.internal.app.IBatteryStats");
@@ -155,6 +196,46 @@
             reply->writeNoException();
             return NO_ERROR;
         } break;
+        case NOTE_FLASHLIGHT_ON_TRANSACTION: {
+            CHECK_INTERFACE(IBatteryStats, data, reply);
+            int uid = data.readInt32();
+            noteFlashlightOn(uid);
+            reply->writeNoException();
+            return NO_ERROR;
+        } break;
+        case NOTE_FLASHLIGHT_OFF_TRANSACTION: {
+            CHECK_INTERFACE(IBatteryStats, data, reply);
+            int uid = data.readInt32();
+            noteFlashlightOff(uid);
+            reply->writeNoException();
+            return NO_ERROR;
+        } break;
+        case NOTE_START_CAMERA_TRANSACTION: {
+            CHECK_INTERFACE(IBatteryStats, data, reply);
+            int uid = data.readInt32();
+            noteStartCamera(uid);
+            reply->writeNoException();
+            return NO_ERROR;
+        } break;
+        case NOTE_STOP_CAMERA_TRANSACTION: {
+            CHECK_INTERFACE(IBatteryStats, data, reply);
+            int uid = data.readInt32();
+            noteStopCamera(uid);
+            reply->writeNoException();
+            return NO_ERROR;
+        } break;
+        case NOTE_RESET_CAMERA_TRANSACTION: {
+            CHECK_INTERFACE(IBatteryStats, data, reply);
+            noteResetCamera();
+            reply->writeNoException();
+            return NO_ERROR;
+        } break;
+        case NOTE_RESET_FLASHLIGHT_TRANSACTION: {
+            CHECK_INTERFACE(IBatteryStats, data, reply);
+            noteResetFlashlight();
+            reply->writeNoException();
+            return NO_ERROR;
+        } break;
         default:
             return BBinder::onTransact(code, data, reply, flags);
     }
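+
A hedged sketch (not part of the patch) of a concrete BnBatteryStats, e.g. as a native test double: the pre-existing method signatures are assumed to match their declarations in IBatteryStats.h, and only the callbacks added in this change record anything.

    #include <binder/IBatteryStats.h>

    using namespace android;

    // Hedged sketch: a service-side stub that records the new camera/flashlight callbacks.
    class FakeBatteryStats : public BnBatteryStats {
    public:
        FakeBatteryStats() : mCameraUid(-1), mFlashlightUid(-1) { }

        // Pre-existing interface methods: trivial no-op overrides.
        virtual void noteStartSensor(int /*uid*/, int /*sensor*/) { }
        virtual void noteStopSensor(int /*uid*/, int /*sensor*/) { }
        virtual void noteStartVideo(int /*uid*/) { }
        virtual void noteStopVideo(int /*uid*/) { }
        virtual void noteStartAudio(int /*uid*/) { }
        virtual void noteStopAudio(int /*uid*/) { }
        virtual void noteResetVideo() { }
        virtual void noteResetAudio() { }

        // Methods added in this change.
        virtual void noteFlashlightOn(int uid) { mFlashlightUid = uid; }
        virtual void noteFlashlightOff(int /*uid*/) { mFlashlightUid = -1; }
        virtual void noteStartCamera(int uid) { mCameraUid = uid; }
        virtual void noteStopCamera(int /*uid*/) { mCameraUid = -1; }
        virtual void noteResetCamera() { mCameraUid = -1; }
        virtual void noteResetFlashlight() { mFlashlightUid = -1; }

    private:
        int mCameraUid;
        int mFlashlightUid;
    };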
diff --git a/libs/binder/Parcel.cpp b/libs/binder/Parcel.cpp
index 015866b..c1cfb1e 100644
--- a/libs/binder/Parcel.cpp
+++ b/libs/binder/Parcel.cpp
@@ -451,7 +451,7 @@
     // Count objects in range
     for (int i = 0; i < (int) size; i++) {
         size_t off = objects[i];
-        if ((off >= offset) && (off < offset + len)) {
+        if ((off >= offset) && (off + sizeof(flat_binder_object) <= offset + len)) {
             if (firstIndex == -1) {
                 firstIndex = i;
             }
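+
The tightened bound matters because the old test only checked where an object starts: a flat_binder_object whose header began just inside [offset, offset + len) but extended past the end of the range was still counted. A hedged restatement of the predicate, with the object size passed in so the snippet stands alone:

    #include <stddef.h>

    // Hedged sketch: an object at 'off' only belongs to the range if it fits entirely.
    // In Parcel.cpp, objSize is sizeof(flat_binder_object).
    static bool objectFitsInRange(size_t off, size_t objSize, size_t offset, size_t len) {
        return off >= offset && off + objSize <= offset + len;
    }

    // Example: with offset = 0, len = 16 and objSize = 8, an object at off = 12 satisfied
    // the old "off < offset + len" test even though it spills past the range;
    // objectFitsInRange(12, 8, 0, 16) correctly returns false.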
diff --git a/libs/gui/IGraphicBufferAlloc.cpp b/libs/gui/IGraphicBufferAlloc.cpp
index 09b63a1..3009989 100644
--- a/libs/gui/IGraphicBufferAlloc.cpp
+++ b/libs/gui/IGraphicBufferAlloc.cpp
@@ -59,6 +59,9 @@
         if (result == NO_ERROR) {
             graphicBuffer = new GraphicBuffer();
             result = reply.read(*graphicBuffer);
+            if (result != NO_ERROR) {
+                graphicBuffer.clear();
+            }
             // reply.readStrongBinder();
             // here we don't even have to read the BufferReference from
             // the parcel, it'll die with the parcel.
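+
Clearing the sp<> on a failed read means callers get a NULL buffer instead of a default-constructed GraphicBuffer whose unflatten failed part-way. A hedged caller-side sketch, assuming the createGraphicBuffer() signature of this era (width, height, format, usage, error out-parameter):

    #define LOG_TAG "GraphicBufferAllocSketch"
    #include <gui/IGraphicBufferAlloc.h>
    #include <ui/GraphicBuffer.h>
    #include <utils/Log.h>

    using namespace android;

    // Hedged sketch: with the fix above, a single NULL check covers both allocation
    // failure and a reply parcel that could not be read.
    static sp<GraphicBuffer> allocateOrNull(const sp<IGraphicBufferAlloc>& alloc,
            uint32_t w, uint32_t h, PixelFormat format, uint32_t usage) {
        status_t error = NO_ERROR;
        sp<GraphicBuffer> buffer = alloc->createGraphicBuffer(w, h, format, usage, &error);
        if (buffer == NULL) {
            ALOGE("createGraphicBuffer(%ux%u) failed: %d", w, h, error);
        }
        return buffer;
    }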
diff --git a/libs/gui/tests/SurfaceTextureClient_test.cpp b/libs/gui/tests/SurfaceTextureClient_test.cpp
index d750cd0..1a50b24 100644
--- a/libs/gui/tests/SurfaceTextureClient_test.cpp
+++ b/libs/gui/tests/SurfaceTextureClient_test.cpp
@@ -27,6 +27,9 @@
 #include <utils/Log.h>
 #include <utils/Thread.h>
 
+EGLAPI const char* eglQueryStringImplementationANDROID(EGLDisplay dpy, EGLint name);
+#define CROP_EXT_STR "EGL_ANDROID_image_crop"
+
 namespace android {
 
 class SurfaceTextureClientTest : public ::testing::Test {
@@ -615,6 +618,18 @@
 }
 
 TEST_F(SurfaceTextureClientTest, GetTransformMatrixSucceedsAfterFreeingBuffersWithCrop) {
+    // Query to see if the image crop extension exists
+    EGLDisplay dpy = eglGetDisplay(EGL_DEFAULT_DISPLAY);
+    const char* exts = eglQueryStringImplementationANDROID(dpy, EGL_EXTENSIONS);
+    size_t cropExtLen = strlen(CROP_EXT_STR);
+    size_t extsLen = strlen(exts);
+    bool equal = !strcmp(CROP_EXT_STR, exts);
+    bool atStart = !strncmp(CROP_EXT_STR " ", exts, cropExtLen+1);
+    bool atEnd = (cropExtLen+1) < extsLen &&
+            !strcmp(" " CROP_EXT_STR, exts + extsLen - (cropExtLen+1));
+    bool inMiddle = strstr(exts, " " CROP_EXT_STR " ");
+    bool hasEglAndroidImageCrop = equal || atStart || atEnd || inMiddle;
+
     android_native_buffer_t* buf[3];
     float mtx[16] = {};
     android_native_rect_t crop;
@@ -633,15 +648,17 @@
     ASSERT_EQ(OK, native_window_set_buffer_count(mANW.get(), 6)); // frees buffers
     mST->getTransformMatrix(mtx);
 
-    // This accounts for the .5 texel shrink for each edge that's included in the
-    // transform matrix to avoid texturing outside the crop region.
-    EXPECT_EQ(0.5, mtx[0]);
+    // If the egl image crop extension is not present, this accounts for the
+    // .5 texel shrink for each edge that's included in the transform matrix
+    // to avoid texturing outside the crop region. Otherwise the crop is not
+    // included in the transform matrix.
+    EXPECT_EQ(hasEglAndroidImageCrop ? 1 : 0.5, mtx[0]);
     EXPECT_EQ(0.f, mtx[1]);
     EXPECT_EQ(0.f, mtx[2]);
     EXPECT_EQ(0.f, mtx[3]);
 
     EXPECT_EQ(0.f, mtx[4]);
-    EXPECT_EQ(-0.5, mtx[5]);
+    EXPECT_EQ(hasEglAndroidImageCrop ? -1 : -0.5, mtx[5]);
     EXPECT_EQ(0.f, mtx[6]);
     EXPECT_EQ(0.f, mtx[7]);
 
@@ -650,8 +667,8 @@
     EXPECT_EQ(1.f, mtx[10]);
     EXPECT_EQ(0.f, mtx[11]);
 
-    EXPECT_EQ(0.0625f, mtx[12]);
-    EXPECT_EQ(0.5625f, mtx[13]);
+    EXPECT_EQ(hasEglAndroidImageCrop ? 0 : 0.0625f, mtx[12]);
+    EXPECT_EQ(hasEglAndroidImageCrop ? 1 : 0.5625f, mtx[13]);
     EXPECT_EQ(0.f, mtx[14]);
     EXPECT_EQ(1.f, mtx[15]);
 }
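+
The four boolean checks added to the test above amount to a whole-token search in EGL's space-separated extension list (a bare strstr() could false-positive on a longer extension name that merely contains the target). A hedged helper expressing the same logic, not part of the test itself:

    #include <string.h>

    // Hedged sketch: true if 'name' appears as a complete token in a space-separated list.
    static bool hasEglExtension(const char* extensionList, const char* name) {
        const size_t nameLen = strlen(name);
        const char* p = extensionList;
        while ((p = strstr(p, name)) != NULL) {
            const bool atTokenStart = (p == extensionList) || (p[-1] == ' ');
            const bool atTokenEnd = (p[nameLen] == ' ') || (p[nameLen] == '\0');
            if (atTokenStart && atTokenEnd) {
                return true;
            }
            p += nameLen;
        }
        return false;
    }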
diff --git a/libs/ui/GraphicBufferAllocator.cpp b/libs/ui/GraphicBufferAllocator.cpp
index 85e9675..9b265af 100644
--- a/libs/ui/GraphicBufferAllocator.cpp
+++ b/libs/ui/GraphicBufferAllocator.cpp
@@ -104,6 +104,9 @@
     // we have a h/w allocator and h/w buffer is requested
     status_t err;
 
+    // Filter out any usage bits that should not be passed to the gralloc module
+    usage &= GRALLOC_USAGE_ALLOC_MASK;
+
     int outStride = 0;
     err = mAllocDev->alloc(mAllocDev, static_cast<int>(width),
             static_cast<int>(height), format, static_cast<int>(usage), handle,
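+
GRALLOC_USAGE_ALLOC_MASK is defined by <hardware/gralloc.h>; bits outside it are meaningful only to the framework, so they are stripped before the usage word reaches the gralloc HAL. A hedged one-liner showing the same sanitization in isolation:

    #include <hardware/gralloc.h>
    #include <stdint.h>

    // Hedged sketch: keep only the usage bits the gralloc module is expected to understand.
    static inline uint32_t sanitizeGrallocUsage(uint32_t requestedUsage) {
        return requestedUsage & GRALLOC_USAGE_ALLOC_MASK;
    }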
diff --git a/services/surfaceflinger/DispSync.h b/services/surfaceflinger/DispSync.h
index 96efc34..67142b6 100644
--- a/services/surfaceflinger/DispSync.h
+++ b/services/surfaceflinger/DispSync.h
@@ -139,7 +139,7 @@
     enum { MAX_RESYNC_SAMPLES = 32 };
     enum { MIN_RESYNC_SAMPLES_FOR_UPDATE = 3 };
     enum { NUM_PRESENT_SAMPLES = 8 };
-    enum { MAX_RESYNC_SAMPLES_WITHOUT_PRESENT = 12 };
+    enum { MAX_RESYNC_SAMPLES_WITHOUT_PRESENT = 4 };
 
     // mPeriod is the computed period of the modeled vsync events in
     // nanoseconds.