Merge "Pass VP9 Codec Specific Data from the Container" into nyc-dev
diff --git a/camera/Camera.cpp b/camera/Camera.cpp
index c52e581..bf9904c 100644
--- a/camera/Camera.cpp
+++ b/camera/Camera.cpp
@@ -232,6 +232,14 @@
c->releaseRecordingFrame(mem);
}
+void Camera::releaseRecordingFrameHandle(native_handle_t* handle)
+{
+ ALOGV("releaseRecordingFrameHandle");
+ sp <::android::hardware::ICamera> c = mCamera;
+ if (c == 0) return;
+ c->releaseRecordingFrameHandle(handle);
+}
+
// get preview state
bool Camera::previewEnabled()
{
@@ -381,6 +389,35 @@
}
}
+void Camera::recordingFrameHandleCallbackTimestamp(nsecs_t timestamp, native_handle_t* handle)
+{
+ // If recording proxy listener is registered, forward the frame and return.
+ // The other listener (mListener) is ignored because the receiver needs to
+ // call releaseRecordingFrameHandle.
+ sp<ICameraRecordingProxyListener> proxylistener;
+ {
+ Mutex::Autolock _l(mLock);
+ proxylistener = mRecordingProxyListener;
+ }
+ if (proxylistener != NULL) {
+ proxylistener->recordingFrameHandleCallbackTimestamp(timestamp, handle);
+ return;
+ }
+
+ sp<CameraListener> listener;
+ {
+ Mutex::Autolock _l(mLock);
+ listener = mListener;
+ }
+
+ if (listener != NULL) {
+ listener->postRecordingFrameHandleTimestamp(timestamp, handle);
+ } else {
+ ALOGW("No listener was set. Drop a recording frame.");
+ releaseRecordingFrameHandle(handle);
+ }
+}
+
sp<ICameraRecordingProxy> Camera::getRecordingProxy() {
ALOGV("getProxy");
return new RecordingProxy(this);
@@ -406,6 +443,11 @@
mCamera->releaseRecordingFrame(mem);
}
+void Camera::RecordingProxy::releaseRecordingFrameHandle(native_handle_t* handle) {
+ ALOGV("RecordingProxy::releaseRecordingFrameHandle");
+ mCamera->releaseRecordingFrameHandle(handle);
+}
+
Camera::RecordingProxy::RecordingProxy(const sp<Camera>& camera)
{
mCamera = camera;
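For readers tracing the new callback path above, here is a minimal, hypothetical
sketch of a client-side listener consuming handle-based frames. MyListener and
the encoder hand-off are illustrative only; the stubbed pure virtuals come from
the CameraListener interface extended in include/camera/Camera.h later in this
patch, and the key obligation is returning every handle via
releaseRecordingFrameHandle:

    #include <camera/Camera.h>

    using namespace android;

    struct MyListener : public CameraListener {
        sp<Camera> mCamera;
        explicit MyListener(const sp<Camera>& camera) : mCamera(camera) {}

        void notify(int32_t, int32_t, int32_t) override {}
        void postData(int32_t, const sp<IMemory>&,
                      camera_frame_metadata_t*) override {}
        void postDataTimestamp(nsecs_t, int32_t, const sp<IMemory>&) override {}

        // New in this patch: recording frames arrive as native handles.
        void postRecordingFrameHandleTimestamp(nsecs_t /*timestamp*/,
                                               native_handle_t* handle) override {
            // ... hand the buffer to the consumer (e.g. a video encoder) ...
            // The handle is this process's dup'd copy; it must be returned,
            // or the camera cannot recycle the underlying buffer.
            mCamera->releaseRecordingFrameHandle(handle);
        }
    };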
diff --git a/camera/CameraUtils.cpp b/camera/CameraUtils.cpp
index 26eebe3..1676be1 100644
--- a/camera/CameraUtils.cpp
+++ b/camera/CameraUtils.cpp
@@ -122,19 +122,4 @@
return OK;
}
-// Return whether the image data contains a native handle.
-bool CameraUtils::isNativeHandleMetadata(const sp<IMemory>& imageData) {
- if (imageData == nullptr) {
- return false;
- }
-
- if (imageData->size() == sizeof(VideoNativeHandleMetadata)) {
- VideoNativeHandleMetadata *metadata =
- (VideoNativeHandleMetadata*)(imageData->pointer());
- return metadata->eType == kMetadataBufferTypeNativeHandleSource;
- }
-
- return false;
-}
-
} /* namespace android */
diff --git a/camera/ICamera.cpp b/camera/ICamera.cpp
index 37b0a10..0680d7c 100644
--- a/camera/ICamera.cpp
+++ b/camera/ICamera.cpp
@@ -54,6 +54,7 @@
RELEASE_RECORDING_FRAME,
SET_VIDEO_BUFFER_MODE,
SET_VIDEO_BUFFER_TARGET,
+ RELEASE_RECORDING_FRAME_HANDLE,
};
class BpCamera: public BpInterface<ICamera>
@@ -155,21 +156,20 @@
data.writeInterfaceToken(ICamera::getInterfaceDescriptor());
data.writeStrongBinder(IInterface::asBinder(mem));
- native_handle_t *nh = nullptr;
- if (CameraUtils::isNativeHandleMetadata(mem)) {
- VideoNativeHandleMetadata *metadata =
- (VideoNativeHandleMetadata*)(mem->pointer());
- nh = metadata->pHandle;
- data.writeNativeHandle(nh);
- }
-
remote()->transact(RELEASE_RECORDING_FRAME, data, &reply);
+ }
- if (nh) {
- // Close the native handle because camera received a dup copy.
- native_handle_close(nh);
- native_handle_delete(nh);
- }
+ void releaseRecordingFrameHandle(native_handle_t *handle) {
+ ALOGV("releaseRecordingFrameHandle");
+ Parcel data, reply;
+ data.writeInterfaceToken(ICamera::getInterfaceDescriptor());
+ data.writeNativeHandle(handle);
+
+ remote()->transact(RELEASE_RECORDING_FRAME_HANDLE, data, &reply);
+
+ // Close the native handle because camera received a dup copy.
+ native_handle_close(handle);
+ native_handle_delete(handle);
}
status_t setVideoBufferMode(int32_t videoBufferMode)
@@ -368,17 +368,16 @@
ALOGV("RELEASE_RECORDING_FRAME");
CHECK_INTERFACE(ICamera, data, reply);
sp<IMemory> mem = interface_cast<IMemory>(data.readStrongBinder());
-
- if (CameraUtils::isNativeHandleMetadata(mem)) {
- VideoNativeHandleMetadata *metadata =
- (VideoNativeHandleMetadata*)(mem->pointer());
- metadata->pHandle = data.readNativeHandle();
- // releaseRecordingFrame will be responsble to close the native handle.
- }
-
releaseRecordingFrame(mem);
return NO_ERROR;
} break;
+ case RELEASE_RECORDING_FRAME_HANDLE: {
+ ALOGV("RELEASE_RECORDING_FRAME_HANDLE");
+ CHECK_INTERFACE(ICamera, data, reply);
+ // releaseRecordingFrameHandle will be responsible for closing the native handle.
+ releaseRecordingFrameHandle(data.readNativeHandle());
+ return NO_ERROR;
+ } break;
case SET_VIDEO_BUFFER_MODE: {
ALOGV("SET_VIDEO_BUFFER_MODE");
CHECK_INTERFACE(ICamera, data, reply);
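The ownership rule spread across the Bp/Bn pair above is easy to get wrong, so
here is a condensed, illustrative restatement. Per the comments in this patch,
the receiving process ends up with a dup'd copy of the handle, so the sender
must close and delete its local copy once transact() returns. The transaction
code is passed in to keep the sketch self-contained:

    #include <binder/IBinder.h>
    #include <binder/Parcel.h>
    #include <camera/android/hardware/ICamera.h>
    #include <cutils/native_handle.h>

    using namespace android;

    // Condensed sender (Bp) side of a handle-carrying transaction, e.g.
    // code == RELEASE_RECORDING_FRAME_HANDLE. Error handling omitted.
    void sendHandleTransaction(const sp<IBinder>& remote, uint32_t code,
                               native_handle_t* handle) {
        Parcel data, reply;
        data.writeInterfaceToken(hardware::ICamera::getInterfaceDescriptor());
        data.writeNativeHandle(handle);   // receiver will operate on a dup
        remote->transact(code, data, &reply);
        native_handle_close(handle);      // close this process's fds
        native_handle_delete(handle);     // free the handle struct itself
    }
    // Receiver (Bn) side: readNativeHandle() allocates a fresh handle backed
    // by dup'd fds; releaseRecordingFrameHandle() then owns closing/deleting it.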
diff --git a/camera/ICameraClient.cpp b/camera/ICameraClient.cpp
index d058138..68cbfb8 100644
--- a/camera/ICameraClient.cpp
+++ b/camera/ICameraClient.cpp
@@ -31,6 +31,7 @@
NOTIFY_CALLBACK = IBinder::FIRST_CALL_TRANSACTION,
DATA_CALLBACK,
DATA_CALLBACK_TIMESTAMP,
+ RECORDING_FRAME_HANDLE_CALLBACK_TIMESTAMP,
};
class BpCameraClient: public BpInterface<ICameraClient>
@@ -78,15 +79,18 @@
data.writeInt64(timestamp);
data.writeInt32(msgType);
data.writeStrongBinder(IInterface::asBinder(imageData));
- // If imageData is metadata and it contains a native handle, write the native handle to
- // parcel.
- if (CameraUtils::isNativeHandleMetadata(imageData)) {
- VideoNativeHandleMetadata *metadata =
- (VideoNativeHandleMetadata*)(imageData->pointer());
- data.writeNativeHandle(metadata->pHandle);
- }
remote()->transact(DATA_CALLBACK_TIMESTAMP, data, &reply, IBinder::FLAG_ONEWAY);
}
+
+ void recordingFrameHandleCallbackTimestamp(nsecs_t timestamp, native_handle_t* handle) {
+ ALOGV("recordingFrameHandleCallbackTimestamp");
+ Parcel data, reply;
+ data.writeInterfaceToken(ICameraClient::getInterfaceDescriptor());
+ data.writeInt64(timestamp);
+ data.writeNativeHandle(handle);
+ remote()->transact(RECORDING_FRAME_HANDLE_CALLBACK_TIMESTAMP, data, &reply,
+ IBinder::FLAG_ONEWAY);
+ }
};
IMPLEMENT_META_INTERFACE(CameraClient, "android.hardware.ICameraClient");
@@ -128,20 +132,26 @@
nsecs_t timestamp = data.readInt64();
int32_t msgType = data.readInt32();
sp<IMemory> imageData = interface_cast<IMemory>(data.readStrongBinder());
-
- // If the image data contains a native handle, read the native handle from the parcel
- // and replace the native handle in the image data. (The native handle in image data is
- // not serielized/deserialized so it's not valid in the process.)
- if (CameraUtils::isNativeHandleMetadata(imageData)) {
- VideoNativeHandleMetadata *metadata =
- (VideoNativeHandleMetadata*)(imageData->pointer());
- metadata->pHandle = data.readNativeHandle();
-
- // The native handle will be freed in
- // BpCameraRecordingProxyListener::releaseRecordingFrame.
+ dataCallbackTimestamp(timestamp, msgType, imageData);
+ return NO_ERROR;
+ } break;
+ case RECORDING_FRAME_HANDLE_CALLBACK_TIMESTAMP: {
+ ALOGV("RECORDING_FRAME_HANDLE_CALLBACK_TIMESTAMP");
+ CHECK_INTERFACE(ICameraClient, data, reply);
+ nsecs_t timestamp;
+ status_t res = data.readInt64(&timestamp);
+ if (res != OK) {
+ ALOGE("%s: Failed to read timestamp: %s (%d)", __FUNCTION__, strerror(-res), res);
+ return BAD_VALUE;
+ }
+ native_handle_t* handle = data.readNativeHandle();
+ if (handle == nullptr) {
+ ALOGE("%s: Received a null native handle", __FUNCTION__);
+ return BAD_VALUE;
}
- dataCallbackTimestamp(timestamp, msgType, imageData);
+ // The native handle will be freed in BpCamera::releaseRecordingFrameHandle.
+ recordingFrameHandleCallbackTimestamp(timestamp, handle);
return NO_ERROR;
} break;
default:
diff --git a/camera/ICameraRecordingProxy.cpp b/camera/ICameraRecordingProxy.cpp
index d128f5b..63c4b1d 100644
--- a/camera/ICameraRecordingProxy.cpp
+++ b/camera/ICameraRecordingProxy.cpp
@@ -31,6 +31,7 @@
START_RECORDING = IBinder::FIRST_CALL_TRANSACTION,
STOP_RECORDING,
RELEASE_RECORDING_FRAME,
+ RELEASE_RECORDING_FRAME_HANDLE,
};
@@ -66,22 +67,20 @@
Parcel data, reply;
data.writeInterfaceToken(ICameraRecordingProxy::getInterfaceDescriptor());
data.writeStrongBinder(IInterface::asBinder(mem));
-
- native_handle_t *nh = nullptr;
- if (CameraUtils::isNativeHandleMetadata(mem)) {
- VideoNativeHandleMetadata *metadata =
- (VideoNativeHandleMetadata*)(mem->pointer());
- nh = metadata->pHandle;
- data.writeNativeHandle(nh);
- }
-
remote()->transact(RELEASE_RECORDING_FRAME, data, &reply);
+ }
- if (nh) {
- // Close the native handle because camera received a dup copy.
- native_handle_close(nh);
- native_handle_delete(nh);
- }
+ void releaseRecordingFrameHandle(native_handle_t *handle) {
+ ALOGV("releaseRecordingFrameHandle");
+ Parcel data, reply;
+ data.writeInterfaceToken(ICameraRecordingProxy::getInterfaceDescriptor());
+ data.writeNativeHandle(handle);
+
+ remote()->transact(RELEASE_RECORDING_FRAME_HANDLE, data, &reply);
+
+ // Close the native handle because camera received a dup copy.
+ native_handle_close(handle);
+ native_handle_delete(handle);
}
};
@@ -111,19 +110,17 @@
ALOGV("RELEASE_RECORDING_FRAME");
CHECK_INTERFACE(ICameraRecordingProxy, data, reply);
sp<IMemory> mem = interface_cast<IMemory>(data.readStrongBinder());
-
- if (CameraUtils::isNativeHandleMetadata(mem)) {
- VideoNativeHandleMetadata *metadata =
- (VideoNativeHandleMetadata*)(mem->pointer());
- metadata->pHandle = data.readNativeHandle();
-
- // releaseRecordingFrame will be responsble to close the native handle.
- }
releaseRecordingFrame(mem);
-
return NO_ERROR;
} break;
+ case RELEASE_RECORDING_FRAME_HANDLE: {
+ ALOGV("RELEASE_RECORDING_FRAME_HANDLE");
+ CHECK_INTERFACE(ICameraRecordingProxy, data, reply);
+ // releaseRecordingFrameHandle will be responsible for closing the native handle.
+ releaseRecordingFrameHandle(data.readNativeHandle());
+ return NO_ERROR;
+ } break;
default:
return BBinder::onTransact(code, data, reply, flags);
}
diff --git a/camera/ICameraRecordingProxyListener.cpp b/camera/ICameraRecordingProxyListener.cpp
index 447174e..fa4dfd8 100644
--- a/camera/ICameraRecordingProxyListener.cpp
+++ b/camera/ICameraRecordingProxyListener.cpp
@@ -27,6 +27,7 @@
enum {
DATA_CALLBACK_TIMESTAMP = IBinder::FIRST_CALL_TRANSACTION,
+ RECORDING_FRAME_HANDLE_CALLBACK_TIMESTAMP,
};
class BpCameraRecordingProxyListener: public BpInterface<ICameraRecordingProxyListener>
@@ -45,22 +46,21 @@
data.writeInt64(timestamp);
data.writeInt32(msgType);
data.writeStrongBinder(IInterface::asBinder(imageData));
- native_handle_t* nh = nullptr;
-
- if (CameraUtils::isNativeHandleMetadata(imageData)) {
- VideoNativeHandleMetadata *metadata =
- (VideoNativeHandleMetadata*)(imageData->pointer());
- nh = metadata->pHandle;
- data.writeNativeHandle(nh);
- }
-
remote()->transact(DATA_CALLBACK_TIMESTAMP, data, &reply, IBinder::FLAG_ONEWAY);
+ }
+
+ void recordingFrameHandleCallbackTimestamp(nsecs_t timestamp, native_handle_t* handle) {
+ ALOGV("recordingFrameHandleCallbackTimestamp");
+ Parcel data, reply;
+ data.writeInterfaceToken(ICameraRecordingProxyListener::getInterfaceDescriptor());
+ data.writeInt64(timestamp);
+ data.writeNativeHandle(handle);
+ remote()->transact(RECORDING_FRAME_HANDLE_CALLBACK_TIMESTAMP, data, &reply,
+ IBinder::FLAG_ONEWAY);
// The native handle is dupped in ICameraClient so we need to free it here.
- if (nh) {
- native_handle_close(nh);
- native_handle_delete(nh);
- }
+ native_handle_close(handle);
+ native_handle_delete(handle);
}
};
@@ -78,16 +78,27 @@
nsecs_t timestamp = data.readInt64();
int32_t msgType = data.readInt32();
sp<IMemory> imageData = interface_cast<IMemory>(data.readStrongBinder());
-
- if (CameraUtils::isNativeHandleMetadata(imageData)) {
- VideoNativeHandleMetadata *meta = (VideoNativeHandleMetadata*)(imageData->pointer());
- meta->pHandle = data.readNativeHandle();
-
- // The native handle will be freed in
- // BpCameraRecordingProxyListener::releaseRecordingFrame.
+ dataCallbackTimestamp(timestamp, msgType, imageData);
+ return NO_ERROR;
+ } break;
+ case RECORDING_FRAME_HANDLE_CALLBACK_TIMESTAMP: {
+ ALOGV("RECORDING_FRAME_HANDLE_CALLBACK_TIMESTAMP");
+ CHECK_INTERFACE(ICameraRecordingProxyListener, data, reply);
+ nsecs_t timestamp;
+ status_t res = data.readInt64(&timestamp);
+ if (res != OK) {
+ ALOGE("%s: Failed to read timestamp: %s (%d)", __FUNCTION__, strerror(-res), res);
+ return BAD_VALUE;
}
- dataCallbackTimestamp(timestamp, msgType, imageData);
+ native_handle_t* handle = data.readNativeHandle();
+ if (handle == nullptr) {
+ ALOGE("%s: Received a null native handle", __FUNCTION__);
+ return BAD_VALUE;
+ }
+ // The native handle will be freed in
+ // BpCameraRecordingProxy::releaseRecordingFrameHandle.
+ recordingFrameHandleCallbackTimestamp(timestamp, handle);
return NO_ERROR;
} break;
default:
diff --git a/camera/aidl/android/hardware/camera2/ICameraDeviceCallbacks.aidl b/camera/aidl/android/hardware/camera2/ICameraDeviceCallbacks.aidl
index ab57db5..755ec8e 100644
--- a/camera/aidl/android/hardware/camera2/ICameraDeviceCallbacks.aidl
+++ b/camera/aidl/android/hardware/camera2/ICameraDeviceCallbacks.aidl
@@ -37,4 +37,11 @@
oneway void onResultReceived(in CameraMetadataNative result,
in CaptureResultExtras resultExtras);
oneway void onPrepared(int streamId);
+
+ /**
+ * A repeating request encountered an error and was stopped.
+ *
+ * @param lastFrameNumber Frame number of the last frame of the streaming request.
+ */
+ oneway void onRepeatingRequestError(in long lastFrameNumber);
}
diff --git a/camera/aidl/android/hardware/camera2/ICameraDeviceUser.aidl b/camera/aidl/android/hardware/camera2/ICameraDeviceUser.aidl
index 250f15e..1e8744b 100644
--- a/camera/aidl/android/hardware/camera2/ICameraDeviceUser.aidl
+++ b/camera/aidl/android/hardware/camera2/ICameraDeviceUser.aidl
@@ -36,7 +36,10 @@
* Cancel the repeating request specified by requestId
* Returns the frame number of the last frame that will be produced from this
* repeating request, or NO_IN_FLIGHT_REPEATING_FRAMES if no frames were produced
- * by this repeating request
+ * by this repeating request.
+ *
+ * A repeating request may be stopped by the camera device due to an error. Canceling an
+ * already-stopped repeating request will trigger ERROR_ILLEGAL_ARGUMENT.
*/
long cancelRequest(int requestId);
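A hedged sketch of what this contract implies for callers; the ACameraDevice
change later in this patch applies exactly this pattern, and mRemote and
repeatingSequenceId are the names used there:

    // Illustrative caller-side handling of cancelRequest on a repeating
    // request that the device may already have stopped after an error.
    int64_t lastFrameNumber;
    binder::Status st = mRemote->cancelRequest(repeatingSequenceId, &lastFrameNumber);
    if (st.serviceSpecificErrorCode() ==
            hardware::ICameraService::ERROR_ILLEGAL_ARGUMENT) {
        // Already stopped (e.g. after onRepeatingRequestError): benign,
        // nothing left to cancel.
    } else if (!st.isOk()) {
        // A real failure: propagate it to the caller.
    }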
diff --git a/camera/cameraserver/cameraserver.rc b/camera/cameraserver/cameraserver.rc
index 37e2688..16d9da8 100644
--- a/camera/cameraserver/cameraserver.rc
+++ b/camera/cameraserver/cameraserver.rc
@@ -3,3 +3,4 @@
user cameraserver
group audio camera drmrpc inet media mediadrm net_bt net_bt_admin net_bw_acct
ioprio rt 4
+ writepid /dev/cpuset/foreground/tasks
diff --git a/camera/ndk/impl/ACameraDevice.cpp b/camera/ndk/impl/ACameraDevice.cpp
index 0b758b6..bff5547 100644
--- a/camera/ndk/impl/ACameraDevice.cpp
+++ b/camera/ndk/impl/ACameraDevice.cpp
@@ -380,7 +380,11 @@
int64_t lastFrameNumber;
binder::Status remoteRet = mRemote->cancelRequest(repeatingSequenceId, &lastFrameNumber);
- if (!remoteRet.isOk()) {
+ if (remoteRet.serviceSpecificErrorCode() ==
+ hardware::ICameraService::ERROR_ILLEGAL_ARGUMENT) {
+ ALOGV("Repeating request is already stopped.");
+ return ACAMERA_OK;
+ } else if (!remoteRet.isOk()) {
ALOGE("Stop repeating request fails in remote: %s", remoteRet.toString8().string());
return ACAMERA_ERROR_UNKNOWN;
}
@@ -1342,4 +1346,24 @@
return binder::Status::ok();
}
+binder::Status
+CameraDevice::ServiceCallback::onRepeatingRequestError(int64_t lastFrameNumber) {
+ binder::Status ret = binder::Status::ok();
+
+ sp<CameraDevice> dev = mDevice.promote();
+ if (dev == nullptr) {
+ return ret; // device has been closed
+ }
+
+ Mutex::Autolock _l(dev->mDeviceLock);
+
+ int repeatingSequenceId = dev->mRepeatingSequenceId;
+ dev->mRepeatingSequenceId = REQUEST_ID_NONE;
+
+ dev->checkRepeatingSequenceCompleteLocked(repeatingSequenceId, lastFrameNumber);
+
+ return ret;
+}
+
+
} // namespace android
diff --git a/camera/ndk/impl/ACameraDevice.h b/camera/ndk/impl/ACameraDevice.h
index 3ccf95a..71e364d 100644
--- a/camera/ndk/impl/ACameraDevice.h
+++ b/camera/ndk/impl/ACameraDevice.h
@@ -74,6 +74,7 @@
binder::Status onResultReceived(const CameraMetadata& metadata,
const CaptureResultExtras& resultExtras) override;
binder::Status onPrepared(int streamId) override;
+ binder::Status onRepeatingRequestError(int64_t lastFrameNumber) override;
private:
const wp<CameraDevice> mDevice;
};
diff --git a/camera/ndk/impl/ACameraManager.cpp b/camera/ndk/impl/ACameraManager.cpp
index 24d5282..26d6679 100644
--- a/camera/ndk/impl/ACameraManager.cpp
+++ b/camera/ndk/impl/ACameraManager.cpp
@@ -471,8 +471,38 @@
if (!serviceRet.isOk()) {
ALOGE("%s: connect camera device failed: %s", __FUNCTION__, serviceRet.toString8().string());
+ // Convert serviceRet to camera_status_t
+ switch(serviceRet.serviceSpecificErrorCode()) {
+ case hardware::ICameraService::ERROR_DISCONNECTED:
+ ret = ACAMERA_ERROR_CAMERA_DISCONNECTED;
+ break;
+ case hardware::ICameraService::ERROR_CAMERA_IN_USE:
+ ret = ACAMERA_ERROR_CAMERA_IN_USE;
+ break;
+ case hardware::ICameraService::ERROR_MAX_CAMERAS_IN_USE:
+ ret = ACAMERA_ERROR_MAX_CAMERA_IN_USE;
+ break;
+ case hardware::ICameraService::ERROR_ILLEGAL_ARGUMENT:
+ ret = ACAMERA_ERROR_INVALID_PARAMETER;
+ break;
+ case hardware::ICameraService::ERROR_DEPRECATED_HAL:
+ // Should not reach here since we filtered legacy HALs earlier
+ ret = ACAMERA_ERROR_INVALID_PARAMETER;
+ break;
+ case hardware::ICameraService::ERROR_DISABLED:
+ ret = ACAMERA_ERROR_CAMERA_DISABLED;
+ break;
+ case hardware::ICameraService::ERROR_PERMISSION_DENIED:
+ ret = ACAMERA_ERROR_PERMISSION_DENIED;
+ break;
+ case hardware::ICameraService::ERROR_INVALID_OPERATION:
+ default:
+ ret = ACAMERA_ERROR_UNKNOWN;
+ break;
+ }
+
delete device;
- return ACAMERA_ERROR_CAMERA_DISCONNECTED;
+ return ret;
}
if (deviceRemote == nullptr) {
ALOGE("%s: connect camera device failed! remote device is null", __FUNCTION__);
diff --git a/camera/tests/CameraBinderTests.cpp b/camera/tests/CameraBinderTests.cpp
index 0b687b4..828a758 100644
--- a/camera/tests/CameraBinderTests.cpp
+++ b/camera/tests/CameraBinderTests.cpp
@@ -149,7 +149,8 @@
PREPARED,
RUNNING,
SENT_RESULT,
- UNINITIALIZED
+ UNINITIALIZED,
+ REPEATING_REQUEST_ERROR,
};
protected:
@@ -215,6 +216,15 @@
return binder::Status::ok();
}
+ virtual binder::Status onRepeatingRequestError(int64_t lastFrameNumber) {
+ (void) lastFrameNumber;
+ Mutex::Autolock l(mLock);
+ mLastStatus = REPEATING_REQUEST_ERROR;
+ mStatusesHit.push_back(mLastStatus);
+ mStatusCondition.broadcast();
+ return binder::Status::ok();
+ }
+
// Test helper functions:
bool hadError() const {
diff --git a/cmds/stagefright/Android.mk b/cmds/stagefright/Android.mk
index e5fbba0..9e15a81 100644
--- a/cmds/stagefright/Android.mk
+++ b/cmds/stagefright/Android.mk
@@ -39,7 +39,8 @@
LOCAL_C_INCLUDES:= \
frameworks/av/media/libstagefright \
- $(TOP)/frameworks/native/include/media/openmax
+ $(TOP)/frameworks/native/include/media/openmax \
+ $(TOP)/frameworks/native/include/media/hardware
LOCAL_CFLAGS += -Wno-multichar -Werror -Wall
LOCAL_CLANG := true
@@ -63,7 +64,8 @@
LOCAL_C_INCLUDES:= \
frameworks/av/media/libstagefright \
- $(TOP)/frameworks/native/include/media/openmax
+ $(TOP)/frameworks/native/include/media/openmax \
+ $(TOP)/frameworks/native/include/media/hardware
LOCAL_CFLAGS += -Wno-multichar -Werror -Wall
LOCAL_CLANG := true
diff --git a/drm/drmserver/drmserver.rc b/drm/drmserver/drmserver.rc
index 42f5fc8..de46fb9 100644
--- a/drm/drmserver/drmserver.rc
+++ b/drm/drmserver/drmserver.rc
@@ -2,3 +2,4 @@
class main
user drm
group drm system inet drmrpc readproc
+ writepid /dev/cpuset/foreground/tasks
diff --git a/drm/libmediadrm/Android.mk b/drm/libmediadrm/Android.mk
new file mode 100644
index 0000000..6a2ed31
--- /dev/null
+++ b/drm/libmediadrm/Android.mk
@@ -0,0 +1,36 @@
+LOCAL_PATH:= $(call my-dir)
+
+#
+# libmediadrm
+#
+
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES:= \
+ Crypto.cpp \
+ Drm.cpp \
+ DrmSessionManager.cpp \
+ SharedLibrary.cpp
+
+LOCAL_SHARED_LIBRARIES := \
+ libbinder \
+ libcrypto \
+ libcutils \
+ libdl \
+ liblog \
+ libmedia \
+ libstagefright \
+ libstagefright_foundation \
+ libutils
+
+LOCAL_C_INCLUDES := \
+ libcore/include
+
+LOCAL_CFLAGS += -Werror -Wno-error=deprecated-declarations -Wall
+LOCAL_CLANG := true
+
+LOCAL_MODULE:= libmediadrm
+
+include $(BUILD_SHARED_LIBRARY)
+
+include $(call all-makefiles-under,$(LOCAL_PATH))
diff --git a/media/libmediaplayerservice/Crypto.cpp b/drm/libmediadrm/Crypto.cpp
similarity index 99%
rename from media/libmediaplayerservice/Crypto.cpp
rename to drm/libmediadrm/Crypto.cpp
index 9165b9d..79633cb 100644
--- a/media/libmediaplayerservice/Crypto.cpp
+++ b/drm/libmediadrm/Crypto.cpp
@@ -20,9 +20,8 @@
#include <dirent.h>
#include <dlfcn.h>
-#include "Crypto.h"
-
#include <binder/IMemory.h>
+#include <media/Crypto.h>
#include <media/hardware/CryptoAPI.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AString.h>
diff --git a/media/libmediaplayerservice/Drm.cpp b/drm/libmediadrm/Drm.cpp
similarity index 98%
rename from media/libmediaplayerservice/Drm.cpp
rename to drm/libmediadrm/Drm.cpp
index 321ccbf..7c1f5c8 100644
--- a/media/libmediaplayerservice/Drm.cpp
+++ b/drm/libmediadrm/Drm.cpp
@@ -21,10 +21,9 @@
#include <dirent.h>
#include <dlfcn.h>
-#include "Drm.h"
-
-#include "DrmSessionClientInterface.h"
-#include "DrmSessionManager.h"
+#include <media/DrmSessionClientInterface.h>
+#include <media/DrmSessionManager.h>
+#include <media/Drm.h>
#include <media/drm/DrmAPI.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AString.h>
diff --git a/media/libmediaplayerservice/DrmSessionManager.cpp b/drm/libmediadrm/DrmSessionManager.cpp
similarity index 98%
rename from media/libmediaplayerservice/DrmSessionManager.cpp
rename to drm/libmediadrm/DrmSessionManager.cpp
index 641f881..a87fb9d 100644
--- a/media/libmediaplayerservice/DrmSessionManager.cpp
+++ b/drm/libmediadrm/DrmSessionManager.cpp
@@ -18,12 +18,11 @@
#define LOG_TAG "DrmSessionManager"
#include <utils/Log.h>
-#include "DrmSessionManager.h"
-
-#include "DrmSessionClientInterface.h"
#include <binder/IPCThreadState.h>
#include <binder/IProcessInfoService.h>
#include <binder/IServiceManager.h>
+#include <media/DrmSessionManager.h>
+#include <media/DrmSessionClientInterface.h>
#include <media/stagefright/ProcessInfo.h>
#include <unistd.h>
#include <utils/String8.h>
diff --git a/media/libmediaplayerservice/SharedLibrary.cpp b/drm/libmediadrm/SharedLibrary.cpp
similarity index 97%
rename from media/libmediaplayerservice/SharedLibrary.cpp
rename to drm/libmediadrm/SharedLibrary.cpp
index 34db761..74b3a71 100644
--- a/media/libmediaplayerservice/SharedLibrary.cpp
+++ b/drm/libmediadrm/SharedLibrary.cpp
@@ -16,12 +16,11 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "Drm"
-#include <utils/Log.h>
-#include <media/stagefright/foundation/ADebug.h>
#include <dlfcn.h>
-
-#include "SharedLibrary.h"
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/SharedLibrary.h>
+#include <utils/Log.h>
namespace android {
diff --git a/include/camera/Camera.h b/include/camera/Camera.h
index b45bbfc..be793a2 100644
--- a/include/camera/Camera.h
+++ b/include/camera/Camera.h
@@ -43,6 +43,7 @@
virtual void postData(int32_t msgType, const sp<IMemory>& dataPtr,
camera_frame_metadata_t *metadata) = 0;
virtual void postDataTimestamp(nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr) = 0;
+ virtual void postRecordingFrameHandleTimestamp(nsecs_t timestamp, native_handle_t* handle) = 0;
};
class Camera;
@@ -114,6 +115,9 @@
// release a recording frame
void releaseRecordingFrame(const sp<IMemory>& mem);
+ // release a recording frame handle
+ void releaseRecordingFrameHandle(native_handle_t *handle);
+
// autoFocus - status returned from callback
status_t autoFocus();
@@ -161,6 +165,7 @@
virtual void dataCallback(int32_t msgType, const sp<IMemory>& dataPtr,
camera_frame_metadata_t *metadata);
virtual void dataCallbackTimestamp(nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr);
+ virtual void recordingFrameHandleCallbackTimestamp(nsecs_t timestamp, native_handle_t* handle);
class RecordingProxy : public BnCameraRecordingProxy
{
@@ -171,6 +176,7 @@
virtual status_t startRecording(const sp<ICameraRecordingProxyListener>& listener);
virtual void stopRecording();
virtual void releaseRecordingFrame(const sp<IMemory>& mem);
+ virtual void releaseRecordingFrameHandle(native_handle_t* handle);
private:
sp<Camera> mCamera;
diff --git a/include/camera/ICameraRecordingProxy.h b/include/camera/ICameraRecordingProxy.h
index 2aac284..cb6824a 100644
--- a/include/camera/ICameraRecordingProxy.h
+++ b/include/camera/ICameraRecordingProxy.h
@@ -18,6 +18,7 @@
#define ANDROID_HARDWARE_ICAMERA_RECORDING_PROXY_H
#include <binder/IInterface.h>
+#include <cutils/native_handle.h>
#include <utils/RefBase.h>
namespace android {
@@ -83,6 +84,7 @@
virtual status_t startRecording(const sp<ICameraRecordingProxyListener>& listener) = 0;
virtual void stopRecording() = 0;
virtual void releaseRecordingFrame(const sp<IMemory>& mem) = 0;
+ virtual void releaseRecordingFrameHandle(native_handle_t *handle) = 0;
};
// ----------------------------------------------------------------------------
diff --git a/include/camera/ICameraRecordingProxyListener.h b/include/camera/ICameraRecordingProxyListener.h
index b6c0624..1fee5b9 100644
--- a/include/camera/ICameraRecordingProxyListener.h
+++ b/include/camera/ICameraRecordingProxyListener.h
@@ -18,6 +18,7 @@
#define ANDROID_HARDWARE_ICAMERA_RECORDING_PROXY_LISTENER_H
#include <binder/IInterface.h>
+#include <cutils/native_handle.h>
#include <stdint.h>
#include <utils/RefBase.h>
#include <utils/Timers.h>
@@ -34,6 +35,9 @@
virtual void dataCallbackTimestamp(nsecs_t timestamp, int32_t msgType,
const sp<IMemory>& data) = 0;
+
+ virtual void recordingFrameHandleCallbackTimestamp(nsecs_t timestamp,
+ native_handle_t* handle) = 0;
};
// ----------------------------------------------------------------------------
diff --git a/include/camera/android/hardware/ICamera.h b/include/camera/android/hardware/ICamera.h
index 322b741..3b12afe 100644
--- a/include/camera/android/hardware/ICamera.h
+++ b/include/camera/android/hardware/ICamera.h
@@ -94,9 +94,13 @@
// get recording state
virtual bool recordingEnabled() = 0;
- // release a recording frame
+ // Release a recording frame that was received via ICameraClient::dataCallbackTimestamp.
virtual void releaseRecordingFrame(const sp<IMemory>& mem) = 0;
+ // Release a recording frame handle that was received via
+ // ICameraClient::recordingFrameHandleCallbackTimestamp.
+ virtual void releaseRecordingFrameHandle(native_handle_t *handle) = 0;
+
// auto focus
virtual status_t autoFocus() = 0;
diff --git a/include/camera/android/hardware/ICameraClient.h b/include/camera/android/hardware/ICameraClient.h
index d7f9a75..3f835a9 100644
--- a/include/camera/android/hardware/ICameraClient.h
+++ b/include/camera/android/hardware/ICameraClient.h
@@ -36,6 +36,11 @@
virtual void dataCallback(int32_t msgType, const sp<IMemory>& data,
camera_frame_metadata_t *metadata) = 0;
virtual void dataCallbackTimestamp(nsecs_t timestamp, int32_t msgType, const sp<IMemory>& data) = 0;
+
+ // Invoked to send a recording frame handle with a timestamp. Call
+ // ICamera::releaseRecordingFrameHandle to release the frame handle.
+ virtual void recordingFrameHandleCallbackTimestamp(nsecs_t timestamp,
+ native_handle_t* handle) = 0;
};
// ----------------------------------------------------------------------------
diff --git a/include/camera/ndk/NdkCameraCaptureSession.h b/include/camera/ndk/NdkCameraCaptureSession.h
index 085b614..68eff7a 100644
--- a/include/camera/ndk/NdkCameraCaptureSession.h
+++ b/include/camera/ndk/NdkCameraCaptureSession.h
@@ -14,6 +14,15 @@
* limitations under the License.
*/
+/**
+ * @addtogroup Camera
+ * @{
+ */
+
+/**
+ * @file NdkCameraCaptureSession.h
+ */
+
/*
* This file defines an NDK API.
* Do not remove methods.
@@ -34,63 +43,337 @@
extern "C" {
#endif
+/**
+ * ACameraCaptureSession is an opaque type that manages frame captures of a camera device.
+ *
+ * A pointer can be obtained using the {@link ACameraDevice_createCaptureSession} method.
+ */
typedef struct ACameraCaptureSession ACameraCaptureSession;
+/**
+ * The definition of camera capture session state callback.
+ *
+ * @param context The optional application context provided by the user in
+ * {@link ACameraCaptureSession_stateCallbacks}.
+ * @param session The camera capture session whose state is changing.
+ */
typedef void (*ACameraCaptureSession_stateCallback)(void* context, ACameraCaptureSession *session);
typedef struct ACameraCaptureSession_stateCallbacks {
+ /// optional application context.
void* context;
- ACameraCaptureSession_stateCallback onClosed; // session is unusable after this callback
+
+ /**
+ * This callback is called when the session is closed and deleted from memory.
+ *
+ * <p>A session is closed when {@link ACameraCaptureSession_close} is called, a new session
+ * is created by the parent camera device,
+ * or when the parent camera device is closed (either by the user closing the device,
+ * or due to a camera device disconnection or fatal error).</p>
+ *
+ * <p>Once this callback is called, all access to this ACameraCaptureSession object will cause
+ * a crash.</p>
+ */
+ ACameraCaptureSession_stateCallback onClosed;
+
+ /**
+ * This callback is called every time the session has no more capture requests to process.
+ *
+ * <p>This callback will be invoked any time the session finishes processing
+ * all of its active capture requests, and no repeating request or burst is set up.</p>
+ */
ACameraCaptureSession_stateCallback onReady;
+
+ /**
+ * This callback is called when the session starts actively processing capture requests.
+ *
+ * <p>If the session runs out of capture requests to process and calls {@link onReady},
+ * then this callback will be invoked again once new requests are submitted for capture.</p>
+ */
ACameraCaptureSession_stateCallback onActive;
} ACameraCaptureSession_stateCallbacks;
+/// Enum for describing error reason in {@link ACameraCaptureFailure}
enum {
+ /**
+ * The capture session has dropped this frame due to an
+ * {@link ACameraCaptureSession_abortCaptures} call.
+ */
CAPTURE_FAILURE_REASON_FLUSHED = 0,
+
+ /**
+ * The capture session has dropped this frame due to an error in the framework.
+ */
CAPTURE_FAILURE_REASON_ERROR
};
+/// Struct to describe a capture failure
typedef struct ACameraCaptureFailure {
+ /**
+ * The frame number associated with this failed capture.
+ *
+ * <p>Whenever a request has been processed, regardless of failed capture or success,
+ * it gets a unique frame number assigned to its future result/failed capture.</p>
+ *
+ * <p>This value monotonically increments, starting with 0,
+ * for every new result or failure; and the scope is the lifetime of the
+ * {@link ACameraDevice}.</p>
+ */
int64_t frameNumber;
+
+ /**
+ * Determine why the request was dropped, whether due to an error or to a user
+ * action.
+ *
+ * @see CAPTURE_FAILURE_REASON_ERROR
+ * @see CAPTURE_FAILURE_REASON_FLUSHED
+ */
int reason;
+
+ /**
+ * The sequence ID for this failed capture that was returned by the
+ * {@link ACameraCaptureSession_capture} or {@link ACameraCaptureSession_setRepeatingRequest}.
+ *
+ * <p>The sequence ID is a unique monotonically increasing value starting from 0,
+ * incremented every time a new group of requests is submitted to the ACameraDevice.</p>
+ */
int sequenceId;
+
+ /**
+ * Determine if the image was captured from the camera.
+ *
+ * <p>If the image was not captured, no image buffers will be available.
+ * If the image was captured, then image buffers may be available.</p>
+ *
+ */
bool wasImageCaptured;
} ACameraCaptureFailure;
-/* Note that the ACaptureRequest* in the callback will be different to what app has submitted,
- but the contents will still be the same as what app submitted */
+/**
+ * The definition of camera capture start callback.
+ *
+ * @param context The optional application context provided by the user in
+ * {@link ACameraCaptureSession_captureCallbacks}.
+ * @param session The camera capture session of interest.
+ * @param request The capture request that is starting. Note that this pointer points to a copy of
+ * the capture request sent by the application, so the address is different from what
+ * the application sent, but the content will match. This request will be freed by the
+ * framework immediately after this callback returns.
+ * @param timestamp The timestamp when the capture is started. This timestamp will match
+ * {@link ACAMERA_SENSOR_TIMESTAMP} of the {@link ACameraMetadata} in
+ * {@link ACameraCaptureSession_captureCallbacks#onCaptureCompleted} callback.
+ */
typedef void (*ACameraCaptureSession_captureCallback_start)(
void* context, ACameraCaptureSession* session,
const ACaptureRequest* request, int64_t timestamp);
+/**
+ * The definition of camera capture progress/result callback.
+ *
+ * @param context The optional application context provided by the user in
+ * {@link ACameraCaptureSession_captureCallbacks}.
+ * @param session The camera capture session of interest.
+ * @param request The capture request of interest. Note that this pointer points to a copy of
+ * the capture request sent by the application, so the address is different from what
+ * the application sent, but the content will match. This request will be freed by the
+ * framework immediately after this callback returns.
+ * @param result The capture result metadata reported by the camera device
+ */
typedef void (*ACameraCaptureSession_captureCallback_result)(
void* context, ACameraCaptureSession* session,
ACaptureRequest* request, const ACameraMetadata* result);
+/**
+ * The definition of camera capture failure callback.
+ *
+ * @param context The optional application context provided by the user in
+ * {@link ACameraCaptureSession_captureCallbacks}.
+ * @param session The camera capture session of interest.
+ * @param request The capture request of interest. Note that this pointer points to a copy of
+ * the capture request sent by the application, so the address is different from what
+ * the application sent, but the content will match. This request will be freed by the
+ * framework immediately after this callback returns.
+ * @param failure The {@link ACameraCaptureFailure} that describes the capture failure.
+ */
typedef void (*ACameraCaptureSession_captureCallback_failed)(
void* context, ACameraCaptureSession* session,
ACaptureRequest* request, ACameraCaptureFailure* failure);
+/**
+ * The definition of camera sequence end callback.
+ *
+ * @param context The optional application context provided by the user in
+ * {@link ACameraCaptureSession_captureCallbacks}.
+ * @param session The camera capture session of interest.
+ * @param sequenceId The capture sequence ID of the finished sequence.
+ * @param frameNumber The frame number of the last frame of this sequence.
+ */
typedef void (*ACameraCaptureSession_captureCallback_sequenceEnd)(
void* context, ACameraCaptureSession* session,
int sequenceId, int64_t frameNumber);
+/**
+ * The definition of camera sequence aborted callback.
+ *
+ * @param context The optional application context provided by the user in
+ * {@link ACameraCaptureSession_captureCallbacks}.
+ * @param session The camera capture session of interest.
+ * @param sequenceId The capture sequence ID of the aborted sequence.
+ */
typedef void (*ACameraCaptureSession_captureCallback_sequenceAbort)(
void* context, ACameraCaptureSession* session,
int sequenceId);
+/**
+ * The definition of camera buffer lost callback.
+ *
+ * @param context The optional application context provided by the user in
+ * {@link ACameraCaptureSession_captureCallbacks}.
+ * @param session The camera capture session of interest.
+ * @param request The capture request of interest. Note that this pointer points to a copy of
+ * the capture request sent by the application, so the address is different from what
+ * the application sent, but the content will match. This request will be freed by the
+ * framework immediately after this callback returns.
+ * @param window The {@link ANativeWindow} that the lost buffer would have been sent to.
+ * @param frameNumber The frame number of the lost buffer.
+ */
typedef void (*ACameraCaptureSession_captureCallback_bufferLost)(
void* context, ACameraCaptureSession* session,
ACaptureRequest* request, ANativeWindow* window, int64_t frameNumber);
typedef struct ACameraCaptureSession_captureCallbacks {
- void* context;
+ /// optional application context.
+ void* context;
+
+ /**
+ * This callback is called when the camera device has started capturing
+ * the output image for the request, at the beginning of image exposure.
+ *
+ * <p>This callback is invoked right as
+ * the capture of a frame begins, so it is the most appropriate time
+ * for playing a shutter sound, or triggering UI indicators of capture.</p>
+ *
+ * <p>The request that is being used for this capture is provided, along
+ * with the actual timestamp for the start of exposure.
+ * This timestamp matches the timestamps that will be
+ * included in {@link ACAMERA_SENSOR_TIMESTAMP} of the {@link ACameraMetadata} in
+ * {@link onCaptureCompleted} callback,
+ * and in the buffers sent to each output ANativeWindow. These buffer
+ * timestamps are accessible through, for example,
+ * {@link AImage_getTimestamp} or
+ * <a href="http://developer.android.com/reference/android/graphics/SurfaceTexture.html#getTimestamp()">
+ * android.graphics.SurfaceTexture#getTimestamp()</a>.</p>
+ *
+ * <p>Note that the ACaptureRequest pointer in the callback will not match what the application has
+ * submitted, but the contents of the ACaptureRequest will match what the application submitted.</p>
+ *
+ */
ACameraCaptureSession_captureCallback_start onCaptureStarted;
+
+ /**
+ * This callback is called when an image capture makes partial forward progress; some
+ * (but not all) results from an image capture are available.
+ *
+ * <p>The result provided here will contain some subset of the fields of
+ * a full result. Multiple {@link onCaptureProgressed} calls may happen per
+ * capture; a given result field will only be present in one partial
+ * capture at most. The final {@link onCaptureCompleted} call will always
+ * contain all the fields (in particular, the union of all the fields of all
+ * the partial results composing the total result).</p>
+ *
+ * <p>For each request, some result data might be available earlier than others. The typical
+ * delay between each partial result (per request) is a single frame interval.
+ * For performance-oriented use-cases, applications should query the metadata they need
+ * to make forward progress from the partial results and avoid waiting for the completed
+ * result.</p>
+ *
+ * <p>For a particular request, {@link onCaptureProgressed} may happen before or after
+ * {@link onCaptureStarted}.</p>
+ *
+ * <p>Each request will generate at least one partial result, and at most
+ * {@link ACAMERA_REQUEST_PARTIAL_RESULT_COUNT} partial results.</p>
+ *
+ * <p>Depending on the request settings, the number of partial results per request
+ * will vary, although the partial count is typically constant as long as the set of
+ * enabled camera device subsystems stays the same.</p>
+ *
+ * <p>Note that the ACaptureRequest pointer in the callback will not match what the application has
+ * submitted, but the contents of the ACaptureRequest will match what the application submitted.</p>
+ */
ACameraCaptureSession_captureCallback_result onCaptureProgressed;
+
+ /**
+ * This callback is called when an image capture has fully completed and all the
+ * result metadata is available.
+ *
+ * <p>This callback will always fire after the last {@link onCaptureProgressed};
+ * in other words, no more partial results will be delivered once the completed result
+ * is available.</p>
+ *
+ * <p>For performance-intensive use-cases where latency is a factor, consider
+ * using {@link onCaptureProgressed} instead.</p>
+ *
+ * <p>Note that the ACaptureRequest pointer in the callback will not match what the application has
+ * submitted, but the contents of the ACaptureRequest will match what the application submitted.</p>
+ */
ACameraCaptureSession_captureCallback_result onCaptureCompleted;
+
+ /**
+ * This callback is called instead of {@link onCaptureCompleted} when the
+ * camera device failed to produce a capture result for the
+ * request.
+ *
+ * <p>Other requests are unaffected, and some or all image buffers from
+ * the capture may have been pushed to their respective output
+ * streams.</p>
+ *
+ * <p>Note that the ACaptureRequest pointer in the callback will not match what the application has
+ * submitted, but the contents of the ACaptureRequest will match what the application submitted.</p>
+ *
+ * @see ACameraCaptureFailure
+ */
ACameraCaptureSession_captureCallback_failed onCaptureFailed;
+
+ /**
+ * This callback is called independently of the others in {@link ACameraCaptureSession_captureCallbacks},
+ * when a capture sequence finishes and all capture results
+ * or capture failures for it have been returned via this {@link ACameraCaptureSession_captureCallbacks}.
+ *
+ * <p>In total, there will be at least one result/failure returned by this listener
+ * before this callback is invoked. If the capture sequence is aborted before any
+ * requests have been processed, {@link onCaptureSequenceAborted} is invoked instead.</p>
+ */
ACameraCaptureSession_captureCallback_sequenceEnd onCaptureSequenceCompleted;
+
+ /**
+ * This callback is called independently of the others in {@link ACameraCaptureSession_captureCallbacks},
+ * when a capture sequence aborts before any capture results
+ * or capture failures for it have been returned via this {@link ACameraCaptureSession_captureCallbacks}.
+ *
+ * <p>Due to the asynchronous nature of the camera device, not all submitted captures
+ * are immediately processed. It is possible to clear out the pending requests
+ * by a variety of operations such as {@link ACameraCaptureSession_stopRepeating} or
+ * {@link ACameraCaptureSession_abortCaptures}. When such an event happens,
+ * {@link onCaptureSequenceCompleted} will not be called.</p>
+ */
ACameraCaptureSession_captureCallback_sequenceAbort onCaptureSequenceAborted;
+
+ /**
+ * This callback is called if a single buffer for a capture could not be sent to its
+ * destination ANativeWindow.
+ *
+ * <p>If the whole capture failed, then {@link onCaptureFailed} will be called instead. If
+ * some but not all buffers were captured and the result metadata will not be available,
+ * then onCaptureFailed will be invoked with {@link ACameraCaptureFailure#wasImageCaptured}
+ * returning true, along with one or more calls to {@link onCaptureBufferLost} for the
+ * failed outputs.</p>
+ *
+ * <p>Note that the ACaptureRequest pointer in the callback will not match what the application has
+ * submitted, but the contents of the ACaptureRequest will match what the application submitted.
+ * The ANativeWindow pointer will always match what the application submitted in
+ * {@link ACameraDevice_createCaptureSession}.</p>
+ *
+ */
ACameraCaptureSession_captureCallback_bufferLost onCaptureBufferLost;
} ACameraCaptureSession_captureCallbacks;
@@ -98,45 +381,208 @@
CAPTURE_SEQUENCE_ID_NONE = -1
};
-/*
- * Close capture session
+/**
+ * Close this capture session.
+ *
+ * <p>Closing a session frees up the target output Surfaces of the session for reuse with either
+ * a new session or with other APIs that can draw to Surfaces.</p>
+ *
+ * <p>Note that creating a new capture session with {@link ACameraDevice_createCaptureSession}
+ * will close any existing capture session automatically, and call the older session listener's
+ * {@link ACameraCaptureSession_stateCallbacks#onClosed} callback. Using
+ * {@link ACameraDevice_createCaptureSession} directly without closing is the recommended approach
+ * for quickly switching to a new session, since unchanged target outputs can be reused more
+ * efficiently.</p>
+ *
+ * <p>After a session is closed and before {@link ACameraCaptureSession_stateCallbacks#onClosed}
+ * is called, all methods invoked on the session will return {@link ACAMERA_ERROR_SESSION_CLOSED},
+ * and any repeating requests are stopped (as if {@link ACameraCaptureSession_stopRepeating} was
+ * called). However, any in-progress capture requests submitted to the session will be completed as
+ * normal; once all captures have completed and the session has been torn down,
+ * {@link ACameraCaptureSession_stateCallbacks#onClosed} callback will be called and the session
+ * will be removed from memory.</p>
+ *
+ * <p>Closing a session is idempotent; closing more than once has no effect.</p>
+ *
+ * @param session the capture session of interest
*/
-void ACameraCaptureSession_close(ACameraCaptureSession*);
+void ACameraCaptureSession_close(ACameraCaptureSession* session);
struct ACameraDevice;
typedef struct ACameraDevice ACameraDevice;
/**
- * Get the camera device associated with this capture session
+ * Get the ACameraDevice associated with this capture session; on success it is
+ * returned in the device argument.
+ *
+ * @param session the capture session of interest
+ * @param device the {@link ACameraDevice} associated with session. Will be set to NULL
+ * if the session is closed or this method fails.
+ * @return <ul><li>
+ * {@link ACAMERA_OK} if the method call succeeds. The {@link ACameraDevice}
+ * will be stored in device argument</li>
+ * <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if session or device is NULL</li>
+ * <li>{@link ACAMERA_ERROR_SESSION_CLOSED} if the capture session has been closed</li>
+ * <li>{@link ACAMERA_ERROR_UNKNOWN} if the method fails for some other reasons</li></ul>
+ *
*/
camera_status_t ACameraCaptureSession_getDevice(
- ACameraCaptureSession*, ACameraDevice** device);
+ ACameraCaptureSession* session, /*out*/ACameraDevice** device);
/**
- * Send capture request(s)
+ * Submit an array of requests to be captured in sequence as a burst.
+ *
+ * <p>The burst will be captured in the minimum amount of time possible, and will not be
+ * interleaved with requests submitted by other capture or repeat calls.</p>
+ *
+ * <p>Each capture produces one {@link ACameraMetadata} as a capture result and image buffers for
+ * one or more target {@link ANativeWindow}s. The target ANativeWindows (set with
+ * {@link ACaptureRequest_addTarget}) must be a subset of the ANativeWindows provided when
+ * this capture session was created.</p>
+ *
+ * @param session the capture session of interest
+ * @param callbacks the {@link ACameraCaptureSession_captureCallbacks} to be associated with this capture
+ * sequence. No capture callback will be fired if this is set to NULL.
+ * @param numRequests number of requests in requests argument. Must be at least 1.
+ * @param requests an array of {@link ACaptureRequest} to be captured. Length must be at least
+ * numRequests.
+ * @param captureSequenceId the capture sequence ID associated with this capture method invocation
+ * will be stored here if this argument is not NULL and the method call succeeds.
+ * When this argument is set to NULL, the capture sequence ID will not be returned.
+ *
+ * @return <ul><li>
+ * {@link ACAMERA_OK} if the method succeeds. captureSequenceId will be filled
+ * if it is not NULL.</li>
+ * <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if session or requests is NULL, or
+ * if numRequests < 1</li>
+ * <li>{@link ACAMERA_ERROR_SESSION_CLOSED} if the capture session has been closed</li>
+ * <li>{@link ACAMERA_ERROR_CAMERA_DISCONNECTED} if the camera device is closed</li>
+ * <li>{@link ACAMERA_ERROR_CAMERA_DEVICE} if the camera device encounters fatal error</li>
+ * <li>{@link ACAMERA_ERROR_CAMERA_SERVICE} if the camera service encounters fatal error</li>
+ * <li>{@link ACAMERA_ERROR_UNKNOWN} if the method fails for some other reasons</li></ul>
*/
camera_status_t ACameraCaptureSession_capture(
- ACameraCaptureSession*, /*optional*/ACameraCaptureSession_captureCallbacks*,
+ ACameraCaptureSession* session,
+ /*optional*/ACameraCaptureSession_captureCallbacks* callbacks,
int numRequests, ACaptureRequest** requests,
/*optional*/int* captureSequenceId);
/**
- * Send repeating capture request(s)
+ * Request endlessly repeating capture of a sequence of images by this capture session.
+ *
+ * <p>With this method, the camera device will continually capture images,
+ * cycling through the settings in the provided list of
+ * {@link ACaptureRequest}, at the maximum rate possible.</p>
+ *
+ * <p>If a request is submitted through {@link ACameraCaptureSession_capture},
+ * the current repetition of the request list will be
+ * completed before the higher-priority request is handled. This guarantees
+ * that the application always receives a complete repeat burst captured in
+ * minimal time, instead of bursts interleaved with higher-priority
+ * captures, or incomplete captures.</p>
+ *
+ * <p>Repeating burst requests are a simple way for an application to
+ * maintain a preview or other continuous stream of frames where each
+ * request is different in a predictable way, without having to continually
+ * submit requests through {@link ACameraCaptureSession_capture}.</p>
+ *
+ * <p>To stop the repeating capture, call {@link ACameraCaptureSession_stopRepeating}. Any
+ * ongoing burst will still be completed, however. Calling
+ * {@link ACameraCaptureSession_abortCaptures} will also clear the request.</p>
+ *
+ * <p>Calling this method will replace any repeating request previously set up
+ * by this method, although any in-progress burst will be completed before the new repeat
+ * burst will be used.</p>
+ *
+ * @param session the capture session of interest
+ * @param callbacks the {@link ACameraCaptureSession_captureCallbacks} to be associated with this
+ * capture sequence. No capture callback will be fired if callbacks is set to NULL.
+ * @param numRequests number of requests in requests array. Must be at least 1.
+ * @param requests an array of {@link ACaptureRequest} to be captured. Length must be at least
+ * numRequests.
+ * @param captureSequenceId the capture sequence ID associated with this capture method invocation
+ * will be stored here if this argument is not NULL and the method call succeeds.
+ * When this argument is set to NULL, the capture sequence ID will not be returned.
+ *
+ * @return <ul><li>
+ * {@link ACAMERA_OK} if the method succeeds. captureSequenceId will be filled
+ * if it is not NULL.</li>
+ * <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if session or requests is NULL, or
+ * if numRequests < 1</li>
+ * <li>{@link ACAMERA_ERROR_SESSION_CLOSED} if the capture session has been closed</li>
+ * <li>{@link ACAMERA_ERROR_CAMERA_DISCONNECTED} if the camera device is closed</li>
+ * <li>{@link ACAMERA_ERROR_CAMERA_DEVICE} if the camera device encounters fatal error</li>
+ * <li>{@link ACAMERA_ERROR_CAMERA_SERVICE} if the camera service encounters fatal error</li>
+ * <li>{@link ACAMERA_ERROR_UNKNOWN} if the method fails for some other reasons</li></ul>
*/
camera_status_t ACameraCaptureSession_setRepeatingRequest(
- ACameraCaptureSession*, /*optional*/ACameraCaptureSession_captureCallbacks*,
+ ACameraCaptureSession* session,
+ /*optional*/ACameraCaptureSession_captureCallbacks* callbacks,
int numRequests, ACaptureRequest** requests,
/*optional*/int* captureSequenceId);
/**
- * Stop repeating capture request(s)
+ * Cancel any ongoing repeating capture set by {@link ACameraCaptureSession_setRepeatingRequest}.
+ * Has no effect on requests submitted through {@link ACameraCaptureSession_capture}.
+ *
+ * <p>Any currently in-flight captures will still complete, as will any burst that is
+ * mid-capture. To ensure that the device has finished processing all of its capture requests
+ * and is in ready state, wait for the {@link ACameraCaptureSession_stateCallbacks#onReady} callback
+ * after calling this method.</p>
+ *
+ * @param session the capture session of interest
+ *
+ * @return <ul><li>
+ * {@link ACAMERA_OK} if the method succeeds.</li>
+ * <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if session is NULL.</li>
+ * <li>{@link ACAMERA_ERROR_SESSION_CLOSED} if the capture session has been closed</li>
+ * <li>{@link ACAMERA_ERROR_CAMERA_DISCONNECTED} if the camera device is closed</li>
+ * <li>{@link ACAMERA_ERROR_CAMERA_DEVICE} if the camera device encounters fatal error</li>
+ * <li>{@link ACAMERA_ERROR_CAMERA_SERVICE} if the camera service encounters fatal error</li>
+ * <li>{@link ACAMERA_ERROR_UNKNOWN} if the method fails for some other reasons</li></ul>
*/
-camera_status_t ACameraCaptureSession_stopRepeating(ACameraCaptureSession*);
+camera_status_t ACameraCaptureSession_stopRepeating(ACameraCaptureSession* session);
/**
- * Stop all capture requests as soon as possible
+ * Discard all captures currently pending and in-progress as fast as possible.
+ *
+ * <p>The camera device will discard all of its current work as fast as possible. Some in-flight
+ * captures may complete successfully and call
+ * {@link ACameraCaptureSession_captureCallbacks#onCaptureCompleted},
+ * while others will trigger their {@link ACameraCaptureSession_captureCallbacks#onCaptureFailed}
+ * callbacks. If a repeating request list is set, it will be cleared.</p>
+ *
+ * <p>This method is the fastest way to switch the camera device to a new session with
+ * {@link ACameraDevice_createCaptureSession}, at the cost of discarding in-progress
+ * work. It must be called before the new session is created. Once all pending requests are
+ * either completed or thrown away, the {@link ACameraCaptureSession_stateCallbacks#onReady}
+ * callback will be called, if the session has not been closed. Otherwise, the
+ * {@link ACameraCaptureSession_stateCallbacks#onClosed}
+ * callback will be fired when a new session is created by the camera device and the previous
+ * session is being removed from memory.</p>
+ *
+ * <p>Cancelling will introduce at least a brief pause in the stream of data from the camera
+ * device, since once the camera device is emptied, the first new request has to make it through
+ * the entire camera pipeline before new output buffers are produced.</p>
+ *
+ * <p>This means that using ACameraCaptureSession_abortCaptures to simply remove pending requests is
+ * not recommended; it's best used for quickly switching output configurations, or for cancelling
+ * long in-progress requests (such as a multi-second capture).</p>
+ *
+ * @param session the capture session of interest
+ *
+ * @return <ul><li>
+ * {@link ACAMERA_OK} if the method succeeds.</li>
+ * <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if session is NULL.</li>
+ * <li>{@link ACAMERA_ERROR_SESSION_CLOSED} if the capture session has been closed</li>
+ * <li>{@link ACAMERA_ERROR_CAMERA_DISCONNECTED} if the camera device is closed</li>
+ * <li>{@link ACAMERA_ERROR_CAMERA_DEVICE} if the camera device encounters fatal error</li>
+ * <li>{@link ACAMERA_ERROR_CAMERA_SERVICE} if the camera service encounters fatal error</li>
+ * <li>{@link ACAMERA_ERROR_UNKNOWN} if the method fails for some other reasons</li></ul>
*/
-camera_status_t ACameraCaptureSession_abortCaptures(ACameraCaptureSession*);
+camera_status_t ACameraCaptureSession_abortCaptures(ACameraCaptureSession* session);
#ifdef __cplusplus
@@ -144,3 +590,5 @@
#endif
#endif // _NDK_CAMERA_CAPTURE_SESSION_H
+
+/** @} */
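Taken together, the lifecycle documented above suggests a usage pattern like the
following hedged sketch. Error handling is trimmed; session and request are
assumed to come from ACameraDevice_createCaptureSession and
ACameraDevice_createCaptureRequest, and callbacks left NULL in the struct are
simply not fired:

    #include <camera/NdkCameraCaptureSession.h>
    #include <camera/NdkCaptureRequest.h>

    // Hypothetical capture-started hook: a natural place for shutter feedback.
    static void onStarted(void* /*context*/, ACameraCaptureSession* /*session*/,
                          const ACaptureRequest* /*request*/, int64_t /*timestamp*/) {
        // timestamp matches ACAMERA_SENSOR_TIMESTAMP in the completed result.
    }

    static void onCompleted(void* /*context*/, ACameraCaptureSession* /*session*/,
                            ACaptureRequest* /*request*/,
                            const ACameraMetadata* /*result*/) {
        // Full result metadata is available here; no more partials will follow.
    }

    // Sketch: start a repeating preview request and, later, stop it cleanly.
    camera_status_t startPreview(ACameraCaptureSession* session,
                                 ACaptureRequest* request) {
        ACameraCaptureSession_captureCallbacks cbs = {};  // unused hooks stay NULL
        cbs.onCaptureStarted = onStarted;
        cbs.onCaptureCompleted = onCompleted;

        int sequenceId = CAPTURE_SEQUENCE_ID_NONE;
        return ACameraCaptureSession_setRepeatingRequest(
                session, &cbs, /*numRequests*/ 1, &request, &sequenceId);
    }

    camera_status_t stopPreview(ACameraCaptureSession* session) {
        // Per the docs above, wait for onReady afterwards if the device
        // needs to be idle before the next step.
        return ACameraCaptureSession_stopRepeating(session);
    }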
diff --git a/include/camera/ndk/NdkCameraDevice.h b/include/camera/ndk/NdkCameraDevice.h
index 2008a68..fb124a0 100644
--- a/include/camera/ndk/NdkCameraDevice.h
+++ b/include/camera/ndk/NdkCameraDevice.h
@@ -14,6 +14,15 @@
* limitations under the License.
*/
+/**
+ * @addtogroup Camera
+ * @{
+ */
+
+/**
+ * @file NdkCameraDevice.h
+ */
+
/*
* This file defines an NDK API.
* Do not remove methods.
@@ -36,71 +45,614 @@
extern "C" {
#endif
+/**
+ * ACameraDevice is an opaque type that provides access to a camera device.
+ *
+ * A pointer can be obtained using the {@link ACameraManager_openCamera} method.
+ */
typedef struct ACameraDevice ACameraDevice;
-// Struct to hold camera state callbacks
+/// Enum for ACameraDevice_ErrorStateCallback error code
+enum {
+ /**
+ * The camera device is in use already.
+ */
+ ERROR_CAMERA_IN_USE = 1,
+
+ /**
+ * The system-wide limit for the number of open cameras or camera resources has
+ * been reached, and more camera devices cannot be opened until previous
+ * instances are closed.
+ */
+ ERROR_MAX_CAMERAS_IN_USE = 2,
+
+ /**
+ * The camera is disabled due to a device policy, and cannot be opened.
+ */
+ ERROR_CAMERA_DISABLED = 3,
+
+ /**
+ * The camera device has encountered a fatal error.
+ * <p>The camera device needs to be re-opened to be used again.</p>
+ */
+ ERROR_CAMERA_DEVICE = 4,
+
+ /**
+ * The camera service has encountered a fatal error.
+ * <p>The Android device may need to be shut down and restarted to restore
+ * camera function, or there may be a persistent hardware problem.
+ * An attempt at recovery may be possible by closing the
+ * CameraDevice and the CameraManager, and trying to acquire all resources
+ * again from scratch.</p>
+ */
+ ERROR_CAMERA_SERVICE = 5
+};
+
+/**
+ * Camera device state callbacks to be used in {@link ACameraDevice_stateCallbacks}.
+ *
+ * @param context The optional context in {@link ACameraDevice_stateCallbacks} will be
+ * passed to this callback.
+ * @param device The {@link ACameraDevice} that is being disconnected.
+ */
typedef void (*ACameraDevice_StateCallback)(void* context, ACameraDevice* device);
+
+/**
+ * Camera device error state callbacks to be used in {@link ACameraDevice_stateCallbacks}.
+ *
+ * @param context The optional context in {@link ACameraDevice_stateCallbacks} will be
+ * passed to this callback.
+ * @param device The {@link ACameraDevice} that is being disconnected.
+ * @param error The error code describing the cause of this error callback. See the following
+ * links for more detail.
+ *
+ * @see ERROR_CAMERA_IN_USE
+ * @see ERROR_MAX_CAMERAS_IN_USE
+ * @see ERROR_CAMERA_DISABLED
+ * @see ERROR_CAMERA_DEVICE
+ * @see ERROR_CAMERA_SERVICE
+ */
typedef void (*ACameraDevice_ErrorStateCallback)(void* context, ACameraDevice* device, int error);
typedef struct ACameraDevice_StateCallbacks {
+ /// optional application context.
void* context;
- ACameraDevice_StateCallback onDisconnected; // Device is unusable after this callback
- ACameraDevice_ErrorStateCallback onError; // Device is unusable after this callback
+
+ /**
+ * The function is called when a camera device is no longer available for use.
+ *
+ * <p>Any attempt to call API methods on this ACameraDevice will return
+ * {@link ACAMERA_ERROR_CAMERA_DISCONNECTED}. The disconnection could be due to a
+ * change in security policy or permissions; the physical disconnection
+ * of a removable camera device; or the camera being needed for a
+ * higher-priority camera API client.</p>
+ *
+ * <p>The application should clean up the camera with {@link ACameraDevice_close} after
+ * this happens, as it is not recoverable until the camera can be opened
+ * again.</p>
+ *
+ */
+ ACameraDevice_StateCallback onDisconnected;
+
+ /**
+ * The function called when a camera device has encountered a serious error.
+ *
+ * <p>This indicates a failure of the camera device or camera service in some way.
+ * Any attempt to call API methods on this ACameraDevice in the future will return
+ * {@link ACAMERA_ERROR_CAMERA_DISCONNECTED}.</p>
+ *
+ * <p>There may still be capture completion or camera stream callbacks that will be called
+ * after this error is received.</p>
+ *
+ * <p>The application should clean up the camera with {@link ACameraDevice_close} after this
+ * happens. Further attempts at recovery are error-code specific.</p>
+ *
+ */
+ ACameraDevice_ErrorStateCallback onError;
} ACameraDevice_stateCallbacks;
/**
- * Close the camera device synchronously. Open is done in ACameraManager_openCamera
+ * Close the connection and free this ACameraDevice synchronously. Access to the ACameraDevice
+ * after calling this method will cause a crash.
+ *
+ * <p>After this call, all calls to the active ACameraCaptureSession associated to this
+ * ACameraDevice will return {@link ACAMERA_ERROR_SESSION_CLOSED} except for calls to
+ * {@link ACameraCaptureSession_close}.</p>
+ *
+ * <p>This method will stop all repeating captures sent via
+ * {@link ACameraCaptureSession_setRepeatingRequest} and block until all capture requests sent via
+ * {@link ACameraCaptureSession_capture} are complete. Once the method returns, the camera device
+ * will be removed from memory and access to the closed camera device pointer will cause a crash.</p>
+ *
+ * @param device the camera device to be closed
+ *
+ * @return <ul>
+ * <li>{@link ACAMERA_OK} if the method call succeeds.</li>
+ * <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if device is NULL.</li></ul>
*/
-camera_status_t ACameraDevice_close(ACameraDevice*);
+camera_status_t ACameraDevice_close(ACameraDevice* device);
/**
- * Return the camera id associated with this camera device
- * The returned pointer is still owned by framework and should not be delete/free by app
- * The returned pointer should not be used after the device has been closed
+ * Return the camera id associated with this camera device.
+ *
+ * @param device the camera device of interest
+ *
+ * @return camera ID string. The returned string is managed by the framework and must not be
+ * deleted/freed by the application. It also must not be used after the device
+ * has been closed.
*/
-const char* ACameraDevice_getId(const ACameraDevice*);
+const char* ACameraDevice_getId(const ACameraDevice* device);
typedef enum {
+ /**
+ * Create a request suitable for a camera preview window. Specifically, this
+ * means that high frame rate is given priority over the highest-quality
+ * post-processing. These requests would normally be used with the
+ * {@link ACameraCaptureSession_setRepeatingRequest} method.
+ * This template is guaranteed to be supported on all camera devices.
+ *
+ * @see ACameraDevice_createCaptureRequest
+ */
TEMPLATE_PREVIEW = 1,
- TEMPLATE_STILL_CAPTURE,
- TEMPLATE_RECORD,
- TEMPLATE_VIDEO_SNAPSHOT,
- TEMPLATE_ZERO_SHUTTER_LAG,
- TEMPLATE_MANUAL,
+
+ /**
+ * Create a request suitable for still image capture. Specifically, this
+ * means prioritizing image quality over frame rate. These requests would
+ * commonly be used with the {@link ACameraCaptureSession_capture} method.
+ * This template is guaranteed to be supported on all camera devices.
+ *
+ * @see ACameraDevice_createCaptureRequest
+ */
+ TEMPLATE_STILL_CAPTURE = 2,
+
+ /**
+ * Create a request suitable for video recording. Specifically, this means
+ * that a stable frame rate is used, and post-processing is set for
+ * recording quality. These requests would commonly be used with the
+ * {@link ACameraCaptureSession_setRepeatingRequest} method.
+ * This template is guaranteed to be supported on all camera devices.
+ *
+ * @see ACameraDevice_createCaptureRequest
+ */
+ TEMPLATE_RECORD = 3,
+
+ /**
+ * Create a request suitable for still image capture while recording
+ * video. Specifically, this means maximizing image quality without
+ * disrupting the ongoing recording. These requests would commonly be used
+ * with the {@link ACameraCaptureSession_capture} method while a request based on
+ * {@link TEMPLATE_RECORD} is in use with {@link ACameraCaptureSession_setRepeatingRequest}.
+ * This template is guaranteed to be supported on all camera devices.
+ *
+ * @see ACameraDevice_createCaptureRequest
+ */
+ TEMPLATE_VIDEO_SNAPSHOT = 4,
+
+ /**
+ * Create a request suitable for zero shutter lag still capture. This means
+ * maximizing image quality without compromising preview frame rate.
+ * AE/AWB/AF should be on auto mode.
+ *
+ * @see ACameraDevice_createCaptureRequest
+ */
+ TEMPLATE_ZERO_SHUTTER_LAG = 5,
+
+ /**
+ * A basic template for direct application control of capture
+ * parameters. All automatic control is disabled (auto-exposure, auto-white
+ * balance, auto-focus), and post-processing parameters are set to preview
+ * quality. The manual capture parameters (exposure, sensitivity, and so on)
+ * are set to reasonable defaults, but should be overridden by the
+ * application depending on the intended use case.
+ * This template is guaranteed to be supported on camera devices that support the
+ * {@link ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR} capability.
+ *
+ * @see ACameraDevice_createCaptureRequest
+ */
+ TEMPLATE_MANUAL = 6,
} ACameraDevice_request_template;
/**
- * Create/free a default capture request for input template
+ * Create a ACaptureRequest for capturing images, initialized with template
+ * for a target use case.
+ *
+ * <p>The settings are chosen to be the best options for this camera device,
+ * so it is not recommended to reuse the same request for a different camera device.</p>
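+ *
+ * <p>For illustration only, a minimal C sketch of creating and freeing a preview request
+ * (error handling omitted; <code>device</code> is assumed to be an open
+ * {@link ACameraDevice}):</p>
+ * <pre><code>// Hypothetical usage sketch; not a normative part of this API.
+ * ACaptureRequest* request = NULL;
+ * if (ACameraDevice_createCaptureRequest(device, TEMPLATE_PREVIEW, &amp;request) == ACAMERA_OK) {
+ *     // ... configure and submit the request ...
+ *     ACaptureRequest_free(request);
+ * }
+ * </code></pre>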
+ *
+ * @param device the camera device of interest
+ * @param templateId the type of capture request to be created.
+ * See {@link ACameraDevice_request_template}.
+ * @param request the output request will be stored here if the method call succeeds.
+ *
+ * @return <ul>
+ * <li>{@link ACAMERA_OK} if the method call succeeds. The created capture request will be
+ * filled in request argument.</li>
+ * <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if device or request is NULL, or templateId
+ * is undefined.</li>
+ * <li>{@link ACAMERA_ERROR_CAMERA_DISCONNECTED} if the camera device is closed.</li>
+ * <li>{@link ACAMERA_ERROR_CAMERA_DEVICE} if the camera device encounters fatal error.</li>
+ * <li>{@link ACAMERA_ERROR_CAMERA_SERVICE} if the camera service encounters fatal error.</li>
+ * <li>{@link ACAMERA_ERROR_UNKNOWN} if the method fails for some other reasons.</li></ul>
+ *
+ * @see TEMPLATE_PREVIEW
+ * @see TEMPLATE_RECORD
+ * @see TEMPLATE_STILL_CAPTURE
+ * @see TEMPLATE_VIDEO_SNAPSHOT
+ * @see TEMPLATE_MANUAL
*/
camera_status_t ACameraDevice_createCaptureRequest(
- const ACameraDevice*, ACameraDevice_request_template, /*out*/ACaptureRequest** request);
+ const ACameraDevice* device, ACameraDevice_request_template templateId,
+ /*out*/ACaptureRequest** request);
-/**
- * APIs for createing capture session
- */
+
typedef struct ACaptureSessionOutputContainer ACaptureSessionOutputContainer;
typedef struct ACaptureSessionOutput ACaptureSessionOutput;
-camera_status_t ACaptureSessionOutputContainer_create(/*out*/ACaptureSessionOutputContainer**);
-void ACaptureSessionOutputContainer_free(ACaptureSessionOutputContainer*);
+/**
+ * Create a capture session output container.
+ *
+ * <p>The container is used in {@link ACameraDevice_createCaptureSession} method to create a capture
+ * session. Use {@link ACaptureSessionOutputContainer_free} to free the container and its memory
+ * after application no longer needs the ACaptureSessionOutputContainer.</p>
+ *
+ * @param container the output {@link ACaptureSessionOutputContainer} will be stored here if the
+ * method call succeeds.
+ *
+ * @return <ul>
+ * <li>{@link ACAMERA_OK} if the method call succeeds. The created container will be
+ * filled in container argument.</li>
+ * <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if container is NULL.</li></ul>
+ */
+camera_status_t ACaptureSessionOutputContainer_create(
+ /*out*/ACaptureSessionOutputContainer** container);
-camera_status_t ACaptureSessionOutput_create(ANativeWindow*, /*out*/ACaptureSessionOutput**);
-void ACaptureSessionOutput_free(ACaptureSessionOutput*);
+/**
+ * Free a capture session output container.
+ *
+ * @param container the {@link ACaptureSessionOutputContainer} to be freed.
+ *
+ * @see ACaptureSessionOutputContainer_create
+ */
+void ACaptureSessionOutputContainer_free(ACaptureSessionOutputContainer* container);
+/**
+ * Create a ACaptureSessionOutput object.
+ *
+ * <p>The ACaptureSessionOutput is used in {@link ACaptureSessionOutputContainer_add} method to add
+ * an output {@link ANativeWindow} to ACaptureSessionOutputContainer. Use
+ * {@link ACaptureSessionOutput_free} to free the object and its memory after application no longer
+ * needs the {@link ACaptureSessionOutput}.</p>
+ *
+ * @param anw the {@link ANativeWindow} to be associated with the {@link ACaptureSessionOutput}
+ * @param output the output {@link ACaptureSessionOutput} will be stored here if the
+ * method call succeeds.
+ *
+ * @return <ul>
+ * <li>{@link ACAMERA_OK} if the method call succeeds. The created container will be
+ * filled in the output argument.</li>
+ * <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if anw or output is NULL.</li></ul>
+ *
+ * @see ACaptureSessionOutputContainer_add
+ */
+camera_status_t ACaptureSessionOutput_create(
+ ANativeWindow* anw, /*out*/ACaptureSessionOutput** output);
+
+/**
+ * Free a ACaptureSessionOutput object.
+ *
+ * @param output the {@link ACaptureSessionOutput} to be freed.
+ *
+ * @see ACaptureSessionOutput_create
+ */
+void ACaptureSessionOutput_free(ACaptureSessionOutput* output);
+
+/**
+ * Add an {@link ACaptureSessionOutput} object to {@link ACaptureSessionOutputContainer}.
+ *
+ * @param container the {@link ACaptureSessionOutputContainer} of interest.
+ * @param output the output {@link ACaptureSessionOutput} to be added to container.
+ *
+ * @return <ul>
+ * <li>{@link ACAMERA_OK} if the method call succeeds.</li>
+ * <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if container or output is NULL.</li></ul>
+ */
camera_status_t ACaptureSessionOutputContainer_add(
- ACaptureSessionOutputContainer*, const ACaptureSessionOutput*);
-camera_status_t ACaptureSessionOutputContainer_remove(
- ACaptureSessionOutputContainer*, const ACaptureSessionOutput*);
+ ACaptureSessionOutputContainer* container, const ACaptureSessionOutput* output);
-/*
- * Create a new capture session.
- * If there is a preexisting session, the previous session will be closed automatically.
- * However, app still needs to call ACameraCaptureSession_close on previous session.
- * Otherwise the resources hold by previous session won't be freed
+/**
+ * Remove an {@link ACaptureSessionOutput} object from {@link ACaptureSessionOutputContainer}.
+ *
+ * <p>This method has no effect if the ACaptureSessionOutput does not exist in
+ * ACaptureSessionOutputContainer.</p>
+ *
+ * @param container the {@link ACaptureSessionOutputContainer} of interest.
+ * @param output the output {@link ACaptureSessionOutput} to be removed from container.
+ *
+ * @return <ul>
+ * <li>{@link ACAMERA_OK} if the method call succeeds.</li>
+ * <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if container or output is NULL.</li></ul>
+ */
+camera_status_t ACaptureSessionOutputContainer_remove(
+ ACaptureSessionOutputContainer* container, const ACaptureSessionOutput* output);
+
+/**
+ * Create a new camera capture session by providing the target output set of {@link ANativeWindow}
+ * to the camera device.
+ *
+ * <p>If there is a preexisting session, the previous session will be closed
+ * automatically. However, app still needs to call {@link ACameraCaptureSession_close} on previous
+ * session. Otherwise the resources held by previous session will NOT be freed.</p>
+ *
+ * <p>The active capture session determines the set of potential output {@link ANativeWindow}s for
+ * the camera device for each capture request. A given request may use all
+ * or only some of the outputs. Once the ACameraCaptureSession is created, requests can be
+ * submitted with {@link ACameraCaptureSession_capture} or
+ * {@link ACameraCaptureSession_setRepeatingRequest}.</p>
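+ *
+ * <p>For illustration only, a minimal C sketch of the session setup flow (error handling
+ * omitted; <code>device</code>, <code>window</code>, and <code>callbacks</code> are assumed
+ * to be valid):</p>
+ * <pre><code>// Hypothetical usage sketch; not a normative part of this API.
+ * ACaptureSessionOutputContainer* outputs = NULL;
+ * ACaptureSessionOutput* output = NULL;
+ * ACameraCaptureSession* session = NULL;
+ * ACaptureSessionOutputContainer_create(&amp;outputs);
+ * ACaptureSessionOutput_create(window, &amp;output);
+ * ACaptureSessionOutputContainer_add(outputs, output);
+ * ACameraDevice_createCaptureSession(device, outputs, callbacks, &amp;session);
+ * // ... submit requests; later: ACameraCaptureSession_close(session),
+ * // then free the output and the container.
+ * </code></pre>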
+ *
+ * <p>Often the {@link ANativeWindow} used with this method can be obtained from a <a href=
+ * "http://developer.android.com/reference/android/view/Surface.html">Surface</a> java object by
+ * the {@link ANativeWindow_fromSurface} NDK method. Surfaces or ANativeWindows suitable for inclusion as a camera
+ * output can be created for various use cases and targets:</p>
+ *
+ * <ul>
+ *
+ * <li>For drawing to a
+ * <a href="http://developer.android.com/reference/android/view/SurfaceView.html">SurfaceView</a>:
+ * Once the SurfaceView's Surface is created, set the size
+ * of the Surface with
+ * <a href="http://developer.android.com/reference/android/view/SurfaceHolder.html#setFixedSize(int, int)">
+ * android.view.SurfaceHolder\#setFixedSize</a> to be one of the PRIVATE output sizes
+ * returned by {@link ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS}
+ * and then obtain the Surface by calling <a href=
+ * "http://developer.android.com/reference/android/view/SurfaceHolder.html#getSurface()">
+ * android.view.SurfaceHolder\#getSurface</a>. If the size is not set by the application, it will
+ * be rounded to the nearest supported size less than 1080p, by the camera device.</li>
+ *
+ * <li>For accessing through an OpenGL texture via a <a href=
+ * "http://developer.android.com/reference/android/graphics/SurfaceTexture.html">SurfaceTexture</a>:
+ * Set the size of the SurfaceTexture with <a href=
+ * "http://developer.android.com/reference/android/graphics/SurfaceTexture.html#setDefaultBufferSize(int, int)">
+ * setDefaultBufferSize</a> to be one of the PRIVATE output sizes
+ * returned by {@link ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS}
+ * before creating a Surface from the SurfaceTexture with <a href=
+ * "http://developer.android.com/reference/android/view/Surface.html#Surface(android.graphics.SurfaceTexture)">
+ * Surface\#Surface(SurfaceTexture)</a>. If the size is not set by the application, it will be set to be the
+ * smallest supported size less than 1080p, by the camera device.</li>
+ *
+ * <li>For recording with <a href=
+ * "http://developer.android.com/reference/android/media/MediaCodec.html">
+ * MediaCodec</a>: Call
+ * <a href=
+ * "http://developer.android.com/reference/android/media/MediaCodec.html#createInputSurface()">
+ * android.media.MediaCodec\#createInputSurface</a> after configuring
+ * the media codec to use one of the PRIVATE output sizes
+ * returned by {@link ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS}.
+ * </li>
+ *
+ * <li>For recording with <a href=
+ * "http://developer.android.com/reference/android/media/MediaRecorder.html">
+ * MediaRecorder</a>: Call
+ * <a href="http://developer.android.com/reference/android/media/MediaRecorder.html#getSurface()">
+ * android.media.MediaRecorder\#getSurface</a> after configuring the media recorder to use
+ * one of the PRIVATE output sizes returned by
+ * {@link ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS}, or configuring it to use one of the supported
+ * <a href="http://developer.android.com/reference/android/media/CamcorderProfile.html">
+ * CamcorderProfiles</a>.</li>
+ *
+ * <li>For efficient YUV processing with <a href=
+ * "http://developer.android.com/reference/android/renderscript/package-summary.html">
+ * RenderScript</a>:
+ * Create a RenderScript
+ * <a href="http://developer.android.com/reference/android/renderscript/Allocation.html">
+ * Allocation</a> with a supported YUV
+ * type, the IO_INPUT flag, and one of the YUV output sizes returned by
+ * {@link ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS}.
+ * Then obtain the Surface with
+ * <a href="http://developer.android.com/reference/android/renderscript/Allocation.html#getSurface()">
+ * Allocation\#getSurface</a>.</li>
+ *
+ * <li>For access to RAW, uncompressed YUV, or compressed JPEG data in the application: Create an
+ * {@link AImageReader} object using the {@link AImageReader_new} method with one of the supported
+ * output formats given by {@link ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS}. Then obtain a
+ * ANativeWindow from it with {@link AImageReader_getWindow}.
+ * If the AImageReader size is not set to a supported size, it will be rounded to a supported
+ * size less than 1080p by the camera device.
+ * </li>
+ *
+ * </ul>
+ *
+ * <p>The camera device will query each ANativeWindow's size and formats upon this
+ * call, so they must be set to a valid setting at this time.</p>
+ *
+ * <p>It can take several hundred milliseconds for the session's configuration to complete,
+ * since camera hardware may need to be powered on or reconfigured.</p>
+ *
+ * <p>If a prior ACameraCaptureSession already exists when this method is called, the previous
+ * session will no longer be able to accept new capture requests and will be closed. Any
+ * in-progress capture requests made on the prior session will be completed before it's closed.
+ * To minimize the transition time,
+ * the ACameraCaptureSession_abortCaptures method can be used to discard the remaining
+ * requests for the prior capture session before a new one is created. Note that once the new
+ * session is created, the old one can no longer have its captures aborted.</p>
+ *
+ * <p>Using larger resolution outputs, or more outputs, can result in slower
+ * output rate from the device.</p>
+ *
+ * <p>Configuring a session with an empty list will close the current session, if
+ * any. This can be used to release the current session's target surfaces for another use.</p>
+ *
+ * <p>While any of the sizes from {@link ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS} can be used when
+ * a single output stream is configured, a given camera device may not be able to support all
+ * combination of sizes, formats, and targets when multiple outputs are configured at once. The
+ * tables below list the maximum guaranteed resolutions for combinations of streams and targets,
+ * given the capabilities of the camera device.</p>
+ *
+ * <p>If an application tries to create a session using a set of targets that exceed the limits
+ * described in the below tables, one of three possibilities may occur. First, the session may
+ * be successfully created and work normally. Second, the session may be successfully created,
+ * but the camera device won't meet the frame rate guarantees as described in
+ * {@link ACAMERA_SCALER_AVAILABLE_MIN_FRAME_DURATIONS}. Or third, if the output set
+ * cannot be used at all, session creation will fail entirely, with
+ * {@link ACAMERA_ERROR_STREAM_CONFIGURE_FAIL} being returned.</p>
+ *
+ * <p>For the type column, `PRIV` refers to output format {@link AIMAGE_FORMAT_PRIVATE},
+ * `YUV` refers to output format {@link AIMAGE_FORMAT_YUV_420_888},
+ * `JPEG` refers to output format {@link AIMAGE_FORMAT_JPEG},
+ * and `RAW` refers to output format {@link AIMAGE_FORMAT_RAW16}.</p>
+ *
+ * <p>For the maximum size column, `PREVIEW` refers to the best size match to the
+ * device's screen resolution, or to 1080p `(1920x1080)`, whichever is
+ * smaller. `RECORD` refers to the camera device's maximum supported recording resolution,
+ * as determined by <a href="http://developer.android.com/reference/android/media/CamcorderProfile.html">
+ * android.media.CamcorderProfiles</a>. And `MAXIMUM` refers to the
+ * camera device's maximum output resolution for that format or target from
+ * {@link ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS}.</p>
+ *
+ * <p>To use these tables, determine the number and the formats/targets of outputs needed, and
+ * find the row(s) of the table with those targets. The sizes indicate the maximum set of sizes
+ * that can be used; it is guaranteed that for those targets, the listed sizes and anything
+ * smaller from the list given by {@link ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS} can be
+ * successfully used to create a session. For example, if a row indicates that an 8 megapixel
+ * (MP) YUV_420_888 output can be used together with a 2 MP `PRIV` output, then a session
+ * can be created with targets `[8 MP YUV, 2 MP PRIV]` or targets `[2 MP YUV, 2 MP PRIV]`;
+ * but a session with targets `[8 MP YUV, 4 MP PRIV]`, targets `[4 MP YUV, 4 MP PRIV]`,
+ * or targets `[8 MP PRIV, 2 MP YUV]` would not be guaranteed to work, unless
+ * some other row of the table lists such a combination.</p>
+ *
+ * <p>Legacy devices ({@link ACAMERA_INFO_SUPPORTED_HARDWARE_LEVEL}
+ * `== `{@link ACAMERA_INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY LEGACY}) support at
+ * least the following stream combinations:
+ *
+ * <table>
+ * <tr><th colspan="7">LEGACY-level guaranteed configurations</th></tr>
+ * <tr> <th colspan="2" id="rb">Target 1</th> <th colspan="2" id="rb">Target 2</th> <th colspan="2" id="rb">Target 3</th> <th rowspan="2">Sample use case(s)</th> </tr>
+ * <tr> <th>Type</th><th id="rb">Max size</th> <th>Type</th><th id="rb">Max size</th> <th>Type</th><th id="rb">Max size</th></tr>
+ * <tr> <td>`PRIV`</td><td id="rb">`MAXIMUM`</td> <td colspan="2" id="rb"></td> <td colspan="2" id="rb"></td> <td>Simple preview, GPU video processing, or no-preview video recording.</td> </tr>
+ * <tr> <td>`JPEG`</td><td id="rb">`MAXIMUM`</td> <td colspan="2" id="rb"></td> <td colspan="2" id="rb"></td> <td>No-viewfinder still image capture.</td> </tr>
+ * <tr> <td>`YUV `</td><td id="rb">`MAXIMUM`</td> <td colspan="2" id="rb"></td> <td colspan="2" id="rb"></td> <td>In-application video/image processing.</td> </tr>
+ * <tr> <td>`PRIV`</td><td id="rb">`PREVIEW`</td> <td>`JPEG`</td><td id="rb">`MAXIMUM`</td> <td colspan="2" id="rb"></td> <td>Standard still imaging.</td> </tr>
+ * <tr> <td>`YUV `</td><td id="rb">`PREVIEW`</td> <td>`JPEG`</td><td id="rb">`MAXIMUM`</td> <td colspan="2" id="rb"></td> <td>In-app processing plus still capture.</td> </tr>
+ * <tr> <td>`PRIV`</td><td id="rb">`PREVIEW`</td> <td>`PRIV`</td><td id="rb">`PREVIEW`</td> <td colspan="2" id="rb"></td> <td>Standard recording.</td> </tr>
+ * <tr> <td>`PRIV`</td><td id="rb">`PREVIEW`</td> <td>`YUV `</td><td id="rb">`PREVIEW`</td> <td colspan="2" id="rb"></td> <td>Preview plus in-app processing.</td> </tr>
+ * <tr> <td>`PRIV`</td><td id="rb">`PREVIEW`</td> <td>`YUV `</td><td id="rb">`PREVIEW`</td> <td>`JPEG`</td><td id="rb">`MAXIMUM`</td> <td>Still capture plus in-app processing.</td> </tr>
+ * </table><br>
+ * </p>
+ *
+ * <p>Limited-level ({@link ACAMERA_INFO_SUPPORTED_HARDWARE_LEVEL}
+ * `== `{@link ACAMERA_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED LIMITED}) devices
+ * support at least the following stream combinations in addition to those for
+ * {@link ACAMERA_INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY LEGACY} devices:
+ *
+ * <table>
+ * <tr><th colspan="7">LIMITED-level additional guaranteed configurations</th></tr>
+ * <tr><th colspan="2" id="rb">Target 1</th><th colspan="2" id="rb">Target 2</th><th colspan="2" id="rb">Target 3</th> <th rowspan="2">Sample use case(s)</th> </tr>
+ * <tr><th>Type</th><th id="rb">Max size</th><th>Type</th><th id="rb">Max size</th><th>Type</th><th id="rb">Max size</th></tr>
+ * <tr> <td>`PRIV`</td><td id="rb">`PREVIEW`</td> <td>`PRIV`</td><td id="rb">`RECORD `</td> <td colspan="2" id="rb"></td> <td>High-resolution video recording with preview.</td> </tr>
+ * <tr> <td>`PRIV`</td><td id="rb">`PREVIEW`</td> <td>`YUV `</td><td id="rb">`RECORD `</td> <td colspan="2" id="rb"></td> <td>High-resolution in-app video processing with preview.</td> </tr>
+ * <tr> <td>`YUV `</td><td id="rb">`PREVIEW`</td> <td>`YUV `</td><td id="rb">`RECORD `</td> <td colspan="2" id="rb"></td> <td>Two-input in-app video processing.</td> </tr>
+ * <tr> <td>`PRIV`</td><td id="rb">`PREVIEW`</td> <td>`PRIV`</td><td id="rb">`RECORD `</td> <td>`JPEG`</td><td id="rb">`RECORD `</td> <td>High-resolution recording with video snapshot.</td> </tr>
+ * <tr> <td>`PRIV`</td><td id="rb">`PREVIEW`</td> <td>`YUV `</td><td id="rb">`RECORD `</td> <td>`JPEG`</td><td id="rb">`RECORD `</td> <td>High-resolution in-app processing with video snapshot.</td> </tr>
+ * <tr> <td>`YUV `</td><td id="rb">`PREVIEW`</td> <td>`YUV `</td><td id="rb">`PREVIEW`</td> <td>`JPEG`</td><td id="rb">`MAXIMUM`</td> <td>Two-input in-app processing with still capture.</td> </tr>
+ * </table><br>
+ * </p>
+ *
+ * <p>FULL-level ({@link ACAMERA_INFO_SUPPORTED_HARDWARE_LEVEL}
+ * `== `{@link ACAMERA_INFO_SUPPORTED_HARDWARE_LEVEL_FULL FULL}) devices
+ * support at least the following stream combinations in addition to those for
+ * {@link ACAMERA_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED LIMITED} devices:
+ *
+ * <table>
+ * <tr><th colspan="7">FULL-level additional guaranteed configurations</th></tr>
+ * <tr><th colspan="2" id="rb">Target 1</th><th colspan="2" id="rb">Target 2</th><th colspan="2" id="rb">Target 3</th> <th rowspan="2">Sample use case(s)</th> </tr>
+ * <tr><th>Type</th><th id="rb">Max size</th><th>Type</th><th id="rb">Max size</th><th>Type</th><th id="rb">Max size</th> </tr>
+ * <tr> <td>`PRIV`</td><td id="rb">`PREVIEW`</td> <td>`PRIV`</td><td id="rb">`MAXIMUM`</td> <td colspan="2" id="rb"></td> <td>Maximum-resolution GPU processing with preview.</td> </tr>
+ * <tr> <td>`PRIV`</td><td id="rb">`PREVIEW`</td> <td>`YUV `</td><td id="rb">`MAXIMUM`</td> <td colspan="2" id="rb"></td> <td>Maximum-resolution in-app processing with preview.</td> </tr>
+ * <tr> <td>`YUV `</td><td id="rb">`PREVIEW`</td> <td>`YUV `</td><td id="rb">`MAXIMUM`</td> <td colspan="2" id="rb"></td> <td>Maximum-resolution two-input in-app processing.</td> </tr>
+ * <tr> <td>`PRIV`</td><td id="rb">`PREVIEW`</td> <td>`PRIV`</td><td id="rb">`PREVIEW`</td> <td>`JPEG`</td><td id="rb">`MAXIMUM`</td> <td>Video recording with maximum-size video snapshot</td> </tr>
+ * <tr> <td>`YUV `</td><td id="rb">`640x480`</td> <td>`PRIV`</td><td id="rb">`PREVIEW`</td> <td>`YUV `</td><td id="rb">`MAXIMUM`</td> <td>Standard video recording plus maximum-resolution in-app processing.</td> </tr>
+ * <tr> <td>`YUV `</td><td id="rb">`640x480`</td> <td>`YUV `</td><td id="rb">`PREVIEW`</td> <td>`YUV `</td><td id="rb">`MAXIMUM`</td> <td>Preview plus two-input maximum-resolution in-app processing.</td> </tr>
+ * </table><br>
+ * </p>
+ *
+ * <p>RAW-capability ({@link ACAMERA_REQUEST_AVAILABLE_CAPABILITIES} includes
+ * {@link ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_RAW RAW}) devices additionally support
+ * at least the following stream combinations on both
+ * {@link ACAMERA_INFO_SUPPORTED_HARDWARE_LEVEL_FULL FULL} and
+ * {@link ACAMERA_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED LIMITED} devices:
+ *
+ * <table>
+ * <tr><th colspan="7">RAW-capability additional guaranteed configurations</th></tr>
+ * <tr><th colspan="2" id="rb">Target 1</th><th colspan="2" id="rb">Target 2</th><th colspan="2" id="rb">Target 3</th> <th rowspan="2">Sample use case(s)</th> </tr>
+ * <tr><th>Type</th><th id="rb">Max size</th><th>Type</th><th id="rb">Max size</th><th>Type</th><th id="rb">Max size</th> </tr>
+ * <tr> <td>`RAW `</td><td id="rb">`MAXIMUM`</td> <td colspan="2" id="rb"></td> <td colspan="2" id="rb"></td> <td>No-preview DNG capture.</td> </tr>
+ * <tr> <td>`PRIV`</td><td id="rb">`PREVIEW`</td> <td>`RAW `</td><td id="rb">`MAXIMUM`</td> <td colspan="2" id="rb"></td> <td>Standard DNG capture.</td> </tr>
+ * <tr> <td>`YUV `</td><td id="rb">`PREVIEW`</td> <td>`RAW `</td><td id="rb">`MAXIMUM`</td> <td colspan="2" id="rb"></td> <td>In-app processing plus DNG capture.</td> </tr>
+ * <tr> <td>`PRIV`</td><td id="rb">`PREVIEW`</td> <td>`PRIV`</td><td id="rb">`PREVIEW`</td> <td>`RAW `</td><td id="rb">`MAXIMUM`</td> <td>Video recording with DNG capture.</td> </tr>
+ * <tr> <td>`PRIV`</td><td id="rb">`PREVIEW`</td> <td>`YUV `</td><td id="rb">`PREVIEW`</td> <td>`RAW `</td><td id="rb">`MAXIMUM`</td> <td>Preview with in-app processing and DNG capture.</td> </tr>
+ * <tr> <td>`YUV `</td><td id="rb">`PREVIEW`</td> <td>`YUV `</td><td id="rb">`PREVIEW`</td> <td>`RAW `</td><td id="rb">`MAXIMUM`</td> <td>Two-input in-app processing plus DNG capture.</td> </tr>
+ * <tr> <td>`PRIV`</td><td id="rb">`PREVIEW`</td> <td>`JPEG`</td><td id="rb">`MAXIMUM`</td> <td>`RAW `</td><td id="rb">`MAXIMUM`</td> <td>Still capture with simultaneous JPEG and DNG.</td> </tr>
+ * <tr> <td>`YUV `</td><td id="rb">`PREVIEW`</td> <td>`JPEG`</td><td id="rb">`MAXIMUM`</td> <td>`RAW `</td><td id="rb">`MAXIMUM`</td> <td>In-app processing with simultaneous JPEG and DNG.</td> </tr>
+ * </table><br>
+ * </p>
+ *
+ * <p>BURST-capability ({@link ACAMERA_REQUEST_AVAILABLE_CAPABILITIES} includes
+ * {@link ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE BURST_CAPTURE}) devices
+ * support at least the below stream combinations in addition to those for
+ * {@link ACAMERA_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED LIMITED} devices. Note that all
+ * FULL-level devices support the BURST capability, and the below list is a strict subset of the
+ * list for FULL-level devices, so this table is only relevant for LIMITED-level devices that
+ * support the BURST_CAPTURE capability.
+ *
+ * <table>
+ * <tr><th colspan="5">BURST-capability additional guaranteed configurations</th></tr>
+ * <tr><th colspan="2" id="rb">Target 1</th><th colspan="2" id="rb">Target 2</th><th rowspan="2">Sample use case(s)</th> </tr>
+ * <tr><th>Type</th><th id="rb">Max size</th><th>Type</th><th id="rb">Max size</th> </tr>
+ * <tr> <td>`PRIV`</td><td id="rb">`PREVIEW`</td> <td>`PRIV`</td><td id="rb">`MAXIMUM`</td> <td>Maximum-resolution GPU processing with preview.</td> </tr>
+ * <tr> <td>`PRIV`</td><td id="rb">`PREVIEW`</td> <td>`YUV `</td><td id="rb">`MAXIMUM`</td> <td>Maximum-resolution in-app processing with preview.</td> </tr>
+ * <tr> <td>`YUV `</td><td id="rb">`PREVIEW`</td> <td>`YUV `</td><td id="rb">`MAXIMUM`</td> <td>Maximum-resolution two-input in-app processing.</td> </tr>
+ * </table><br>
+ * </p>
+ *
+ * <p>LEVEL-3 ({@link ACAMERA_INFO_SUPPORTED_HARDWARE_LEVEL}
+ * `== `{@link ACAMERA_INFO_SUPPORTED_HARDWARE_LEVEL_3 LEVEL_3})
+ * devices support at least the following stream combinations in addition to the combinations for
+ * {@link ACAMERA_INFO_SUPPORTED_HARDWARE_LEVEL_FULL FULL} and for
+ * RAW capability ({@link ACAMERA_REQUEST_AVAILABLE_CAPABILITIES} includes
+ * {@link ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_RAW RAW}):
+ *
+ * <table>
+ * <tr><th colspan="11">LEVEL-3 additional guaranteed configurations</th></tr>
+ * <tr><th colspan="2" id="rb">Target 1</th><th colspan="2" id="rb">Target 2</th><th colspan="2" id="rb">Target 3</th><th colspan="2" id="rb">Target 4</th><th rowspan="2">Sample use case(s)</th> </tr>
+ * <tr><th>Type</th><th id="rb">Max size</th><th>Type</th><th id="rb">Max size</th><th>Type</th><th id="rb">Max size</th><th>Type</th><th id="rb">Max size</th> </tr>
+ * <tr> <td>`PRIV`</td><td id="rb">`PREVIEW`</td> <td>`PRIV`</td><td id="rb">`640x480`</td> <td>`YUV`</td><td id="rb">`MAXIMUM`</td> <td>`RAW`</td><td id="rb">`MAXIMUM`</td> <td>In-app viewfinder analysis with dynamic selection of output format.</td> </tr>
+ * <tr> <td>`PRIV`</td><td id="rb">`PREVIEW`</td> <td>`PRIV`</td><td id="rb">`640x480`</td> <td>`JPEG`</td><td id="rb">`MAXIMUM`</td> <td>`RAW`</td><td id="rb">`MAXIMUM`</td> <td>In-app viewfinder analysis with dynamic selection of output format.</td> </tr>
+ * </table><br>
+ * </p>
+ *
+ * <p>Since the capabilities of camera devices vary greatly, a given camera device may support
+ * target combinations with sizes outside of these guarantees, but this can only be tested for
+ * by attempting to create a session with such targets.</p>
+ *
+ * @param device the camera device of interest.
+ * @param outputs the {@link ACaptureSessionOutputContainer} that describes all output streams.
+ * @param callbacks the {@link ACameraCaptureSession_stateCallbacks capture session state callbacks}.
+ * @param session the created {@link ACameraCaptureSession} will be filled here if the method call
+ * succeeds.
+ *
+ * @return <ul>
+ * <li>{@link ACAMERA_OK} if the method call succeeds. The created capture session will be
+ * filled in session argument.</li>
+ * <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if any of device, outputs, callbacks or
+ * session is NULL.</li>
+ * <li>{@link ACAMERA_ERROR_CAMERA_DISCONNECTED} if the camera device is closed.</li>
+ * <li>{@link ACAMERA_ERROR_CAMERA_DEVICE} if the camera device encounters fatal error.</li>
+ * <li>{@link ACAMERA_ERROR_CAMERA_SERVICE} if the camera service encounters fatal error.</li>
+ * <li>{@link ACAMERA_ERROR_UNKNOWN} if the method fails for some other reasons.</li></ul>
*/
camera_status_t ACameraDevice_createCaptureSession(
- ACameraDevice*,
+ ACameraDevice* device,
const ACaptureSessionOutputContainer* outputs,
const ACameraCaptureSession_stateCallbacks* callbacks,
/*out*/ACameraCaptureSession** session);
@@ -110,3 +662,6 @@
#endif
#endif // _NDK_CAMERA_DEVICE_H
+
+/** @} */
+
diff --git a/include/camera/ndk/NdkCameraError.h b/include/camera/ndk/NdkCameraError.h
index 6d671de..e817eef 100644
--- a/include/camera/ndk/NdkCameraError.h
+++ b/include/camera/ndk/NdkCameraError.h
@@ -14,6 +14,14 @@
* limitations under the License.
*/
+/**
+ * @addtogroup Camera
+ * @{
+ */
+
+/**
+ * @file NdkCameraError.h
+ */
/*
* This file defines an NDK API.
@@ -52,6 +60,10 @@
ACAMERA_ERROR_INVALID_OPERATION = ACAMERA_ERROR_BASE - 13,
ACAMERA_ERROR_TIMEOUT = ACAMERA_ERROR_BASE - 14,
ACAMERA_ERROR_STREAM_CONFIGURE_FAIL = ACAMERA_ERROR_BASE - 15,
+ ACAMERA_ERROR_CAMERA_IN_USE = ACAMERA_ERROR_BASE - 16,
+ ACAMERA_ERROR_MAX_CAMERA_IN_USE = ACAMERA_ERROR_BASE - 17,
+ ACAMERA_ERROR_CAMERA_DISABLED = ACAMERA_ERROR_BASE - 18,
+ ACAMERA_ERROR_PERMISSION_DENIED = ACAMERA_ERROR_BASE - 19,
} camera_status_t;
@@ -60,3 +72,5 @@
#endif
#endif // _NDK_CAMERA_ERROR_H
+
+/** @} */
diff --git a/include/camera/ndk/NdkCameraManager.h b/include/camera/ndk/NdkCameraManager.h
index adef6ed..9188e94 100644
--- a/include/camera/ndk/NdkCameraManager.h
+++ b/include/camera/ndk/NdkCameraManager.h
@@ -14,6 +14,15 @@
* limitations under the License.
*/
+/**
+ * @addtogroup Camera
+ * @{
+ */
+
+/**
+ * @file NdkCameraManager.h
+ */
+
/*
* This file defines an NDK API.
* Do not remove methods.
@@ -35,66 +44,230 @@
extern "C" {
#endif
+/**
+ * ACameraManager is an opaque type that provides access to the camera service.
+ *
+ * A pointer can be obtained using the {@link ACameraManager_create} method.
+ */
typedef struct ACameraManager ACameraManager;
/**
- * Create CameraManager instance.
- * The caller must call ACameraManager_delete to free the resources
+ * Create ACameraManager instance.
+ *
+ * <p>The ACameraManager is responsible for
+ * detecting, characterizing, and connecting to {@link ACameraDevice}s.</p>
+ *
+ * <p>The caller must call {@link ACameraManager_delete} to free the resources once it is done
+ * using the ACameraManager instance.</p>
+ *
+ * @return an {@link ACameraManager} instance.
+ *
*/
ACameraManager* ACameraManager_create();
/**
- * delete the ACameraManager and free its resources
+ * <p>Delete the {@link ACameraManager} instance and free its resources. </p>
+ *
+ * @param manager the {@link ACameraManager} instance to be deleted.
*/
-void ACameraManager_delete(ACameraManager*);
+void ACameraManager_delete(ACameraManager* manager);
-// Struct to hold list of camera devices
+/// Struct to hold list of camera devices
typedef struct ACameraIdList {
- int numCameras;
- const char** cameraIds;
+ int numCameras; ///< Number of connected camera devices
+ const char** cameraIds; ///< list of identifiers of connected camera devices
} ACameraIdList;
/**
- * Create/delete a list of camera devices.
- * ACameraManager_getCameraIdList will allocate and return an ACameraIdList.
- * The caller must call ACameraManager_deleteCameraIdList to free the memory
+ * Create a list of currently connected camera devices, including
+ * cameras that may be in use by other camera API clients.
+ *
+ * <p>Non-removable cameras use integers starting at 0 for their
+ * identifiers, while removable cameras have a unique identifier for each
+ * individual device, even if they are the same model.</p>
+ *
+ * <p>ACameraManager_getCameraIdList will allocate and return an {@link ACameraIdList}.
+ * The caller must call {@link ACameraManager_deleteCameraIdList} to free the memory.</p>
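+ *
+ * <p>For illustration only, a minimal C sketch of enumerating camera IDs (error handling
+ * omitted; <code>manager</code> is assumed to be a valid {@link ACameraManager}):</p>
+ * <pre><code>// Hypothetical usage sketch; not a normative part of this API.
+ * ACameraIdList* idList = NULL;
+ * if (ACameraManager_getCameraIdList(manager, &amp;idList) == ACAMERA_OK) {
+ *     for (int i = 0; i &lt; idList->numCameras; i++) {
+ *         const char* id = idList->cameraIds[i]; // inspect each camera ID
+ *     }
+ *     ACameraManager_deleteCameraIdList(idList);
+ * }
+ * </code></pre>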
+ *
+ * @param manager the {@link ACameraManager} of interest
+ * @param cameraIdList the output {@link ACameraIdList} will be filled in here if the method call
+ * succeeds.
+ *
+ * @return <ul>
+ * <li>{@link ACAMERA_OK} if the method call succeeds.</li>
+ * <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if manager or cameraIdList is NULL.</li>
+ * <li>{@link ACAMERA_ERROR_CAMERA_DISCONNECTED} if connection to camera service fails.</li>
+ * <li>{@link ACAMERA_ERROR_NOT_ENOUGH_MEMORY} if allocating memory fails.</li></ul>
*/
-camera_status_t ACameraManager_getCameraIdList(ACameraManager*,
+camera_status_t ACameraManager_getCameraIdList(ACameraManager* manager,
/*out*/ACameraIdList** cameraIdList);
+
+/**
+ * Delete a list of camera devices allocated via {@link ACameraManager_getCameraIdList}.
+ *
+ * @param cameraIdList the {@link ACameraIdList} to be deleted.
+ */
void ACameraManager_deleteCameraIdList(ACameraIdList* cameraIdList);
-
-// Struct to hold camera availability callbacks
+/**
+ * Definition of camera availability callbacks.
+ *
+ * @param context The optional application context provided by user in
+ * {@link ACameraManager_AvailabilityCallbacks}.
+ * @param cameraId The ID of the camera device whose availability is changing. The memory of this
+ * argument is owned by camera framework and will become invalid immediately after
+ * this callback returns.
+ */
typedef void (*ACameraManager_AvailabilityCallback)(void* context, const char* cameraId);
+/**
+ * A listener for camera devices becoming available or unavailable to open.
+ *
+ * <p>Cameras become available when they are no longer in use, or when a new
+ * removable camera is connected. They become unavailable when some
+ * application or service starts using a camera, or when a removable camera
+ * is disconnected.</p>
+ *
+ * @see ACameraManager_registerAvailabilityCallback
+ */
typedef struct ACameraManager_AvailabilityListener {
- void* context; // optional application context.
+ /// Optional application context.
+ void* context;
+ /// Called when a camera becomes available
ACameraManager_AvailabilityCallback onCameraAvailable;
+ /// Called when a camera becomes unavailable
ACameraManager_AvailabilityCallback onCameraUnavailable;
} ACameraManager_AvailabilityCallbacks;
/**
- * register/unregister camera availability callbacks
+ * Register camera availability callbacks.
+ *
+ * <p>onCameraUnavailable will be called whenever a camera device is opened by any camera API client.
+ * Other camera API clients may still be able to open such a camera device, evicting the existing
+ * client if they have higher priority.
+ * See {@link ACameraManager_openCamera} for more details.</p>
+ *
+ * <p>The callbacks will be called on a dedicated thread shared among all ACameraManager
+ * instances.</p>
+ *
+ * <p>Since this callback will be registered with the camera service, remember to unregister it
+ * once it is no longer needed; otherwise the callback will continue to receive events
+ * indefinitely and it may prevent other resources from being released. Specifically, the
+ * callbacks will be invoked independently of the general activity lifecycle and independently
+ * of the state of individual ACameraManager instances.</p>
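+ *
+ * <p>For illustration only, a minimal C sketch of registering availability callbacks
+ * (the functions <code>onAvailable</code> and <code>onUnavailable</code> are hypothetical
+ * application callbacks matching {@link ACameraManager_AvailabilityCallback}):</p>
+ * <pre><code>// Hypothetical usage sketch; not a normative part of this API.
+ * ACameraManager_AvailabilityCallbacks cbs = {
+ *     .context = NULL,                       // optional application context
+ *     .onCameraAvailable = onAvailable,      // called when a camera becomes available
+ *     .onCameraUnavailable = onUnavailable,  // called when a camera becomes unavailable
+ * };
+ * ACameraManager_registerAvailabilityCallback(manager, &amp;cbs);
+ * // ... later, once events are no longer needed:
+ * ACameraManager_unregisterAvailabilityCallback(manager, &amp;cbs);
+ * </code></pre>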
+ *
+ * @param manager the {@link ACameraManager} of interest.
+ * @param callback the {@link ACameraManager_AvailabilityCallbacks} to be registered.
+ *
+ * @return <ul>
+ * <li>{@link ACAMERA_OK} if the method call succeeds.</li>
+ * <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if manager or callback is NULL, or
+ * {@link ACameraManager_AvailabilityCallbacks#onCameraAvailable} or
+ * {@link ACameraManager_AvailabilityCallbacks#onCameraUnavailable} is NULL.</li></ul>
*/
camera_status_t ACameraManager_registerAvailabilityCallback(
- ACameraManager*, const ACameraManager_AvailabilityCallbacks *callback);
-camera_status_t ACameraManager_unregisterAvailabilityCallback(
- ACameraManager*, const ACameraManager_AvailabilityCallbacks *callback);
+ ACameraManager* manager, const ACameraManager_AvailabilityCallbacks* callback);
/**
- * Query the characteristics of a camera.
- * The caller must call ACameraMetadata_free to free the memory of the output characteristics.
+ * Unregister camera availability callbacks.
+ *
+ * <p>Removing a callback that isn't registered has no effect.</p>
+ *
+ * @param manager the {@link ACameraManager} of interest.
+ * @param callback the {@link ACameraManager_AvailabilityCallbacks} to be unregistered.
+ *
+ * @return <ul>
+ * <li>{@link ACAMERA_OK} if the method call succeeds.</li>
+ * <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if callback,
+ * {@link ACameraManager_AvailabilityCallbacks#onCameraAvailable} or
+ * {@link ACameraManager_AvailabilityCallbacks#onCameraUnavailable} is NULL.</li></ul>
+ */
+camera_status_t ACameraManager_unregisterAvailabilityCallback(
+ ACameraManager* manager, const ACameraManager_AvailabilityCallbacks* callback);
+
+/**
+ * Query the capabilities of a camera device. These capabilities are
+ * immutable for a given camera.
+ *
+ * <p>See {@link ACameraMetadata} document and {@link NdkCameraMetadataTags.h} for more details.</p>
+ *
+ * <p>The caller must call {@link ACameraMetadata_free} to free the memory of the output
+ * characteristics.</p>
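+ *
+ * <p>For illustration only, a minimal C sketch of querying and releasing the static
+ * characteristics of a camera (error handling omitted; <code>manager</code> and
+ * <code>cameraId</code> are assumed valid):</p>
+ * <pre><code>// Hypothetical usage sketch; not a normative part of this API.
+ * ACameraMetadata* chars = NULL;
+ * if (ACameraManager_getCameraCharacteristics(manager, cameraId, &amp;chars) == ACAMERA_OK) {
+ *     // ... read entries from chars ...
+ *     ACameraMetadata_free(chars);
+ * }
+ * </code></pre>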
+ *
+ * @param manager the {@link ACameraManager} of interest.
+ * @param cameraId the ID string of the camera device of interest.
+ * @param characteristics the output {@link ACameraMetadata} will be filled here if the method call
+ * succeeds.
+ *
+ * @return <ul>
+ * <li>{@link ACAMERA_OK} if the method call succeeds.</li>
+ * <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if manager, cameraId, or characteristics
+ * is NULL, or cameraId does not match any camera devices connected.</li>
+ * <li>{@link ACAMERA_ERROR_CAMERA_DISCONNECTED} if connection to camera service fails.</li>
+ * <li>{@link ACAMERA_ERROR_NOT_ENOUGH_MEMORY} if allocating memory fails.</li>
+ * <li>{@link ACAMERA_ERROR_UNKNOWN} if the method fails for some other reasons.</li></ul>
*/
camera_status_t ACameraManager_getCameraCharacteristics(
- ACameraManager*, const char *cameraId,
- /*out*/ACameraMetadata **characteristics);
+ ACameraManager* manager, const char* cameraId,
+ /*out*/ACameraMetadata** characteristics);
/**
- * Open a camera device synchronously.
- * The opened camera device will be returned in
+ * Open a connection to a camera with the given ID. The opened camera device will be
+ * returned in the `device` parameter.
+ *
+ * <p>Use {@link ACameraManager_getCameraIdList} to get the list of available camera
+ * devices. Note that even if an id is listed, open may fail if the device
+ * is disconnected between the calls to {@link ACameraManager_getCameraIdList} and
+ * {@link ACameraManager_openCamera}, or if a higher-priority camera API client begins using the
+ * camera device.</p>
+ *
+ * <p>Devices for which the
+ * {@link ACameraManager_AvailabilityCallbacks#onCameraUnavailable} callback has been called due to
+ * the device being in use by a lower-priority, background camera API client can still potentially
+ * be opened by calling this method when the calling camera API client has a higher priority
+ * than the current camera API client using this device. In general, if the top, foreground
+ * activity is running within your application process, your process will be given the highest
+ * priority when accessing the camera, and this method will succeed even if the camera device is
+ * in use by another camera API client. Any lower-priority application that loses control of the
+ * camera in this way will receive an
+ * {@link ACameraDevice_stateCallbacks#onDisconnected} callback.</p>
+ *
+ * <p>Once the camera is successfully opened, the ACameraDevice can then be set up
+ * for operation by calling {@link ACameraDevice_createCaptureSession} and
+ * {@link ACameraDevice_createCaptureRequest}.</p>
+ *
+ * <p>If the camera becomes disconnected after this function call returns,
+ * {@link ACameraDevice_stateCallbacks#onDisconnected} will be called with the
+ * ACameraDevice in the disconnected state.</p>
+ *
+ * <p>If the camera runs into an error after this function call returns,
+ * {@link ACameraDevice_stateCallbacks#onError} will be called with the
+ * ACameraDevice in the error state.</p>
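+ *
+ * <p>For illustration only, a minimal C sketch of opening a camera (error handling omitted;
+ * <code>onDisconnected</code> and <code>onError</code> are hypothetical application
+ * callbacks):</p>
+ * <pre><code>// Hypothetical usage sketch; not a normative part of this API.
+ * ACameraDevice_stateCallbacks deviceCbs = {
+ *     .context = NULL,
+ *     .onDisconnected = onDisconnected,
+ *     .onError = onError,
+ * };
+ * ACameraDevice* device = NULL;
+ * if (ACameraManager_openCamera(manager, cameraId, &amp;deviceCbs, &amp;device) == ACAMERA_OK) {
+ *     // ... create a capture session, submit requests ...
+ *     ACameraDevice_close(device);
+ * }
+ * </code></pre>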
+ *
+ * @param manager the {@link ACameraManager} of interest.
+ * @param cameraId the ID string of the camera device to be opened.
+ * @param callback the {@link ACameraDevice_StateCallbacks} associated with the opened camera device.
+ * @param device the opened {@link ACameraDevice} will be filled here if the method call succeeds.
+ *
+ * @return <ul>
+ * <li>{@link ACAMERA_OK} if the method call succeeds.</li>
+ * <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if manager, cameraId, callback, or device
+ * is NULL, or cameraId does not match any camera devices connected.</li>
+ * <li>{@link ACAMERA_ERROR_CAMERA_DISCONNECTED} if connection to camera service fails.</li>
+ * <li>{@link ACAMERA_ERROR_NOT_ENOUGH_MEMORY} if allocating memory fails.</li>
+ * <li>{@link ACAMERA_ERROR_CAMERA_IN_USE} if camera device is being used by a higher
+ * priority camera API client.</li>
+ * <li>{@link ACAMERA_ERROR_MAX_CAMERA_IN_USE} if the system-wide limit for number of open
+ * cameras or camera resources has been reached, and more camera devices cannot be
+ * opened until previous instances are closed.</li>
+ * <li>{@link ACAMERA_ERROR_CAMERA_DISABLED} if the camera is disabled due to a device
+ * policy, and cannot be opened.</li>
+ * <li>{@link ACAMERA_ERROR_PERMISSION_DENIED} if the application does not have permission
+ * to open the camera.</li>
+ * <li>{@link ACAMERA_ERROR_UNKNOWN} if the method fails for some other reasons.</li></ul>
*/
camera_status_t ACameraManager_openCamera(
- ACameraManager*, const char* cameraId,
+ ACameraManager* manager, const char* cameraId,
ACameraDevice_StateCallbacks* callback,
/*out*/ACameraDevice** device);
@@ -103,3 +276,5 @@
#endif
#endif //_NDK_CAMERA_MANAGER_H
+
+/** @} */
diff --git a/include/camera/ndk/NdkCameraMetadata.h b/include/camera/ndk/NdkCameraMetadata.h
index 9b56a9d..8a8865d 100644
--- a/include/camera/ndk/NdkCameraMetadata.h
+++ b/include/camera/ndk/NdkCameraMetadata.h
@@ -14,6 +14,15 @@
* limitations under the License.
*/
+/**
+ * @addtogroup Camera
+ * @{
+ */
+
+/**
+ * @file NdkCameraMetadata.h
+ */
+
/*
* This file defines an NDK API.
* Do not remove methods.
@@ -119,3 +128,5 @@
#endif
#endif //_NDK_CAMERA_METADATA_H
+
+/** @} */
diff --git a/include/camera/ndk/NdkCameraMetadataTags.h b/include/camera/ndk/NdkCameraMetadataTags.h
index 3ec164c..ec2e159 100644
--- a/include/camera/ndk/NdkCameraMetadataTags.h
+++ b/include/camera/ndk/NdkCameraMetadataTags.h
@@ -96,372 +96,5253 @@
* Main enum for camera metadata tags.
*/
typedef enum acamera_metadata_tag {
+ /**
+ * <p>The mode control selects how the image data is converted from the
+ * sensor's native color into linear sRGB color.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * <li>ACaptureRequest</li>
+ * </ul>
+ *
+ * <p>When auto-white balance (AWB) is enabled with ACAMERA_CONTROL_AWB_MODE, this
+ * control is overridden by the AWB routine. When AWB is disabled, the
+ * application controls how the color mapping is performed.</p>
+ * <p>We define the expected processing pipeline below. For consistency
+ * across devices, this is always the case with TRANSFORM_MATRIX.</p>
+ * <p>When either FULL or HIGH_QUALITY is used, the camera device may
+ * do additional processing but ACAMERA_COLOR_CORRECTION_GAINS and
+ * ACAMERA_COLOR_CORRECTION_TRANSFORM will still be provided by the
+ * camera device (in the results) and be roughly correct.</p>
+ * <p>Switching to TRANSFORM_MATRIX and using the data provided from
+ * FAST or HIGH_QUALITY will yield a picture with the same white point
+ * as what was produced by the camera device in the earlier frame.</p>
+ * <p>The expected processing pipeline is as follows:</p>
+ * <p><img alt="White balance processing pipeline" src="../../../../images/camera2/metadata/android.colorCorrection.mode/processing_pipeline.png" /></p>
+ * <p>The white balance is encoded by two values, a 4-channel white-balance
+ * gain vector (applied in the Bayer domain), and a 3x3 color transform
+ * matrix (applied after demosaic).</p>
+ * <p>The 4-channel white-balance gains are defined as:</p>
+ * <pre><code>ACAMERA_COLOR_CORRECTION_GAINS = [ R G_even G_odd B ]
+ * </code></pre>
+ * <p>where <code>G_even</code> is the gain for green pixels on even rows of the
+ * output, and <code>G_odd</code> is the gain for green pixels on the odd rows.
+ * These may be identical for a given camera device implementation; if
+ * the camera device does not support a separate gain for even/odd green
+ * channels, it will use the <code>G_even</code> value, and write <code>G_odd</code> equal to
+ * <code>G_even</code> in the output result metadata.</p>
+ * <p>The matrices for color transforms are defined as a 9-entry vector:</p>
+ * <pre><code>ACAMERA_COLOR_CORRECTION_TRANSFORM = [ I0 I1 I2 I3 I4 I5 I6 I7 I8 ]
+ * </code></pre>
+ * <p>which define a transform from input sensor colors, <code>P_in = [ r g b ]</code>,
+ * to output linear sRGB, <code>P_out = [ r' g' b' ]</code>,</p>
+ * <p>with colors as follows:</p>
+ * <pre><code>r' = I0r + I1g + I2b
+ * g' = I3r + I4g + I5b
+ * b' = I6r + I7g + I8b
+ * </code></pre>
+ * <p>Both the input and output value ranges must match. Overflow/underflow
+ * values are clipped to fit within the range.</p>
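+ * <p>For illustration only, a minimal C sketch of applying the 3x3 transform above to a
+ * normalized sensor color (the float locals are hypothetical; the real entries arrive in
+ * the result metadata as rationals):</p>
+ * <pre><code>// Hypothetical sketch of the transform math; not a normative part of this API.
+ * float I[9];               // the 9 transform entries, row-major
+ * float r, g, b;            // input sensor color, normalized to [0, 1.0]
+ * float rp = I[0]*r + I[1]*g + I[2]*b;  // r'
+ * float gp = I[3]*r + I[4]*g + I[5]*b;  // g'
+ * float bp = I[6]*r + I[7]*g + I[8]*b;  // b'
+ * </code></pre>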
+ *
+ * @see ACAMERA_COLOR_CORRECTION_GAINS
+ * @see ACAMERA_COLOR_CORRECTION_TRANSFORM
+ * @see ACAMERA_CONTROL_AWB_MODE
+ */
ACAMERA_COLOR_CORRECTION_MODE = // byte (enum)
ACAMERA_COLOR_CORRECTION_START,
+ /**
+ * <p>A color transform matrix to use to transform
+ * from sensor RGB color space to output linear sRGB color space.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * <li>ACaptureRequest</li>
+ * </ul>
+ *
+ * <p>This matrix is either set by the camera device when the request
+ * ACAMERA_COLOR_CORRECTION_MODE is not TRANSFORM_MATRIX, or
+ * directly by the application in the request when the
+ * ACAMERA_COLOR_CORRECTION_MODE is TRANSFORM_MATRIX.</p>
+ * <p>In the latter case, the camera device may round the matrix to account
+ * for precision issues; the final rounded matrix should be reported back
+ * in this matrix result metadata. The transform should keep the magnitude
+ * of the output color values within <code>[0, 1.0]</code> (assuming input color
+ * values are within the normalized range <code>[0, 1.0]</code>), or clipping may occur.</p>
+ * <p>The valid range of each matrix element varies on different devices, but
+ * values within [-1.5, 3.0] are guaranteed not to be clipped.</p>
+ *
+ * @see ACAMERA_COLOR_CORRECTION_MODE
+ */
ACAMERA_COLOR_CORRECTION_TRANSFORM = // rational[3*3]
ACAMERA_COLOR_CORRECTION_START + 1,
+ /**
+ * <p>Gains applying to Bayer raw color channels for
+ * white-balance.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * <li>ACaptureRequest</li>
+ * </ul>
+ *
+ * <p>These per-channel gains are either set by the camera device
+ * when the request ACAMERA_COLOR_CORRECTION_MODE is not
+ * TRANSFORM_MATRIX, or directly by the application in the
+ * request when the ACAMERA_COLOR_CORRECTION_MODE is
+ * TRANSFORM_MATRIX.</p>
+ * <p>The gains in the result metadata are the gains actually
+ * applied by the camera device to the current frame.</p>
+ * <p>The valid range of gains varies on different devices, but gains
+ * between [1.0, 3.0] are guaranteed not to be clipped. Even if a given
+ * device allows gains below 1.0, this is usually not recommended because
+ * this can create color artifacts.</p>
+ *
+ * @see ACAMERA_COLOR_CORRECTION_MODE
+ */
ACAMERA_COLOR_CORRECTION_GAINS = // float[4]
ACAMERA_COLOR_CORRECTION_START + 2,
+ /**
+ * <p>Mode of operation for the chromatic aberration correction algorithm.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * <li>ACaptureRequest</li>
+ * </ul>
+ *
+ * <p>Chromatic (color) aberration is caused by the fact that different wavelengths of light
+ * cannot focus on the same point after exiting from the lens. This metadata defines
+ * the high level control of chromatic aberration correction algorithm, which aims to
+ * minimize the chromatic artifacts that may occur along the object boundaries in an
+ * image.</p>
+ * <p>FAST/HIGH_QUALITY both mean that camera device determined aberration
+ * correction will be applied. HIGH_QUALITY mode indicates that the camera device will
+ * use the highest-quality aberration correction algorithms, even if it slows down
+ * capture rate. FAST means the camera device will not slow down capture rate when
+ * applying aberration correction.</p>
+ * <p>LEGACY devices will always be in FAST mode.</p>
+ */
ACAMERA_COLOR_CORRECTION_ABERRATION_MODE = // byte (enum)
ACAMERA_COLOR_CORRECTION_START + 3,
+ /**
+ * <p>List of aberration correction modes for ACAMERA_COLOR_CORRECTION_ABERRATION_MODE that are
+ * supported by this camera device.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul>
+ *
+ * <p>This key lists the valid modes for ACAMERA_COLOR_CORRECTION_ABERRATION_MODE. If no
+ * aberration correction modes are available for a device, this list will solely include
+ * OFF mode. All camera devices will support either OFF or FAST mode.</p>
+ * <p>Camera devices that support the MANUAL_POST_PROCESSING capability will always list
+ * OFF mode. This includes all FULL level devices.</p>
+ * <p>LEGACY devices will always only support FAST mode.</p>
+ *
+ * @see ACAMERA_COLOR_CORRECTION_ABERRATION_MODE
+ */
ACAMERA_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES = // byte[n]
ACAMERA_COLOR_CORRECTION_START + 4,
ACAMERA_COLOR_CORRECTION_END,
+ /**
+ * <p>The desired setting for the camera device's auto-exposure
+ * algorithm's antibanding compensation.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * <li>ACaptureRequest</li>
+ * </ul>
+ *
+ * <p>Some kinds of lighting fixtures, such as some fluorescent
+ * lights, flicker at the rate of the power supply frequency
+ * (60Hz or 50Hz, depending on country). While this is
+ * typically not noticeable to a person, it can be visible to
+ * a camera device. If a camera sets its exposure time to the
+ * wrong value, the flicker may become visible in the
+ * viewfinder, or in a final captured image as a
+ * set of variable-brightness bands across the image.</p>
+ * <p>Therefore, the auto-exposure routines of camera devices
+ * include antibanding routines that ensure that the chosen
+ * exposure value will not cause such banding. The choice of
+ * exposure time depends on the rate of flicker, which the
+ * camera device can detect automatically, or the expected
+ * rate can be selected by the application using this
+ * control.</p>
+ * <p>A given camera device may not support all of the possible
+ * options for the antibanding mode. The
+ * ACAMERA_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES key contains
+ * the available modes for a given camera device.</p>
+ * <p>AUTO mode is the default if it is available on the given
+ * camera device. When AUTO mode is not available, the
+ * default will be either 50HZ or 60HZ, and both 50HZ
+ * and 60HZ will be available.</p>
+ * <p>If manual exposure control is enabled (by setting
+ * ACAMERA_CONTROL_AE_MODE or ACAMERA_CONTROL_MODE to OFF),
+ * then this setting has no effect, and the application must
+ * ensure it selects exposure times that do not cause banding
+ * issues. The ACAMERA_STATISTICS_SCENE_FLICKER key can assist
+ * the application in this.</p>
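+ * <p>As an illustrative sketch (assuming a valid <code>ACaptureRequest*
+ * request</code> and that 50HZ is listed in
+ * ACAMERA_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES):</p>
+ * <pre><code>
+ * // Request 50Hz antibanding, e.g. when AUTO is not available.
+ * uint8_t mode = ACAMERA_CONTROL_AE_ANTIBANDING_MODE_50HZ;
+ * ACaptureRequest_setEntry_u8(request,
+ *         ACAMERA_CONTROL_AE_ANTIBANDING_MODE, 1, &mode);
+ * </code></pre>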
+ *
+ * @see ACAMERA_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES
+ * @see ACAMERA_CONTROL_AE_MODE
+ * @see ACAMERA_CONTROL_MODE
+ * @see ACAMERA_STATISTICS_SCENE_FLICKER
+ */
ACAMERA_CONTROL_AE_ANTIBANDING_MODE = // byte (enum)
ACAMERA_CONTROL_START,
+ /**
+ * <p>Adjustment to auto-exposure (AE) target image
+ * brightness.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * <li>ACaptureRequest</li>
+ * </ul>
+ *
+ * <p>The adjustment is measured as a count of steps, with the
+ * step size defined by ACAMERA_CONTROL_AE_COMPENSATION_STEP and the
+ * allowed range by ACAMERA_CONTROL_AE_COMPENSATION_RANGE.</p>
+ * <p>For example, if the exposure value (EV) step is 0.333, '6'
+ * will mean an exposure compensation of +2 EV; -3 will mean an
+ * exposure compensation of -1 EV. One EV represents a doubling
+ * of image brightness. Note that this control will only be
+ * effective if ACAMERA_CONTROL_AE_MODE <code>!=</code> OFF. This control
+ * will take effect even when ACAMERA_CONTROL_AE_LOCK <code>== true</code>.</p>
+ * <p>When the exposure compensation value is changed, the camera device
+ * may take several frames to reach the newly requested exposure target.
+ * During that time, ACAMERA_CONTROL_AE_STATE field will be in the SEARCHING
+ * state. Once the new exposure target is reached, ACAMERA_CONTROL_AE_STATE will
+ * change from SEARCHING to either CONVERGED, LOCKED (if AE lock is enabled), or
+ * FLASH_REQUIRED (if the scene is too dark for still capture).</p>
+ *
+ * @see ACAMERA_CONTROL_AE_COMPENSATION_RANGE
+ * @see ACAMERA_CONTROL_AE_COMPENSATION_STEP
+ * @see ACAMERA_CONTROL_AE_LOCK
+ * @see ACAMERA_CONTROL_AE_MODE
+ * @see ACAMERA_CONTROL_AE_STATE
+ */
ACAMERA_CONTROL_AE_EXPOSURE_COMPENSATION = // int32
ACAMERA_CONTROL_START + 1,
+ /**
+ * <p>Whether auto-exposure (AE) is currently locked to its latest
+ * calculated values.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * <li>ACaptureRequest</li>
+ * </ul>
+ *
+ * <p>When set to <code>true</code> (ON), the AE algorithm is locked to its latest parameters,
+ * and will not change exposure settings until the lock is set to <code>false</code> (OFF).</p>
+ * <p>Note that even when AE is locked, the flash may be fired if
+ * the ACAMERA_CONTROL_AE_MODE is ON_AUTO_FLASH /
+ * ON_ALWAYS_FLASH / ON_AUTO_FLASH_REDEYE.</p>
+ * <p>When ACAMERA_CONTROL_AE_EXPOSURE_COMPENSATION is changed, even if the AE lock
+ * is ON, the camera device will still adjust its exposure value.</p>
+ * <p>If AE precapture is triggered (see ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER)
+ * when AE is already locked, the camera device will not change the exposure time
+ * (ACAMERA_SENSOR_EXPOSURE_TIME) and sensitivity (ACAMERA_SENSOR_SENSITIVITY)
+ * parameters. The flash may be fired if the ACAMERA_CONTROL_AE_MODE
+ * is ON_AUTO_FLASH/ON_AUTO_FLASH_REDEYE and the scene is too dark. If the
+ * ACAMERA_CONTROL_AE_MODE is ON_ALWAYS_FLASH, the scene may become overexposed.
+ * Similarly, AE precapture trigger CANCEL has no effect when AE is already locked.</p>
+ * <p>When an AE precapture sequence is triggered, AE unlock will not be able to unlock
+ * the AE if AE is locked by the camera device internally during the precapture metering
+ * sequence. In other words, submitting requests with AE unlock has no effect for an
+ * ongoing precapture metering sequence. Otherwise, the precapture metering sequence
+ * will never succeed in a sequence of preview requests where AE lock is always set
+ * to <code>false</code>.</p>
+ * <p>Since the camera device has a pipeline of in-flight requests, the settings that
+ * get locked do not necessarily correspond to the settings that were present in the
+ * latest capture result received from the camera device, since additional captures
+ * and AE updates may have occurred even before the result was sent out. If an
+ * application is switching between automatic and manual control and wishes to eliminate
+ * any flicker during the switch, the following procedure is recommended:</p>
+ * <ol>
+ * <li>Starting in auto-AE mode:</li>
+ * <li>Lock AE</li>
+ * <li>Wait for the first result to be output that has the AE locked</li>
+ * <li>Copy exposure settings from that result into a request, set the request to manual AE</li>
+ * <li>Submit the capture request, proceed to run manual AE as desired.</li>
+ * </ol>
+ * <p>See ACAMERA_CONTROL_AE_STATE for AE lock related state transition details.</p>
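+ * <p>As an illustrative sketch of step 2 of the procedure above (assuming a
+ * valid repeating <code>ACaptureRequest* request</code>; waiting for the locked
+ * result and copying its exposure settings are omitted here):</p>
+ * <pre><code>
+ * // Lock AE while still in auto-AE mode.
+ * uint8_t aeLock = ACAMERA_CONTROL_AE_LOCK_ON;
+ * ACaptureRequest_setEntry_u8(request, ACAMERA_CONTROL_AE_LOCK, 1, &aeLock);
+ * </code></pre>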
+ *
+ * @see ACAMERA_CONTROL_AE_EXPOSURE_COMPENSATION
+ * @see ACAMERA_CONTROL_AE_MODE
+ * @see ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER
+ * @see ACAMERA_CONTROL_AE_STATE
+ * @see ACAMERA_SENSOR_EXPOSURE_TIME
+ * @see ACAMERA_SENSOR_SENSITIVITY
+ */
ACAMERA_CONTROL_AE_LOCK = // byte (enum)
ACAMERA_CONTROL_START + 2,
+ /**
+ * <p>The desired mode for the camera device's
+ * auto-exposure routine.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * <li>ACaptureRequest</li>
+ * </ul>
+ *
+ * <p>This control is only effective if ACAMERA_CONTROL_MODE is
+ * AUTO.</p>
+ * <p>When set to any of the ON modes, the camera device's
+ * auto-exposure routine is enabled, overriding the
+ * application's selected exposure time, sensor sensitivity,
+ * and frame duration (ACAMERA_SENSOR_EXPOSURE_TIME,
+ * ACAMERA_SENSOR_SENSITIVITY, and
+ * ACAMERA_SENSOR_FRAME_DURATION). If one of the FLASH modes
+ * is selected, the camera device's flash unit controls are
+ * also overridden.</p>
+ * <p>The FLASH modes are only available if the camera device
+ * has a flash unit (ACAMERA_FLASH_INFO_AVAILABLE is <code>true</code>).</p>
+ * <p>If flash TORCH mode is desired, this field must be set to
+ * ON or OFF, and ACAMERA_FLASH_MODE set to TORCH.</p>
+ * <p>When set to any of the ON modes, the values chosen by the
+ * camera device auto-exposure routine for the overridden
+ * fields for a given capture will be available in its
+ * CaptureResult.</p>
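+ * <p>As an illustrative sketch of enabling flash TORCH while keeping
+ * auto-exposure active (assuming a valid <code>ACaptureRequest* request</code>
+ * and a device with a flash unit):</p>
+ * <pre><code>
+ * uint8_t aeMode = ACAMERA_CONTROL_AE_MODE_ON;
+ * uint8_t flashMode = ACAMERA_FLASH_MODE_TORCH;
+ * ACaptureRequest_setEntry_u8(request, ACAMERA_CONTROL_AE_MODE, 1, &aeMode);
+ * ACaptureRequest_setEntry_u8(request, ACAMERA_FLASH_MODE, 1, &flashMode);
+ * </code></pre>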
+ *
+ * @see ACAMERA_CONTROL_MODE
+ * @see ACAMERA_FLASH_INFO_AVAILABLE
+ * @see ACAMERA_FLASH_MODE
+ * @see ACAMERA_SENSOR_EXPOSURE_TIME
+ * @see ACAMERA_SENSOR_FRAME_DURATION
+ * @see ACAMERA_SENSOR_SENSITIVITY
+ */
ACAMERA_CONTROL_AE_MODE = // byte (enum)
ACAMERA_CONTROL_START + 3,
+ /**
+ * <p>List of metering areas to use for auto-exposure adjustment.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * <li>ACaptureRequest</li>
+ * </ul>
+ *
+ * <p>Not available if android.control.maxRegionsAe is 0.
+ * Otherwise will always be present.</p>
+ * <p>The maximum number of regions supported by the device is determined by the value
+ * of android.control.maxRegionsAe.</p>
+ * <p>The coordinate system is based on the active pixel array,
+ * with (0,0) being the top-left pixel in the active pixel array, and
+ * (ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE.width - 1,
+ * ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE.height - 1) being the
+ * bottom-right pixel in the active pixel array.</p>
+ * <p>The weight must be within <code>[0, 1000]</code>, and represents a weight
+ * for every pixel in the area. This means that a large metering area
+ * with the same weight as a smaller area will have more effect in
+ * the metering result. Metering areas can partially overlap and the
+ * camera device will add the weights in the overlap region.</p>
+ * <p>The weights are relative to weights of other exposure metering regions, so if only one
+ * region is used, all non-zero weights will have the same effect. A region with 0
+ * weight is ignored.</p>
+ * <p>If all regions have 0 weight, then no specific metering area needs to be used by the
+ * camera device.</p>
+ * <p>If the metering region is outside the used ACAMERA_SCALER_CROP_REGION returned in
+ * capture result metadata, the camera device will ignore the sections outside the crop
+ * region and output only the intersection rectangle as the metering region in the result
+ * metadata. If the region is entirely outside the crop region, it will be ignored and
+ * not reported in the result metadata.</p>
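+ * <p>Each region is five int32 values: <code>(xmin, ymin, xmax, ymax,
+ * weight)</code>. As an illustrative sketch of metering on a centered region
+ * (assuming a valid <code>ACaptureRequest* request</code> and
+ * <code>width</code>/<code>height</code> taken from
+ * ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE):</p>
+ * <pre><code>
+ * // One metering region: (xmin, ymin, xmax, ymax, weight).
+ * int32_t region[5] = {width / 4, height / 4,
+ *                      3 * width / 4, 3 * height / 4, 1000};
+ * ACaptureRequest_setEntry_i32(request, ACAMERA_CONTROL_AE_REGIONS, 5, region);
+ * </code></pre>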
+ *
+ * @see ACAMERA_SCALER_CROP_REGION
+ * @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE
+ */
ACAMERA_CONTROL_AE_REGIONS = // int32[5*area_count]
ACAMERA_CONTROL_START + 4,
+ /**
+ * <p>Range over which the auto-exposure routine can
+ * adjust the capture frame rate to maintain good
+ * exposure.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * <li>ACaptureRequest</li>
+ * </ul>
+ *
+ * <p>Only constrains auto-exposure (AE) algorithm, not
+ * manual control of ACAMERA_SENSOR_EXPOSURE_TIME and
+ * ACAMERA_SENSOR_FRAME_DURATION.</p>
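+ * <p>As an illustrative sketch of requesting a fixed 30fps rate (assuming a
+ * valid <code>ACaptureRequest* request</code> and that (30, 30) is listed in
+ * ACAMERA_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES):</p>
+ * <pre><code>
+ * int32_t fpsRange[2] = {30, 30};  // {min, max}
+ * ACaptureRequest_setEntry_i32(request,
+ *         ACAMERA_CONTROL_AE_TARGET_FPS_RANGE, 2, fpsRange);
+ * </code></pre>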
+ *
+ * @see ACAMERA_SENSOR_EXPOSURE_TIME
+ * @see ACAMERA_SENSOR_FRAME_DURATION
+ */
ACAMERA_CONTROL_AE_TARGET_FPS_RANGE = // int32[2]
ACAMERA_CONTROL_START + 5,
+ /**
+ * <p>Whether the camera device will trigger a precapture
+ * metering sequence when it processes this request.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * <li>ACaptureRequest</li>
+ * </ul>
+ *
+ * <p>This entry is normally set to IDLE, or is not
+ * included at all in the request settings. When included and
+ * set to START, the camera device will trigger the auto-exposure (AE)
+ * precapture metering sequence.</p>
+ * <p>When set to CANCEL, the camera device will cancel any active
+ * precapture metering trigger, and return to its initial AE state.
+ * If a precapture metering sequence is already completed, and the camera
+ * device has implicitly locked the AE for subsequent still capture, the
+ * CANCEL trigger will unlock the AE and return to its initial AE state.</p>
+ * <p>The precapture sequence should be triggered before starting a
+ * high-quality still capture for final metering decisions to
+ * be made, and for firing pre-capture flash pulses to estimate
+ * scene brightness and required final capture flash power, when
+ * the flash is enabled.</p>
+ * <p>Normally, this entry should be set to START for only a
+ * single request, and the application should wait until the
+ * sequence completes before starting a new one.</p>
+ * <p>When a precapture metering sequence is finished, the camera device
+ * may lock the auto-exposure routine internally to be able to accurately expose the
+ * subsequent still capture image (<code>ACAMERA_CONTROL_CAPTURE_INTENT == STILL_CAPTURE</code>).
+ * For this case, the AE may not resume normal scan if no subsequent still capture is
+ * submitted. To ensure that the AE routine restarts normal scan, the application should
+ * submit a request with <code>ACAMERA_CONTROL_AE_LOCK == true</code>, followed by a request
+ * with <code>ACAMERA_CONTROL_AE_LOCK == false</code>, if the application decides not to submit a
+ * still capture request after the precapture sequence completes. Alternatively, for
+ * API level 23 or newer devices, the CANCEL can be used to unlock the camera device
+ * internally locked AE if the application doesn't submit a still capture request after
+ * the AE precapture trigger. Note that CANCEL was added in API level 23, and must not
+ * be used on devices with earlier API levels.</p>
+ * <p>The exact effect of auto-exposure (AE) precapture trigger
+ * depends on the current AE mode and state; see
+ * ACAMERA_CONTROL_AE_STATE for AE precapture state transition
+ * details.</p>
+ * <p>On LEGACY-level devices, the precapture trigger is not supported;
+ * capturing a high-resolution JPEG image will automatically trigger a
+ * precapture sequence before the high-resolution capture, including
+ * potentially firing a pre-capture flash.</p>
+ * <p>Using the precapture trigger and the auto-focus trigger ACAMERA_CONTROL_AF_TRIGGER
+ * simultaneously is allowed. However, since these triggers often require cooperation between
+ * the auto-focus and auto-exposure routines (for example, the flash may need to be enabled for a
+ * focus sweep), the camera device may delay acting on a later trigger until the previous
+ * trigger has been fully handled. This may lead to longer intervals between the trigger and
+ * changes to ACAMERA_CONTROL_AE_STATE indicating the start of the precapture sequence, for
+ * example.</p>
+ * <p>If both the precapture and the auto-focus trigger are activated on the same request, then
+ * the camera device will complete them in the optimal order for that device.</p>
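+ * <p>As an illustrative sketch of firing the trigger in a single request
+ * (assuming a valid <code>ACaptureRequest* request</code> that is submitted
+ * once, e.g. via ACameraCaptureSession_capture, rather than repeated):</p>
+ * <pre><code>
+ * uint8_t trigger = ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER_START;
+ * ACaptureRequest_setEntry_u8(request,
+ *         ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER, 1, &trigger);
+ * </code></pre>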
+ *
+ * @see ACAMERA_CONTROL_AE_LOCK
+ * @see ACAMERA_CONTROL_AE_STATE
+ * @see ACAMERA_CONTROL_AF_TRIGGER
+ * @see ACAMERA_CONTROL_CAPTURE_INTENT
+ */
ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER = // byte (enum)
ACAMERA_CONTROL_START + 6,
+ /**
+ * <p>Whether auto-focus (AF) is currently enabled, and what
+ * mode it is set to.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * <li>ACaptureRequest</li>
+ * </ul>
+ *
+ * <p>Only effective if ACAMERA_CONTROL_MODE = AUTO and the lens is not fixed focus
+ * (i.e. <code>ACAMERA_LENS_INFO_MINIMUM_FOCUS_DISTANCE > 0</code>). Also note that
+ * when ACAMERA_CONTROL_AE_MODE is OFF, the behavior of AF is device
+ * dependent. It is recommended to lock AF by using ACAMERA_CONTROL_AF_TRIGGER before
+ * setting ACAMERA_CONTROL_AE_MODE to OFF, or set AF mode to OFF when AE is OFF.</p>
+ * <p>If the lens is controlled by the camera device auto-focus algorithm,
+ * the camera device will report the current AF status in ACAMERA_CONTROL_AF_STATE
+ * in result metadata.</p>
+ *
+ * @see ACAMERA_CONTROL_AE_MODE
+ * @see ACAMERA_CONTROL_AF_STATE
+ * @see ACAMERA_CONTROL_AF_TRIGGER
+ * @see ACAMERA_CONTROL_MODE
+ * @see ACAMERA_LENS_INFO_MINIMUM_FOCUS_DISTANCE
+ */
ACAMERA_CONTROL_AF_MODE = // byte (enum)
ACAMERA_CONTROL_START + 7,
+ /**
+ * <p>List of metering areas to use for auto-focus.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * <li>ACaptureRequest</li>
+ * </ul>
+ *
+ * <p>Not available if android.control.maxRegionsAf is 0.
+ * Otherwise will always be present.</p>
+ * <p>The maximum number of focus areas supported by the device is determined by the value
+ * of android.control.maxRegionsAf.</p>
+ * <p>The coordinate system is based on the active pixel array,
+ * with (0,0) being the top-left pixel in the active pixel array, and
+ * (ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE.width - 1,
+ * ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE.height - 1) being the
+ * bottom-right pixel in the active pixel array.</p>
+ * <p>The weight must be within <code>[0, 1000]</code>, and represents a weight
+ * for every pixel in the area. This means that a large metering area
+ * with the same weight as a smaller area will have more effect in
+ * the metering result. Metering areas can partially overlap and the
+ * camera device will add the weights in the overlap region.</p>
+ * <p>The weights are relative to weights of other metering regions, so if only one region
+ * is used, all non-zero weights will have the same effect. A region with 0 weight is
+ * ignored.</p>
+ * <p>If all regions have 0 weight, then no specific metering area needs to be used by the
+ * camera device.</p>
+ * <p>If the metering region is outside the used ACAMERA_SCALER_CROP_REGION returned in
+ * capture result metadata, the camera device will ignore the sections outside the crop
+ * region and output only the intersection rectangle as the metering region in the result
+ * metadata. If the region is entirely outside the crop region, it will be ignored and
+ * not reported in the result metadata.</p>
+ *
+ * @see ACAMERA_SCALER_CROP_REGION
+ * @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE
+ */
ACAMERA_CONTROL_AF_REGIONS = // int32[5*area_count]
ACAMERA_CONTROL_START + 8,
+ /**
+ * <p>Whether the camera device will trigger autofocus for this request.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * <li>ACaptureRequest</li>
+ * </ul>
+ *
+ * <p>This entry is normally set to IDLE, or is not
+ * included at all in the request settings.</p>
+ * <p>When included and set to START, the camera device will trigger the
+ * autofocus algorithm. If autofocus is disabled, this trigger has no effect.</p>
+ * <p>When set to CANCEL, the camera device will cancel any active trigger,
+ * and return to its initial AF state.</p>
+ * <p>Generally, applications should set this entry to START or CANCEL for only a
+ * single capture, and then return it to IDLE (or not set at all). Specifying
+ * START for multiple captures in a row means restarting the AF operation over
+ * and over again.</p>
+ * <p>See ACAMERA_CONTROL_AF_STATE for what the trigger means for each AF mode.</p>
+ * <p>Using the autofocus trigger and the precapture trigger ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER
+ * simultaneously is allowed. However, since these triggers often require cooperation between
+ * the auto-focus and auto-exposure routines (for example, the flash may need to be enabled for a
+ * focus sweep), the camera device may delay acting on a later trigger until the previous
+ * trigger has been fully handled. This may lead to longer intervals between the trigger and
+ * changes to ACAMERA_CONTROL_AF_STATE, for example.</p>
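+ * <p>As an illustrative sketch of starting a single autofocus scan (assuming
+ * a valid <code>ACaptureRequest* request</code> submitted once while an
+ * AUTO-type AF mode is active):</p>
+ * <pre><code>
+ * uint8_t trigger = ACAMERA_CONTROL_AF_TRIGGER_START;
+ * ACaptureRequest_setEntry_u8(request, ACAMERA_CONTROL_AF_TRIGGER, 1, &trigger);
+ * </code></pre>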
+ *
+ * @see ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER
+ * @see ACAMERA_CONTROL_AF_STATE
+ */
ACAMERA_CONTROL_AF_TRIGGER = // byte (enum)
ACAMERA_CONTROL_START + 9,
+ /**
+ * <p>Whether auto-white balance (AWB) is currently locked to its
+ * latest calculated values.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * <li>ACaptureRequest</li>
+ * </ul>
+ *
+ * <p>When set to <code>true</code> (ON), the AWB algorithm is locked to its latest parameters,
+ * and will not change color balance settings until the lock is set to <code>false</code> (OFF).</p>
+ * <p>Since the camera device has a pipeline of in-flight requests, the settings that
+ * get locked do not necessarily correspond to the settings that were present in the
+ * latest capture result received from the camera device, since additional captures
+ * and AWB updates may have occurred even before the result was sent out. If an
+ * application is switching between automatic and manual control and wishes to eliminate
+ * any flicker during the switch, the following procedure is recommended:</p>
+ * <ol>
+ * <li>Starting in auto-AWB mode:</li>
+ * <li>Lock AWB</li>
+ * <li>Wait for the first result to be output that has the AWB locked</li>
+ * <li>Copy AWB settings from that result into a request, set the request to manual AWB</li>
+ * <li>Submit the capture request, proceed to run manual AWB as desired.</li>
+ * </ol>
+ * <p>Note that AWB lock is only meaningful when
+ * ACAMERA_CONTROL_AWB_MODE is in the AUTO mode; in other modes,
+ * AWB is already fixed to a specific setting.</p>
+ * <p>Some LEGACY devices may not support ON; the value is then overridden to OFF.</p>
+ *
+ * @see ACAMERA_CONTROL_AWB_MODE
+ */
ACAMERA_CONTROL_AWB_LOCK = // byte (enum)
ACAMERA_CONTROL_START + 10,
+ /**
+ * <p>Whether auto-white balance (AWB) is currently setting the color
+ * transform fields, and what its illumination target
+ * is.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * <li>ACaptureRequest</li>
+ * </ul>
+ *
+ * <p>This control is only effective if ACAMERA_CONTROL_MODE is AUTO.</p>
+ * <p>When set to the ON mode, the camera device's auto-white balance
+ * routine is enabled, overriding the application's selected
+ * ACAMERA_COLOR_CORRECTION_TRANSFORM, ACAMERA_COLOR_CORRECTION_GAINS and
+ * ACAMERA_COLOR_CORRECTION_MODE. Note that when ACAMERA_CONTROL_AE_MODE
+ * is OFF, the behavior of AWB is device dependent. It is recommended to
+ * also set AWB mode to OFF or lock AWB by using ACAMERA_CONTROL_AWB_LOCK before
+ * setting AE mode to OFF.</p>
+ * <p>When set to the OFF mode, the camera device's auto-white balance
+ * routine is disabled. The application manually controls the white
+ * balance by ACAMERA_COLOR_CORRECTION_TRANSFORM, ACAMERA_COLOR_CORRECTION_GAINS
+ * and ACAMERA_COLOR_CORRECTION_MODE.</p>
+ * <p>When set to any other modes, the camera device's auto-white
+ * balance routine is disabled. The camera device uses each
+ * particular illumination target for white balance
+ * adjustment. The application's values for
+ * ACAMERA_COLOR_CORRECTION_TRANSFORM,
+ * ACAMERA_COLOR_CORRECTION_GAINS and
+ * ACAMERA_COLOR_CORRECTION_MODE are ignored.</p>
+ *
+ * @see ACAMERA_COLOR_CORRECTION_GAINS
+ * @see ACAMERA_COLOR_CORRECTION_MODE
+ * @see ACAMERA_COLOR_CORRECTION_TRANSFORM
+ * @see ACAMERA_CONTROL_AE_MODE
+ * @see ACAMERA_CONTROL_AWB_LOCK
+ * @see ACAMERA_CONTROL_MODE
+ */
ACAMERA_CONTROL_AWB_MODE = // byte (enum)
ACAMERA_CONTROL_START + 11,
+ /**
+ * <p>List of metering areas to use for auto-white-balance illuminant
+ * estimation.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * <li>ACaptureRequest</li>
+ * </ul>
+ *
+ * <p>Not available if android.control.maxRegionsAwb is 0.
+ * Otherwise will always be present.</p>
+ * <p>The maximum number of regions supported by the device is determined by the value
+ * of android.control.maxRegionsAwb.</p>
+ * <p>The coordinate system is based on the active pixel array,
+ * with (0,0) being the top-left pixel in the active pixel array, and
+ * (ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE.width - 1,
+ * ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE.height - 1) being the
+ * bottom-right pixel in the active pixel array.</p>
+ * <p>The weight must be within <code>[0, 1000]</code>, and represents a weight
+ * for every pixel in the area. This means that a large metering area
+ * with the same weight as a smaller area will have more effect in
+ * the metering result. Metering areas can partially overlap and the
+ * camera device will add the weights in the overlap region.</p>
+ * <p>The weights are relative to weights of other white balance metering regions, so if
+ * only one region is used, all non-zero weights will have the same effect. A region with
+ * 0 weight is ignored.</p>
+ * <p>If all regions have 0 weight, then no specific metering area needs to be used by the
+ * camera device.</p>
+ * <p>If the metering region is outside the used ACAMERA_SCALER_CROP_REGION returned in
+ * capture result metadata, the camera device will ignore the sections outside the crop
+ * region and output only the intersection rectangle as the metering region in the result
+ * metadata. If the region is entirely outside the crop region, it will be ignored and
+ * not reported in the result metadata.</p>
+ *
+ * @see ACAMERA_SCALER_CROP_REGION
+ * @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE
+ */
ACAMERA_CONTROL_AWB_REGIONS = // int32[5*area_count]
ACAMERA_CONTROL_START + 12,
+ /**
+ * <p>Information to the camera device 3A (auto-exposure,
+ * auto-focus, auto-white balance) routines about the purpose
+ * of this capture, to help the camera device to decide optimal 3A
+ * strategy.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * <li>ACaptureRequest</li>
+ * </ul>
+ *
+ * <p>This control (except for MANUAL) is only effective if
+ * <code>ACAMERA_CONTROL_MODE != OFF</code> and any 3A routine is active.</p>
+ * <p>ZERO_SHUTTER_LAG will be supported if ACAMERA_REQUEST_AVAILABLE_CAPABILITIES
+ * contains PRIVATE_REPROCESSING or YUV_REPROCESSING. MANUAL will be supported if
+ * ACAMERA_REQUEST_AVAILABLE_CAPABILITIES contains MANUAL_SENSOR. Other intent values are
+ * always supported.</p>
+ *
+ * @see ACAMERA_CONTROL_MODE
+ * @see ACAMERA_REQUEST_AVAILABLE_CAPABILITIES
+ */
ACAMERA_CONTROL_CAPTURE_INTENT = // byte (enum)
ACAMERA_CONTROL_START + 13,
+ /**
+ * <p>A special color effect to apply.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * <li>ACaptureRequest</li>
+ * </ul>
+ *
+ * <p>When this mode is set, a color effect will be applied
+ * to images produced by the camera device. The interpretation
+ * and implementation of these color effects is left to the
+ * implementor of the camera device, and should not be
+ * depended on to be consistent (or present) across all
+ * devices.</p>
+ */
ACAMERA_CONTROL_EFFECT_MODE = // byte (enum)
ACAMERA_CONTROL_START + 14,
+ /**
+ * <p>Overall mode of 3A (auto-exposure, auto-white-balance, auto-focus) control
+ * routines.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * <li>ACaptureRequest</li>
+ * </ul>
+ *
+ * <p>This is a top-level 3A control switch. When set to OFF, all 3A control
+ * by the camera device is disabled. The application must set the fields for
+ * capture parameters itself.</p>
+ * <p>When set to AUTO, the individual algorithm controls in
+ * ACAMERA_CONTROL_* are in effect, such as ACAMERA_CONTROL_AF_MODE.</p>
+ * <p>When set to USE_SCENE_MODE, the individual controls in
+ * ACAMERA_CONTROL_* are mostly disabled, and the camera device implements
+ * one of the scene mode settings (such as ACTION, SUNSET, or PARTY)
+ * as it wishes. The camera device scene mode 3A settings are provided by
+ * {@link android.hardware.camera2.CaptureResult capture results}.</p>
+ * <p>When set to OFF_KEEP_STATE, it is similar to OFF mode; the only difference
+ * is that this frame will not be used by the camera device's background 3A statistics
+ * update, as if this frame were never captured. This mode can be used in the scenario
+ * where the application doesn't want a 3A manual control capture to affect
+ * the subsequent auto 3A capture results.</p>
+ *
+ * @see ACAMERA_CONTROL_AF_MODE
+ */
ACAMERA_CONTROL_MODE = // byte (enum)
ACAMERA_CONTROL_START + 15,
+ /**
+ * <p>Control for which scene mode is currently active.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * <li>ACaptureRequest</li>
+ * </ul>
+ *
+ * <p>Scene modes are custom camera modes optimized for a certain set of conditions and
+ * capture settings.</p>
+ * <p>This is the mode that is active when
+ * <code>ACAMERA_CONTROL_MODE == USE_SCENE_MODE</code>. Aside from FACE_PRIORITY, these modes will
+ * disable ACAMERA_CONTROL_AE_MODE, ACAMERA_CONTROL_AWB_MODE, and ACAMERA_CONTROL_AF_MODE
+ * while in use.</p>
+ * <p>The interpretation and implementation of these scene modes is left
+ * to the implementor of the camera device. Their behavior will not be
+ * consistent across all devices, and any given device may only implement
+ * a subset of these modes.</p>
+ *
+ * @see ACAMERA_CONTROL_AE_MODE
+ * @see ACAMERA_CONTROL_AF_MODE
+ * @see ACAMERA_CONTROL_AWB_MODE
+ * @see ACAMERA_CONTROL_MODE
+ */
ACAMERA_CONTROL_SCENE_MODE = // byte (enum)
ACAMERA_CONTROL_START + 16,
+ /**
+ * <p>Whether video stabilization is
+ * active.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * <li>ACaptureRequest</li>
+ * </ul>
+ *
+ * <p>Video stabilization automatically warps images from
+ * the camera in order to stabilize motion between consecutive frames.</p>
+ * <p>If enabled, video stabilization can modify the
+ * ACAMERA_SCALER_CROP_REGION to keep the video stream stabilized.</p>
+ * <p>Switching between different video stabilization modes may take several
+ * frames to initialize; the camera device will report the current mode
+ * in capture result metadata. For example, when ON mode is requested,
+ * the video stabilization modes in the first several capture results may
+ * still be OFF, and will become ON when initialization is
+ * complete.</p>
+ * <p>In addition, not all recording sizes or frame rates may be supported for
+ * stabilization by a device that reports stabilization support. It is guaranteed
+ * that an output targeting a MediaRecorder or MediaCodec will be stabilized if
+ * the recording resolution is less than or equal to 1920 x 1080 (width less than
+ * or equal to 1920, height less than or equal to 1080), and the recording
+ * frame rate is less than or equal to 30fps. At other sizes, the CaptureResult
+ * ACAMERA_CONTROL_VIDEO_STABILIZATION_MODE field will return
+ * OFF if the recording output is not stabilized, or if there are no output
+ * Surface types that can be stabilized.</p>
+ * <p>If a camera device supports both this mode and OIS
+ * (ACAMERA_LENS_OPTICAL_STABILIZATION_MODE), turning both modes on may
+ * produce undesirable interaction, so it is recommended not to enable
+ * both at the same time.</p>
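+ * <p>As an illustrative sketch of requesting stabilization and checking the
+ * effective mode (assuming a valid <code>ACaptureRequest* request</code> and an
+ * <code>ACameraMetadata* result</code> from the capture result callback):</p>
+ * <pre><code>
+ * uint8_t mode = ACAMERA_CONTROL_VIDEO_STABILIZATION_MODE_ON;
+ * ACaptureRequest_setEntry_u8(request,
+ *         ACAMERA_CONTROL_VIDEO_STABILIZATION_MODE, 1, &mode);
+ * // Later, in the result callback; early results may still report OFF.
+ * ACameraMetadata_const_entry entry;
+ * if (ACameraMetadata_getConstEntry(result,
+ *         ACAMERA_CONTROL_VIDEO_STABILIZATION_MODE, &entry) == ACAMERA_OK) {
+ *     bool stabilized =
+ *         (entry.data.u8[0] == ACAMERA_CONTROL_VIDEO_STABILIZATION_MODE_ON);
+ * }
+ * </code></pre>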
+ *
+ * @see ACAMERA_CONTROL_VIDEO_STABILIZATION_MODE
+ * @see ACAMERA_LENS_OPTICAL_STABILIZATION_MODE
+ * @see ACAMERA_SCALER_CROP_REGION
+ */
ACAMERA_CONTROL_VIDEO_STABILIZATION_MODE = // byte (enum)
ACAMERA_CONTROL_START + 17,
+ /**
+ * <p>List of auto-exposure antibanding modes for ACAMERA_CONTROL_AE_ANTIBANDING_MODE that are
+ * supported by this camera device.</p>
+ *
+ * @see ACAMERA_CONTROL_AE_ANTIBANDING_MODE
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul>
+ *
+ * <p>Not all of the auto-exposure anti-banding modes may be
+ * supported by a given camera device. This field lists the
+ * valid anti-banding modes that the application may request
+ * for this camera device with the
+ * ACAMERA_CONTROL_AE_ANTIBANDING_MODE control.</p>
+ *
+ * @see ACAMERA_CONTROL_AE_ANTIBANDING_MODE
+ */
ACAMERA_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES = // byte[n]
ACAMERA_CONTROL_START + 18,
+ /**
+ * <p>List of auto-exposure modes for ACAMERA_CONTROL_AE_MODE that are supported by this camera
+ * device.</p>
+ *
+ * @see ACAMERA_CONTROL_AE_MODE
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul>
+ *
+ * <p>Not all the auto-exposure modes may be supported by a
+ * given camera device, especially if no flash unit is
+ * available. This entry lists the valid modes for
+ * ACAMERA_CONTROL_AE_MODE for this camera device.</p>
+ * <p>All camera devices support ON, and all camera devices with flash
+ * units support ON_AUTO_FLASH and ON_ALWAYS_FLASH.</p>
+ * <p>FULL mode camera devices always support OFF mode,
+ * which enables application control of camera exposure time,
+ * sensitivity, and frame duration.</p>
+ * <p>LEGACY mode camera devices never support OFF mode.
+ * LIMITED mode devices support OFF if they support the MANUAL_SENSOR
+ * capability.</p>
+ *
+ * @see ACAMERA_CONTROL_AE_MODE
+ */
ACAMERA_CONTROL_AE_AVAILABLE_MODES = // byte[n]
ACAMERA_CONTROL_START + 19,
+ /**
+ * <p>List of frame rate ranges for ACAMERA_CONTROL_AE_TARGET_FPS_RANGE supported by
+ * this camera device.</p>
+ *
+ * @see ACAMERA_CONTROL_AE_TARGET_FPS_RANGE
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul>
+ *
+ * <p>For devices at the LEGACY level or above:</p>
+ * <ul>
+ * <li>
+ * <p>For constant-framerate recording, for each normal
+ * {@link android.media.CamcorderProfile CamcorderProfile}, that is, a
+ * {@link android.media.CamcorderProfile CamcorderProfile} that has
+ * {@link android.media.CamcorderProfile#quality quality} in
+ * the range [{@link android.media.CamcorderProfile#QUALITY_LOW QUALITY_LOW},
+ * {@link android.media.CamcorderProfile#QUALITY_2160P QUALITY_2160P}], if the profile is
+ * supported by the device and has
+ * {@link android.media.CamcorderProfile#videoFrameRate videoFrameRate} <code>x</code>, this list will
+ * always include (<code>x</code>,<code>x</code>).</p>
+ * </li>
+ * <li>
+ * <p>Also, a camera device must either not support any
+ * {@link android.media.CamcorderProfile CamcorderProfile},
+ * or support at least one
+ * normal {@link android.media.CamcorderProfile CamcorderProfile} that has
+ * {@link android.media.CamcorderProfile#videoFrameRate videoFrameRate} <code>x</code> >= 24.</p>
+ * </li>
+ * </ul>
+ * <p>For devices at the LIMITED level or above:</p>
+ * <ul>
+ * <li>For YUV_420_888 burst capture use case, this list will always include (<code>min</code>, <code>max</code>)
+ * and (<code>max</code>, <code>max</code>) where <code>min</code> <= 15 and <code>max</code> = the maximum output frame rate of the
+ * maximum YUV_420_888 output size.</li>
+ * </ul>
+ */
ACAMERA_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES = // int32[2*n]
ACAMERA_CONTROL_START + 20,
+ /**
+ * <p>Maximum and minimum exposure compensation values for
+ * ACAMERA_CONTROL_AE_EXPOSURE_COMPENSATION, in counts of ACAMERA_CONTROL_AE_COMPENSATION_STEP,
+ * that are supported by this camera device.</p>
+ *
+ * @see ACAMERA_CONTROL_AE_COMPENSATION_STEP
+ * @see ACAMERA_CONTROL_AE_EXPOSURE_COMPENSATION
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul>
+ *
+ * <p>None</p>
+ */
ACAMERA_CONTROL_AE_COMPENSATION_RANGE = // int32[2]
ACAMERA_CONTROL_START + 21,
+ /**
+ * <p>Smallest step by which the exposure compensation
+ * can be changed.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul>
+ *
+ * <p>This is the unit for ACAMERA_CONTROL_AE_EXPOSURE_COMPENSATION. For example, if this key has
+ * a value of <code>1/2</code>, then a setting of <code>-2</code> for ACAMERA_CONTROL_AE_EXPOSURE_COMPENSATION means
+ * that the target EV offset for the auto-exposure routine is -1 EV.</p>
+ * <p>One unit of EV compensation changes the brightness of the captured image by a factor
+ * of two. +1 EV doubles the image brightness, while -1 EV halves the image brightness.</p>
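+ * <p>As an illustrative sketch of reading this step from the static
+ * characteristics (assuming a valid <code>ACameraMetadata* chars</code>
+ * obtained from ACameraManager_getCameraCharacteristics):</p>
+ * <pre><code>
+ * ACameraMetadata_const_entry entry;
+ * if (ACameraMetadata_getConstEntry(chars,
+ *         ACAMERA_CONTROL_AE_COMPENSATION_STEP, &entry) == ACAMERA_OK) {
+ *     // EV change per compensation count, e.g. 1/3 EV.
+ *     float evPerCount = (float) entry.data.r[0].numerator /
+ *                        (float) entry.data.r[0].denominator;
+ * }
+ * </code></pre>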
+ *
+ * @see ACAMERA_CONTROL_AE_EXPOSURE_COMPENSATION
+ */
ACAMERA_CONTROL_AE_COMPENSATION_STEP = // rational
ACAMERA_CONTROL_START + 22,
+ /**
+ * <p>List of auto-focus (AF) modes for ACAMERA_CONTROL_AF_MODE that are
+ * supported by this camera device.</p>
+ *
+ * @see ACAMERA_CONTROL_AF_MODE
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul>
+ *
+ * <p>Not all the auto-focus modes may be supported by a
+ * given camera device. This entry lists the valid modes for
+ * ACAMERA_CONTROL_AF_MODE for this camera device.</p>
+ * <p>All LIMITED and FULL mode camera devices will support OFF mode, and all
+ * camera devices with adjustable focuser units
+ * (<code>ACAMERA_LENS_INFO_MINIMUM_FOCUS_DISTANCE > 0</code>) will support AUTO mode.</p>
+ * <p>LEGACY devices will support OFF mode only if they support
+ * focusing to infinity (by also setting ACAMERA_LENS_FOCUS_DISTANCE to
+ * <code>0.0f</code>).</p>
+ *
+ * @see ACAMERA_CONTROL_AF_MODE
+ * @see ACAMERA_LENS_FOCUS_DISTANCE
+ * @see ACAMERA_LENS_INFO_MINIMUM_FOCUS_DISTANCE
+ */
ACAMERA_CONTROL_AF_AVAILABLE_MODES = // byte[n]
ACAMERA_CONTROL_START + 23,
+ /**
+ * <p>List of color effects for ACAMERA_CONTROL_EFFECT_MODE that are supported by this camera
+ * device.</p>
+ *
+ * @see ACAMERA_CONTROL_EFFECT_MODE
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul>
+ *
+ * <p>This list contains the color effect modes that can be applied to
+ * images produced by the camera device.
+ * Implementations are not expected to be consistent across all devices.
+ * If no color effect modes are available for a device, this will only list
+ * OFF.</p>
+ * <p>A color effect will only be applied if
+ * ACAMERA_CONTROL_MODE != OFF. OFF is always included in this list.</p>
+ * <p>This control has no effect on the operation of other control routines such
+ * as auto-exposure, white balance, or focus.</p>
+ *
+ * @see ACAMERA_CONTROL_MODE
+ */
ACAMERA_CONTROL_AVAILABLE_EFFECTS = // byte[n]
ACAMERA_CONTROL_START + 24,
+ /**
+ * <p>List of scene modes for ACAMERA_CONTROL_SCENE_MODE that are supported by this camera
+ * device.</p>
+ *
+ * @see ACAMERA_CONTROL_SCENE_MODE
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul>
+ *
+ * <p>This list contains scene modes that can be set for the camera device.
+ * Only scene modes that have been fully implemented for the
+ * camera device may be included here. Implementations are not expected
+ * to be consistent across all devices.</p>
+ * <p>If no scene modes are supported by the camera device, this
+ * will be set to DISABLED. Otherwise DISABLED will not be listed.</p>
+ * <p>FACE_PRIORITY is always listed if face detection is
+ * supported (i.e. <code>ACAMERA_STATISTICS_INFO_MAX_FACE_COUNT >
+ * 0</code>).</p>
+ *
+ * @see ACAMERA_STATISTICS_INFO_MAX_FACE_COUNT
+ */
ACAMERA_CONTROL_AVAILABLE_SCENE_MODES = // byte[n]
ACAMERA_CONTROL_START + 25,
+ /**
+ * <p>List of video stabilization modes for ACAMERA_CONTROL_VIDEO_STABILIZATION_MODE
+ * that are supported by this camera device.</p>
+ *
+ * @see ACAMERA_CONTROL_VIDEO_STABILIZATION_MODE
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul>
+ *
+ * <p>OFF will always be listed.</p>
+ */
ACAMERA_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES = // byte[n]
ACAMERA_CONTROL_START + 26,
+ /**
+ * <p>List of auto-white-balance modes for ACAMERA_CONTROL_AWB_MODE that are supported by this
+ * camera device.</p>
+ *
+ * @see ACAMERA_CONTROL_AWB_MODE
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul>
+ *
+ * <p>Not all the auto-white-balance modes may be supported by a
+ * given camera device. This entry lists the valid modes for
+ * ACAMERA_CONTROL_AWB_MODE for this camera device.</p>
+ * <p>All camera devices will support ON mode.</p>
+ * <p>Camera devices that support the MANUAL_POST_PROCESSING capability will always support OFF
+ * mode, which enables application control of white balance, by using
+ * ACAMERA_COLOR_CORRECTION_TRANSFORM and ACAMERA_COLOR_CORRECTION_GAINS
+ * (ACAMERA_COLOR_CORRECTION_MODE must be set to TRANSFORM_MATRIX). This includes all
+ * FULL mode camera devices.</p>
+ *
+ * @see ACAMERA_COLOR_CORRECTION_GAINS
+ * @see ACAMERA_COLOR_CORRECTION_MODE
+ * @see ACAMERA_COLOR_CORRECTION_TRANSFORM
+ * @see ACAMERA_CONTROL_AWB_MODE
+ */
ACAMERA_CONTROL_AWB_AVAILABLE_MODES = // byte[n]
ACAMERA_CONTROL_START + 27,
+ /**
+ * <p>List of the maximum number of regions that can be used for metering in
+ * auto-exposure (AE), auto-white balance (AWB), and auto-focus (AF);
+ * this corresponds to the maximum number of elements in
+ * ACAMERA_CONTROL_AE_REGIONS, ACAMERA_CONTROL_AWB_REGIONS,
+ * and ACAMERA_CONTROL_AF_REGIONS.</p>
+ *
+ * @see ACAMERA_CONTROL_AE_REGIONS
+ * @see ACAMERA_CONTROL_AF_REGIONS
+ * @see ACAMERA_CONTROL_AWB_REGIONS
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul>
+ *
+ * <p>None</p>
+ */
ACAMERA_CONTROL_MAX_REGIONS = // int32[3]
ACAMERA_CONTROL_START + 28,
+ /**
+ * <p>Current state of the auto-exposure (AE) algorithm.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * </ul>
+ *
+ * <p>Switching between or enabling AE modes (ACAMERA_CONTROL_AE_MODE) always
+ * resets the AE state to INACTIVE. Similarly, switching between ACAMERA_CONTROL_MODE,
+ * or ACAMERA_CONTROL_SCENE_MODE if <code>ACAMERA_CONTROL_MODE == USE_SCENE_MODE</code>, resets all
+ * the algorithm states to INACTIVE.</p>
+ * <p>The camera device can do several state transitions between two results, if it is
+ * allowed by the state transition table. For example: INACTIVE may never actually be
+ * seen in a result.</p>
+ * <p>The state in the result is the state for this image (in sync with this image): if
+ * AE state becomes CONVERGED, then the image data associated with this result should
+ * be good to use.</p>
+ * <p>Below are state transition tables for different AE modes.</p>
+ * <table>
+ * <thead>
+ * <tr>
+ * <th align="center">State</th>
+ * <th align="center">Transition Cause</th>
+ * <th align="center">New State</th>
+ * <th align="center">Notes</th>
+ * </tr>
+ * </thead>
+ * <tbody>
+ * <tr>
+ * <td align="center">INACTIVE</td>
+ * <td align="center"></td>
+ * <td align="center">INACTIVE</td>
+ * <td align="center">Camera device auto exposure algorithm is disabled</td>
+ * </tr>
+ * </tbody>
+ * </table>
+ * <p>When ACAMERA_CONTROL_AE_MODE is AE_MODE_ON_*:</p>
+ * <table>
+ * <thead>
+ * <tr>
+ * <th align="center">State</th>
+ * <th align="center">Transition Cause</th>
+ * <th align="center">New State</th>
+ * <th align="center">Notes</th>
+ * </tr>
+ * </thead>
+ * <tbody>
+ * <tr>
+ * <td align="center">INACTIVE</td>
+ * <td align="center">Camera device initiates AE scan</td>
+ * <td align="center">SEARCHING</td>
+ * <td align="center">Values changing</td>
+ * </tr>
+ * <tr>
+ * <td align="center">INACTIVE</td>
+ * <td align="center">ACAMERA_CONTROL_AE_LOCK is ON</td>
+ * <td align="center">LOCKED</td>
+ * <td align="center">Values locked</td>
+ * </tr>
+ * <tr>
+ * <td align="center">SEARCHING</td>
+ * <td align="center">Camera device finishes AE scan</td>
+ * <td align="center">CONVERGED</td>
+ * <td align="center">Good values, not changing</td>
+ * </tr>
+ * <tr>
+ * <td align="center">SEARCHING</td>
+ * <td align="center">Camera device finishes AE scan</td>
+ * <td align="center">FLASH_REQUIRED</td>
+ * <td align="center">Converged but too dark w/o flash</td>
+ * </tr>
+ * <tr>
+ * <td align="center">SEARCHING</td>
+ * <td align="center">ACAMERA_CONTROL_AE_LOCK is ON</td>
+ * <td align="center">LOCKED</td>
+ * <td align="center">Values locked</td>
+ * </tr>
+ * <tr>
+ * <td align="center">CONVERGED</td>
+ * <td align="center">Camera device initiates AE scan</td>
+ * <td align="center">SEARCHING</td>
+ * <td align="center">Values changing</td>
+ * </tr>
+ * <tr>
+ * <td align="center">CONVERGED</td>
+ * <td align="center">ACAMERA_CONTROL_AE_LOCK is ON</td>
+ * <td align="center">LOCKED</td>
+ * <td align="center">Values locked</td>
+ * </tr>
+ * <tr>
+ * <td align="center">FLASH_REQUIRED</td>
+ * <td align="center">Camera device initiates AE scan</td>
+ * <td align="center">SEARCHING</td>
+ * <td align="center">Values changing</td>
+ * </tr>
+ * <tr>
+ * <td align="center">FLASH_REQUIRED</td>
+ * <td align="center">ACAMERA_CONTROL_AE_LOCK is ON</td>
+ * <td align="center">LOCKED</td>
+ * <td align="center">Values locked</td>
+ * </tr>
+ * <tr>
+ * <td align="center">LOCKED</td>
+ * <td align="center">ACAMERA_CONTROL_AE_LOCK is OFF</td>
+ * <td align="center">SEARCHING</td>
+ * <td align="center">Values not good after unlock</td>
+ * </tr>
+ * <tr>
+ * <td align="center">LOCKED</td>
+ * <td align="center">ACAMERA_CONTROL_AE_LOCK is OFF</td>
+ * <td align="center">CONVERGED</td>
+ * <td align="center">Values good after unlock</td>
+ * </tr>
+ * <tr>
+ * <td align="center">LOCKED</td>
+ * <td align="center">ACAMERA_CONTROL_AE_LOCK is OFF</td>
+ * <td align="center">FLASH_REQUIRED</td>
+ * <td align="center">Exposure good, but too dark</td>
+ * </tr>
+ * <tr>
+ * <td align="center">PRECAPTURE</td>
+ * <td align="center">Sequence done. ACAMERA_CONTROL_AE_LOCK is OFF</td>
+ * <td align="center">CONVERGED</td>
+ * <td align="center">Ready for high-quality capture</td>
+ * </tr>
+ * <tr>
+ * <td align="center">PRECAPTURE</td>
+ * <td align="center">Sequence done. ACAMERA_CONTROL_AE_LOCK is ON</td>
+ * <td align="center">LOCKED</td>
+ * <td align="center">Ready for high-quality capture</td>
+ * </tr>
+ * <tr>
+ * <td align="center">LOCKED</td>
+ * <td align="center">aeLock is ON and aePrecaptureTrigger is START</td>
+ * <td align="center">LOCKED</td>
+ * <td align="center">Precapture trigger is ignored when AE is already locked</td>
+ * </tr>
+ * <tr>
+ * <td align="center">LOCKED</td>
+ * <td align="center">aeLock is ON and aePrecaptureTrigger is CANCEL</td>
+ * <td align="center">LOCKED</td>
+ * <td align="center">Precapture trigger is ignored when AE is already locked</td>
+ * </tr>
+ * <tr>
+ * <td align="center">Any state (excluding LOCKED)</td>
+ * <td align="center">ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER is START</td>
+ * <td align="center">PRECAPTURE</td>
+ * <td align="center">Start AE precapture metering sequence</td>
+ * </tr>
+ * <tr>
+ * <td align="center">Any state (excluding LOCKED)</td>
+ * <td align="center">ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER is CANCEL</td>
+ * <td align="center">INACTIVE</td>
+ * <td align="center">Currently active precapture metering sequence is canceled</td>
+ * </tr>
+ * </tbody>
+ * </table>
+ * <p>For the above table, the camera device may skip reporting any state changes that happen
+ * without application intervention (i.e. mode switch, trigger, locking). Any state that
+ * can be skipped in that manner is called a transient state.</p>
+ * <p>For example, for the above AE modes (AE_MODE_ON_*), in addition to the state transitions
+ * listed in the above table, it is also legal for the camera device to skip one or more
+ * transient states between two results. See below table for examples:</p>
+ * <table>
+ * <thead>
+ * <tr>
+ * <th align="center">State</th>
+ * <th align="center">Transition Cause</th>
+ * <th align="center">New State</th>
+ * <th align="center">Notes</th>
+ * </tr>
+ * </thead>
+ * <tbody>
+ * <tr>
+ * <td align="center">INACTIVE</td>
+ * <td align="center">Camera device finished AE scan</td>
+ * <td align="center">CONVERGED</td>
+ * <td align="center">Values are already good, transient states are skipped by camera device.</td>
+ * </tr>
+ * <tr>
+ * <td align="center">Any state (excluding LOCKED)</td>
+ * <td align="center">ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER is START, sequence done</td>
+ * <td align="center">FLASH_REQUIRED</td>
+ * <td align="center">Converged but too dark w/o flash after a precapture sequence, transient states are skipped by camera device.</td>
+ * </tr>
+ * <tr>
+ * <td align="center">Any state (excluding LOCKED)</td>
+ * <td align="center">ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER is START, sequence done</td>
+ * <td align="center">CONVERGED</td>
+ * <td align="center">Converged after a precapture sequence, transient states are skipped by camera device.</td>
+ * </tr>
+ * <tr>
+ * <td align="center">Any state (excluding LOCKED)</td>
+ * <td align="center">ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER is CANCEL, converged</td>
+ * <td align="center">FLASH_REQUIRED</td>
+ * <td align="center">Converged but too dark w/o flash after a precapture sequence is canceled, transient states are skipped by camera device.</td>
+ * </tr>
+ * <tr>
+ * <td align="center">Any state (excluding LOCKED)</td>
+ * <td align="center">ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER is CANCEL, converged</td>
+ * <td align="center">CONVERGED</td>
+ * <td align="center">Converged after a precapture sequenceis canceled, transient states are skipped by camera device.</td>
+ * </tr>
+ * <tr>
+ * <td align="center">CONVERGED</td>
+ * <td align="center">Camera device finished AE scan</td>
+ * <td align="center">FLASH_REQUIRED</td>
+ * <td align="center">Converged but too dark w/o flash after a new scan, transient states are skipped by camera device.</td>
+ * </tr>
+ * <tr>
+ * <td align="center">FLASH_REQUIRED</td>
+ * <td align="center">Camera device finished AE scan</td>
+ * <td align="center">CONVERGED</td>
+ * <td align="center">Converged after a new scan, transient states are skipped by camera device.</td>
+ * </tr>
+ * </tbody>
+ * </table>
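+ * <p>As an illustrative sketch of checking this state in a capture result
+ * callback (assuming an <code>ACameraMetadata* result</code> as delivered to
+ * ACameraCaptureSession_captureCallback_result):</p>
+ * <pre><code>
+ * ACameraMetadata_const_entry entry;
+ * if (ACameraMetadata_getConstEntry(result,
+ *         ACAMERA_CONTROL_AE_STATE, &entry) == ACAMERA_OK) {
+ *     uint8_t aeState = entry.data.u8[0];
+ *     if (aeState == ACAMERA_CONTROL_AE_STATE_CONVERGED ||
+ *         aeState == ACAMERA_CONTROL_AE_STATE_FLASH_REQUIRED) {
+ *         // Exposure has settled; safe to issue the still capture.
+ *     }
+ * }
+ * </code></pre>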
+ *
+ * @see ACAMERA_CONTROL_AE_LOCK
+ * @see ACAMERA_CONTROL_AE_MODE
+ * @see ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER
+ * @see ACAMERA_CONTROL_MODE
+ * @see ACAMERA_CONTROL_SCENE_MODE
+ */
ACAMERA_CONTROL_AE_STATE = // byte (enum)
ACAMERA_CONTROL_START + 31,
+ /**
+ * <p>Current state of auto-focus (AF) algorithm.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * </ul>
+ *
+ * <p>Switching between or enabling AF modes (ACAMERA_CONTROL_AF_MODE) always
+ * resets the AF state to INACTIVE. Similarly, switching between ACAMERA_CONTROL_MODE,
+ * or ACAMERA_CONTROL_SCENE_MODE if <code>ACAMERA_CONTROL_MODE == USE_SCENE_MODE</code>, resets all
+ * the algorithm states to INACTIVE.</p>
+ * <p>The camera device can do several state transitions between two results, if it is
+ * allowed by the state transition table. For example: INACTIVE may never actually be
+ * seen in a result.</p>
+ * <p>The state in the result is the state for this image (in sync with this image): if
+ * AF state becomes FOCUSED, then the image data associated with this result should
+ * be sharp.</p>
+ * <p>Below are state transition tables for different AF modes.</p>
+ * <p>When ACAMERA_CONTROL_AF_MODE is AF_MODE_OFF or AF_MODE_EDOF:</p>
+ * <table>
+ * <thead>
+ * <tr>
+ * <th align="center">State</th>
+ * <th align="center">Transition Cause</th>
+ * <th align="center">New State</th>
+ * <th align="center">Notes</th>
+ * </tr>
+ * </thead>
+ * <tbody>
+ * <tr>
+ * <td align="center">INACTIVE</td>
+ * <td align="center"></td>
+ * <td align="center">INACTIVE</td>
+ * <td align="center">Never changes</td>
+ * </tr>
+ * </tbody>
+ * </table>
+ * <p>When ACAMERA_CONTROL_AF_MODE is AF_MODE_AUTO or AF_MODE_MACRO:</p>
+ * <table>
+ * <thead>
+ * <tr>
+ * <th align="center">State</th>
+ * <th align="center">Transition Cause</th>
+ * <th align="center">New State</th>
+ * <th align="center">Notes</th>
+ * </tr>
+ * </thead>
+ * <tbody>
+ * <tr>
+ * <td align="center">INACTIVE</td>
+ * <td align="center">AF_TRIGGER</td>
+ * <td align="center">ACTIVE_SCAN</td>
+ * <td align="center">Start AF sweep, Lens now moving</td>
+ * </tr>
+ * <tr>
+ * <td align="center">ACTIVE_SCAN</td>
+ * <td align="center">AF sweep done</td>
+ * <td align="center">FOCUSED_LOCKED</td>
+ * <td align="center">Focused, Lens now locked</td>
+ * </tr>
+ * <tr>
+ * <td align="center">ACTIVE_SCAN</td>
+ * <td align="center">AF sweep done</td>
+ * <td align="center">NOT_FOCUSED_LOCKED</td>
+ * <td align="center">Not focused, Lens now locked</td>
+ * </tr>
+ * <tr>
+ * <td align="center">ACTIVE_SCAN</td>
+ * <td align="center">AF_CANCEL</td>
+ * <td align="center">INACTIVE</td>
+ * <td align="center">Cancel/reset AF, Lens now locked</td>
+ * </tr>
+ * <tr>
+ * <td align="center">FOCUSED_LOCKED</td>
+ * <td align="center">AF_CANCEL</td>
+ * <td align="center">INACTIVE</td>
+ * <td align="center">Cancel/reset AF</td>
+ * </tr>
+ * <tr>
+ * <td align="center">FOCUSED_LOCKED</td>
+ * <td align="center">AF_TRIGGER</td>
+ * <td align="center">ACTIVE_SCAN</td>
+ * <td align="center">Start new sweep, Lens now moving</td>
+ * </tr>
+ * <tr>
+ * <td align="center">NOT_FOCUSED_LOCKED</td>
+ * <td align="center">AF_CANCEL</td>
+ * <td align="center">INACTIVE</td>
+ * <td align="center">Cancel/reset AF</td>
+ * </tr>
+ * <tr>
+ * <td align="center">NOT_FOCUSED_LOCKED</td>
+ * <td align="center">AF_TRIGGER</td>
+ * <td align="center">ACTIVE_SCAN</td>
+ * <td align="center">Start new sweep, Lens now moving</td>
+ * </tr>
+ * <tr>
+ * <td align="center">Any state</td>
+ * <td align="center">Mode change</td>
+ * <td align="center">INACTIVE</td>
+ * <td align="center"></td>
+ * </tr>
+ * </tbody>
+ * </table>
+ * <p>For the above table, the camera device may skip reporting any state changes that happen
+ * without application intervention (i.e. mode switch, trigger, locking). Any state that
+ * can be skipped in that manner is called a transient state.</p>
+ * <p>For example, for these AF modes (AF_MODE_AUTO and AF_MODE_MACRO), in addition to the
+ * state transitions listed in the above table, it is also legal for the camera device to skip
+ * one or more transient states between two results. See below table for examples:</p>
+ * <table>
+ * <thead>
+ * <tr>
+ * <th align="center">State</th>
+ * <th align="center">Transition Cause</th>
+ * <th align="center">New State</th>
+ * <th align="center">Notes</th>
+ * </tr>
+ * </thead>
+ * <tbody>
+ * <tr>
+ * <td align="center">INACTIVE</td>
+ * <td align="center">AF_TRIGGER</td>
+ * <td align="center">FOCUSED_LOCKED</td>
+ * <td align="center">Focus is already good or good after a scan, lens is now locked.</td>
+ * </tr>
+ * <tr>
+ * <td align="center">INACTIVE</td>
+ * <td align="center">AF_TRIGGER</td>
+ * <td align="center">NOT_FOCUSED_LOCKED</td>
+ * <td align="center">Focus failed after a scan, lens is now locked.</td>
+ * </tr>
+ * <tr>
+ * <td align="center">FOCUSED_LOCKED</td>
+ * <td align="center">AF_TRIGGER</td>
+ * <td align="center">FOCUSED_LOCKED</td>
+ * <td align="center">Focus is already good or good after a scan, lens is now locked.</td>
+ * </tr>
+ * <tr>
+ * <td align="center">NOT_FOCUSED_LOCKED</td>
+ * <td align="center">AF_TRIGGER</td>
+ * <td align="center">FOCUSED_LOCKED</td>
+ * <td align="center">Focus is good after a scan, lens is not locked.</td>
+ * </tr>
+ * </tbody>
+ * </table>
+ * <p>When ACAMERA_CONTROL_AF_MODE is AF_MODE_CONTINUOUS_VIDEO:</p>
+ * <table>
+ * <thead>
+ * <tr>
+ * <th align="center">State</th>
+ * <th align="center">Transition Cause</th>
+ * <th align="center">New State</th>
+ * <th align="center">Notes</th>
+ * </tr>
+ * </thead>
+ * <tbody>
+ * <tr>
+ * <td align="center">INACTIVE</td>
+ * <td align="center">Camera device initiates new scan</td>
+ * <td align="center">PASSIVE_SCAN</td>
+ * <td align="center">Start AF scan, Lens now moving</td>
+ * </tr>
+ * <tr>
+ * <td align="center">INACTIVE</td>
+ * <td align="center">AF_TRIGGER</td>
+ * <td align="center">NOT_FOCUSED_LOCKED</td>
+ * <td align="center">AF state query, Lens now locked</td>
+ * </tr>
+ * <tr>
+ * <td align="center">PASSIVE_SCAN</td>
+ * <td align="center">Camera device completes current scan</td>
+ * <td align="center">PASSIVE_FOCUSED</td>
+ * <td align="center">End AF scan, Lens now locked</td>
+ * </tr>
+ * <tr>
+ * <td align="center">PASSIVE_SCAN</td>
+ * <td align="center">Camera device fails current scan</td>
+ * <td align="center">PASSIVE_UNFOCUSED</td>
+ * <td align="center">End AF scan, Lens now locked</td>
+ * </tr>
+ * <tr>
+ * <td align="center">PASSIVE_SCAN</td>
+ * <td align="center">AF_TRIGGER</td>
+ * <td align="center">FOCUSED_LOCKED</td>
+ * <td align="center">Immediate transition, if focus is good. Lens now locked</td>
+ * </tr>
+ * <tr>
+ * <td align="center">PASSIVE_SCAN</td>
+ * <td align="center">AF_TRIGGER</td>
+ * <td align="center">NOT_FOCUSED_LOCKED</td>
+ * <td align="center">Immediate transition, if focus is bad. Lens now locked</td>
+ * </tr>
+ * <tr>
+ * <td align="center">PASSIVE_SCAN</td>
+ * <td align="center">AF_CANCEL</td>
+ * <td align="center">INACTIVE</td>
+ * <td align="center">Reset lens position, Lens now locked</td>
+ * </tr>
+ * <tr>
+ * <td align="center">PASSIVE_FOCUSED</td>
+ * <td align="center">Camera device initiates new scan</td>
+ * <td align="center">PASSIVE_SCAN</td>
+ * <td align="center">Start AF scan, Lens now moving</td>
+ * </tr>
+ * <tr>
+ * <td align="center">PASSIVE_UNFOCUSED</td>
+ * <td align="center">Camera device initiates new scan</td>
+ * <td align="center">PASSIVE_SCAN</td>
+ * <td align="center">Start AF scan, Lens now moving</td>
+ * </tr>
+ * <tr>
+ * <td align="center">PASSIVE_FOCUSED</td>
+ * <td align="center">AF_TRIGGER</td>
+ * <td align="center">FOCUSED_LOCKED</td>
+ * <td align="center">Immediate transition, lens now locked</td>
+ * </tr>
+ * <tr>
+ * <td align="center">PASSIVE_UNFOCUSED</td>
+ * <td align="center">AF_TRIGGER</td>
+ * <td align="center">NOT_FOCUSED_LOCKED</td>
+ * <td align="center">Immediate transition, lens now locked</td>
+ * </tr>
+ * <tr>
+ * <td align="center">FOCUSED_LOCKED</td>
+ * <td align="center">AF_TRIGGER</td>
+ * <td align="center">FOCUSED_LOCKED</td>
+ * <td align="center">No effect</td>
+ * </tr>
+ * <tr>
+ * <td align="center">FOCUSED_LOCKED</td>
+ * <td align="center">AF_CANCEL</td>
+ * <td align="center">INACTIVE</td>
+ * <td align="center">Restart AF scan</td>
+ * </tr>
+ * <tr>
+ * <td align="center">NOT_FOCUSED_LOCKED</td>
+ * <td align="center">AF_TRIGGER</td>
+ * <td align="center">NOT_FOCUSED_LOCKED</td>
+ * <td align="center">No effect</td>
+ * </tr>
+ * <tr>
+ * <td align="center">NOT_FOCUSED_LOCKED</td>
+ * <td align="center">AF_CANCEL</td>
+ * <td align="center">INACTIVE</td>
+ * <td align="center">Restart AF scan</td>
+ * </tr>
+ * </tbody>
+ * </table>
+ * <p>When ACAMERA_CONTROL_AF_MODE is AF_MODE_CONTINUOUS_PICTURE:</p>
+ * <table>
+ * <thead>
+ * <tr>
+ * <th align="center">State</th>
+ * <th align="center">Transition Cause</th>
+ * <th align="center">New State</th>
+ * <th align="center">Notes</th>
+ * </tr>
+ * </thead>
+ * <tbody>
+ * <tr>
+ * <td align="center">INACTIVE</td>
+ * <td align="center">Camera device initiates new scan</td>
+ * <td align="center">PASSIVE_SCAN</td>
+ * <td align="center">Start AF scan, Lens now moving</td>
+ * </tr>
+ * <tr>
+ * <td align="center">INACTIVE</td>
+ * <td align="center">AF_TRIGGER</td>
+ * <td align="center">NOT_FOCUSED_LOCKED</td>
+ * <td align="center">AF state query, Lens now locked</td>
+ * </tr>
+ * <tr>
+ * <td align="center">PASSIVE_SCAN</td>
+ * <td align="center">Camera device completes current scan</td>
+ * <td align="center">PASSIVE_FOCUSED</td>
+ * <td align="center">End AF scan, Lens now locked</td>
+ * </tr>
+ * <tr>
+ * <td align="center">PASSIVE_SCAN</td>
+ * <td align="center">Camera device fails current scan</td>
+ * <td align="center">PASSIVE_UNFOCUSED</td>
+ * <td align="center">End AF scan, Lens now locked</td>
+ * </tr>
+ * <tr>
+ * <td align="center">PASSIVE_SCAN</td>
+ * <td align="center">AF_TRIGGER</td>
+ * <td align="center">FOCUSED_LOCKED</td>
+ * <td align="center">Eventual transition once the focus is good. Lens now locked</td>
+ * </tr>
+ * <tr>
+ * <td align="center">PASSIVE_SCAN</td>
+ * <td align="center">AF_TRIGGER</td>
+ * <td align="center">NOT_FOCUSED_LOCKED</td>
+ * <td align="center">Eventual transition if cannot find focus. Lens now locked</td>
+ * </tr>
+ * <tr>
+ * <td align="center">PASSIVE_SCAN</td>
+ * <td align="center">AF_CANCEL</td>
+ * <td align="center">INACTIVE</td>
+ * <td align="center">Reset lens position, Lens now locked</td>
+ * </tr>
+ * <tr>
+ * <td align="center">PASSIVE_FOCUSED</td>
+ * <td align="center">Camera device initiates new scan</td>
+ * <td align="center">PASSIVE_SCAN</td>
+ * <td align="center">Start AF scan, Lens now moving</td>
+ * </tr>
+ * <tr>
+ * <td align="center">PASSIVE_UNFOCUSED</td>
+ * <td align="center">Camera device initiates new scan</td>
+ * <td align="center">PASSIVE_SCAN</td>
+ * <td align="center">Start AF scan, Lens now moving</td>
+ * </tr>
+ * <tr>
+ * <td align="center">PASSIVE_FOCUSED</td>
+ * <td align="center">AF_TRIGGER</td>
+ * <td align="center">FOCUSED_LOCKED</td>
+ * <td align="center">Immediate trans. Lens now locked</td>
+ * </tr>
+ * <tr>
+ * <td align="center">PASSIVE_UNFOCUSED</td>
+ * <td align="center">AF_TRIGGER</td>
+ * <td align="center">NOT_FOCUSED_LOCKED</td>
+ * <td align="center">Immediate trans. Lens now locked</td>
+ * </tr>
+ * <tr>
+ * <td align="center">FOCUSED_LOCKED</td>
+ * <td align="center">AF_TRIGGER</td>
+ * <td align="center">FOCUSED_LOCKED</td>
+ * <td align="center">No effect</td>
+ * </tr>
+ * <tr>
+ * <td align="center">FOCUSED_LOCKED</td>
+ * <td align="center">AF_CANCEL</td>
+ * <td align="center">INACTIVE</td>
+ * <td align="center">Restart AF scan</td>
+ * </tr>
+ * <tr>
+ * <td align="center">NOT_FOCUSED_LOCKED</td>
+ * <td align="center">AF_TRIGGER</td>
+ * <td align="center">NOT_FOCUSED_LOCKED</td>
+ * <td align="center">No effect</td>
+ * </tr>
+ * <tr>
+ * <td align="center">NOT_FOCUSED_LOCKED</td>
+ * <td align="center">AF_CANCEL</td>
+ * <td align="center">INACTIVE</td>
+ * <td align="center">Restart AF scan</td>
+ * </tr>
+ * </tbody>
+ * </table>
+ * <p>When switching between AF_MODE_CONTINUOUS_* (CAF modes) and AF_MODE_AUTO/AF_MODE_MACRO
+ * (AUTO modes), the initial INACTIVE or PASSIVE_SCAN states may be skipped by the
+ * camera device. When a trigger is included in a mode switch request, the trigger
+ * will be evaluated in the context of the new mode in the request.
+ * See the table below for examples:</p>
+ * <table>
+ * <thead>
+ * <tr>
+ * <th align="center">State</th>
+ * <th align="center">Transition Cause</th>
+ * <th align="center">New State</th>
+ * <th align="center">Notes</th>
+ * </tr>
+ * </thead>
+ * <tbody>
+ * <tr>
+ * <td align="center">any state</td>
+ * <td align="center">CAF-->AUTO mode switch</td>
+ * <td align="center">INACTIVE</td>
+ * <td align="center">Mode switch without trigger, initial state must be INACTIVE</td>
+ * </tr>
+ * <tr>
+ * <td align="center">any state</td>
+ * <td align="center">CAF-->AUTO mode switch with AF_TRIGGER</td>
+ * <td align="center">trigger-reachable states from INACTIVE</td>
+ * <td align="center">Mode switch with trigger, INACTIVE is skipped</td>
+ * </tr>
+ * <tr>
+ * <td align="center">any state</td>
+ * <td align="center">AUTO-->CAF mode switch</td>
+ * <td align="center">passively reachable states from INACTIVE</td>
+ * <td align="center">Mode switch without trigger, passive transient state is skipped</td>
+ * </tr>
+ * </tbody>
+ * </table>
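+ * <p>As a minimal illustrative sketch (the helper name is hypothetical, and
+ * error handling is trimmed), an application might watch for a locked AF
+ * state in the capture result callback before taking a still capture:</p>
+ * <pre><code>// Assumes declarations from camera/NdkCameraMetadata.h; `result` is the
+ * // metadata delivered to an ACameraCaptureSession_captureCallback_result.
+ * static bool afScanFinished(const ACameraMetadata* result) {
+ *     ACameraMetadata_const_entry entry;
+ *     if (ACameraMetadata_getConstEntry(result,
+ *             ACAMERA_CONTROL_AF_STATE, &entry) != ACAMERA_OK) {
+ *         return false; // AF state not reported for this frame
+ *     }
+ *     uint8_t state = entry.data.u8[0];
+ *     return state == ACAMERA_CONTROL_AF_STATE_FOCUSED_LOCKED ||
+ *            state == ACAMERA_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
+ * }
+ * </code></pre>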
+ *
+ * @see ACAMERA_CONTROL_AF_MODE
+ * @see ACAMERA_CONTROL_MODE
+ * @see ACAMERA_CONTROL_SCENE_MODE
+ */
ACAMERA_CONTROL_AF_STATE = // byte (enum)
ACAMERA_CONTROL_START + 32,
+ /**
+ * <p>Current state of auto-white balance (AWB) algorithm.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * </ul>
+ *
+ * <p>Switching between or enabling AWB modes (ACAMERA_CONTROL_AWB_MODE) always
+ * resets the AWB state to INACTIVE. Similarly, switching between ACAMERA_CONTROL_MODE values,
+ * or between ACAMERA_CONTROL_SCENE_MODE values if <code>ACAMERA_CONTROL_MODE == USE_SCENE_MODE</code>, resets all
+ * the algorithm states to INACTIVE.</p>
+ * <p>The camera device can do several state transitions between two results, if it is
+ * allowed by the state transition table. So INACTIVE may never actually be seen in
+ * a result.</p>
+ * <p>The state in the result is the state for this image (in sync with this image): if
+ * AWB state becomes CONVERGED, then the image data associated with this result should
+ * be good to use.</p>
+ * <p>Below are state transition tables for different AWB modes.</p>
+ * <p>When <code>ACAMERA_CONTROL_AWB_MODE != AWB_MODE_AUTO</code>:</p>
+ * <table>
+ * <thead>
+ * <tr>
+ * <th align="center">State</th>
+ * <th align="center">Transition Cause</th>
+ * <th align="center">New State</th>
+ * <th align="center">Notes</th>
+ * </tr>
+ * </thead>
+ * <tbody>
+ * <tr>
+ * <td align="center">INACTIVE</td>
+ * <td align="center"></td>
+ * <td align="center">INACTIVE</td>
+ * <td align="center">Camera device auto white balance algorithm is disabled</td>
+ * </tr>
+ * </tbody>
+ * </table>
+ * <p>When ACAMERA_CONTROL_AWB_MODE is AWB_MODE_AUTO:</p>
+ * <table>
+ * <thead>
+ * <tr>
+ * <th align="center">State</th>
+ * <th align="center">Transition Cause</th>
+ * <th align="center">New State</th>
+ * <th align="center">Notes</th>
+ * </tr>
+ * </thead>
+ * <tbody>
+ * <tr>
+ * <td align="center">INACTIVE</td>
+ * <td align="center">Camera device initiates AWB scan</td>
+ * <td align="center">SEARCHING</td>
+ * <td align="center">Values changing</td>
+ * </tr>
+ * <tr>
+ * <td align="center">INACTIVE</td>
+ * <td align="center">ACAMERA_CONTROL_AWB_LOCK is ON</td>
+ * <td align="center">LOCKED</td>
+ * <td align="center">Values locked</td>
+ * </tr>
+ * <tr>
+ * <td align="center">SEARCHING</td>
+ * <td align="center">Camera device finishes AWB scan</td>
+ * <td align="center">CONVERGED</td>
+ * <td align="center">Good values, not changing</td>
+ * </tr>
+ * <tr>
+ * <td align="center">SEARCHING</td>
+ * <td align="center">ACAMERA_CONTROL_AWB_LOCK is ON</td>
+ * <td align="center">LOCKED</td>
+ * <td align="center">Values locked</td>
+ * </tr>
+ * <tr>
+ * <td align="center">CONVERGED</td>
+ * <td align="center">Camera device initiates AWB scan</td>
+ * <td align="center">SEARCHING</td>
+ * <td align="center">Values changing</td>
+ * </tr>
+ * <tr>
+ * <td align="center">CONVERGED</td>
+ * <td align="center">ACAMERA_CONTROL_AWB_LOCK is ON</td>
+ * <td align="center">LOCKED</td>
+ * <td align="center">Values locked</td>
+ * </tr>
+ * <tr>
+ * <td align="center">LOCKED</td>
+ * <td align="center">ACAMERA_CONTROL_AWB_LOCK is OFF</td>
+ * <td align="center">SEARCHING</td>
+ * <td align="center">Values not good after unlock</td>
+ * </tr>
+ * </tbody>
+ * </table>
+ * <p>For the above table, the camera device may skip reporting any state changes that happen
+ * without application intervention (i.e. mode switch, trigger, locking). Any state that
+ * can be skipped in that manner is called a transient state.</p>
+ * <p>For example, for this AWB mode (AWB_MODE_AUTO), in addition to the state transitions
+ * listed in the above table, it is also legal for the camera device to skip one or more
+ * transient states between two results. See the table below for examples:</p>
+ * <table>
+ * <thead>
+ * <tr>
+ * <th align="center">State</th>
+ * <th align="center">Transition Cause</th>
+ * <th align="center">New State</th>
+ * <th align="center">Notes</th>
+ * </tr>
+ * </thead>
+ * <tbody>
+ * <tr>
+ * <td align="center">INACTIVE</td>
+ * <td align="center">Camera device finished AWB scan</td>
+ * <td align="center">CONVERGED</td>
+ * <td align="center">Values are already good, transient states are skipped by camera device.</td>
+ * </tr>
+ * <tr>
+ * <td align="center">LOCKED</td>
+ * <td align="center">ACAMERA_CONTROL_AWB_LOCK is OFF</td>
+ * <td align="center">CONVERGED</td>
+ * <td align="center">Values good after unlock, transient states are skipped by camera device.</td>
+ * </tr>
+ * </tbody>
+ * </table>
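+ * <p>As an illustrative sketch only (the helper name is hypothetical), an
+ * application can treat CONVERGED or LOCKED as "white balance is ready"
+ * when deciding whether a frame is usable:</p>
+ * <pre><code>// Assumes declarations from camera/NdkCameraMetadata.h; `result` is the
+ * // metadata delivered to an ACameraCaptureSession_captureCallback_result.
+ * static bool awbReady(const ACameraMetadata* result) {
+ *     ACameraMetadata_const_entry entry;
+ *     if (ACameraMetadata_getConstEntry(result,
+ *             ACAMERA_CONTROL_AWB_STATE, &entry) != ACAMERA_OK) {
+ *         return false; // AWB state not reported for this frame
+ *     }
+ *     uint8_t state = entry.data.u8[0];
+ *     return state == ACAMERA_CONTROL_AWB_STATE_CONVERGED ||
+ *            state == ACAMERA_CONTROL_AWB_STATE_LOCKED;
+ * }
+ * </code></pre>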
+ *
+ * @see ACAMERA_CONTROL_AWB_LOCK
+ * @see ACAMERA_CONTROL_AWB_MODE
+ * @see ACAMERA_CONTROL_MODE
+ * @see ACAMERA_CONTROL_SCENE_MODE
+ */
ACAMERA_CONTROL_AWB_STATE = // byte (enum)
ACAMERA_CONTROL_START + 34,
+ /**
+ * <p>Whether the camera device supports ACAMERA_CONTROL_AE_LOCK</p>
+ *
+ * @see ACAMERA_CONTROL_AE_LOCK
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul>
+ *
+ * <p>Devices with MANUAL_SENSOR capability or BURST_CAPTURE capability will always
+ * list <code>true</code>. This includes FULL devices.</p>
+ */
ACAMERA_CONTROL_AE_LOCK_AVAILABLE = // byte (enum)
ACAMERA_CONTROL_START + 36,
+ /**
+ * <p>Whether the camera device supports ACAMERA_CONTROL_AWB_LOCK</p>
+ *
+ * @see ACAMERA_CONTROL_AWB_LOCK
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul>
+ *
+ * <p>Devices with MANUAL_POST_PROCESSING capability or BURST_CAPTURE capability will
+ * always list <code>true</code>. This includes FULL devices.</p>
+ */
ACAMERA_CONTROL_AWB_LOCK_AVAILABLE = // byte (enum)
ACAMERA_CONTROL_START + 37,
+ /**
+ * <p>List of control modes for ACAMERA_CONTROL_MODE that are supported by this camera
+ * device.</p>
+ *
+ * @see ACAMERA_CONTROL_MODE
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul>
+ *
+ * <p>This list contains control modes that can be set for the camera device.
+ * LEGACY mode devices will always support AUTO mode. LIMITED and FULL
+ * devices will always support OFF and AUTO modes.</p>
+ */
ACAMERA_CONTROL_AVAILABLE_MODES = // byte[n]
ACAMERA_CONTROL_START + 38,
+ /**
+ * <p>Range of boosts for ACAMERA_CONTROL_POST_RAW_SENSITIVITY_BOOST supported
+ * by this camera device.</p>
+ *
+ * @see ACAMERA_CONTROL_POST_RAW_SENSITIVITY_BOOST
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul>
+ *
+ * <p>Devices that support post RAW sensitivity boost will advertise the
+ * ACAMERA_CONTROL_POST_RAW_SENSITIVITY_BOOST key for controlling
+ * post RAW sensitivity boost.</p>
+ * <p>This key will be <code>null</code> for devices that do not support any RAW format
+ * outputs. For devices that do support RAW format outputs, this key will always
+ * be present, and if a device does not support post RAW sensitivity boost, it will
+ * list <code>(100, 100)</code> in this key.</p>
+ *
+ * @see ACAMERA_CONTROL_POST_RAW_SENSITIVITY_BOOST
+ */
ACAMERA_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE = // int32[2]
ACAMERA_CONTROL_START + 39,
+ /**
+ * <p>The amount of additional sensitivity boost applied to output images
+ * after RAW sensor data is captured.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * <li>ACaptureRequest</li>
+ * </ul>
+ *
+ * <p>Some camera devices support additional digital sensitivity boosting in the
+ * camera processing pipeline after sensor RAW image is captured.
+ * Such a boost will be applied to YUV/JPEG format output images but will not
+ * have effect on RAW output formats like RAW_SENSOR, RAW10, RAW12 or RAW_OPAQUE.</p>
+ * <p>This key will be <code>null</code> for devices that do not support any RAW format
+ * outputs. For devices that do support RAW format outputs, this key will always
+ * be present, and if a device does not support post RAW sensitivity boost, it will
+ * list <code>100</code> in this key.</p>
+ * <p>If the camera device cannot apply the exact boost requested, it will reduce the
+ * boost to the nearest supported value.
+ * The final boost value used will be available in the output capture result.</p>
+ * <p>For devices that support post RAW sensitivity boost, the YUV/JPEG output images
+ * of such a device will have a total sensitivity of
+ * <code>ACAMERA_SENSOR_SENSITIVITY * ACAMERA_CONTROL_POST_RAW_SENSITIVITY_BOOST / 100</code>.
+ * The sensitivity of RAW format images will always be <code>ACAMERA_SENSOR_SENSITIVITY</code>.</p>
+ * <p>This control is only effective if ACAMERA_CONTROL_AE_MODE or ACAMERA_CONTROL_MODE is set to
+ * OFF; otherwise the auto-exposure algorithm will override this value.</p>
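+ * <p>For illustration only, a sketch of requesting a 2x boost; the
+ * <code>request</code> handle is assumed to exist in the surrounding
+ * application code, and error handling is trimmed:</p>
+ * <pre><code>// Assumes declarations from camera/NdkCaptureRequest.h; remember that this
+ * // control only takes effect when ACAMERA_CONTROL_AE_MODE or
+ * // ACAMERA_CONTROL_MODE is OFF.
+ * int32_t boost = 200; // 2x digital boost for YUV/JPEG outputs
+ * ACaptureRequest_setEntry_i32(request,
+ *         ACAMERA_CONTROL_POST_RAW_SENSITIVITY_BOOST, 1, &boost);
+ * // The boost actually applied (possibly clamped to the nearest supported
+ * // value) is reported back in the capture result.
+ * </code></pre>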
+ *
+ * @see ACAMERA_CONTROL_AE_MODE
+ * @see ACAMERA_CONTROL_MODE
+ * @see ACAMERA_CONTROL_POST_RAW_SENSITIVITY_BOOST
+ * @see ACAMERA_SENSOR_SENSITIVITY
+ */
ACAMERA_CONTROL_POST_RAW_SENSITIVITY_BOOST = // int32
ACAMERA_CONTROL_START + 40,
ACAMERA_CONTROL_END,
+ /**
+ * <p>Operation mode for edge
+ * enhancement.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * <li>ACaptureRequest</li>
+ * </ul>
+ *
+ * <p>Edge enhancement improves sharpness and details in the captured image. OFF means
+ * no enhancement will be applied by the camera device.</p>
+ * <p>FAST/HIGH_QUALITY both mean camera device determined enhancement
+ * will be applied. HIGH_QUALITY mode indicates that the
+ * camera device will use the highest-quality enhancement algorithms,
+ * even if it slows down capture rate. FAST means the camera device will
+ * not slow down capture rate when applying edge enhancement. FAST may be the same as OFF if
+ * edge enhancement will slow down capture rate. Every output stream will have a similar
+ * amount of enhancement applied.</p>
+ * <p>ZERO_SHUTTER_LAG is meant to be used by applications that maintain a continuous circular
+ * buffer of high-resolution images during preview and reprocess image(s) from that buffer
+ * into a final capture when triggered by the user. In this mode, the camera device applies
+ * edge enhancement to low-resolution streams (below maximum recording resolution) to
+ * maximize preview quality, but does not apply edge enhancement to high-resolution streams,
+ * since those will be reprocessed later if necessary.</p>
+ * <p>For YUV_REPROCESSING, these FAST/HIGH_QUALITY modes both mean that the camera
+ * device will apply FAST/HIGH_QUALITY YUV-domain edge enhancement, respectively.
+ * The camera device may adjust its internal edge enhancement parameters for best
+ * image quality based on the android.reprocess.effectiveExposureFactor, if it is set.</p>
+ */
ACAMERA_EDGE_MODE = // byte (enum)
ACAMERA_EDGE_START,
+ /**
+ * <p>List of edge enhancement modes for ACAMERA_EDGE_MODE that are supported by this camera
+ * device.</p>
+ *
+ * @see ACAMERA_EDGE_MODE
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul>
+ *
+ * <p>Full-capability camera devices must always support OFF; camera devices that support
+ * YUV_REPROCESSING or PRIVATE_REPROCESSING will list ZERO_SHUTTER_LAG; all devices will
+ * list FAST.</p>
+ */
ACAMERA_EDGE_AVAILABLE_EDGE_MODES = // byte[n]
ACAMERA_EDGE_START + 2,
ACAMERA_EDGE_END,
+ /**
+ * <p>The desired mode for the camera device's flash control.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * <li>ACaptureRequest</li>
+ * </ul>
+ *
+ * <p>This control is only effective when flash unit is available
+ * (<code>ACAMERA_FLASH_INFO_AVAILABLE == true</code>).</p>
+ * <p>When this control is used, the ACAMERA_CONTROL_AE_MODE must be set to ON or OFF.
+ * Otherwise, the camera device auto-exposure related flash control (ON_AUTO_FLASH,
+ * ON_ALWAYS_FLASH, or ON_AUTO_FLASH_REDEYE) will override this control.</p>
+ * <p>When set to OFF, the camera device will not fire flash for this capture.</p>
+ * <p>When set to SINGLE, the camera device will fire flash regardless of the camera
+ * device's auto-exposure routine's result. When used in still capture case, this
+ * control should be used along with auto-exposure (AE) precapture metering sequence
+ * (ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER), otherwise, the image may be incorrectly exposed.</p>
+ * <p>When set to TORCH, the flash will be on continuously. This mode can be used
+ * for use cases such as preview, auto-focus assist, still capture, or video recording.</p>
+ * <p>The flash status will be reported by ACAMERA_FLASH_STATE in the capture result metadata.</p>
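+ * <p>As a minimal sketch (illustrative only, with the <code>request</code>
+ * handle assumed and error handling omitted), turning the torch on for a
+ * repeating preview request:</p>
+ * <pre><code>// Assumes declarations from camera/NdkCaptureRequest.h.
+ * uint8_t aeMode = ACAMERA_CONTROL_AE_MODE_ON; // must be ON or OFF for
+ *                                              // ACAMERA_FLASH_MODE to apply
+ * uint8_t flashMode = ACAMERA_FLASH_MODE_TORCH;
+ * ACaptureRequest_setEntry_u8(request, ACAMERA_CONTROL_AE_MODE, 1, &aeMode);
+ * ACaptureRequest_setEntry_u8(request, ACAMERA_FLASH_MODE, 1, &flashMode);
+ * </code></pre>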
+ *
+ * @see ACAMERA_CONTROL_AE_MODE
+ * @see ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER
+ * @see ACAMERA_FLASH_INFO_AVAILABLE
+ * @see ACAMERA_FLASH_STATE
+ */
ACAMERA_FLASH_MODE = // byte (enum)
ACAMERA_FLASH_START + 2,
+ /**
+ * <p>Current state of the flash
+ * unit.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * </ul>
+ *
+ * <p>When the camera device doesn't have a flash unit
+ * (i.e. <code>ACAMERA_FLASH_INFO_AVAILABLE == false</code>), this state will always be UNAVAILABLE.
+ * Other states indicate the current flash status.</p>
+ * <p>In certain conditions, this will be available on LEGACY devices:</p>
+ * <ul>
+ * <li>Flash-less cameras always return UNAVAILABLE.</li>
+ * <li>Using ACAMERA_CONTROL_AE_MODE <code>==</code> ON_ALWAYS_FLASH
+ * will always return FIRED.</li>
+ * <li>Using ACAMERA_FLASH_MODE <code>==</code> TORCH
+ * will always return FIRED.</li>
+ * </ul>
+ * <p>In all other conditions the state will not be available on
+ * LEGACY devices (i.e. it will be <code>null</code>).</p>
+ *
+ * @see ACAMERA_CONTROL_AE_MODE
+ * @see ACAMERA_FLASH_INFO_AVAILABLE
+ * @see ACAMERA_FLASH_MODE
+ */
ACAMERA_FLASH_STATE = // byte (enum)
ACAMERA_FLASH_START + 5,
ACAMERA_FLASH_END,
+ /**
+ * <p>Whether this camera device has a
+ * flash unit.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul>
+ *
+ * <p>Will be <code>false</code> if no flash is available.</p>
+ * <p>If there is no flash unit, none of the flash controls do
+ * anything.</p>
+ */
ACAMERA_FLASH_INFO_AVAILABLE = // byte (enum)
ACAMERA_FLASH_INFO_START,
ACAMERA_FLASH_INFO_END,
+ /**
+ * <p>Operational mode for hot pixel correction.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * <li>ACaptureRequest</li>
+ * </ul>
+ *
+ * <p>Hotpixel correction interpolates out, or otherwise removes, pixels
+ * that do not accurately measure the incoming light (i.e. pixels that
+ * are stuck at an arbitrary value or are oversensitive).</p>
+ */
ACAMERA_HOT_PIXEL_MODE = // byte (enum)
ACAMERA_HOT_PIXEL_START,
+ /**
+ * <p>List of hot pixel correction modes for ACAMERA_HOT_PIXEL_MODE that are supported by this
+ * camera device.</p>
+ *
+ * @see ACAMERA_HOT_PIXEL_MODE
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul>
+ *
+ * <p>FULL mode camera devices will always support FAST.</p>
+ */
ACAMERA_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES = // byte[n]
ACAMERA_HOT_PIXEL_START + 1,
ACAMERA_HOT_PIXEL_END,
+ /**
+ * <p>GPS coordinates to include in output JPEG
+ * EXIF.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * <li>ACaptureRequest</li>
+ * </ul>
+ *
+ * <p>None</p>
+ */
ACAMERA_JPEG_GPS_COORDINATES = // double[3]
ACAMERA_JPEG_START,
+ /**
+ * <p>32 characters describing GPS algorithm to
+ * include in EXIF.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * <li>ACaptureRequest</li>
+ * </ul>
+ *
+ * <p>None</p>
+ */
ACAMERA_JPEG_GPS_PROCESSING_METHOD = // byte
ACAMERA_JPEG_START + 1,
+ /**
+ * <p>Time GPS fix was made to include in
+ * EXIF.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * <li>ACaptureRequest</li>
+ * </ul>
+ *
+ * <p>None</p>
+ */
ACAMERA_JPEG_GPS_TIMESTAMP = // int64
ACAMERA_JPEG_START + 2,
+ /**
+ * <p>The orientation for a JPEG image.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * <li>ACaptureRequest</li>
+ * </ul>
+ *
+ * <p>The clockwise rotation angle in degrees, relative to the orientation
+ * of the camera, that the JPEG picture needs to be rotated by, to be viewed
+ * upright.</p>
+ * <p>Camera devices may either encode this value into the JPEG EXIF header, or
+ * rotate the image data to match this orientation. When the image data is rotated,
+ * the thumbnail data will also be rotated.</p>
+ * <p>Note that this orientation is relative to the orientation of the camera sensor, given
+ * by ACAMERA_SENSOR_ORIENTATION.</p>
+ * <p>To translate from the device orientation given by the Android sensor APIs, the following
+ * sample code may be used:</p>
+ * <pre><code>private int getJpegOrientation(CameraCharacteristics c, int deviceOrientation) {
+ *     if (deviceOrientation == android.view.OrientationEventListener.ORIENTATION_UNKNOWN) return 0;
+ *     int sensorOrientation = c.get(CameraCharacteristics.SENSOR_ORIENTATION);
+ *
+ *     // Round device orientation to a multiple of 90
+ *     deviceOrientation = (deviceOrientation + 45) / 90 * 90;
+ *
+ *     // Reverse device orientation for front-facing cameras
+ *     boolean facingFront = c.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_FRONT;
+ *     if (facingFront) deviceOrientation = -deviceOrientation;
+ *
+ *     // Calculate desired JPEG orientation relative to camera orientation to make
+ *     // the image upright relative to the device orientation
+ *     int jpegOrientation = (sensorOrientation + deviceOrientation + 360) % 360;
+ *
+ *     return jpegOrientation;
+ * }
+ * </code></pre>
+ *
+ * @see ACAMERA_SENSOR_ORIENTATION
+ */
ACAMERA_JPEG_ORIENTATION = // int32
ACAMERA_JPEG_START + 3,
+ /**
+ * <p>Compression quality of the final JPEG
+ * image.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * <li>ACaptureRequest</li>
+ * </ul>
+ *
+ * <p>85-95 is typical usage range.</p>
+ */
ACAMERA_JPEG_QUALITY = // byte
ACAMERA_JPEG_START + 4,
+ /**
+ * <p>Compression quality of JPEG
+ * thumbnail.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * <li>ACaptureRequest</li>
+ * </ul>
+ *
+ * <p>None</p>
+ */
ACAMERA_JPEG_THUMBNAIL_QUALITY = // byte
ACAMERA_JPEG_START + 5,
+ /**
+ * <p>Resolution of embedded JPEG thumbnail.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * <li>ACaptureRequest</li>
+ * </ul>
+ *
+ * <p>When set to (0, 0), the JPEG EXIF will not contain a thumbnail,
+ * but the captured JPEG will still be a valid image.</p>
+ * <p>For best results, when issuing a request for a JPEG image, the thumbnail size selected
+ * should have the same aspect ratio as the main JPEG output.</p>
+ * <p>If the thumbnail image aspect ratio differs from the JPEG primary image aspect
+ * ratio, the camera device creates the thumbnail by cropping it from the primary image.
+ * For example, if the primary image has a 4:3 aspect ratio and the thumbnail image has
+ * a 16:9 aspect ratio, the primary image will be cropped vertically (letterboxed) to
+ * generate the thumbnail image. The thumbnail image will always have a smaller Field
+ * Of View (FOV) than the primary image when aspect ratios differ.</p>
+ * <p>When an ACAMERA_JPEG_ORIENTATION of non-zero degree is requested,
+ * the camera device will handle thumbnail rotation in one of the following ways:</p>
+ * <ul>
+ * <li>Set the {@link android.media.ExifInterface#TAG_ORIENTATION EXIF orientation flag}
+ * and keep jpeg and thumbnail image data unrotated.</li>
+ * <li>Rotate the jpeg and thumbnail image data and not set
+ * {@link android.media.ExifInterface#TAG_ORIENTATION EXIF orientation flag}. In this
+ * case, LIMITED or FULL hardware level devices will report rotated thumbnail size in
+ * capture result, so the width and height will be interchanged if 90 or 270 degree
+ * orientation is requested. LEGACY device will always report unrotated thumbnail
+ * size.</li>
+ * </ul>
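+ * <p>For example (a sketch; the <code>request</code> handle is assumed),
+ * disabling the EXIF thumbnail entirely:</p>
+ * <pre><code>// Assumes declarations from camera/NdkCaptureRequest.h.
+ * int32_t noThumbnail[2] = {0, 0}; // (0, 0) means "no thumbnail"
+ * ACaptureRequest_setEntry_i32(request,
+ *         ACAMERA_JPEG_THUMBNAIL_SIZE, 2, noThumbnail);
+ * </code></pre>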
+ *
+ * @see ACAMERA_JPEG_ORIENTATION
+ */
ACAMERA_JPEG_THUMBNAIL_SIZE = // int32[2]
ACAMERA_JPEG_START + 6,
+ /**
+ * <p>List of JPEG thumbnail sizes for ACAMERA_JPEG_THUMBNAIL_SIZE supported by this
+ * camera device.</p>
+ *
+ * @see ACAMERA_JPEG_THUMBNAIL_SIZE
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul>
+ *
+ * <p>This list will include at least one non-zero resolution, plus <code>(0,0)</code> for indicating no
+ * thumbnail should be generated.</p>
+ * <p>The following conditions will be satisfied for this size list:</p>
+ * <ul>
+ * <li>The sizes will be sorted by increasing pixel area (width x height).
+ * If several resolutions have the same area, they will be sorted by increasing width.</li>
+ * <li>The aspect ratio of the largest thumbnail size will be same as the
+ * aspect ratio of largest JPEG output size in ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS.
+ * The largest size is defined as the size that has the largest pixel area
+ * in a given size list.</li>
+ * <li>Each output JPEG size in ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS will have at least
+ * one corresponding size that has the same aspect ratio in availableThumbnailSizes,
+ * and vice versa.</li>
+ * <li>All non-<code>(0, 0)</code> sizes will have non-zero widths and heights.</li>
+ * </ul>
+ *
+ * @see ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS
+ */
ACAMERA_JPEG_AVAILABLE_THUMBNAIL_SIZES = // int32[2*n]
ACAMERA_JPEG_START + 7,
ACAMERA_JPEG_END,
+ /**
+ * <p>The desired lens aperture size, as a ratio of lens focal length to the
+ * effective aperture diameter.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * <li>ACaptureRequest</li>
+ * </ul>
+ *
+ * <p>Setting this value is only supported on the camera devices that have a variable
+ * aperture lens.</p>
+ * <p>When this is supported and ACAMERA_CONTROL_AE_MODE is OFF,
+ * this can be set along with ACAMERA_SENSOR_EXPOSURE_TIME,
+ * ACAMERA_SENSOR_SENSITIVITY, and ACAMERA_SENSOR_FRAME_DURATION
+ * to achieve manual exposure control.</p>
+ * <p>The requested aperture value may take several frames to reach the
+ * requested value; the camera device will report the current (intermediate)
+ * aperture size in capture result metadata while the aperture is changing.
+ * While the aperture is still changing, ACAMERA_LENS_STATE will be set to MOVING.</p>
+ * <p>When this is supported and ACAMERA_CONTROL_AE_MODE is one of
+ * the ON modes, this will be overridden by the camera device
+ * auto-exposure algorithm, the overridden values are then provided
+ * back to the user in the corresponding result.</p>
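+ * <p>As an illustrative sketch of the manual-exposure combination described
+ * above (the values are arbitrary, the <code>request</code> handle is
+ * assumed, and each value must be checked against the device's advertised
+ * ranges first):</p>
+ * <pre><code>// Assumes declarations from camera/NdkCaptureRequest.h.
+ * uint8_t aeOff = ACAMERA_CONTROL_AE_MODE_OFF;
+ * int64_t exposureNs = 10000000; // 10 ms
+ * int32_t sensitivity = 400;     // ISO 400
+ * float aperture = 2.8f;         // must appear in
+ *                                // ACAMERA_LENS_INFO_AVAILABLE_APERTURES
+ * ACaptureRequest_setEntry_u8(request, ACAMERA_CONTROL_AE_MODE, 1, &aeOff);
+ * ACaptureRequest_setEntry_i64(request, ACAMERA_SENSOR_EXPOSURE_TIME, 1, &exposureNs);
+ * ACaptureRequest_setEntry_i32(request, ACAMERA_SENSOR_SENSITIVITY, 1, &sensitivity);
+ * ACaptureRequest_setEntry_float(request, ACAMERA_LENS_APERTURE, 1, &aperture);
+ * </code></pre>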
+ *
+ * @see ACAMERA_CONTROL_AE_MODE
+ * @see ACAMERA_LENS_STATE
+ * @see ACAMERA_SENSOR_EXPOSURE_TIME
+ * @see ACAMERA_SENSOR_FRAME_DURATION
+ * @see ACAMERA_SENSOR_SENSITIVITY
+ */
ACAMERA_LENS_APERTURE = // float
ACAMERA_LENS_START,
+ /**
+ * <p>The desired setting for the lens neutral density filter(s).</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * <li>ACaptureRequest</li>
+ * </ul>
+ *
+ * <p>This control will not be supported on most camera devices.</p>
+ * <p>Lens filters are typically used to lower the amount of light the
+ * sensor is exposed to (measured in steps of EV). As used here, an EV
+ * step is the standard logarithmic representation, which is
+ * non-negative and inversely proportional to the amount of light
+ * hitting the sensor. For example, setting this to 0 would result
+ * in no reduction of the incoming light, and setting this to 2 would
+ * mean that the filter is set to reduce incoming light by two stops
+ * (allowing 1/4 of the prior amount of light to the sensor).</p>
+ * <p>It may take several frames before the lens filter density changes
+ * to the requested value. While the filter density is still changing,
+ * ACAMERA_LENS_STATE will be set to MOVING.</p>
+ *
+ * @see ACAMERA_LENS_STATE
+ */
ACAMERA_LENS_FILTER_DENSITY = // float
ACAMERA_LENS_START + 1,
+ /**
+ * <p>The desired lens focal length; used for optical zoom.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * <li>ACaptureRequest</li>
+ * </ul>
+ *
+ * <p>This setting controls the physical focal length of the camera
+ * device's lens. Changing the focal length changes the field of
+ * view of the camera device, and is usually used for optical zoom.</p>
+ * <p>Like ACAMERA_LENS_FOCUS_DISTANCE and ACAMERA_LENS_APERTURE, this
+ * setting won't be applied instantaneously, and it may take several
+ * frames before the lens can change to the requested focal length.
+ * While the focal length is still changing, ACAMERA_LENS_STATE will
+ * be set to MOVING.</p>
+ * <p>Optical zoom will not be supported on most devices.</p>
+ *
+ * @see ACAMERA_LENS_APERTURE
+ * @see ACAMERA_LENS_FOCUS_DISTANCE
+ * @see ACAMERA_LENS_STATE
+ */
ACAMERA_LENS_FOCAL_LENGTH = // float
ACAMERA_LENS_START + 2,
+ /**
+ * <p>Desired distance to plane of sharpest focus,
+ * measured from frontmost surface of the lens.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * <li>ACaptureRequest</li>
+ * </ul>
+ *
+ * <p>Should be zero for fixed-focus cameras.</p>
+ */
ACAMERA_LENS_FOCUS_DISTANCE = // float
ACAMERA_LENS_START + 3,
+ /**
+ * <p>Sets whether the camera device uses optical image stabilization (OIS)
+ * when capturing images.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * <li>ACaptureRequest</li>
+ * </ul>
+ *
+ * <p>OIS is used to compensate for motion blur due to small
+ * movements of the camera during capture. Unlike digital image
+ * stabilization (ACAMERA_CONTROL_VIDEO_STABILIZATION_MODE), OIS
+ * makes use of mechanical elements to stabilize the camera
+ * sensor, and thus allows for longer exposure times before
+ * camera shake becomes apparent.</p>
+ * <p>Switching between different optical stabilization modes may take several
+ * frames to initialize; the camera device will report the current mode in
+ * capture result metadata. For example, when "ON" mode is requested, the
+ * optical stabilization modes in the first several capture results may still
+ * be "OFF", and it will become "ON" when the initialization is done.</p>
+ * <p>If a camera device supports both OIS and digital image stabilization
+ * (ACAMERA_CONTROL_VIDEO_STABILIZATION_MODE), turning both modes on may produce undesirable
+ * interaction, so it is recommended not to enable both at the same time.</p>
+ * <p>Not all devices will support OIS; see
+ * ACAMERA_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION for
+ * available controls.</p>
+ *
+ * @see ACAMERA_CONTROL_VIDEO_STABILIZATION_MODE
+ * @see ACAMERA_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION
+ */
ACAMERA_LENS_OPTICAL_STABILIZATION_MODE = // byte (enum)
ACAMERA_LENS_START + 4,
+ /**
+ * <p>Direction the camera faces relative to
+ * device screen.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul>
+ *
+ * <p>None</p>
+ */
ACAMERA_LENS_FACING = // byte (enum)
ACAMERA_LENS_START + 5,
+ /**
+ * <p>The orientation of the camera relative to the sensor
+ * coordinate system.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * </ul>
+ *
+ * <p>The four coefficients that describe the quaternion
+ * rotation from the Android sensor coordinate system to a
+ * camera-aligned coordinate system where the X-axis is
+ * aligned with the long side of the image sensor, the Y-axis
+ * is aligned with the short side of the image sensor, and
+ * the Z-axis is aligned with the optical axis of the sensor.</p>
+ * <p>To convert from the quaternion coefficients <code>(x,y,z,w)</code>
+ * to the axis of rotation <code>(a_x, a_y, a_z)</code> and rotation
+ * amount <code>theta</code>, the following formulas can be used:</p>
+ * <pre><code> theta = 2 * acos(w)
+ * a_x = x / sin(theta/2)
+ * a_y = y / sin(theta/2)
+ * a_z = z / sin(theta/2)
+ * </code></pre>
+ * <p>To create a 3x3 rotation matrix that applies the rotation
+ * defined by this quaternion, the following matrix can be
+ * used:</p>
+ * <pre><code>R = [ 1 - 2y^2 - 2z^2, 2xy - 2zw, 2xz + 2yw,
+ *       2xy + 2zw, 1 - 2x^2 - 2z^2, 2yz - 2xw,
+ *       2xz - 2yw, 2yz + 2xw, 1 - 2x^2 - 2y^2 ]
+ * </code></pre>
+ * <p>This matrix can then be used to apply the rotation to a
+ * column vector point with</p>
+ * <p><code>p' = Rp</code></p>
+ * <p>where <code>p</code> is in the device sensor coordinate system, and
+ * <code>p'</code> is in the camera-oriented coordinate system.</p>
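+ * <p>A minimal sketch (the function name is illustrative) that builds the
+ * row-major 3x3 rotation matrix above from the reported quaternion:</p>
+ * <pre><code>// q = {x, y, z, w} as reported in ACAMERA_LENS_POSE_ROTATION.
+ * static void quaternionToMatrix(const float q[4], float R[9]) {
+ *     float x = q[0], y = q[1], z = q[2], w = q[3];
+ *     R[0] = 1 - 2*y*y - 2*z*z; R[1] = 2*x*y - 2*z*w;     R[2] = 2*x*z + 2*y*w;
+ *     R[3] = 2*x*y + 2*z*w;     R[4] = 1 - 2*x*x - 2*z*z; R[5] = 2*y*z - 2*x*w;
+ *     R[6] = 2*x*z - 2*y*w;     R[7] = 2*y*z + 2*x*w;     R[8] = 1 - 2*x*x - 2*y*y;
+ * }
+ * </code></pre>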
+ */
ACAMERA_LENS_POSE_ROTATION = // float[4]
ACAMERA_LENS_START + 6,
+ /**
+ * <p>Position of the camera optical center.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * </ul>
+ *
+ * <p>The position of the camera device's lens optical center,
+ * as a three-dimensional vector <code>(x,y,z)</code>, relative to the
+ * optical center of the largest camera device facing in the
+ * same direction as this camera, in the {@link
+ * android.hardware.SensorEvent Android sensor coordinate
+ * axes}. Note that only the axis definitions are shared with
+ * the sensor coordinate system, but not the origin.</p>
+ * <p>If this device is the largest or only camera device with a
+ * given facing, then this position will be <code>(0, 0, 0)</code>; a
+ * camera device with a lens optical center located 3 cm from
+ * the main sensor along the +X axis (to the right from the
+ * user's perspective) will report <code>(0.03, 0, 0)</code>.</p>
+ * <p>To transform pixel coordinates between two cameras
+ * facing the same direction, first the source camera
+ * ACAMERA_LENS_RADIAL_DISTORTION must be corrected for. Then
+ * the source camera ACAMERA_LENS_INTRINSIC_CALIBRATION needs
+ * to be applied, followed by the ACAMERA_LENS_POSE_ROTATION
+ * of the source camera, the translation of the source camera
+ * relative to the destination camera, the
+ * ACAMERA_LENS_POSE_ROTATION of the destination camera, and
+ * finally the inverse of ACAMERA_LENS_INTRINSIC_CALIBRATION
+ * of the destination camera. This obtains a
+ * radial-distortion-free coordinate in the destination
+ * camera pixel coordinates.</p>
+ * <p>To compare this against a real image from the destination
+ * camera, the destination camera image then needs to be
+ * corrected for radial distortion before comparison or
+ * sampling.</p>
+ *
+ * @see ACAMERA_LENS_INTRINSIC_CALIBRATION
+ * @see ACAMERA_LENS_POSE_ROTATION
+ * @see ACAMERA_LENS_RADIAL_DISTORTION
+ */
ACAMERA_LENS_POSE_TRANSLATION = // float[3]
ACAMERA_LENS_START + 7,
+ /**
+ * <p>The range of scene distances that are in
+ * sharp focus (depth of field).</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * </ul>
+ *
+ * <p>If variable focus is not supported, the camera device can still report a
+ * fixed depth of field range.</p>
+ */
ACAMERA_LENS_FOCUS_RANGE = // float[2]
ACAMERA_LENS_START + 8,
+ /**
+ * <p>Current lens status.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * </ul>
+ *
+ * <p>For lens parameters ACAMERA_LENS_FOCAL_LENGTH, ACAMERA_LENS_FOCUS_DISTANCE,
+ * ACAMERA_LENS_FILTER_DENSITY and ACAMERA_LENS_APERTURE, when changes are requested,
+ * they may take several frames to reach the requested values. This state indicates
+ * the current status of the lens parameters.</p>
+ * <p>When the state is STATIONARY, the lens parameters are not changing. This could be
+ * either because the parameters are all fixed, or because the lens has had enough
+ * time to reach the most recently-requested values.
+ * If all these lens parameters are not changeable for a camera device, as listed below:</p>
+ * <ul>
+ * <li>Fixed focus (<code>ACAMERA_LENS_INFO_MINIMUM_FOCUS_DISTANCE == 0</code>), which means
+ * ACAMERA_LENS_FOCUS_DISTANCE parameter will always be 0.</li>
+ * <li>Fixed focal length (ACAMERA_LENS_INFO_AVAILABLE_FOCAL_LENGTHS contains single value),
+ * which means the optical zoom is not supported.</li>
+ * <li>No ND filter (ACAMERA_LENS_INFO_AVAILABLE_FILTER_DENSITIES contains only 0).</li>
+ * <li>Fixed aperture (ACAMERA_LENS_INFO_AVAILABLE_APERTURES contains single value).</li>
+ * </ul>
+ * <p>Then this state will always be STATIONARY.</p>
+ * <p>When the state is MOVING, it indicates that at least one of the lens parameters
+ * is changing.</p>
+ *
+ * @see ACAMERA_LENS_APERTURE
+ * @see ACAMERA_LENS_FILTER_DENSITY
+ * @see ACAMERA_LENS_FOCAL_LENGTH
+ * @see ACAMERA_LENS_FOCUS_DISTANCE
+ * @see ACAMERA_LENS_INFO_AVAILABLE_APERTURES
+ * @see ACAMERA_LENS_INFO_AVAILABLE_FILTER_DENSITIES
+ * @see ACAMERA_LENS_INFO_AVAILABLE_FOCAL_LENGTHS
+ * @see ACAMERA_LENS_INFO_MINIMUM_FOCUS_DISTANCE
+ */
ACAMERA_LENS_STATE = // byte (enum)
ACAMERA_LENS_START + 9,
+ /**
+ * <p>The parameters for this camera device's intrinsic
+ * calibration.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * </ul>
+ *
+ * <p>The five calibration parameters that describe the
+ * transform from camera-centric 3D coordinates to sensor
+ * pixel coordinates:</p>
+ * <pre><code>[f_x, f_y, c_x, c_y, s]
+ * </code></pre>
+ * <p>Where <code>f_x</code> and <code>f_y</code> are the horizontal and vertical
+ * focal lengths, <code>[c_x, c_y]</code> is the position of the optical
+ * axis, and <code>s</code> is a skew parameter for the sensor plane not
+ * being aligned with the lens plane.</p>
+ * <p>These are typically used within a transformation matrix K:</p>
+ * <pre><code>K = [ f_x, s, c_x,
+ *       0, f_y, c_y,
+ *       0,   0,   1 ]
+ * </code></pre>
+ * <p>which can then be combined with the camera pose rotation
+ * <code>R</code> and translation <code>t</code> (ACAMERA_LENS_POSE_ROTATION and
+ * ACAMERA_LENS_POSE_TRANSLATION, respectively) to calculate the
+ * complete transform from world coordinates to pixel
+ * coordinates:</p>
+ * <pre><code>P = [ K 0 ] * [ R t ]
+ *     [ 0 1 ]   [ 0 1 ]
+ * </code></pre>
+ * <p>and with <code>p_w</code> being a point in the world coordinate system
+ * and <code>p_s</code> being a point in the camera active pixel array
+ * coordinate system, and with the mapping including the
+ * homogeneous division by z:</p>
+ * <pre><code> p_h = (x_h, y_h, z_h) = P p_w
+ * p_s = p_h / z_h
+ * </code></pre>
+ * <p>so <code>[x_s, y_s]</code> are the pixel coordinates of the world
+ * point, <code>z_s = 1</code>, and <code>z_h</code> is a measurement of disparity
+ * (depth) in pixel coordinates.</p>
+ * <p>Note that the coordinate system for this transform is the
+ * ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE system,
+ * where <code>(0,0)</code> is the top-left of the
+ * preCorrectionActiveArraySize rectangle. Once the pose and
+ * intrinsic calibration transforms have been applied to a
+ * world point, then the ACAMERA_LENS_RADIAL_DISTORTION
+ * transform needs to be applied, and the result adjusted to
+ * be in the ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE coordinate
+ * system (where <code>(0, 0)</code> is the top-left of the
+ * activeArraySize rectangle), to determine the final pixel
+ * coordinate of the world point for processed (non-RAW)
+ * output buffers.</p>
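+ * <p>A minimal sketch (names are illustrative) of projecting a
+ * camera-centric 3D point with the calibration vector
+ * <code>[f_x, f_y, c_x, c_y, s]</code>, including the homogeneous division
+ * described above:</p>
+ * <pre><code>// `calib` is the float[5] from ACAMERA_LENS_INTRINSIC_CALIBRATION;
+ * // (X, Y, Z) is a point in camera-centric 3D coordinates with Z != 0.
+ * static void projectPoint(const float calib[5], float X, float Y, float Z,
+ *                          float* x_s, float* y_s) {
+ *     float f_x = calib[0], f_y = calib[1];
+ *     float c_x = calib[2], c_y = calib[3], s = calib[4];
+ *     *x_s = (f_x * X + s * Y + c_x * Z) / Z; // division by z_h
+ *     *y_s = (f_y * Y + c_y * Z) / Z;
+ * }
+ * </code></pre>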
+ *
+ * @see ACAMERA_LENS_POSE_ROTATION
+ * @see ACAMERA_LENS_POSE_TRANSLATION
+ * @see ACAMERA_LENS_RADIAL_DISTORTION
+ * @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE
+ * @see ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE
+ */
ACAMERA_LENS_INTRINSIC_CALIBRATION = // float[5]
ACAMERA_LENS_START + 10,
+ /**
+ * <p>The correction coefficients to correct for this camera device's
+ * radial and tangential lens distortion.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * </ul>
+ *
+ * <p>Four radial distortion coefficients <code>[kappa_0, kappa_1, kappa_2,
+ * kappa_3]</code> and two tangential distortion coefficients
+ * <code>[kappa_4, kappa_5]</code> that can be used to correct the
+ * lens's geometric distortion with the mapping equations:</p>
+ * <pre><code> x_c = x_i * ( kappa_0 + kappa_1 * r^2 + kappa_2 * r^4 + kappa_3 * r^6 ) +
+ *        kappa_4 * (2 * x_i * y_i) + kappa_5 * ( r^2 + 2 * x_i^2 )
+ *  y_c = y_i * ( kappa_0 + kappa_1 * r^2 + kappa_2 * r^4 + kappa_3 * r^6 ) +
+ *        kappa_5 * (2 * x_i * y_i) + kappa_4 * ( r^2 + 2 * y_i^2 )
+ * </code></pre>
+ * <p>Here, <code>[x_c, y_c]</code> are the coordinates to sample in the
+ * input image that correspond to the pixel values in the
+ * corrected image at the coordinate <code>[x_i, y_i]</code>:</p>
+ * <pre><code> correctedImage(x_i, y_i) = sample_at(x_c, y_c, inputImage)
+ * </code></pre>
+ * <p>The pixel coordinates are defined in a normalized
+ * coordinate system related to the
+ * ACAMERA_LENS_INTRINSIC_CALIBRATION calibration fields.
+ * Both <code>[x_i, y_i]</code> and <code>[x_c, y_c]</code> have <code>(0,0)</code> at the
+ * lens optical center <code>[c_x, c_y]</code>. The maximum magnitudes
+ * of both x and y coordinates are normalized to be 1 at the
+ * edge further from the optical center, so the range
+ * for both dimensions is <code>-1 <= x <= 1</code>.</p>
+ * <p>Finally, <code>r</code> represents the radial distance from the
+ * optical center, <code>r^2 = x_i^2 + y_i^2</code>, and its magnitude
+ * is therefore no larger than <code>|r| <= sqrt(2)</code>.</p>
+ * <p>The distortion model used is the Brown-Conrady model.</p>
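+ * <p>A direct transcription of the mapping equations above, as a sketch
+ * (the function name is illustrative):</p>
+ * <pre><code>// `kappa` is the float[6] from ACAMERA_LENS_RADIAL_DISTORTION;
+ * // (x_i, y_i) are normalized corrected-image coordinates.
+ * static void distortPoint(const float kappa[6], float x_i, float y_i,
+ *                          float* x_c, float* y_c) {
+ *     float r2 = x_i * x_i + y_i * y_i;
+ *     float radial = kappa[0] + kappa[1] * r2 + kappa[2] * r2 * r2
+ *                  + kappa[3] * r2 * r2 * r2;
+ *     *x_c = x_i * radial + kappa[4] * (2 * x_i * y_i)
+ *                         + kappa[5] * (r2 + 2 * x_i * x_i);
+ *     *y_c = y_i * radial + kappa[5] * (2 * x_i * y_i)
+ *                         + kappa[4] * (r2 + 2 * y_i * y_i);
+ * }
+ * </code></pre>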
+ *
+ * @see ACAMERA_LENS_INTRINSIC_CALIBRATION
+ */
ACAMERA_LENS_RADIAL_DISTORTION = // float[6]
ACAMERA_LENS_START + 11,
ACAMERA_LENS_END,
+ /**
+ * <p>List of aperture size values for ACAMERA_LENS_APERTURE that are
+ * supported by this camera device.</p>
+ *
+ * @see ACAMERA_LENS_APERTURE
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul>
+ *
+ * <p>If the camera device doesn't support a variable lens aperture,
+ * this list will contain only one value, which is the fixed aperture size.</p>
+ * <p>If the camera device supports a variable aperture, the aperture values
+ * in this list will be sorted in ascending order.</p>
+ */
ACAMERA_LENS_INFO_AVAILABLE_APERTURES = // float[n]
ACAMERA_LENS_INFO_START,
+ /**
+ * <p>List of neutral density filter values for
+ * ACAMERA_LENS_FILTER_DENSITY that are supported by this camera device.</p>
+ *
+ * @see ACAMERA_LENS_FILTER_DENSITY
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul>
+ *
+ * <p>If a neutral density filter is not supported by this camera device,
+ * this list will contain only 0. Otherwise, this list will include every
+ * filter density supported by the camera device, in ascending order.</p>
+ */
ACAMERA_LENS_INFO_AVAILABLE_FILTER_DENSITIES = // float[n]
ACAMERA_LENS_INFO_START + 1,
+ /**
+ * <p>List of focal lengths for ACAMERA_LENS_FOCAL_LENGTH that are supported by this camera
+ * device.</p>
+ *
+ * @see ACAMERA_LENS_FOCAL_LENGTH
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul>
+ *
+ * <p>If optical zoom is not supported, this list will only contain
+ * a single value corresponding to the fixed focal length of the
+ * device. Otherwise, this list will include every focal length supported
+ * by the camera device, in ascending order.</p>
+ */
ACAMERA_LENS_INFO_AVAILABLE_FOCAL_LENGTHS = // float[n]
ACAMERA_LENS_INFO_START + 2,
+ /**
+ * <p>List of optical image stabilization (OIS) modes for
+ * ACAMERA_LENS_OPTICAL_STABILIZATION_MODE that are supported by this camera device.</p>
+ *
+ * @see ACAMERA_LENS_OPTICAL_STABILIZATION_MODE
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul>
+ *
+ * <p>If OIS is not supported by a given camera device, this list will
+ * contain only OFF.</p>
+ */
ACAMERA_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION = // byte[n]
ACAMERA_LENS_INFO_START + 3,
+ /**
+ * <p>Hyperfocal distance for this lens.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul>
+ *
+ * <p>If the lens is not fixed focus, the camera device will report this
+ * field when ACAMERA_LENS_INFO_FOCUS_DISTANCE_CALIBRATION is APPROXIMATE or CALIBRATED.</p>
+ *
+ * @see ACAMERA_LENS_INFO_FOCUS_DISTANCE_CALIBRATION
+ */
ACAMERA_LENS_INFO_HYPERFOCAL_DISTANCE = // float
ACAMERA_LENS_INFO_START + 4,
+ /**
+ * <p>Shortest distance from frontmost surface
+ * of the lens that can be brought into sharp focus.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul>
+ *
+ * <p>If the lens is fixed-focus, this will be
+ * 0.</p>
+ */
ACAMERA_LENS_INFO_MINIMUM_FOCUS_DISTANCE = // float
ACAMERA_LENS_INFO_START + 5,
+ /**
+ * <p>Dimensions of lens shading map.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul>
+ *
+ * <p>The map should be on the order of 30-40 rows and columns, and
+ * must be smaller than 64x64.</p>
+ */
ACAMERA_LENS_INFO_SHADING_MAP_SIZE = // int32[2]
ACAMERA_LENS_INFO_START + 6,
+ /**
+ * <p>The lens focus distance calibration quality.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul>
+ *
+ * <p>The lens focus distance calibration quality determines the reliability of
+ * focus related metadata entries, i.e. ACAMERA_LENS_FOCUS_DISTANCE,
+ * ACAMERA_LENS_FOCUS_RANGE, ACAMERA_LENS_INFO_HYPERFOCAL_DISTANCE, and
+ * ACAMERA_LENS_INFO_MINIMUM_FOCUS_DISTANCE.</p>
+ * <p>APPROXIMATE and CALIBRATED devices report the focus metadata in
+ * units of diopters (1/meter), so <code>0.0f</code> represents focusing at infinity,
+ * and increasing positive numbers represent focusing closer and closer
+ * to the camera device. The focus distance control also uses diopters
+ * on these devices.</p>
+ * <p>UNCALIBRATED devices do not use units that are directly comparable
+ * to any real physical measurement, but <code>0.0f</code> still represents farthest
+ * focus, and ACAMERA_LENS_INFO_MINIMUM_FOCUS_DISTANCE represents the
+ * nearest focus the device can achieve.</p>
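+ * <p>For APPROXIMATE or CALIBRATED devices, a one-line conversion sketch
+ * (the <code>diopters</code> variable is assumed to hold a focus distance
+ * read from one of the fields above):</p>
+ * <pre><code>// INFINITY comes from math.h; diopters == 0.0f means focused at infinity.
+ * float distanceMeters = (diopters > 0.0f) ? (1.0f / diopters) : INFINITY;
+ * </code></pre>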
+ *
+ * @see ACAMERA_LENS_FOCUS_DISTANCE
+ * @see ACAMERA_LENS_FOCUS_RANGE
+ * @see ACAMERA_LENS_INFO_HYPERFOCAL_DISTANCE
+ * @see ACAMERA_LENS_INFO_MINIMUM_FOCUS_DISTANCE
+ */
ACAMERA_LENS_INFO_FOCUS_DISTANCE_CALIBRATION = // byte (enum)
ACAMERA_LENS_INFO_START + 7,
ACAMERA_LENS_INFO_END,
+ /**
+ * <p>Mode of operation for the noise reduction algorithm.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * <li>ACaptureRequest</li>
+ * </ul>
+ *
+ * <p>The noise reduction algorithm attempts to improve image quality by removing
+ * excessive noise added by the capture process, especially in dark conditions.</p>
+ * <p>OFF means no noise reduction will be applied by the camera device, for both raw and
+ * YUV domain.</p>
+ * <p>MINIMAL means that only sensor raw domain basic noise reduction is enabled, to remove
+ * demosaicing or other processing artifacts. For YUV_REPROCESSING, MINIMAL is the same as OFF.
+ * This mode is optional and may not be supported by all devices. The application should check
+ * ACAMERA_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES before using it (see the sketch
+ * below).</p>
+ * <p>FAST/HIGH_QUALITY both mean camera device determined noise filtering
+ * will be applied. HIGH_QUALITY mode indicates that the camera device
+ * will use the highest-quality noise filtering algorithms,
+ * even if it slows down capture rate. FAST means the camera device will not
+ * slow down capture rate when applying noise filtering. FAST may be the same as MINIMAL if
+ * MINIMAL is listed, or the same as OFF if any noise filtering will slow down capture rate.
+ * Every output stream will have a similar amount of enhancement applied.</p>
+ * <p>ZERO_SHUTTER_LAG is meant to be used by applications that maintain a continuous circular
+ * buffer of high-resolution images during preview and reprocess image(s) from that buffer
+ * into a final capture when triggered by the user. In this mode, the camera device applies
+ * noise reduction to low-resolution streams (below maximum recording resolution) to maximize
+ * preview quality, but does not apply noise reduction to high-resolution streams, since
+ * those will be reprocessed later if necessary.</p>
+ * <p>For YUV_REPROCESSING, these FAST/HIGH_QUALITY modes both mean that the camera device
+ * will apply FAST/HIGH_QUALITY YUV domain noise reduction, respectively. The camera device
+ * may adjust the noise reduction parameters for best image quality based on the
+ * android.reprocess.effectiveExposureFactor if it is set.</p>
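+ * <p>As a sketch of the availability check suggested above (names are
+ * illustrative, error handling trimmed):</p>
+ * <pre><code>// Assumes declarations from camera/NdkCameraMetadata.h; `chars` comes from
+ * // ACameraManager_getCameraCharacteristics.
+ * ACameraMetadata_const_entry entry;
+ * bool minimalSupported = false;
+ * if (ACameraMetadata_getConstEntry(chars,
+ *         ACAMERA_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
+ *         &entry) == ACAMERA_OK) {
+ *     for (uint32_t i = 0; i < entry.count; i++) {
+ *         if (entry.data.u8[i] == ACAMERA_NOISE_REDUCTION_MODE_MINIMAL) {
+ *             minimalSupported = true;
+ *         }
+ *     }
+ * }
+ * </code></pre>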
+ *
+ * @see ACAMERA_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES
+ */
ACAMERA_NOISE_REDUCTION_MODE = // byte (enum)
ACAMERA_NOISE_REDUCTION_START,
+ /**
+ * <p>List of noise reduction modes for ACAMERA_NOISE_REDUCTION_MODE that are supported
+ * by this camera device.</p>
+ *
+ * @see ACAMERA_NOISE_REDUCTION_MODE
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul>
+ *
+ * <p>Full-capability camera devices will always support OFF and FAST.</p>
+ * <p>Camera devices that support YUV_REPROCESSING or PRIVATE_REPROCESSING will support
+ * ZERO_SHUTTER_LAG.</p>
+ * <p>Legacy-capability camera devices will only support FAST mode.</p>
+ */
ACAMERA_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES = // byte[n]
ACAMERA_NOISE_REDUCTION_START + 2,
ACAMERA_NOISE_REDUCTION_END,
+ /**
+ * <p>The maximum numbers of different types of output streams
+ * that can be configured and used simultaneously by a camera device.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul>
+ *
+ * <p>This is a 3 element tuple that contains the max number of output simultaneous
+ * streams for raw sensor, processed (but not stalling), and processed (and stalling)
+ * formats respectively. For example, assuming that JPEG is typically a processed and
+ * stalling stream, if max raw sensor format output stream number is 1, max YUV streams
+ * number is 3, and max JPEG stream number is 2, then this tuple should be <code>(1, 3, 2)</code>.</p>
+ * <p>This lists the upper bound of the number of output streams supported by
+ * the camera device. Using more streams simultaneously may require more hardware and
+ * CPU resources that will consume more power. The image format for an output stream can
+ * be any supported format provided by ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS.
+ * The formats defined in ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS can be categorized
+ * into the 3 stream types as below:</p>
+ * <ul>
+ * <li>Processed (but stalling): any non-RAW format with a stall duration > 0.
+ * Typically {@link android.graphics.ImageFormat#JPEG JPEG format}.</li>
+ * <li>Raw formats: {@link android.graphics.ImageFormat#RAW_SENSOR RAW_SENSOR}, {@link
+ * android.graphics.ImageFormat#RAW10 RAW10}, or {@link android.graphics.ImageFormat#RAW12
+ * RAW12}.</li>
+ * <li>Processed (but not-stalling): any non-RAW format without a stall duration.
+ * Typically {@link android.graphics.ImageFormat#YUV_420_888 YUV_420_888},
+ * {@link android.graphics.ImageFormat#NV21 NV21}, or
+ * {@link android.graphics.ImageFormat#YV12 YV12}.</li>
+ * </ul>
+ *
+ * @see ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS
+ */
ACAMERA_REQUEST_MAX_NUM_OUTPUT_STREAMS = // int32[3]
ACAMERA_REQUEST_START + 6,
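+ /*
+ * A minimal sketch of reading this 3-element tuple through the NDK; it
+ * assumes a valid ACameraMetadata* chars obtained from
+ * ACameraManager_getCameraCharacteristics, with error checks elided:
+ *
+ *     ACameraMetadata_const_entry e;
+ *     if (ACameraMetadata_getConstEntry(chars,
+ *             ACAMERA_REQUEST_MAX_NUM_OUTPUT_STREAMS, &e) == ACAMERA_OK) {
+ *         int32_t maxRaw          = e.data.i32[0]; // raw sensor streams
+ *         int32_t maxProcessed    = e.data.i32[1]; // processed, non-stalling
+ *         int32_t maxProcStalling = e.data.i32[2]; // processed, stalling (JPEG)
+ *     }
+ */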
+ /**
+ * <p>The maximum numbers of any type of input streams
+ * that can be configured and used simultaneously by a camera device.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul>
+ *
+ * <p>When set to 0, it means no input stream is supported.</p>
+ * <p>The image format for an input stream can be any supported format returned by {@link
+ * android.hardware.camera2.params.StreamConfigurationMap#getInputFormats}. When using an
+ * input stream, there must be at least one output stream configured to receive the
+ * reprocessed images.</p>
+ * <p>When an input stream and some output streams are used in a reprocessing request,
+ * only the input buffer will be used to produce these output stream buffers, and a
+ * new sensor image will not be captured.</p>
+ * <p>For example, for the Zero Shutter Lag (ZSL) still capture use case, the input
+ * stream image format will be PRIVATE, and the associated output stream image format
+ * should be JPEG.</p>
+ */
ACAMERA_REQUEST_MAX_NUM_INPUT_STREAMS = // int32
ACAMERA_REQUEST_START + 8,
+ /**
+ * <p>Specifies the number of pipeline stages the frame went
+ * through from when it was exposed to when the final completed result
+ * was available to the framework.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * </ul>
+ *
+ * <p>Depending on what settings are used in the request, and
+ * what streams are configured, the data may undergo less processing,
+ * and some pipeline stages may be skipped.</p>
+ * <p>See ACAMERA_REQUEST_PIPELINE_MAX_DEPTH for more details.</p>
+ *
+ * @see ACAMERA_REQUEST_PIPELINE_MAX_DEPTH
+ */
ACAMERA_REQUEST_PIPELINE_DEPTH = // byte
ACAMERA_REQUEST_START + 9,
+ /**
+ * <p>Specifies the number of maximum pipeline stages a frame
+ * has to go through from when it's exposed to when it's available
+ * to the framework.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul>
+ *
+ * <p>A typical minimum value for this is 2 (one stage to expose,
+ * one stage to readout) from the sensor. The ISP then usually adds
+ * its own stages to do custom HW processing. Further stages may be
+ * added by SW processing.</p>
+ * <p>Depending on what settings are used (e.g. YUV, JPEG) and what
+ * processing is enabled (e.g. face detection), the actual pipeline
+ * depth (specified by ACAMERA_REQUEST_PIPELINE_DEPTH) may be less than
+ * the max pipeline depth.</p>
+ * <p>A pipeline depth of X stages is equivalent to a pipeline latency of
+ * X frame intervals.</p>
+ * <p>This value will normally be 8 or less; however, for a high speed capture session,
+ * the max pipeline depth will be up to 8 times the size of the high speed capture request list.</p>
+ *
+ * @see ACAMERA_REQUEST_PIPELINE_DEPTH
+ */
ACAMERA_REQUEST_PIPELINE_MAX_DEPTH = // byte
ACAMERA_REQUEST_START + 10,
+ /**
+ * <p>Defines how many sub-components
+ * a result will be composed of.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul>
+ *
+ * <p>In order to combat the pipeline latency, partial results
+ * may be delivered to the application layer from the camera device as
+ * soon as they are available.</p>
+ * <p>Optional; defaults to 1. A value of 1 means that partial
+ * results are not supported, and only the final TotalCaptureResult will
+ * be produced by the camera device.</p>
+ * <p>A typical use case for this might be: after requesting an
+ * auto-focus (AF) lock the new AF state might be available 50%
+ * of the way through the pipeline. The camera device could
+ * then immediately dispatch this state via a partial result to
+ * the application, and the rest of the metadata via later
+ * partial results.</p>
+ */
ACAMERA_REQUEST_PARTIAL_RESULT_COUNT = // int32
ACAMERA_REQUEST_START + 11,
+ /**
+ * <p>List of capabilities that this camera device
+ * advertises as fully supporting.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul>
+ *
+ * <p>A capability is a contract that the camera device makes in order
+ * to be able to satisfy one or more use cases.</p>
+ * <p>Listing a capability guarantees that the whole set of features
+ * required to support a common use case will all be available.</p>
+ * <p>Using a subset of the functionality provided by an unsupported
+ * capability may be possible on a specific camera device implementation;
+ * to do this, query each of ACAMERA_REQUEST_AVAILABLE_REQUEST_KEYS,
+ * ACAMERA_REQUEST_AVAILABLE_RESULT_KEYS, and
+ * ACAMERA_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS.</p>
+ * <p>The following capabilities are guaranteed to be available on
+ * ACAMERA_INFO_SUPPORTED_HARDWARE_LEVEL <code>==</code> FULL devices:</p>
+ * <ul>
+ * <li>MANUAL_SENSOR</li>
+ * <li>MANUAL_POST_PROCESSING</li>
+ * </ul>
+ * <p>Other capabilities may be available on either FULL or LIMITED
+ * devices, but the application should query this key to be sure.</p>
+ *
+ * @see ACAMERA_INFO_SUPPORTED_HARDWARE_LEVEL
+ * @see ACAMERA_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS
+ * @see ACAMERA_REQUEST_AVAILABLE_REQUEST_KEYS
+ * @see ACAMERA_REQUEST_AVAILABLE_RESULT_KEYS
+ */
ACAMERA_REQUEST_AVAILABLE_CAPABILITIES = // byte[n] (enum)
ACAMERA_REQUEST_START + 12,
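+ /*
+ * A minimal sketch of testing for one capability (MANUAL_SENSOR here),
+ * again assuming a valid ACameraMetadata* chars; error checks elided:
+ *
+ *     bool hasManualSensor = false; // needs <stdbool.h>
+ *     ACameraMetadata_const_entry e;
+ *     if (ACameraMetadata_getConstEntry(chars,
+ *             ACAMERA_REQUEST_AVAILABLE_CAPABILITIES, &e) == ACAMERA_OK) {
+ *         for (uint32_t i = 0; i < e.count; i++) {
+ *             if (e.data.u8[i] ==
+ *                     ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR) {
+ *                 hasManualSensor = true;
+ *             }
+ *         }
+ *     }
+ */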
+ /**
+ * <p>A list of all keys that the camera device has available
+ * to use with {@link android.hardware.camera2.CaptureRequest}.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul>
+ *
+ * <p>Attempting to set a key into a CaptureRequest that is not
+ * listed here will result in an invalid request and will be rejected
+ * by the camera device.</p>
+ * <p>This field can be used to query the feature set of a camera device
+ * at a more granular level than capabilities. This is especially
+ * important for optional keys that are not listed under any capability
+ * in ACAMERA_REQUEST_AVAILABLE_CAPABILITIES.</p>
+ *
+ * @see ACAMERA_REQUEST_AVAILABLE_CAPABILITIES
+ */
ACAMERA_REQUEST_AVAILABLE_REQUEST_KEYS = // int32[n]
ACAMERA_REQUEST_START + 13,
+ /**
+ * <p>A list of all keys that the camera device has available
+ * to use with {@link android.hardware.camera2.CaptureResult}.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul>
+ *
+ * <p>Attempting to get a key from a CaptureResult that is not
+ * listed here will always return a <code>null</code> value. Getting a key from
+ * a CaptureResult that is listed here will generally never return a <code>null</code>
+ * value.</p>
+ * <p>The following keys may return <code>null</code> unless they are enabled:</p>
+ * <ul>
+ * <li>ACAMERA_STATISTICS_LENS_SHADING_MAP (non-null iff ACAMERA_STATISTICS_LENS_SHADING_MAP_MODE == ON)</li>
+ * </ul>
+ * <p>(Those sometimes-null keys will nevertheless be listed here
+ * if they are available.)</p>
+ * <p>This field can be used to query the feature set of a camera device
+ * at a more granular level than capabilities. This is especially
+ * important for optional keys that are not listed under any capability
+ * in ACAMERA_REQUEST_AVAILABLE_CAPABILITIES.</p>
+ *
+ * @see ACAMERA_REQUEST_AVAILABLE_CAPABILITIES
+ * @see ACAMERA_STATISTICS_LENS_SHADING_MAP
+ * @see ACAMERA_STATISTICS_LENS_SHADING_MAP_MODE
+ */
ACAMERA_REQUEST_AVAILABLE_RESULT_KEYS = // int32[n]
ACAMERA_REQUEST_START + 14,
+ /**
+ * <p>A list of all keys that the camera device has available
+ * to use with {@link android.hardware.camera2.CameraCharacteristics}.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul>
+ *
+ * <p>This entry follows the same rules as
+ * ACAMERA_REQUEST_AVAILABLE_RESULT_KEYS (except that it applies for
+ * CameraCharacteristics instead of CaptureResult). See above for more
+ * details.</p>
+ *
+ * @see ACAMERA_REQUEST_AVAILABLE_RESULT_KEYS
+ */
ACAMERA_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS = // int32[n]
ACAMERA_REQUEST_START + 15,
ACAMERA_REQUEST_END,
+ /**
+ * <p>The desired region of the sensor to read out for this capture.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * <li>ACaptureRequest</li>
+ * </ul>
+ *
+ * <p>This control can be used to implement digital zoom.</p>
+ * <p>The crop region coordinate system is based on
+ * ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE, with <code>(0, 0)</code> being the
+ * top-left corner of the sensor active array.</p>
+ * <p>Output streams use this rectangle to produce their output,
+ * cropping to a smaller region if necessary to maintain the
+ * stream's aspect ratio, then scaling the sensor input to
+ * match the output's configured resolution.</p>
+ * <p>The crop region is applied after the RAW to other color
+ * space (e.g. YUV) conversion. Since raw streams
+ * (e.g. RAW16) don't have the conversion stage, they are not
+ * croppable. The crop region will be ignored by raw streams.</p>
+ * <p>For non-raw streams, any additional per-stream cropping will
+ * be done to maximize the final pixel area of the stream.</p>
+ * <p>For example, if the crop region is set to a 4:3 aspect
+ * ratio, then 4:3 streams will use the exact crop
+ * region. 16:9 streams will further crop vertically
+ * (letterbox).</p>
+ * <p>Conversely, if the crop region is set to a 16:9 aspect ratio, then 4:3
+ * outputs will crop horizontally (pillarbox), and 16:9
+ * streams will match exactly. These additional crops will
+ * be centered within the crop region.</p>
+ * <p>The width and height of the crop region cannot
+ * be set to be smaller than
+ * <code>floor( activeArraySize.width / ACAMERA_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM )</code> and
+ * <code>floor( activeArraySize.height / ACAMERA_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM )</code>, respectively.</p>
+ * <p>The camera device may adjust the crop region to account
+ * for rounding and other hardware requirements; the final
+ * crop region used will be included in the output capture
+ * result.</p>
+ *
+ * @see ACAMERA_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM
+ * @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE
+ */
ACAMERA_SCALER_CROP_REGION = // int32[4]
ACAMERA_SCALER_START,
+ /**
+ * <p>The maximum ratio between both active area width
+ * and crop region width, and active area height and
+ * crop region height, for ACAMERA_SCALER_CROP_REGION.</p>
+ *
+ * @see ACAMERA_SCALER_CROP_REGION
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul>
+ *
+ * <p>This represents the maximum amount of zooming possible by
+ * the camera device, or equivalently, the minimum cropping
+ * window size.</p>
+ * <p>Crop regions that have a width or height that is smaller
+ * than this ratio allows will be rounded up to the minimum
+ * allowed size by the camera device.</p>
+ */
ACAMERA_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM = // float
ACAMERA_SCALER_START + 4,
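+ /*
+ * A minimal digital-zoom sketch tying this key to ACAMERA_SCALER_CROP_REGION
+ * and ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE. It assumes a valid
+ * ACameraMetadata* chars, an ACaptureRequest* request, a caller-chosen
+ * float requestedZoom, and that both rectangles are laid out as
+ * (left, top, width, height); error checks elided:
+ *
+ *     ACameraMetadata_const_entry arr, mz;
+ *     ACameraMetadata_getConstEntry(chars,
+ *             ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE, &arr);
+ *     ACameraMetadata_getConstEntry(chars,
+ *             ACAMERA_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM, &mz);
+ *     float zoom = requestedZoom < mz.data.f[0] ? requestedZoom : mz.data.f[0];
+ *     int32_t w = arr.data.i32[2], h = arr.data.i32[3];
+ *     int32_t cw = (int32_t)(w / zoom), ch = (int32_t)(h / zoom);
+ *     int32_t crop[4] = { (w - cw) / 2, (h - ch) / 2, cw, ch }; // centered
+ *     ACaptureRequest_setEntry_i32(request, ACAMERA_SCALER_CROP_REGION, 4, crop);
+ */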
+ /**
+ * <p>The available stream configurations that this
+ * camera device supports
+ * (i.e. format, width, height, output/input stream).</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul>
+ *
+ * <p>The configurations are listed as <code>(format, width, height, input?)</code>
+ * tuples.</p>
+ * <p>For a given use case, the actual maximum supported resolution
+ * may be lower than what is listed here, depending on the destination
+ * Surface for the image data. For example, for recording video,
+ * the video encoder chosen may have a maximum size limit (e.g. 1080p)
+ * smaller than what the camera (e.g. maximum resolution is 3264x2448)
+ * can provide.</p>
+ * <p>Please reference the documentation for the image data destination to
+ * check if it limits the maximum size for image data.</p>
+ * <p>Not all output formats may be supported in a configuration with
+ * an input stream of a particular format. For more details, see
+ * android.scaler.availableInputOutputFormatsMap.</p>
+ * <p>The following table describes the minimum required output stream
+ * configurations based on the hardware level
+ * (ACAMERA_INFO_SUPPORTED_HARDWARE_LEVEL):</p>
+ * <table>
+ * <thead>
+ * <tr>
+ * <th align="center">Format</th>
+ * <th align="center">Size</th>
+ * <th align="center">Hardware Level</th>
+ * <th align="center">Notes</th>
+ * </tr>
+ * </thead>
+ * <tbody>
+ * <tr>
+ * <td align="center">JPEG</td>
+ * <td align="center">ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE</td>
+ * <td align="center">Any</td>
+ * <td align="center"></td>
+ * </tr>
+ * <tr>
+ * <td align="center">JPEG</td>
+ * <td align="center">1920x1080 (1080p)</td>
+ * <td align="center">Any</td>
+ * <td align="center">if 1080p <= activeArraySize</td>
+ * </tr>
+ * <tr>
+ * <td align="center">JPEG</td>
+ * <td align="center">1280x720 (720)</td>
+ * <td align="center">Any</td>
+ * <td align="center">if 720p <= activeArraySize</td>
+ * </tr>
+ * <tr>
+ * <td align="center">JPEG</td>
+ * <td align="center">640x480 (480p)</td>
+ * <td align="center">Any</td>
+ * <td align="center">if 480p <= activeArraySize</td>
+ * </tr>
+ * <tr>
+ * <td align="center">JPEG</td>
+ * <td align="center">320x240 (240p)</td>
+ * <td align="center">Any</td>
+ * <td align="center">if 240p <= activeArraySize</td>
+ * </tr>
+ * <tr>
+ * <td align="center">YUV_420_888</td>
+ * <td align="center">all output sizes available for JPEG</td>
+ * <td align="center">FULL</td>
+ * <td align="center"></td>
+ * </tr>
+ * <tr>
+ * <td align="center">YUV_420_888</td>
+ * <td align="center">all output sizes available for JPEG, up to the maximum video size</td>
+ * <td align="center">LIMITED</td>
+ * <td align="center"></td>
+ * </tr>
+ * <tr>
+ * <td align="center">IMPLEMENTATION_DEFINED</td>
+ * <td align="center">same as YUV_420_888</td>
+ * <td align="center">Any</td>
+ * <td align="center"></td>
+ * </tr>
+ * </tbody>
+ * </table>
+ * <p>Refer to ACAMERA_REQUEST_AVAILABLE_CAPABILITIES for additional
+ * mandatory stream configurations on a per-capability basis.</p>
+ *
+ * @see ACAMERA_INFO_SUPPORTED_HARDWARE_LEVEL
+ * @see ACAMERA_REQUEST_AVAILABLE_CAPABILITIES
+ * @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE
+ */
ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS = // int32[n*4] (enum)
ACAMERA_SCALER_START + 10,
+ /**
+ * <p>This lists the minimum frame duration for each
+ * format/size combination.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul>
+ *
+ * <p>This should correspond to the frame duration when only that
+ * stream is active, with all processing (typically in android.*.mode)
+ * set to either OFF or FAST.</p>
+ * <p>When multiple streams are used in a request, the minimum frame
+ * duration will be max(individual stream min durations).</p>
+ * <p>The minimum frame duration of a stream (of a particular format, size)
+ * is the same regardless of whether the stream is input or output.</p>
+ * <p>See ACAMERA_SENSOR_FRAME_DURATION and
+ * ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS for more details about
+ * calculating the max frame rate.</p>
+ * <p>(Keep in sync with
+ * {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration})</p>
+ *
+ * @see ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS
+ * @see ACAMERA_SENSOR_FRAME_DURATION
+ */
ACAMERA_SCALER_AVAILABLE_MIN_FRAME_DURATIONS = // int64[4*n]
ACAMERA_SCALER_START + 11,
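+ /*
+ * A hypothetical helper sketching how to scan the flattened int64[4*n]
+ * array, laid out as (format, width, height, duration) tuples; it assumes
+ * a valid ACameraMetadata* chars:
+ *
+ *     int64_t minFrameDurationNs(const ACameraMetadata* chars,
+ *                                int64_t format, int64_t w, int64_t h) {
+ *         ACameraMetadata_const_entry e;
+ *         if (ACameraMetadata_getConstEntry(chars,
+ *                 ACAMERA_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
+ *                 &e) != ACAMERA_OK) return -1;
+ *         for (uint32_t i = 0; i + 3 < e.count; i += 4) {
+ *             if (e.data.i64[i] == format && e.data.i64[i + 1] == w &&
+ *                     e.data.i64[i + 2] == h) return e.data.i64[i + 3];
+ *         }
+ *         return -1; // combination not listed
+ *     }
+ */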
+ /**
+ * <p>This lists the maximum stall duration for each
+ * output format/size combination.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul>
+ *
+ * <p>A stall duration is how much extra time would get added
+ * to the normal minimum frame duration for a repeating request
+ * that has streams with non-zero stall.</p>
+ * <p>For example, consider JPEG captures which have the following
+ * characteristics:</p>
+ * <ul>
+ * <li>JPEG streams act like processed YUV streams in requests for which
+ * they are not included; in requests in which they are directly
+ * referenced, they act as JPEG streams. This is because supporting a
+ * JPEG stream requires the underlying YUV data to always be ready for
+ * use by a JPEG encoder, but the encoder will only be used (and impact
+ * frame duration) on requests that actually reference a JPEG stream.</li>
+ * <li>The JPEG processor can run concurrently to the rest of the camera
+ * pipeline, but cannot process more than 1 capture at a time.</li>
+ * </ul>
+ * <p>In other words, using a repeating YUV request would result
+ * in a steady frame rate (let's say it's 30 FPS). If a single
+ * JPEG request is submitted periodically, the frame rate will stay
+ * at 30 FPS (as long as we wait for the previous JPEG to return each
+ * time). If we try to submit a repeating YUV + JPEG request, then
+ * the frame rate will drop from 30 FPS.</p>
+ * <p>In general, submitting a new request with a non-0 stall time
+ * stream will <em>not</em> cause a frame rate drop unless there are still
+ * outstanding buffers for that stream from previous requests.</p>
+ * <p>Submitting a repeating request with a set of streams (call this <code>S</code>)
+ * results in a minimum frame duration equal to the normal minimum frame
+ * duration corresponding to <code>S</code>, plus the maximum stall duration
+ * for <code>S</code>.</p>
+ * <p>If interleaving requests with and without a stall duration,
+ * a request will stall by the maximum of the remaining times
+ * for each can-stall stream with outstanding buffers.</p>
+ * <p>This means that a stalling request will not have an exposure start
+ * until the stall has completed.</p>
+ * <p>This should correspond to the stall duration when only that stream is
+ * active, with all processing (typically in android.*.mode) set to FAST
+ * or OFF. Setting any of the processing modes to HIGH_QUALITY
+ * effectively results in an indeterminate stall duration for all
+ * streams in a request (the regular stall calculation rules are
+ * ignored).</p>
+ * <p>The following formats may always have a stall duration:</p>
+ * <ul>
+ * <li>{@link android.graphics.ImageFormat#JPEG}</li>
+ * <li>{@link android.graphics.ImageFormat#RAW_SENSOR}</li>
+ * </ul>
+ * <p>The following formats will never have a stall duration:</p>
+ * <ul>
+ * <li>{@link android.graphics.ImageFormat#YUV_420_888}</li>
+ * <li>{@link android.graphics.ImageFormat#RAW10}</li>
+ * </ul>
+ * <p>All other formats may or may not have an allowed stall duration on
+ * a per-capability basis; refer to ACAMERA_REQUEST_AVAILABLE_CAPABILITIES
+ * for more details.</p>
+ * <p>See ACAMERA_SENSOR_FRAME_DURATION for more information about
+ * calculating the max frame rate (absent stalls).</p>
+ * <p>(Keep up to date with
+ * {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration} )</p>
+ *
+ * @see ACAMERA_REQUEST_AVAILABLE_CAPABILITIES
+ * @see ACAMERA_SENSOR_FRAME_DURATION
+ */
ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS = // int64[4*n]
ACAMERA_SCALER_START + 12,
+ /**
+ * <p>The crop type that this camera device supports.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul>
+ *
+ * <p>When passing a non-centered crop region (ACAMERA_SCALER_CROP_REGION) to a camera
+ * device that only supports CENTER_ONLY cropping, the camera device will move the
+ * crop region to the center of the sensor active array (ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE)
+ * and keep the crop region width and height unchanged. The camera device will return the
+ * final used crop region in metadata result ACAMERA_SCALER_CROP_REGION.</p>
+ * <p>Camera devices that support FREEFORM cropping will support any crop region that
+ * is inside of the active array. The camera device will apply the same crop region and
+ * return the final used crop region in capture result metadata ACAMERA_SCALER_CROP_REGION.</p>
+ * <p>LEGACY capability devices will only support CENTER_ONLY cropping.</p>
+ *
+ * @see ACAMERA_SCALER_CROP_REGION
+ * @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE
+ */
ACAMERA_SCALER_CROPPING_TYPE = // byte (enum)
ACAMERA_SCALER_START + 13,
ACAMERA_SCALER_END,
+ /**
+ * <p>Duration each pixel is exposed to
+ * light.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * <li>ACaptureRequest</li>
+ * </ul>
+ *
+ * <p>If the sensor can't expose this exact duration, it will shorten the
+ * duration exposed to the nearest possible value (rather than expose longer).
+ * The final exposure time used will be available in the output capture result.</p>
+ * <p>This control is only effective if ACAMERA_CONTROL_AE_MODE or ACAMERA_CONTROL_MODE is set to
+ * OFF; otherwise the auto-exposure algorithm will override this value.</p>
+ *
+ * @see ACAMERA_CONTROL_AE_MODE
+ * @see ACAMERA_CONTROL_MODE
+ */
ACAMERA_SENSOR_EXPOSURE_TIME = // int64
ACAMERA_SENSOR_START,
+ /**
+ * <p>Duration from start of frame exposure to
+ * start of next frame exposure.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * <li>ACaptureRequest</li>
+ * </ul>
+ *
+ * <p>The maximum frame rate that can be supported by a camera subsystem is
+ * a function of many factors:</p>
+ * <ul>
+ * <li>Requested resolutions of output image streams</li>
+ * <li>Availability of binning / skipping modes on the imager</li>
+ * <li>The bandwidth of the imager interface</li>
+ * <li>The bandwidth of the various ISP processing blocks</li>
+ * </ul>
+ * <p>Since these factors can vary greatly between different ISPs and
+ * sensors, the camera abstraction tries to represent the bandwidth
+ * restrictions with as simple a model as possible.</p>
+ * <p>The model presented has the following characteristics:</p>
+ * <ul>
+ * <li>The image sensor is always configured to output the smallest
+ * resolution possible given the application's requested output stream
+ * sizes. The smallest resolution is defined as being at least as large
+ * as the largest requested output stream size; the camera pipeline must
+ * never digitally upsample sensor data when the crop region covers the
+ * whole sensor. In general, this means that if only small output stream
+ * resolutions are configured, the sensor can provide a higher frame
+ * rate.</li>
+ * <li>Since any request may use any or all the currently configured
+ * output streams, the sensor and ISP must be configured to support
+ * scaling a single capture to all the streams at the same time. This
+ * means the camera pipeline must be ready to produce the largest
+ * requested output size without any delay. Therefore, the overall
+ * frame rate of a given configured stream set is governed only by the
+ * largest requested stream resolution.</li>
+ * <li>Using more than one output stream in a request does not affect the
+ * frame duration.</li>
+ * <li>Certain format-streams may need to do additional background processing
+ * before data is consumed/produced by that stream. These processors
+ * can run concurrently to the rest of the camera pipeline, but
+ * cannot process more than 1 capture at a time.</li>
+ * </ul>
+ * <p>The necessary information for the application, given the model above,
+ * is provided via the android.scaler.streamConfigurationMap field using
+ * {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration}.
+ * These are used to determine the maximum frame rate / minimum frame
+ * duration that is possible for a given stream configuration.</p>
+ * <p>Specifically, the application can use the following rules to
+ * determine the minimum frame duration it can request from the camera
+ * device:</p>
+ * <ol>
+ * <li>Let the set of currently configured input/output streams
+ * be called <code>S</code>.</li>
+ * <li>Find the minimum frame durations for each stream in <code>S</code>, by looking
+ * it up in android.scaler.streamConfigurationMap using {@link
+ * android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration}
+ * (with its respective size/format). Let this set of frame durations be
+ * called <code>F</code>.</li>
+ * <li>For any given request <code>R</code>, the minimum frame duration allowed
+ * for <code>R</code> is the maximum out of all values in <code>F</code>. Let the streams
+ * used in <code>R</code> be called <code>S_r</code>.</li>
+ * </ol>
+ * <p>If none of the streams in <code>S_r</code> have a stall time (listed in {@link
+ * android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration}
+ * using its respective size/format), then the frame duration in <code>F</code>
+ * determines the steady state frame rate that the application will get
+ * if it uses <code>R</code> as a repeating request. Let this special kind of
+ * request be called <code>Rsimple</code>.</p>
+ * <p>A repeating request <code>Rsimple</code> can be <em>occasionally</em> interleaved
+ * by a single capture of a new request <code>Rstall</code> (which has at least
+ * one in-use stream with a non-0 stall time) and if <code>Rstall</code> has the
+ * same minimum frame duration this will not cause a frame rate loss
+ * if all buffers from the previous <code>Rstall</code> have already been
+ * delivered.</p>
+ * <p>For more details about stalling, see
+ * {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration}.</p>
+ * <p>This control is only effective if ACAMERA_CONTROL_AE_MODE or ACAMERA_CONTROL_MODE is set to
+ * OFF; otherwise the auto-exposure algorithm will override this value.</p>
+ *
+ * @see ACAMERA_CONTROL_AE_MODE
+ * @see ACAMERA_CONTROL_MODE
+ */
ACAMERA_SENSOR_FRAME_DURATION = // int64
ACAMERA_SENSOR_START + 1,
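+ /*
+ * A minimal sketch of the rule above: the frame duration requested for a
+ * capture must be at least the max of the per-stream minimum durations.
+ * It reuses the hypothetical minFrameDurationNs() helper sketched under
+ * ACAMERA_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, and assumes caller-owned
+ * streamFormat/streamW/streamH arrays of length numStreams and an
+ * ACaptureRequest* request:
+ *
+ *     int64_t requestMin = 0;
+ *     for (size_t i = 0; i < numStreams; i++) {
+ *         int64_t d = minFrameDurationNs(chars, streamFormat[i],
+ *                                        streamW[i], streamH[i]);
+ *         if (d > requestMin) requestMin = d; // max over set F
+ *     }
+ *     ACaptureRequest_setEntry_i64(request,
+ *             ACAMERA_SENSOR_FRAME_DURATION, 1, &requestMin);
+ */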
+ /**
+ * <p>The amount of gain applied to sensor data
+ * before processing.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * <li>ACaptureRequest</li>
+ * </ul>
+ *
+ * <p>The sensitivity is the standard ISO sensitivity value,
+ * as defined in ISO 12232:2006.</p>
+ * <p>The sensitivity must be within ACAMERA_SENSOR_INFO_SENSITIVITY_RANGE, and
+ * if it is less than ACAMERA_SENSOR_MAX_ANALOG_SENSITIVITY, the camera device
+ * is guaranteed to use only analog amplification for applying the gain.</p>
+ * <p>If the camera device cannot apply the exact sensitivity
+ * requested, it will reduce the gain to the nearest supported
+ * value. The final sensitivity used will be available in the
+ * output capture result.</p>
+ * <p>This control is only effective if ACAMERA_CONTROL_AE_MODE or ACAMERA_CONTROL_MODE is set to
+ * OFF; otherwise the auto-exposure algorithm will override this value.</p>
+ *
+ * @see ACAMERA_CONTROL_AE_MODE
+ * @see ACAMERA_CONTROL_MODE
+ * @see ACAMERA_SENSOR_INFO_SENSITIVITY_RANGE
+ * @see ACAMERA_SENSOR_MAX_ANALOG_SENSITIVITY
+ */
ACAMERA_SENSOR_SENSITIVITY = // int32
ACAMERA_SENSOR_START + 2,
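+ /*
+ * A minimal manual-exposure sketch using this key together with
+ * ACAMERA_SENSOR_EXPOSURE_TIME; the request and the specific values are
+ * assumptions, and the ISO value must lie within
+ * ACAMERA_SENSOR_INFO_SENSITIVITY_RANGE:
+ *
+ *     uint8_t aeOff = ACAMERA_CONTROL_AE_MODE_OFF; // disable auto-exposure
+ *     int64_t exposureNs = 16666666;               // ~1/60 s, hypothetical
+ *     int32_t iso = 200;                           // hypothetical
+ *     ACaptureRequest_setEntry_u8(request, ACAMERA_CONTROL_AE_MODE, 1, &aeOff);
+ *     ACaptureRequest_setEntry_i64(request,
+ *             ACAMERA_SENSOR_EXPOSURE_TIME, 1, &exposureNs);
+ *     ACaptureRequest_setEntry_i32(request,
+ *             ACAMERA_SENSOR_SENSITIVITY, 1, &iso);
+ */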
+ /**
+ * <p>The standard reference illuminant used as the scene light source when
+ * calculating the ACAMERA_SENSOR_COLOR_TRANSFORM1,
+ * ACAMERA_SENSOR_CALIBRATION_TRANSFORM1, and
+ * ACAMERA_SENSOR_FORWARD_MATRIX1 matrices.</p>
+ *
+ * @see ACAMERA_SENSOR_CALIBRATION_TRANSFORM1
+ * @see ACAMERA_SENSOR_COLOR_TRANSFORM1
+ * @see ACAMERA_SENSOR_FORWARD_MATRIX1
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul>
+ *
+ * <p>The values in this key correspond to the values defined for the
+ * EXIF LightSource tag. These illuminants are standard light sources
+ * that are often used for calibrating camera devices.</p>
+ * <p>If this key is present, then ACAMERA_SENSOR_COLOR_TRANSFORM1,
+ * ACAMERA_SENSOR_CALIBRATION_TRANSFORM1, and
+ * ACAMERA_SENSOR_FORWARD_MATRIX1 will also be present.</p>
+ * <p>Some devices may choose to provide a second set of calibration
+ * information for improved quality, including
+ * ACAMERA_SENSOR_REFERENCE_ILLUMINANT2 and its corresponding matrices.</p>
+ *
+ * @see ACAMERA_SENSOR_CALIBRATION_TRANSFORM1
+ * @see ACAMERA_SENSOR_COLOR_TRANSFORM1
+ * @see ACAMERA_SENSOR_FORWARD_MATRIX1
+ * @see ACAMERA_SENSOR_REFERENCE_ILLUMINANT2
+ */
ACAMERA_SENSOR_REFERENCE_ILLUMINANT1 = // byte (enum)
ACAMERA_SENSOR_START + 3,
+ /**
+ * <p>The standard reference illuminant used as the scene light source when
+ * calculating the ACAMERA_SENSOR_COLOR_TRANSFORM2,
+ * ACAMERA_SENSOR_CALIBRATION_TRANSFORM2, and
+ * ACAMERA_SENSOR_FORWARD_MATRIX2 matrices.</p>
+ *
+ * @see ACAMERA_SENSOR_CALIBRATION_TRANSFORM2
+ * @see ACAMERA_SENSOR_COLOR_TRANSFORM2
+ * @see ACAMERA_SENSOR_FORWARD_MATRIX2
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul>
+ *
+ * <p>See ACAMERA_SENSOR_REFERENCE_ILLUMINANT1 for more details.</p>
+ * <p>If this key is present, then ACAMERA_SENSOR_COLOR_TRANSFORM2,
+ * ACAMERA_SENSOR_CALIBRATION_TRANSFORM2, and
+ * ACAMERA_SENSOR_FORWARD_MATRIX2 will also be present.</p>
+ *
+ * @see ACAMERA_SENSOR_CALIBRATION_TRANSFORM2
+ * @see ACAMERA_SENSOR_COLOR_TRANSFORM2
+ * @see ACAMERA_SENSOR_FORWARD_MATRIX2
+ * @see ACAMERA_SENSOR_REFERENCE_ILLUMINANT1
+ */
ACAMERA_SENSOR_REFERENCE_ILLUMINANT2 = // byte
ACAMERA_SENSOR_START + 4,
+ /**
+ * <p>A per-device calibration transform matrix that maps from the
+ * reference sensor colorspace to the actual device sensor colorspace.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul>
+ *
+ * <p>This matrix is used to correct for per-device variations in the
+ * sensor colorspace, and is used for processing raw buffer data.</p>
+ * <p>The matrix is expressed as a 3x3 matrix in row-major-order, and
+ * contains a per-device calibration transform that maps colors
+ * from reference sensor color space (i.e. the "golden module"
+ * colorspace) into this camera device's native sensor color
+ * space under the first reference illuminant
+ * (ACAMERA_SENSOR_REFERENCE_ILLUMINANT1).</p>
+ *
+ * @see ACAMERA_SENSOR_REFERENCE_ILLUMINANT1
+ */
ACAMERA_SENSOR_CALIBRATION_TRANSFORM1 = // rational[3*3]
ACAMERA_SENSOR_START + 5,
+ /**
+ * <p>A per-device calibration transform matrix that maps from the
+ * reference sensor colorspace to the actual device sensor colorspace
+ * (this is the colorspace of the raw buffer data).</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul>
+ *
+ * <p>This matrix is used to correct for per-device variations in the
+ * sensor colorspace, and is used for processing raw buffer data.</p>
+ * <p>The matrix is expressed as a 3x3 matrix in row-major-order, and
+ * contains a per-device calibration transform that maps colors
+ * from reference sensor color space (i.e. the "golden module"
+ * colorspace) into this camera device's native sensor color
+ * space under the second reference illuminant
+ * (ACAMERA_SENSOR_REFERENCE_ILLUMINANT2).</p>
+ * <p>This matrix will only be present if the second reference
+ * illuminant is present.</p>
+ *
+ * @see ACAMERA_SENSOR_REFERENCE_ILLUMINANT2
+ */
ACAMERA_SENSOR_CALIBRATION_TRANSFORM2 = // rational[3*3]
ACAMERA_SENSOR_START + 6,
+ /**
+ * <p>A matrix that transforms color values from CIE XYZ color space to
+ * reference sensor color space.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul>
+ *
+ * <p>This matrix is used to convert from the standard CIE XYZ color
+ * space to the reference sensor colorspace, and is used when processing
+ * raw buffer data.</p>
+ * <p>The matrix is expressed as a 3x3 matrix in row-major-order, and
+ * contains a color transform matrix that maps colors from the CIE
+ * XYZ color space to the reference sensor color space (i.e. the
+ * "golden module" colorspace) under the first reference illuminant
+ * (ACAMERA_SENSOR_REFERENCE_ILLUMINANT1).</p>
+ * <p>The white points chosen in both the reference sensor color space
+ * and the CIE XYZ colorspace when calculating this transform will
+ * match the standard white point for the first reference illuminant
+ * (i.e. no chromatic adaptation will be applied by this transform).</p>
+ *
+ * @see ACAMERA_SENSOR_REFERENCE_ILLUMINANT1
+ */
ACAMERA_SENSOR_COLOR_TRANSFORM1 = // rational[3*3]
ACAMERA_SENSOR_START + 7,
+ /**
+ * <p>A matrix that transforms color values from CIE XYZ color space to
+ * reference sensor color space.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul>
+ *
+ * <p>This matrix is used to convert from the standard CIE XYZ color
+ * space to the reference sensor colorspace, and is used when processing
+ * raw buffer data.</p>
+ * <p>The matrix is expressed as a 3x3 matrix in row-major-order, and
+ * contains a color transform matrix that maps colors from the CIE
+ * XYZ color space to the reference sensor color space (i.e. the
+ * "golden module" colorspace) under the second reference illuminant
+ * (ACAMERA_SENSOR_REFERENCE_ILLUMINANT2).</p>
+ * <p>The white points chosen in both the reference sensor color space
+ * and the CIE XYZ colorspace when calculating this transform will
+ * match the standard white point for the second reference illuminant
+ * (i.e. no chromatic adaptation will be applied by this transform).</p>
+ * <p>This matrix will only be present if the second reference
+ * illuminant is present.</p>
+ *
+ * @see ACAMERA_SENSOR_REFERENCE_ILLUMINANT2
+ */
ACAMERA_SENSOR_COLOR_TRANSFORM2 = // rational[3*3]
ACAMERA_SENSOR_START + 8,
+ /**
+ * <p>A matrix that transforms white balanced camera colors from the reference
+ * sensor colorspace to the CIE XYZ colorspace with a D50 whitepoint.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul>
+ *
+ * <p>This matrix is used to convert to the standard CIE XYZ colorspace, and
+ * is used when processing raw buffer data.</p>
+ * <p>This matrix is expressed as a 3x3 matrix in row-major-order, and contains
+ * a color transform matrix that maps white balanced colors from the
+ * reference sensor color space to the CIE XYZ color space with a D50 white
+ * point.</p>
+ * <p>Under the first reference illuminant (ACAMERA_SENSOR_REFERENCE_ILLUMINANT1)
+ * this matrix is chosen so that the standard white point for this reference
+ * illuminant in the reference sensor colorspace is mapped to D50 in the
+ * CIE XYZ colorspace.</p>
+ *
+ * @see ACAMERA_SENSOR_REFERENCE_ILLUMINANT1
+ */
ACAMERA_SENSOR_FORWARD_MATRIX1 = // rational[3*3]
ACAMERA_SENSOR_START + 9,
+ /**
+ * <p>A matrix that transforms white balanced camera colors from the reference
+ * sensor colorspace to the CIE XYZ colorspace with a D50 whitepoint.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul>
+ *
+ * <p>This matrix is used to convert to the standard CIE XYZ colorspace, and
+ * is used when processing raw buffer data.</p>
+ * <p>This matrix is expressed as a 3x3 matrix in row-major-order, and contains
+ * a color transform matrix that maps white balanced colors from the
+ * reference sensor color space to the CIE XYZ color space with a D50 white
+ * point.</p>
+ * <p>Under the second reference illuminant (ACAMERA_SENSOR_REFERENCE_ILLUMINANT2)
+ * this matrix is chosen so that the standard white point for this reference
+ * illuminant in the reference sensor colorspace is mapped to D50 in the
+ * CIE XYZ colorspace.</p>
+ * <p>This matrix will only be present if the second reference
+ * illuminant is present.</p>
+ *
+ * @see ACAMERA_SENSOR_REFERENCE_ILLUMINANT2
+ */
ACAMERA_SENSOR_FORWARD_MATRIX2 = // rational[3*3]
ACAMERA_SENSOR_START + 10,
+ /**
+ * <p>A fixed black level offset for each of the color filter arrangement
+ * (CFA) mosaic channels.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul>
+ *
+ * <p>This key specifies the zero light value for each of the CFA mosaic
+ * channels in the camera sensor. The maximal value output by the
+ * sensor is represented by the value in ACAMERA_SENSOR_INFO_WHITE_LEVEL.</p>
+ * <p>The values are given in the same order as channels listed for the CFA
+ * layout key (see ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT), i.e. the
+ * nth value given corresponds to the black level offset for the nth
+ * color channel listed in the CFA.</p>
+ * <p>The black level values of captured images may vary for different
+ * capture settings (e.g., ACAMERA_SENSOR_SENSITIVITY). This key
+ * represents a coarse approximation for such a case. It is recommended to
+ * use ACAMERA_SENSOR_DYNAMIC_BLACK_LEVEL or use pixels from
+ * ACAMERA_SENSOR_OPTICAL_BLACK_REGIONS directly for captures when
+ * supported by the camera device, which provides more accurate black
+ * level values. For raw capture in particular, it is recommended to use
+ * pixels from ACAMERA_SENSOR_OPTICAL_BLACK_REGIONS to calculate black
+ * level values for each frame.</p>
+ *
+ * @see ACAMERA_SENSOR_DYNAMIC_BLACK_LEVEL
+ * @see ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT
+ * @see ACAMERA_SENSOR_INFO_WHITE_LEVEL
+ * @see ACAMERA_SENSOR_OPTICAL_BLACK_REGIONS
+ * @see ACAMERA_SENSOR_SENSITIVITY
+ */
ACAMERA_SENSOR_BLACK_LEVEL_PATTERN = // int32[4]
ACAMERA_SENSOR_START + 12,
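+ /*
+ * A minimal sketch of normalizing a raw sample with these fixed levels;
+ * chars, the CFA channel index c, and the raw sample value are assumptions
+ * (the per-frame keys, when available, give more accurate levels):
+ *
+ *     ACameraMetadata_const_entry black, white;
+ *     ACameraMetadata_getConstEntry(chars,
+ *             ACAMERA_SENSOR_BLACK_LEVEL_PATTERN, &black);
+ *     ACameraMetadata_getConstEntry(chars,
+ *             ACAMERA_SENSOR_INFO_WHITE_LEVEL, &white);
+ *     float norm = (float)(raw - black.data.i32[c]) /
+ *                  (float)(white.data.i32[0] - black.data.i32[c]);
+ */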
+ /**
+ * <p>Maximum sensitivity that is implemented
+ * purely through analog gain.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul>
+ *
+ * <p>For ACAMERA_SENSOR_SENSITIVITY values less than or
+ * equal to this, all applied gain must be analog. For
+ * values above this, the gain applied can be a mix of analog and
+ * digital.</p>
+ *
+ * @see ACAMERA_SENSOR_SENSITIVITY
+ */
ACAMERA_SENSOR_MAX_ANALOG_SENSITIVITY = // int32
ACAMERA_SENSOR_START + 13,
+ /**
+ * <p>Clockwise angle through which the output image needs to be rotated to be
+ * upright on the device screen in its native orientation.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul>
+ *
+ * <p>Also defines the direction of rolling shutter readout, which is from top to bottom in
+ * the sensor's coordinate system.</p>
+ */
ACAMERA_SENSOR_ORIENTATION = // int32
ACAMERA_SENSOR_START + 14,
+ /**
+ * <p>Time at start of exposure of first
+ * row of the image sensor active array, in nanoseconds.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * </ul>
+ *
+ * <p>The timestamps are also included in all image
+ * buffers produced for the same capture, and will be identical
+ * on all the outputs.</p>
+ * <p>When ACAMERA_SENSOR_INFO_TIMESTAMP_SOURCE <code>==</code> UNKNOWN,
+ * the timestamps measure time since an unspecified starting point,
+ * and are monotonically increasing. They can be compared with the
+ * timestamps for other captures from the same camera device, but are
+ * not guaranteed to be comparable to any other time source.</p>
+ * <p>When ACAMERA_SENSOR_INFO_TIMESTAMP_SOURCE <code>==</code> REALTIME, the
+ * timestamps measure time in the same timebase as {@link
+ * android.os.SystemClock#elapsedRealtimeNanos}, and they can
+ * be compared to other timestamps from other subsystems that
+ * are using that base.</p>
+ * <p>For reprocessing, the timestamp will match the start of exposure of
+ * the input image, i.e. {@link android.hardware.camera2.CaptureResult#SENSOR_TIMESTAMP the
+ * timestamp} in the TotalCaptureResult that was used to create the
+ * reprocess capture request.</p>
+ *
+ * @see ACAMERA_SENSOR_INFO_TIMESTAMP_SOURCE
+ */
ACAMERA_SENSOR_TIMESTAMP = // int64
ACAMERA_SENSOR_START + 16,
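+ /*
+ * A minimal latency sketch: when the timestamp source is REALTIME the
+ * capture timestamp shares a timebase with CLOCK_BOOTTIME. chars and the
+ * sensorTimestamp read from a capture result are assumptions; needs <time.h>:
+ *
+ *     ACameraMetadata_const_entry src;
+ *     ACameraMetadata_getConstEntry(chars,
+ *             ACAMERA_SENSOR_INFO_TIMESTAMP_SOURCE, &src);
+ *     if (src.data.u8[0] == ACAMERA_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME) {
+ *         struct timespec now;
+ *         clock_gettime(CLOCK_BOOTTIME, &now);
+ *         int64_t nowNs = (int64_t)now.tv_sec * 1000000000LL + now.tv_nsec;
+ *         int64_t latencyNs = nowNs - sensorTimestamp;
+ *     }
+ */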
+ /**
+ * <p>The estimated camera neutral color in the native sensor colorspace at
+ * the time of capture.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * </ul>
+ *
+ * <p>This value gives the neutral color point encoded as an RGB value in the
+ * native sensor color space. The neutral color point indicates the
+ * currently estimated white point of the scene illumination. It can be
+ * used to interpolate between the provided color transforms when
+ * processing raw sensor data.</p>
+ * <p>The order of the values is R, G, B; where R is in the lowest index.</p>
+ */
ACAMERA_SENSOR_NEUTRAL_COLOR_POINT = // rational[3]
ACAMERA_SENSOR_START + 18,
+ /**
+ * <p>Noise model coefficients for each CFA mosaic channel.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * </ul>
+ *
+ * <p>This key contains two noise model coefficients for each CFA channel
+ * corresponding to the sensor amplification (S) and sensor readout
+ * noise (O). These are given as pairs of coefficients for each channel
+ * in the same order as channels listed for the CFA layout key
+ * (see ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT). This is
+ * represented as an array of Pair<Double, Double>, where
+ * the first member of the Pair at index n is the S coefficient and the
+ * second member is the O coefficient for the nth color channel in the CFA.</p>
+ * <p>These coefficients are used in a two parameter noise model to describe
+ * the amount of noise present in the image for each CFA channel. The
+ * noise model used here is:</p>
+ * <p>N(x) = sqrt(Sx + O)</p>
+ * <p>Where x represents the recorded signal of a CFA channel normalized to
+ * the range [0, 1], and S and O are the noise model coefficients for
+ * that channel.</p>
+ * <p>A more detailed description of the noise model can be found in the
+ * Adobe DNG specification for the NoiseProfile tag.</p>
+ *
+ * @see ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT
+ */
ACAMERA_SENSOR_NOISE_PROFILE = // double[2*CFA Channels]
ACAMERA_SENSOR_START + 19,
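+ /*
+ * A minimal sketch of evaluating the model above for one channel; result
+ * (an ACameraMetadata* from a capture callback), the channel index c, and
+ * the normalized signal x are assumptions; needs <math.h>:
+ *
+ *     ACameraMetadata_const_entry np;
+ *     ACameraMetadata_getConstEntry(result, ACAMERA_SENSOR_NOISE_PROFILE, &np);
+ *     double S = np.data.d[2 * c];     // amplification coefficient
+ *     double O = np.data.d[2 * c + 1]; // readout noise coefficient
+ *     double noise = sqrt(S * x + O);  // N(x) = sqrt(Sx + O)
+ */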
+ /**
+ * <p>The worst-case divergence between Bayer green channels.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * </ul>
+ *
+ * <p>This value is an estimate of the worst case split between the
+ * Bayer green channels in the red and blue rows in the sensor color
+ * filter array.</p>
+ * <p>The green split is calculated as follows:</p>
+ * <ol>
+ * <li>A 5x5 pixel (or larger) window W within the active sensor array is
+ * chosen. The term 'pixel' here is taken to mean a group of 4 Bayer
+ * mosaic channels (R, Gr, Gb, B). The location and size of the window
+ * chosen is implementation defined, and should be chosen to provide a
+ * green split estimate that is both representative of the entire image
+ * for this camera sensor, and can be calculated quickly.</li>
+ * <li>The arithmetic mean of the green channels from the red
+ * rows (mean_Gr) within W is computed.</li>
+ * <li>The arithmetic mean of the green channels from the blue
+ * rows (mean_Gb) within W is computed.</li>
+ * <li>The maximum ratio R of the two means is computed as follows:
+ * <code>R = max((mean_Gr + 1)/(mean_Gb + 1), (mean_Gb + 1)/(mean_Gr + 1))</code></li>
+ * </ol>
+ * <p>The ratio R is the green split divergence reported for this property,
+ * which represents how much the green channels differ in the mosaic
+ * pattern. This value is typically used to determine the treatment of
+ * the green mosaic channels when demosaicing.</p>
+ * <p>The green split value can be roughly interpreted as follows:</p>
+ * <ul>
+ * <li>R < 1.03 is a negligible split (<3% divergence).</li>
+ * <li>1.03 <= R <= 1.20 will require some software
+ * correction to avoid demosaic errors (3-20% divergence).</li>
+ * <li>R > 1.20 will require strong software correction to produce
+ * a usable image (>20% divergence).</li>
+ * </ul>
+ */
ACAMERA_SENSOR_GREEN_SPLIT = // float
ACAMERA_SENSOR_START + 22,
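+ /*
+ * A minimal sketch of bucketing the reported ratio per the interpretation
+ * above; result is assumed to be an ACameraMetadata* from a capture callback:
+ *
+ *     ACameraMetadata_const_entry gs;
+ *     ACameraMetadata_getConstEntry(result, ACAMERA_SENSOR_GREEN_SPLIT, &gs);
+ *     float R = gs.data.f[0];
+ *     if (R < 1.03f) {
+ *         // negligible split; no special demosaic treatment needed
+ *     } else if (R <= 1.20f) {
+ *         // some software correction needed
+ *     } else {
+ *         // strong software correction needed
+ *     }
+ */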
+ /**
+ * <p>A pixel <code>[R, G_even, G_odd, B]</code> that supplies the test pattern
+ * when ACAMERA_SENSOR_TEST_PATTERN_MODE is SOLID_COLOR.</p>
+ *
+ * @see ACAMERA_SENSOR_TEST_PATTERN_MODE
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * <li>ACaptureRequest</li>
+ * </ul>
+ *
+ * <p>Each color channel is treated as an unsigned 32-bit integer.
+ * The camera device then uses the most significant X bits
+ * that correspond to how many bits are in its Bayer raw sensor
+ * output.</p>
+ * <p>For example, a sensor with RAW10 Bayer output would use the
+ * 10 most significant bits from each color channel.</p>
+ */
ACAMERA_SENSOR_TEST_PATTERN_DATA = // int32[4]
ACAMERA_SENSOR_START + 23,
+ /**
+ * <p>When enabled, the sensor sends a test pattern instead of
+ * doing a real exposure from the camera.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * <li>ACaptureRequest</li>
+ * </ul>
+ *
+ * <p>When a test pattern is enabled, all manual sensor controls specified
+ * by ACAMERA_SENSOR_* will be ignored. All other controls should
+ * work as normal.</p>
+ * <p>For example, if manual flash is enabled, flash firing should still
+ * occur (and the test pattern will remain unmodified, since the flash
+ * would not actually affect it).</p>
+ * <p>Defaults to OFF.</p>
+ */
ACAMERA_SENSOR_TEST_PATTERN_MODE = // int32 (enum)
ACAMERA_SENSOR_START + 24,
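+ /*
+ * A minimal sketch of requesting a solid green test frame with this key and
+ * ACAMERA_SENSOR_TEST_PATTERN_DATA; request is an assumption, and SOLID_COLOR
+ * should first be confirmed in ACAMERA_SENSOR_AVAILABLE_TEST_PATTERN_MODES:
+ *
+ *     int32_t mode = ACAMERA_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR;
+ *     int32_t data[4] = { 0, ~0, ~0, 0 }; // [R, Geven, Godd, B], all-ones green
+ *     ACaptureRequest_setEntry_i32(request,
+ *             ACAMERA_SENSOR_TEST_PATTERN_MODE, 1, &mode);
+ *     ACaptureRequest_setEntry_i32(request,
+ *             ACAMERA_SENSOR_TEST_PATTERN_DATA, 4, data);
+ */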
+ /**
+ * <p>List of sensor test pattern modes for ACAMERA_SENSOR_TEST_PATTERN_MODE
+ * supported by this camera device.</p>
+ *
+ * @see ACAMERA_SENSOR_TEST_PATTERN_MODE
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul>
+ *
+ * <p>Defaults to OFF, and always includes OFF if defined.</p>
+ */
ACAMERA_SENSOR_AVAILABLE_TEST_PATTERN_MODES = // int32[n]
ACAMERA_SENSOR_START + 25,
+ /**
+ * <p>Duration between the start of first row exposure
+ * and the start of last row exposure.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * </ul>
+ *
+ * <p>This is the exposure time skew between the first and last
+ * row exposure start times. The first row and the last row are
+ * the first and last rows inside of the
+ * ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE.</p>
+ * <p>For typical camera sensors that use rolling shutters, this is also equivalent
+ * to the frame readout time.</p>
+ *
+ * @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE
+ */
ACAMERA_SENSOR_ROLLING_SHUTTER_SKEW = // int64
ACAMERA_SENSOR_START + 26,
+ /**
+ * <p>List of disjoint rectangles indicating the sensor
+ * optically shielded black pixel regions.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul>
+ *
+ * <p>In most camera sensors, the active array is surrounded by some
+ * optically shielded pixel areas. By blocking light, these pixels
+ * provide a reliable black reference for black level compensation
+ * in the active array region.</p>
+ * <p>This key provides a list of disjoint rectangles specifying the
+ * regions of optically shielded (with metal shield) black pixel
+ * regions if the camera device is capable of reading out these black
+ * pixels in the output raw images. In comparison to the fixed black
+ * level values reported by ACAMERA_SENSOR_BLACK_LEVEL_PATTERN, this key
+ * may provide a more accurate way for the application to calculate the
+ * black level of each captured raw image.</p>
+ * <p>When this key is reported, the ACAMERA_SENSOR_DYNAMIC_BLACK_LEVEL and
+ * ACAMERA_SENSOR_DYNAMIC_WHITE_LEVEL will also be reported.</p>
+ *
+ * @see ACAMERA_SENSOR_BLACK_LEVEL_PATTERN
+ * @see ACAMERA_SENSOR_DYNAMIC_BLACK_LEVEL
+ * @see ACAMERA_SENSOR_DYNAMIC_WHITE_LEVEL
+ */
ACAMERA_SENSOR_OPTICAL_BLACK_REGIONS = // int32[4*num_regions]
ACAMERA_SENSOR_START + 27,
+ /**
+ * <p>A per-frame dynamic black level offset for each of the color filter
+ * arrangement (CFA) mosaic channels.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * </ul>
+ *
+ * <p>Camera sensor black levels may vary dramatically for different
+ * capture settings (e.g. ACAMERA_SENSOR_SENSITIVITY). The fixed black
+ * level reported by ACAMERA_SENSOR_BLACK_LEVEL_PATTERN may be too
+ * inaccurate to represent the actual value on a per-frame basis. The
+ * camera device internal pipeline relies on reliable black level values
+ * to process the raw images appropriately. To get the best image
+ * quality, the camera device may choose to estimate the per frame black
+ * level values either based on optically shielded black regions
+ * (ACAMERA_SENSOR_OPTICAL_BLACK_REGIONS) or its internal model.</p>
+ * <p>This key reports the camera device estimated per-frame zero light
+ * value for each of the CFA mosaic channels in the camera sensor. The
+ * ACAMERA_SENSOR_BLACK_LEVEL_PATTERN may only represent a coarse
+ * approximation of the actual black level values. This value is the
+ * black level used in the camera device's internal image processing pipeline
+ * and is generally more accurate than the fixed black level values.
+ * However, since they are estimated values by the camera device, they
+ * may not be as accurate as the black level values calculated from the
+ * optical black pixels reported by ACAMERA_SENSOR_OPTICAL_BLACK_REGIONS.</p>
+ * <p>The values are given in the same order as channels listed for the CFA
+ * layout key (see ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT), i.e. the
+ * nth value given corresponds to the black level offset for the nth
+ * color channel listed in the CFA.</p>
+ * <p>This key will be available if ACAMERA_SENSOR_OPTICAL_BLACK_REGIONS is
+ * available or the camera device advertises this key via
+ * {@link android.hardware.camera2.CameraCharacteristics#getAvailableCaptureResultKeys}.</p>
+ *
+ * @see ACAMERA_SENSOR_BLACK_LEVEL_PATTERN
+ * @see ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT
+ * @see ACAMERA_SENSOR_OPTICAL_BLACK_REGIONS
+ * @see ACAMERA_SENSOR_SENSITIVITY
+ */
ACAMERA_SENSOR_DYNAMIC_BLACK_LEVEL = // float[4]
ACAMERA_SENSOR_START + 28,
+ /**
+ * <p>Maximum raw value output by sensor for this frame.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * </ul>
+ *
+ * <p>Since the ACAMERA_SENSOR_BLACK_LEVEL_PATTERN may change for different
+ * capture settings (e.g., ACAMERA_SENSOR_SENSITIVITY), the white
+ * level will change accordingly. This key is similar to
+ * ACAMERA_SENSOR_INFO_WHITE_LEVEL, but specifies the camera device
+ * estimated white level for each frame.</p>
+ * <p>This key will be available if ACAMERA_SENSOR_OPTICAL_BLACK_REGIONS is
+ * available or the camera device advertises this key via
+ * {@link android.hardware.camera2.CameraCharacteristics#getAvailableCaptureResultKeys}.</p>
+ *
+ * @see ACAMERA_SENSOR_BLACK_LEVEL_PATTERN
+ * @see ACAMERA_SENSOR_INFO_WHITE_LEVEL
+ * @see ACAMERA_SENSOR_OPTICAL_BLACK_REGIONS
+ * @see ACAMERA_SENSOR_SENSITIVITY
+ */
ACAMERA_SENSOR_DYNAMIC_WHITE_LEVEL = // int32
ACAMERA_SENSOR_START + 29,
ACAMERA_SENSOR_END,
+ /**
+ * <p>The area of the image sensor which corresponds to active pixels after any geometric
+ * distortion correction has been applied.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul>
+ *
+ * <p>This is the rectangle representing the size of the active region of the sensor (i.e.
+ * the region that actually receives light from the scene) after any geometric correction
+ * has been applied, and should be treated as the maximum size in pixels of any of the
+ * image output formats aside from the raw formats.</p>
+ * <p>This rectangle is defined relative to the full pixel array; (0,0) is the top-left of
+ * the full pixel array, and the size of the full pixel array is given by
+ * ACAMERA_SENSOR_INFO_PIXEL_ARRAY_SIZE.</p>
+ * <p>The coordinate system for most other keys that list pixel coordinates, including
+ * ACAMERA_SCALER_CROP_REGION, is defined relative to the active array rectangle given in
+ * this field, with <code>(0, 0)</code> being the top-left of this rectangle.</p>
+ * <p>The active array may be smaller than the full pixel array, since the full array may
+ * include black calibration pixels or other inactive regions, and geometric correction
+ * resulting in scaling or cropping may have been applied.</p>
+ *
+ * @see ACAMERA_SCALER_CROP_REGION
+ * @see ACAMERA_SENSOR_INFO_PIXEL_ARRAY_SIZE
+ */
ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE = // int32[4]
ACAMERA_SENSOR_INFO_START,
+ /**
+ * <p>Range of sensitivities for ACAMERA_SENSOR_SENSITIVITY supported by this
+ * camera device.</p>
+ *
+ * @see ACAMERA_SENSOR_SENSITIVITY
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul>
+ *
+ * <p>The values are the standard ISO sensitivity values,
+ * as defined in ISO 12232:2006.</p>
+ */
ACAMERA_SENSOR_INFO_SENSITIVITY_RANGE = // int32[2]
ACAMERA_SENSOR_INFO_START + 1,
+ /**
+ * <p>The arrangement of color filters on sensor;
+ * represents the colors in the top-left 2x2 section of
+ * the sensor, in reading order.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul>
+ */
ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT = // byte (enum)
ACAMERA_SENSOR_INFO_START + 2,
+ /**
+ * <p>The range of image exposure times for ACAMERA_SENSOR_EXPOSURE_TIME supported
+ * by this camera device.</p>
+ *
+ * @see ACAMERA_SENSOR_EXPOSURE_TIME
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul>
+ */
ACAMERA_SENSOR_INFO_EXPOSURE_TIME_RANGE = // int64[2]
ACAMERA_SENSOR_INFO_START + 3,
+ /**
+ * <p>The maximum possible frame duration (minimum frame rate) for
+ * ACAMERA_SENSOR_FRAME_DURATION that is supported this camera device.</p>
+ *
+ * @see ACAMERA_SENSOR_FRAME_DURATION
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul>
+ *
+ * <p>Attempting to use frame durations beyond the maximum will result in the frame
+ * duration being clipped to the maximum. See that control for a full definition of frame
+ * durations.</p>
+ * <p>Refer to {@link
+ * android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration}
+ * for the minimum frame duration values.</p>
+ */
ACAMERA_SENSOR_INFO_MAX_FRAME_DURATION = // int64
ACAMERA_SENSOR_INFO_START + 4,
+ /**
+ * <p>The physical dimensions of the full pixel
+ * array.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul>
+ *
+ * <p>This is the physical size of the sensor pixel
+ * array defined by ACAMERA_SENSOR_INFO_PIXEL_ARRAY_SIZE.</p>
+ *
+ * @see ACAMERA_SENSOR_INFO_PIXEL_ARRAY_SIZE
+ */
ACAMERA_SENSOR_INFO_PHYSICAL_SIZE = // float[2]
ACAMERA_SENSOR_INFO_START + 5,
+ /**
+ * <p>Dimensions of the full pixel array, possibly
+ * including black calibration pixels.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul>
+ *
+ * <p>The pixel count of the full pixel array of the image sensor, which covers the
+ * ACAMERA_SENSOR_INFO_PHYSICAL_SIZE area. This represents the full pixel dimensions of
+ * the raw buffers produced by this sensor.</p>
+ * <p>If a camera device supports raw sensor formats, either this or
+ * ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE defines the maximum dimensions for
+ * the raw output formats listed in android.scaler.streamConfigurationMap (this depends on
+ * whether or not the image sensor returns buffers containing pixels that are not
+ * part of the active array region for black level calibration or other purposes).</p>
+ * <p>Some parts of the full pixel array may not receive light from the scene,
+ * or be otherwise inactive. The ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE key
+ * defines the rectangle of active pixels that will be included in processed image
+ * formats.</p>
+ *
+ * @see ACAMERA_SENSOR_INFO_PHYSICAL_SIZE
+ * @see ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE
+ */
ACAMERA_SENSOR_INFO_PIXEL_ARRAY_SIZE = // int32[2]
ACAMERA_SENSOR_INFO_START + 6,
+ /**
+ * <p>Maximum raw value output by sensor.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul>
+ *
+ * <p>This specifies the fully-saturated encoding level for the raw
+ * sample values from the sensor. This is typically caused by the
+ * sensor becoming highly non-linear or clipping. The minimum for
+ * each channel is specified by the offset in the
+ * ACAMERA_SENSOR_BLACK_LEVEL_PATTERN key.</p>
+ * <p>The white level is typically determined either by sensor bit depth
+ * (8-14 bits is expected), or by the point where the sensor response
+ * becomes too non-linear to be useful. The default value for this is the
+ * maximum representable value for a 16-bit raw sample (2^16 - 1).</p>
+ * <p>The white level values of captured images may vary for different
+ * capture settings (e.g., ACAMERA_SENSOR_SENSITIVITY). This key
+ * represents a coarse approximation for such cases. It is recommended
+ * to use ACAMERA_SENSOR_DYNAMIC_WHITE_LEVEL for captures when supported
+ * by the camera device, which provides more accurate white level values.</p>
+ *
+ * @see ACAMERA_SENSOR_BLACK_LEVEL_PATTERN
+ * @see ACAMERA_SENSOR_DYNAMIC_WHITE_LEVEL
+ * @see ACAMERA_SENSOR_SENSITIVITY
+ */
ACAMERA_SENSOR_INFO_WHITE_LEVEL = // int32
ACAMERA_SENSOR_INFO_START + 7,
+ /**
+ * <p>The time base source for sensor capture start timestamps.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul>
+ *
+ * <p>The timestamps provided for captures are always in nanoseconds and monotonic, but
+ * may not be based on a time source that can be compared to other system time sources.</p>
+ * <p>This characteristic defines the source for the timestamps, and therefore whether they
+ * can be compared against other system time sources/timestamps.</p>
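+ * <p>A minimal sketch of checking this characteristic, assuming a valid
+ * <code>ACameraMetadata* chars</code>:</p>
+ * <pre><code>ACameraMetadata_const_entry entry;
+ * if (ACameraMetadata_getConstEntry(chars,
+ *         ACAMERA_SENSOR_INFO_TIMESTAMP_SOURCE, &entry) == ACAMERA_OK &&
+ *     entry.data.u8[0] == ACAMERA_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME) {
+ *     // Capture timestamps can be compared against other system clocks.
+ * }
+ * </code></pre>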
+ */
ACAMERA_SENSOR_INFO_TIMESTAMP_SOURCE = // byte (enum)
ACAMERA_SENSOR_INFO_START + 8,
+ /**
+ * <p>Whether the RAW images output from this camera device are subject to
+ * lens shading correction.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul>
+ *
+ * <p>If TRUE, all images produced by the camera device in the RAW image formats will
+ * have lens shading correction already applied to them. If FALSE, the images will
+ * not be adjusted for lens shading correction.
+ * See android.request.maxNumOutputRaw for a list of RAW image formats.</p>
+ * <p>This key will be <code>null</code> for all devices that do not report this information.
+ * Devices with RAW capability will always report this information in this key.</p>
+ */
ACAMERA_SENSOR_INFO_LENS_SHADING_APPLIED = // byte (enum)
ACAMERA_SENSOR_INFO_START + 9,
+ /**
+ * <p>The area of the image sensor which corresponds to active pixels prior to the
+ * application of any geometric distortion correction.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul>
+ *
+ * <p>This is the rectangle representing the size of the active region of the sensor (i.e.
+ * the region that actually receives light from the scene) before any geometric correction
+ * has been applied, and should be treated as the active region rectangle for any of the
+ * raw formats. All metadata associated with raw processing (e.g. the lens shading
+ * correction map, and radial distortion fields) treats the top, left of this rectangle as
+ * the origin, (0,0).</p>
+ * <p>The size of this region determines the maximum field of view and the maximum number of
+ * pixels that an image from this sensor can contain, prior to the application of
+ * geometric distortion correction. The effective maximum pixel dimensions of a
+ * post-distortion-corrected image are given by the ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE
+ * field, and the effective maximum field of view for a post-distortion-corrected image
+ * can be calculated by applying the geometric distortion correction fields to this
+ * rectangle, and cropping to the rectangle given in ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE.</p>
+ * <p>E.g. to calculate the position of a pixel, (x,y), in a processed YUV output image with
+ * the dimensions in ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE, given the position of a pixel,
+ * (x', y'), in the raw pixel array with dimensions given in
+ * ACAMERA_SENSOR_INFO_PIXEL_ARRAY_SIZE:</p>
+ * <ol>
+ * <li>Choose a pixel (x', y') within the active array region of the raw buffer given in
+ * ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE, otherwise this pixel is considered
+ * to be outside of the FOV, and will not be shown in the processed output image.</li>
+ * <li>Apply geometric distortion correction to get the post-distortion pixel coordinate,
+ * (x_i, y_i). When applying geometric correction metadata, note that metadata for raw
+ * buffers is defined relative to the top, left of the
+ * ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE rectangle.</li>
+ * <li>If the resulting corrected pixel coordinate is within the region given in
+ * ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE, then the position of this pixel in the
+ * processed output image buffer is <code>(x_i - activeArray.left, y_i - activeArray.top)</code>,
+ * when the top, left coordinate of that buffer is treated as (0, 0).</li>
+ * </ol>
+ * <p>Thus, for pixel x',y' = (25, 25) on a sensor where ACAMERA_SENSOR_INFO_PIXEL_ARRAY_SIZE
+ * is (100,100), ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE is (10, 10, 100, 100),
+ * ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE is (20, 20, 80, 80), and the geometric distortion
+ * correction doesn't change the pixel coordinate, the resulting pixel selected in
+ * pixel coordinates would be x,y = (25, 25) relative to the top,left of the raw buffer
+ * with dimensions given in ACAMERA_SENSOR_INFO_PIXEL_ARRAY_SIZE, and would be (5, 5)
+ * relative to the top,left of post-processed YUV output buffer with dimensions given in
+ * ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE.</p>
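+ * <p>A sketch of step 3 with the example values above, assuming the geometric
+ * distortion correction is an identity mapping (so (x_i, y_i) == (x', y')):</p>
+ * <pre><code>int32_t xi = 25, yi = 25;                 // post-correction pixel coordinate
+ * int32_t activeLeft = 20, activeTop = 20;  // ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE left, top
+ * int32_t outX = xi - activeLeft;           // == 5
+ * int32_t outY = yi - activeTop;            // == 5
+ * </code></pre>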
+ * <p>The currently supported fields that correct for geometric distortion are:</p>
+ * <ol>
+ * <li>ACAMERA_LENS_RADIAL_DISTORTION.</li>
+ * </ol>
+ * <p>If all of the geometric distortion fields are no-ops, this rectangle will be the same
+ * as the post-distortion-corrected rectangle given in
+ * ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE.</p>
+ * <p>This rectangle is defined relative to the full pixel array; (0,0) is the top-left of
+ * the full pixel array, and the size of the full pixel array is given by
+ * ACAMERA_SENSOR_INFO_PIXEL_ARRAY_SIZE.</p>
+ * <p>The pre-correction active array may be smaller than the full pixel array, since the
+ * full array may include black calibration pixels or other inactive regions.</p>
+ *
+ * @see ACAMERA_LENS_RADIAL_DISTORTION
+ * @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE
+ * @see ACAMERA_SENSOR_INFO_PIXEL_ARRAY_SIZE
+ * @see ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE
+ */
ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE = // int32[4]
ACAMERA_SENSOR_INFO_START + 10,
ACAMERA_SENSOR_INFO_END,
+ /**
+ * <p>Quality of lens shading correction applied
+ * to the image data.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * <li>ACaptureRequest</li>
+ * </ul>
+ *
+ * <p>When set to OFF mode, no lens shading correction will be applied by the
+ * camera device, and an identity lens shading map data will be provided
+ * if <code>ACAMERA_STATISTICS_LENS_SHADING_MAP_MODE == ON</code>. For example, for a lens
+ * shading map of size <code>[ 4, 3 ]</code>,
+ * the output ACAMERA_STATISTICS_LENS_SHADING_CORRECTION_MAP for this case will be an identity
+ * map shown below:</p>
+ * <pre><code>[ 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
+ * 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
+ * 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
+ * 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
+ * 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
+ * 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0 ]
+ * </code></pre>
+ * <p>When set to other modes, lens shading correction will be applied by the camera
+ * device. Applications can request lens shading map data by setting
+ * ACAMERA_STATISTICS_LENS_SHADING_MAP_MODE to ON, and then the camera device will provide lens
+ * shading map data in ACAMERA_STATISTICS_LENS_SHADING_CORRECTION_MAP; the returned shading map
+ * data will be the one applied by the camera device for this capture request.</p>
+ * <p>The shading map data may depend on the auto-exposure (AE) and AWB statistics, therefore
+ * the reliability of the map data may be affected by the AE and AWB algorithms. When AE and
+ * AWB are in AUTO modes (ACAMERA_CONTROL_AE_MODE <code>!=</code> OFF and ACAMERA_CONTROL_AWB_MODE <code>!=</code>
+ * OFF), to get best results, it is recommended that the applications wait for the AE and AWB
+ * to be converged before using the returned shading map data.</p>
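+ * <p>For example, to keep shading correction active while also requesting the
+ * applied map (a minimal sketch assuming a valid <code>ACaptureRequest*
+ * request</code>; error handling omitted):</p>
+ * <pre><code>uint8_t shadingMode = ACAMERA_SHADING_MODE_FAST;
+ * uint8_t mapMode = ACAMERA_STATISTICS_LENS_SHADING_MAP_MODE_ON;
+ * ACaptureRequest_setEntry_u8(request, ACAMERA_SHADING_MODE, 1, &shadingMode);
+ * ACaptureRequest_setEntry_u8(request,
+ *         ACAMERA_STATISTICS_LENS_SHADING_MAP_MODE, 1, &mapMode);
+ * </code></pre>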
+ *
+ * @see ACAMERA_CONTROL_AE_MODE
+ * @see ACAMERA_CONTROL_AWB_MODE
+ * @see ACAMERA_STATISTICS_LENS_SHADING_CORRECTION_MAP
+ * @see ACAMERA_STATISTICS_LENS_SHADING_MAP_MODE
+ */
ACAMERA_SHADING_MODE = // byte (enum)
ACAMERA_SHADING_START,
+ /**
+ * <p>List of lens shading modes for ACAMERA_SHADING_MODE that are supported by this camera device.</p>
+ *
+ * @see ACAMERA_SHADING_MODE
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul>
+ *
+ * <p>This list contains lens shading modes that can be set for the camera device.
+ * Camera devices that support the MANUAL_POST_PROCESSING capability will always
+ * list OFF and FAST mode. This includes all FULL level devices.
+ * LEGACY devices will always only support FAST mode.</p>
+ */
ACAMERA_SHADING_AVAILABLE_MODES = // byte[n]
ACAMERA_SHADING_START + 2,
ACAMERA_SHADING_END,
+ /**
+ * <p>Operating mode for the face detector
+ * unit.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * <li>ACaptureRequest</li>
+ * </ul>
+ *
+ * <p>Whether face detection is enabled, and whether it
+ * should output just the basic fields or the full set of
+ * fields.</p>
+ */
ACAMERA_STATISTICS_FACE_DETECT_MODE = // byte (enum)
ACAMERA_STATISTICS_START,
+ /**
+ * <p>Operating mode for hot pixel map generation.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * <li>ACaptureRequest</li>
+ * </ul>
+ *
+ * <p>If set to <code>true</code>, a hot pixel map is returned in ACAMERA_STATISTICS_HOT_PIXEL_MAP.
+ * If set to <code>false</code>, no hot pixel map will be returned.</p>
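+ * <p>For example (a minimal sketch assuming a valid <code>ACaptureRequest*
+ * request</code>; error handling omitted):</p>
+ * <pre><code>uint8_t mapMode = ACAMERA_STATISTICS_HOT_PIXEL_MAP_MODE_ON;
+ * ACaptureRequest_setEntry_u8(request,
+ *         ACAMERA_STATISTICS_HOT_PIXEL_MAP_MODE, 1, &mapMode);
+ * </code></pre>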
+ *
+ * @see ACAMERA_STATISTICS_HOT_PIXEL_MAP
+ */
ACAMERA_STATISTICS_HOT_PIXEL_MAP_MODE = // byte (enum)
ACAMERA_STATISTICS_START + 3,
+ /**
+ * <p>List of unique IDs for detected faces.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * </ul>
+ *
+ * <p>Each detected face is given a unique ID that is valid for as long as the face is visible
+ * to the camera device. A face that leaves the field of view and later returns may be
+ * assigned a new ID.</p>
+ * <p>Only available if ACAMERA_STATISTICS_FACE_DETECT_MODE == FULL</p>
+ *
+ * @see ACAMERA_STATISTICS_FACE_DETECT_MODE
+ */
ACAMERA_STATISTICS_FACE_IDS = // int32[n]
ACAMERA_STATISTICS_START + 4,
+ /**
+ * <p>List of landmarks for detected
+ * faces.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * </ul>
+ *
+ * <p>The coordinate system is that of ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE, with
+ * <code>(0, 0)</code> being the top-left pixel of the active array.</p>
+ * <p>Only available if ACAMERA_STATISTICS_FACE_DETECT_MODE == FULL</p>
+ *
+ * @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE
+ * @see ACAMERA_STATISTICS_FACE_DETECT_MODE
+ */
ACAMERA_STATISTICS_FACE_LANDMARKS = // int32[n*6]
ACAMERA_STATISTICS_START + 5,
+ /**
+ * <p>List of the bounding rectangles for detected
+ * faces.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * </ul>
+ *
+ * <p>The coordinate system is that of ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE, with
+ * <code>(0, 0)</code> being the top-left pixel of the active array.</p>
+ * <p>Only available if ACAMERA_STATISTICS_FACE_DETECT_MODE != OFF</p>
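+ * <p>A minimal sketch of reading the rectangles from a capture result, assuming
+ * a valid <code>ACameraMetadata* result</code>:</p>
+ * <pre><code>ACameraMetadata_const_entry entry;
+ * if (ACameraMetadata_getConstEntry(result,
+ *         ACAMERA_STATISTICS_FACE_RECTANGLES, &entry) == ACAMERA_OK) {
+ *     uint32_t faceCount = entry.count / 4;  // (left, top, right, bottom) per face
+ * }
+ * </code></pre>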
+ *
+ * @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE
+ * @see ACAMERA_STATISTICS_FACE_DETECT_MODE
+ */
ACAMERA_STATISTICS_FACE_RECTANGLES = // int32[n*4]
ACAMERA_STATISTICS_START + 6,
+ /**
+ * <p>List of the face confidence scores for
+ * detected faces</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * </ul>
+ *
+ * <p>Only available if ACAMERA_STATISTICS_FACE_DETECT_MODE != OFF.</p>
+ *
+ * @see ACAMERA_STATISTICS_FACE_DETECT_MODE
+ */
ACAMERA_STATISTICS_FACE_SCORES = // byte[n]
ACAMERA_STATISTICS_START + 7,
+ /**
+ * <p>The shading map is a low-resolution floating-point map
+ * that lists the coefficients used to correct for vignetting, for each
+ * Bayer color channel.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * </ul>
+ *
+ * <p>The least shaded section of the image should have a gain factor
+ * of 1; all other sections should have gains above 1.</p>
+ * <p>When ACAMERA_COLOR_CORRECTION_MODE = TRANSFORM_MATRIX, the map
+ * must take into account the colorCorrection settings.</p>
+ * <p>The shading map is for the entire active pixel array, and is not
+ * affected by the crop region specified in the request. Each shading map
+ * entry is the value of the shading compensation map over a specific
+ * pixel on the sensor. Specifically, with a (N x M) resolution shading
+ * map, and an active pixel array size (W x H), shading map entry
+ * (x,y) ϵ (0 ... N-1, 0 ... M-1) is the value of the shading map at
+ * pixel ( ((W-1)/(N-1)) * x, ((H-1)/(M-1)) * y) for the four color channels.
+ * The map is assumed to be bilinearly interpolated between the sample points.</p>
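+ * <p>A sketch of the sample-point mapping above, using hypothetical values
+ * (a 4x3 map over a 100x75 active array):</p>
+ * <pre><code>int N = 4, M = 3, W = 100, H = 75;
+ * int x = 2, y = 1;                          // shading map entry
+ * float px = (float)(W - 1) / (N - 1) * x;   // == 66.0
+ * float py = (float)(H - 1) / (M - 1) * y;   // == 37.0
+ * </code></pre>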
+ * <p>The channel order is [R, Geven, Godd, B], where Geven is the green
+ * channel for the even rows of a Bayer pattern, and Godd is the odd rows.
+ * The shading map is stored in a fully interleaved format.</p>
+ * <p>The shading map should have on the order of 30-40 rows and columns,
+ * and must be smaller than 64x64.</p>
+ * <p>As an example, given a very small map defined as:</p>
+ * <pre><code>width,height = [ 4, 3 ]
+ * values =
+ * [ 1.3, 1.2, 1.15, 1.2, 1.2, 1.2, 1.15, 1.2,
+ * 1.1, 1.2, 1.2, 1.2, 1.3, 1.2, 1.3, 1.3,
+ * 1.2, 1.2, 1.25, 1.1, 1.1, 1.1, 1.1, 1.0,
+ * 1.0, 1.0, 1.0, 1.0, 1.2, 1.3, 1.25, 1.2,
+ * 1.3, 1.2, 1.2, 1.3, 1.2, 1.15, 1.1, 1.2,
+ * 1.2, 1.1, 1.0, 1.2, 1.3, 1.15, 1.2, 1.3 ]
+ * </code></pre>
+ * <p>The low-resolution scaling map images for each channel are
+ * (displayed using nearest-neighbor interpolation):</p>
+ * <p><img alt="Red lens shading map" src="../../../../images/camera2/metadata/android.statistics.lensShadingMap/red_shading.png" />
+ * <img alt="Green (even rows) lens shading map" src="../../../../images/camera2/metadata/android.statistics.lensShadingMap/green_e_shading.png" />
+ * <img alt="Green (odd rows) lens shading map" src="../../../../images/camera2/metadata/android.statistics.lensShadingMap/green_o_shading.png" />
+ * <img alt="Blue lens shading map" src="../../../../images/camera2/metadata/android.statistics.lensShadingMap/blue_shading.png" /></p>
+ * <p>As a visualization only, inverting the full-color map to recover an
+ * image of a gray wall (using bicubic interpolation for visual quality) as captured by the sensor gives:</p>
+ * <p><img alt="Image of a uniform white wall (inverse shading map)" src="../../../../images/camera2/metadata/android.statistics.lensShadingMap/inv_shading.png" /></p>
+ *
+ * @see ACAMERA_COLOR_CORRECTION_MODE
+ */
ACAMERA_STATISTICS_LENS_SHADING_CORRECTION_MAP = // byte
ACAMERA_STATISTICS_START + 10,
+ /**
+ * <p>The shading map is a low-resolution floating-point map
+ * that lists the coefficients used to correct for vignetting and color shading,
+ * for each Bayer color channel of RAW image data.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * </ul>
+ *
+ * <p>The lens shading correction is defined as a full shading correction that
+ * corrects for both vignetting and color shading in the output non-RAW images. After the
+ * shading map is applied, the output non-RAW images will be flat-field images
+ * for flat scenes under uniform illumination.</p>
+ * <p>When there is no lens shading correction applied to RAW output images
+ * (ACAMERA_SENSOR_INFO_LENS_SHADING_APPLIED <code>==</code> false), this map is a full lens
+ * shading correction map; when there is some lens shading correction applied
+ * to the RAW output image (ACAMERA_SENSOR_INFO_LENS_SHADING_APPLIED <code>==</code> true),
+ * this map reports the remaining lens shading correction map that needs to be
+ * applied to get fully shading corrected images.</p>
+ * <p>For a full shading correction map, the least shaded section of the image
+ * should have a gain factor of 1; all other sections should have gains above 1.</p>
+ * <p>When ACAMERA_COLOR_CORRECTION_MODE = TRANSFORM_MATRIX, the map
+ * must take into account the colorCorrection settings.</p>
+ * <p>The shading map is for the entire active pixel array, and is not
+ * affected by the crop region specified in the request. Each shading map
+ * entry is the value of the shading compensation map over a specific
+ * pixel on the sensor. Specifically, with a (N x M) resolution shading
+ * map, and an active pixel array size (W x H), shading map entry
+ * (x,y) ϵ (0 ... N-1, 0 ... M-1) is the value of the shading map at
+ * pixel ( ((W-1)/(N-1)) * x, ((H-1)/(M-1)) * y) for the four color channels.
+ * The map is assumed to be bilinearly interpolated between the sample points.</p>
+ * <p>The channel order is [R, Geven, Godd, B], where Geven is the green
+ * channel for the even rows of a Bayer pattern, and Godd is the odd rows.
+ * The shading map is stored in a fully interleaved format, and its size
+ * is provided in the camera static metadata by ACAMERA_LENS_INFO_SHADING_MAP_SIZE.</p>
+ * <p>The shading map should have on the order of 30-40 rows and columns,
+ * and must be smaller than 64x64.</p>
+ * <p>As an example, given a very small map defined as:</p>
+ * <pre><code>ACAMERA_LENS_INFO_SHADING_MAP_SIZE = [ 4, 3 ]
+ * ACAMERA_STATISTICS_LENS_SHADING_MAP =
+ * [ 1.3, 1.2, 1.15, 1.2, 1.2, 1.2, 1.15, 1.2,
+ * 1.1, 1.2, 1.2, 1.2, 1.3, 1.2, 1.3, 1.3,
+ * 1.2, 1.2, 1.25, 1.1, 1.1, 1.1, 1.1, 1.0,
+ * 1.0, 1.0, 1.0, 1.0, 1.2, 1.3, 1.25, 1.2,
+ * 1.3, 1.2, 1.2, 1.3, 1.2, 1.15, 1.1, 1.2,
+ * 1.2, 1.1, 1.0, 1.2, 1.3, 1.15, 1.2, 1.3 ]
+ * </code></pre>
+ * <p>The low-resolution scaling map images for each channel are
+ * (displayed using nearest-neighbor interpolation):</p>
+ * <p><img alt="Red lens shading map" src="../../../../images/camera2/metadata/android.statistics.lensShadingMap/red_shading.png" />
+ * <img alt="Green (even rows) lens shading map" src="../../../../images/camera2/metadata/android.statistics.lensShadingMap/green_e_shading.png" />
+ * <img alt="Green (odd rows) lens shading map" src="../../../../images/camera2/metadata/android.statistics.lensShadingMap/green_o_shading.png" />
+ * <img alt="Blue lens shading map" src="../../../../images/camera2/metadata/android.statistics.lensShadingMap/blue_shading.png" /></p>
+ * <p>As a visualization only, inverting the full-color map to recover an
+ * image of a gray wall (using bicubic interpolation for visual quality)
+ * as captured by the sensor gives:</p>
+ * <p><img alt="Image of a uniform white wall (inverse shading map)" src="../../../../images/camera2/metadata/android.statistics.lensShadingMap/inv_shading.png" /></p>
+ * <p>Note that the RAW image data might be subject to lens shading
+ * correction not reported on this map. Query
+ * ACAMERA_SENSOR_INFO_LENS_SHADING_APPLIED to see if RAW image data has been subject
+ * to lens shading correction. If ACAMERA_SENSOR_INFO_LENS_SHADING_APPLIED
+ * is TRUE, the RAW image data is subject to partial or full lens shading
+ * correction. In the case that full lens shading correction is applied to RAW
+ * images, the gain factor map reported in this key will contain all 1.0 gains.
+ * In other words, the map reported in this key is the remaining lens shading
+ * that needs to be applied on the RAW image to get images without lens shading
+ * artifacts. See android.request.maxNumOutputRaw for a list of RAW image
+ * formats.</p>
+ *
+ * @see ACAMERA_COLOR_CORRECTION_MODE
+ * @see ACAMERA_LENS_INFO_SHADING_MAP_SIZE
+ * @see ACAMERA_SENSOR_INFO_LENS_SHADING_APPLIED
+ * @see ACAMERA_STATISTICS_LENS_SHADING_MAP
+ */
ACAMERA_STATISTICS_LENS_SHADING_MAP = // float[4*n*m]
ACAMERA_STATISTICS_START + 11,
+ /**
+ * <p>The camera device's estimate of the scene illumination lighting
+ * frequency.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * </ul>
+ *
+ * <p>Many light sources, such as most fluorescent lights, flicker at a rate
+ * that depends on the local utility power standards. This flicker must be
+ * accounted for by auto-exposure routines to avoid artifacts in captured images.
+ * The camera device uses this entry to tell the application what the scene
+ * illuminant frequency is.</p>
+ * <p>When manual exposure control is enabled
+ * (<code>ACAMERA_CONTROL_AE_MODE == OFF</code> or <code>ACAMERA_CONTROL_MODE ==
+ * OFF</code>), the ACAMERA_CONTROL_AE_ANTIBANDING_MODE doesn't perform
+ * antibanding, and the application can ensure it selects
+ * exposure times that do not cause banding issues by looking
+ * into this metadata field. See
+ * ACAMERA_CONTROL_AE_ANTIBANDING_MODE for more details.</p>
+ * <p>Reports NONE if there doesn't appear to be flickering illumination.</p>
+ *
+ * @see ACAMERA_CONTROL_AE_ANTIBANDING_MODE
+ * @see ACAMERA_CONTROL_AE_MODE
+ * @see ACAMERA_CONTROL_MODE
+ */
ACAMERA_STATISTICS_SCENE_FLICKER = // byte (enum)
ACAMERA_STATISTICS_START + 14,
+ /**
+ * <p>List of <code>(x, y)</code> coordinates of hot/defective pixels on the sensor.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * </ul>
+ *
+ * <p>A coordinate <code>(x, y)</code> must lie between <code>(0, 0)</code> and
+ * <code>(width - 1, height - 1)</code> (inclusive), which are the top-left and
+ * bottom-right of the pixel array, respectively. The width and
+ * height dimensions are given in ACAMERA_SENSOR_INFO_PIXEL_ARRAY_SIZE.
+ * This may include hot pixels that lie outside of the active array
+ * bounds given by ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE.</p>
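+ * <p>A minimal sketch of reading the list from a capture result, assuming a
+ * valid <code>ACameraMetadata* result</code>:</p>
+ * <pre><code>ACameraMetadata_const_entry entry;
+ * if (ACameraMetadata_getConstEntry(result,
+ *         ACAMERA_STATISTICS_HOT_PIXEL_MAP, &entry) == ACAMERA_OK) {
+ *     for (uint32_t i = 0; i + 1 < entry.count; i += 2) {
+ *         int32_t x = entry.data.i32[i];
+ *         int32_t y = entry.data.i32[i + 1];
+ *     }
+ * }
+ * </code></pre>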
+ *
+ * @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE
+ * @see ACAMERA_SENSOR_INFO_PIXEL_ARRAY_SIZE
+ */
ACAMERA_STATISTICS_HOT_PIXEL_MAP = // int32[2*n]
ACAMERA_STATISTICS_START + 15,
+ /**
+ * <p>Whether the camera device will output the lens
+ * shading map in output result metadata.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * <li>ACaptureRequest</li>
+ * </ul>
+ *
+ * <p>When set to ON,
+ * ACAMERA_STATISTICS_LENS_SHADING_MAP will be provided in
+ * the output result metadata.</p>
+ * <p>ON is always supported on devices with the RAW capability.</p>
+ *
+ * @see ACAMERA_STATISTICS_LENS_SHADING_MAP
+ */
ACAMERA_STATISTICS_LENS_SHADING_MAP_MODE = // byte (enum)
ACAMERA_STATISTICS_START + 16,
ACAMERA_STATISTICS_END,
+ /**
+ * <p>List of face detection modes for ACAMERA_STATISTICS_FACE_DETECT_MODE that are
+ * supported by this camera device.</p>
+ *
+ * @see ACAMERA_STATISTICS_FACE_DETECT_MODE
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul>
+ *
+ * <p>OFF is always supported.</p>
+ */
ACAMERA_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES = // byte[n]
ACAMERA_STATISTICS_INFO_START,
+ /**
+ * <p>The maximum number of simultaneously detectable
+ * faces.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul>
+ */
ACAMERA_STATISTICS_INFO_MAX_FACE_COUNT = // int32
ACAMERA_STATISTICS_INFO_START + 2,
+ /**
+ * <p>List of hot pixel map output modes for ACAMERA_STATISTICS_HOT_PIXEL_MAP_MODE that are
+ * supported by this camera device.</p>
+ *
+ * @see ACAMERA_STATISTICS_HOT_PIXEL_MAP_MODE
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul>
+ *
+ * <p>If no hot pixel map output is available for this camera device, this will contain only
+ * <code>false</code>.</p>
+ * <p><code>true</code> is always supported on devices with the RAW capability.</p>
+ */
ACAMERA_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES = // byte[n]
ACAMERA_STATISTICS_INFO_START + 6,
+ /**
+ * <p>List of lens shading map output modes for ACAMERA_STATISTICS_LENS_SHADING_MAP_MODE that
+ * are supported by this camera device.</p>
+ *
+ * @see ACAMERA_STATISTICS_LENS_SHADING_MAP_MODE
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul>
+ *
+ * <p>If no lens shading map output is available for this camera device, this key will
+ * contain only OFF.</p>
+ * <p>ON is always supported on devices with the RAW capability.
+ * LEGACY mode devices will always only support OFF.</p>
+ */
ACAMERA_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES = // byte[n]
ACAMERA_STATISTICS_INFO_START + 7,
ACAMERA_STATISTICS_INFO_END,
+ /**
+ * <p>Tonemapping / contrast / gamma curve for the blue
+ * channel, to use when ACAMERA_TONEMAP_MODE is
+ * CONTRAST_CURVE.</p>
+ *
+ * @see ACAMERA_TONEMAP_MODE
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * <li>ACaptureRequest</li>
+ * </ul>
+ *
+ * <p>See ACAMERA_TONEMAP_CURVE_RED for more details.</p>
+ *
+ * @see ACAMERA_TONEMAP_CURVE_RED
+ */
ACAMERA_TONEMAP_CURVE_BLUE = // float[n*2]
ACAMERA_TONEMAP_START,
+ /**
+ * <p>Tonemapping / contrast / gamma curve for the green
+ * channel, to use when ACAMERA_TONEMAP_MODE is
+ * CONTRAST_CURVE.</p>
+ *
+ * @see ACAMERA_TONEMAP_MODE
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * <li>ACaptureRequest</li>
+ * </ul>
+ *
+ * <p>See ACAMERA_TONEMAP_CURVE_RED for more details.</p>
+ *
+ * @see ACAMERA_TONEMAP_CURVE_RED
+ */
ACAMERA_TONEMAP_CURVE_GREEN = // float[n*2]
ACAMERA_TONEMAP_START + 1,
+ /**
+ * <p>Tonemapping / contrast / gamma curve for the red
+ * channel, to use when ACAMERA_TONEMAP_MODE is
+ * CONTRAST_CURVE.</p>
+ *
+ * @see ACAMERA_TONEMAP_MODE
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * <li>ACaptureRequest</li>
+ * </ul>
+ *
+ * <p>Each channel's curve is defined by an array of control points:</p>
+ * <pre><code>ACAMERA_TONEMAP_CURVE_RED =
+ * [ P0in, P0out, P1in, P1out, P2in, P2out, P3in, P3out, ..., PNin, PNout ]
+ * 2 <= N <= ACAMERA_TONEMAP_MAX_CURVE_POINTS</code></pre>
+ * <p>These are sorted in order of increasing <code>Pin</code>; it is
+ * required that input values 0.0 and 1.0 are included in the list to
+ * define a complete mapping. For input values between control points,
+ * the camera device must linearly interpolate between the control
+ * points.</p>
+ * <p>Each curve can have an independent number of points, and the number
+ * of points can be less than the max (that is, the request doesn't have to
+ * always provide a curve with a number of points equal to
+ * ACAMERA_TONEMAP_MAX_CURVE_POINTS).</p>
+ * <p>A few examples, and their corresponding graphical mappings; these
+ * only specify the red channel and the precision is limited to 4
+ * digits, for conciseness.</p>
+ * <p>Linear mapping:</p>
+ * <pre><code>ACAMERA_TONEMAP_CURVE_RED = [ 0, 0, 1.0, 1.0 ]
+ * </code></pre>
+ * <p><img alt="Linear mapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/linear_tonemap.png" /></p>
+ * <p>Invert mapping:</p>
+ * <pre><code>ACAMERA_TONEMAP_CURVE_RED = [ 0, 1.0, 1.0, 0 ]
+ * </code></pre>
+ * <p><img alt="Inverting mapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/inverse_tonemap.png" /></p>
+ * <p>Gamma 1/2.2 mapping, with 16 control points:</p>
+ * <pre><code>ACAMERA_TONEMAP_CURVE_RED = [
+ * 0.0000, 0.0000, 0.0667, 0.2920, 0.1333, 0.4002, 0.2000, 0.4812,
+ * 0.2667, 0.5484, 0.3333, 0.6069, 0.4000, 0.6594, 0.4667, 0.7072,
+ * 0.5333, 0.7515, 0.6000, 0.7928, 0.6667, 0.8317, 0.7333, 0.8685,
+ * 0.8000, 0.9035, 0.8667, 0.9370, 0.9333, 0.9691, 1.0000, 1.0000 ]
+ * </code></pre>
+ * <p><img alt="Gamma = 1/2.2 tonemapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/gamma_tonemap.png" /></p>
+ * <p>Standard sRGB gamma mapping, per IEC 61966-2-1:1999, with 16 control points:</p>
+ * <pre><code>ACAMERA_TONEMAP_CURVE_RED = [
+ * 0.0000, 0.0000, 0.0667, 0.2864, 0.1333, 0.4007, 0.2000, 0.4845,
+ * 0.2667, 0.5532, 0.3333, 0.6125, 0.4000, 0.6652, 0.4667, 0.7130,
+ * 0.5333, 0.7569, 0.6000, 0.7977, 0.6667, 0.8360, 0.7333, 0.8721,
+ * 0.8000, 0.9063, 0.8667, 0.9389, 0.9333, 0.9701, 1.0000, 1.0000 ]
+ * </code></pre>
+ * <p><img alt="sRGB tonemapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/srgb_tonemap.png" /></p>
+ *
+ * @see ACAMERA_TONEMAP_CURVE_RED
+ * @see ACAMERA_TONEMAP_MAX_CURVE_POINTS
+ */
ACAMERA_TONEMAP_CURVE_RED = // float[n*2]
ACAMERA_TONEMAP_START + 2,
+ /**
+ * <p>High-level global contrast/gamma/tonemapping control.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * <li>ACaptureRequest</li>
+ * </ul>
+ *
+ * <p>When switching to an application-defined contrast curve by setting
+ * ACAMERA_TONEMAP_MODE to CONTRAST_CURVE, the curve is defined
+ * per-channel with a set of <code>(in, out)</code> points that specify the
+ * mapping from input high-bit-depth pixel value to the output
+ * low-bit-depth value. Since the actual pixel ranges of both input
+ * and output may change depending on the camera pipeline, the values
+ * are specified by normalized floating-point numbers.</p>
+ * <p>More-complex color mapping operations such as 3D color look-up
+ * tables, selective chroma enhancement, or other non-linear color
+ * transforms will be disabled when ACAMERA_TONEMAP_MODE is
+ * CONTRAST_CURVE.</p>
+ * <p>When using either FAST or HIGH_QUALITY, the camera device will
+ * emit its own tonemap curve in android.tonemap.curve.
+ * These values are always available, and as close as possible to the
+ * actually used nonlinear/nonglobal transforms.</p>
+ * <p>If a request is sent with CONTRAST_CURVE using the curve that the camera
+ * device provided in FAST or HIGH_QUALITY, the resulting image's tonemap will be
+ * roughly the same.</p>
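+ * <p>For example, a linear mapping could be requested as follows (a minimal
+ * sketch assuming a valid <code>ACaptureRequest* request</code>; error
+ * handling omitted):</p>
+ * <pre><code>uint8_t mode = ACAMERA_TONEMAP_MODE_CONTRAST_CURVE;
+ * float linear[] = { 0.0f, 0.0f, 1.0f, 1.0f };
+ * ACaptureRequest_setEntry_u8(request, ACAMERA_TONEMAP_MODE, 1, &mode);
+ * ACaptureRequest_setEntry_float(request, ACAMERA_TONEMAP_CURVE_RED, 4, linear);
+ * ACaptureRequest_setEntry_float(request, ACAMERA_TONEMAP_CURVE_GREEN, 4, linear);
+ * ACaptureRequest_setEntry_float(request, ACAMERA_TONEMAP_CURVE_BLUE, 4, linear);
+ * </code></pre>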
+ *
+ * @see ACAMERA_TONEMAP_MODE
+ */
ACAMERA_TONEMAP_MODE = // byte (enum)
ACAMERA_TONEMAP_START + 3,
+ /**
+ * <p>Maximum number of supported points in the
+ * tonemap curve that can be used for android.tonemap.curve.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul>
+ *
+ * <p>If the actual number of points provided by the application (in ACAMERA_TONEMAP_CURVE_*) is
+ * less than this maximum, the camera device will resample the curve to its internal
+ * representation, using linear interpolation.</p>
+ * <p>The output curves in the result metadata may have a different number
+ * of points than the input curves, and will represent the actual
+ * hardware curves used as closely as possible when linearly interpolated.</p>
+ */
ACAMERA_TONEMAP_MAX_CURVE_POINTS = // int32
ACAMERA_TONEMAP_START + 4,
+ /**
+ * <p>List of tonemapping modes for ACAMERA_TONEMAP_MODE that are supported by this camera
+ * device.</p>
+ *
+ * @see ACAMERA_TONEMAP_MODE
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul>
+ *
+ * <p>Camera devices that support the MANUAL_POST_PROCESSING capability will always contain
+ * at least one of the mode combinations below:</p>
+ * <ul>
+ * <li>CONTRAST_CURVE, FAST and HIGH_QUALITY</li>
+ * <li>GAMMA_VALUE, PRESET_CURVE, FAST and HIGH_QUALITY</li>
+ * </ul>
+ * <p>This includes all FULL level devices.</p>
+ */
ACAMERA_TONEMAP_AVAILABLE_TONE_MAP_MODES = // byte[n]
ACAMERA_TONEMAP_START + 5,
+ /**
+ * <p>Tonemapping curve to use when ACAMERA_TONEMAP_MODE is
+ * GAMMA_VALUE</p>
+ *
+ * @see ACAMERA_TONEMAP_MODE
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * <li>ACaptureRequest</li>
+ * </ul>
+ *
+ * <p>The tonemap curve will be defined by the following formula:</p>
+ * <pre><code>OUT = pow(IN, 1.0 / gamma)
+ * </code></pre>
+ * <p>where IN and OUT are the input and output pixel values scaled to the range
+ * [0.0, 1.0], pow is the power function, and gamma is the gamma value specified
+ * by this key.</p>
+ * <p>The same curve will be applied to all color channels. The camera device
+ * may clip the input gamma value to its supported range. The actual applied
+ * value will be returned in the capture result.</p>
+ * <p>The valid range of the gamma value varies across devices, but values
+ * within [1.0, 5.0] are guaranteed not to be clipped.</p>
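+ * <p>For example (a minimal sketch assuming a valid <code>ACaptureRequest*
+ * request</code>; error handling omitted):</p>
+ * <pre><code>uint8_t mode = ACAMERA_TONEMAP_MODE_GAMMA_VALUE;
+ * float gamma = 2.2f;  // OUT = pow(IN, 1.0 / 2.2)
+ * ACaptureRequest_setEntry_u8(request, ACAMERA_TONEMAP_MODE, 1, &mode);
+ * ACaptureRequest_setEntry_float(request, ACAMERA_TONEMAP_GAMMA, 1, &gamma);
+ * </code></pre>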
+ */
ACAMERA_TONEMAP_GAMMA = // float
ACAMERA_TONEMAP_START + 6,
+ /**
+ * <p>Tonemapping curve to use when ACAMERA_TONEMAP_MODE is
+ * PRESET_CURVE</p>
+ *
+ * @see ACAMERA_TONEMAP_MODE
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * <li>ACaptureRequest</li>
+ * </ul>
+ *
+ * <p>The tonemap curve will be defined by the specified standard.</p>
+ * <p>sRGB (approximated by 16 control points):</p>
+ * <p><img alt="sRGB tonemapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/srgb_tonemap.png" /></p>
+ * <p>Rec. 709 (approximated by 16 control points):</p>
+ * <p><img alt="Rec. 709 tonemapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/rec709_tonemap.png" /></p>
+ * <p>Note that the above figures show a 16-control-point approximation of the preset
+ * curves. Camera devices may apply a different approximation to the curve.</p>
+ */
ACAMERA_TONEMAP_PRESET_CURVE = // byte (enum)
ACAMERA_TONEMAP_START + 7,
ACAMERA_TONEMAP_END,
+ /**
+ * <p>Generally classifies the overall set of the camera device functionality.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul>
+ *
+ * <p>The supported hardware level is a high-level description of the camera device's
+ * capabilities, summarizing several capabilities into one field. Each level adds additional
+ * features to the previous one, and is always a strict superset of the previous level.
+ * The ordering is <code>LEGACY < LIMITED < FULL < LEVEL_3</code>.</p>
+ * <p>Starting from <code>LEVEL_3</code>, the level enumerations are guaranteed to be in increasing
+ * numerical value as well. To check if a given device is at least at a given hardware level,
+ * the following code snippet can be used:</p>
+ * <pre><code>// Returns true if the device supports the required hardware level, or better.
+ * boolean isHardwareLevelSupported(CameraCharacteristics c, int requiredLevel) {
+ * int deviceLevel = c.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);
+ * if (deviceLevel == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) {
+ * return requiredLevel == deviceLevel;
+ * }
+ * // deviceLevel is not LEGACY, can use numerical sort
+ * return requiredLevel <= deviceLevel;
+ * }
+ * </code></pre>
+ * <p>At a high level, the levels are:</p>
+ * <ul>
+ * <li><code>LEGACY</code> devices operate in a backwards-compatibility mode for older
+ * Android devices, and have very limited capabilities.</li>
+ * <li><code>LIMITED</code> devices represent the
+ * baseline feature set, and may also include additional capabilities that are
+ * subsets of <code>FULL</code>.</li>
+ * <li><code>FULL</code> devices additionally support per-frame manual control of sensor, flash, lens and
+ * post-processing settings, and image capture at a high rate.</li>
+ * <li><code>LEVEL_3</code> devices additionally support YUV reprocessing and RAW image capture, along
+ * with additional output stream configurations.</li>
+ * </ul>
+ * <p>See the individual level enums for full descriptions of the supported capabilities. The
+ * ACAMERA_REQUEST_AVAILABLE_CAPABILITIES entry describes the device's capabilities at a
+ * finer-grain level, if needed. In addition, many controls have their available settings or
+ * ranges defined in individual {@link android.hardware.camera2.CameraCharacteristics} entries.</p>
+ * <p>Some features are not part of any particular hardware level or capability and must be
+ * queried separately. These include:</p>
+ * <ul>
+ * <li>Calibrated timestamps (ACAMERA_SENSOR_INFO_TIMESTAMP_SOURCE <code>==</code> REALTIME)</li>
+ * <li>Precision lens control (ACAMERA_LENS_INFO_FOCUS_DISTANCE_CALIBRATION <code>==</code> CALIBRATED)</li>
+ * <li>Face detection (ACAMERA_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES)</li>
+ * <li>Optical or electrical image stabilization
+ * (ACAMERA_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
+ * ACAMERA_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES)</li>
+ * </ul>
+ *
+ * @see ACAMERA_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES
+ * @see ACAMERA_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION
+ * @see ACAMERA_LENS_INFO_FOCUS_DISTANCE_CALIBRATION
+ * @see ACAMERA_REQUEST_AVAILABLE_CAPABILITIES
+ * @see ACAMERA_SENSOR_INFO_TIMESTAMP_SOURCE
+ * @see ACAMERA_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES
+ */
ACAMERA_INFO_SUPPORTED_HARDWARE_LEVEL = // byte (enum)
ACAMERA_INFO_START,
ACAMERA_INFO_END,
+ /**
+ * <p>Whether black-level compensation is locked
+ * to its current values, or is free to vary.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * <li>ACaptureRequest</li>
+ * </ul>
+ *
+ * <p>Whether the black level offset was locked for this frame. Should be
+ * ON if ACAMERA_BLACK_LEVEL_LOCK was ON in the capture request, unless
+ * a change in other capture settings forced the camera device to
+ * perform a black level reset.</p>
+ *
+ * @see ACAMERA_BLACK_LEVEL_LOCK
+ */
ACAMERA_BLACK_LEVEL_LOCK = // byte (enum)
ACAMERA_BLACK_LEVEL_START,
ACAMERA_BLACK_LEVEL_END,
+ /**
+ * <p>The frame number corresponding to the last request
+ * with which the output result (metadata + buffers) has been fully
+ * synchronized.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * </ul>
+ *
+ * <p>When a request is submitted to the camera device, there is usually a
+ * delay of several frames before the controls get applied. A camera
+ * device may either choose to account for this delay by implementing a
+ * pipeline and carefully submit well-timed atomic control updates, or
+ * it may start streaming control changes that span over several frame
+ * boundaries.</p>
+ * <p>In the latter case, whenever a request's settings change relative to
+ * the previously submitted request, the full set of changes may take
+ * multiple frame durations to fully take effect. Some settings may
+ * take effect sooner (in less frame durations) than others.</p>
+ * <p>While a set of control changes are being propagated, this value
+ * will be CONVERGING.</p>
+ * <p>Once it is fully known that a set of control changes have been
+ * finished propagating, and the resulting updated control settings
+ * have been read back by the camera device, this value will be set
+ * to a non-negative frame number (corresponding to the request to
+ * which the results have synchronized).</p>
+ * <p>Older camera device implementations may not have a way to detect
+ * when all camera controls have been applied, and will always set this
+ * value to UNKNOWN.</p>
+ * <p>FULL capability devices will always have this value set to the
+ * frame number of the request corresponding to this result.</p>
+ * <p><em>Further details</em>:</p>
+ * <ul>
+ * <li>Whenever a request differs from the last request, any future
+ * results not yet returned may have this value set to CONVERGING (this
+ * could include any in-progress captures not yet returned by the camera
+ * device, for more details see pipeline considerations below).</li>
+ * <li>Submitting a series of multiple requests that differ from the
+ * previous request (e.g. r1, r2, r3 s.t. r1 != r2 != r3)
+ * moves the new synchronization frame to the last non-repeating
+ * request (using the smallest frame number from the contiguous list of
+ * repeating requests).</li>
+ * <li>Submitting the same request repeatedly will not change this value
+ * to CONVERGING if it was already a non-negative value.</li>
+ * <li>When this value changes to non-negative, that means that all of the
+ * metadata controls from the request have been applied, all of the
+ * metadata controls from the camera device have been read to the
+ * updated values (into the result), and all of the graphics buffers
+ * corresponding to this result are also synchronized to the request.</li>
+ * </ul>
+ * <p><em>Pipeline considerations</em>:</p>
+ * <p>Submitting a request with updated controls relative to the previously
+ * submitted requests may also invalidate the synchronization state
+ * of all the results corresponding to currently in-flight requests.</p>
+ * <p>In other words, results for this current request and up to
+ * ACAMERA_REQUEST_PIPELINE_MAX_DEPTH prior requests may have their
+ * ACAMERA_SYNC_FRAME_NUMBER change to CONVERGING.</p>
+ *
+ * @see ACAMERA_REQUEST_PIPELINE_MAX_DEPTH
+ * @see ACAMERA_SYNC_FRAME_NUMBER
+ */
ACAMERA_SYNC_FRAME_NUMBER = // int64 (enum)
ACAMERA_SYNC_START,
+ /**
+ * <p>The maximum number of frames that can occur after a request
+ * (different than the previous) has been submitted, and before the
+ * result's state becomes synchronized.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul>
+ *
+ * <p>This defines the maximum distance (in number of metadata results)
+ * between the frame number of the request that has new controls to apply
+ * and the frame number of the result that has all the controls applied.</p>
+ * <p>In other words, this acts as an upper bound on how many frames
+ * must occur before the camera device knows for a fact that the newly
+ * submitted camera settings have been applied in outgoing frames.</p>
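+ * <p>A minimal sketch of reading this bound, assuming a valid
+ * <code>ACameraMetadata* chars</code>:</p>
+ * <pre><code>ACameraMetadata_const_entry entry;
+ * if (ACameraMetadata_getConstEntry(chars, ACAMERA_SYNC_MAX_LATENCY,
+ *         &entry) == ACAMERA_OK) {
+ *     int32_t maxLatency = entry.data.i32[0];
+ *     // PER_FRAME_CONTROL (0) means new settings take effect without frame delay.
+ * }
+ * </code></pre>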
+ */
ACAMERA_SYNC_MAX_LATENCY = // int32 (enum)
ACAMERA_SYNC_START + 1,
ACAMERA_SYNC_END,
+ /**
+ * <p>The available depth dataspace stream
+ * configurations that this camera device supports
+ * (i.e. format, width, height, output/input stream).</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul>
+ *
+ * <p>These are output stream configurations for use with
+ * dataSpace HAL_DATASPACE_DEPTH. The configurations are
+ * listed as <code>(format, width, height, input?)</code> tuples.</p>
+ * <p>Only devices that support depth output for at least
+ * the HAL_PIXEL_FORMAT_Y16 dense depth map may include
+ * this entry.</p>
+ * <p>A device that also supports the HAL_PIXEL_FORMAT_BLOB
+ * sparse depth point cloud must report a single entry for
+ * the format in this list as <code>(HAL_PIXEL_FORMAT_BLOB,
+ * android.depth.maxDepthSamples, 1, OUTPUT)</code> in addition to
+ * the entries for HAL_PIXEL_FORMAT_Y16.</p>
+ */
ACAMERA_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS = // int32[n*4] (enum)
ACAMERA_DEPTH_START + 1,
+ /**
+ * <p>This lists the minimum frame duration for each
+ * format/size combination for depth output formats.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul>
+ *
+ * <p>This should correspond to the frame duration when only that
+ * stream is active, with all processing (typically in android.*.mode)
+ * set to either OFF or FAST.</p>
+ * <p>When multiple streams are used in a request, the minimum frame
+ * duration will be max(individual stream min durations).</p>
+ * <p>The minimum frame duration of a stream (of a particular format, size)
+ * is the same regardless of whether the stream is input or output.</p>
+ * <p>See ACAMERA_SENSOR_FRAME_DURATION and
+ * ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS for more details about
+ * calculating the max frame rate.</p>
+ * <p>(Keep in sync with {@link
+ * android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration})</p>
+ *
+ * @see ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS
+ * @see ACAMERA_SENSOR_FRAME_DURATION
+ */
ACAMERA_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS = // int64[4*n]
ACAMERA_DEPTH_START + 2,
+ /**
+ * <p>This lists the maximum stall duration for each
+ * output format/size combination for depth streams.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul>
+ *
+ * <p>A stall duration is how much extra time would get added
+ * to the normal minimum frame duration for a repeating request
+ * that has streams with non-zero stall.</p>
+ * <p>This functions similarly to
+ * ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS for depth
+ * streams.</p>
+ * <p>All depth output stream formats may have a nonzero stall
+ * duration.</p>
+ *
+ * @see ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS
+ */
ACAMERA_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS = // int64[4*n]
ACAMERA_DEPTH_START + 3,
+ /**
+ * <p>Indicates whether a capture request may target both a
+ * DEPTH16 / DEPTH_POINT_CLOUD output and normal color outputs (such as
+ * YUV_420_888, JPEG, or RAW) simultaneously.</p>
+ *
+ * <p>This tag may appear in:</p>
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul>
+ *
+ * <p>If TRUE, including both depth and color outputs in a single
+ * capture request is not supported. An application must interleave color
+ * and depth requests. If FALSE, a single request can target both types
+ * of output.</p>
+ * <p>Typically, this restriction exists on camera devices that
+ * need to emit a specific pattern or wavelength of light to
+ * measure depth values, which causes the color image to be
+ * corrupted during depth measurement.</p>
+ */
ACAMERA_DEPTH_DEPTH_IS_EXCLUSIVE = // byte (enum)
ACAMERA_DEPTH_START + 4,
ACAMERA_DEPTH_END,
@@ -474,7 +5355,7 @@
// ACAMERA_COLOR_CORRECTION_MODE
typedef enum acamera_metadata_enum_acamera_color_correction_mode {
- /*
+ /**
* <p>Use the ACAMERA_COLOR_CORRECTION_TRANSFORM matrix
* and ACAMERA_COLOR_CORRECTION_GAINS to do color conversion.</p>
* <p>All advanced white balance adjustments (not specified
@@ -489,7 +5370,7 @@
*/
ACAMERA_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX = 0,
- /*
+ /**
* <p>Color correction processing must not slow down
* capture rate relative to sensor raw output.</p>
* <p>Advanced white balance adjustments above and beyond
@@ -502,7 +5383,7 @@
*/
ACAMERA_COLOR_CORRECTION_MODE_FAST = 1,
- /*
+ /**
* <p>Color correction processing operates at improved
* quality but the capture rate might be reduced (relative to sensor
* raw output rate)</p>
@@ -520,18 +5401,18 @@
// ACAMERA_COLOR_CORRECTION_ABERRATION_MODE
typedef enum acamera_metadata_enum_acamera_color_correction_aberration_mode {
- /*
+ /**
* <p>No aberration correction is applied.</p>
*/
ACAMERA_COLOR_CORRECTION_ABERRATION_MODE_OFF = 0,
- /*
+ /**
* <p>Aberration correction will not slow down capture rate
* relative to sensor raw output.</p>
*/
ACAMERA_COLOR_CORRECTION_ABERRATION_MODE_FAST = 1,
- /*
+ /**
* <p>Aberration correction operates at improved quality but the capture rate might be
* reduced (relative to sensor raw output rate)</p>
*/
@@ -542,26 +5423,26 @@
// ACAMERA_CONTROL_AE_ANTIBANDING_MODE
typedef enum acamera_metadata_enum_acamera_control_ae_antibanding_mode {
- /*
+ /**
* <p>The camera device will not adjust exposure duration to
* avoid banding problems.</p>
*/
ACAMERA_CONTROL_AE_ANTIBANDING_MODE_OFF = 0,
- /*
+ /**
* <p>The camera device will adjust exposure duration to
* avoid banding problems with 50Hz illumination sources.</p>
*/
ACAMERA_CONTROL_AE_ANTIBANDING_MODE_50HZ = 1,
- /*
+ /**
* <p>The camera device will adjust exposure duration to
* avoid banding problems with 60Hz illumination
* sources.</p>
*/
ACAMERA_CONTROL_AE_ANTIBANDING_MODE_60HZ = 2,
- /*
+ /**
* <p>The camera device will automatically adapt its
* antibanding routine to the current illumination
* condition. This is the default mode if AUTO is
@@ -573,13 +5454,13 @@
// ACAMERA_CONTROL_AE_LOCK
typedef enum acamera_metadata_enum_acamera_control_ae_lock {
- /*
+ /**
* <p>Auto-exposure lock is disabled; the AE algorithm
* is free to update its parameters.</p>
*/
ACAMERA_CONTROL_AE_LOCK_OFF = 0,
- /*
+ /**
* <p>Auto-exposure lock is enabled; the AE algorithm
* must not update the exposure and sensitivity parameters
* while the lock is active.</p>
@@ -596,7 +5477,7 @@
// ACAMERA_CONTROL_AE_MODE
typedef enum acamera_metadata_enum_acamera_control_ae_mode {
- /*
+ /**
* <p>The camera device's autoexposure routine is disabled.</p>
* <p>The application-selected ACAMERA_SENSOR_EXPOSURE_TIME,
* ACAMERA_SENSOR_SENSITIVITY and
@@ -624,7 +5505,7 @@
*/
ACAMERA_CONTROL_AE_MODE_OFF = 0,
- /*
+ /**
* <p>The camera device's autoexposure routine is active,
* with no flash control.</p>
* <p>The application's values for
@@ -640,7 +5521,7 @@
*/
ACAMERA_CONTROL_AE_MODE_ON = 1,
- /*
+ /**
* <p>Like ON, except that the camera device also controls
* the camera's flash unit, firing it in low-light
* conditions.</p>
@@ -655,7 +5536,7 @@
*/
ACAMERA_CONTROL_AE_MODE_ON_AUTO_FLASH = 2,
- /*
+ /**
* <p>Like ON, except that the camera device also controls
* the camera's flash unit, always firing it for still
* captures.</p>
@@ -670,7 +5551,7 @@
*/
ACAMERA_CONTROL_AE_MODE_ON_ALWAYS_FLASH = 3,
- /*
+ /**
* <p>Like ON_AUTO_FLASH, but with automatic red eye
* reduction.</p>
* <p>If deemed necessary by the camera device, a red eye
@@ -683,12 +5564,12 @@
// ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER
typedef enum acamera_metadata_enum_acamera_control_ae_precapture_trigger {
- /*
+ /**
* <p>The trigger is idle.</p>
*/
ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE = 0,
- /*
+ /**
* <p>The precapture metering sequence will be started
* by the camera device.</p>
* <p>The exact effect of the precapture trigger depends on
@@ -696,7 +5577,7 @@
*/
ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER_START = 1,
- /*
+ /**
* <p>The camera device will cancel any currently active or completed
* precapture metering sequence, the auto-exposure routine will return to its
* initial state.</p>
@@ -707,7 +5588,7 @@
// ACAMERA_CONTROL_AF_MODE
typedef enum acamera_metadata_enum_acamera_control_af_mode {
- /*
+ /**
* <p>The auto-focus routine does not control the lens;
* ACAMERA_LENS_FOCUS_DISTANCE is controlled by the
* application.</p>
@@ -716,7 +5597,7 @@
*/
ACAMERA_CONTROL_AF_MODE_OFF = 0,
- /*
+ /**
* <p>Basic automatic focus mode.</p>
* <p>In this mode, the lens does not move unless
* the autofocus trigger action is called. When that trigger
@@ -732,7 +5613,7 @@
*/
ACAMERA_CONTROL_AF_MODE_AUTO = 1,
- /*
+ /**
* <p>Close-up focusing mode.</p>
* <p>In this mode, the lens does not move unless the
* autofocus trigger action is called. When that trigger is
@@ -748,7 +5629,7 @@
*/
ACAMERA_CONTROL_AF_MODE_MACRO = 2,
- /*
+ /**
* <p>In this mode, the AF algorithm modifies the lens
* position continually to attempt to provide a
* constantly-in-focus image stream.</p>
@@ -769,7 +5650,7 @@
*/
ACAMERA_CONTROL_AF_MODE_CONTINUOUS_VIDEO = 3,
- /*
+ /**
* <p>In this mode, the AF algorithm modifies the lens
* position continually to attempt to provide a
* constantly-in-focus image stream.</p>
@@ -789,7 +5670,7 @@
*/
ACAMERA_CONTROL_AF_MODE_CONTINUOUS_PICTURE = 4,
- /*
+ /**
* <p>Extended depth of field (digital focus) mode.</p>
* <p>The camera device will produce images with an extended
* depth of field automatically; no special focusing
@@ -803,17 +5684,17 @@
// ACAMERA_CONTROL_AF_TRIGGER
typedef enum acamera_metadata_enum_acamera_control_af_trigger {
- /*
+ /**
* <p>The trigger is idle.</p>
*/
ACAMERA_CONTROL_AF_TRIGGER_IDLE = 0,
- /*
+ /**
* <p>Autofocus will trigger now.</p>
*/
ACAMERA_CONTROL_AF_TRIGGER_START = 1,
- /*
+ /**
* <p>Autofocus will return to its initial
* state, and cancel any currently active trigger.</p>
*/
@@ -823,14 +5704,14 @@
// ACAMERA_CONTROL_AWB_LOCK
typedef enum acamera_metadata_enum_acamera_control_awb_lock {
- /*
+ /**
* <p>Auto-white balance lock is disabled; the AWB
* algorithm is free to update its parameters if in AUTO
* mode.</p>
*/
ACAMERA_CONTROL_AWB_LOCK_OFF = 0,
- /*
+ /**
* <p>Auto-white balance lock is enabled; the AWB
* algorithm will not update its parameters while the lock
* is active.</p>
@@ -841,7 +5722,7 @@
// ACAMERA_CONTROL_AWB_MODE
typedef enum acamera_metadata_enum_acamera_control_awb_mode {
- /*
+ /**
* <p>The camera device's auto-white balance routine is disabled.</p>
* <p>The application-selected color transform matrix
* (ACAMERA_COLOR_CORRECTION_TRANSFORM) and gains
@@ -853,7 +5734,7 @@
*/
ACAMERA_CONTROL_AWB_MODE_OFF = 0,
- /*
+ /**
* <p>The camera device's auto-white balance routine is active.</p>
* <p>The application's values for ACAMERA_COLOR_CORRECTION_TRANSFORM
* and ACAMERA_COLOR_CORRECTION_GAINS are ignored.
@@ -866,7 +5747,7 @@
*/
ACAMERA_CONTROL_AWB_MODE_AUTO = 1,
- /*
+ /**
* <p>The camera device's auto-white balance routine is disabled;
* the camera device uses incandescent light as the assumed scene
* illumination for white balance.</p>
@@ -884,7 +5765,7 @@
*/
ACAMERA_CONTROL_AWB_MODE_INCANDESCENT = 2,
- /*
+ /**
* <p>The camera device's auto-white balance routine is disabled;
* the camera device uses fluorescent light as the assumed scene
* illumination for white balance.</p>
@@ -902,7 +5783,7 @@
*/
ACAMERA_CONTROL_AWB_MODE_FLUORESCENT = 3,
- /*
+ /**
* <p>The camera device's auto-white balance routine is disabled;
* the camera device uses warm fluorescent light as the assumed scene
* illumination for white balance.</p>
@@ -920,7 +5801,7 @@
*/
ACAMERA_CONTROL_AWB_MODE_WARM_FLUORESCENT = 4,
- /*
+ /**
* <p>The camera device's auto-white balance routine is disabled;
* the camera device uses daylight light as the assumed scene
* illumination for white balance.</p>
@@ -938,7 +5819,7 @@
*/
ACAMERA_CONTROL_AWB_MODE_DAYLIGHT = 5,
- /*
+ /**
* <p>The camera device's auto-white balance routine is disabled;
* the camera device uses cloudy daylight light as the assumed scene
* illumination for white balance.</p>
@@ -953,7 +5834,7 @@
*/
ACAMERA_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT = 6,
- /*
+ /**
* <p>The camera device's auto-white balance routine is disabled;
* the camera device uses twilight light as the assumed scene
* illumination for white balance.</p>
@@ -968,7 +5849,7 @@
*/
ACAMERA_CONTROL_AWB_MODE_TWILIGHT = 7,
- /*
+ /**
* <p>The camera device's auto-white balance routine is disabled;
* the camera device uses shade light as the assumed scene
* illumination for white balance.</p>
@@ -987,34 +5868,34 @@
// ACAMERA_CONTROL_CAPTURE_INTENT
typedef enum acamera_metadata_enum_acamera_control_capture_intent {
- /*
+ /**
* <p>The goal of this request doesn't fall into the other
* categories. The camera device will default to preview-like
* behavior.</p>
*/
ACAMERA_CONTROL_CAPTURE_INTENT_CUSTOM = 0,
- /*
+ /**
* <p>This request is for a preview-like use case.</p>
* <p>The precapture trigger may be used to start off a metering
* w/flash sequence.</p>
*/
ACAMERA_CONTROL_CAPTURE_INTENT_PREVIEW = 1,
- /*
+ /**
* <p>This request is for a still capture-type
* use case.</p>
* <p>If the flash unit is under automatic control, it may fire as needed.</p>
*/
ACAMERA_CONTROL_CAPTURE_INTENT_STILL_CAPTURE = 2,
- /*
+ /**
* <p>This request is for a video recording
* use case.</p>
*/
ACAMERA_CONTROL_CAPTURE_INTENT_VIDEO_RECORD = 3,
- /*
+ /**
* <p>This request is for a video snapshot (still
* image while recording video) use case.</p>
* <p>The camera device should take the highest-quality image
@@ -1023,7 +5904,7 @@
*/
ACAMERA_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT = 4,
- /*
+ /**
* <p>This request is for a ZSL usecase; the
* application will stream full-resolution images and
* reprocess one or several later for a final
@@ -1031,7 +5912,7 @@
*/
ACAMERA_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG = 5,
- /*
+ /**
* <p>This request is for manual capture use case where
* the applications want to directly control the capture parameters.</p>
* <p>For example, the application may wish to manually control
@@ -1046,57 +5927,57 @@
// ACAMERA_CONTROL_EFFECT_MODE
typedef enum acamera_metadata_enum_acamera_control_effect_mode {
- /*
+ /**
* <p>No color effect will be applied.</p>
*/
ACAMERA_CONTROL_EFFECT_MODE_OFF = 0,
- /*
+ /**
* <p>A "monocolor" effect where the image is mapped into
* a single color.</p>
* <p>This will typically be grayscale.</p>
*/
ACAMERA_CONTROL_EFFECT_MODE_MONO = 1,
- /*
+ /**
* <p>A "photo-negative" effect where the image's colors
* are inverted.</p>
*/
ACAMERA_CONTROL_EFFECT_MODE_NEGATIVE = 2,
- /*
+ /**
* <p>A "solarisation" effect (Sabattier effect) where the
* image is wholly or partially reversed in
* tone.</p>
*/
ACAMERA_CONTROL_EFFECT_MODE_SOLARIZE = 3,
- /*
+ /**
* <p>A "sepia" effect where the image is mapped into warm
* gray, red, and brown tones.</p>
*/
ACAMERA_CONTROL_EFFECT_MODE_SEPIA = 4,
- /*
+ /**
* <p>A "posterization" effect where the image uses
* discrete regions of tone rather than a continuous
* gradient of tones.</p>
*/
ACAMERA_CONTROL_EFFECT_MODE_POSTERIZE = 5,
- /*
+ /**
* <p>A "whiteboard" effect where the image is typically displayed
* as regions of white, with black or grey details.</p>
*/
ACAMERA_CONTROL_EFFECT_MODE_WHITEBOARD = 6,
- /*
+ /**
* <p>A "blackboard" effect where the image is typically displayed
* as regions of black, with white or grey details.</p>
*/
ACAMERA_CONTROL_EFFECT_MODE_BLACKBOARD = 7,
- /*
+ /**
* <p>An "aqua" effect where a blue hue is added to the image.</p>
*/
ACAMERA_CONTROL_EFFECT_MODE_AQUA = 8,
@@ -1105,7 +5986,7 @@
// ACAMERA_CONTROL_MODE
typedef enum acamera_metadata_enum_acamera_control_mode {
- /*
+ /**
* <p>Full application control of pipeline.</p>
* <p>All control by the device's metering and focusing (3A)
* routines is disabled, and no other settings in
@@ -1123,7 +6004,7 @@
*/
ACAMERA_CONTROL_MODE_OFF = 0,
- /*
+ /**
* <p>Use settings for each individual 3A routine.</p>
* <p>Manual control of capture parameters is disabled. All
* controls in ACAMERA_CONTROL_* besides sceneMode take
@@ -1131,7 +6012,7 @@
*/
ACAMERA_CONTROL_MODE_AUTO = 1,
- /*
+ /**
* <p>Use a specific scene mode.</p>
* <p>Enabling this disables control.aeMode, control.awbMode and
* control.afMode controls; the camera device will ignore
@@ -1145,7 +6026,7 @@
*/
ACAMERA_CONTROL_MODE_USE_SCENE_MODE = 2,
- /*
+ /**
* <p>Same as OFF mode, except that this capture will not be
* used by camera device background auto-exposure, auto-white balance and
* auto-focus algorithms (3A) to update their statistics.</p>
@@ -1161,12 +6042,12 @@
// ACAMERA_CONTROL_SCENE_MODE
typedef enum acamera_metadata_enum_acamera_control_scene_mode {
- /*
+ /**
* <p>Indicates that no scene modes are set for a given capture request.</p>
*/
ACAMERA_CONTROL_SCENE_MODE_DISABLED = 0,
- /*
+ /**
* <p>If face detection support exists, use face
* detection data for auto-focus, auto-white balance, and
* auto-exposure routines.</p>
@@ -1185,91 +6066,91 @@
*/
ACAMERA_CONTROL_SCENE_MODE_FACE_PRIORITY = 1,
- /*
+ /**
* <p>Optimized for photos of quickly moving objects.</p>
* <p>Similar to SPORTS.</p>
*/
ACAMERA_CONTROL_SCENE_MODE_ACTION = 2,
- /*
+ /**
* <p>Optimized for still photos of people.</p>
*/
ACAMERA_CONTROL_SCENE_MODE_PORTRAIT = 3,
- /*
+ /**
* <p>Optimized for photos of distant macroscopic objects.</p>
*/
ACAMERA_CONTROL_SCENE_MODE_LANDSCAPE = 4,
- /*
+ /**
* <p>Optimized for low-light settings.</p>
*/
ACAMERA_CONTROL_SCENE_MODE_NIGHT = 5,
- /*
+ /**
* <p>Optimized for still photos of people in low-light
* settings.</p>
*/
ACAMERA_CONTROL_SCENE_MODE_NIGHT_PORTRAIT = 6,
- /*
+ /**
* <p>Optimized for dim, indoor settings where flash must
* remain off.</p>
*/
ACAMERA_CONTROL_SCENE_MODE_THEATRE = 7,
- /*
+ /**
* <p>Optimized for bright, outdoor beach settings.</p>
*/
ACAMERA_CONTROL_SCENE_MODE_BEACH = 8,
- /*
+ /**
* <p>Optimized for bright, outdoor settings containing snow.</p>
*/
ACAMERA_CONTROL_SCENE_MODE_SNOW = 9,
- /*
+ /**
* <p>Optimized for scenes of the setting sun.</p>
*/
ACAMERA_CONTROL_SCENE_MODE_SUNSET = 10,
- /*
+ /**
* <p>Optimized to avoid blurry photos due to small amounts of
* device motion (for example: due to hand shake).</p>
*/
ACAMERA_CONTROL_SCENE_MODE_STEADYPHOTO = 11,
- /*
+ /**
* <p>Optimized for nighttime photos of fireworks.</p>
*/
ACAMERA_CONTROL_SCENE_MODE_FIREWORKS = 12,
- /*
+ /**
* <p>Optimized for photos of quickly moving people.</p>
* <p>Similar to ACTION.</p>
*/
ACAMERA_CONTROL_SCENE_MODE_SPORTS = 13,
- /*
+ /**
* <p>Optimized for dim, indoor settings with multiple moving
* people.</p>
*/
ACAMERA_CONTROL_SCENE_MODE_PARTY = 14,
- /*
+ /**
* <p>Optimized for dim settings where the main light source
* is a flame.</p>
*/
ACAMERA_CONTROL_SCENE_MODE_CANDLELIGHT = 15,
- /*
+ /**
     * <p>Optimized for accurately capturing a photo of a barcode
* for use by camera applications that wish to read the
* barcode value.</p>
*/
ACAMERA_CONTROL_SCENE_MODE_BARCODE = 16,
- /*
+ /**
* <p>This is deprecated, please use {@link
* android.hardware.camera2.CameraDevice#createConstrainedHighSpeedCaptureSession}
* and {@link
@@ -1354,7 +6235,7 @@
*/
ACAMERA_CONTROL_SCENE_MODE_HIGH_SPEED_VIDEO = 17,
- /*
+ /**
* <p>Turn on a device-specific high dynamic range (HDR) mode.</p>
* <p>In this scene mode, the camera device captures images
* that keep a larger range of scene illumination levels
@@ -1382,11 +6263,20 @@
* produced in response to a capture request submitted
* while in HDR mode.</p>
* <p>Since substantial post-processing is generally needed to
- * produce an HDR image, only YUV and JPEG outputs are
- * supported for LIMITED/FULL device HDR captures, and only
- * JPEG outputs are supported for LEGACY HDR
- * captures. Using a RAW output for HDR capture is not
+ * produce an HDR image, only YUV, PRIVATE, and JPEG
+ * outputs are supported for LIMITED/FULL device HDR
+ * captures, and only JPEG outputs are supported for LEGACY
+ * HDR captures. Using a RAW output for HDR capture is not
* supported.</p>
+ * <p>Some devices may also support always-on HDR, which
+ * applies HDR processing at full frame rate. For these
+ * devices, intents other than STILL_CAPTURE will also
+ * produce an HDR output with no frame rate impact compared
+ * to normal operation, though the quality may be lower
+ * than for STILL_CAPTURE intents.</p>
+ * <p>If SCENE_MODE_HDR is used with unsupported output types
+ * or capture intents, the images captured will be as if
+ * the SCENE_MODE was not enabled at all.</p>
*
* @see ACAMERA_CONTROL_CAPTURE_INTENT
*/
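
(For reference, selecting this scene mode through the NDK looks roughly like the
sketch below; "request" is assumed to be a valid ACaptureRequest obtained
elsewhere, and error handling is omitted.)

    uint8_t controlMode = ACAMERA_CONTROL_MODE_USE_SCENE_MODE;
    uint8_t sceneMode   = ACAMERA_CONTROL_SCENE_MODE_HDR;
    // Scene modes only take effect while the control mode is USE_SCENE_MODE.
    ACaptureRequest_setEntry_u8(request, ACAMERA_CONTROL_MODE, 1, &controlMode);
    ACaptureRequest_setEntry_u8(request, ACAMERA_CONTROL_SCENE_MODE, 1, &sceneMode);
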
@@ -1396,12 +6286,12 @@
// ACAMERA_CONTROL_VIDEO_STABILIZATION_MODE
typedef enum acamera_metadata_enum_acamera_control_video_stabilization_mode {
- /*
+ /**
* <p>Video stabilization is disabled.</p>
*/
ACAMERA_CONTROL_VIDEO_STABILIZATION_MODE_OFF = 0,
- /*
+ /**
* <p>Video stabilization is enabled.</p>
*/
ACAMERA_CONTROL_VIDEO_STABILIZATION_MODE_ON = 1,
@@ -1410,7 +6300,7 @@
// ACAMERA_CONTROL_AE_STATE
typedef enum acamera_metadata_enum_acamera_control_ae_state {
- /*
+ /**
* <p>AE is off or recently reset.</p>
* <p>When a camera device is opened, it starts in
* this state. This is a transient state, the camera device may skip reporting
@@ -1418,7 +6308,7 @@
*/
ACAMERA_CONTROL_AE_STATE_INACTIVE = 0,
- /*
+ /**
* <p>AE doesn't yet have a good set of control values
* for the current scene.</p>
* <p>This is a transient state, the camera device may skip
@@ -1426,25 +6316,25 @@
*/
ACAMERA_CONTROL_AE_STATE_SEARCHING = 1,
- /*
+ /**
* <p>AE has a good set of control values for the
* current scene.</p>
*/
ACAMERA_CONTROL_AE_STATE_CONVERGED = 2,
- /*
+ /**
* <p>AE has been locked.</p>
*/
ACAMERA_CONTROL_AE_STATE_LOCKED = 3,
- /*
+ /**
* <p>AE has a good set of control values, but flash
* needs to be fired for good quality still
* capture.</p>
*/
ACAMERA_CONTROL_AE_STATE_FLASH_REQUIRED = 4,
- /*
+ /**
* <p>AE has been asked to do a precapture sequence
* and is currently executing it.</p>
* <p>Precapture can be triggered through setting
@@ -1465,7 +6355,7 @@
// ACAMERA_CONTROL_AF_STATE
typedef enum acamera_metadata_enum_acamera_control_af_state {
- /*
+ /**
* <p>AF is off or has not yet tried to scan/been asked
* to scan.</p>
* <p>When a camera device is opened, it starts in this
@@ -1475,7 +6365,7 @@
*/
ACAMERA_CONTROL_AF_STATE_INACTIVE = 0,
- /*
+ /**
     * <p>AF is currently performing an AF scan initiated by the
* camera device in a continuous autofocus mode.</p>
* <p>Only used by CONTINUOUS_* AF modes. This is a transient
@@ -1484,7 +6374,7 @@
*/
ACAMERA_CONTROL_AF_STATE_PASSIVE_SCAN = 1,
- /*
+ /**
* <p>AF currently believes it is in focus, but may
* restart scanning at any time.</p>
* <p>Only used by CONTINUOUS_* AF modes. This is a transient
@@ -1493,7 +6383,7 @@
*/
ACAMERA_CONTROL_AF_STATE_PASSIVE_FOCUSED = 2,
- /*
+ /**
* <p>AF is performing an AF scan because it was
* triggered by AF trigger.</p>
* <p>Only used by AUTO or MACRO AF modes. This is a transient
@@ -1502,7 +6392,7 @@
*/
ACAMERA_CONTROL_AF_STATE_ACTIVE_SCAN = 3,
- /*
+ /**
* <p>AF believes it is focused correctly and has locked
* focus.</p>
* <p>This state is reached only after an explicit START AF trigger has been
@@ -1515,7 +6405,7 @@
*/
ACAMERA_CONTROL_AF_STATE_FOCUSED_LOCKED = 4,
- /*
+ /**
* <p>AF has failed to focus successfully and has locked
* focus.</p>
* <p>This state is reached only after an explicit START AF trigger has been
@@ -1528,7 +6418,7 @@
*/
ACAMERA_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED = 5,
- /*
+ /**
* <p>AF finished a passive scan without finding focus,
* and may restart scanning at any time.</p>
* <p>Only used by CONTINUOUS_* AF modes. This is a transient state, the camera
@@ -1542,7 +6432,7 @@
// ACAMERA_CONTROL_AWB_STATE
typedef enum acamera_metadata_enum_acamera_control_awb_state {
- /*
+ /**
* <p>AWB is not in auto mode, or has not yet started metering.</p>
* <p>When a camera device is opened, it starts in this
* state. This is a transient state, the camera device may
@@ -1551,7 +6441,7 @@
*/
ACAMERA_CONTROL_AWB_STATE_INACTIVE = 0,
- /*
+ /**
* <p>AWB doesn't yet have a good set of control
* values for the current scene.</p>
* <p>This is a transient state, the camera device
@@ -1559,13 +6449,13 @@
*/
ACAMERA_CONTROL_AWB_STATE_SEARCHING = 1,
- /*
+ /**
* <p>AWB has a good set of control values for the
* current scene.</p>
*/
ACAMERA_CONTROL_AWB_STATE_CONVERGED = 2,
- /*
+ /**
* <p>AWB has been locked.</p>
*/
ACAMERA_CONTROL_AWB_STATE_LOCKED = 3,
@@ -1592,24 +6482,24 @@
// ACAMERA_EDGE_MODE
typedef enum acamera_metadata_enum_acamera_edge_mode {
- /*
+ /**
* <p>No edge enhancement is applied.</p>
*/
ACAMERA_EDGE_MODE_OFF = 0,
- /*
+ /**
* <p>Apply edge enhancement at a quality level that does not slow down frame rate
* relative to sensor output. It may be the same as OFF if edge enhancement will
* slow down frame rate relative to sensor.</p>
*/
ACAMERA_EDGE_MODE_FAST = 1,
- /*
+ /**
* <p>Apply high-quality edge enhancement, at a cost of possibly reduced output frame rate.</p>
*/
ACAMERA_EDGE_MODE_HIGH_QUALITY = 2,
- /*
+ /**
* <p>Edge enhancement is applied at different levels for different output streams,
* based on resolution. Streams at maximum recording resolution (see {@link
* android.hardware.camera2.CameraDevice#createCaptureSession}) or below have
@@ -1639,18 +6529,18 @@
// ACAMERA_FLASH_MODE
typedef enum acamera_metadata_enum_acamera_flash_mode {
- /*
+ /**
* <p>Do not fire the flash for this capture.</p>
*/
ACAMERA_FLASH_MODE_OFF = 0,
- /*
+ /**
* <p>If the flash is available and charged, fire flash
* for this capture.</p>
*/
ACAMERA_FLASH_MODE_SINGLE = 1,
- /*
+ /**
* <p>Transition flash to continuously on.</p>
*/
ACAMERA_FLASH_MODE_TORCH = 2,
@@ -1659,27 +6549,27 @@
// ACAMERA_FLASH_STATE
typedef enum acamera_metadata_enum_acamera_flash_state {
- /*
+ /**
* <p>No flash on camera.</p>
*/
ACAMERA_FLASH_STATE_UNAVAILABLE = 0,
- /*
+ /**
* <p>Flash is charging and cannot be fired.</p>
*/
ACAMERA_FLASH_STATE_CHARGING = 1,
- /*
+ /**
* <p>Flash is ready to fire.</p>
*/
ACAMERA_FLASH_STATE_READY = 2,
- /*
+ /**
* <p>Flash fired for this capture.</p>
*/
ACAMERA_FLASH_STATE_FIRED = 3,
- /*
+ /**
* <p>Flash partially illuminated this frame.</p>
* <p>This is usually due to the next or previous frame having
* the flash fire, and the flash spilling into this capture
@@ -1701,7 +6591,7 @@
// ACAMERA_HOT_PIXEL_MODE
typedef enum acamera_metadata_enum_acamera_hot_pixel_mode {
- /*
+ /**
* <p>No hot pixel correction is applied.</p>
* <p>The frame rate must not be reduced relative to sensor raw output
* for this option.</p>
@@ -1711,7 +6601,7 @@
*/
ACAMERA_HOT_PIXEL_MODE_OFF = 0,
- /*
+ /**
* <p>Hot pixel correction is applied, without reducing frame
* rate relative to sensor raw output.</p>
* <p>The hotpixel map may be returned in ACAMERA_STATISTICS_HOT_PIXEL_MAP.</p>
@@ -1720,7 +6610,7 @@
*/
ACAMERA_HOT_PIXEL_MODE_FAST = 1,
- /*
+ /**
* <p>High-quality hot pixel correction is applied, at a cost
* of possibly reduced frame rate relative to sensor raw output.</p>
* <p>The hotpixel map may be returned in ACAMERA_STATISTICS_HOT_PIXEL_MAP.</p>
@@ -1735,12 +6625,12 @@
// ACAMERA_LENS_OPTICAL_STABILIZATION_MODE
typedef enum acamera_metadata_enum_acamera_lens_optical_stabilization_mode {
- /*
+ /**
* <p>Optical stabilization is unavailable.</p>
*/
ACAMERA_LENS_OPTICAL_STABILIZATION_MODE_OFF = 0,
- /*
+ /**
* <p>Optical stabilization is enabled.</p>
*/
ACAMERA_LENS_OPTICAL_STABILIZATION_MODE_ON = 1,
@@ -1749,17 +6639,17 @@
// ACAMERA_LENS_FACING
typedef enum acamera_metadata_enum_acamera_lens_facing {
- /*
+ /**
* <p>The camera device faces the same direction as the device's screen.</p>
*/
ACAMERA_LENS_FACING_FRONT = 0,
- /*
+ /**
* <p>The camera device faces the opposite direction as the device's screen.</p>
*/
ACAMERA_LENS_FACING_BACK = 1,
- /*
+ /**
* <p>The camera device is an external camera, and has no fixed facing relative to the
* device's screen.</p>
*/
@@ -1769,7 +6659,7 @@
// ACAMERA_LENS_STATE
typedef enum acamera_metadata_enum_acamera_lens_state {
- /*
+ /**
* <p>The lens parameters (ACAMERA_LENS_FOCAL_LENGTH, ACAMERA_LENS_FOCUS_DISTANCE,
* ACAMERA_LENS_FILTER_DENSITY and ACAMERA_LENS_APERTURE) are not changing.</p>
*
@@ -1780,7 +6670,7 @@
*/
ACAMERA_LENS_STATE_STATIONARY = 0,
- /*
+ /**
* <p>One or several of the lens parameters
* (ACAMERA_LENS_FOCAL_LENGTH, ACAMERA_LENS_FOCUS_DISTANCE,
* ACAMERA_LENS_FILTER_DENSITY or ACAMERA_LENS_APERTURE) is
@@ -1798,7 +6688,7 @@
// ACAMERA_LENS_INFO_FOCUS_DISTANCE_CALIBRATION
typedef enum acamera_metadata_enum_acamera_lens_info_focus_distance_calibration {
- /*
+ /**
* <p>The lens focus distance is not accurate, and the units used for
* ACAMERA_LENS_FOCUS_DISTANCE do not correspond to any physical units.</p>
* <p>Setting the lens to the same focus distance on separate occasions may
@@ -1813,7 +6703,7 @@
*/
ACAMERA_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED = 0,
- /*
+ /**
* <p>The lens focus distance is measured in diopters.</p>
* <p>However, setting the lens to the same focus distance
* on separate occasions may result in a different real
@@ -1823,7 +6713,7 @@
*/
ACAMERA_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE = 1,
- /*
+ /**
* <p>The lens focus distance is measured in diopters, and
* is calibrated.</p>
* <p>The lens mechanism is calibrated so that setting the
@@ -1839,31 +6729,31 @@
// ACAMERA_NOISE_REDUCTION_MODE
typedef enum acamera_metadata_enum_acamera_noise_reduction_mode {
- /*
+ /**
* <p>No noise reduction is applied.</p>
*/
ACAMERA_NOISE_REDUCTION_MODE_OFF = 0,
- /*
+ /**
* <p>Noise reduction is applied without reducing frame rate relative to sensor
* output. It may be the same as OFF if noise reduction will reduce frame rate
* relative to sensor.</p>
*/
ACAMERA_NOISE_REDUCTION_MODE_FAST = 1,
- /*
+ /**
* <p>High-quality noise reduction is applied, at the cost of possibly reduced frame
* rate relative to sensor output.</p>
*/
ACAMERA_NOISE_REDUCTION_MODE_HIGH_QUALITY = 2,
- /*
+ /**
* <p>MINIMAL noise reduction is applied without reducing frame rate relative to
     * sensor output.</p>
*/
ACAMERA_NOISE_REDUCTION_MODE_MINIMAL = 3,
- /*
+ /**
* <p>Noise reduction is applied at different levels for different output streams,
* based on resolution. Streams at maximum recording resolution (see {@link
* android.hardware.camera2.CameraDevice#createCaptureSession}) or below have noise
@@ -1895,7 +6785,7 @@
// ACAMERA_REQUEST_AVAILABLE_CAPABILITIES
typedef enum acamera_metadata_enum_acamera_request_available_capabilities {
- /*
+ /**
* <p>The minimal set of capabilities that every camera
* device (regardless of ACAMERA_INFO_SUPPORTED_HARDWARE_LEVEL)
* supports.</p>
@@ -1911,7 +6801,7 @@
*/
ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE = 0,
- /*
+ /**
* <p>The camera device can be manually controlled (3A algorithms such
* as auto-exposure, and auto-focus can be bypassed).
* The camera device supports basic manual control of the sensor image
@@ -1970,7 +6860,7 @@
*/
ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR = 1,
- /*
+ /**
* <p>The camera device post-processing stages can be manually controlled.
* The camera device supports basic manual control of the image post-processing
* stages. This means the following controls are guaranteed to be supported:</p>
@@ -2030,7 +6920,7 @@
*/
ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING = 2,
- /*
+ /**
* <p>The camera device supports outputting RAW buffers and
* metadata for interpreting them.</p>
* <p>Devices supporting the RAW capability allow both for
@@ -2051,7 +6941,7 @@
*/
ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_RAW = 3,
- /*
+ /**
* <p>The camera device supports accurately reporting the sensor settings for many of
* the sensor controls while the built-in 3A algorithm is running. This allows
* reporting of sensor settings even when these settings cannot be manually changed.</p>
@@ -2082,7 +6972,7 @@
*/
ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS = 5,
- /*
+ /**
* <p>The camera device supports capturing high-resolution images at >= 20 frames per
* second, in at least the uncompressed YUV format, when post-processing settings are set
* to FAST. Additionally, maximum-resolution images can be captured at >= 10 frames
@@ -2120,7 +7010,7 @@
*/
ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE = 6,
- /*
+ /**
* <p>The camera device can produce depth measurements from its field of view.</p>
* <p>This capability requires the camera device to support the following:</p>
* <ul>
@@ -2173,12 +7063,12 @@
// ACAMERA_SCALER_CROPPING_TYPE
typedef enum acamera_metadata_enum_acamera_scaler_cropping_type {
- /*
+ /**
* <p>The camera device only supports centered crop regions.</p>
*/
ACAMERA_SCALER_CROPPING_TYPE_CENTER_ONLY = 0,
- /*
+ /**
* <p>The camera device supports arbitrarily chosen crop regions.</p>
*/
ACAMERA_SCALER_CROPPING_TYPE_FREEFORM = 1,
@@ -2192,7 +7082,7 @@
ACAMERA_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT = 2,
- /*
+ /**
* <p>Incandescent light</p>
*/
ACAMERA_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN = 3,
@@ -2205,22 +7095,22 @@
ACAMERA_SENSOR_REFERENCE_ILLUMINANT1_SHADE = 11,
- /*
+ /**
* <p>D 5700 - 7100K</p>
*/
ACAMERA_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT = 12,
- /*
+ /**
* <p>N 4600 - 5400K</p>
*/
ACAMERA_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT = 13,
- /*
+ /**
* <p>W 3900 - 4500K</p>
*/
ACAMERA_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT = 14,
- /*
+ /**
* <p>WW 3200 - 3700K</p>
*/
ACAMERA_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT = 15,
@@ -2245,14 +7135,14 @@
// ACAMERA_SENSOR_TEST_PATTERN_MODE
typedef enum acamera_metadata_enum_acamera_sensor_test_pattern_mode {
- /*
+ /**
* <p>No test pattern mode is used, and the camera
* device returns captures from the image sensor.</p>
* <p>This is the default if the key is not set.</p>
*/
ACAMERA_SENSOR_TEST_PATTERN_MODE_OFF = 0,
- /*
+ /**
* <p>Each pixel in <code>[R, G_even, G_odd, B]</code> is replaced by its
* respective color channel provided in
* ACAMERA_SENSOR_TEST_PATTERN_DATA.</p>
@@ -2269,7 +7159,7 @@
*/
ACAMERA_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR = 1,
- /*
+ /**
* <p>All pixel data is replaced with an 8-bar color pattern.</p>
* <p>The vertical bars (left-to-right) are as follows:</p>
* <ul>
@@ -2305,7 +7195,7 @@
*/
ACAMERA_SENSOR_TEST_PATTERN_MODE_COLOR_BARS = 2,
- /*
+ /**
* <p>The test pattern is similar to COLOR_BARS, except that
* each bar should start at its specified color at the top,
* and fade to gray at the bottom.</p>
@@ -2322,7 +7212,7 @@
*/
ACAMERA_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY = 3,
- /*
+ /**
* <p>All pixel data is replaced by a pseudo-random sequence
* generated from a PN9 512-bit sequence (typically implemented
* in hardware with a linear feedback shift register).</p>
@@ -2332,7 +7222,7 @@
*/
ACAMERA_SENSOR_TEST_PATTERN_MODE_PN9 = 4,
- /*
+ /**
* <p>The first custom test pattern. All custom patterns that are
* available only on this camera device are at least this numeric
* value.</p>
@@ -2354,7 +7244,7 @@
ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_BGGR = 3,
- /*
+ /**
* <p>Sensor is not Bayer; output has 3 16-bit
* values for each pixel, instead of just 1 16-bit value
* per pixel.</p>
@@ -2365,7 +7255,7 @@
// ACAMERA_SENSOR_INFO_TIMESTAMP_SOURCE
typedef enum acamera_metadata_enum_acamera_sensor_info_timestamp_source {
- /*
+ /**
* <p>Timestamps from ACAMERA_SENSOR_TIMESTAMP are in nanoseconds and monotonic,
* but can not be compared to timestamps from other subsystems
* (e.g. accelerometer, gyro etc.), or other instances of the same or different
@@ -2377,7 +7267,7 @@
*/
ACAMERA_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN = 0,
- /*
+ /**
* <p>Timestamps from ACAMERA_SENSOR_TIMESTAMP are in the same timebase as
* {@link android.os.SystemClock#elapsedRealtimeNanos},
* and they can be compared to other timestamps using that base.</p>
@@ -2399,18 +7289,18 @@
// ACAMERA_SHADING_MODE
typedef enum acamera_metadata_enum_acamera_shading_mode {
- /*
+ /**
* <p>No lens shading correction is applied.</p>
*/
ACAMERA_SHADING_MODE_OFF = 0,
- /*
+ /**
* <p>Apply lens shading corrections, without slowing
* frame rate relative to sensor raw output</p>
*/
ACAMERA_SHADING_MODE_FAST = 1,
- /*
+ /**
* <p>Apply high-quality lens shading correction, at the
* cost of possibly reduced frame rate.</p>
*/
@@ -2421,18 +7311,18 @@
// ACAMERA_STATISTICS_FACE_DETECT_MODE
typedef enum acamera_metadata_enum_acamera_statistics_face_detect_mode {
- /*
+ /**
* <p>Do not include face detection statistics in capture
* results.</p>
*/
ACAMERA_STATISTICS_FACE_DETECT_MODE_OFF = 0,
- /*
+ /**
* <p>Return face rectangle and confidence values only.</p>
*/
ACAMERA_STATISTICS_FACE_DETECT_MODE_SIMPLE = 1,
- /*
+ /**
* <p>Return all face
* metadata.</p>
* <p>In this mode, face rectangles, scores, landmarks, and face IDs are all valid.</p>
@@ -2443,12 +7333,12 @@
// ACAMERA_STATISTICS_HOT_PIXEL_MAP_MODE
typedef enum acamera_metadata_enum_acamera_statistics_hot_pixel_map_mode {
- /*
+ /**
* <p>Hot pixel map production is disabled.</p>
*/
ACAMERA_STATISTICS_HOT_PIXEL_MAP_MODE_OFF = 0,
- /*
+ /**
* <p>Hot pixel map production is enabled.</p>
*/
ACAMERA_STATISTICS_HOT_PIXEL_MAP_MODE_ON = 1,
@@ -2457,19 +7347,19 @@
// ACAMERA_STATISTICS_SCENE_FLICKER
typedef enum acamera_metadata_enum_acamera_statistics_scene_flicker {
- /*
+ /**
* <p>The camera device does not detect any flickering illumination
* in the current scene.</p>
*/
ACAMERA_STATISTICS_SCENE_FLICKER_NONE = 0,
- /*
+ /**
* <p>The camera device detects illumination flickering at 50Hz
* in the current scene.</p>
*/
ACAMERA_STATISTICS_SCENE_FLICKER_50HZ = 1,
- /*
+ /**
* <p>The camera device detects illumination flickering at 60Hz
* in the current scene.</p>
*/
@@ -2479,12 +7369,12 @@
// ACAMERA_STATISTICS_LENS_SHADING_MAP_MODE
typedef enum acamera_metadata_enum_acamera_statistics_lens_shading_map_mode {
- /*
+ /**
* <p>Do not include a lens shading map in the capture result.</p>
*/
ACAMERA_STATISTICS_LENS_SHADING_MAP_MODE_OFF = 0,
- /*
+ /**
* <p>Include a lens shading map in the capture result.</p>
*/
ACAMERA_STATISTICS_LENS_SHADING_MAP_MODE_ON = 1,
@@ -2495,7 +7385,7 @@
// ACAMERA_TONEMAP_MODE
typedef enum acamera_metadata_enum_acamera_tonemap_mode {
- /*
+ /**
* <p>Use the tone mapping curve specified in
* the ACAMERA_TONEMAPCURVE_* entries.</p>
* <p>All color enhancement and tonemapping must be disabled, except
@@ -2506,19 +7396,19 @@
*/
ACAMERA_TONEMAP_MODE_CONTRAST_CURVE = 0,
- /*
+ /**
* <p>Advanced gamma mapping and color enhancement may be applied, without
* reducing frame rate compared to raw sensor output.</p>
*/
ACAMERA_TONEMAP_MODE_FAST = 1,
- /*
+ /**
* <p>High-quality gamma mapping and color enhancement will be applied, at
* the cost of possibly reduced frame rate compared to raw sensor output.</p>
*/
ACAMERA_TONEMAP_MODE_HIGH_QUALITY = 2,
- /*
+ /**
     * <p>Use the gamma value specified in ACAMERA_TONEMAP_GAMMA to perform
* tonemapping.</p>
* <p>All color enhancement and tonemapping must be disabled, except
@@ -2529,7 +7419,7 @@
*/
ACAMERA_TONEMAP_MODE_GAMMA_VALUE = 3,
- /*
+ /**
* <p>Use the preset tonemapping curve specified in
     * ACAMERA_TONEMAP_PRESET_CURVE to perform tonemapping.</p>
* <p>All color enhancement and tonemapping must be disabled, except
@@ -2545,12 +7435,12 @@
// ACAMERA_TONEMAP_PRESET_CURVE
typedef enum acamera_metadata_enum_acamera_tonemap_preset_curve {
- /*
+ /**
* <p>Tonemapping curve is defined by sRGB</p>
*/
ACAMERA_TONEMAP_PRESET_CURVE_SRGB = 0,
- /*
+ /**
* <p>Tonemapping curve is defined by ITU-R BT.709</p>
*/
ACAMERA_TONEMAP_PRESET_CURVE_REC709 = 1,
@@ -2561,7 +7451,7 @@
// ACAMERA_INFO_SUPPORTED_HARDWARE_LEVEL
typedef enum acamera_metadata_enum_acamera_info_supported_hardware_level {
- /*
+ /**
* <p>This camera device does not have enough capabilities to qualify as a <code>FULL</code> device or
* better.</p>
* <p>Only the stream configurations listed in the <code>LEGACY</code> and <code>LIMITED</code> tables in the
@@ -2589,7 +7479,7 @@
*/
ACAMERA_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED = 0,
- /*
+ /**
* <p>This camera device is capable of supporting advanced imaging applications.</p>
* <p>The stream configurations listed in the <code>FULL</code>, <code>LEGACY</code> and <code>LIMITED</code> tables in the
* {@link android.hardware.camera2.CameraDevice#createCaptureSession
@@ -2618,7 +7508,7 @@
*/
ACAMERA_INFO_SUPPORTED_HARDWARE_LEVEL_FULL = 1,
- /*
+ /**
* <p>This camera device is running in backward compatibility mode.</p>
* <p>Only the stream configurations listed in the <code>LEGACY</code> table in the {@link
* android.hardware.camera2.CameraDevice#createCaptureSession createCaptureSession}
@@ -2639,7 +7529,7 @@
*/
ACAMERA_INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY = 2,
- /*
+ /**
* <p>This camera device is capable of YUV reprocessing and RAW data capture, in addition to
* FULL-level capabilities.</p>
* <p>The stream configurations listed in the <code>LEVEL_3</code>, <code>RAW</code>, <code>FULL</code>, <code>LEGACY</code> and
@@ -2672,7 +7562,7 @@
// ACAMERA_SYNC_FRAME_NUMBER
typedef enum acamera_metadata_enum_acamera_sync_frame_number {
- /*
+ /**
* <p>The current result is not yet fully synchronized to any request.</p>
* <p>Synchronization is in progress, and reading metadata from this
* result may include a mix of data that have taken effect since the
@@ -2686,7 +7576,7 @@
*/
ACAMERA_SYNC_FRAME_NUMBER_CONVERGING = -1,
- /*
+ /**
* <p>The current result's synchronization status is unknown.</p>
* <p>The result may have already converged, or it may be in
* progress. Reading from this result may include some mix
@@ -2705,7 +7595,7 @@
// ACAMERA_SYNC_MAX_LATENCY
typedef enum acamera_metadata_enum_acamera_sync_max_latency {
- /*
+ /**
* <p>Every frame has the requests immediately applied.</p>
* <p>Changing controls over multiple requests one after another will
* produce results that have those controls applied atomically
@@ -2714,7 +7604,7 @@
*/
ACAMERA_SYNC_MAX_LATENCY_PER_FRAME_CONTROL = 0,
- /*
+ /**
* <p>Each new frame has some subset (potentially the entire set)
* of the past requests applied to the camera settings.</p>
* <p>By submitting a series of identical requests, the camera device
diff --git a/include/camera/ndk/NdkCaptureRequest.h b/include/camera/ndk/NdkCaptureRequest.h
index d9fb164..e278196 100644
--- a/include/camera/ndk/NdkCaptureRequest.h
+++ b/include/camera/ndk/NdkCaptureRequest.h
@@ -14,6 +14,15 @@
* limitations under the License.
*/
+/**
+ * @addtogroup Camera
+ * @{
+ */
+
+/**
+ * @file NdkCaptureRequest.h
+ */
+
/*
* This file defines an NDK API.
* Do not remove methods.
@@ -95,3 +104,5 @@
#endif
#endif // _NDK_CAPTURE_REQUEST_H
+
+/** @} */
diff --git a/include/media/AudioIoDescriptor.h b/include/media/AudioIoDescriptor.h
index a4907cc..fed86c9 100644
--- a/include/media/AudioIoDescriptor.h
+++ b/include/media/AudioIoDescriptor.h
@@ -35,7 +35,7 @@
AudioIoDescriptor() :
mIoHandle(AUDIO_IO_HANDLE_NONE),
mSamplingRate(0), mFormat(AUDIO_FORMAT_DEFAULT), mChannelMask(AUDIO_CHANNEL_NONE),
- mFrameCount(0), mLatency(0)
+ mFrameCount(0), mFrameCountHAL(0), mLatency(0)
{
memset(&mPatch, 0, sizeof(struct audio_patch));
}
@@ -62,6 +62,7 @@
audio_format_t mFormat;
audio_channel_mask_t mChannelMask;
size_t mFrameCount;
+ size_t mFrameCountHAL;
uint32_t mLatency; // only valid for output
};
diff --git a/include/media/AudioSystem.h b/include/media/AudioSystem.h
index c9eac2e..585ef59 100644
--- a/include/media/AudioSystem.h
+++ b/include/media/AudioSystem.h
@@ -43,6 +43,8 @@
{
public:
+ // FIXME Declare in binder opcode order, similarly to IAudioFlinger.h and IAudioFlinger.cpp
+
/* These are static methods to control the system-wide AudioFlinger
* only privileged processes can have access to them
*/
@@ -117,8 +119,8 @@
// returns the audio HAL sample rate
static status_t getSamplingRate(audio_io_handle_t ioHandle,
uint32_t* samplingRate);
- // returns the number of frames per audio HAL buffer. Corresponds to
- // audio_stream->get_buffer_size()/audio_stream_out/in_frame_size()
+ // For output threads with a fast mixer, returns the number of frames per normal mixer buffer.
+ // For output threads without a fast mixer, or for input, this is the same as getFrameCountHAL().
static status_t getFrameCount(audio_io_handle_t ioHandle,
size_t* frameCount);
// returns the audio output latency in ms. Corresponds to
@@ -166,6 +168,12 @@
// Indicate JAVA services are ready (scheduling, power management ...)
static status_t systemReady();
+ // Returns the number of frames per audio HAL buffer.
+ // Corresponds to audio_stream->get_buffer_size()/audio_stream_in_frame_size() for input.
+ // See also getFrameCount().
+ static status_t getFrameCountHAL(audio_io_handle_t ioHandle,
+ size_t* frameCount);
+
// Events used to synchronize actions between audio sessions.
// For instance SYNC_EVENT_PRESENTATION_COMPLETE can be used to delay recording start until
// playback is complete on another audio session.
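
(A minimal sketch of comparing the two counts, assuming ioHandle is a valid
audio_io_handle_t for an output:)

    size_t frames = 0, framesHAL = 0;
    if (AudioSystem::getFrameCount(ioHandle, &frames) == OK &&
            AudioSystem::getFrameCountHAL(ioHandle, &framesHAL) == OK) {
        // With a fast mixer, frames is the normal-mixer buffer size while
        // framesHAL is the underlying HAL buffer size; otherwise they match.
    }
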
diff --git a/include/media/AudioTimestamp.h b/include/media/AudioTimestamp.h
index 969003c..44d6c0b 100644
--- a/include/media/AudioTimestamp.h
+++ b/include/media/AudioTimestamp.h
@@ -36,9 +36,17 @@
struct ExtendedTimestamp {
enum Location {
- LOCATION_CLIENT, // timestamp of last read frame from client-server track buffer
- LOCATION_SERVER, // timestamp of newest frame from client-server track buffer
+ LOCATION_INVALID = -1,
+ // Locations in the audio playback / record pipeline.
+ LOCATION_CLIENT, // timestamp of last read frame from client-server track buffer.
+ LOCATION_SERVER, // timestamp of newest frame from client-server track buffer.
LOCATION_KERNEL, // timestamp of newest frame in the kernel (alsa) buffer.
+
+ // Historical data: info when the kernel timestamp was OK (prior to the newest frame).
+ // This may be useful when the newest frame kernel timestamp is unavailable.
+ // Available for playback timestamps.
+ LOCATION_SERVER_LASTKERNELOK, // server timestamp at the prior time the kernel timestamp was OK.
+ LOCATION_KERNEL_LASTKERNELOK, // kernel timestamp at the prior time the kernel timestamp was OK.
LOCATION_MAX // for sizing arrays only
};
@@ -89,8 +97,10 @@
}
// Returns the best timestamp as judged from the closest-to-hw stage in the
- // pipeline with a valid timestamp.
- status_t getBestTimestamp(int64_t *position, int64_t *time, int timebase) const {
+ // pipeline with a valid timestamp. If the optional location parameter is non-null,
+ // it will be filled with the location where the time was obtained.
+ status_t getBestTimestamp(
+ int64_t *position, int64_t *time, int timebase, Location *location = nullptr) const {
if (position == nullptr || time == nullptr
|| timebase < 0 || timebase >= TIMEBASE_MAX) {
return BAD_VALUE;
@@ -98,22 +108,25 @@
// look for the closest-to-hw stage in the pipeline with a valid timestamp.
// We omit LOCATION_CLIENT as we prefer at least LOCATION_SERVER based accuracy
// when getting the best timestamp.
- for (int i = LOCATION_MAX - 1; i >= LOCATION_SERVER; --i) {
+ for (int i = LOCATION_KERNEL; i >= LOCATION_SERVER; --i) {
if (mTimeNs[i] > 0) {
*position = mPosition[i];
*time = mTimeNs[i] + mTimebaseOffset[timebase];
+ if (location != nullptr) {
+ *location = (Location)i;
+ }
return OK;
}
}
return INVALID_OPERATION;
}
- status_t getBestTimestamp(AudioTimestamp *timestamp) const {
+ status_t getBestTimestamp(AudioTimestamp *timestamp, Location *location = nullptr) const {
if (timestamp == nullptr) {
return BAD_VALUE;
}
int64_t position, time;
- if (getBestTimestamp(&position, &time, TIMEBASE_MONOTONIC) == OK) {
+ if (getBestTimestamp(&position, &time, TIMEBASE_MONOTONIC, location) == OK) {
timestamp->mPosition = position;
timestamp->mTime.tv_sec = time / 1000000000;
timestamp->mTime.tv_nsec = time - timestamp->mTime.tv_sec * 1000000000LL;
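
(Usage sketch for the new optional location out-parameter; ets is assumed to be
an ExtendedTimestamp already filled in by the framework.)

    int64_t position, timeNs;
    ExtendedTimestamp::Location loc = ExtendedTimestamp::LOCATION_INVALID;
    if (ets.getBestTimestamp(&position, &timeNs,
            ExtendedTimestamp::TIMEBASE_MONOTONIC, &loc) == OK) {
        // loc reports which pipeline stage (server or kernel) supplied the
        // timestamp, e.g. to judge its expected accuracy.
    }
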
diff --git a/include/media/AudioTrack.h b/include/media/AudioTrack.h
index eaaef4a..88c4e61 100644
--- a/include/media/AudioTrack.h
+++ b/include/media/AudioTrack.h
@@ -186,8 +186,21 @@
* and inform of marker, position updates, etc.
* user: Context for use by the callback receiver.
* notificationFrames: The callback function is called each time notificationFrames PCM
- * frames have been consumed from track input buffer.
- * This is expressed in units of frames at the initial source sample rate.
+ * frames have been consumed from track input buffer by server.
+ * Zero means to use a default value, which is typically:
+ * - fast tracks: HAL buffer size, even if track frameCount is larger
+ * - normal tracks: 1/2 of track frameCount
+ * A positive value means that many frames at initial source sample rate.
+ * A negative value for this parameter specifies the negative of the
+ * requested number of notifications (sub-buffers) in the entire buffer.
+ * For fast tracks, the FastMixer will process one sub-buffer at a time.
+ * The size of each sub-buffer is determined by the HAL.
+ * To get "double buffering", for example, one should pass -2.
+ * The minimum number of sub-buffers is 1 (expressed as -1),
+ * and the maximum number of sub-buffers is 8 (expressed as -8).
+ * Negative is only permitted for fast tracks, and if frameCount is zero.
+ * TODO It is ugly to overload a parameter in this way depending on
+ * whether it is positive, negative, or zero. Consider splitting apart.
* sessionId: Specific session ID, or zero to use default.
* transferType: How data is transferred to AudioTrack.
* offloadInfo: If not NULL, provides offload parameters for
@@ -201,6 +214,10 @@
binder to AudioFlinger.
It will return an error instead. The application will recreate
the track based on offloading or different channel configuration, etc.
+ * maxRequiredSpeed: For PCM tracks, this creates an appropriate buffer size that will allow
+ * maxRequiredSpeed playback. Values less than 1.0f and greater than
+ * AUDIO_TIMESTRETCH_SPEED_MAX will be clamped. For non-PCM tracks
+ * and direct or offloaded tracks, this parameter is ignored.
* threadCanCallJava: Not present in parameter list, and so is fixed at false.
*/
@@ -212,14 +229,15 @@
audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE,
callback_t cbf = NULL,
void* user = NULL,
- uint32_t notificationFrames = 0,
+ int32_t notificationFrames = 0,
audio_session_t sessionId = AUDIO_SESSION_ALLOCATE,
transfer_type transferType = TRANSFER_DEFAULT,
const audio_offload_info_t *offloadInfo = NULL,
int uid = -1,
pid_t pid = -1,
const audio_attributes_t* pAttributes = NULL,
- bool doNotReconnect = false);
+ bool doNotReconnect = false,
+ float maxRequiredSpeed = 1.0f);
/* Creates an audio track and registers it with AudioFlinger.
* With this constructor, the track is configured for static buffer mode.
@@ -241,14 +259,15 @@
audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE,
callback_t cbf = NULL,
void* user = NULL,
- uint32_t notificationFrames = 0,
+ int32_t notificationFrames = 0,
audio_session_t sessionId = AUDIO_SESSION_ALLOCATE,
transfer_type transferType = TRANSFER_DEFAULT,
const audio_offload_info_t *offloadInfo = NULL,
int uid = -1,
pid_t pid = -1,
const audio_attributes_t* pAttributes = NULL,
- bool doNotReconnect = false);
+ bool doNotReconnect = false,
+ float maxRequiredSpeed = 1.0f);
/* Terminates the AudioTrack and unregisters it from AudioFlinger.
* Also destroys all resources associated with the AudioTrack.
@@ -284,7 +303,7 @@
audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE,
callback_t cbf = NULL,
void* user = NULL,
- uint32_t notificationFrames = 0,
+ int32_t notificationFrames = 0,
const sp<IMemory>& sharedBuffer = 0,
bool threadCanCallJava = false,
audio_session_t sessionId = AUDIO_SESSION_ALLOCATE,
@@ -293,7 +312,8 @@
int uid = -1,
pid_t pid = -1,
const audio_attributes_t* pAttributes = NULL,
- bool doNotReconnect = false);
+ bool doNotReconnect = false,
+ float maxRequiredSpeed = 1.0f);
/* Result of constructing the AudioTrack. This must be checked for successful initialization
* before using any AudioTrack API (except for set()), because using
@@ -328,11 +348,17 @@
uint32_t channelCount() const { return mChannelCount; }
size_t frameCount() const { return mFrameCount; }
+ // TODO consider notificationFrames() if needed
+
/* Return effective size of audio buffer that an application writes to
* or a negative error if the track is uninitialized.
*/
ssize_t getBufferSizeInFrames();
+ /* Returns the buffer duration in microseconds at current playback rate.
+ */
+ status_t getBufferDurationInUs(int64_t *duration);
+
/* Set the effective size of audio buffer that an application writes to.
* This is used to determine the amount of available room in the buffer,
* which determines when a write will block.
@@ -916,6 +942,7 @@
mutable uint32_t mSampleRate; // mutable because getSampleRate() can update it
uint32_t mOriginalSampleRate;
AudioPlaybackRate mPlaybackRate;
+ float mMaxRequiredSpeed; // use PCM buffer size to allow this speed
// Corresponds to current IAudioTrack, value is reported back by AudioFlinger to the client.
// This allocated buffer size is maintained by the proxy.
@@ -965,9 +992,16 @@
void* mUserData;
// for notification APIs
+
+ // next 2 fields are const after constructor or set()
uint32_t mNotificationFramesReq; // requested number of frames between each
// notification callback,
// at initial source sample rate
+ uint32_t mNotificationsPerBufferReq;
+ // requested number of notifications per buffer,
+ // currently only used for fast tracks with
+ // default track buffer size
+
uint32_t mNotificationFramesAct; // actual number of frames between each
// notification callback,
// at initial source sample rate
@@ -1012,6 +1046,7 @@
bool mTimestampStartupGlitchReported; // reduce log spam
bool mRetrogradeMotionReported; // reduce log spam
AudioTimestamp mPreviousTimestamp; // used to detect retrograde motion
+ ExtendedTimestamp::Location mPreviousLocation; // location used for previous timestamp
uint32_t mUnderrunCountOffset; // updated when restoring tracks
diff --git a/media/libmediaplayerservice/Crypto.h b/include/media/Crypto.h
similarity index 100%
rename from media/libmediaplayerservice/Crypto.h
rename to include/media/Crypto.h
diff --git a/media/libmediaplayerservice/Drm.h b/include/media/Drm.h
similarity index 100%
rename from media/libmediaplayerservice/Drm.h
rename to include/media/Drm.h
diff --git a/services/mediadrm/DrmSessionClientInterface.h b/include/media/DrmSessionClientInterface.h
similarity index 100%
rename from services/mediadrm/DrmSessionClientInterface.h
rename to include/media/DrmSessionClientInterface.h
diff --git a/media/libmediaplayerservice/DrmSessionManager.h b/include/media/DrmSessionManager.h
similarity index 100%
rename from media/libmediaplayerservice/DrmSessionManager.h
rename to include/media/DrmSessionManager.h
diff --git a/include/media/IAudioFlinger.h b/include/media/IAudioFlinger.h
index e48aa1c..1ade4ba 100644
--- a/include/media/IAudioFlinger.h
+++ b/include/media/IAudioFlinger.h
@@ -103,6 +103,9 @@
* and therefore can be cached.
*/
virtual uint32_t sampleRate(audio_io_handle_t ioHandle) const = 0;
+
+ // reserved; formerly channelCount()
+
virtual audio_format_t format(audio_io_handle_t output) const = 0;
virtual size_t frameCount(audio_io_handle_t ioHandle) const = 0;
@@ -247,6 +250,9 @@
/* Indicate JAVA services are ready (scheduling, power management ...) */
virtual status_t systemReady() = 0;
+
+ // Returns the number of frames per audio HAL buffer.
+ virtual size_t frameCountHAL(audio_io_handle_t ioHandle) const = 0;
};
diff --git a/include/media/IMediaExtractor.h b/include/media/IMediaExtractor.h
index d9fcd89..34b15e9 100644
--- a/include/media/IMediaExtractor.h
+++ b/include/media/IMediaExtractor.h
@@ -19,7 +19,7 @@
#define IMEDIA_EXTRACTOR_BASE_H_
#include <media/IMediaSource.h>
-#include <media/IDataSource.h>
+#include <media/stagefright/DataSource.h>
namespace android {
@@ -72,7 +72,7 @@
void registerMediaExtractor(
const sp<IMediaExtractor> &extractor,
- const sp<IDataSource> &source,
+ const sp<DataSource> &source,
const char *mime);
void registerMediaSource(
diff --git a/include/media/IMediaPlayerService.h b/include/media/IMediaPlayerService.h
index e5d3cda..8266b0b 100644
--- a/include/media/IMediaPlayerService.h
+++ b/include/media/IMediaPlayerService.h
@@ -31,8 +31,6 @@
namespace android {
-struct ICrypto;
-struct IDrm;
struct IHDCP;
struct IMediaCodecList;
struct IMediaHTTPService;
@@ -52,8 +50,6 @@
virtual sp<IMediaPlayer> create(const sp<IMediaPlayerClient>& client,
audio_session_t audioSessionId = AUDIO_SESSION_ALLOCATE) = 0;
virtual sp<IOMX> getOMX() = 0;
- virtual sp<ICrypto> makeCrypto() = 0;
- virtual sp<IDrm> makeDrm() = 0;
virtual sp<IHDCP> makeHDCP(bool createEncryptionModule) = 0;
virtual sp<IMediaCodecList> getCodecList() const = 0;
diff --git a/include/media/IOMX.h b/include/media/IOMX.h
index 19c7955..15d691f 100644
--- a/include/media/IOMX.h
+++ b/include/media/IOMX.h
@@ -37,6 +37,7 @@
class IMemory;
class IOMXObserver;
class IOMXRenderer;
+class NativeHandle;
class Surface;
class IOMX : public IInterface {
@@ -118,6 +119,10 @@
node_id node, OMX_U32 port_index,
const sp<GraphicBuffer> &graphicBuffer, buffer_id buffer) = 0;
+ virtual status_t updateNativeHandleInMeta(
+ node_id node, OMX_U32 port_index,
+ const sp<NativeHandle> &nativeHandle, buffer_id buffer) = 0;
+
// This will set *type to resulting metadata buffer type on OMX error (not on binder error) as
// well as on success.
virtual status_t createInputSurface(
@@ -145,7 +150,7 @@
// pointer is just that, a pointer into local address space.
virtual status_t allocateSecureBuffer(
node_id node, OMX_U32 port_index, size_t size,
- buffer_id *buffer, void **buffer_data, native_handle_t **native_handle) = 0;
+ buffer_id *buffer, void **buffer_data, sp<NativeHandle> *native_handle) = 0;
// Allocate an OMX buffer of size |allotedSize|. Use |params| as the backup buffer, which
// may be larger.
@@ -272,17 +277,18 @@
OMX_U32 mLevel;
};
-} // namespace android
-
-inline static const char *asString(android::MetadataBufferType i, const char *def = "??") {
+inline static const char *asString(MetadataBufferType i, const char *def = "??") {
using namespace android;
switch (i) {
case kMetadataBufferTypeCameraSource: return "CameraSource";
case kMetadataBufferTypeGrallocSource: return "GrallocSource";
case kMetadataBufferTypeANWBuffer: return "ANWBuffer";
+ case kMetadataBufferTypeNativeHandleSource: return "NativeHandleSource";
case kMetadataBufferTypeInvalid: return "Invalid";
default: return def;
}
}
+} // namespace android
+
#endif // ANDROID_IOMX_H_
diff --git a/include/media/MediaPlayerInterface.h b/include/media/MediaPlayerInterface.h
index 54862d1..4977efd 100644
--- a/include/media/MediaPlayerInterface.h
+++ b/include/media/MediaPlayerInterface.h
@@ -104,6 +104,7 @@
virtual audio_session_t getSessionId() const = 0;
virtual audio_stream_type_t getAudioStreamType() const = 0;
virtual uint32_t getSampleRate() const = 0;
+ virtual int64_t getBufferDurationInUs() const = 0;
// If no callback is specified, use the "write" API below to submit
// audio data.
diff --git a/media/libmediaplayerservice/SharedLibrary.h b/include/media/SharedLibrary.h
similarity index 100%
rename from media/libmediaplayerservice/SharedLibrary.h
rename to include/media/SharedLibrary.h
diff --git a/include/media/ToneGenerator.h b/include/media/ToneGenerator.h
index 8406ed6..c41c686 100644
--- a/include/media/ToneGenerator.h
+++ b/include/media/ToneGenerator.h
@@ -193,12 +193,15 @@
TONE_JAPAN_DIAL, // Dial tone: 400Hz, continuous
TONE_JAPAN_BUSY, // Busy tone: 400Hz, 500ms ON, 500ms OFF...
TONE_JAPAN_RADIO_ACK, // Radio path acknowledgment: 400Hz, 1s ON, 2s OFF...
+ // UK Supervisory tones
+ TONE_UK_RINGTONE, // Ring Tone: A 400Hz + 450Hz tone repeated in a 0.4s on, 0.2s off, 0.4s on, 2.0s off pattern.
NUM_ALTERNATE_TONES
};
enum region {
ANSI,
JAPAN,
+ UK,
CEPT,
NUM_REGIONS
};
diff --git a/include/media/stagefright/ACodec.h b/include/media/stagefright/ACodec.h
index b22e0b4..d14bb7b 100644
--- a/include/media/stagefright/ACodec.h
+++ b/include/media/stagefright/ACodec.h
@@ -271,6 +271,7 @@
bool mFatalError;
bool mShutdownInProgress;
bool mExplicitShutdown;
+ bool mIsLegacyVP9Decoder;
// If "mKeepComponentAllocated" we only transition back to Loaded state
// and do not release the component instance.
@@ -425,10 +426,10 @@
// gets index or sets it to 0 on error. Returns error from codec.
status_t initDescribeHDRStaticInfoIndex();
- // sets HDR static information for the decoder based on |configFormat|, and
- // set resulting HDRStaticInfo config into |outputFormat|. Returns error from the codec.
- status_t setHDRStaticInfoForVideoDecoder(
- const sp<AMessage> &configFormat, sp<AMessage> &outputFormat);
+ // sets HDR static metadata for the video encoder/decoder based on |configFormat|, and
+ // sets resulting HDRStaticInfo config into |outputFormat|. Returns error from the codec.
+ status_t setHDRStaticInfoForVideoCodec(
+ OMX_U32 portIndex, const sp<AMessage> &configFormat, sp<AMessage> &outputFormat);
// sets |params|. Returns the codec error.
status_t setHDRStaticInfo(const DescribeHDRStaticInfoParams &params);
@@ -436,8 +437,8 @@
// gets |params|. Returns the codec error.
status_t getHDRStaticInfo(DescribeHDRStaticInfoParams &params);
- // gets HDR static information for the video decoder port and sets them into |format|.
- status_t getHDRStaticInfoForVideoDecoder(sp<AMessage> &format);
+ // gets HDR static information for the video encoder/decoder port and sets them into |format|.
+ status_t getHDRStaticInfoForVideoCodec(OMX_U32 portIndex, sp<AMessage> &format);
typedef struct drcParams {
int32_t drcCut;
diff --git a/include/media/stagefright/CameraSource.h b/include/media/stagefright/CameraSource.h
index c732b41..c2e75a6 100644
--- a/include/media/stagefright/CameraSource.h
+++ b/include/media/stagefright/CameraSource.h
@@ -28,6 +28,7 @@
#include <utils/List.h>
#include <utils/RefBase.h>
#include <utils/String16.h>
+#include <MetadataBufferType.h>
namespace android {
@@ -118,11 +119,11 @@
* Tell whether this camera source stores meta data or real YUV
* frame data in video buffers.
*
- * @return true if meta data is stored in the video
- * buffers; false if real YUV data is stored in
+ * @return a valid type if meta data is stored in the video
+ * buffers; kMetadataBufferTypeInvalid if real YUV data is stored in
* the video buffers.
*/
- bool isMetaDataStoredInVideoBuffers() const;
+ MetadataBufferType metaDataStoredInVideoBuffers() const;
virtual void signalBufferReturned(MediaBuffer* buffer);
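
(Sketch of how a caller might branch on the new return type; cameraSource is
assumed to be a valid sp<CameraSource>.)

    MetadataBufferType type = cameraSource->metaDataStoredInVideoBuffers();
    if (type == kMetadataBufferTypeNativeHandleSource) {
        // Buffers carry VideoNativeHandleMetadata; return them with
        // releaseRecordingFrameHandle() when done.
    } else if (type == kMetadataBufferTypeInvalid) {
        // Buffers carry real YUV frame data.
    }
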
@@ -138,6 +139,8 @@
ProxyListener(const sp<CameraSource>& source);
virtual void dataCallbackTimestamp(int64_t timestampUs, int32_t msgType,
const sp<IMemory> &data);
+ virtual void recordingFrameHandleCallbackTimestamp(int64_t timestampUs,
+ native_handle_t* handle);
private:
sp<CameraSource> mSource;
@@ -209,6 +212,7 @@
virtual status_t startCameraRecording();
virtual void releaseRecordingFrame(const sp<IMemory>& frame);
+ virtual void releaseRecordingFrameHandle(native_handle_t* handle);
// Returns true if need to skip the current frame.
// Called from dataCallbackTimestamp.
@@ -220,6 +224,9 @@
virtual void dataCallbackTimestamp(int64_t timestampUs, int32_t msgType,
const sp<IMemory> &data);
+ virtual void recordingFrameHandleCallbackTimestamp(int64_t timestampUs,
+ native_handle_t* handle);
+
// Process a buffer item received in BufferQueueListener.
virtual void processBufferQueueFrame(BufferItem& buffer);
@@ -244,6 +251,8 @@
// The mode video buffers are received from camera. One of VIDEO_BUFFER_MODE_*.
int32_t mVideoBufferMode;
+ static const uint32_t kDefaultVideoBufferCount = 32;
+
/**
* The following variables are used in VIDEO_BUFFER_MODE_BUFFER_QUEUE mode.
*/
@@ -264,6 +273,7 @@
void releaseQueuedFrames();
void releaseOneRecordingFrame(const sp<IMemory>& frame);
+ void createVideoBufferMemoryHeap(size_t size, uint32_t bufferCount);
status_t init(const sp<hardware::ICamera>& camera, const sp<ICameraRecordingProxy>& proxy,
int32_t cameraId, const String16& clientName, uid_t clientUid, pid_t clientPid,
diff --git a/include/media/stagefright/CameraSourceTimeLapse.h b/include/media/stagefright/CameraSourceTimeLapse.h
index f17ec51..871c1d9 100644
--- a/include/media/stagefright/CameraSourceTimeLapse.h
+++ b/include/media/stagefright/CameraSourceTimeLapse.h
@@ -145,6 +145,14 @@
virtual void dataCallbackTimestamp(int64_t timestampUs, int32_t msgType,
const sp<IMemory> &data);
+ // In the video camera case calls skipFrameAndModifyTimeStamp() to modify
+ // timestamp and set mSkipCurrentFrame.
+ // Then it calls the base CameraSource::recordingFrameHandleCallbackTimestamp()
+ // This will be called in VIDEO_BUFFER_MODE_DATA_CALLBACK_METADATA mode when
+ // the metadata is VideoNativeHandleMetadata.
+ virtual void recordingFrameHandleCallbackTimestamp(int64_t timestampUs,
+ native_handle_t* handle);
+
// Process a buffer item received in CameraSource::BufferQueueListener.
// This will be called in VIDEO_BUFFER_MODE_BUFFER_QUEUE mode.
virtual void processBufferQueueFrame(BufferItem& buffer);
diff --git a/include/media/stagefright/DataSource.h b/include/media/stagefright/DataSource.h
index c5df1f6..0254545 100644
--- a/include/media/stagefright/DataSource.h
+++ b/include/media/stagefright/DataSource.h
@@ -19,7 +19,7 @@
#define DATA_SOURCE_H_
#include <sys/types.h>
-
+#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/MediaErrors.h>
#include <utils/Errors.h>
#include <utils/KeyedVector.h>
@@ -71,6 +71,20 @@
bool getUInt32(off64_t offset, uint32_t *x);
bool getUInt64(off64_t offset, uint64_t *x);
+ // Reads in "count" entries of type T into vector *x.
+ // Returns true if "count" entries can be read.
+ // If fewer than "count" entries can be read, return false. In this case,
+ // the output vector *x will still have those entries that were read. Call
+ // x->size() to obtain the number of entries read.
+ // The optional parameter chunkSize specifies how many entries should be
+ // read from the data source at one time into a temporary buffer. Increasing
+ // chunkSize can improve the performance at the cost of extra memory usage.
+ // The default value for chunkSize is set to read at least 4k bytes at a
+ // time, depending on sizeof(T).
+ template <typename T>
+ bool getVector(off64_t offset, Vector<T>* x, size_t count,
+ size_t chunkSize = (4095 / sizeof(T)) + 1);
+
// May return ERROR_UNSUPPORTED.
virtual status_t getSize(off64_t *size);
@@ -127,6 +141,51 @@
DataSource &operator=(const DataSource &);
};
+template <typename T>
+bool DataSource::getVector(off64_t offset, Vector<T>* x, size_t count,
+ size_t chunkSize)
+{
+ x->clear();
+ if (chunkSize == 0) {
+ return false;
+ }
+ if (count == 0) {
+ return true;
+ }
+
+ T tmp[chunkSize];
+ ssize_t numBytesRead;
+ size_t numBytesPerChunk = chunkSize * sizeof(T);
+ size_t i;
+
+ for (i = 0; i + chunkSize < count; i += chunkSize) {
+ // This loops is executed when more than chunkSize records need to be
+ // read.
+ numBytesRead = this->readAt(offset, (void*)&tmp, numBytesPerChunk);
+ if (numBytesRead == -1) { // If readAt() returns -1, there is an error.
+ return false;
+ }
+ if (numBytesRead < numBytesPerChunk) {
+ // This case is triggered when the stream ends before the whole
+ // chunk is read.
+ x->appendArray(tmp, (size_t)numBytesRead / sizeof(T));
+ return false;
+ }
+ x->appendArray(tmp, chunkSize);
+ offset += numBytesPerChunk;
+ }
+
+ // There are (count - i) more records to read.
+ // Right now, (count - i) <= chunkSize.
+ // We do the same thing as above, but with chunkSize replaced by count - i.
+ numBytesRead = this->readAt(offset, (void*)&tmp, (count - i) * sizeof(T));
+ if (numBytesRead == -1) {
+ return false;
+ }
+ x->appendArray(tmp, (size_t)numBytesRead / sizeof(T));
+ return x->size() == count;
+}
+
} // namespace android
#endif // DATA_SOURCE_H_
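
(Usage sketch for getVector; source is assumed to be a valid sp<DataSource>.)

    Vector<uint32_t> entries;
    if (!source->getVector(1024 /* offset */, &entries, 1000 /* count */)) {
        // Short read or I/O error; entries.size() gives how many were read.
    }
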
diff --git a/include/media/stagefright/MediaCodecSource.h b/include/media/stagefright/MediaCodecSource.h
index 035e8ae..cc62786 100644
--- a/include/media/stagefright/MediaCodecSource.h
+++ b/include/media/stagefright/MediaCodecSource.h
@@ -37,7 +37,6 @@
public MediaBufferObserver {
enum FlagBits {
FLAG_USE_SURFACE_INPUT = 1,
- FLAG_USE_METADATA_INPUT = 2,
FLAG_PREFER_SOFTWARE_CODEC = 4, // used for testing only
};
diff --git a/include/media/stagefright/SurfaceMediaSource.h b/include/media/stagefright/SurfaceMediaSource.h
index 2177c00..ca3a3bf 100644
--- a/include/media/stagefright/SurfaceMediaSource.h
+++ b/include/media/stagefright/SurfaceMediaSource.h
@@ -25,6 +25,8 @@
#include <media/stagefright/MediaSource.h>
#include <media/stagefright/MediaBuffer.h>
+#include <MetadataBufferType.h>
+
#include "foundation/ABase.h"
namespace android {
@@ -109,9 +111,9 @@
void dump(String8& result, const char* prefix, char* buffer,
size_t SIZE) const;
- // isMetaDataStoredInVideoBuffers tells the encoder whether we will
- // pass metadata through the buffers. Currently, it is force set to true
- bool isMetaDataStoredInVideoBuffers() const;
+ // metaDataStoredInVideoBuffers tells the encoder what kind of metadata
+ // is passed through the buffers. Currently, it is set to ANWBuffer
+ MetadataBufferType metaDataStoredInVideoBuffers() const;
sp<IGraphicBufferProducer> getProducer() const { return mProducer; }
@@ -234,6 +236,9 @@
Condition mMediaBuffersAvailableCondition;
+ // Allocate and return a new MediaBuffer and pass the ANW buffer as metadata into it.
+ void passMetadataBuffer_l(MediaBuffer **buffer, ANativeWindowBuffer *bufferHandle) const;
+
// Avoid copying and equating and default constructor
DISALLOW_EVIL_CONSTRUCTORS(SurfaceMediaSource);
};
diff --git a/include/media/stagefright/foundation/AMessage.h b/include/media/stagefright/foundation/AMessage.h
index 09d2ad8..87c32a6 100644
--- a/include/media/stagefright/foundation/AMessage.h
+++ b/include/media/stagefright/foundation/AMessage.h
@@ -62,7 +62,29 @@
AMessage();
AMessage(uint32_t what, const sp<const AHandler> &handler);
- static sp<AMessage> FromParcel(const Parcel &parcel);
+ // Construct an AMessage from a parcel.
+ // maxNestingLevel determines how many levels an AMessage can be nested
+ // inside another AMessage. The default value here is arbitrarily set to 255.
+ // FromParcel() returns NULL on error, which occurs when the input parcel
+ // contains
+ // - an AMessage nested deeper than maxNestingLevel; or
+ // - an item whose type is not recognized by this function.
+ // Types currently recognized by this function are:
+ // Item types set/find function suffixes
+ // ==========================================
+ // int32_t Int32
+ // int64_t Int64
+ // size_t Size
+ // float Float
+ // double Double
+ // AString String
+ // AMessage Message
+ static sp<AMessage> FromParcel(const Parcel &parcel,
+ size_t maxNestingLevel = 255);
+
+ // Write this AMessage to a parcel.
+ // All items in the AMessage must have types that are recognized by
+ // FromParcel(); otherwise, a TRESPASS error will occur.
void writeToParcel(Parcel *parcel) const;
void setWhat(uint32_t what);
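For illustration, a round trip through a Parcel using the new maxNestingLevel parameter might look like the following sketch (local names and values are hypothetical):

    sp<AMessage> msg = new AMessage();
    msg->setInt32("width", 1920);
    msg->setString("mime", "video/avc");

    Parcel parcel;
    msg->writeToParcel(&parcel);

    parcel.setDataPosition(0);
    // Reject messages nested more than 16 levels deep instead of the
    // default of 255.
    sp<AMessage> copy = AMessage::FromParcel(parcel, 16 /* maxNestingLevel */);
    if (copy == NULL) {
        // The parcel was nested too deeply, or contained an item type
        // that FromParcel() does not recognize.
    }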
diff --git a/include/ndk/NdkImage.h b/include/ndk/NdkImage.h
index 5c92294..eab7ead 100644
--- a/include/ndk/NdkImage.h
+++ b/include/ndk/NdkImage.h
@@ -14,6 +14,15 @@
* limitations under the License.
*/
+/**
+ * @addtogroup Media Camera
+ * @{
+ */
+
+/**
+ * @file NdkImage.h
+ */
+
/*
* This file defines an NDK API.
* Do not remove methods.
@@ -44,7 +53,8 @@
AIMAGE_FORMAT_RAW10 = 0x25,
AIMAGE_FORMAT_RAW12 = 0x26,
AIMAGE_FORMAT_DEPTH16 = 0x44363159,
- AIMAGE_FORMAT_DEPTH_POINT_CLOUD = 0x101
+ AIMAGE_FORMAT_DEPTH_POINT_CLOUD = 0x101,
+ AIMAGE_FORMAT_PRIVATE = 0x22 ///< Not supported by AImageReader yet
};
typedef struct AImageCropRect {
@@ -97,3 +107,5 @@
#endif
#endif //_NDK_IMAGE_H
+
+/** @} */
diff --git a/include/ndk/NdkImageReader.h b/include/ndk/NdkImageReader.h
index 041c378..9e7483d 100644
--- a/include/ndk/NdkImageReader.h
+++ b/include/ndk/NdkImageReader.h
@@ -14,6 +14,15 @@
* limitations under the License.
*/
+/**
+ * @addtogroup Media Camera
+ * @{
+ */
+
+/**
+ * @file NdkImageReader.h
+ */
+
/*
* This file defines an NDK API.
* Do not remove methods.
@@ -75,3 +84,5 @@
#endif
#endif //_NDK_IMAGE_READER_H
+
+/** @} */
diff --git a/media/audioserver/audioserver.rc b/media/audioserver/audioserver.rc
index 1b39c8d..2409157 100644
--- a/media/audioserver/audioserver.rc
+++ b/media/audioserver/audioserver.rc
@@ -4,3 +4,4 @@
# media gid needed for /dev/fm (radio) and for /data/misc/media (tee)
group audio camera drmrpc inet media mediadrm net_bt net_bt_admin net_bw_acct
ioprio rt 4
+ writepid /dev/cpuset/foreground/tasks
diff --git a/media/libmedia/AudioSystem.cpp b/media/libmedia/AudioSystem.cpp
index 3a5dee6..bbdf65e 100644
--- a/media/libmedia/AudioSystem.cpp
+++ b/media/libmedia/AudioSystem.cpp
@@ -104,6 +104,8 @@
return DEAD_OBJECT;
}
+// FIXME Declare in binder opcode order, similarly to IAudioFlinger.h and IAudioFlinger.cpp
+
status_t AudioSystem::muteMicrophone(bool state)
{
const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
@@ -429,6 +431,27 @@
return af->systemReady();
}
+status_t AudioSystem::getFrameCountHAL(audio_io_handle_t ioHandle,
+ size_t* frameCount)
+{
+ const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+ if (af == 0) return PERMISSION_DENIED;
+ sp<AudioIoDescriptor> desc = getIoDescriptor(ioHandle);
+ if (desc == 0) {
+ *frameCount = af->frameCountHAL(ioHandle);
+ } else {
+ *frameCount = desc->mFrameCountHAL;
+ }
+ if (*frameCount == 0) {
+ ALOGE("AudioSystem::getFrameCountHAL failed for ioHandle %d", ioHandle);
+ return BAD_VALUE;
+ }
+
+ ALOGV("getFrameCountHAL() ioHandle %d, frameCount %zu", ioHandle, *frameCount);
+
+ return NO_ERROR;
+}
+
// ---------------------------------------------------------------------------
@@ -528,10 +551,10 @@
}
}
ALOGV("ioConfigChanged() new config for %s %d samplingRate %u, format %#x "
- "channel mask %#x frameCount %zu deviceId %d",
+ "channel mask %#x frameCount %zu frameCountHAL %zu deviceId %d",
event == AUDIO_OUTPUT_CONFIG_CHANGED ? "output" : "input",
ioDesc->mIoHandle, ioDesc->mSamplingRate, ioDesc->mFormat,
- ioDesc->mChannelMask, ioDesc->mFrameCount, ioDesc->getDeviceId());
+ ioDesc->mChannelMask, ioDesc->mFrameCount, ioDesc->mFrameCountHAL, ioDesc->getDeviceId());
} break;
}
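A minimal caller-side sketch for the new AudioSystem::getFrameCountHAL(); "output" is assumed to be a valid audio_io_handle_t obtained elsewhere.

    size_t halFrames = 0;
    if (AudioSystem::getFrameCountHAL(output, &halFrames) == NO_ERROR) {
        // halFrames is the HAL buffer size in frames, taken from the cached
        // AudioIoDescriptor when available, otherwise queried from AudioFlinger.
    }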
diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp
index a4cc3d7..22a5acd 100644
--- a/media/libmedia/AudioTrack.cpp
+++ b/media/libmedia/AudioTrack.cpp
@@ -90,16 +90,24 @@
// TODO: Move to a common library
static size_t calculateMinFrameCount(
uint32_t afLatencyMs, uint32_t afFrameCount, uint32_t afSampleRate,
- uint32_t sampleRate, float speed)
+ uint32_t sampleRate, float speed /*, uint32_t notificationsPerBufferReq*/)
{
// Ensure that buffer depth covers at least audio hardware latency
uint32_t minBufCount = afLatencyMs / ((1000 * afFrameCount) / afSampleRate);
if (minBufCount < 2) {
minBufCount = 2;
}
+#if 0
+ // The notificationsPerBufferReq parameter is not yet used for non-fast tracks,
+ // but keeping the code here to make it easier to add later.
+ if (minBufCount < notificationsPerBufferReq) {
+ minBufCount = notificationsPerBufferReq;
+ }
+#endif
ALOGV("calculateMinFrameCount afLatency %u afFrameCount %u afSampleRate %u "
- "sampleRate %u speed %f minBufCount: %u",
- afLatencyMs, afFrameCount, afSampleRate, sampleRate, speed, minBufCount);
+ "sampleRate %u speed %f minBufCount: %u" /*" notificationsPerBufferReq %u"*/,
+ afLatencyMs, afFrameCount, afSampleRate, sampleRate, speed, minBufCount
+ /*, notificationsPerBufferReq*/);
return minBufCount * sourceFramesNeededWithTimestretch(
sampleRate, afFrameCount, afSampleRate, speed);
}
@@ -144,7 +152,8 @@
// When called from createTrack, speed is 1.0f (normal speed).
// This is rechecked again on setting playback rate (TODO: on setting sample rate, too).
- *frameCount = calculateMinFrameCount(afLatency, afFrameCount, afSampleRate, sampleRate, 1.0f);
+ *frameCount = calculateMinFrameCount(afLatency, afFrameCount, afSampleRate, sampleRate, 1.0f
+ /*, 0 notificationsPerBufferReq*/);
// The formula above should always produce a non-zero value under normal circumstances:
// AudioTrack.SAMPLE_RATE_HZ_MIN <= sampleRate <= AudioTrack.SAMPLE_RATE_HZ_MAX.
@@ -184,14 +193,15 @@
audio_output_flags_t flags,
callback_t cbf,
void* user,
- uint32_t notificationFrames,
+ int32_t notificationFrames,
audio_session_t sessionId,
transfer_type transferType,
const audio_offload_info_t *offloadInfo,
int uid,
pid_t pid,
const audio_attributes_t* pAttributes,
- bool doNotReconnect)
+ bool doNotReconnect,
+ float maxRequiredSpeed)
: mStatus(NO_INIT),
mState(STATE_STOPPED),
mPreviousPriority(ANDROID_PRIORITY_NORMAL),
@@ -202,7 +212,7 @@
mStatus = set(streamType, sampleRate, format, channelMask,
frameCount, flags, cbf, user, notificationFrames,
0 /*sharedBuffer*/, false /*threadCanCallJava*/, sessionId, transferType,
- offloadInfo, uid, pid, pAttributes, doNotReconnect);
+ offloadInfo, uid, pid, pAttributes, doNotReconnect, maxRequiredSpeed);
}
AudioTrack::AudioTrack(
@@ -214,14 +224,15 @@
audio_output_flags_t flags,
callback_t cbf,
void* user,
- uint32_t notificationFrames,
+ int32_t notificationFrames,
audio_session_t sessionId,
transfer_type transferType,
const audio_offload_info_t *offloadInfo,
int uid,
pid_t pid,
const audio_attributes_t* pAttributes,
- bool doNotReconnect)
+ bool doNotReconnect,
+ float maxRequiredSpeed)
: mStatus(NO_INIT),
mState(STATE_STOPPED),
mPreviousPriority(ANDROID_PRIORITY_NORMAL),
@@ -232,7 +243,7 @@
mStatus = set(streamType, sampleRate, format, channelMask,
0 /*frameCount*/, flags, cbf, user, notificationFrames,
sharedBuffer, false /*threadCanCallJava*/, sessionId, transferType, offloadInfo,
- uid, pid, pAttributes, doNotReconnect);
+ uid, pid, pAttributes, doNotReconnect, maxRequiredSpeed);
}
AudioTrack::~AudioTrack()
@@ -272,7 +283,7 @@
audio_output_flags_t flags,
callback_t cbf,
void* user,
- uint32_t notificationFrames,
+ int32_t notificationFrames,
const sp<IMemory>& sharedBuffer,
bool threadCanCallJava,
audio_session_t sessionId,
@@ -281,10 +292,11 @@
int uid,
pid_t pid,
const audio_attributes_t* pAttributes,
- bool doNotReconnect)
+ bool doNotReconnect,
+ float maxRequiredSpeed)
{
ALOGV("set(): streamType %d, sampleRate %u, format %#x, channelMask %#x, frameCount %zu, "
- "flags #%x, notificationFrames %u, sessionId %d, transferType %d, uid %d, pid %d",
+ "flags #%x, notificationFrames %d, sessionId %d, transferType %d, uid %d, pid %d",
streamType, sampleRate, format, channelMask, frameCount, flags, notificationFrames,
sessionId, transferType, uid, pid);
@@ -422,6 +434,8 @@
mSampleRate = sampleRate;
mOriginalSampleRate = sampleRate;
mPlaybackRate = AUDIO_PLAYBACK_RATE_DEFAULT;
+ // 1.0 <= mMaxRequiredSpeed <= AUDIO_TIMESTRETCH_SPEED_MAX
+ mMaxRequiredSpeed = min(max(maxRequiredSpeed, 1.0f), AUDIO_TIMESTRETCH_SPEED_MAX);
// Make copy of input parameter offloadInfo so that in the future:
// (a) createTrack_l doesn't need it as an input parameter
@@ -438,7 +452,29 @@
mSendLevel = 0.0f;
// mFrameCount is initialized in createTrack_l
mReqFrameCount = frameCount;
- mNotificationFramesReq = notificationFrames;
+ if (notificationFrames >= 0) {
+ mNotificationFramesReq = notificationFrames;
+ mNotificationsPerBufferReq = 0;
+ } else {
+ if (!(flags & AUDIO_OUTPUT_FLAG_FAST)) {
+ ALOGE("notificationFrames=%d not permitted for non-fast track",
+ notificationFrames);
+ return BAD_VALUE;
+ }
+ if (frameCount > 0) {
+ ALOGE("notificationFrames=%d not permitted with non-zero frameCount=%zu",
+ notificationFrames, frameCount);
+ return BAD_VALUE;
+ }
+ mNotificationFramesReq = 0;
+ const uint32_t minNotificationsPerBuffer = 1;
+ const uint32_t maxNotificationsPerBuffer = 8;
+ mNotificationsPerBufferReq = min(maxNotificationsPerBuffer,
+ max((uint32_t) -notificationFrames, minNotificationsPerBuffer));
+ ALOGW_IF(mNotificationsPerBufferReq != (uint32_t) -notificationFrames,
+ "notificationFrames=%d clamped to the range -%u to -%u",
+ notificationFrames, minNotificationsPerBuffer, maxNotificationsPerBuffer);
+ }
mNotificationFramesAct = 0;
if (sessionId == AUDIO_SESSION_ALLOCATE) {
mSessionId = (audio_session_t) AudioSystem::newAudioUniqueId(AUDIO_UNIQUE_ID_USE_SESSION);
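In other words, the sign of notificationFrames now selects between two request modes; a sketch of the interpretation implemented in set() above:

    // notificationFrames >= 0 : notification period, in frames (0 = default).
    // notificationFrames < 0  : -(notifications per track buffer); only
    //   permitted for AUDIO_OUTPUT_FLAG_FAST tracks with frameCount == 0,
    //   and clamped to the range 1..8.
    int32_t notificationFrames = -4; // request 4 notifications per buffer
    // For a fast track this yields mNotificationsPerBufferReq == 4; for a
    // normal track, set() returns BAD_VALUE.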
@@ -499,6 +535,7 @@
mPreviousTimestampValid = false;
mTimestampStartupGlitchReported = false;
mRetrogradeMotionReported = false;
+ mPreviousLocation = ExtendedTimestamp::LOCATION_INVALID;
mUnderrunCountOffset = 0;
mFramesWritten = 0;
mFramesWrittenServerOffset = 0;
@@ -531,6 +568,7 @@
mPreviousTimestampValid = false;
mTimestampStartupGlitchReported = false;
mRetrogradeMotionReported = false;
+ mPreviousLocation = ExtendedTimestamp::LOCATION_INVALID;
// read last server side position change via timestamp.
ExtendedTimestamp ets;
@@ -824,6 +862,9 @@
if (mFlags & AUDIO_OUTPUT_FLAG_FAST) {
return INVALID_OPERATION;
}
+
+ ALOGV("setPlaybackRate (input): mSampleRate:%u mSpeed:%f mPitch:%f",
+ mSampleRate, playbackRate.mSpeed, playbackRate.mPitch);
// pitch is emulated by adjusting speed and sampleRate
const uint32_t effectiveRate = adjustSampleRate(mSampleRate, playbackRate.mPitch);
const float effectiveSpeed = adjustSpeed(playbackRate.mSpeed, playbackRate.mPitch);
@@ -832,12 +873,18 @@
playbackRateTemp.mSpeed = effectiveSpeed;
playbackRateTemp.mPitch = effectivePitch;
+ ALOGV("setPlaybackRate (effective): mSampleRate:%u mSpeed:%f mPitch:%f",
+ effectiveRate, effectiveSpeed, effectivePitch);
+
if (!isAudioPlaybackRateValid(playbackRateTemp)) {
+ ALOGV("setPlaybackRate(%f, %f) failed (effective rate out of bounds)",
+ playbackRate.mSpeed, playbackRate.mPitch);
return BAD_VALUE;
}
// Check if the buffer size is compatible.
if (!isSampleRateSpeedAllowed_l(effectiveRate, effectiveSpeed)) {
- ALOGV("setPlaybackRate(%f, %f) failed", playbackRate.mSpeed, playbackRate.mPitch);
+ ALOGV("setPlaybackRate(%f, %f) failed (buffer size)",
+ playbackRate.mSpeed, playbackRate.mPitch);
return BAD_VALUE;
}
@@ -875,6 +922,24 @@
return (ssize_t) mProxy->getBufferSizeInFrames();
}
+status_t AudioTrack::getBufferDurationInUs(int64_t *duration)
+{
+ if (duration == nullptr) {
+ return BAD_VALUE;
+ }
+ AutoMutex lock(mLock);
+ if (mOutput == AUDIO_IO_HANDLE_NONE || mProxy.get() == 0) {
+ return NO_INIT;
+ }
+ ssize_t bufferSizeInFrames = (ssize_t) mProxy->getBufferSizeInFrames();
+ if (bufferSizeInFrames < 0) {
+ return (status_t)bufferSizeInFrames;
+ }
+ *duration = (int64_t)((double)bufferSizeInFrames * 1000000
+ / ((double)mSampleRate * mPlaybackRate.mSpeed));
+ return NO_ERROR;
+}
+
ssize_t AudioTrack::setBufferSizeInFrames(size_t bufferSizeInFrames)
{
AutoMutex lock(mLock);
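Worked example of the duration formula in getBufferDurationInUs() above, with illustrative values: bufferSizeInFrames = 4800, mSampleRate = 48000 Hz, mSpeed = 2.0 gives duration = 4800 * 1000000 / (48000 * 2.0) = 50000 us; that is, a 100 ms buffer drains in 50 ms at 2x playback speed.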
@@ -1199,6 +1264,15 @@
goto release;
}
+ // TODO consider making this a member variable if there are other uses for it later
+ size_t afFrameCountHAL;
+ status = AudioSystem::getFrameCountHAL(output, &afFrameCountHAL);
+ if (status != NO_ERROR) {
+ ALOGE("getFrameCountHAL(output=%d) status %d", output, status);
+ goto release;
+ }
+ ALOG_ASSERT(afFrameCountHAL > 0);
+
status = AudioSystem::getSamplingRate(output, &mAfSampleRate);
if (status != NO_ERROR) {
ALOGE("getSamplingRate(output=%d) status %d", output, status);
@@ -1271,16 +1345,30 @@
// there _is_ a frameCount parameter. We silently ignore it.
frameCount = mSharedBuffer->size() / mFrameSize;
} else {
- // For fast tracks the frame count calculations and checks are done by server
-
- if ((mFlags & AUDIO_OUTPUT_FLAG_FAST) == 0) {
- // for normal tracks precompute the frame count based on speed.
- const size_t minFrameCount = calculateMinFrameCount(
- mAfLatency, mAfFrameCount, mAfSampleRate, mSampleRate,
- mPlaybackRate.mSpeed);
- if (frameCount < minFrameCount) {
- frameCount = minFrameCount;
+ size_t minFrameCount = 0;
+ // For fast tracks the frame count calculations and checks are mostly done by server,
+ // but we try to respect the application's request for notifications per buffer.
+ if (mFlags & AUDIO_OUTPUT_FLAG_FAST) {
+ if (mNotificationsPerBufferReq > 0) {
+ // Avoid possible arithmetic overflow during multiplication.
+ // mNotificationsPerBufferReq is clamped to a small integer earlier, so overflow is unlikely.
+ if (mNotificationsPerBufferReq > SIZE_MAX / afFrameCountHAL) {
+ ALOGE("Requested notificationPerBuffer=%u ignored for HAL frameCount=%zu",
+ mNotificationsPerBufferReq, afFrameCountHAL);
+ } else {
+ minFrameCount = afFrameCountHAL * mNotificationsPerBufferReq;
+ }
}
+ } else {
+ // for normal tracks precompute the frame count based on speed.
+ const float speed = !isPurePcmData_l() || isOffloadedOrDirect_l() ? 1.0f :
+ max(mMaxRequiredSpeed, mPlaybackRate.mSpeed);
+ minFrameCount = calculateMinFrameCount(
+ mAfLatency, mAfFrameCount, mAfSampleRate, mSampleRate,
+ speed /*, 0 mNotificationsPerBufferReq*/);
+ }
+ if (frameCount < minFrameCount) {
+ frameCount = minFrameCount;
}
}
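As an illustrative calculation for the fast-track branch above: with afFrameCountHAL = 192 frames and mNotificationsPerBufferReq = 4, minFrameCount = 192 * 4 = 768 frames. The SIZE_MAX overflow guard only trips for absurdly large HAL frame counts, since the per-buffer request was already clamped to at most 8.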
@@ -1374,22 +1462,27 @@
}
// Make sure that application is notified with sufficient margin before underrun.
- // The client's AudioTrack buffer is divided into n parts for purpose of wakeup by server, where
- // n = 1 fast track with single buffering; nBuffering is ignored
- // n = 2 fast track with double buffering
- // n = 2 normal track, (including those with sample rate conversion)
- // n >= 3 very high latency or very small notification interval (unused).
- // FIXME Move the computation from client side to server side,
- // and allow nBuffering to be larger than 1 for OpenSL ES, like it can be for Java.
+ // The client can divide the AudioTrack buffer into sub-buffers,
+ // and expresses its desire to the server as the notification frame count.
if (mSharedBuffer == 0 && audio_is_linear_pcm(mFormat)) {
- size_t maxNotificationFrames = frameCount;
- if (!(trackFlags & IAudioFlinger::TRACK_FAST)) {
- const uint32_t nBuffering = 2;
- maxNotificationFrames /= nBuffering;
+ size_t maxNotificationFrames;
+ if (trackFlags & IAudioFlinger::TRACK_FAST) {
+ // notify every HAL buffer, regardless of the size of the track buffer
+ maxNotificationFrames = afFrameCountHAL;
+ } else {
+ // For normal tracks, use at least double-buffering if no sample rate conversion,
+ // or at least triple-buffering if there is sample rate conversion
+ const int nBuffering = mOriginalSampleRate == mAfSampleRate ? 2 : 3;
+ maxNotificationFrames = frameCount / nBuffering;
}
if (mNotificationFramesAct == 0 || mNotificationFramesAct > maxNotificationFrames) {
- ALOGW("Client adjusted notificationFrames from %u to %zu for frameCount %zu",
+ if (mNotificationFramesAct == 0) {
+ ALOGD("Client defaulted notificationFrames to %zu for frameCount %zu",
+ maxNotificationFrames, frameCount);
+ } else {
+ ALOGW("Client adjusted notificationFrames from %u to %zu for frameCount %zu",
mNotificationFramesAct, maxNotificationFrames, frameCount);
+ }
mNotificationFramesAct = (uint32_t) maxNotificationFrames;
}
}
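For example, a normal track with frameCount = 9600 and no sample rate conversion defaults to 9600 / 2 = 4800 frames per notification; with conversion it defaults to 9600 / 3 = 3200; a fast track is notified every afFrameCountHAL frames regardless of the track buffer size.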
@@ -2009,6 +2102,7 @@
const nsecs_t datans = mRemainingFrames <= avail ? 0 :
framesToNanoseconds(mRemainingFrames - avail, sampleRate, speed);
// audio flinger thread buffer size (TODO: adjust for fast tracks)
+ // FIXME: use mAfFrameCountHAL instead of mAfFrameCount below for fast tracks.
const nsecs_t afns = framesToNanoseconds(mAfFrameCount, mAfSampleRate, speed);
// add a half the AudioFlinger buffer time to avoid soaking CPU if datans is 0.
myns = datans + (afns / 2);
@@ -2169,7 +2263,8 @@
return true; // static tracks do not have issues with buffer sizing.
}
const size_t minFrameCount =
- calculateMinFrameCount(mAfLatency, mAfFrameCount, mAfSampleRate, sampleRate, speed);
+ calculateMinFrameCount(mAfLatency, mAfFrameCount, mAfSampleRate, sampleRate, speed
+ /*, 0 mNotificationsPerBufferReq*/);
ALOGV("isSampleRateSpeedAllowed_l mFrameCount %zu minFrameCount %zu",
mFrameCount, minFrameCount);
return mFrameCount >= minFrameCount;
@@ -2269,7 +2364,37 @@
ExtendedTimestamp ets;
status = mProxy->getTimestamp(&ets);
if (status == OK) {
- status = ets.getBestTimestamp(&timestamp);
+ ExtendedTimestamp::Location location;
+ status = ets.getBestTimestamp(&timestamp, &location);
+
+ if (status == OK) {
+ // It is possible that the best location has moved from the kernel to the server.
+ // In this case we adjust the position from the previous computed latency.
+ if (location == ExtendedTimestamp::LOCATION_SERVER) {
+ ALOGW_IF(mPreviousLocation == ExtendedTimestamp::LOCATION_KERNEL,
+ "getTimestamp() location moved from kernel to server");
+ const int64_t frames =
+ (ets.mTimeNs[ExtendedTimestamp::LOCATION_SERVER_LASTKERNELOK] < 0 ||
+ ets.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL_LASTKERNELOK] < 0)
+ ?
+ int64_t((double)mAfLatency * mSampleRate * mPlaybackRate.mSpeed
+ / 1000)
+ :
+ (ets.mPosition[ExtendedTimestamp::LOCATION_SERVER_LASTKERNELOK]
+ - ets.mPosition[ExtendedTimestamp::LOCATION_KERNEL_LASTKERNELOK]);
+ ALOGV("frame adjustment:%lld timestamp:%s",
+ (long long)frames, ets.toString().c_str());
+ if (frames >= ets.mPosition[location]) {
+ timestamp.mPosition = 0;
+ } else {
+ timestamp.mPosition = (uint32_t)(ets.mPosition[location] - frames);
+ }
+ }
+ mPreviousLocation = location;
+ } else {
+ // Right after the AudioTrack is started, a timestamp may not be available yet.
+ ALOGV("getBestTimestamp did not find timestamp");
+ }
}
if (status == INVALID_OPERATION) {
status = WOULD_BLOCK;
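Worked example of the latency fallback above (illustrative values): if the last-kernel positions are unavailable and mAfLatency = 20 ms, mSampleRate = 48000, mSpeed = 1.0, then frames = 20 * 48000 * 1.0 / 1000 = 960, so a server position of 10000 frames is reported as 10000 - 960 = 9040; positions smaller than the adjustment clamp to 0.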
diff --git a/media/libmedia/IAudioFlinger.cpp b/media/libmedia/IAudioFlinger.cpp
index 7543b60..aa75188 100644
--- a/media/libmedia/IAudioFlinger.cpp
+++ b/media/libmedia/IAudioFlinger.cpp
@@ -81,7 +81,8 @@
LIST_AUDIO_PATCHES,
SET_AUDIO_PORT_CONFIG,
GET_AUDIO_HW_SYNC,
- SYSTEM_READY
+ SYSTEM_READY,
+ FRAME_COUNT_HAL,
};
#define MAX_ITEMS_PER_LIST 1024
@@ -274,6 +275,8 @@
return reply.readInt32();
}
+ // RESERVED for channelCount()
+
virtual audio_format_t format(audio_io_handle_t output) const
{
Parcel data, reply;
@@ -911,6 +914,18 @@
data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
return remote()->transact(SYSTEM_READY, data, &reply, IBinder::FLAG_ONEWAY);
}
+ virtual size_t frameCountHAL(audio_io_handle_t ioHandle) const
+ {
+ Parcel data, reply;
+ data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
+ data.writeInt32((int32_t) ioHandle);
+ status_t status = remote()->transact(FRAME_COUNT_HAL, data, &reply);
+ if (status != NO_ERROR) {
+ return 0;
+ }
+ return reply.readInt64();
+ }
+
};
IMPLEMENT_META_INTERFACE(AudioFlinger, "android.media.IAudioFlinger");
@@ -993,6 +1008,9 @@
reply->writeInt32( sampleRate((audio_io_handle_t) data.readInt32()) );
return NO_ERROR;
} break;
+
+ // RESERVED for channelCount()
+
case FORMAT: {
CHECK_INTERFACE(IAudioFlinger, data, reply);
reply->writeInt32( format((audio_io_handle_t) data.readInt32()) );
@@ -1419,6 +1437,11 @@
systemReady();
return NO_ERROR;
} break;
+ case FRAME_COUNT_HAL: {
+ CHECK_INTERFACE(IAudioFlinger, data, reply);
+ reply->writeInt64( frameCountHAL((audio_io_handle_t) data.readInt32()) );
+ return NO_ERROR;
+ } break;
default:
return BBinder::onTransact(code, data, reply, flags);
}
diff --git a/media/libmedia/IAudioFlingerClient.cpp b/media/libmedia/IAudioFlingerClient.cpp
index 3429d36..8dca9e9 100644
--- a/media/libmedia/IAudioFlingerClient.cpp
+++ b/media/libmedia/IAudioFlingerClient.cpp
@@ -50,6 +50,7 @@
data.writeInt32(ioDesc->mFormat);
data.writeInt32(ioDesc->mChannelMask);
data.writeInt64(ioDesc->mFrameCount);
+ data.writeInt64(ioDesc->mFrameCountHAL);
data.writeInt32(ioDesc->mLatency);
remote()->transact(IO_CONFIG_CHANGED, data, &reply, IBinder::FLAG_ONEWAY);
}
@@ -73,6 +74,7 @@
ioDesc->mFormat = (audio_format_t) data.readInt32();
ioDesc->mChannelMask = (audio_channel_mask_t) data.readInt32();
ioDesc->mFrameCount = data.readInt64();
+ ioDesc->mFrameCountHAL = data.readInt64();
ioDesc->mLatency = data.readInt32();
ioConfigChanged(event, ioDesc);
return NO_ERROR;
diff --git a/media/libmedia/IMediaExtractor.cpp b/media/libmedia/IMediaExtractor.cpp
index b13b69f..e8ad75b 100644
--- a/media/libmedia/IMediaExtractor.cpp
+++ b/media/libmedia/IMediaExtractor.cpp
@@ -216,13 +216,15 @@
return str;
}
-static Vector<ExtractorInstance> extractors;
+static Vector<ExtractorInstance> sExtractors;
+static Mutex sExtractorsLock;
void registerMediaSource(
const sp<IMediaExtractor> &ex,
const sp<IMediaSource> &source) {
- for (size_t i = 0; i < extractors.size(); i++) {
- ExtractorInstance &instance = extractors.editItemAt(i);
+ Mutex::Autolock lock(sExtractorsLock);
+ for (size_t i = 0; i < sExtractors.size(); i++) {
+ ExtractorInstance &instance = sExtractors.editItemAt(i);
sp<IMediaExtractor> extractor = instance.extractor.promote();
if (extractor != NULL && extractor == ex) {
if (instance.tracks.size() > 5) {
@@ -237,7 +239,7 @@
void registerMediaExtractor(
const sp<IMediaExtractor> &extractor,
- const sp<IDataSource> &source,
+ const sp<DataSource> &source,
const char *mime) {
ExtractorInstance ex;
ex.mime = mime == NULL ? "NULL" : mime;
@@ -246,19 +248,25 @@
ex.owner = IPCThreadState::self()->getCallingPid();
ex.extractor = extractor;
- if (extractors.size() > 10) {
- extractors.resize(10);
+ {
+ Mutex::Autolock lock(sExtractorsLock);
+ if (sExtractors.size() > 10) {
+ sExtractors.resize(10);
+ }
+ sExtractors.push_front(ex);
}
- extractors.push_front(ex);
}
status_t dumpExtractors(int fd, const Vector<String16>&) {
String8 out;
out.append("Recent extractors, most recent first:\n");
- for (size_t i = 0; i < extractors.size(); i++) {
- const ExtractorInstance &instance = extractors.itemAt(i);
- out.append(" ");
- out.append(instance.toString());
+ {
+ Mutex::Autolock lock(sExtractorsLock);
+ for (size_t i = 0; i < sExtractors.size(); i++) {
+ const ExtractorInstance &instance = sExtractors.itemAt(i);
+ out.append(" ");
+ out.append(instance.toString());
+ }
}
write(fd, out.string(), out.size());
return OK;
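The shape of the locking fix above, for reference: every touch of the static sExtractors vector is now bracketed by sExtractorsLock. Any new accessor should follow the same pattern; a minimal sketch:

    {
        Mutex::Autolock lock(sExtractorsLock);
        for (size_t i = 0; i < sExtractors.size(); i++) {
            // sExtractors.itemAt(i) may be read safely here.
        }
    } // lock released at end of scope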
diff --git a/media/libmedia/IMediaPlayerService.cpp b/media/libmedia/IMediaPlayerService.cpp
index 27b9edd..7590c1b 100644
--- a/media/libmedia/IMediaPlayerService.cpp
+++ b/media/libmedia/IMediaPlayerService.cpp
@@ -20,8 +20,6 @@
#include <binder/Parcel.h>
#include <binder/IMemory.h>
-#include <media/ICrypto.h>
-#include <media/IDrm.h>
#include <media/IHDCP.h>
#include <media/IMediaCodecList.h>
#include <media/IMediaHTTPService.h>
@@ -42,8 +40,6 @@
CREATE_MEDIA_RECORDER,
CREATE_METADATA_RETRIEVER,
GET_OMX,
- MAKE_CRYPTO,
- MAKE_DRM,
MAKE_HDCP,
ADD_BATTERY_DATA,
PULL_BATTERY_DATA,
@@ -94,20 +90,6 @@
return interface_cast<IOMX>(reply.readStrongBinder());
}
- virtual sp<ICrypto> makeCrypto() {
- Parcel data, reply;
- data.writeInterfaceToken(IMediaPlayerService::getInterfaceDescriptor());
- remote()->transact(MAKE_CRYPTO, data, &reply);
- return interface_cast<ICrypto>(reply.readStrongBinder());
- }
-
- virtual sp<IDrm> makeDrm() {
- Parcel data, reply;
- data.writeInterfaceToken(IMediaPlayerService::getInterfaceDescriptor());
- remote()->transact(MAKE_DRM, data, &reply);
- return interface_cast<IDrm>(reply.readStrongBinder());
- }
-
virtual sp<IHDCP> makeHDCP(bool createEncryptionModule) {
Parcel data, reply;
data.writeInterfaceToken(IMediaPlayerService::getInterfaceDescriptor());
@@ -185,18 +167,6 @@
reply->writeStrongBinder(IInterface::asBinder(omx));
return NO_ERROR;
} break;
- case MAKE_CRYPTO: {
- CHECK_INTERFACE(IMediaPlayerService, data, reply);
- sp<ICrypto> crypto = makeCrypto();
- reply->writeStrongBinder(IInterface::asBinder(crypto));
- return NO_ERROR;
- } break;
- case MAKE_DRM: {
- CHECK_INTERFACE(IMediaPlayerService, data, reply);
- sp<IDrm> drm = makeDrm();
- reply->writeStrongBinder(IInterface::asBinder(drm));
- return NO_ERROR;
- } break;
case MAKE_HDCP: {
CHECK_INTERFACE(IMediaPlayerService, data, reply);
bool createEncryptionModule = data.readInt32();
diff --git a/media/libmedia/IOMX.cpp b/media/libmedia/IOMX.cpp
index 61fba35..8ebb355 100644
--- a/media/libmedia/IOMX.cpp
+++ b/media/libmedia/IOMX.cpp
@@ -25,6 +25,7 @@
#include <media/IOMX.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/openmax/OMX_IndexExt.h>
+#include <utils/NativeHandle.h>
namespace android {
@@ -60,6 +61,7 @@
SET_INTERNAL_OPTION,
UPDATE_GRAPHIC_BUFFER_IN_META,
CONFIGURE_VIDEO_TUNNEL_MODE,
+ UPDATE_NATIVE_HANDLE_IN_META,
};
class BpOMX : public BpInterface<IOMX> {
@@ -313,6 +315,24 @@
return err;
}
+ virtual status_t updateNativeHandleInMeta(
+ node_id node, OMX_U32 port_index,
+ const sp<NativeHandle> &nativeHandle, buffer_id buffer) {
+ Parcel data, reply;
+ data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
+ data.writeInt32((int32_t)node);
+ data.writeInt32(port_index);
+ data.writeInt32(nativeHandle != NULL);
+ if (nativeHandle != NULL) {
+ data.writeNativeHandle(nativeHandle->handle());
+ }
+ data.writeInt32((int32_t)buffer);
+ remote()->transact(UPDATE_NATIVE_HANDLE_IN_META, data, &reply);
+
+ status_t err = reply.readInt32();
+ return err;
+ }
+
virtual status_t createInputSurface(
node_id node, OMX_U32 port_index, android_dataspace dataSpace,
sp<IGraphicBufferProducer> *bufferProducer, MetadataBufferType *type) {
@@ -416,7 +436,9 @@
data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
data.writeInt32((int32_t)node);
data.writeInt32(port_index);
- data.writeInt32((uint32_t)enable);
+ data.writeInt32((int32_t)enable);
+ data.writeInt32(type == NULL ? kMetadataBufferTypeANWBuffer : *type);
+
remote()->transact(STORE_META_DATA_IN_BUFFERS, data, &reply);
// read type even storeMetaDataInBuffers failed
@@ -465,7 +487,7 @@
virtual status_t allocateSecureBuffer(
node_id node, OMX_U32 port_index, size_t size,
- buffer_id *buffer, void **buffer_data, native_handle_t **native_handle) {
+ buffer_id *buffer, void **buffer_data, sp<NativeHandle> *native_handle) {
Parcel data, reply;
data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
data.writeInt32((int32_t)node);
@@ -484,7 +506,8 @@
*buffer = (buffer_id)reply.readInt32();
*buffer_data = (void *)reply.readInt64();
if (*buffer_data == NULL) {
- *native_handle = reply.readNativeHandle();
+ *native_handle = NativeHandle::create(
+ reply.readNativeHandle(), true /* ownsHandle */);
} else {
*native_handle = NULL;
}
@@ -908,6 +931,25 @@
return NO_ERROR;
}
+ case UPDATE_NATIVE_HANDLE_IN_META:
+ {
+ CHECK_OMX_INTERFACE(IOMX, data, reply);
+
+ node_id node = (node_id)data.readInt32();
+ OMX_U32 port_index = data.readInt32();
+ native_handle *handle = NULL;
+ if (data.readInt32()) {
+ handle = data.readNativeHandle();
+ }
+ buffer_id buffer = (buffer_id)data.readInt32();
+
+ status_t err = updateNativeHandleInMeta(
+ node, port_index, NativeHandle::create(handle, true /* ownsHandle */), buffer);
+ reply->writeInt32(err);
+
+ return NO_ERROR;
+ }
+
case CREATE_INPUT_SURFACE:
{
CHECK_OMX_INTERFACE(IOMX, data, reply);
@@ -1001,7 +1043,7 @@
OMX_U32 port_index = data.readInt32();
OMX_BOOL enable = (OMX_BOOL)data.readInt32();
- MetadataBufferType type = kMetadataBufferTypeInvalid;
+ MetadataBufferType type = (MetadataBufferType)data.readInt32();
status_t err = storeMetaDataInBuffers(node, port_index, enable, &type);
reply->writeInt32(type);
@@ -1063,7 +1105,7 @@
buffer_id buffer;
void *buffer_data = NULL;
- native_handle_t *native_handle = NULL;
+ sp<NativeHandle> native_handle;
status_t err = allocateSecureBuffer(
node, port_index, size, &buffer, &buffer_data, &native_handle);
reply->writeInt32(err);
@@ -1072,7 +1114,7 @@
reply->writeInt32((int32_t)buffer);
reply->writeInt64((uintptr_t)buffer_data);
if (buffer_data == NULL) {
- reply->writeNativeHandle(native_handle);
+ reply->writeNativeHandle(native_handle == NULL ? NULL : native_handle->handle());
}
}
diff --git a/media/libmedia/MediaCodecInfo.cpp b/media/libmedia/MediaCodecInfo.cpp
index 06abd8d..1b3b3eb 100644
--- a/media/libmedia/MediaCodecInfo.cpp
+++ b/media/libmedia/MediaCodecInfo.cpp
@@ -75,6 +75,8 @@
}
uint32_t flags = static_cast<uint32_t>(parcel.readInt32());
sp<AMessage> details = AMessage::FromParcel(parcel);
+ if (details == NULL)
+ return NULL;
if (caps != NULL) {
caps->mFlags = flags;
caps->mDetails = details;
@@ -163,6 +165,8 @@
for (size_t i = 0; i < size; i++) {
AString mime = AString::FromParcel(parcel);
sp<Capabilities> caps = Capabilities::FromParcel(parcel);
+ if (caps == NULL)
+ return NULL;
if (info != NULL) {
info->mCaps.add(mime, caps);
}
diff --git a/media/libmedia/ToneGenerator.cpp b/media/libmedia/ToneGenerator.cpp
index 9f4b4de..411519d 100644
--- a/media/libmedia/ToneGenerator.cpp
+++ b/media/libmedia/ToneGenerator.cpp
@@ -740,6 +740,13 @@
{ .duration = 0 , .waveFreq = { 0 }, 0, 0}},
.repeatCnt = ToneGenerator::TONEGEN_INF,
.repeatSegment = 0 }, // TONE_JAPAN_RADIO_ACK
+ { .segments = { { .duration = 400, .waveFreq = { 400, 450, 0 }, 0, 0 },
+ { .duration = 200, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 400, .waveFreq = { 400, 450, 0 }, 0, 0 },
+ { .duration = 2000, .waveFreq = { 0 }, 0, 0},
+ { .duration = 0, .waveFreq = { 0 }, 0, 0}},
+ .repeatCnt = ToneGenerator::TONEGEN_INF,
+ .repeatSegment = 0 }, // TONE_UK_RINGTONE
@@ -767,7 +774,18 @@
TONE_SUP_ERROR, // TONE_SUP_ERROR
TONE_SUP_CALL_WAITING, // TONE_SUP_CALL_WAITING
TONE_SUP_RINGTONE // TONE_SUP_RINGTONE
+ },
+ { // UK
+ TONE_SUP_DIAL, // TONE_SUP_DIAL
+ TONE_SUP_BUSY, // TONE_SUP_BUSY
+ TONE_SUP_CONGESTION, // TONE_SUP_CONGESTION
+ TONE_SUP_RADIO_ACK, // TONE_SUP_RADIO_ACK
+ TONE_SUP_RADIO_NOTAVAIL, // TONE_SUP_RADIO_NOTAVAIL
+ TONE_SUP_ERROR, // TONE_SUP_ERROR
+ TONE_SUP_CALL_WAITING, // TONE_SUP_CALL_WAITING
+ TONE_UK_RINGTONE // TONE_SUP_RINGTONE
}
+
};
@@ -819,6 +837,9 @@
mRegion = ANSI;
} else if (strcmp(value,"jp") == 0) {
mRegion = JAPAN;
+ } else if (strcmp(value,"uk") == 0 ||
+ strcmp(value,"uk,uk") == 0) {
+ mRegion = UK;
} else {
mRegion = CEPT;
}
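The new TONE_UK_RINGTONE segment table above encodes the standard UK double-ring cadence: 400 ms of 400 Hz + 450 Hz, 200 ms of silence, another 400 ms burst, then 2 s of silence, repeated indefinitely (TONEGEN_INF).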
diff --git a/media/libmediaplayerservice/Android.mk b/media/libmediaplayerservice/Android.mk
index 7f41143..8d86366 100644
--- a/media/libmediaplayerservice/Android.mk
+++ b/media/libmediaplayerservice/Android.mk
@@ -8,16 +8,12 @@
LOCAL_SRC_FILES:= \
ActivityManager.cpp \
- Crypto.cpp \
- Drm.cpp \
- DrmSessionManager.cpp \
HDCP.cpp \
MediaPlayerFactory.cpp \
MediaPlayerService.cpp \
MediaRecorderClient.cpp \
MetadataRetrieverClient.cpp \
RemoteDisplay.cpp \
- SharedLibrary.cpp \
StagefrightRecorder.cpp \
TestPlayerStub.cpp \
@@ -32,6 +28,7 @@
libgui \
libmedia \
libmediautils \
+ libmemunreachable \
libsonivox \
libstagefright \
libstagefright_foundation \
@@ -51,9 +48,12 @@
$(TOP)/frameworks/av/media/libstagefright/rtsp \
$(TOP)/frameworks/av/media/libstagefright/wifi-display \
$(TOP)/frameworks/av/media/libstagefright/webm \
+ $(TOP)/frameworks/av/include/media \
$(TOP)/frameworks/av/include/camera \
$(TOP)/frameworks/native/include/media/openmax \
+ $(TOP)/frameworks/native/include/media/hardware \
$(TOP)/external/tremolo/Tremolo \
+ libcore/include \
LOCAL_CFLAGS += -Werror -Wno-error=deprecated-declarations -Wall
LOCAL_CLANG := true
diff --git a/media/libmediaplayerservice/DrmSessionClientInterface.h b/media/libmediaplayerservice/DrmSessionClientInterface.h
deleted file mode 100644
index 17faf08..0000000
--- a/media/libmediaplayerservice/DrmSessionClientInterface.h
+++ /dev/null
@@ -1,34 +0,0 @@
-/*
- * Copyright (C) 2015 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef DRM_PROXY_INTERFACE_H_
-#define DRM_PROXY_INTERFACE_H_
-
-#include <utils/RefBase.h>
-#include <utils/Vector.h>
-
-namespace android {
-
-struct DrmSessionClientInterface : public RefBase {
- virtual bool reclaimSession(const Vector<uint8_t>& sessionId) = 0;
-
-protected:
- virtual ~DrmSessionClientInterface() {}
-};
-
-} // namespace android
-
-#endif // DRM_PROXY_INTERFACE_H_
diff --git a/media/libmediaplayerservice/MediaPlayerService.cpp b/media/libmediaplayerservice/MediaPlayerService.cpp
index 1a3013a..35f439b 100644
--- a/media/libmediaplayerservice/MediaPlayerService.cpp
+++ b/media/libmediaplayerservice/MediaPlayerService.cpp
@@ -62,6 +62,7 @@
#include <media/stagefright/foundation/ALooperRoster.h>
#include <mediautils/BatteryNotifier.h>
+#include <memunreachable/memunreachable.h>
#include <system/audio.h>
#include <private/android_filesystem_config.h>
@@ -77,8 +78,6 @@
#include <OMX.h>
-#include "Crypto.h"
-#include "Drm.h"
#include "HDCP.h"
#include "HTTPBase.h"
#include "RemoteDisplay.h"
@@ -94,6 +93,8 @@
// Max number of entries in the filter.
const int kMaxFilterSize = 64; // I pulled that out of thin air.
+const float kMaxRequiredSpeed = 8.0f; // for PCM tracks allow up to 8x speedup.
+
// FIXME: Move all the metadata related function in the Metadata.cpp
@@ -356,14 +357,6 @@
return mOMX;
}
-sp<ICrypto> MediaPlayerService::makeCrypto() {
- return new Crypto;
-}
-
-sp<IDrm> MediaPlayerService::makeDrm() {
- return new Drm;
-}
-
sp<IHDCP> MediaPlayerService::makeHDCP(bool createEncryptionModule) {
return new HDCP(createEncryptionModule);
}
@@ -534,14 +527,23 @@
gLooperRoster.dump(fd, args);
bool dumpMem = false;
+ bool unreachableMemory = false;
for (size_t i = 0; i < args.size(); i++) {
if (args[i] == String16("-m")) {
dumpMem = true;
+ } else if (args[i] == String16("--unreachable")) {
+ unreachableMemory = true;
}
}
if (dumpMem) {
dumpMemoryAddresses(fd);
}
+ if (unreachableMemory) {
+ result.append("\nDumping unreachable memory:\n");
+ // TODO - should limit be an argument parameter?
+ std::string s = GetUnreachableMemoryString(true /* contents */, 10000 /* limit */);
+ result.append(s.c_str(), s.size());
+ }
}
write(fd, result.string(), result.size());
return NO_ERROR;
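With this change, the unreachable-memory report can be requested through the existing dump path, e.g.:

    adb shell dumpsys media.player --unreachable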
@@ -638,6 +640,28 @@
return p;
}
+MediaPlayerService::Client::ServiceDeathNotifier::ServiceDeathNotifier(
+ const sp<IBinder>& service,
+ const sp<MediaPlayerBase>& listener,
+ int which) {
+ mService = service;
+ mListener = listener;
+ mWhich = which;
+}
+
+MediaPlayerService::Client::ServiceDeathNotifier::~ServiceDeathNotifier() {
+ mService->unlinkToDeath(this);
+}
+
+void MediaPlayerService::Client::ServiceDeathNotifier::binderDied(const wp<IBinder>& /*who*/) {
+ sp<MediaPlayerBase> listener = mListener.promote();
+ if (listener != NULL) {
+ listener->sendEvent(MEDIA_ERROR, MEDIA_ERROR_SERVER_DIED, mWhich);
+ } else {
+ ALOGW("listener for process %d death is gone", mWhich);
+ }
+}
+
sp<MediaPlayerBase> MediaPlayerService::Client::setDataSource_pre(
player_type playerType)
{
@@ -649,6 +673,19 @@
return p;
}
+ sp<IServiceManager> sm = defaultServiceManager();
+ sp<IBinder> binder = sm->getService(String16("media.extractor"));
+ mExtractorDeathListener = new ServiceDeathNotifier(binder, p, MEDIAEXTRACTOR_PROCESS_DEATH);
+ binder->linkToDeath(mExtractorDeathListener);
+
+ binder = sm->getService(String16("media.codec"));
+ mCodecDeathListener = new ServiceDeathNotifier(binder, p, MEDIACODEC_PROCESS_DEATH);
+ binder->linkToDeath(mCodecDeathListener);
+
+ binder = sm->getService(String16("media.audio_flinger"));
+ mAudioDeathListener = new ServiceDeathNotifier(binder, p, AUDIO_PROCESS_DEATH);
+ binder->linkToDeath(mAudioDeathListener);
+
if (!p->hardwareOutput()) {
Mutex::Autolock l(mLock);
mAudioOutput = new AudioOutput(mAudioSessionId, IPCThreadState::self()->getCallingUid(),
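A condensed view of the death-link pattern introduced above (purely illustrative; as in the patch, error handling for a NULL binder is omitted):

    sp<IBinder> binder =
            defaultServiceManager()->getService(String16("media.codec"));
    sp<IBinder::DeathRecipient> listener =
            new ServiceDeathNotifier(binder, p, MEDIACODEC_PROCESS_DEATH);
    binder->linkToDeath(listener);
    // ServiceDeathNotifier holds only a wp<> to the player, so the link
    // cannot keep the player alive; ~ServiceDeathNotifier() unlinks.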
@@ -1749,6 +1786,14 @@
mAttributes,
doNotReconnect);
} else {
+ // TODO: Due to buffer memory concerns, we use a max target playback speed
+ // based on mPlaybackRate at the time of open (instead of kMaxRequiredSpeed),
+ // also clamping the target speed to 1.0 <= targetSpeed <= kMaxRequiredSpeed.
+ const float targetSpeed =
+ std::min(std::max(mPlaybackRate.mSpeed, 1.0f), kMaxRequiredSpeed);
+ ALOGW_IF(targetSpeed != mPlaybackRate.mSpeed,
+ "track target speed:%f clamped from playback speed:%f",
+ targetSpeed, mPlaybackRate.mSpeed);
t = new AudioTrack(
mStreamType,
sampleRate,
@@ -1765,7 +1810,8 @@
mUid,
mPid,
mAttributes,
- doNotReconnect);
+ doNotReconnect,
+ targetSpeed);
}
if ((t == 0) || (t->initCheck() != NO_ERROR)) {
@@ -2127,6 +2173,19 @@
return mTrack->getSampleRate();
}
+int64_t MediaPlayerService::AudioOutput::getBufferDurationInUs() const
+{
+ Mutex::Autolock lock(mLock);
+ if (mTrack == 0) {
+ return 0;
+ }
+ int64_t duration;
+ if (mTrack->getBufferDurationInUs(&duration) != OK) {
+ return 0;
+ }
+ return duration;
+}
+
////////////////////////////////////////////////////////////////////////////////
struct CallbackThread : public Thread {
diff --git a/media/libmediaplayerservice/MediaPlayerService.h b/media/libmediaplayerservice/MediaPlayerService.h
index 1581b49..0ecfdbc 100644
--- a/media/libmediaplayerservice/MediaPlayerService.h
+++ b/media/libmediaplayerservice/MediaPlayerService.h
@@ -92,6 +92,7 @@
virtual status_t getFramesWritten(uint32_t *frameswritten) const;
virtual audio_session_t getSessionId() const;
virtual uint32_t getSampleRate() const;
+ virtual int64_t getBufferDurationInUs() const;
virtual status_t open(
uint32_t sampleRate, int channelCount, audio_channel_mask_t channelMask,
@@ -218,8 +219,6 @@
virtual sp<IMediaCodecList> getCodecList() const;
virtual sp<IOMX> getOMX();
- virtual sp<ICrypto> makeCrypto();
- virtual sp<IDrm> makeDrm();
virtual sp<IHDCP> makeHDCP(bool createEncryptionModule);
virtual sp<IRemoteDisplay> listenForRemoteDisplay(const String16 &opPackageName,
@@ -228,6 +227,14 @@
void removeClient(wp<Client> client);
+ enum {
+ MEDIASERVER_PROCESS_DEATH = 0,
+ MEDIAEXTRACTOR_PROCESS_DEATH = 1,
+ MEDIACODEC_PROCESS_DEATH = 2,
+ AUDIO_PROCESS_DEATH = 3,
+ CAMERA_PROCESS_DEATH = 4
+ };
+
// For battery usage tracking purpose
struct BatteryUsageInfo {
// how many streams are being played by one UID
@@ -335,6 +342,22 @@
audio_session_t getAudioSessionId() { return mAudioSessionId; }
private:
+ class ServiceDeathNotifier: public IBinder::DeathRecipient
+ {
+ public:
+ ServiceDeathNotifier(
+ const sp<IBinder>& service,
+ const sp<MediaPlayerBase>& listener,
+ int which);
+ virtual ~ServiceDeathNotifier();
+ virtual void binderDied(const wp<IBinder>& who);
+
+ private:
+ int mWhich;
+ sp<IBinder> mService;
+ wp<MediaPlayerBase> mListener;
+ };
+
friend class MediaPlayerService;
Client( const sp<MediaPlayerService>& service,
pid_t pid,
@@ -394,6 +417,9 @@
// getMetadata clears this set.
media::Metadata::Filter mMetadataUpdated; // protected by mLock
+ sp<IBinder::DeathRecipient> mExtractorDeathListener;
+ sp<IBinder::DeathRecipient> mCodecDeathListener;
+ sp<IBinder::DeathRecipient> mAudioDeathListener;
#if CALLBACK_ANTAGONIZER
Antagonizer* mAntagonizer;
#endif
@@ -409,7 +435,6 @@
SortedVector< wp<MediaRecorderClient> > mMediaRecorderClients;
int32_t mNextConnId;
sp<IOMX> mOMX;
- sp<ICrypto> mCrypto;
};
// ----------------------------------------------------------------------------
diff --git a/media/libmediaplayerservice/MediaRecorderClient.cpp b/media/libmediaplayerservice/MediaRecorderClient.cpp
index 3152e04..2832166 100644
--- a/media/libmediaplayerservice/MediaRecorderClient.cpp
+++ b/media/libmediaplayerservice/MediaRecorderClient.cpp
@@ -335,6 +335,28 @@
release();
}
+MediaRecorderClient::ServiceDeathNotifier::ServiceDeathNotifier(
+ const sp<IBinder>& service,
+ const sp<IMediaRecorderClient>& listener,
+ int which) {
+ mService = service;
+ mListener = listener;
+ mWhich = which;
+}
+
+MediaRecorderClient::ServiceDeathNotifier::~ServiceDeathNotifier() {
+ mService->unlinkToDeath(this);
+}
+
+void MediaRecorderClient::ServiceDeathNotifier::binderDied(const wp<IBinder>& /*who*/) {
+ sp<IMediaRecorderClient> listener = mListener.promote();
+ if (listener != NULL) {
+ listener->notify(MEDIA_ERROR, MEDIA_ERROR_SERVER_DIED, mWhich);
+ } else {
+ ALOGW("listener for process %d death is gone", mWhich);
+ }
+}
+
status_t MediaRecorderClient::setListener(const sp<IMediaRecorderClient>& listener)
{
ALOGV("setListener");
@@ -343,7 +365,25 @@
ALOGE("recorder is not initialized");
return NO_INIT;
}
- return mRecorder->setListener(listener);
+ mRecorder->setListener(listener);
+
+ sp<IServiceManager> sm = defaultServiceManager();
+ sp<IBinder> binder = sm->getService(String16("media.camera"));
+ mCameraDeathListener = new ServiceDeathNotifier(binder, listener,
+ MediaPlayerService::CAMERA_PROCESS_DEATH);
+ binder->linkToDeath(mCameraDeathListener);
+
+ binder = sm->getService(String16("media.codec"));
+ mCodecDeathListener = new ServiceDeathNotifier(binder, listener,
+ MediaPlayerService::MEDIACODEC_PROCESS_DEATH);
+ binder->linkToDeath(mCodecDeathListener);
+
+ binder = sm->getService(String16("media.audio_flinger"));
+ mAudioDeathListener = new ServiceDeathNotifier(binder, listener,
+ MediaPlayerService::AUDIO_PROCESS_DEATH);
+ binder->linkToDeath(mAudioDeathListener);
+
+ return OK;
}
status_t MediaRecorderClient::setClientName(const String16& clientName) {
diff --git a/media/libmediaplayerservice/MediaRecorderClient.h b/media/libmediaplayerservice/MediaRecorderClient.h
index 5a080df..6e70194 100644
--- a/media/libmediaplayerservice/MediaRecorderClient.h
+++ b/media/libmediaplayerservice/MediaRecorderClient.h
@@ -29,6 +29,22 @@
class MediaRecorderClient : public BnMediaRecorder
{
+ class ServiceDeathNotifier: public IBinder::DeathRecipient
+ {
+ public:
+ ServiceDeathNotifier(
+ const sp<IBinder>& service,
+ const sp<IMediaRecorderClient>& listener,
+ int which);
+ virtual ~ServiceDeathNotifier();
+ virtual void binderDied(const wp<IBinder>& who);
+
+ private:
+ int mWhich;
+ sp<IBinder> mService;
+ wp<IMediaRecorderClient> mListener;
+ };
+
public:
virtual status_t setCamera(const sp<hardware::ICamera>& camera,
const sp<ICameraRecordingProxy>& proxy);
@@ -69,6 +85,10 @@
const String16& opPackageName);
virtual ~MediaRecorderClient();
+ sp<IBinder::DeathRecipient> mCameraDeathListener;
+ sp<IBinder::DeathRecipient> mCodecDeathListener;
+ sp<IBinder::DeathRecipient> mAudioDeathListener;
+
pid_t mPid;
Mutex mLock;
MediaRecorderBase *mRecorder;
diff --git a/media/libmediaplayerservice/MetadataRetrieverClient.cpp b/media/libmediaplayerservice/MetadataRetrieverClient.cpp
index bb808e1..793f476 100644
--- a/media/libmediaplayerservice/MetadataRetrieverClient.cpp
+++ b/media/libmediaplayerservice/MetadataRetrieverClient.cpp
@@ -231,6 +231,7 @@
ALOGV("rotation: %d", frameCopy->mRotationAngle);
frameCopy->mData = (uint8_t *)frameCopy + sizeof(VideoFrame);
memcpy(frameCopy->mData, frame->mData, frame->mSize);
+ frameCopy->mData = 0;
delete frame; // Fix memory leakage
return mThumbnail;
}
diff --git a/media/libmediaplayerservice/StagefrightRecorder.cpp b/media/libmediaplayerservice/StagefrightRecorder.cpp
index 7b97d0f..6114af8 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.cpp
+++ b/media/libmediaplayerservice/StagefrightRecorder.cpp
@@ -1474,8 +1474,8 @@
CHECK(mFrameRate != -1);
- mIsMetaDataStoredInVideoBuffers =
- (*cameraSource)->isMetaDataStoredInVideoBuffers();
+ mMetaDataStoredInVideoBuffers =
+ (*cameraSource)->metaDataStoredInVideoBuffers();
return OK;
}
@@ -1565,11 +1565,11 @@
format->setFloat("operating-rate", mCaptureFps);
}
- uint32_t flags = 0;
- if (mIsMetaDataStoredInVideoBuffers) {
- flags |= MediaCodecSource::FLAG_USE_METADATA_INPUT;
+ if (mMetaDataStoredInVideoBuffers != kMetadataBufferTypeInvalid) {
+ format->setInt32("android._input-metadata-buffer-type", mMetaDataStoredInVideoBuffers);
}
+ uint32_t flags = 0;
if (cameraSource == NULL) {
flags |= MediaCodecSource::FLAG_USE_SURFACE_INPUT;
} else {
@@ -1793,6 +1793,7 @@
mWriter.clear();
}
mTotalPausedDurationUs = 0;
+ mPauseStartTimeUs = 0;
mGraphicBufferProducer.clear();
mPersistentSurface.clear();
@@ -1865,7 +1866,7 @@
mCaptureFps = 0.0f;
mTimeBetweenCaptureUs = -1;
mCameraSourceTimeLapse = NULL;
- mIsMetaDataStoredInVideoBuffers = false;
+ mMetaDataStoredInVideoBuffers = kMetadataBufferTypeInvalid;
mEncoderProfiles = MediaProfiles::getInstance();
mRotationDegrees = 0;
mLatitudex10000 = -3600000;
@@ -1873,7 +1874,6 @@
mTotalBitRate = 0;
mOutputFd = -1;
- mPauseStartTimeUs = 0;
return OK;
}
diff --git a/media/libmediaplayerservice/StagefrightRecorder.h b/media/libmediaplayerservice/StagefrightRecorder.h
index a73197f..d7f43bc 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.h
+++ b/media/libmediaplayerservice/StagefrightRecorder.h
@@ -24,6 +24,8 @@
#include <system/audio.h>
+#include <MetadataBufferType.h>
+
namespace android {
class Camera;
@@ -121,7 +123,7 @@
String8 mParams;
- bool mIsMetaDataStoredInVideoBuffers;
+ MetadataBufferType mMetaDataStoredInVideoBuffers;
MediaProfiles *mEncoderProfiles;
int64_t mPauseStartTimeUs;
diff --git a/media/libmediaplayerservice/nuplayer/GenericSource.cpp b/media/libmediaplayerservice/nuplayer/GenericSource.cpp
index 81aafbe..9e33cb5 100644
--- a/media/libmediaplayerservice/nuplayer/GenericSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/GenericSource.cpp
@@ -1620,14 +1620,13 @@
}
void NuPlayer::GenericSource::BufferingMonitor::startBufferingIfNecessary_l() {
- ALOGD("startBufferingIfNecessary_l: mPrepareBuffering=%d, mBuffering=%d",
- mPrepareBuffering, mBuffering);
-
if (mPrepareBuffering) {
return;
}
if (!mBuffering) {
+ ALOGD("startBufferingIfNecessary_l");
+
mBuffering = true;
ensureCacheIsFetching_l();
@@ -1640,10 +1639,9 @@
}
void NuPlayer::GenericSource::BufferingMonitor::stopBufferingIfNecessary_l() {
- ALOGD("stopBufferingIfNecessary_l: mPrepareBuffering=%d, mBuffering=%d",
- mPrepareBuffering, mBuffering);
-
if (mPrepareBuffering) {
+ ALOGD("stopBufferingIfNecessary_l, mBuffering=%d", mBuffering);
+
mPrepareBuffering = false;
sp<AMessage> notify = mNotify->dup();
@@ -1655,6 +1653,7 @@
}
if (mBuffering) {
+ ALOGD("stopBufferingIfNecessary_l");
mBuffering = false;
sendCacheStats_l();
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
index 42a82ac..46a51ce 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
@@ -400,14 +400,20 @@
}
void NuPlayer::resetAsync() {
- if (mSource != NULL) {
+ sp<Source> source;
+ {
+ Mutex::Autolock autoLock(mSourceLock);
+ source = mSource;
+ }
+
+ if (source != NULL) {
// During a reset, the data source might be unresponsive already, we need to
// disconnect explicitly so that reads exit promptly.
// We can't queue the disconnect request to the looper, as it might be
// queued behind a stuck read and never gets processed.
// Doing the disconnect outside the looper allows the pending reads to exit
// (either successfully or with error).
- mSource->disconnect();
+ source->disconnect();
}
(new AMessage(kWhatReset, this))->post();
@@ -484,6 +490,7 @@
sp<RefBase> obj;
CHECK(msg->findObject("source", &obj));
if (obj != NULL) {
+ Mutex::Autolock autoLock(mSourceLock);
mSource = static_cast<Source *>(obj.get());
} else {
err = UNKNOWN_ERROR;
@@ -1144,6 +1151,11 @@
int32_t reason;
CHECK(msg->findInt32("reason", &reason));
ALOGV("Tear down audio with reason %d.", reason);
+ if (reason == Renderer::kDueToTimeout && !(mPaused && mOffloadAudio)) {
+ // TimeoutWhenPaused is only for offload mode.
+ ALOGW("Receive a stale message for teardown.");
+ break;
+ }
int64_t positionUs;
if (!msg->findInt64("positionUs", &positionUs)) {
positionUs = mPreviousSeekTimeUs;
@@ -1998,6 +2010,7 @@
if (mSource != NULL) {
mSource->stop();
+ Mutex::Autolock autoLock(mSourceLock);
mSource.clear();
}
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.h b/media/libmediaplayerservice/nuplayer/NuPlayer.h
index 369590b..f6eb49e 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.h
@@ -140,6 +140,7 @@
bool mUIDValid;
uid_t mUID;
pid_t mPID;
+ Mutex mSourceLock; // guard |mSource|.
sp<Source> mSource;
uint32_t mSourceFlags;
sp<Surface> mSurface;
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
index cbb9d95..4ae8e82 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
@@ -19,6 +19,7 @@
#include <utils/Log.h>
#include "NuPlayerRenderer.h"
+#include <algorithm>
#include <cutils/properties.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
@@ -487,8 +488,14 @@
// Let's give it more data after about half that time
// has elapsed.
+ delayUs /= 2;
+ // check the buffer size to estimate maximum delay permitted.
+ const int64_t maxDrainDelayUs = std::max(
+ mAudioSink->getBufferDurationInUs(), (int64_t)500000 /* half second */);
+ ALOGD_IF(delayUs > maxDrainDelayUs, "postDrainAudioQueue long delay: %lld > %lld",
+ (long long)delayUs, (long long)maxDrainDelayUs);
Mutex::Autolock autoLock(mLock);
- postDrainAudioQueue_l(delayUs / 2);
+ postDrainAudioQueue_l(delayUs);
}
break;
}
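Illustrative numbers for the check above: if the sink reports a 2 s buffer, maxDrainDelayUs = 2,000,000, so a computed half-delay of 3 s triggers the long-delay warning (while still being posted); with no buffer duration available, the half-second floor of 500,000 us applies instead.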
@@ -1715,10 +1722,16 @@
}
void NuPlayer::Renderer::cancelAudioOffloadPauseTimeout() {
- if (offloadingAudio()) {
- mWakeLock->release(true);
- ++mAudioOffloadPauseTimeoutGeneration;
- }
+ // We may have called startAudioOffloadPauseTimeout() without
+ // the AudioSink open and with offloadingAudio enabled.
+ //
+ // When we cancel, it may be that offloadingAudio is subsequently disabled, so regardless
+ // we always release the wakelock and increment the pause timeout generation.
+ //
+ // Note: The acquired wakelock prevents the device from suspending
+ // immediately after offload pause (in case a resume happens shortly thereafter).
+ mWakeLock->release(true);
+ ++mAudioOffloadPauseTimeoutGeneration;
}
status_t NuPlayer::Renderer::onOpenAudioSink(
@@ -1876,6 +1889,10 @@
// NuPlayer a chance to switch from non-offload mode to offload mode.
// So we only set doNotReconnect when there's no video.
const bool doNotReconnect = !hasVideo;
+
+ // We should always be able to set our playback settings if the sink is closed.
+ LOG_ALWAYS_FATAL_IF(mAudioSink->setPlaybackRate(mPlaybackSettings) != OK,
+ "onOpenAudioSink: can't set playback rate on closed sink");
status_t err = mAudioSink->open(
sampleRate,
numChannels,
@@ -1888,9 +1905,6 @@
NULL,
doNotReconnect,
frameCount);
- if (err == OK) {
- err = mAudioSink->setPlaybackRate(mPlaybackSettings);
- }
if (err != OK) {
ALOGW("openAudioSink: non offloaded open failed status: %d", err);
mAudioSink->close();
diff --git a/media/libmediaplayerservice/tests/Android.mk b/media/libmediaplayerservice/tests/Android.mk
index 8cbf782..ea75a97 100644
--- a/media/libmediaplayerservice/tests/Android.mk
+++ b/media/libmediaplayerservice/tests/Android.mk
@@ -12,6 +12,7 @@
LOCAL_SHARED_LIBRARIES := \
liblog \
libmediaplayerservice \
+ libmediadrm \
libutils \
LOCAL_C_INCLUDES := \
diff --git a/media/libmediaplayerservice/tests/DrmSessionManager_test.cpp b/media/libmediaplayerservice/tests/DrmSessionManager_test.cpp
index ef4c833..c5212fc 100644
--- a/media/libmediaplayerservice/tests/DrmSessionManager_test.cpp
+++ b/media/libmediaplayerservice/tests/DrmSessionManager_test.cpp
@@ -20,9 +20,9 @@
#include <gtest/gtest.h>
-#include "Drm.h"
-#include "DrmSessionClientInterface.h"
-#include "DrmSessionManager.h"
+#include <media/Drm.h>
+#include <media/DrmSessionClientInterface.h>
+#include <media/DrmSessionManager.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/ProcessInfoInterface.h>
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index 7c903ea..238cd35 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -503,6 +503,7 @@
mFatalError(false),
mShutdownInProgress(false),
mExplicitShutdown(false),
+ mIsLegacyVP9Decoder(false),
mEncoderDelay(0),
mEncoderPadding(0),
mRotationDegrees(0),
@@ -792,10 +793,10 @@
MetadataBufferType type =
portIndex == kPortIndexOutput ? mOutputMetadataType : mInputMetadataType;
size_t bufSize = def.nBufferSize;
- if (type == kMetadataBufferTypeGrallocSource) {
- bufSize = sizeof(VideoGrallocMetadata);
- } else if (type == kMetadataBufferTypeANWBuffer) {
+ if (type == kMetadataBufferTypeANWBuffer) {
bufSize = sizeof(VideoNativeMetadata);
+ } else if (type == kMetadataBufferTypeNativeHandleSource) {
+ bufSize = sizeof(VideoNativeHandleMetadata);
}
// If using gralloc or native source input metadata buffers, allocate largest
@@ -803,7 +804,7 @@
// may require gralloc source. For camera source, allocate at least enough
// size for native metadata buffers.
size_t allottedSize = bufSize;
- if (portIndex == kPortIndexInput && type >= kMetadataBufferTypeGrallocSource) {
+ if (portIndex == kPortIndexInput && type == kMetadataBufferTypeANWBuffer) {
bufSize = max(sizeof(VideoGrallocMetadata), sizeof(VideoNativeMetadata));
} else if (portIndex == kPortIndexInput && type == kMetadataBufferTypeCameraSource) {
bufSize = max(bufSize, sizeof(VideoNativeMetadata));
@@ -866,7 +867,7 @@
mem.clear();
void *ptr = NULL;
- native_handle_t *native_handle = NULL;
+ sp<NativeHandle> native_handle;
err = mOMX->allocateSecureBuffer(
mNode, portIndex, bufSize, &info.mBufferID,
&ptr, &native_handle);
@@ -879,8 +880,11 @@
// TRICKY2: use native handle as the base of the ABuffer if received one,
// because Widevine source only receives these base addresses.
- info.mData = new ABuffer(ptr != NULL ? ptr : (void *)native_handle, bufSize);
- info.mNativeHandle = NativeHandle::create(native_handle, true /* ownsHandle */);
+ const native_handle_t *native_handle_ptr =
+ native_handle == NULL ? NULL : native_handle->handle();
+ info.mData = new ABuffer(
+ ptr != NULL ? ptr : (void *)native_handle_ptr, bufSize);
+ info.mNativeHandle = native_handle;
info.mCodecData = info.mData;
} else if (mQuirks & requiresAllocateBufferBit) {
err = mOMX->allocateBufferWithBackup(
@@ -1724,19 +1728,20 @@
int32_t storeMeta;
if (encoder
- && msg->findInt32("store-metadata-in-buffers", &storeMeta)
- && storeMeta != 0) {
- err = mOMX->storeMetaDataInBuffers(mNode, kPortIndexInput, OMX_TRUE, &mInputMetadataType);
+ && msg->findInt32("android._input-metadata-buffer-type", &storeMeta)
+ && storeMeta != kMetadataBufferTypeInvalid) {
+ mInputMetadataType = (MetadataBufferType)storeMeta;
+ err = mOMX->storeMetaDataInBuffers(
+ mNode, kPortIndexInput, OMX_TRUE, &mInputMetadataType);
if (err != OK) {
ALOGE("[%s] storeMetaDataInBuffers (input) failed w/ err %d",
mComponentName.c_str(), err);
return err;
- }
- // For this specific case we could be using camera source even if storeMetaDataInBuffers
- // returns Gralloc source. Pretend that we are; this will force us to use nBufferSize.
- if (mInputMetadataType == kMetadataBufferTypeGrallocSource) {
- mInputMetadataType = kMetadataBufferTypeCameraSource;
+ } else if (storeMeta == kMetadataBufferTypeANWBuffer
+ && mInputMetadataType == kMetadataBufferTypeGrallocSource) {
+ // IOMX translates ANWBuffers to gralloc source already.
+ mInputMetadataType = (MetadataBufferType)storeMeta;
}
uint32_t usageBits;
@@ -1782,9 +1787,10 @@
mIsVideo = video;
if (encoder && video) {
OMX_BOOL enable = (OMX_BOOL) (prependSPSPPS
- && msg->findInt32("store-metadata-in-buffers-output", &storeMeta)
+ && msg->findInt32("android._store-metadata-in-buffers-output", &storeMeta)
&& storeMeta != 0);
+ mOutputMetadataType = kMetadataBufferTypeNativeHandleSource;
err = mOMX->storeMetaDataInBuffers(mNode, kPortIndexOutput, enable, &mOutputMetadataType);
if (err != OK) {
ALOGE("[%s] storeMetaDataInBuffers (output) failed w/ err %d",
@@ -1913,6 +1919,7 @@
}
// Always try to enable dynamic output buffers on native surface
+ mOutputMetadataType = kMetadataBufferTypeANWBuffer;
err = mOMX->storeMetaDataInBuffers(
mNode, kPortIndexOutput, OMX_TRUE, &mOutputMetadataType);
if (err != OK) {
@@ -3133,6 +3140,20 @@
return err;
}
+ if (compressionFormat == OMX_VIDEO_CodingVP9) {
+ OMX_VIDEO_PARAM_PROFILELEVELTYPE params;
+ InitOMXParams(&params);
+ params.nPortIndex = kPortIndexInput;
+ // Check if VP9 decoder advertises supported profiles.
+ params.nProfileIndex = 0;
+ status_t err = mOMX->getParameter(
+ mNode,
+ OMX_IndexParamVideoProfileLevelQuerySupported,
+ &params,
+ sizeof(params));
+ mIsLegacyVP9Decoder = err != OK;
+ }
+
err = setVideoPortFormatType(
kPortIndexInput, compressionFormat, OMX_COLOR_FormatUnused);
@@ -3192,7 +3213,7 @@
return err;
}
- err = setHDRStaticInfoForVideoDecoder(msg, outputFormat);
+ err = setHDRStaticInfoForVideoCodec(kPortIndexOutput, msg, outputFormat);
if (err == ERROR_UNSUPPORTED) { // support is optional
err = OK;
}
@@ -3392,6 +3413,25 @@
return OK;
}
+status_t ACodec::setHDRStaticInfoForVideoCodec(
+ OMX_U32 portIndex, const sp<AMessage> &configFormat, sp<AMessage> &outputFormat) {
+ CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
+
+ DescribeHDRStaticInfoParams params;
+ InitOMXParams(&params);
+ params.nPortIndex = portIndex;
+
+ HDRStaticInfo *info = &params.sInfo;
+ if (getHDRStaticInfoFromFormat(configFormat, info)) {
+ setHDRStaticInfoIntoFormat(params.sInfo, outputFormat);
+ }
+
+ (void)initDescribeHDRStaticInfoIndex();
+
+ // communicate HDR static Info to codec
+ return setHDRStaticInfo(params);
+}
+
// subsequent initial video encoder setup for surface mode
status_t ACodec::setInitialColorAspectsForVideoEncoderSurfaceAndGetDataSpace(
android_dataspace *dataSpace /* nonnull */) {
@@ -3444,10 +3484,11 @@
return err;
}
-status_t ACodec::getHDRStaticInfoForVideoDecoder(sp<AMessage> &format) {
+status_t ACodec::getHDRStaticInfoForVideoCodec(OMX_U32 portIndex, sp<AMessage> &format) {
+ CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
DescribeHDRStaticInfoParams params;
InitOMXParams(&params);
- params.nPortIndex = kPortIndexOutput;
+ params.nPortIndex = portIndex;
status_t err = getHDRStaticInfo(params);
if (err == OK) {
@@ -3466,23 +3507,6 @@
return err;
}
-status_t ACodec::setHDRStaticInfoForVideoDecoder(
- const sp<AMessage> &configFormat, sp<AMessage> &outputFormat) {
- DescribeHDRStaticInfoParams params;
- InitOMXParams(&params);
- params.nPortIndex = kPortIndexOutput;
-
- HDRStaticInfo *info = &params.sInfo;
- if (getHDRStaticInfoFromFormat(configFormat, info)) {
- setHDRStaticInfoIntoFormat(params.sInfo, outputFormat);
- }
-
- (void)initDescribeHDRStaticInfoIndex();
-
- // communicate HDR static Info to codec
- return setHDRStaticInfo(params);
-}
-
status_t ACodec::setHDRStaticInfo(const DescribeHDRStaticInfoParams &params) {
status_t err = ERROR_UNSUPPORTED;
if (mDescribeHDRStaticInfoIndex) {
@@ -3697,6 +3721,16 @@
err = OK;
}
+ if (err != OK) {
+ return err;
+ }
+
+ err = setHDRStaticInfoForVideoCodec(kPortIndexInput, msg, outputFormat);
+ if (err == ERROR_UNSUPPORTED) { // support is optional
+ ALOGI("[%s] cannot encode HDR static metadata. Ignoring.", mComponentName.c_str());
+ err = OK;
+ }
+
if (err == OK) {
ALOGI("setupVideoEncoder succeeded");
}
@@ -4729,9 +4763,12 @@
if (mUsingNativeWindow) {
notify->setInt32("android._dataspace", dataSpace);
}
- (void)getHDRStaticInfoForVideoDecoder(notify);
+ (void)getHDRStaticInfoForVideoCodec(kPortIndexOutput, notify);
} else {
(void)getInputColorAspectsForVideoEncoder(notify);
+ if (mConfigFormat->contains("hdr-static-info")) {
+ (void)getHDRStaticInfoForVideoCodec(kPortIndexInput, notify);
+ }
}
break;
@@ -5669,6 +5706,12 @@
int32_t isCSD;
if (buffer->meta()->findInt32("csd", &isCSD) && isCSD != 0) {
+ if (mCodec->mIsLegacyVP9Decoder) {
+ ALOGV("[%s] is legacy VP9 decoder. Ignore %u codec specific data",
+ mCodec->mComponentName.c_str(), bufferID);
+ postFillThisBuffer(info);
+ break;
+ }
flags |= OMX_BUFFERFLAG_CODECCONFIG;
}
@@ -5922,18 +5965,15 @@
if (mCodec->usingMetadataOnEncoderOutput()) {
native_handle_t *handle = NULL;
- VideoGrallocMetadata &grallocMeta = *(VideoGrallocMetadata *)info->mData->data();
- VideoNativeMetadata &nativeMeta = *(VideoNativeMetadata *)info->mData->data();
- if (info->mData->size() >= sizeof(grallocMeta)
- && grallocMeta.eType == kMetadataBufferTypeGrallocSource) {
- handle = (native_handle_t *)(uintptr_t)grallocMeta.pHandle;
- } else if (info->mData->size() >= sizeof(nativeMeta)
- && nativeMeta.eType == kMetadataBufferTypeANWBuffer) {
+ VideoNativeHandleMetadata &nativeMeta =
+ *(VideoNativeHandleMetadata *)info->mData->data();
+ if (info->mData->size() >= sizeof(nativeMeta)
+ && nativeMeta.eType == kMetadataBufferTypeNativeHandleSource) {
#ifdef OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS
- // ANativeWindowBuffer is only valid on 32-bit/mediaserver process
+ // handle is only valid on 32-bit/mediaserver process
handle = NULL;
#else
- handle = (native_handle_t *)nativeMeta.pBuffer->handle;
+ handle = (native_handle_t *)nativeMeta.pHandle;
#endif
}
info->mData->meta()->setPointer("handle", handle);
@@ -6620,8 +6660,14 @@
sp<IGraphicBufferProducer> bufferProducer;
if (err == OK) {
+ mCodec->mInputMetadataType = kMetadataBufferTypeANWBuffer;
err = mCodec->mOMX->createInputSurface(
- mCodec->mNode, kPortIndexInput, dataSpace, &bufferProducer, &mCodec->mInputMetadataType);
+ mCodec->mNode, kPortIndexInput, dataSpace, &bufferProducer,
+ &mCodec->mInputMetadataType);
+ // framework uses ANW buffers internally instead of gralloc handles
+ if (mCodec->mInputMetadataType == kMetadataBufferTypeGrallocSource) {
+ mCodec->mInputMetadataType = kMetadataBufferTypeANWBuffer;
+ }
}
if (err == OK) {
@@ -6660,9 +6706,14 @@
notify->setMessage("output-format", mCodec->mOutputFormat);
if (err == OK) {
+ mCodec->mInputMetadataType = kMetadataBufferTypeANWBuffer;
err = mCodec->mOMX->setInputSurface(
mCodec->mNode, kPortIndexInput, surface->getBufferConsumer(),
&mCodec->mInputMetadataType);
+ // framework uses ANW buffers internally instead of gralloc handles
+ if (mCodec->mInputMetadataType == kMetadataBufferTypeGrallocSource) {
+ mCodec->mInputMetadataType = kMetadataBufferTypeANWBuffer;
+ }
}
if (err == OK) {
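
Editor's note: the new mIsLegacyVP9Decoder flag above is set when the component fails the OMX_IndexParamVideoProfileLevelQuerySupported query; such decoders expect VP9 config inline in the stream, so container codec-specific data is dropped later. A stub-based sketch of this probe-and-fallback pattern (queryProfiles stands in for the mOMX->getParameter call; everything here is illustrative):

#include <cstdio>

// Sketch of the capability probe ACodec uses to spot legacy VP9 decoders.
enum Status { OK, ERROR_UNSUPPORTED };

static Status queryProfiles(bool componentSupportsQuery) {
    return componentSupportsQuery ? OK : ERROR_UNSUPPORTED;
}

int main() {
    bool isLegacyVP9Decoder = (queryProfiles(false) != OK);
    bool bufferIsCSD = true;  // buffer flagged "csd" by the extractor
    if (bufferIsCSD && isLegacyVP9Decoder) {
        // Drop the config buffer and request the next input instead,
        // mirroring the postFillThisBuffer(info) path above.
        printf("legacy VP9 decoder: ignoring codec specific data\n");
    }
    return 0;
}
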
diff --git a/media/libstagefright/CameraSource.cpp b/media/libstagefright/CameraSource.cpp
index cb974ae..e087249 100644
--- a/media/libstagefright/CameraSource.cpp
+++ b/media/libstagefright/CameraSource.cpp
@@ -56,6 +56,8 @@
virtual void postDataTimestamp(
nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr);
+ virtual void postRecordingFrameHandleTimestamp(nsecs_t timestamp, native_handle_t* handle);
+
protected:
virtual ~CameraSourceListener();
@@ -100,6 +102,14 @@
}
}
+void CameraSourceListener::postRecordingFrameHandleTimestamp(nsecs_t timestamp,
+ native_handle_t* handle) {
+ sp<CameraSource> source = mSource.promote();
+ if (source.get() != nullptr) {
+ source->recordingFrameHandleCallbackTimestamp(timestamp/1000, handle);
+ }
+}
+
static int32_t getColorFormat(const char* colorFormat) {
if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV420P)) {
return OMX_COLOR_FormatYUV420Planar;
@@ -509,6 +519,14 @@
return err;
}
+void CameraSource::createVideoBufferMemoryHeap(size_t size, uint32_t bufferCount) {
+ mMemoryHeapBase = new MemoryHeapBase(size * bufferCount, 0,
+ "StageFright-CameraSource-BufferHeap");
+ for (uint32_t i = 0; i < bufferCount; i++) {
+ mMemoryBases.push_back(new MemoryBase(mMemoryHeapBase, i * size, size));
+ }
+}
+
status_t CameraSource::initBufferQueue(uint32_t width, uint32_t height,
uint32_t format, android_dataspace dataSpace, uint32_t bufferCount) {
ALOGV("initBufferQueue");
@@ -562,12 +580,7 @@
}
// Create memory heap to store buffers as VideoNativeMetadata.
- size_t bufferSize = sizeof(VideoNativeMetadata);
- mMemoryHeapBase = new MemoryHeapBase(bufferSize * bufferCount, 0,
- "StageFright-CameraSource-BufferHeap");
- for (uint32_t i = 0; i < bufferCount; i++) {
- mMemoryBases.push_back(new MemoryBase(mMemoryHeapBase, i * bufferSize, bufferSize));
- }
+ createVideoBufferMemoryHeap(sizeof(VideoNativeMetadata), bufferCount);
mBufferQueueListener = new BufferQueueListener(mVideoBufferConsumer, this);
res = mBufferQueueListener->run("CameraSource-BufferQueueListener");
@@ -718,6 +731,9 @@
ALOGW("Failed to set video encoder format/dataspace to %d, %d due to %d",
mEncoderFormat, mEncoderDataSpace, err);
}
+
+ // Create memory heap to store buffers as VideoNativeMetadata.
+ createVideoBufferMemoryHeap(sizeof(VideoNativeHandleMetadata), kDefaultVideoBufferCount);
}
err = OK;
@@ -920,12 +936,33 @@
mVideoBufferConsumer->releaseBuffer(buffer);
mMemoryBases.push_back(frame);
mMemoryBaseAvailableCond.signal();
- } else if (mCameraRecordingProxy != NULL) {
- mCameraRecordingProxy->releaseRecordingFrame(frame);
- } else if (mCamera != NULL) {
- int64_t token = IPCThreadState::self()->clearCallingIdentity();
- mCamera->releaseRecordingFrame(frame);
- IPCThreadState::self()->restoreCallingIdentity(token);
+ } else {
+ native_handle_t* handle = nullptr;
+
+ // Check if frame contains a VideoNativeHandleMetadata.
+ if (frame->size() == sizeof(VideoNativeHandleMetadata)) {
+ VideoNativeHandleMetadata *metadata =
+ (VideoNativeHandleMetadata*)(frame->pointer());
+ if (metadata->eType == kMetadataBufferTypeNativeHandleSource) {
+ handle = metadata->pHandle;
+ }
+ }
+
+ if (handle != nullptr) {
+ // Frame contains a VideoNativeHandleMetadata. Send the handle back to camera.
+ releaseRecordingFrameHandle(handle);
+ mMemoryBases.push_back(frame);
+ mMemoryBaseAvailableCond.signal();
+ } else if (mCameraRecordingProxy != nullptr) {
+ // mCamera is created by application. Return the frame back to camera via camera
+ // recording proxy.
+ mCameraRecordingProxy->releaseRecordingFrame(frame);
+ } else if (mCamera != nullptr) {
+ // mCamera is created by CameraSource. Return the frame directly back to camera.
+ int64_t token = IPCThreadState::self()->clearCallingIdentity();
+ mCamera->releaseRecordingFrame(frame);
+ IPCThreadState::self()->restoreCallingIdentity(token);
+ }
}
}
@@ -1073,6 +1110,53 @@
mFrameAvailableCondition.signal();
}
+void CameraSource::releaseRecordingFrameHandle(native_handle_t* handle) {
+ if (mCameraRecordingProxy != nullptr) {
+ mCameraRecordingProxy->releaseRecordingFrameHandle(handle);
+ } else if (mCamera != nullptr) {
+ int64_t token = IPCThreadState::self()->clearCallingIdentity();
+ mCamera->releaseRecordingFrameHandle(handle);
+ IPCThreadState::self()->restoreCallingIdentity(token);
+ }
+}
+
+void CameraSource::recordingFrameHandleCallbackTimestamp(int64_t timestampUs,
+ native_handle_t* handle) {
+ ALOGV("%s: timestamp %lld us", __FUNCTION__, (long long)timestampUs);
+ Mutex::Autolock autoLock(mLock);
+ if (handle == nullptr) return;
+
+ if (shouldSkipFrameLocked(timestampUs)) {
+ releaseRecordingFrameHandle(handle);
+ return;
+ }
+
+ while (mMemoryBases.empty()) {
+ if (mMemoryBaseAvailableCond.waitRelative(mLock, kMemoryBaseAvailableTimeoutNs) ==
+ TIMED_OUT) {
+ ALOGW("Waiting on an available memory base timed out. Dropping a recording frame.");
+ releaseRecordingFrameHandle(handle);
+ return;
+ }
+ }
+
+ ++mNumFramesReceived;
+
+ sp<IMemory> data = *mMemoryBases.begin();
+ mMemoryBases.erase(mMemoryBases.begin());
+
+ // Wrap native handle in sp<IMemory> so it can be pushed to mFramesReceived.
+ VideoNativeHandleMetadata *metadata = (VideoNativeHandleMetadata*)(data->pointer());
+ metadata->eType = kMetadataBufferTypeNativeHandleSource;
+ metadata->pHandle = handle;
+
+ mFramesReceived.push_back(data);
+ int64_t timeUs = mStartTimeUs + (timestampUs - mFirstFrameTimeUs);
+ mFrameTimes.push_back(timeUs);
+ ALOGV("initial delay: %" PRId64 ", current time stamp: %" PRId64, mStartTimeUs, timeUs);
+ mFrameAvailableCondition.signal();
+}
+
CameraSource::BufferQueueListener::BufferQueueListener(const sp<BufferItemConsumer>& consumer,
const sp<CameraSource>& cameraSource) {
mConsumer = consumer;
@@ -1160,13 +1244,19 @@
mFrameAvailableCondition.signal();
}
-bool CameraSource::isMetaDataStoredInVideoBuffers() const {
- ALOGV("isMetaDataStoredInVideoBuffers");
+MetadataBufferType CameraSource::metaDataStoredInVideoBuffers() const {
+ ALOGV("metaDataStoredInVideoBuffers");
// Output buffers will contain metadata if camera sends us buffer in metadata mode or via
// buffer queue.
- return (mVideoBufferMode == hardware::ICamera::VIDEO_BUFFER_MODE_DATA_CALLBACK_METADATA ||
- mVideoBufferMode == hardware::ICamera::VIDEO_BUFFER_MODE_BUFFER_QUEUE);
+ switch (mVideoBufferMode) {
+ case hardware::ICamera::VIDEO_BUFFER_MODE_DATA_CALLBACK_METADATA:
+ return kMetadataBufferTypeNativeHandleSource;
+ case hardware::ICamera::VIDEO_BUFFER_MODE_BUFFER_QUEUE:
+ return kMetadataBufferTypeANWBuffer;
+ default:
+ return kMetadataBufferTypeInvalid;
+ }
}
CameraSource::ProxyListener::ProxyListener(const sp<CameraSource>& source) {
@@ -1178,6 +1268,11 @@
mSource->dataCallbackTimestamp(timestamp / 1000, msgType, dataPtr);
}
+void CameraSource::ProxyListener::recordingFrameHandleCallbackTimestamp(nsecs_t timestamp,
+ native_handle_t* handle) {
+ mSource->recordingFrameHandleCallbackTimestamp(timestamp / 1000, handle);
+}
+
void CameraSource::DeathNotifier::binderDied(const wp<IBinder>& who __unused) {
ALOGI("Camera recording proxy died");
}
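
Editor's note: the metadata-callback path above wraps each incoming native_handle_t in a VideoNativeHandleMetadata stored in a pre-allocated IMemory slot, and the release path unwraps it before handing the handle back to the camera. A simplified round trip, with the Android types stubbed out (struct layouts and the enum value are illustrative) so the sketch stands alone:

#include <cstdio>

struct native_handle_t { int numFds; };
enum MetadataBufferType { kMetadataBufferTypeNativeHandleSource = 3 };
struct VideoNativeHandleMetadata {
    MetadataBufferType eType;
    native_handle_t *pHandle;
};

int main() {
    // One pre-allocated slot, as createVideoBufferMemoryHeap provides.
    alignas(VideoNativeHandleMetadata) char slot[sizeof(VideoNativeHandleMetadata)];
    native_handle_t handle = { 2 };

    // Wrap: what recordingFrameHandleCallbackTimestamp does.
    VideoNativeHandleMetadata *meta = (VideoNativeHandleMetadata *)slot;
    meta->eType = kMetadataBufferTypeNativeHandleSource;
    meta->pHandle = &handle;

    // Unwrap: what the release path does before returning the handle.
    if (meta->eType == kMetadataBufferTypeNativeHandleSource) {
        printf("release handle with %d fds\n", meta->pHandle->numFds);
    }
    return 0;
}
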
diff --git a/media/libstagefright/CameraSourceTimeLapse.cpp b/media/libstagefright/CameraSourceTimeLapse.cpp
index d52567c..390c556 100644
--- a/media/libstagefright/CameraSourceTimeLapse.cpp
+++ b/media/libstagefright/CameraSourceTimeLapse.cpp
@@ -308,6 +308,13 @@
CameraSource::dataCallbackTimestamp(timestampUs, msgType, data);
}
+void CameraSourceTimeLapse::recordingFrameHandleCallbackTimestamp(int64_t timestampUs,
+ native_handle_t* handle) {
+ ALOGV("recordingFrameHandleCallbackTimestamp");
+ mSkipCurrentFrame = skipFrameAndModifyTimeStamp(&timestampUs);
+ CameraSource::recordingFrameHandleCallbackTimestamp(timestampUs, handle);
+}
+
void CameraSourceTimeLapse::processBufferQueueFrame(BufferItem& buffer) {
ALOGV("processBufferQueueFrame");
int64_t timestampUs = buffer.mTimestamp / 1000;
diff --git a/media/libstagefright/MPEG4Extractor.cpp b/media/libstagefright/MPEG4Extractor.cpp
index f5549e4..f296d9a 100644
--- a/media/libstagefright/MPEG4Extractor.cpp
+++ b/media/libstagefright/MPEG4Extractor.cpp
@@ -1560,12 +1560,29 @@
// Calculate average frame rate.
if (!strncasecmp("video/", mime, 6)) {
size_t nSamples = mLastTrack->sampleTable->countSamples();
- int64_t durationUs;
- if (mLastTrack->meta->findInt64(kKeyDuration, &durationUs)) {
- if (durationUs > 0) {
- int32_t frameRate = (nSamples * 1000000LL +
- (durationUs >> 1)) / durationUs;
- mLastTrack->meta->setInt32(kKeyFrameRate, frameRate);
+ if (nSamples == 0) {
+ int32_t trackId;
+ if (mLastTrack->meta->findInt32(kKeyTrackID, &trackId)) {
+ for (size_t i = 0; i < mTrex.size(); i++) {
+ Trex *t = &mTrex.editItemAt(i);
+ if (t->track_ID == (uint32_t) trackId) {
+ if (t->default_sample_duration > 0) {
+ int32_t frameRate =
+ mLastTrack->timescale / t->default_sample_duration;
+ mLastTrack->meta->setInt32(kKeyFrameRate, frameRate);
+ }
+ break;
+ }
+ }
+ }
+ } else {
+ int64_t durationUs;
+ if (mLastTrack->meta->findInt64(kKeyDuration, &durationUs)) {
+ if (durationUs > 0) {
+ int32_t frameRate = (nSamples * 1000000LL +
+ (durationUs >> 1)) / durationUs;
+ mLastTrack->meta->setInt32(kKeyFrameRate, frameRate);
+ }
}
}
}
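
Editor's note: for fragmented MP4 tracks the sample table is empty, so the new branch above derives an approximate frame rate from the trex box defaults. A quick check of the arithmetic with illustrative values:

#include <cstdint>
#include <cstdio>

int main() {
    // 29.97 fps NTSC timing expressed as trex defaults.
    uint32_t timescale = 30000;
    uint32_t default_sample_duration = 1001;
    int32_t frameRate = timescale / default_sample_duration;
    printf("frameRate = %d\n", frameRate);  // 29: integer truncation
    return 0;
}
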
@@ -2929,7 +2946,7 @@
int32_t trackId;
if (track->meta->findInt32(kKeyTrackID, &trackId)) {
for (size_t i = 0; i < mTrex.size(); i++) {
- Trex *t = &mTrex.editItemAt(index);
+ Trex *t = &mTrex.editItemAt(i);
if (t->track_ID == (uint32_t) trackId) {
trex = t;
break;
diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp
index 7a8f4c0..db20590 100644
--- a/media/libstagefright/MPEG4Writer.cpp
+++ b/media/libstagefright/MPEG4Writer.cpp
@@ -2261,6 +2261,7 @@
HevcParameterSets paramSets;
if (parseHEVCCodecSpecificData(data, size, paramSets) != OK) {
+ ALOGE("failed parsing codec specific data");
return ERROR_MALFORMED;
}
@@ -2271,8 +2272,9 @@
return NO_MEMORY;
}
status_t err = paramSets.makeHvcc((uint8_t *)mCodecSpecificData,
- &mCodecSpecificDataSize, mOwner->useNalLengthFour() ? 5 : 2);
+ &mCodecSpecificDataSize, mOwner->useNalLengthFour() ? 4 : 2);
if (err != OK) {
+ ALOGE("failed constructing HVCC atom");
return err;
}
diff --git a/media/libstagefright/MediaCodecSource.cpp b/media/libstagefright/MediaCodecSource.cpp
index 90c56f4..0aafa6bd 100644
--- a/media/libstagefright/MediaCodecSource.cpp
+++ b/media/libstagefright/MediaCodecSource.cpp
@@ -449,10 +449,6 @@
mCodecLooper->setName("codec_looper");
mCodecLooper->start();
- if (mFlags & FLAG_USE_METADATA_INPUT) {
- mOutputFormat->setInt32("store-metadata-in-buffers", 1);
- }
-
if (mFlags & FLAG_USE_SURFACE_INPUT) {
mOutputFormat->setInt32("create-input-buffers-suspended", 1);
}
diff --git a/media/libstagefright/OMXClient.cpp b/media/libstagefright/OMXClient.cpp
index a523d0e..4dde5f6 100644
--- a/media/libstagefright/OMXClient.cpp
+++ b/media/libstagefright/OMXClient.cpp
@@ -111,6 +111,10 @@
node_id node, OMX_U32 port_index,
const sp<GraphicBuffer> &graphicBuffer, buffer_id buffer);
+ virtual status_t updateNativeHandleInMeta(
+ node_id node, OMX_U32 port_index,
+ const sp<NativeHandle> &nativeHandle, buffer_id buffer);
+
virtual status_t createInputSurface(
node_id node, OMX_U32 port_index, android_dataspace dataSpace,
sp<IGraphicBufferProducer> *bufferProducer, MetadataBufferType *type);
@@ -127,7 +131,7 @@
virtual status_t allocateSecureBuffer(
node_id node, OMX_U32 port_index, size_t size,
- buffer_id *buffer, void **buffer_data, native_handle_t **native_handle);
+ buffer_id *buffer, void **buffer_data, sp<NativeHandle> *native_handle);
virtual status_t allocateBufferWithBackup(
node_id node, OMX_U32 port_index, const sp<IMemory> &params,
@@ -387,6 +391,13 @@
node, port_index, graphicBuffer, buffer);
}
+status_t MuxOMX::updateNativeHandleInMeta(
+ node_id node, OMX_U32 port_index,
+ const sp<NativeHandle> &nativeHandle, buffer_id buffer) {
+ return getOMX(node)->updateNativeHandleInMeta(
+ node, port_index, nativeHandle, buffer);
+}
+
status_t MuxOMX::createInputSurface(
node_id node, OMX_U32 port_index, android_dataspace dataSpace,
sp<IGraphicBufferProducer> *bufferProducer, MetadataBufferType *type) {
@@ -415,7 +426,7 @@
status_t MuxOMX::allocateSecureBuffer(
node_id node, OMX_U32 port_index, size_t size,
- buffer_id *buffer, void **buffer_data, native_handle_t **native_handle) {
+ buffer_id *buffer, void **buffer_data, sp<NativeHandle> *native_handle) {
return getOMX(node)->allocateSecureBuffer(
node, port_index, size, buffer, buffer_data, native_handle);
}
diff --git a/media/libstagefright/OggExtractor.cpp b/media/libstagefright/OggExtractor.cpp
index 9162f80..37e8e9c 100644
--- a/media/libstagefright/OggExtractor.cpp
+++ b/media/libstagefright/OggExtractor.cpp
@@ -713,6 +713,7 @@
packetSize);
if (n < (ssize_t)packetSize) {
+ buffer->release();
ALOGV("failed to read %zu bytes at %#016llx, got %zd bytes",
packetSize, (long long)dataOffset, n);
return ERROR_IO;
diff --git a/media/libstagefright/SampleTable.cpp b/media/libstagefright/SampleTable.cpp
index 1bdd812..542a06d 100644
--- a/media/libstagefright/SampleTable.cpp
+++ b/media/libstagefright/SampleTable.cpp
@@ -121,8 +121,9 @@
mSampleSizeFieldSize(0),
mDefaultSampleSize(0),
mNumSampleSizes(0),
+ mHasTimeToSample(false),
mTimeToSampleCount(0),
- mTimeToSample(NULL),
+ mTimeToSample(),
mSampleTimeEntries(NULL),
mCompositionTimeDeltaEntries(NULL),
mNumCompositionTimeDeltaEntries(0),
@@ -151,9 +152,6 @@
delete[] mSampleTimeEntries;
mSampleTimeEntries = NULL;
- delete[] mTimeToSample;
- mTimeToSample = NULL;
-
delete mSampleIterator;
mSampleIterator = NULL;
}
@@ -162,7 +160,7 @@
return mChunkOffsetOffset >= 0
&& mSampleToChunkOffset >= 0
&& mSampleSizeOffset >= 0
- && mTimeToSample != NULL;
+ && mHasTimeToSample;
}
status_t SampleTable::setChunkOffsetParams(
@@ -336,7 +334,7 @@
status_t SampleTable::setTimeToSampleParams(
off64_t data_offset, size_t data_size) {
- if (mTimeToSample != NULL || data_size < 8) {
+ if (mHasTimeToSample || data_size < 8) {
return ERROR_MALFORMED;
}
@@ -352,24 +350,31 @@
}
mTimeToSampleCount = U32_AT(&header[4]);
- uint64_t allocSize = (uint64_t)mTimeToSampleCount * 2 * sizeof(uint32_t);
- if (allocSize > UINT32_MAX) {
+ if ((uint64_t)mTimeToSampleCount >
+ (uint64_t)UINT32_MAX / (2 * sizeof(uint32_t))) {
+ // Choose this bound because
+ // 1) 2 * sizeof(uint32_t) is the amount of memory needed for one
+ // time-to-sample entry in the time-to-sample table.
+ // 2) mTimeToSampleCount is the number of entries of the time-to-sample
+ // table.
+ // 3) We hope that the table size does not exceed UINT32_MAX.
+ ALOGE(" Error: Time-to-sample table size too large.");
return ERROR_OUT_OF_RANGE;
}
- mTimeToSample = new (std::nothrow) uint32_t[mTimeToSampleCount * 2];
- if (!mTimeToSample)
- return ERROR_OUT_OF_RANGE;
- size_t size = sizeof(uint32_t) * mTimeToSampleCount * 2;
- if (mDataSource->readAt(
- data_offset + 8, mTimeToSample, size) < (ssize_t)size) {
+ // Note: At this point, we know that mTimeToSampleCount * 2 will not
+ // overflow because of the above condition.
+ if (!mDataSource->getVector(data_offset + 8, &mTimeToSample,
+ mTimeToSampleCount * 2)) {
+ ALOGE(" Error: Incomplete data read for time-to-sample table.");
return ERROR_IO;
}
- for (uint32_t i = 0; i < mTimeToSampleCount * 2; ++i) {
- mTimeToSample[i] = ntohl(mTimeToSample[i]);
+ for (size_t i = 0; i < mTimeToSample.size(); ++i) {
+ mTimeToSample.editItemAt(i) = ntohl(mTimeToSample[i]);
}
+ mHasTimeToSample = true;
return OK;
}
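
Editor's note: the rewritten setTimeToSampleParams bounds mTimeToSampleCount before sizing anything (each entry is two uint32_t) and reads into a Vector instead of a raw new[] array. The same guard-then-swap sequence as a standalone sketch, with the data source faked by a big-endian array:

#include <arpa/inet.h>  // ntohl, htonl
#include <cstdint>
#include <cstdio>
#include <vector>

int main() {
    // Guard the entry count before any allocation, as the fix does.
    uint32_t entryCount = 1;  // one (sample_count, sample_delta) pair
    if ((uint64_t)entryCount > UINT32_MAX / (2 * sizeof(uint32_t))) {
        fprintf(stderr, "time-to-sample table too large\n");
        return 1;
    }

    // Fake big-endian table data, then byte-swap in place.
    uint32_t be[] = { htonl(100), htonl(25) };
    std::vector<uint32_t> table(be, be + 2 * entryCount);
    for (size_t i = 0; i < table.size(); ++i) table[i] = ntohl(table[i]);
    printf("sampleCount=%u sampleDelta=%u\n", table[0], table[1]);
    return 0;
}
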
diff --git a/media/libstagefright/SurfaceMediaSource.cpp b/media/libstagefright/SurfaceMediaSource.cpp
index e4bf67a..15ff569 100644
--- a/media/libstagefright/SurfaceMediaSource.cpp
+++ b/media/libstagefright/SurfaceMediaSource.cpp
@@ -23,6 +23,7 @@
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MetaData.h>
#include <OMX_IVCommon.h>
+#include <media/hardware/HardwareAPI.h>
#include <media/hardware/MetadataBufferType.h>
#include <ui/GraphicBuffer.h>
@@ -126,9 +127,9 @@
return OK;
}
-bool SurfaceMediaSource::isMetaDataStoredInVideoBuffers() const {
+MetadataBufferType SurfaceMediaSource::metaDataStoredInVideoBuffers() const {
ALOGV("isMetaDataStoredInVideoBuffers");
- return true;
+ return kMetadataBufferTypeANWBuffer;
}
int32_t SurfaceMediaSource::getFrameRate( ) const {
@@ -250,29 +251,19 @@
}
// Pass the data to the MediaBuffer. Pass in only the metadata
-// The metadata passed consists of two parts:
-// 1. First, there is an integer indicating that it is a GRAlloc
-// source (kMetadataBufferTypeGrallocSource)
-// 2. This is followed by the buffer_handle_t that is a handle to the
-// GRalloc buffer. The encoder needs to interpret this GRalloc handle
-// and encode the frames.
-// --------------------------------------------------------------
-// | kMetadataBufferTypeGrallocSource | sizeof(buffer_handle_t) |
-// --------------------------------------------------------------
// Note: Call only when you have the lock
-static void passMetadataBuffer(MediaBuffer **buffer,
- buffer_handle_t bufferHandle) {
- *buffer = new MediaBuffer(4 + sizeof(buffer_handle_t));
- char *data = (char *)(*buffer)->data();
+void SurfaceMediaSource::passMetadataBuffer_l(MediaBuffer **buffer,
+ ANativeWindowBuffer *bufferHandle) const {
+ *buffer = new MediaBuffer(sizeof(VideoNativeMetadata));
+ VideoNativeMetadata *data = (VideoNativeMetadata *)(*buffer)->data();
if (data == NULL) {
ALOGE("Cannot allocate memory for metadata buffer!");
return;
}
- OMX_U32 type = kMetadataBufferTypeGrallocSource;
- memcpy(data, &type, 4);
- memcpy(data + 4, &bufferHandle, sizeof(buffer_handle_t));
-
- ALOGV("handle = %p, , offset = %zu, length = %zu",
+ data->eType = metaDataStoredInVideoBuffers();
+ data->pBuffer = bufferHandle;
+ data->nFenceFd = -1;
+ ALOGV("handle = %p, offset = %zu, length = %zu",
bufferHandle, (*buffer)->range_length(), (*buffer)->range_offset());
}
@@ -361,7 +352,7 @@
mNumFramesEncoded++;
// Pass the data to the MediaBuffer. Pass in only the metadata
- passMetadataBuffer(buffer, mSlots[mCurrentSlot].mGraphicBuffer->handle);
+ passMetadataBuffer_l(buffer, mSlots[mCurrentSlot].mGraphicBuffer->getNativeBuffer());
(*buffer)->setObserver(this);
(*buffer)->add_ref();
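
Editor's note: passMetadataBuffer_l above now emits a VideoNativeMetadata (type tag, ANativeWindowBuffer pointer, fence fd) instead of the old raw gralloc-handle layout. A stubbed sketch of the struct it fills (field layout and the enum value are illustrative):

#include <cstdio>

struct ANativeWindowBuffer { int width, height; };
enum MetadataBufferType { kMetadataBufferTypeANWBuffer = 2 };
struct VideoNativeMetadata {
    MetadataBufferType eType;
    ANativeWindowBuffer *pBuffer;
    int nFenceFd;
};

int main() {
    ANativeWindowBuffer buf = { 1280, 720 };

    // What passMetadataBuffer_l now writes into the MediaBuffer.
    VideoNativeMetadata data;
    data.eType = kMetadataBufferTypeANWBuffer;
    data.pBuffer = &buf;
    data.nFenceFd = -1;  // no acquire fence to wait on

    printf("type=%d buffer=%dx%d fence=%d\n",
           data.eType, data.pBuffer->width, data.pBuffer->height,
           data.nFenceFd);
    return 0;
}
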
diff --git a/media/libstagefright/codecs/amrwb/src/pvamrwbdecoder_basic_op_cequivalent.h b/media/libstagefright/codecs/amrwb/src/pvamrwbdecoder_basic_op_cequivalent.h
index 7fd680d..3c7590c 100644
--- a/media/libstagefright/codecs/amrwb/src/pvamrwbdecoder_basic_op_cequivalent.h
+++ b/media/libstagefright/codecs/amrwb/src/pvamrwbdecoder_basic_op_cequivalent.h
@@ -206,16 +206,18 @@
{
int32 L_var_out;
- L_var_out = L_var1 + L_var2;
-
- if (((L_var1 ^ L_var2) & MIN_32) == 0) /* same sign ? */
- {
- if ((L_var_out ^ L_var1) & MIN_32) /* addition matches sign ? */
- {
- L_var_out = (L_var1 >> 31) ^ MAX_32;
+ //L_var_out = L_var1 + L_var2;
+ if (L_var2 < 0) {
+ if (L_var1 < MIN_32 - L_var2) {
+ return MIN_32;
+ }
+ } else {
+ if (L_var1 > MAX_32 - L_var2) {
+ return MAX_32;
}
}
- return (L_var_out);
+
+ return L_var1 + L_var2;
}
@@ -248,142 +250,24 @@
__inline int32 sub_int32(int32 L_var1, int32 L_var2)
{
- int32 L_var_out;
-
- L_var_out = L_var1 - L_var2;
-
- if (((L_var1 ^ L_var2) & MIN_32) != 0) /* different sign ? */
- {
- if ((L_var_out ^ L_var1) & MIN_32) /* difference matches sign ? */
- {
- L_var_out = (L_var1 >> 31) ^ MAX_32;
+ //L_var_out = L_var1 - L_var2;
+ if (L_var2 < 0) {
+ if (L_var1 > MAX_32 + L_var2) {
+ return MAX_32;
}
- }
- return (L_var_out);
- }
-
-
-
- /*----------------------------------------------------------------------------
-
- Function Name : mac_16by16_to_int32
-
- Multiply var1 by var2 and shift the result left by 1. Add the 32 bit
- result to L_var3 with saturation, return a 32 bit result:
- L_mac(L_var3,var1,var2) = L_add(L_var3,L_mult(var1,var2)).
-
- Inputs :
-
- L_var3 32 bit long signed integer (int32) whose value falls in the
- range : 0x8000 0000 <= L_var3 <= 0x7fff ffff.
-
- var1
- 16 bit short signed integer (int16) whose value falls in the
- range : 0xffff 8000 <= var1 <= 0x0000 7fff.
-
- var2
- 16 bit short signed integer (int16) whose value falls in the
- range : 0xffff 8000 <= var1 <= 0x0000 7fff.
-
-
- Return Value :
- 32 bit long signed integer (int32) whose value falls in the
- range : 0x8000 0000 <= L_var_out <= 0x7fff ffff.
-
- ----------------------------------------------------------------------------*/
-
-
- __inline int32 mac_16by16_to_int32(int32 L_var3, int16 var1, int16 var2)
- {
- int32 L_var_out;
- int32 L_mul;
-
- L_mul = ((int32) var1 * (int32) var2);
-
- if (L_mul != 0x40000000)
- {
- L_mul <<= 1;
- }
- else
- {
- L_mul = MAX_32; /* saturation */
- }
-
- L_var_out = L_var3 + L_mul;
-
- if (((L_mul ^ L_var3) & MIN_32) == 0) /* same sign ? */
- {
- if ((L_var_out ^ L_var3) & MIN_32) /* addition matches sign ? */
- {
- L_var_out = (L_var3 >> 31) ^ MAX_32;
+ } else {
+ if (L_var1 < MIN_32 + L_var2) {
+ return MIN_32;
}
}
- return (L_var_out);
+ return L_var1 - L_var2;
}
/*----------------------------------------------------------------------------
- Function Name : msu_16by16_from_int32
-
- Multiply var1 by var2 and shift the result left by 1. Subtract the 32 bit
- result to L_var3 with saturation, return a 32 bit result:
- L_msu(L_var3,var1,var2) = L_sub(L_var3,L_mult(var1,var2)).
-
- Inputs :
-
- L_var3 32 bit long signed integer (int32) whose value falls in the
- range : 0x8000 0000 <= L_var3 <= 0x7fff ffff.
-
- var1
- 16 bit short signed integer (int16) whose value falls in the
- range : 0xffff 8000 <= var1 <= 0x0000 7fff.
-
- var2
- 16 bit short signed integer (int16) whose value falls in the
- range : 0xffff 8000 <= var1 <= 0x0000 7fff.
-
-
- Return Value :
- 32 bit long signed integer (int32) whose value falls in the
- range : 0x8000 0000 <= L_var_out <= 0x7fff ffff.
-
- ----------------------------------------------------------------------------*/
-
- __inline int32 msu_16by16_from_int32(int32 L_var3, int16 var1, int16 var2)
- {
- int32 L_var_out;
- int32 L_mul;
-
- L_mul = ((int32) var1 * (int32) var2);
-
- if (L_mul != 0x40000000)
- {
- L_mul <<= 1;
- }
- else
- {
- L_mul = MAX_32; /* saturation */
- }
-
- L_var_out = L_var3 - L_mul;
-
- if (((L_mul ^ L_var3) & MIN_32) != 0) /* different sign ? */
- {
- if ((L_var_out ^ L_var3) & MIN_32) /* difference matches sign ? */
- {
- L_var_out = (L_var3 >> 31) ^ MAX_32;
- }
- }
-
- return (L_var_out);
- }
-
-
- /*----------------------------------------------------------------------------
-
Function Name : mul_16by16_to_int32
mul_16by16_to_int32 is the 32 bit result of the multiplication of var1
@@ -428,6 +312,75 @@
/*----------------------------------------------------------------------------
+ Function Name : mac_16by16_to_int32
+
+ Multiply var1 by var2 and shift the result left by 1. Add the 32 bit
+ result to L_var3 with saturation, return a 32 bit result:
+ L_mac(L_var3,var1,var2) = L_add(L_var3,L_mult(var1,var2)).
+
+ Inputs :
+
+ L_var3 32 bit long signed integer (int32) whose value falls in the
+ range : 0x8000 0000 <= L_var3 <= 0x7fff ffff.
+
+ var1
+ 16 bit short signed integer (int16) whose value falls in the
+ range : 0xffff 8000 <= var1 <= 0x0000 7fff.
+
+ var2
+ 16 bit short signed integer (int16) whose value falls in the
+ range : 0xffff 8000 <= var1 <= 0x0000 7fff.
+
+
+ Return Value :
+ 32 bit long signed integer (int32) whose value falls in the
+ range : 0x8000 0000 <= L_var_out <= 0x7fff ffff.
+
+ ----------------------------------------------------------------------------*/
+
+
+ __inline int32 mac_16by16_to_int32(int32 L_var3, int16 var1, int16 var2)
+ {
+ return add_int32(L_var3, mul_16by16_to_int32(var1, var2));
+ }
+
+
+ /*----------------------------------------------------------------------------
+
+ Function Name : msu_16by16_from_int32
+
+ Multiply var1 by var2 and shift the result left by 1. Subtract the 32 bit
+ result to L_var3 with saturation, return a 32 bit result:
+ L_msu(L_var3,var1,var2) = L_sub(L_var3,L_mult(var1,var2)).
+
+ Inputs :
+
+ L_var3 32 bit long signed integer (int32) whose value falls in the
+ range : 0x8000 0000 <= L_var3 <= 0x7fff ffff.
+
+ var1
+ 16 bit short signed integer (int16) whose value falls in the
+ range : 0xffff 8000 <= var1 <= 0x0000 7fff.
+
+ var2
+ 16 bit short signed integer (int16) whose value falls in the
+ range : 0xffff 8000 <= var1 <= 0x0000 7fff.
+
+
+ Return Value :
+ 32 bit long signed integer (int32) whose value falls in the
+ range : 0x8000 0000 <= L_var_out <= 0x7fff ffff.
+
+ ----------------------------------------------------------------------------*/
+
+ __inline int32 msu_16by16_from_int32(int32 L_var3, int16 var1, int16 var2)
+ {
+ return sub_int32(L_var3, mul_16by16_to_int32(var1, var2));
+ }
+
+
+ /*----------------------------------------------------------------------------
+
Function Name : amr_wb_round
Round the lower 16 bits of the 32 bit input number into the MS 16 bits
@@ -447,7 +400,7 @@
----------------------------------------------------------------------------*/
__inline int16 amr_wb_round(int32 L_var1)
{
- if (L_var1 != MAX_32)
+ if (L_var1 <= (MAX_32 - 0x00008000L))
{
L_var1 += 0x00008000L;
}
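
Editor's note: the rewritten add_int32/sub_int32 above test against the representable range before doing the arithmetic, because computing L_var1 + L_var2 first overflows a signed integer (undefined behavior) on exactly the inputs that should saturate. The same pattern as a self-contained sketch, with INT32_MIN/INT32_MAX standing in for the codec's MIN_32/MAX_32:

#include <cstdint>
#include <cstdio>

// Saturating 32-bit add without signed overflow: test the range first.
static int32_t sat_add32(int32_t a, int32_t b) {
    if (b < 0) {
        if (a < INT32_MIN - b) return INT32_MIN;  // would underflow
    } else {
        if (a > INT32_MAX - b) return INT32_MAX;  // would overflow
    }
    return a + b;  // provably in range now
}

int main() {
    printf("%d\n", sat_add32(INT32_MAX, 1));   // clamps to INT32_MAX
    printf("%d\n", sat_add32(INT32_MIN, -1));  // clamps to INT32_MIN
    printf("%d\n", sat_add32(100, 23));        // 123
    return 0;
}
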
diff --git a/media/libstagefright/codecs/amrwbenc/src/c4t64fx.c b/media/libstagefright/codecs/amrwbenc/src/c4t64fx.c
index 4d877f1..8cebb09 100644
--- a/media/libstagefright/codecs/amrwbenc/src/c4t64fx.c
+++ b/media/libstagefright/codecs/amrwbenc/src/c4t64fx.c
@@ -857,23 +857,23 @@
p2 = &vec[pos];
for (j=pos;j < L_SUBFR; j++)
{
- L_sum1 += *p1 * *p2;
+ L_sum1 = L_add(L_sum1, *p1 * *p2);
p2-=3;
- L_sum2 += *p1++ * *p2;
+ L_sum2 = L_add(L_sum2, *p1++ * *p2);
p2+=4;
}
p2-=3;
- L_sum2 += *p1++ * *p2++;
- L_sum2 += *p1++ * *p2++;
- L_sum2 += *p1++ * *p2++;
+ L_sum2 = L_add(L_sum2, *p1++ * *p2++);
+ L_sum2 = L_add(L_sum2, *p1++ * *p2++);
+ L_sum2 = L_add(L_sum2, *p1++ * *p2++);
- L_sum1 = (L_sum1 << 2);
- L_sum2 = (L_sum2 << 2);
+ L_sum1 = L_shl(L_sum1, 2);
+ L_sum2 = L_shl(L_sum2, 2);
- corr = vo_round(L_sum1);
- *cor_x++ = vo_mult(corr, sign[pos]) + (*p0++);
- corr = vo_round(L_sum2);
- *cor_y++ = vo_mult(corr, sign[pos-3]) + (*p3++);
+ corr = voround(L_sum1);
+ *cor_x++ = mult(corr, sign[pos]) + (*p0++);
+ corr = voround(L_sum2);
+ *cor_y++ = mult(corr, sign[pos-3]) + (*p3++);
pos += STEP;
L_sum1 = L_sum2 = 0L;
@@ -881,23 +881,23 @@
p2 = &vec[pos];
for (j=pos;j < L_SUBFR; j++)
{
- L_sum1 += *p1 * *p2;
+ L_sum1 = L_add(L_sum1, *p1 * *p2);
p2-=3;
- L_sum2 += *p1++ * *p2;
+ L_sum2 = L_add(L_sum2, *p1++ * *p2);
p2+=4;
}
p2-=3;
- L_sum2 += *p1++ * *p2++;
- L_sum2 += *p1++ * *p2++;
- L_sum2 += *p1++ * *p2++;
+ L_sum2 = L_add(L_sum2, *p1++ * *p2++);
+ L_sum2 = L_add(L_sum2, *p1++ * *p2++);
+ L_sum2 = L_add(L_sum2, *p1++ * *p2++);
- L_sum1 = (L_sum1 << 2);
- L_sum2 = (L_sum2 << 2);
+ L_sum1 = L_shl(L_sum1, 2);
+ L_sum2 = L_shl(L_sum2, 2);
- corr = vo_round(L_sum1);
- *cor_x++ = vo_mult(corr, sign[pos]) + (*p0++);
- corr = vo_round(L_sum2);
- *cor_y++ = vo_mult(corr, sign[pos-3]) + (*p3++);
+ corr = voround(L_sum1);
+ *cor_x++ = mult(corr, sign[pos]) + (*p0++);
+ corr = voround(L_sum2);
+ *cor_y++ = mult(corr, sign[pos-3]) + (*p3++);
pos += STEP;
}
return;
@@ -929,16 +929,16 @@
p2 = &vec[pos];
for (j=62-pos ;j >= 0; j--)
{
- L_sum1 += *p1 * *p2++;
- L_sum2 += *p1++ * *p2;
+ L_sum1 = L_add(L_sum1, *p1 * *p2++);
+ L_sum2 = L_add(L_sum2, *p1++ * *p2);
}
- L_sum1 += *p1 * *p2;
- L_sum1 = (L_sum1 << 2);
- L_sum2 = (L_sum2 << 2);
+ L_sum1 = L_add(L_sum1, *p1 * *p2);
+ L_sum1 = L_shl(L_sum1, 2);
+ L_sum2 = L_shl(L_sum2, 2);
- corr = (L_sum1 + 0x8000) >> 16;
+ corr = voround(L_sum1);
cor_x[i] = vo_mult(corr, sign[pos]) + (*p0++);
- corr = (L_sum2 + 0x8000) >> 16;
+ corr = voround(L_sum2);
cor_y[i] = vo_mult(corr, sign[pos + 1]) + (*p3++);
pos += STEP;
@@ -947,16 +947,16 @@
p2 = &vec[pos];
for (j= 62-pos;j >= 0; j--)
{
- L_sum1 += *p1 * *p2++;
- L_sum2 += *p1++ * *p2;
+ L_sum1 = L_add(L_sum1, *p1 * *p2++);
+ L_sum2 = L_add(L_sum2, *p1++ * *p2);
}
- L_sum1 += *p1 * *p2;
- L_sum1 = (L_sum1 << 2);
- L_sum2 = (L_sum2 << 2);
+ L_sum1 = L_add(L_sum1, *p1 * *p2);
+ L_sum1 = L_shl(L_sum1, 2);
+ L_sum2 = L_shl(L_sum2, 2);
- corr = (L_sum1 + 0x8000) >> 16;
+ corr = voround(L_sum1);
cor_x[i+1] = vo_mult(corr, sign[pos]) + (*p0++);
- corr = (L_sum2 + 0x8000) >> 16;
+ corr = voround(L_sum2);
cor_y[i+1] = vo_mult(corr, sign[pos + 1]) + (*p3++);
pos += STEP;
}
diff --git a/media/libstagefright/codecs/g711/dec/SoftG711.cpp b/media/libstagefright/codecs/g711/dec/SoftG711.cpp
index 958e7c4..9f7b590 100644
--- a/media/libstagefright/codecs/g711/dec/SoftG711.cpp
+++ b/media/libstagefright/codecs/g711/dec/SoftG711.cpp
@@ -240,6 +240,15 @@
mSignalledError = true;
}
+ if (inHeader->nFilledLen * sizeof(int16_t) > outHeader->nAllocLen) {
+ ALOGE("output buffer too small (%d).", outHeader->nAllocLen);
+ android_errorWriteLog(0x534e4554, "27793163");
+
+ notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
+ mSignalledError = true;
+ return;
+ }
+
const uint8_t *inputptr = inHeader->pBuffer + inHeader->nOffset;
if (mIsMLaw) {
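
Editor's note: the added G.711 check rejects input that would decode to more PCM than the output buffer can hold; every 8-bit companded input sample becomes one 16-bit output sample, so the output needs nFilledLen * sizeof(int16_t) bytes. A minimal sketch of the capacity test:

#include <cstdint>
#include <cstdio>

// The capacity test the fix above adds, widened to avoid overflow.
static bool outputFits(uint32_t inFilledLen, uint32_t outAllocLen) {
    return (uint64_t)inFilledLen * sizeof(int16_t) <= outAllocLen;
}

int main() {
    printf("%d\n", outputFits(160, 320));  // 1: exactly fits
    printf("%d\n", outputFits(320, 320));  // 0: needs 640 bytes
    return 0;
}
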
diff --git a/media/libstagefright/codecs/gsm/dec/SoftGSM.cpp b/media/libstagefright/codecs/gsm/dec/SoftGSM.cpp
index 7916c45..04d5a33 100644
--- a/media/libstagefright/codecs/gsm/dec/SoftGSM.cpp
+++ b/media/libstagefright/codecs/gsm/dec/SoftGSM.cpp
@@ -228,6 +228,14 @@
mSignalledError = true;
}
+ if (outHeader->nAllocLen < (inHeader->nFilledLen / kMSGSMFrameSize) * 320) {
+ ALOGE("output buffer is not large enough (%d).", outHeader->nAllocLen);
+ android_errorWriteLog(0x534e4554, "27793367");
+ notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
+ mSignalledError = true;
+ return;
+ }
+
uint8_t *inputptr = inHeader->pBuffer + inHeader->nOffset;
int n = mSignalledError ? 0 : DecodeGSM(mGsm,
diff --git a/media/libstagefright/codecs/on2/h264dec/inc/H264SwDecApi.h b/media/libstagefright/codecs/on2/h264dec/inc/H264SwDecApi.h
index fe112bc..9814e73 100644
--- a/media/libstagefright/codecs/on2/h264dec/inc/H264SwDecApi.h
+++ b/media/libstagefright/codecs/on2/h264dec/inc/H264SwDecApi.h
@@ -161,7 +161,7 @@
void H264SwDecTrace(char *);
/* function prototype for memory allocation */
- void* H264SwDecMalloc(u32 size);
+ void* H264SwDecMalloc(u32 size, u32 num);
/* function prototype for memory free */
void H264SwDecFree(void *ptr);
diff --git a/media/libstagefright/codecs/on2/h264dec/source/DecTestBench.c b/media/libstagefright/codecs/on2/h264dec/source/DecTestBench.c
index dcf2ef6..55c0065 100644
--- a/media/libstagefright/codecs/on2/h264dec/source/DecTestBench.c
+++ b/media/libstagefright/codecs/on2/h264dec/source/DecTestBench.c
@@ -700,18 +700,21 @@
library function malloc for allocation of memory.
------------------------------------------------------------------------------*/
-void* H264SwDecMalloc(u32 size)
+void* H264SwDecMalloc(u32 size, u32 num)
{
+ if (size > UINT32_MAX / num) {
+ return NULL;
+ }
#if defined(CHECK_MEMORY_USAGE)
/* Note that if the decoder has to free and reallocate some of the buffers
* the total value will be invalid */
static u32 numBytes = 0;
- numBytes += size;
+ numBytes += size * num;
DEBUG(("Allocated %d bytes, total %d\n", size, numBytes));
#endif
- return malloc(size);
+ return malloc(size * num);
}
/*------------------------------------------------------------------------------
diff --git a/media/libstagefright/codecs/on2/h264dec/source/EvaluationTestBench.c b/media/libstagefright/codecs/on2/h264dec/source/EvaluationTestBench.c
index aadc75f..e756a1f 100644
--- a/media/libstagefright/codecs/on2/h264dec/source/EvaluationTestBench.c
+++ b/media/libstagefright/codecs/on2/h264dec/source/EvaluationTestBench.c
@@ -85,7 +85,7 @@
rewind(finput);
/* allocate memory for stream buffer, exit if unsuccessful */
- byteStrm = byteStrmStart = (u8 *)H264SwDecMalloc(sizeof(u8)*strmLen);
+ byteStrm = byteStrmStart = (u8 *)H264SwDecMalloc(sizeof(u8), strmLen);
if (byteStrm == NULL)
{
printf("UNABLE TO ALLOCATE MEMORY\n");
@@ -298,9 +298,12 @@
library function malloc for allocation of memory.
------------------------------------------------------------------------------*/
-void* H264SwDecMalloc(u32 size)
+void* H264SwDecMalloc(u32 size, u32 num)
{
- return malloc(size);
+ if (size > UINT32_MAX / num) {
+ return NULL;
+ }
+ return malloc(size * num);
}
/*------------------------------------------------------------------------------
diff --git a/media/libstagefright/codecs/on2/h264dec/source/H264SwDecApi.c b/media/libstagefright/codecs/on2/h264dec/source/H264SwDecApi.c
index a073dcb..f820dfd 100644
--- a/media/libstagefright/codecs/on2/h264dec/source/H264SwDecApi.c
+++ b/media/libstagefright/codecs/on2/h264dec/source/H264SwDecApi.c
@@ -35,6 +35,8 @@
/*------------------------------------------------------------------------------
1. Include headers
------------------------------------------------------------------------------*/
+#include <log/log.h>
+
#include <stdlib.h>
#include <string.h>
#include "basetype.h"
@@ -79,8 +81,13 @@
UNUSED(string);
}
-void* H264SwDecMalloc(u32 size) {
- return malloc(size);
+void* H264SwDecMalloc(u32 size, u32 num) {
+ if (size > UINT32_MAX / num) {
+ ALOGE("can't allocate %u * %u bytes", size, num);
+ android_errorWriteLog(0x534e4554, "27855419");
+ return NULL;
+ }
+ return malloc(size * num);
}
void H264SwDecFree(void *ptr) {
@@ -144,7 +151,7 @@
return(H264SWDEC_PARAM_ERR);
}
- pDecCont = (decContainer_t *)H264SwDecMalloc(sizeof(decContainer_t));
+ pDecCont = (decContainer_t *)H264SwDecMalloc(sizeof(decContainer_t), 1);
if (pDecCont == NULL)
{
diff --git a/media/libstagefright/codecs/on2/h264dec/source/TestBenchMultipleInstance.c b/media/libstagefright/codecs/on2/h264dec/source/TestBenchMultipleInstance.c
index 42170d3..9a386bb 100644
--- a/media/libstagefright/codecs/on2/h264dec/source/TestBenchMultipleInstance.c
+++ b/media/libstagefright/codecs/on2/h264dec/source/TestBenchMultipleInstance.c
@@ -413,9 +413,12 @@
Function name: H264SwDecmalloc
------------------------------------------------------------------------------*/
-void* H264SwDecMalloc(u32 size)
+void* H264SwDecMalloc(u32 size, u32 num)
{
- return malloc(size);
+ if (size > UINT32_MAX / num) {
+ return NULL;
+ }
+ return malloc(size * num);
}
/*------------------------------------------------------------------------------
diff --git a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_decoder.c b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_decoder.c
index a816871..0ac480f 100644
--- a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_decoder.c
+++ b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_decoder.c
@@ -101,7 +101,7 @@
* specific NEON optimized "memset" for clearing the structure */
size = (sizeof(macroblockLayer_t) + 63) & ~0x3F;
- pStorage->mbLayer = (macroblockLayer_t*)H264SwDecMalloc(size);
+ pStorage->mbLayer = (macroblockLayer_t*)H264SwDecMalloc(size, 1);
if (!pStorage->mbLayer)
return HANTRO_NOK;
diff --git a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_util.h b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_util.h
index 216ad04..9f0eb7d 100644
--- a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_util.h
+++ b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_util.h
@@ -141,7 +141,7 @@
/* macro to allocate memory */
#define ALLOCATE(ptr, count, type) \
{ \
- (ptr) = H264SwDecMalloc((count) * sizeof(type)); \
+ (ptr) = H264SwDecMalloc(sizeof(type), (count)); \
}
/* macro to free allocated memory */
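
Editor's note: the two-argument H264SwDecMalloc(size, num) above guards the size * num multiplication before calling malloc, closing the overflow the old single-argument (count) * sizeof(type) callers had. The same idiom as a standalone sketch; unlike the patch, this sketch also guards num == 0, since the division would otherwise be undefined:

#include <cstdint>
#include <cstdio>
#include <cstdlib>

// Refuse any request whose byte count would wrap a 32-bit size.
static void *checked_malloc(uint32_t size, uint32_t num) {
    if (num == 0 || size > UINT32_MAX / num) {
        return NULL;
    }
    return malloc((size_t)size * num);
}

int main() {
    void *ok = checked_malloc(sizeof(uint32_t), 16);  // 64 bytes
    void *bad = checked_malloc(0x10000, 0x10000);     // 2^32: rejected
    printf("ok=%p bad=%p\n", ok, bad);
    free(ok);
    return 0;
}
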
diff --git a/media/libstagefright/foundation/AMessage.cpp b/media/libstagefright/foundation/AMessage.cpp
index 855ac95..37fb33f 100644
--- a/media/libstagefright/foundation/AMessage.cpp
+++ b/media/libstagefright/foundation/AMessage.cpp
@@ -595,7 +595,7 @@
}
// static
-sp<AMessage> AMessage::FromParcel(const Parcel &parcel) {
+sp<AMessage> AMessage::FromParcel(const Parcel &parcel, size_t maxNestingLevel) {
int32_t what = parcel.readInt32();
sp<AMessage> msg = new AMessage();
msg->setWhat(what);
@@ -667,7 +667,19 @@
case kTypeMessage:
{
- sp<AMessage> subMsg = AMessage::FromParcel(parcel);
+ if (maxNestingLevel == 0) {
+ ALOGE("Too many levels of AMessage nesting.");
+ return NULL;
+ }
+ sp<AMessage> subMsg = AMessage::FromParcel(
+ parcel,
+ maxNestingLevel - 1);
+ if (subMsg == NULL) {
+ // This condition will be triggered when there exists an
+ // object that cannot cross process boundaries or when the
+ // level of nested AMessage is too deep.
+ return NULL;
+ }
subMsg->incStrong(msg.get());
item->u.refValue = subMsg.get();
@@ -677,7 +689,7 @@
default:
{
ALOGE("This type of object cannot cross process boundaries.");
- TRESPASS();
+ return NULL;
}
}
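
Editor's note: FromParcel now threads maxNestingLevel through the recursion and bails out at zero, so a crafted parcel of deeply nested kTypeMessage items cannot exhaust the stack. The same guard applied to a toy nested-parentheses format (the format is entirely hypothetical; only the pattern matters):

#include <cstdio>

// Depth-limited recursive parse, mirroring the maxNestingLevel - 1
// recursion in AMessage::FromParcel above.
static bool parseNested(const char **p, int maxDepth) {
    if (**p != '(') return false;
    ++*p;
    while (**p == '(') {
        if (maxDepth == 0) {
            fprintf(stderr, "too many levels of nesting\n");
            return false;
        }
        if (!parseNested(p, maxDepth - 1)) return false;
    }
    if (**p != ')') return false;
    ++*p;
    return true;
}

int main() {
    const char *ok = "((()))";
    const char *deep = "((((((((((";
    printf("ok:   %d\n", parseNested(&ok, 5));    // 1
    printf("deep: %d\n", parseNested(&deep, 5));  // 0: depth capped
    return 0;
}
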
diff --git a/media/libstagefright/httplive/M3UParser.cpp b/media/libstagefright/httplive/M3UParser.cpp
index ff2bb27..935f1dc 100644
--- a/media/libstagefright/httplive/M3UParser.cpp
+++ b/media/libstagefright/httplive/M3UParser.cpp
@@ -603,6 +603,18 @@
return ERROR_MALFORMED;
}
err = parseMetaDataDuration(line, &itemMeta, "durationUs");
+ } else if (line.startsWith("#EXT-X-DISCONTINUITY-SEQUENCE")) {
+ if (mIsVariantPlaylist) {
+ return ERROR_MALFORMED;
+ }
+ size_t seq;
+ err = parseDiscontinuitySequence(line, &seq);
+ if (err == OK) {
+ mDiscontinuitySeq = seq;
+ ALOGI("mDiscontinuitySeq %zu", mDiscontinuitySeq);
+ } else {
+ ALOGI("Failed to parseDiscontinuitySequence %d", err);
+ }
} else if (line.startsWith("#EXT-X-DISCONTINUITY")) {
if (mIsVariantPlaylist) {
return ERROR_MALFORMED;
@@ -638,15 +650,6 @@
}
} else if (line.startsWith("#EXT-X-MEDIA")) {
err = parseMedia(line);
- } else if (line.startsWith("#EXT-X-DISCONTINUITY-SEQUENCE")) {
- if (mIsVariantPlaylist) {
- return ERROR_MALFORMED;
- }
- size_t seq;
- err = parseDiscontinuitySequence(line, &seq);
- if (err == OK) {
- mDiscontinuitySeq = seq;
- }
}
if (err != OK) {
diff --git a/media/libstagefright/httplive/PlaylistFetcher.cpp b/media/libstagefright/httplive/PlaylistFetcher.cpp
index 0a1ed94..20d124c 100644
--- a/media/libstagefright/httplive/PlaylistFetcher.cpp
+++ b/media/libstagefright/httplive/PlaylistFetcher.cpp
@@ -368,11 +368,15 @@
AString iv;
if (itemMeta->findString("cipher-iv", &iv)) {
if ((!iv.startsWith("0x") && !iv.startsWith("0X"))
- || iv.size() != 16 * 2 + 2) {
+ || iv.size() > 16 * 2 + 2) {
ALOGE("malformed cipher IV '%s'.", iv.c_str());
return ERROR_MALFORMED;
}
+ while (iv.size() < 16 * 2 + 2) {
+ iv.insert("0", 1, 2);
+ }
+
memset(mAESInitVec, 0, sizeof(mAESInitVec));
for (size_t i = 0; i < 16; ++i) {
char c1 = tolower(iv.c_str()[2 + 2 * i]);
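
Editor's note: the relaxed check above now accepts 0x-prefixed cipher IVs shorter than 32 hex digits and left-pads them with zeros; iv.insert("0", 1, 2) inserts one '0' at offset 2, just after the "0x" prefix. A standalone sketch of the normalization (note std::string's insert(pos, count, ch) argument order differs from AString::insert):

#include <cstdio>
#include <string>

// Left-pad a "0x"-prefixed hex IV to the full 32 digits (16 bytes).
static std::string padIV(std::string iv) {
    while (iv.size() < 16 * 2 + 2) {
        iv.insert(2, 1, '0');  // one '0' right after the "0x" prefix
    }
    return iv;
}

int main() {
    printf("%s\n", padIV("0x1A2B").c_str());
    // 0x00000000000000000000000000001A2B
    return 0;
}
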
@@ -1180,8 +1184,7 @@
// Signal a format discontinuity to ATSParser to clear partial data
// from previous streams. Not doing this causes bitstream corruption.
if (mTSParser != NULL) {
- mTSParser->signalDiscontinuity(
- ATSParser::DISCONTINUITY_FORMATCHANGE, NULL /* extra */);
+ mTSParser.clear();
}
queueDiscontinuity(
diff --git a/media/libstagefright/include/OMX.h b/media/libstagefright/include/OMX.h
index 9726741..6c073f0 100644
--- a/media/libstagefright/include/OMX.h
+++ b/media/libstagefright/include/OMX.h
@@ -93,6 +93,10 @@
node_id node, OMX_U32 port_index,
const sp<GraphicBuffer> &graphicBuffer, buffer_id buffer);
+ virtual status_t updateNativeHandleInMeta(
+ node_id node, OMX_U32 port_index,
+ const sp<NativeHandle> &nativeHandle, buffer_id buffer);
+
virtual status_t createInputSurface(
node_id node, OMX_U32 port_index, android_dataspace dataSpace,
sp<IGraphicBufferProducer> *bufferProducer,
@@ -111,7 +115,7 @@
virtual status_t allocateSecureBuffer(
node_id node, OMX_U32 port_index, size_t size,
- buffer_id *buffer, void **buffer_data, native_handle_t **native_handle);
+ buffer_id *buffer, void **buffer_data, sp<NativeHandle> *native_handle);
virtual status_t allocateBufferWithBackup(
node_id node, OMX_U32 port_index, const sp<IMemory> &params,
diff --git a/media/libstagefright/include/OMXNodeInstance.h b/media/libstagefright/include/OMXNodeInstance.h
index 25c3773..e98156e 100644
--- a/media/libstagefright/include/OMXNodeInstance.h
+++ b/media/libstagefright/include/OMXNodeInstance.h
@@ -79,6 +79,10 @@
OMX_U32 portIndex, const sp<GraphicBuffer> &graphicBuffer,
OMX::buffer_id buffer);
+ status_t updateNativeHandleInMeta(
+ OMX_U32 portIndex, const sp<NativeHandle> &nativeHandle,
+ OMX::buffer_id buffer);
+
status_t createInputSurface(
OMX_U32 portIndex, android_dataspace dataSpace,
sp<IGraphicBufferProducer> *bufferProducer,
@@ -98,7 +102,7 @@
status_t allocateSecureBuffer(
OMX_U32 portIndex, size_t size, OMX::buffer_id *buffer,
- void **buffer_data, native_handle_t **native_handle);
+ void **buffer_data, sp<NativeHandle> *native_handle);
status_t allocateBufferWithBackup(
OMX_U32 portIndex, const sp<IMemory> &params,
diff --git a/media/libstagefright/include/SampleTable.h b/media/libstagefright/include/SampleTable.h
index 738f864..54da497 100644
--- a/media/libstagefright/include/SampleTable.h
+++ b/media/libstagefright/include/SampleTable.h
@@ -24,6 +24,7 @@
#include <media/stagefright/MediaErrors.h>
#include <utils/RefBase.h>
#include <utils/threads.h>
+#include <utils/Vector.h>
namespace android {
@@ -110,8 +111,9 @@
uint32_t mDefaultSampleSize;
uint32_t mNumSampleSizes;
+ bool mHasTimeToSample;
uint32_t mTimeToSampleCount;
- uint32_t *mTimeToSample;
+ Vector<uint32_t> mTimeToSample;
struct SampleTimeEntry {
uint32_t mSampleIndex;
diff --git a/media/libstagefright/matroska/MatroskaExtractor.h b/media/libstagefright/matroska/MatroskaExtractor.h
index 592e7cf..9406829 100644
--- a/media/libstagefright/matroska/MatroskaExtractor.h
+++ b/media/libstagefright/matroska/MatroskaExtractor.h
@@ -18,7 +18,7 @@
#define MATROSKA_EXTRACTOR_H_
-#include "mkvparser.hpp"
+#include "mkvparser/mkvparser.h"
#include <media/stagefright/MediaExtractor.h>
#include <utils/Vector.h>
diff --git a/media/libstagefright/mpeg2ts/ATSParser.cpp b/media/libstagefright/mpeg2ts/ATSParser.cpp
index fb43a38..b863d67 100644
--- a/media/libstagefright/mpeg2ts/ATSParser.cpp
+++ b/media/libstagefright/mpeg2ts/ATSParser.cpp
@@ -1438,8 +1438,8 @@
// The number of bytes received by this parser up to and
// including the final byte of this PCR_ext field.
- size_t byteOffsetFromStart =
- mNumTSPacketsParsed * 188 + byteOffsetFromStartOfTSPacket;
+ uint64_t byteOffsetFromStart =
+ uint64_t(mNumTSPacketsParsed) * 188 + byteOffsetFromStartOfTSPacket;
for (size_t i = 0; i < mPrograms.size(); ++i) {
updatePCR(PID, PCR, byteOffsetFromStart);
@@ -1558,8 +1558,8 @@
__attribute__((no_sanitize("integer")))
void ATSParser::updatePCR(
- unsigned /* PID */, uint64_t PCR, size_t byteOffsetFromStart) {
- ALOGV("PCR 0x%016" PRIx64 " @ %zu", PCR, byteOffsetFromStart);
+ unsigned /* PID */, uint64_t PCR, uint64_t byteOffsetFromStart) {
+ ALOGV("PCR 0x%016" PRIx64 " @ %" PRIx64, PCR, byteOffsetFromStart);
if (mNumPCRs == 2) {
mPCR[0] = mPCR[1];
@@ -1734,6 +1734,13 @@
unsigned sectionLength = U16_AT(data + 1) & 0xfff;
ALOGV("sectionLength %u, skip %u", sectionLength, mSkipBytes);
+
+ if(sectionLength < mSkipBytes) {
+ ALOGE("b/28333006");
+ android_errorWriteLog(0x534e4554, "28333006");
+ return false;
+ }
+
// Skip the preceding field present when payload start indicator is on.
sectionLength -= mSkipBytes;
diff --git a/media/libstagefright/mpeg2ts/ATSParser.h b/media/libstagefright/mpeg2ts/ATSParser.h
index fb03cd6..9d9102d 100644
--- a/media/libstagefright/mpeg2ts/ATSParser.h
+++ b/media/libstagefright/mpeg2ts/ATSParser.h
@@ -182,10 +182,10 @@
// see feedTSPacket().
status_t parseTS(ABitReader *br, SyncEvent *event);
- void updatePCR(unsigned PID, uint64_t PCR, size_t byteOffsetFromStart);
+ void updatePCR(unsigned PID, uint64_t PCR, uint64_t byteOffsetFromStart);
uint64_t mPCR[2];
- size_t mPCRBytes[2];
+ uint64_t mPCRBytes[2];
int64_t mSystemTimeUs[2];
size_t mNumPCRs;
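
Editor's note: widening byteOffsetFromStart from size_t to uint64_t matters on 32-bit builds, where size_t wraps once the byte count passes 2^32; at 188 bytes per TS packet that is roughly 22.8 million packets, reachable in a long recording. A quick check of the arithmetic:

#include <cstdint>
#include <cstdio>

int main() {
    // A 32-bit byte counter wraps after UINT32_MAX / 188 TS packets.
    uint32_t packets = UINT32_MAX / 188;  // 22845570
    printf("32-bit offset wraps after %u packets\n", packets);

    // With a 64-bit offset the same product is safe.
    uint64_t offset = (uint64_t)30000000 * 188;  // 30M packets
    printf("64-bit offset: %llu bytes\n", (unsigned long long)offset);
    return 0;
}
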
diff --git a/media/libstagefright/omx/OMX.cpp b/media/libstagefright/omx/OMX.cpp
index 4d89ba1..2e989b5 100644
--- a/media/libstagefright/omx/OMX.cpp
+++ b/media/libstagefright/omx/OMX.cpp
@@ -488,6 +488,19 @@
port_index, graphicBuffer, buffer);
}
+status_t OMX::updateNativeHandleInMeta(
+ node_id node, OMX_U32 port_index,
+ const sp<NativeHandle> &nativeHandle, buffer_id buffer) {
+ OMXNodeInstance *instance = findInstance(node);
+
+ if (instance == NULL) {
+ return NAME_NOT_FOUND;
+ }
+
+ return instance->updateNativeHandleInMeta(
+ port_index, nativeHandle, buffer);
+}
+
status_t OMX::createInputSurface(
node_id node, OMX_U32 port_index, android_dataspace dataSpace,
sp<IGraphicBufferProducer> *bufferProducer, MetadataBufferType *type) {
@@ -533,7 +546,7 @@
status_t OMX::allocateSecureBuffer(
node_id node, OMX_U32 port_index, size_t size,
- buffer_id *buffer, void **buffer_data, native_handle_t **native_handle) {
+ buffer_id *buffer, void **buffer_data, sp<NativeHandle> *native_handle) {
OMXNodeInstance *instance = findInstance(node);
if (instance == NULL) {
diff --git a/media/libstagefright/omx/OMXNodeInstance.cpp b/media/libstagefright/omx/OMXNodeInstance.cpp
index 6b7a871..31bab70 100644
--- a/media/libstagefright/omx/OMXNodeInstance.cpp
+++ b/media/libstagefright/omx/OMXNodeInstance.cpp
@@ -36,8 +36,8 @@
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/MediaErrors.h>
-
#include <utils/misc.h>
+#include <utils/NativeHandle.h>
static const OMX_U32 kPortIndexInput = 0;
static const OMX_U32 kPortIndexOutput = 1;
@@ -152,8 +152,13 @@
mGraphicBuffer = graphicBuffer;
}
+ void setNativeHandle(const sp<NativeHandle> &nativeHandle) {
+ mNativeHandle = nativeHandle;
+ }
+
private:
sp<GraphicBuffer> mGraphicBuffer;
+ sp<NativeHandle> mNativeHandle;
sp<IMemory> mMem;
size_t mSize;
bool mIsBackup;
@@ -523,6 +528,9 @@
OMX_U32 portIndex, OMX_BOOL enable, MetadataBufferType *type) {
if (portIndex != kPortIndexInput && portIndex != kPortIndexOutput) {
android_errorWriteLog(0x534e4554, "26324358");
+ if (type != NULL) {
+ *type = kMetadataBufferTypeInvalid;
+ }
return BAD_VALUE;
}
@@ -533,26 +541,32 @@
OMX_STRING nativeBufferName = const_cast<OMX_STRING>(
"OMX.google.android.index.storeANWBufferInMetadata");
MetadataBufferType negotiatedType;
+ MetadataBufferType requestedType = type != NULL ? *type : kMetadataBufferTypeANWBuffer;
StoreMetaDataInBuffersParams params;
InitOMXParams(&params);
params.nPortIndex = portIndex;
params.bStoreMetaData = enable;
- OMX_ERRORTYPE err = OMX_GetExtensionIndex(mHandle, nativeBufferName, &index);
+ OMX_ERRORTYPE err =
+ requestedType == kMetadataBufferTypeANWBuffer
+ ? OMX_GetExtensionIndex(mHandle, nativeBufferName, &index)
+ : OMX_ErrorUnsupportedIndex;
OMX_ERRORTYPE xerr = err;
if (err == OMX_ErrorNone) {
err = OMX_SetParameter(mHandle, index, &params);
if (err == OMX_ErrorNone) {
name = nativeBufferName; // set name for debugging
- negotiatedType = kMetadataBufferTypeANWBuffer;
+ negotiatedType = requestedType;
}
}
if (err != OMX_ErrorNone) {
err = OMX_GetExtensionIndex(mHandle, name, &index);
xerr = err;
if (err == OMX_ErrorNone) {
- negotiatedType = kMetadataBufferTypeGrallocSource;
+ negotiatedType =
+ requestedType == kMetadataBufferTypeANWBuffer
+ ? kMetadataBufferTypeGrallocSource : requestedType;
err = OMX_SetParameter(mHandle, index, &params);
}
}
@@ -574,8 +588,9 @@
}
mMetadataType[portIndex] = negotiatedType;
}
- CLOG_CONFIG(storeMetaDataInBuffers, "%s:%u negotiated %s:%d",
- portString(portIndex), portIndex, asString(negotiatedType), negotiatedType);
+ CLOG_CONFIG(storeMetaDataInBuffers, "%s:%u %srequested %s:%d negotiated %s:%d",
+ portString(portIndex), portIndex, enable ? "" : "UN",
+ asString(requestedType), requestedType, asString(negotiatedType), negotiatedType);
if (type != NULL) {
*type = negotiatedType;
@@ -871,6 +886,43 @@
return updateGraphicBufferInMeta_l(portIndex, graphicBuffer, buffer, header);
}
+status_t OMXNodeInstance::updateNativeHandleInMeta(
+ OMX_U32 portIndex, const sp<NativeHandle>& nativeHandle, OMX::buffer_id buffer) {
+ Mutex::Autolock autoLock(mLock);
+ OMX_BUFFERHEADERTYPE *header = findBufferHeader(buffer);
+ // No need to check |nativeHandle| since NULL is valid for it as below.
+ if (header == NULL) {
+ ALOGE("b/25884056");
+ return BAD_VALUE;
+ }
+
+ if (portIndex != kPortIndexInput && portIndex != kPortIndexOutput) {
+ return BAD_VALUE;
+ }
+
+ BufferMeta *bufferMeta = (BufferMeta *)(header->pAppPrivate);
+ // update backup buffer for input, codec buffer for output
+ sp<ABuffer> data = bufferMeta->getBuffer(
+ header, portIndex == kPortIndexInput /* backup */, false /* limit */);
+ bufferMeta->setNativeHandle(nativeHandle);
+ if (mMetadataType[portIndex] == kMetadataBufferTypeNativeHandleSource
+ && data->capacity() >= sizeof(VideoNativeHandleMetadata)) {
+ VideoNativeHandleMetadata &metadata = *(VideoNativeHandleMetadata *)(data->data());
+ metadata.eType = mMetadataType[portIndex];
+ metadata.pHandle =
+ nativeHandle == NULL ? NULL : const_cast<native_handle*>(nativeHandle->handle());
+ } else {
+ CLOG_ERROR(updateNativeHandleInMeta, BAD_VALUE, "%s:%u, %#x bad type (%d) or size (%zu)",
+ portString(portIndex), portIndex, buffer, mMetadataType[portIndex], data->capacity());
+ return BAD_VALUE;
+ }
+
+ CLOG_BUFFER(updateNativeHandleInMeta, "%s:%u, %#x := %p",
+ portString(portIndex), portIndex, buffer,
+ nativeHandle == NULL ? NULL : nativeHandle->handle());
+ return OK;
+}
+
status_t OMXNodeInstance::createGraphicBufferSource(
OMX_U32 portIndex, sp<IGraphicBufferConsumer> bufferConsumer, MetadataBufferType *type) {
status_t err;
@@ -884,6 +936,9 @@
}
// Input buffers will hold meta-data (ANativeWindowBuffer references).
+ if (type != NULL) {
+ *type = kMetadataBufferTypeANWBuffer;
+ }
err = storeMetaDataInBuffers_l(portIndex, OMX_TRUE, type);
if (err != OK) {
return err;
@@ -1009,7 +1064,7 @@
status_t OMXNodeInstance::allocateSecureBuffer(
OMX_U32 portIndex, size_t size, OMX::buffer_id *buffer,
- void **buffer_data, native_handle_t **native_handle) {
+ void **buffer_data, sp<NativeHandle> *native_handle) {
if (buffer == NULL || buffer_data == NULL || native_handle == NULL) {
ALOGE("b/25884056");
return BAD_VALUE;
@@ -1039,7 +1094,8 @@
*buffer = makeBufferID(header);
if (mSecureBufferType[portIndex] == kSecureBufferTypeNativeHandle) {
*buffer_data = NULL;
- *native_handle = (native_handle_t *)header->pBuffer;
+ *native_handle = NativeHandle::create(
+ (native_handle_t *)header->pBuffer, false /* ownsHandle */);
} else {
*buffer_data = header->pBuffer;
*native_handle = NULL;
@@ -1052,7 +1108,8 @@
bufferSource->addCodecBuffer(header);
}
CLOG_BUFFER(allocateSecureBuffer, NEW_BUFFER_FMT(
- *buffer, portIndex, "%zu@%p:%p", size, *buffer_data, *native_handle));
+ *buffer, portIndex, "%zu@%p:%p", size, *buffer_data,
+ *native_handle == NULL ? NULL : (*native_handle)->handle()));
return OK;
}
diff --git a/media/libstagefright/omx/SoftVideoEncoderOMXComponent.cpp b/media/libstagefright/omx/SoftVideoEncoderOMXComponent.cpp
index 72823e2..0f9c118 100644
--- a/media/libstagefright/omx/SoftVideoEncoderOMXComponent.cpp
+++ b/media/libstagefright/omx/SoftVideoEncoderOMXComponent.cpp
@@ -170,6 +170,11 @@
OMX_ERRORTYPE SoftVideoEncoderOMXComponent::internalSetPortParams(
const OMX_PARAM_PORTDEFINITIONTYPE *port) {
+
+ if (!isValidOMXParam(port)) {
+ return OMX_ErrorBadParameter;
+ }
+
if (port->nPortIndex == kInputPortIndex) {
mWidth = port->format.video.nFrameWidth;
mHeight = port->format.video.nFrameHeight;
@@ -216,6 +221,10 @@
const OMX_PARAM_COMPONENTROLETYPE *roleParams =
(const OMX_PARAM_COMPONENTROLETYPE *)param;
+ if (!isValidOMXParam(roleParams)) {
+ return OMX_ErrorBadParameter;
+ }
+
if (strncmp((const char *)roleParams->cRole,
mComponentRole,
OMX_MAX_STRINGNAME_SIZE - 1)) {
@@ -241,6 +250,10 @@
const OMX_VIDEO_PARAM_PORTFORMATTYPE* format =
(const OMX_VIDEO_PARAM_PORTFORMATTYPE *)param;
+ if (!isValidOMXParam(format)) {
+ return OMX_ErrorBadParameter;
+ }
+
if (format->nPortIndex == kInputPortIndex) {
if (format->eColorFormat == OMX_COLOR_FormatYUV420Planar ||
format->eColorFormat == OMX_COLOR_FormatYUV420SemiPlanar ||
@@ -270,6 +283,10 @@
const StoreMetaDataInBuffersParams *storeParam =
(const StoreMetaDataInBuffersParams *)param;
+ if (!isValidOMXParam(storeParam)) {
+ return OMX_ErrorBadParameter;
+ }
+
if (storeParam->nPortIndex == kOutputPortIndex) {
return storeParam->bStoreMetaData ? OMX_ErrorUnsupportedSetting : OMX_ErrorNone;
} else if (storeParam->nPortIndex != kInputPortIndex) {
@@ -304,6 +321,10 @@
OMX_VIDEO_PARAM_PORTFORMATTYPE *formatParams =
(OMX_VIDEO_PARAM_PORTFORMATTYPE *)param;
+ if (!isValidOMXParam(formatParams)) {
+ return OMX_ErrorBadParameter;
+ }
+
if (formatParams->nPortIndex == kInputPortIndex) {
if (formatParams->nIndex >= NELEM(kSupportedColorFormats)) {
return OMX_ErrorNoMore;
@@ -329,6 +350,10 @@
OMX_VIDEO_PARAM_PROFILELEVELTYPE *profileLevel =
(OMX_VIDEO_PARAM_PROFILELEVELTYPE *) param;
+ if (!isValidOMXParam(profileLevel)) {
+ return OMX_ErrorBadParameter;
+ }
+
if (profileLevel->nPortIndex != kOutputPortIndex) {
ALOGE("Invalid port index: %u", profileLevel->nPortIndex);
return OMX_ErrorUnsupportedIndex;
@@ -600,6 +625,7 @@
case HAL_PIXEL_FORMAT_YCbCr_420_888:
ConvertFlexYUVToPlanar(dst, dstStride, dstVStride, &ycbcr, width, height);
break;
+ case HAL_PIXEL_FORMAT_RGBX_8888:
case HAL_PIXEL_FORMAT_RGBA_8888:
case HAL_PIXEL_FORMAT_BGRA_8888:
ConvertRGB32ToPlanar(
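Each handler above now calls isValidOMXParam() before touching the cast struct. The idea: every OMX param struct carries its own nSize as the first field, so a caller can hand in a buffer whose declared size is smaller than the type the handler casts it to. A hedged sketch of such a check (assumed shape, not the actual AOSP helper):

    #include <cstddef>
    #include <cstdint>

    struct SamplePortFormat {   // any OMX-style param struct
        uint32_t nSize;         // must be the first field by convention
        uint32_t nVersion;
        uint32_t nPortIndex;
    };

    template <typename T>
    bool isValidOMXParamSketch(const T *param) {
        static_assert(offsetof(T, nSize) == 0, "nSize must come first");
        // Reject structs whose declared size is smaller than the type being
        // read through -- the overread these checks guard against.
        return param != nullptr && param->nSize >= sizeof(T);
    }

    // A handler would cast the raw pointer to SamplePortFormat*, run the
    // check, and return OMX_ErrorBadParameter on failure, as the hunks above do.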
diff --git a/media/libstagefright/tests/Android.mk b/media/libstagefright/tests/Android.mk
index 111e6c5..d1c9d36 100644
--- a/media/libstagefright/tests/Android.mk
+++ b/media/libstagefright/tests/Android.mk
@@ -30,6 +30,7 @@
frameworks/av/media/libstagefright \
frameworks/av/media/libstagefright/include \
$(TOP)/frameworks/native/include/media/openmax \
+ $(TOP)/frameworks/native/include/media/hardware \
LOCAL_CFLAGS += -Werror -Wall
LOCAL_CLANG := true
diff --git a/media/libstagefright/wifi-display/Android.mk b/media/libstagefright/wifi-display/Android.mk
index 5bd6e5c..ae4ac90 100644
--- a/media/libstagefright/wifi-display/Android.mk
+++ b/media/libstagefright/wifi-display/Android.mk
@@ -17,6 +17,7 @@
LOCAL_C_INCLUDES:= \
$(TOP)/frameworks/av/media/libstagefright \
$(TOP)/frameworks/native/include/media/openmax \
+ $(TOP)/frameworks/native/include/media/hardware \
$(TOP)/frameworks/av/media/libstagefright/mpeg2ts \
LOCAL_SHARED_LIBRARIES:= \
diff --git a/media/libstagefright/wifi-display/source/PlaybackSession.cpp b/media/libstagefright/wifi-display/source/PlaybackSession.cpp
index 3ecb52b..3587cb9 100644
--- a/media/libstagefright/wifi-display/source/PlaybackSession.cpp
+++ b/media/libstagefright/wifi-display/source/PlaybackSession.cpp
@@ -948,8 +948,9 @@
if (isVideo) {
format->setString("mime", MEDIA_MIMETYPE_VIDEO_AVC);
- format->setInt32("store-metadata-in-buffers", true);
- format->setInt32("store-metadata-in-buffers-output", (mHDCP != NULL)
+ format->setInt32(
+ "android._input-metadata-buffer-type", kMetadataBufferTypeANWBuffer);
+ format->setInt32("android._store-metadata-in-buffers-output", (mHDCP != NULL)
&& (mHDCP->getCaps() & HDCPModule::HDCP_CAPS_ENCRYPT_NATIVE));
format->setInt32(
"color-format", OMX_COLOR_FormatAndroidOpaque);
diff --git a/media/mediaserver/mediaserver.rc b/media/mediaserver/mediaserver.rc
index 89c3896..b777d5c 100644
--- a/media/mediaserver/mediaserver.rc
+++ b/media/mediaserver/mediaserver.rc
@@ -3,3 +3,4 @@
user media
group audio camera inet net_bt net_bt_admin net_bw_acct drmrpc mediadrm
ioprio rt 4
+ writepid /dev/cpuset/foreground/tasks
diff --git a/media/ndk/Android.mk b/media/ndk/Android.mk
index f287761..7f6b66b 100644
--- a/media/ndk/Android.mk
+++ b/media/ndk/Android.mk
@@ -45,6 +45,7 @@
LOCAL_SHARED_LIBRARIES := \
libbinder \
libmedia \
+ libmediadrm \
libstagefright \
libstagefright_foundation \
liblog \
diff --git a/media/ndk/NdkMediaCrypto.cpp b/media/ndk/NdkMediaCrypto.cpp
index 67d12a4..32aabdd 100644
--- a/media/ndk/NdkMediaCrypto.cpp
+++ b/media/ndk/NdkMediaCrypto.cpp
@@ -29,7 +29,6 @@
#include <binder/IServiceManager.h>
#include <media/ICrypto.h>
#include <media/IMediaDrmService.h>
-#include <media/IMediaPlayerService.h>
#include <android_runtime/AndroidRuntime.h>
#include <android_util_Binder.h>
@@ -39,34 +38,17 @@
static sp<ICrypto> makeCrypto() {
sp<IServiceManager> sm = defaultServiceManager();
- sp<ICrypto> crypto;
+ sp<IBinder> binder = sm->getService(String16("media.drm"));
- char value[PROPERTY_VALUE_MAX];
- if (property_get("media.mediadrmservice.enable", value, NULL)
- && (!strcmp("1", value) || !strcasecmp("true", value))) {
- sp<IBinder> binder =
- sm->getService(String16("media.drm"));
- sp<IMediaDrmService> service =
- interface_cast<IMediaDrmService>(binder);
- if (service == NULL) {
- return NULL;
- }
- crypto = service->makeCrypto();
- } else {
- sp<IBinder> binder =
- sm->getService(String16("media.player"));
- sp<IMediaPlayerService> service =
- interface_cast<IMediaPlayerService>(binder);
- if (service == NULL) {
- return NULL;
- }
- crypto = service->makeCrypto();
- }
-
- if (crypto == NULL || (crypto->initCheck() != OK && crypto->initCheck() != NO_INIT)) {
+ sp<IMediaDrmService> service = interface_cast<IMediaDrmService>(binder);
+ if (service == NULL) {
return NULL;
}
+ sp<ICrypto> crypto = service->makeCrypto();
+ if (crypto == NULL || (crypto->initCheck() != OK && crypto->initCheck() != NO_INIT)) {
+ return NULL;
+ }
return crypto;
}
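Both makeCrypto() here and CreateDrm() below now resolve their interface through the single "media.drm" service instead of forking on the media.mediadrmservice.enable property. The lookup-and-cast step they share could be factored as below (a hypothetical helper, not part of AOSP; it relies only on the standard libbinder calls):

    #include <binder/IInterface.h>
    #include <binder/IServiceManager.h>
    #include <utils/String16.h>

    using namespace android;

    template <typename INTERFACE>
    sp<INTERFACE> getMediaService(const char *name) {
        sp<IBinder> binder = defaultServiceManager()->getService(String16(name));
        return interface_cast<INTERFACE>(binder);  // NULL if absent or wrong type
    }

    // e.g.: sp<IMediaDrmService> svc = getMediaService<IMediaDrmService>("media.drm");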
diff --git a/media/ndk/NdkMediaDrm.cpp b/media/ndk/NdkMediaDrm.cpp
index e98b124..be71f43 100644
--- a/media/ndk/NdkMediaDrm.cpp
+++ b/media/ndk/NdkMediaDrm.cpp
@@ -29,7 +29,6 @@
#include <media/stagefright/MediaErrors.h>
#include <binder/IServiceManager.h>
#include <media/IMediaDrmService.h>
-#include <media/IMediaPlayerService.h>
#include <ndk/NdkMediaCrypto.h>
@@ -150,34 +149,17 @@
static sp<IDrm> CreateDrm() {
sp<IServiceManager> sm = defaultServiceManager();
- sp<IDrm> drm;
+ sp<IBinder> binder = sm->getService(String16("media.drm"));
- char value[PROPERTY_VALUE_MAX];
- if (property_get("media.mediadrmservice.enable", value, NULL)
- && (!strcmp("1", value) || !strcasecmp("true", value))) {
- sp<IBinder> binder =
- sm->getService(String16("media.drm"));
- sp<IMediaDrmService> service =
- interface_cast<IMediaDrmService>(binder);
- if (service == NULL) {
- return NULL;
- }
- drm = service->makeDrm();
- } else {
- sp<IBinder> binder =
- sm->getService(String16("media.player"));
- sp<IMediaPlayerService> service =
- interface_cast<IMediaPlayerService>(binder);
- if (service == NULL) {
- return NULL;
- }
- drm = service->makeDrm();
- }
-
- if (drm == NULL || (drm->initCheck() != OK && drm->initCheck() != NO_INIT)) {
+ sp<IMediaDrmService> service = interface_cast<IMediaDrmService>(binder);
+ if (service == NULL) {
return NULL;
}
+ sp<IDrm> drm = service->makeDrm();
+ if (drm == NULL || (drm->initCheck() != OK && drm->initCheck() != NO_INIT)) {
+ return NULL;
+ }
return drm;
}
diff --git a/services/audioflinger/Android.mk b/services/audioflinger/Android.mk
index 6700f6e..4f826e5 100644
--- a/services/audioflinger/Android.mk
+++ b/services/audioflinger/Android.mk
@@ -34,6 +34,7 @@
LOCAL_C_INCLUDES := \
$(TOPDIR)frameworks/av/services/audiopolicy \
$(TOPDIR)external/sonic \
+ libcore/include \
$(call include-path-for, audio-effects) \
$(call include-path-for, audio-utils)
@@ -54,7 +55,8 @@
libpowermanager \
libserviceutility \
libsonic \
- libmediautils
+ libmediautils \
+ libmemunreachable
LOCAL_STATIC_LIBRARIES := \
libcpustats \
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index 208dc8b..1d575b3 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -31,6 +31,7 @@
#include <utils/Log.h>
#include <utils/Trace.h>
#include <binder/Parcel.h>
+#include <memunreachable/memunreachable.h>
#include <utils/String16.h>
#include <utils/threads.h>
#include <utils/Atomic.h>
@@ -175,7 +176,7 @@
mHardwareStatus(AUDIO_HW_IDLE),
mMasterVolume(1.0f),
mMasterMute(false),
- mNextUniqueId(AUDIO_UNIQUE_ID_USE_MAX), // zero has a special meaning, so unavailable
+ // mNextUniqueId(AUDIO_UNIQUE_ID_USE_MAX),
mMode(AUDIO_MODE_INVALID),
mBtNrecIsOff(false),
mIsLowRamDevice(true),
@@ -183,6 +184,12 @@
mGlobalEffectEnableTime(0),
mSystemReady(false)
{
+ // unsigned instead of audio_unique_id_use_t, because the ++ operator is unavailable for enums
+ for (unsigned use = AUDIO_UNIQUE_ID_USE_UNSPECIFIED; use < AUDIO_UNIQUE_ID_USE_MAX; use++) {
+ // zero ID has a special meaning, so unavailable
+ mNextUniqueIds[use] = AUDIO_UNIQUE_ID_USE_MAX;
+ }
+
getpid_cached = getpid();
const bool doLog = property_get_bool("ro.test_harness", false);
if (doLog) {
@@ -462,6 +469,21 @@
binder->dump(fd, args);
}
}
+
+ // check for optional arguments
+ bool unreachableMemory = false;
+ for (const auto &arg : args) {
+ if (arg == String16("--unreachable")) {
+ unreachableMemory = true;
+ }
+ }
+
+ if (unreachableMemory) {
+ dprintf(fd, "\nDumping unreachable memory:\n");
+ // TODO - should the limit be a dump argument?
+ std::string s = GetUnreachableMemoryString(true /* contents */, 10000 /* limit */);
+ write(fd, s.c_str(), s.size());
+ }
}
return NO_ERROR;
}
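With this hunk, appending --unreachable to the arguments of the AudioFlinger dump (for example via dumpsys media.audio_flinger --unreachable, assuming the usual service name) tacks a libmemunreachable leak report onto the output; the contents flag and the 10000 limit passed to GetUnreachableMemoryString() are hard-coded pending the TODO above.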
@@ -724,6 +746,17 @@
return thread->frameCount();
}
+size_t AudioFlinger::frameCountHAL(audio_io_handle_t ioHandle) const
+{
+ Mutex::Autolock _l(mLock);
+ ThreadBase *thread = checkThread_l(ioHandle);
+ if (thread == NULL) {
+ ALOGW("frameCountHAL() unknown thread %d", ioHandle);
+ return 0;
+ }
+ return thread->frameCountHAL();
+}
+
uint32_t AudioFlinger::latency(audio_io_handle_t output) const
{
Mutex::Autolock _l(mLock);
@@ -2075,8 +2108,8 @@
audio_is_linear_pcm(config->format) &&
audio_is_linear_pcm(halconfig.format) &&
(halconfig.sample_rate <= AUDIO_RESAMPLER_DOWN_RATIO_MAX * config->sample_rate) &&
- (audio_channel_count_from_in_mask(halconfig.channel_mask) <= FCC_2) &&
- (audio_channel_count_from_in_mask(config->channel_mask) <= FCC_2)) {
+ (audio_channel_count_from_in_mask(halconfig.channel_mask) <= FCC_8) &&
+ (audio_channel_count_from_in_mask(config->channel_mask) <= FCC_8)) {
// FIXME describe the change proposed by HAL (save old values so we can log them here)
ALOGV("openInput_l() reopening with proposed sampling rate and channel mask");
inStream = NULL;
@@ -2424,13 +2457,23 @@
audio_unique_id_t AudioFlinger::nextUniqueId(audio_unique_id_use_t use)
{
- int32_t base = android_atomic_add(AUDIO_UNIQUE_ID_USE_MAX, &mNextUniqueId);
- // We have no way of recovering from wraparound
- LOG_ALWAYS_FATAL_IF(base == 0, "unique ID overflow");
// This is the internal API, so it is OK to assert on bad parameter.
LOG_ALWAYS_FATAL_IF((unsigned) use >= (unsigned) AUDIO_UNIQUE_ID_USE_MAX);
- ALOG_ASSERT(audio_unique_id_get_use(base) == AUDIO_UNIQUE_ID_USE_UNSPECIFIED);
- return (audio_unique_id_t) (base | use);
+ const int maxRetries = use == AUDIO_UNIQUE_ID_USE_SESSION ? 3 : 1;
+ for (int retry = 0; retry < maxRetries; retry++) {
+ // The cast allows wraparound from max positive to min negative instead of abort
+ uint32_t base = (uint32_t) atomic_fetch_add_explicit(&mNextUniqueIds[use],
+ (uint_fast32_t) AUDIO_UNIQUE_ID_USE_MAX, memory_order_acq_rel);
+ ALOG_ASSERT(audio_unique_id_get_use(base) == AUDIO_UNIQUE_ID_USE_UNSPECIFIED);
+ // allow wrap by skipping 0 and -1 for session ids
+ if (!(base == 0 || base == (~0u & ~AUDIO_UNIQUE_ID_USE_MASK))) {
+ ALOGW_IF(retry != 0, "unique ID overflow for use %d", use);
+ return (audio_unique_id_t) (base | use);
+ }
+ }
+ // We have no way of recovering from wraparound
+ LOG_ALWAYS_FATAL("unique ID overflow for use %d", use);
+ // TODO Use a floor after wraparound. This may need a mutex.
}
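The replacement nextUniqueId() keeps the original packing scheme -- low bits carry the use, high bits a per-use counter -- but gives each use its own atomic counter and tolerates wraparound. A self-contained sketch of the packing, with kUseMax standing in for AUDIO_UNIQUE_ID_USE_MAX (assumed to be a power of two, with counters initialized to kUseMax so the low bits start clear):

    #include <atomic>
    #include <cstdint>

    enum { kUseMax = 8, kUseMask = kUseMax - 1 };  // stand-ins for the AOSP enums

    int32_t nextUniqueIdSketch(std::atomic<uint32_t> &counter, unsigned use) {
        // counter holds multiples of kUseMax, so base always has clear low bits
        uint32_t base = counter.fetch_add(kUseMax, std::memory_order_acq_rel);
        // Session IDs must additionally skip the reserved values 0 and -1,
        // which is what the retry loop above does.
        return (int32_t)(base | use);
    }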
AudioFlinger::PlaybackThread *AudioFlinger::primaryPlaybackThread_l() const
diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h
index 498c33e..96d38d0 100644
--- a/services/audioflinger/AudioFlinger.h
+++ b/services/audioflinger/AudioFlinger.h
@@ -131,6 +131,7 @@
virtual uint32_t sampleRate(audio_io_handle_t ioHandle) const;
virtual audio_format_t format(audio_io_handle_t output) const;
virtual size_t frameCount(audio_io_handle_t ioHandle) const;
+ virtual size_t frameCountHAL(audio_io_handle_t ioHandle) const;
virtual uint32_t latency(audio_io_handle_t output) const;
virtual status_t setMasterVolume(float value);
@@ -679,9 +680,8 @@
// protected by mClientLock
DefaultKeyedVector< pid_t, sp<NotificationClient> > mNotificationClients;
- volatile int32_t mNextUniqueId; // updated by android_atomic_inc
- // nextUniqueId() returns uint32_t, but this is declared int32_t
- // because the atomic operations require an int32_t
+ // updated by atomic_fetch_add_explicit
+ volatile atomic_uint_fast32_t mNextUniqueIds[AUDIO_UNIQUE_ID_USE_MAX];
audio_mode_t mMode;
bool mBtNrecIsOff;
diff --git a/services/audioflinger/FastMixer.cpp b/services/audioflinger/FastMixer.cpp
index 26cd1f9..546ef25 100644
--- a/services/audioflinger/FastMixer.cpp
+++ b/services/audioflinger/FastMixer.cpp
@@ -77,7 +77,7 @@
mSinkChannelMask = audio_channel_out_mask_from_count(mSinkChannelCount);
unsigned i;
- for (i = 0; i < FastMixerState::kMaxFastTracks; ++i) {
+ for (i = 0; i < FastMixerState::sMaxFastTracks; ++i) {
mFastTrackNames[i] = -1;
mGenerations[i] = 0;
}
@@ -187,7 +187,7 @@
// FIXME new may block for unbounded time at internal mutex of the heap
// implementation; it would be better to have normal mixer allocate for us
// to avoid blocking here and to prevent possible priority inversion
- mMixer = new AudioMixer(frameCount, mSampleRate, FastMixerState::kMaxFastTracks);
+ mMixer = new AudioMixer(frameCount, mSampleRate, FastMixerState::sMaxFastTracks);
const size_t mixerFrameSize = mSinkChannelCount
* audio_bytes_per_sample(mMixerBufferFormat);
mMixerBufferSize = mixerFrameSize * frameCount;
@@ -214,7 +214,7 @@
}
mMixerBufferState = UNDEFINED;
#if !LOG_NDEBUG
- for (unsigned i = 0; i < FastMixerState::kMaxFastTracks; ++i) {
+ for (unsigned i = 0; i < FastMixerState::sMaxFastTracks; ++i) {
mFastTrackNames[i] = -1;
}
#endif
diff --git a/services/audioflinger/FastMixerDumpState.cpp b/services/audioflinger/FastMixerDumpState.cpp
index b10942b..2326e2a 100644
--- a/services/audioflinger/FastMixerDumpState.cpp
+++ b/services/audioflinger/FastMixerDumpState.cpp
@@ -166,10 +166,10 @@
// Instead we always display all tracks, with an indication
// of whether we think the track is active.
uint32_t trackMask = mTrackMask;
- dprintf(fd, " Fast tracks: kMaxFastTracks=%u activeMask=%#x\n",
- FastMixerState::kMaxFastTracks, trackMask);
+ dprintf(fd, " Fast tracks: sMaxFastTracks=%u activeMask=%#x\n",
+ FastMixerState::sMaxFastTracks, trackMask);
dprintf(fd, " Index Active Full Partial Empty Recent Ready\n");
- for (uint32_t i = 0; i < FastMixerState::kMaxFastTracks; ++i, trackMask >>= 1) {
+ for (uint32_t i = 0; i < FastMixerState::sMaxFastTracks; ++i, trackMask >>= 1) {
bool isActive = trackMask & 1;
const FastTrackDump *ftDump = &mTracks[i];
const FastTrackUnderruns& underruns = ftDump->mUnderruns;
diff --git a/services/audioflinger/FastMixerState.cpp b/services/audioflinger/FastMixerState.cpp
index a8c2634..ad471fb 100644
--- a/services/audioflinger/FastMixerState.cpp
+++ b/services/audioflinger/FastMixerState.cpp
@@ -14,6 +14,7 @@
* limitations under the License.
*/
+#include <cutils/properties.h>
#include "FastMixerState.h"
namespace android {
@@ -33,6 +34,10 @@
mFastTracksGen(0), mTrackMask(0), mOutputSink(NULL), mOutputSinkGen(0),
mFrameCount(0), mTeeSink(NULL)
{
+ int ok = pthread_once(&sMaxFastTracksOnce, sMaxFastTracksInit);
+ if (ok != 0) {
+ ALOGE("%s pthread_once failed: %d", __func__, ok);
+ }
}
FastMixerState::~FastMixerState()
@@ -40,6 +45,12 @@
}
// static
+unsigned FastMixerState::sMaxFastTracks = kDefaultFastTracks;
+
+// static
+pthread_once_t FastMixerState::sMaxFastTracksOnce = PTHREAD_ONCE_INIT;
+
+// static
const char *FastMixerState::commandToString(Command command)
{
const char *str = FastThreadState::commandToString(command);
@@ -54,4 +65,18 @@
LOG_ALWAYS_FATAL("%s", __func__);
}
+// static
+void FastMixerState::sMaxFastTracksInit()
+{
+ char value[PROPERTY_VALUE_MAX];
+ if (property_get("ro.audio.max_fast_tracks", value, NULL) > 0) {
+ char *endptr;
+ unsigned long ul = strtoul(value, &endptr, 0);
+ if (*endptr == '\0' && kMinFastTracks <= ul && ul <= kMaxFastTracks) {
+ sMaxFastTracks = (unsigned) ul;
+ }
+ }
+ ALOGI("sMaxFastTracks = %u", sMaxFastTracks);
+}
+
} // namespace android
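The net effect is that the fast-track limit becomes a read-once tunable: sMaxFastTracksInit() runs under pthread_once, parses ro.audio.max_fast_tracks with strtoul (any base), and accepts the value only when the whole string parses and falls inside [kMinFastTracks, kMaxFastTracks]; otherwise kDefaultFastTracks (8) stands. A device would set the property in its build configuration, e.g. ro.audio.max_fast_tracks=16.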
diff --git a/services/audioflinger/FastMixerState.h b/services/audioflinger/FastMixerState.h
index 916514f..5a55c7a 100644
--- a/services/audioflinger/FastMixerState.h
+++ b/services/audioflinger/FastMixerState.h
@@ -54,7 +54,13 @@
FastMixerState();
/*virtual*/ ~FastMixerState();
- static const unsigned kMaxFastTracks = 8; // must be between 2 and 32 inclusive
+ // These are the minimum, maximum, and default values for maximum number of fast tracks
+ static const unsigned kMinFastTracks = 2;
+ static const unsigned kMaxFastTracks = 32;
+ static const unsigned kDefaultFastTracks = 8;
+
+ static unsigned sMaxFastTracks; // Configured maximum number of fast tracks
+ static pthread_once_t sMaxFastTracksOnce; // Protects initializer for sMaxFastTracks
// all pointer fields use raw pointers; objects are owned and ref-counted by the normal mixer
FastTrack mFastTracks[kMaxFastTracks];
@@ -76,6 +82,10 @@
// never returns NULL; asserts if command is invalid
static const char *commandToString(Command command);
+
+ // initialize sMaxFastTracks
+ static void sMaxFastTracksInit();
+
}; // struct FastMixerState
} // namespace android
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 21ce6b1..bee0447 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -182,13 +182,9 @@
static const int kPriorityFastMixer = 3;
static const int kPriorityFastCapture = 3;
-// IAudioFlinger::createTrack() reports back to client the total size of shared memory area
-// for the track. The client then sub-divides this into smaller buffers for its use.
-// Currently the client uses N-buffering by default, but doesn't tell us about the value of N.
-// So for now we just assume that client is double-buffered for fast tracks.
-// FIXME It would be better for client to tell AudioFlinger the value of N,
-// so AudioFlinger could allocate the right amount of memory.
-// See the client's minBufCount and mNotificationFramesAct calculations for details.
+// IAudioFlinger::createTrack() has an in/out parameter 'pFrameCount' for the total size of the
+// track buffer in shared memory. Zero on input means to use a default value. For fast tracks,
+// AudioFlinger derives the default from HAL buffer size and 'fast track multiplier'.
// This is the default value, if not specified by property.
static const int kFastTrackMultiplier = 2;
@@ -1602,7 +1598,7 @@
mSignalPending(false),
mScreenState(AudioFlinger::mScreenState),
// index 0 is reserved for normal mixer's submix
- mFastTrackAvailMask(((1 << FastMixerState::kMaxFastTracks) - 1) & ~1),
+ mFastTrackAvailMask(((1 << FastMixerState::sMaxFastTracks) - 1) & ~1),
mHwSupportsPause(false), mHwPaused(false), mFlushPending(false)
{
snprintf(mThreadName, kThreadNameLength, "AudioOut_%X", id);
@@ -2103,7 +2099,7 @@
track->mName = -1;
if (track->isFastTrack()) {
int index = track->mFastIndex;
- ALOG_ASSERT(0 < index && index < (int)FastMixerState::kMaxFastTracks);
+ ALOG_ASSERT(0 < index && index < (int)FastMixerState::sMaxFastTracks);
ALOG_ASSERT(!(mFastTrackAvailMask & (1 << index)));
mFastTrackAvailMask |= 1 << index;
// redundant as track is about to be destroyed, for dumpsys only
@@ -2153,6 +2149,7 @@
desc->mFormat = mFormat;
desc->mFrameCount = mNormalFrameCount; // FIXME see
// AudioFlinger::frameCount(audio_io_handle_t)
+ desc->mFrameCountHAL = mFrameCount;
desc->mLatency = latency_l();
break;
@@ -2673,11 +2670,10 @@
}
}
-void AudioFlinger::PlaybackThread::invalidateTracks(audio_stream_type_t streamType)
+void AudioFlinger::PlaybackThread::invalidateTracks_l(audio_stream_type_t streamType)
{
ALOGV("MixerThread::invalidateTracks() mixer %p, streamType %d, mTracks.size %zu",
this, streamType, mTracks.size());
- Mutex::Autolock _l(mLock);
size_t size = mTracks.size();
for (size_t i = 0; i < size; i++) {
@@ -2688,6 +2684,12 @@
}
}
+void AudioFlinger::PlaybackThread::invalidateTracks(audio_stream_type_t streamType)
+{
+ Mutex::Autolock _l(mLock);
+ invalidateTracks_l(streamType);
+}
+
status_t AudioFlinger::PlaybackThread::addEffectChain_l(const sp<EffectChain>& chain)
{
audio_session_t session = chain->sessionId();
@@ -2897,6 +2899,24 @@
// sink will block while writing.
ExtendedTimestamp timestamp; // use private copy to fetch
(void) mNormalSink->getTimestamp(timestamp);
+
+ // We keep track of the last valid kernel position in case we are in underrun
+ // and the normal mixer period is the same as the fast mixer period, or there
+ // is some error from the HAL.
+ if (mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL] >= 0) {
+ mTimestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL_LASTKERNELOK] =
+ mTimestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL];
+ mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL_LASTKERNELOK] =
+ mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL];
+
+ mTimestamp.mPosition[ExtendedTimestamp::LOCATION_SERVER_LASTKERNELOK] =
+ mTimestamp.mPosition[ExtendedTimestamp::LOCATION_SERVER];
+ mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_SERVER_LASTKERNELOK] =
+ mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_SERVER];
+ } else {
+ ALOGV("getTimestamp error - no valid kernel position");
+ }
+
// copy over kernel info
mTimestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL] =
timestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL];
@@ -3150,7 +3170,9 @@
uint32_t diff = mThreadThrottleTimeMs - mThreadThrottleEndMs;
if (diff > 0) {
// notify of throttle end on debug log
- ALOGD("mixer(%p) throttle end: throttle time(%u)", this, diff);
+ // but prevent spamming for bluetooth
+ ALOGD_IF(!audio_is_a2dp_out_device(outDevice()),
+ "mixer(%p) throttle end: throttle time(%u)", this, diff);
mThreadThrottleEndMs = mThreadThrottleTimeMs;
}
}
@@ -3858,7 +3880,7 @@
// at the identical fast mixer slot within the same normal mix cycle,
// is impossible because the slot isn't marked available until the end of each cycle.
int j = track->mFastIndex;
- ALOG_ASSERT(0 < j && j < (int)FastMixerState::kMaxFastTracks);
+ ALOG_ASSERT(0 < j && j < (int)FastMixerState::sMaxFastTracks);
ALOG_ASSERT(!(mFastTrackAvailMask & (1 << j)));
FastTrack *fastTrack = &state->mFastTracks[j];
@@ -5448,6 +5470,13 @@
return time;
}
+void AudioFlinger::OffloadThread::invalidateTracks(audio_stream_type_t streamType)
+{
+ Mutex::Autolock _l(mLock);
+ mFlushPending = true;
+ PlaybackThread::invalidateTracks_l(streamType);
+}
+
// ----------------------------------------------------------------------------
AudioFlinger::DuplicatingThread::DuplicatingThread(const sp<AudioFlinger>& audioFlinger,
@@ -7117,6 +7146,7 @@
desc->mSamplingRate = mSampleRate;
desc->mFormat = mFormat;
desc->mFrameCount = mFrameCount;
+ desc->mFrameCountHAL = mFrameCount;
desc->mLatency = 0;
break;
diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h
index cf896e0..0ddd279 100644
--- a/services/audioflinger/Threads.h
+++ b/services/audioflinger/Threads.h
@@ -247,6 +247,10 @@
// Called by AudioFlinger::frameCount(audio_io_handle_t output) and effects,
// and returns the [normal mix] buffer's frame count.
virtual size_t frameCount() const = 0;
+
+ // Returns the HAL's frame count, i.e. the fast mixer buffer size.
+ size_t frameCountHAL() const { return mFrameCount; }
+
size_t frameSize() const { return mFrameSize; }
// Should be "virtual status_t requestExitAndWait()" and override same
@@ -602,13 +606,11 @@
virtual bool isValidSyncEvent(const sp<SyncEvent>& event) const;
// called with AudioFlinger lock held
- void invalidateTracks(audio_stream_type_t streamType);
+ void invalidateTracks_l(audio_stream_type_t streamType);
+ virtual void invalidateTracks(audio_stream_type_t streamType);
virtual size_t frameCount() const { return mNormalFrameCount; }
- // Return's the HAL's frame count i.e. fast mixer buffer size.
- size_t frameCountHAL() const { return mFrameCount; }
-
status_t getTimestamp_l(AudioTimestamp& timestamp);
void addPatchTrack(const sp<PatchTrack>& track);
@@ -912,7 +914,7 @@
public:
virtual bool hasFastMixer() const { return mFastMixer != 0; }
virtual FastTrackUnderruns getFastTrackUnderruns(size_t fastIndex) const {
- ALOG_ASSERT(fastIndex < FastMixerState::kMaxFastTracks);
+ ALOG_ASSERT(fastIndex < FastMixerState::sMaxFastTracks);
return mFastMixerDumpState.mTracks[fastIndex].mUnderruns;
}
@@ -997,6 +999,7 @@
virtual bool waitingAsyncCallback();
virtual bool waitingAsyncCallback_l();
+ virtual void invalidateTracks(audio_stream_type_t streamType);
virtual bool keepWakeLock() const { return mKeepWakeLock; }
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index 61b30c1..41cb030 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -402,7 +402,7 @@
//mAudioTrackServerProxy->framesReadyIsCalledByMultipleThreads();
ALOG_ASSERT(thread->mFastTrackAvailMask != 0);
int i = __builtin_ctz(thread->mFastTrackAvailMask);
- ALOG_ASSERT(0 < i && i < (int)FastMixerState::kMaxFastTracks);
+ ALOG_ASSERT(0 < i && i < (int)FastMixerState::sMaxFastTracks);
// FIXME This is too eager. We allocate a fast track index before the
// fast track becomes active. Since fast tracks are a scarce resource,
// this means we are potentially denying other more important fast tracks from
@@ -1103,7 +1103,7 @@
if (local.mTimeNs[i] > 0) {
local.mPosition[i] = mFrameMap.findX(local.mPosition[i]);
// check drain state from the latest stage in the pipeline.
- if (!checked) {
+ if (!checked && i <= ExtendedTimestamp::LOCATION_KERNEL) {
mAudioTrackServerProxy->setDrained(
local.mPosition[i] >= mAudioTrackServerProxy->framesReleased());
checked = true;
diff --git a/services/audiopolicy/AudioPolicyInterface.h b/services/audiopolicy/AudioPolicyInterface.h
index b1347f4..a215b95 100644
--- a/services/audiopolicy/AudioPolicyInterface.h
+++ b/services/audiopolicy/AudioPolicyInterface.h
@@ -158,14 +158,14 @@
int indexMax) = 0;
// sets the new stream volume at a level corresponding to the supplied index for the
- // supplied device. By convention, specifying AUDIO_DEVICE_OUT_DEFAULT means
+ // supplied device. By convention, specifying AUDIO_DEVICE_OUT_DEFAULT_FOR_VOLUME means
// setting volume for all devices
virtual status_t setStreamVolumeIndex(audio_stream_type_t stream,
int index,
audio_devices_t device) = 0;
// retrieve current volume index for the specified stream and the
- // specified device. By convention, specifying AUDIO_DEVICE_OUT_DEFAULT means
+ // specified device. By convention, specifying AUDIO_DEVICE_OUT_DEFAULT_FOR_VOLUME means
// querying the volume of the active device.
virtual status_t getStreamVolumeIndex(audio_stream_type_t stream,
int *index,
diff --git a/services/audiopolicy/common/include/policy.h b/services/audiopolicy/common/include/policy.h
index f73548d..55ee91f 100755
--- a/services/audiopolicy/common/include/policy.h
+++ b/services/audiopolicy/common/include/policy.h
@@ -47,6 +47,23 @@
#define APM_AUDIO_DEVICE_IN_MATCH_ADDRESS_ALL (AUDIO_DEVICE_IN_REMOTE_SUBMIX|AUDIO_DEVICE_IN_BUS)
/**
+ * Stub audio output device. Used in policy configuration file on platforms without audio outputs.
+ * This alias of AUDIO_DEVICE_OUT_DEFAULT is only used in the audio policy context.
+ */
+#define AUDIO_DEVICE_OUT_STUB AUDIO_DEVICE_OUT_DEFAULT
+/**
+ * Stub audio input device. Used in policy configuration file on platforms without audio inputs.
+ * This alias of AUDIO_DEVICE_IN_DEFAULT is only used in the audio policy context.
+ */
+#define AUDIO_DEVICE_IN_STUB AUDIO_DEVICE_IN_DEFAULT
+/**
+ * Alias of AUDIO_DEVICE_OUT_DEFAULT, defined for clarity when this value is used by volume
+ * control APIs (e.g. setStreamVolumeIndex()).
+ */
+#define AUDIO_DEVICE_OUT_DEFAULT_FOR_VOLUME AUDIO_DEVICE_OUT_DEFAULT
+
+
+/**
* Check if the given state corresponds to an in-call state.
* @TODO find a better name for widely call state
*
diff --git a/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h
index 54fcd0b..ed2450c 100644
--- a/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h
@@ -84,8 +84,6 @@
audio_devices_t getDevicesFromHwModule(audio_module_handle_t moduleHandle) const;
- audio_policy_dev_state_t getDeviceConnectionState(const sp<DeviceDescriptor> &devDesc) const;
-
status_t dump(int fd, const String8 &tag, int spaces = 0, bool verbose = true) const;
private:
diff --git a/services/audiopolicy/common/managerdefinitions/include/HwModule.h b/services/audiopolicy/common/managerdefinitions/include/HwModule.h
index 93d03e6..dd2993d 100644
--- a/services/audiopolicy/common/managerdefinitions/include/HwModule.h
+++ b/services/audiopolicy/common/managerdefinitions/include/HwModule.h
@@ -106,7 +106,8 @@
sp<DeviceDescriptor> getDeviceDescriptor(const audio_devices_t device,
const char *device_address,
- const char *device_name) const;
+ const char *device_name,
+ bool matchAddress = true) const;
status_t dump(int fd) const;
};
diff --git a/services/audiopolicy/common/managerdefinitions/include/VolumeCurve.h b/services/audiopolicy/common/managerdefinitions/include/VolumeCurve.h
index 7c486c8..10f0766 100644
--- a/services/audiopolicy/common/managerdefinitions/include/VolumeCurve.h
+++ b/services/audiopolicy/common/managerdefinitions/include/VolumeCurve.h
@@ -74,7 +74,7 @@
public:
VolumeCurvesForStream() : mIndexMin(0), mIndexMax(1), mCanBeMuted(true)
{
- mIndexCur.add(AUDIO_DEVICE_OUT_DEFAULT, 0);
+ mIndexCur.add(AUDIO_DEVICE_OUT_DEFAULT_FOR_VOLUME, 0);
}
sp<VolumeCurve> getCurvesFor(device_category device) const
@@ -88,9 +88,9 @@
int getVolumeIndex(audio_devices_t device) const
{
device = Volume::getDeviceForVolume(device);
- // there is always a valid entry for AUDIO_DEVICE_OUT_DEFAULT
+ // there is always a valid entry for AUDIO_DEVICE_OUT_DEFAULT_FOR_VOLUME
if (mIndexCur.indexOfKey(device) < 0) {
- device = AUDIO_DEVICE_OUT_DEFAULT;
+ device = AUDIO_DEVICE_OUT_DEFAULT_FOR_VOLUME;
}
return mIndexCur.valueFor(device);
}
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioPatch.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioPatch.cpp
index b8c0550..f382dec 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioPatch.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioPatch.cpp
@@ -128,14 +128,35 @@
size_t patchesWritten = 0;
size_t patchesMax = *num_patches;
- for (size_t i = 0; i < size() && patchesWritten < patchesMax; i++) {
- const sp<AudioPatch> patch = valueAt(i);
- patches[patchesWritten] = patch->mPatch;
- patches[patchesWritten++].id = patch->mHandle;
+ *num_patches = 0;
+ for (size_t patchIndex = 0; patchIndex < size(); patchIndex++) {
+ // do not report patches with AUDIO_DEVICE_IN_STUB as source or
+ // AUDIO_DEVICE_OUT_STUB as sink as those devices are used by stub HALs by convention
+ const sp<AudioPatch> patch = valueAt(patchIndex);
+ bool skip = false;
+ for (size_t srcIndex = 0; srcIndex < patch->mPatch.num_sources && !skip; srcIndex++) {
+ if (patch->mPatch.sources[srcIndex].type == AUDIO_PORT_TYPE_DEVICE &&
+ patch->mPatch.sources[srcIndex].ext.device.type == AUDIO_DEVICE_IN_STUB) {
+ skip = true;
+ }
+ }
+ for (size_t sinkIndex = 0; sinkIndex < patch->mPatch.num_sinks && !skip; sinkIndex++) {
+ if (patch->mPatch.sinks[sinkIndex].type == AUDIO_PORT_TYPE_DEVICE &&
+ patch->mPatch.sinks[sinkIndex].ext.device.type == AUDIO_DEVICE_OUT_STUB) {
+ skip = true;
+ }
+ }
+ if (skip) {
+ continue; // to next audio patch
+ }
+ if (patchesWritten < patchesMax) {
+ patches[patchesWritten] = patch->mPatch;
+ patches[patchesWritten++].id = patch->mHandle;
+ }
+ (*num_patches)++;
ALOGV("listAudioPatches() patch %zu num_sources %d num_sinks %d",
- i, patch->mPatch.num_sources, patch->mPatch.num_sinks);
+ patchIndex, patch->mPatch.num_sources, patch->mPatch.num_sinks);
}
- *num_patches = size();
ALOGV("listAudioPatches() got %zu patches needed %d", patchesWritten, *num_patches);
return NO_ERROR;
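The rewritten loop adopts the usual count-past-capacity contract: entries that survive the stub filter are written only while room remains, but *num_patches keeps counting so the caller can size a second query. A generic sketch of the pattern (hypothetical helper, not AOSP code):

    #include <cstddef>
    #include <vector>

    template <typename T, typename SkipFn>
    void listFiltered(const std::vector<T> &all, SkipFn skip, T *out, size_t *count) {
        const size_t capacity = *count;  // caller passes the array size in
        size_t written = 0;
        *count = 0;
        for (const T &item : all) {
            if (skip(item)) {
                continue;  // e.g. patches touching AUDIO_DEVICE_*_STUB devices
            }
            if (written < capacity) {
                out[written++] = item;
            }
            (*count)++;  // total matches, even beyond capacity
        }
    }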
diff --git a/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp
index 44f380a..35f078e 100644
--- a/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp
@@ -219,12 +219,6 @@
return NO_ERROR;
}
-audio_policy_dev_state_t DeviceVector::getDeviceConnectionState(const sp<DeviceDescriptor> &devDesc) const
-{
- ssize_t index = indexOf(devDesc);
- return index >= 0 ? AUDIO_POLICY_DEVICE_STATE_AVAILABLE : AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE;
-}
-
void DeviceDescriptor::toAudioPortConfig(struct audio_port_config *dstConfig,
const struct audio_port_config *srcConfig) const
{
diff --git a/services/audiopolicy/common/managerdefinitions/src/HwModule.cpp b/services/audiopolicy/common/managerdefinitions/src/HwModule.cpp
index 2d67bd2..a85c07f 100644
--- a/services/audiopolicy/common/managerdefinitions/src/HwModule.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/HwModule.cpp
@@ -292,7 +292,8 @@
sp<DeviceDescriptor> HwModuleCollection::getDeviceDescriptor(const audio_devices_t device,
const char *device_address,
- const char *device_name) const
+ const char *device_name,
+ bool matchAddress) const
{
String8 address = (device_address == NULL) ? String8("") : String8(device_address);
// handle legacy remote submix case where the address was not always specified
@@ -305,11 +306,17 @@
if (hwModule->mHandle == 0) {
continue;
}
- DeviceVector deviceList =
- hwModule->getDeclaredDevices().getDevicesFromTypeAddr(device, address);
+ DeviceVector declaredDevices = hwModule->getDeclaredDevices();
+ DeviceVector deviceList = declaredDevices.getDevicesFromTypeAddr(device, address);
if (!deviceList.isEmpty()) {
return deviceList.itemAt(0);
}
+ if (!matchAddress) {
+ deviceList = declaredDevices.getDevicesFromType(device);
+ if (!deviceList.isEmpty()) {
+ return deviceList.itemAt(0);
+ }
+ }
}
sp<DeviceDescriptor> devDesc = new DeviceDescriptor(device);
diff --git a/services/audiopolicy/common/managerdefinitions/src/StreamDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/StreamDescriptor.cpp
index 8388a50..b3019e1 100644
--- a/services/audiopolicy/common/managerdefinitions/src/StreamDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/StreamDescriptor.cpp
@@ -26,6 +26,7 @@
#include "StreamDescriptor.h"
#include "Gains.h"
+#include "policy.h"
#include <utils/Log.h>
#include <utils/String8.h>
@@ -39,15 +40,15 @@
// Initialize the current stream's index to mIndexMax so volume isn't 0 in
// cases where the Java layer doesn't call into the audio policy service to
// set the default volume.
- mIndexCur.add(AUDIO_DEVICE_OUT_DEFAULT, mIndexMax);
+ mIndexCur.add(AUDIO_DEVICE_OUT_DEFAULT_FOR_VOLUME, mIndexMax);
}
int StreamDescriptor::getVolumeIndex(audio_devices_t device) const
{
device = Volume::getDeviceForVolume(device);
- // there is always a valid entry for AUDIO_DEVICE_OUT_DEFAULT
+ // there is always a valid entry for AUDIO_DEVICE_OUT_DEFAULT_FOR_VOLUME
if (mIndexCur.indexOfKey(device) < 0) {
- device = AUDIO_DEVICE_OUT_DEFAULT;
+ device = AUDIO_DEVICE_OUT_DEFAULT_FOR_VOLUME;
}
return mIndexCur.valueFor(device);
}
diff --git a/services/audiopolicy/common/managerdefinitions/src/TypeConverter.cpp b/services/audiopolicy/common/managerdefinitions/src/TypeConverter.cpp
index c6ed53e..f639551 100644
--- a/services/audiopolicy/common/managerdefinitions/src/TypeConverter.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/TypeConverter.cpp
@@ -51,6 +51,7 @@
MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_AUX_LINE),
MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_IP),
MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_BUS),
+ MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_STUB),
MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_AMBIENT),
MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_BUILTIN_MIC),
MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET),
@@ -74,6 +75,7 @@
MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_LOOPBACK),
MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_IP),
MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_BUS),
+ MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_STUB),
};
template<>
diff --git a/services/audiopolicy/config/audio_policy_configuration_stub.xml b/services/audiopolicy/config/audio_policy_configuration_stub.xml
new file mode 100644
index 0000000..a7747f8
--- /dev/null
+++ b/services/audiopolicy/config/audio_policy_configuration_stub.xml
@@ -0,0 +1,59 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<!-- Copyright (C) 2016 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<audioPolicyConfiguration version="1.0" xmlns:xi="http://www.w3.org/2001/XInclude">
+ <modules>
+ <module name="stub" halVersion="2.0">
+ <attachedDevices>
+ <item>Default Out</item>
+ <item>Default In</item>
+ </attachedDevices>
+ <defaultOutputDevice>Default Out</defaultOutputDevice>
+ <mixPorts>
+ <mixPort name="stub output" role="source" flags="AUDIO_OUTPUT_FLAG_PRIMARY">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+ </mixPort>
+
+ <mixPort name="stub input" role="sink">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="48000" channelMasks="AUDIO_CHANNEL_IN_STEREO"/>
+ </mixPort>
+ </mixPorts>
+ <devicePorts>
+ <devicePort tagName="Default Out" type="AUDIO_DEVICE_OUT_STUB" role="sink">
+ </devicePort>
+
+ <devicePort tagName="Default In" type="AUDIO_DEVICE_IN_STUB" role="source">
+ </devicePort>
+ </devicePorts>
+ <routes>
+ <route type="mix" sink="Default Out" sources="stub output"/>
+
+ <route type="mix" sink="stub input" sources="Default In"/>
+ </routes>
+
+ </module>
+
+ <!-- Remote Submix Audio HAL -->
+ <xi:include href="r_submix_audio_policy_configuration.xml"/>
+
+ </modules>
+
+ <xi:include href="audio_policy_volumes.xml"/>
+ <xi:include href="default_volume_tables.xml"/>
+
+</audioPolicyConfiguration>
diff --git a/services/audiopolicy/enginedefault/src/Engine.cpp b/services/audiopolicy/enginedefault/src/Engine.cpp
index f2224fd..d31429c 100755
--- a/services/audiopolicy/enginedefault/src/Engine.cpp
+++ b/services/audiopolicy/enginedefault/src/Engine.cpp
@@ -254,10 +254,6 @@
case STRATEGY_TRANSMITTED_THROUGH_SPEAKER:
device = availableOutputDevicesType & AUDIO_DEVICE_OUT_SPEAKER;
- if (!device) {
- ALOGE("getDeviceForStrategy() no device found for "\
- "STRATEGY_TRANSMITTED_THROUGH_SPEAKER");
- }
break;
case STRATEGY_SONIFICATION_RESPECTFUL:
@@ -373,11 +369,6 @@
if (device) break;
}
device = availableOutputDevicesType & AUDIO_DEVICE_OUT_EARPIECE;
- if (device) break;
- device = mApmObserver->getDefaultOutputDevice()->type();
- if (device == AUDIO_DEVICE_NONE) {
- ALOGE("getDeviceForStrategy() no device found for STRATEGY_PHONE");
- }
break;
case AUDIO_POLICY_FORCE_SPEAKER:
@@ -402,11 +393,6 @@
if (device) break;
}
device = availableOutputDevicesType & AUDIO_DEVICE_OUT_SPEAKER;
- if (device) break;
- device = mApmObserver->getDefaultOutputDevice()->type();
- if (device == AUDIO_DEVICE_NONE) {
- ALOGE("getDeviceForStrategy() no device found for STRATEGY_PHONE, FORCE_SPEAKER");
- }
break;
}
break;
@@ -431,9 +417,6 @@
if ((strategy == STRATEGY_SONIFICATION) ||
(mForceUse[AUDIO_POLICY_FORCE_FOR_SYSTEM] == AUDIO_POLICY_FORCE_SYSTEM_ENFORCED)) {
device = availableOutputDevicesType & AUDIO_DEVICE_OUT_SPEAKER;
- if (device == AUDIO_DEVICE_NONE) {
- ALOGE("getDeviceForStrategy() speaker device not found for STRATEGY_SONIFICATION");
- }
}
// The second device used for sonification is the same as the device used by media strategy
// FALL THROUGH
@@ -545,12 +528,6 @@
AUDIO_POLICY_FORCE_HDMI_SYSTEM_AUDIO_ENFORCED)) {
device &= ~AUDIO_DEVICE_OUT_SPEAKER;
}
-
- if (device) break;
- device = mApmObserver->getDefaultOutputDevice()->type();
- if (device == AUDIO_DEVICE_NONE) {
- ALOGE("getDeviceForStrategy() no device found for STRATEGY_MEDIA");
- }
} break;
default:
@@ -558,6 +535,12 @@
break;
}
+ if (device == AUDIO_DEVICE_NONE) {
+ ALOGV("getDeviceForStrategy() no device found for strategy %d", strategy);
+ device = mApmObserver->getDefaultOutputDevice()->type();
+ ALOGE_IF(device == AUDIO_DEVICE_NONE,
+ "getDeviceForStrategy() no default device defined");
+ }
ALOGVV("getDeviceForStrategy() strategy %d, device %x", strategy, device);
return device;
}
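All the per-strategy fallbacks deleted above collapse into this single epilogue: when a strategy selects nothing, fall back once to the default output device, and raise an error only if even that is undefined. This keeps stub-only platforms (whose default device is AUDIO_DEVICE_OUT_STUB) from logging spurious errors; getDeviceForInputSource() below gets the matching input-side fallback to AUDIO_DEVICE_IN_STUB.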
@@ -677,6 +660,14 @@
ALOGW("getDeviceForInputSource() invalid input source %d", inputSource);
break;
}
+ if (device == AUDIO_DEVICE_NONE) {
+ ALOGV("getDeviceForInputSource() no device found for source %d", inputSource);
+ if (availableDeviceTypes & AUDIO_DEVICE_IN_STUB) {
+ device = AUDIO_DEVICE_IN_STUB;
+ }
+ ALOGE_IF(device == AUDIO_DEVICE_NONE,
+ "getDeviceForInputSource() no default device defined");
+ }
ALOGV("getDeviceForInputSource()input source %d, device %08x", inputSource, device);
return device;
}
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index fe2f9a6..21ce8c9 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -291,7 +291,15 @@
audio_policy_dev_state_t AudioPolicyManager::getDeviceConnectionState(audio_devices_t device,
const char *device_address)
{
- sp<DeviceDescriptor> devDesc = mHwModules.getDeviceDescriptor(device, device_address, "");
+ sp<DeviceDescriptor> devDesc =
+ mHwModules.getDeviceDescriptor(device, device_address, "",
+ (strlen(device_address) != 0)/*matchAddress*/);
+
+ if (devDesc == 0) {
+ ALOGW("getDeviceConnectionState() undeclared device, type %08x, address: %s",
+ device, device_address);
+ return AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE;
+ }
DeviceVector *deviceVector;
@@ -303,7 +311,9 @@
ALOGW("getDeviceConnectionState() invalid device type %08x", device);
return AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE;
}
- return deviceVector->getDeviceConnectionState(devDesc);
+
+ return (deviceVector->getDevice(device, String8(device_address)) != 0) ?
+ AUDIO_POLICY_DEVICE_STATE_AVAILABLE : AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE;
}
void AudioPolicyManager::updateCallRouting(audio_devices_t rxDevice, int delayMs)
@@ -1080,8 +1090,16 @@
mOutputRoutes.incRouteActivity(session);
audio_devices_t newDevice;
+ AudioMix *policyMix = NULL;
+ const char *address = NULL;
if (outputDesc->mPolicyMix != NULL) {
- newDevice = AUDIO_DEVICE_OUT_REMOTE_SUBMIX;
+ policyMix = outputDesc->mPolicyMix;
+ address = policyMix->mDeviceAddress.string();
+ if ((policyMix->mRouteFlags & MIX_ROUTE_FLAG_RENDER) == MIX_ROUTE_FLAG_RENDER) {
+ newDevice = policyMix->mDeviceType;
+ } else {
+ newDevice = AUDIO_DEVICE_OUT_REMOTE_SUBMIX;
+ }
} else if (mOutputRoutes.hasRouteChanged(session)) {
newDevice = getNewOutputDevice(outputDesc, false /*fromCache*/);
checkStrategyRoute(getStrategy(stream), output);
@@ -1091,7 +1109,7 @@
uint32_t delayMs = 0;
- status_t status = startSource(outputDesc, stream, newDevice, &delayMs);
+ status_t status = startSource(outputDesc, stream, newDevice, address, &delayMs);
if (status != NO_ERROR) {
mOutputRoutes.decRouteActivity(session);
@@ -1099,11 +1117,11 @@
}
// Automatically enable the remote submix input when output is started on a re routing mix
// of type MIX_TYPE_RECORDERS
- if (audio_is_remote_submix_device(newDevice) && outputDesc->mPolicyMix != NULL &&
- outputDesc->mPolicyMix->mMixType == MIX_TYPE_RECORDERS) {
+ if (audio_is_remote_submix_device(newDevice) && policyMix != NULL &&
+ policyMix->mMixType == MIX_TYPE_RECORDERS) {
setDeviceConnectionStateInt(AUDIO_DEVICE_IN_REMOTE_SUBMIX,
AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
- outputDesc->mPolicyMix->mDeviceAddress,
+ address,
"remote-submix");
}
@@ -1117,6 +1135,7 @@
status_t AudioPolicyManager::startSource(sp<AudioOutputDescriptor> outputDesc,
audio_stream_type_t stream,
audio_devices_t device,
+ const char *address,
uint32_t *delayMs)
{
// cannot start playback of STREAM_TTS if any other output is being used
@@ -1173,7 +1192,7 @@
}
}
}
- uint32_t muteWaitMs = setOutputDevice(outputDesc, device, force);
+ uint32_t muteWaitMs = setOutputDevice(outputDesc, device, force, 0, NULL, address);
// handle special case for sonification while in call
if (isInCall()) {
@@ -1838,8 +1857,8 @@
// the requested device
// - For non default requested device, currently selected device on the output is either the
// requested device or one of the devices selected by the strategy
- // - For default requested device (AUDIO_DEVICE_OUT_DEFAULT), apply volume only if no specific
- // device volume value exists for currently selected device.
+ // - For default requested device (AUDIO_DEVICE_OUT_DEFAULT_FOR_VOLUME), apply volume only if
+ // no specific device volume value exists for currently selected device.
status_t status = NO_ERROR;
for (size_t i = 0; i < mOutputs.size(); i++) {
sp<SwAudioOutputDescriptor> desc = mOutputs.valueAt(i);
@@ -1848,7 +1867,8 @@
if (!streamsMatchForvolume(stream, (audio_stream_type_t)curStream)) {
continue;
}
- if (!desc->isStreamActive((audio_stream_type_t)curStream)) {
+ if (!(desc->isStreamActive((audio_stream_type_t)curStream) ||
+ (isInCall() && (curStream == AUDIO_STREAM_VOICE_CALL)))) {
continue;
}
routing_strategy curStrategy = getStrategy((audio_stream_type_t)curStream);
@@ -1857,7 +1877,7 @@
continue;
}
bool applyDefault = false;
- if (device != AUDIO_DEVICE_OUT_DEFAULT) {
+ if (device != AUDIO_DEVICE_OUT_DEFAULT_FOR_VOLUME) {
curStreamDevice |= device;
} else if (!mVolumeCurves->hasVolumeIndexForDevice(
stream, Volume::getDeviceForVolume(curStreamDevice))) {
@@ -1886,9 +1906,9 @@
if (!audio_is_output_device(device)) {
return BAD_VALUE;
}
- // if device is AUDIO_DEVICE_OUT_DEFAULT, return volume for device corresponding to
+ // if device is AUDIO_DEVICE_OUT_DEFAULT_FOR_VOLUME, return volume for device corresponding to
// the strategy the stream belongs to.
- if (device == AUDIO_DEVICE_OUT_DEFAULT) {
+ if (device == AUDIO_DEVICE_OUT_DEFAULT_FOR_VOLUME) {
device = getDeviceForStrategy(getStrategy(stream), true /*fromCache*/);
}
device = Volume::getDeviceForVolume(device);
@@ -2097,7 +2117,6 @@
&& (patch->mPatch.sinks[0].ext.device.type == device)
&& (strncmp(patch->mPatch.sinks[0].ext.device.address, address.string(),
AUDIO_DEVICE_MAX_ADDRESS_LEN) == 0)) {
-
if (mPolicyMixes.registerMix(address, mixes[i], desc) != NO_ERROR) {
res = INVALID_OPERATION;
} else {
@@ -2323,19 +2342,29 @@
size_t portsMax = *num_ports;
*num_ports = 0;
if (type == AUDIO_PORT_TYPE_NONE || type == AUDIO_PORT_TYPE_DEVICE) {
+ // do not report devices with type AUDIO_DEVICE_IN_STUB or AUDIO_DEVICE_OUT_STUB
+ // as they are used by stub HALs by convention
if (role == AUDIO_PORT_ROLE_SINK || role == AUDIO_PORT_ROLE_NONE) {
- for (size_t i = 0;
- i < mAvailableOutputDevices.size() && portsWritten < portsMax; i++) {
- mAvailableOutputDevices[i]->toAudioPort(&ports[portsWritten++]);
+ for (size_t i = 0; i < mAvailableOutputDevices.size(); i++) {
+ if (mAvailableOutputDevices[i]->type() == AUDIO_DEVICE_OUT_STUB) {
+ continue;
+ }
+ if (portsWritten < portsMax) {
+ mAvailableOutputDevices[i]->toAudioPort(&ports[portsWritten++]);
+ }
+ (*num_ports)++;
}
- *num_ports += mAvailableOutputDevices.size();
}
if (role == AUDIO_PORT_ROLE_SOURCE || role == AUDIO_PORT_ROLE_NONE) {
- for (size_t i = 0;
- i < mAvailableInputDevices.size() && portsWritten < portsMax; i++) {
- mAvailableInputDevices[i]->toAudioPort(&ports[portsWritten++]);
+ for (size_t i = 0; i < mAvailableInputDevices.size(); i++) {
+ if (mAvailableInputDevices[i]->type() == AUDIO_DEVICE_IN_STUB) {
+ continue;
+ }
+ if (portsWritten < portsMax) {
+ mAvailableInputDevices[i]->toAudioPort(&ports[portsWritten++]);
+ }
+ (*num_ports)++;
}
- *num_ports += mAvailableInputDevices.size();
}
}
if (type == AUDIO_PORT_TYPE_NONE || type == AUDIO_PORT_TYPE_MIX) {
@@ -2976,7 +3005,7 @@
return INVALID_OPERATION;
}
uint32_t delayMs = 0;
- status = startSource(outputDesc, stream, sinkDevice, &delayMs);
+ status = startSource(outputDesc, stream, sinkDevice, NULL, &delayMs);
if (status != NO_ERROR) {
mpClientInterface->releaseAudioPatch(sourceDesc->mPatchDesc->mAfPatchHandle, 0);
@@ -3200,6 +3229,10 @@
}
sp<SwAudioOutputDescriptor> outputDesc = new SwAudioOutputDescriptor(outProfile,
mpClientInterface);
+ const DeviceVector &supportedDevices = outProfile->getSupportedDevices();
+ const DeviceVector &devicesForType = supportedDevices.getDevicesFromType(profileType);
+ String8 address = devicesForType.size() > 0 ? devicesForType.itemAt(0)->mAddress
+ : String8("");
outputDesc->mDevice = profileType;
audio_config_t config = AUDIO_CONFIG_INITIALIZER;
@@ -3211,7 +3244,7 @@
&output,
&config,
&outputDesc->mDevice,
- String8(""),
+ address,
&outputDesc->mLatency,
outputDesc->mFlags);
@@ -3224,7 +3257,6 @@
outputDesc->mChannelMask = config.channel_mask;
outputDesc->mFormat = config.format;
- const DeviceVector &supportedDevices = outProfile->getSupportedDevices();
for (size_t k = 0; k < supportedDevices.size(); k++) {
ssize_t index = mAvailableOutputDevices.indexOf(supportedDevices[k]);
// give a valid ID to an attached device once confirmed it is reachable
@@ -3239,7 +3271,10 @@
addOutput(output, outputDesc);
setOutputDevice(outputDesc,
outputDesc->mDevice,
- true);
+ true,
+ 0,
+ NULL,
+ address.string());
}
}
// open input streams needed to access attached devices to validate
@@ -4614,9 +4649,13 @@
if (device == AUDIO_DEVICE_NONE) {
resetOutputDevice(outputDesc, delayMs, NULL);
} else {
- DeviceVector deviceList = (address == NULL) ?
- mAvailableOutputDevices.getDevicesFromType(device)
- : mAvailableOutputDevices.getDevicesFromTypeAddr(device, String8(address));
+ DeviceVector deviceList;
+ if ((address == NULL) || (strlen(address) == 0)) {
+ deviceList = mAvailableOutputDevices.getDevicesFromType(device);
+ } else {
+ deviceList = mAvailableOutputDevices.getDevicesFromTypeAddr(device, String8(address));
+ }
+
if (!deviceList.isEmpty()) {
struct audio_patch patch;
outputDesc->toAudioPortConfig(&patch.sources[0]);
@@ -4862,7 +4901,9 @@
float volumeDb = mVolumeCurves->volIndexToDb(stream, Volume::getDeviceCategory(device), index);
// if a headset is connected, apply the following rules to ring tones and notifications
// to avoid sound level bursts in user's ears:
- // - always attenuate ring tones and notifications volume by 6dB
+ // - always attenuate notifications volume by 6dB
+ // - attenuate ring tone volume by 6dB unless music is not playing and the
+ // speaker is part of the selected devices
// - if music is playing, always limit the volume to current music volume,
// with a minimum threshold at -36dB so that notification is always perceived.
const routing_strategy stream_strategy = getStrategy(stream);
@@ -4876,12 +4917,12 @@
|| ((stream_strategy == STRATEGY_ENFORCED_AUDIBLE) &&
(mEngine->getForceUse(AUDIO_POLICY_FORCE_FOR_SYSTEM) == AUDIO_POLICY_FORCE_NONE))) &&
mVolumeCurves->canBeMuted(stream)) {
- volumeDb += SONIFICATION_HEADSET_VOLUME_FACTOR_DB;
// when the phone is ringing we must consider that music could have been paused just before
// by the music application and behave as if music was active if the last music track was
// just stopped
if (isStreamActive(AUDIO_STREAM_MUSIC, SONIFICATION_HEADSET_MUSIC_DELAY) ||
mLimitRingtoneVolume) {
+ volumeDb += SONIFICATION_HEADSET_VOLUME_FACTOR_DB;
audio_devices_t musicDevice = getDeviceForStrategy(STRATEGY_MEDIA, true /*fromCache*/);
float musicVolDB = computeVolume(AUDIO_STREAM_MUSIC,
mVolumeCurves->getVolumeIndex(AUDIO_STREAM_MUSIC,
@@ -4893,6 +4934,9 @@
volumeDb = minVolDB;
ALOGV("computeVolume limiting volume to %f musicVol %f", minVolDB, musicVolDB);
}
+ } else if ((Volume::getDeviceForVolume(device) != AUDIO_DEVICE_OUT_SPEAKER) ||
+ stream_strategy != STRATEGY_SONIFICATION) {
+ volumeDb += SONIFICATION_HEADSET_VOLUME_FACTOR_DB;
}
}
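Worked example of the reordered attenuation, assuming a headset, an active music stream at -20dB, and a ring tone whose curve yields -10dB: the ring tone first takes the 6dB sonification cut (-16dB), then is capped at max(musicVolDB, -36dB) = -20dB, so it ends at the music level. If music were inactive and the speaker carried the ring tone, neither branch would apply and the full -10dB would be kept, which is exactly the burst-avoidance exception the new comment describes.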
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.h b/services/audiopolicy/managerdefault/AudioPolicyManager.h
index 0420679..2d6a873 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.h
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.h
@@ -49,10 +49,8 @@
// ----------------------------------------------------------------------------
// Attenuation applied to STRATEGY_SONIFICATION streams when a headset is connected: 6dB
-#define SONIFICATION_HEADSET_VOLUME_FACTOR 0.5
#define SONIFICATION_HEADSET_VOLUME_FACTOR_DB (-6)
// Min volume for STRATEGY_SONIFICATION streams when limited by music volume: -36dB
-#define SONIFICATION_HEADSET_VOLUME_MIN 0.016
#define SONIFICATION_HEADSET_VOLUME_MIN_DB (-36)
// Time in milliseconds during which we consider that music is still active after a music
@@ -484,6 +482,7 @@
status_t startSource(sp<AudioOutputDescriptor> outputDesc,
audio_stream_type_t stream,
audio_devices_t device,
+ const char *address,
uint32_t *delayMs);
status_t stopSource(sp<AudioOutputDescriptor> outputDesc,
audio_stream_type_t stream,
diff --git a/services/camera/libcameraservice/Android.mk b/services/camera/libcameraservice/Android.mk
index c011613..ebe65e4 100644
--- a/services/camera/libcameraservice/Android.mk
+++ b/services/camera/libcameraservice/Android.mk
@@ -66,7 +66,8 @@
libhardware \
libsync \
libcamera_metadata \
- libjpeg
+ libjpeg \
+ libmemunreachable
LOCAL_C_INCLUDES += \
system/media/private/camera/include \
diff --git a/services/camera/libcameraservice/CameraFlashlight.cpp b/services/camera/libcameraservice/CameraFlashlight.cpp
index 0afd945..ad08a68 100644
--- a/services/camera/libcameraservice/CameraFlashlight.cpp
+++ b/services/camera/libcameraservice/CameraFlashlight.cpp
@@ -679,7 +679,8 @@
status_t res;
if (enabled) {
bool hasFlash = false;
- res = hasFlashUnitLocked(cameraId, &hasFlash);
+ // Check if it has a flash unit and leave camera device open.
+ res = hasFlashUnitLocked(cameraId, &hasFlash, /*keepDeviceOpen*/true);
// invalid camera?
if (res) {
// hasFlashUnitLocked() returns BAD_INDEX if mDevice is connected to
@@ -688,6 +689,8 @@
}
// no flash unit?
if (!hasFlash) {
+ // Disconnect camera device if it has no flash.
+ disconnectCameraDevice();
return -ENOSYS;
}
} else if (mDevice == NULL || cameraId != mCameraId) {
@@ -716,21 +719,28 @@
status_t CameraHardwareInterfaceFlashControl::hasFlashUnit(
const String8& cameraId, bool *hasFlash) {
Mutex::Autolock l(mLock);
- return hasFlashUnitLocked(cameraId, hasFlash);
+ // Close device after checking if it has a flash unit.
+ return hasFlashUnitLocked(cameraId, hasFlash, /*keepDeviceOpen*/false);
}
status_t CameraHardwareInterfaceFlashControl::hasFlashUnitLocked(
- const String8& cameraId, bool *hasFlash) {
+ const String8& cameraId, bool *hasFlash, bool keepDeviceOpen) {
+ bool closeCameraDevice = false;
+
if (!hasFlash) {
return BAD_VALUE;
}
status_t res;
if (mDevice == NULL) {
+ // Connect to camera device to query if it has a flash unit.
res = connectCameraDevice(cameraId);
if (res) {
return res;
}
+ // Close the camera device only when it was just opened here and the caller doesn't
+ // want to keep it open.
+ closeCameraDevice = !keepDeviceOpen;
}
if (cameraId != mCameraId) {
@@ -745,6 +755,15 @@
*hasFlash = false;
}
+ if (closeCameraDevice) {
+ res = disconnectCameraDevice();
+ if (res != OK) {
+ ALOGE("%s: Failed to disconnect camera device. %s (%d)", __FUNCTION__,
+ strerror(-res), res);
+ return res;
+ }
+ }
+
return OK;
}
@@ -869,9 +888,13 @@
return OK;
}
- mParameters.set(CameraParameters::KEY_FLASH_MODE,
- CameraParameters::FLASH_MODE_OFF);
- mDevice->setParameters(mParameters);
+ if (mParameters.get(CameraParameters::KEY_FLASH_MODE)) {
+ // There is a flash, turn it off.
+ // (If there isn't one, leave the parameter null)
+ mParameters.set(CameraParameters::KEY_FLASH_MODE,
+ CameraParameters::FLASH_MODE_OFF);
+ mDevice->setParameters(mParameters);
+ }
mDevice->stopPreview();
status_t res = native_window_api_disconnect(mSurface.get(),
NATIVE_WINDOW_API_CAMERA);
diff --git a/services/camera/libcameraservice/CameraFlashlight.h b/services/camera/libcameraservice/CameraFlashlight.h
index 4d5fe8d..5cde372 100644
--- a/services/camera/libcameraservice/CameraFlashlight.h
+++ b/services/camera/libcameraservice/CameraFlashlight.h
@@ -203,7 +203,11 @@
status_t getSmallestSurfaceSize(int32_t *width, int32_t *height);
// protected by mLock
- status_t hasFlashUnitLocked(const String8& cameraId, bool *hasFlash);
+ // If this function opens the camera device to check for a flash unit, the device remains
+ // open when keepDeviceOpen is true and is closed when keepDeviceOpen is false. If the
+ // camera device is already open when this function is called, keepDeviceOpen is ignored.
+ status_t hasFlashUnitLocked(const String8& cameraId, bool *hasFlash, bool keepDeviceOpen);
CameraModule *mCameraModule;
const camera_module_callbacks_t *mCallbacks;
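
The keepDeviceOpen contract documented above, distilled into a self-contained stand-in (the real code goes through CameraHardwareInterface and holds mLock; everything here is a sketch):

    // Sketch: only close the device if this call opened it and the caller did
    // not ask to keep it open; if the device was already open on entry,
    // keepDeviceOpen is ignored, matching the hasFlashUnitLocked() doc above.
    struct FlashQuery {
        bool deviceOpen = false;

        bool open()  { deviceOpen = true; return true; }   // stand-in
        void close() { deviceOpen = false; }               // stand-in
        bool queryHasFlash() const { return true; }        // stand-in

        bool hasFlashUnit(bool keepDeviceOpen) {
            bool closeAfter = false;
            if (!deviceOpen) {
                if (!open()) return false;
                closeAfter = !keepDeviceOpen;
            }
            bool hasFlash = queryHasFlash();
            if (closeAfter) close();
            return hasFlash;
        }
    };
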
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index 0c88dad..ff73c28 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -41,6 +41,7 @@
#include <cutils/properties.h>
#include <gui/Surface.h>
#include <hardware/hardware.h>
+#include <memunreachable/memunreachable.h>
#include <media/AudioSystem.h>
#include <media/IMediaHTTPService.h>
#include <media/mediaplayer.h>
@@ -463,6 +464,12 @@
if (rc.isOk()) {
cameraInfo->facing = info.facing;
cameraInfo->orientation = info.orientation;
+ // CameraInfo is for android.hardware.Camera which does not
+ // support external camera facing. The closest approximation would be
+ // front camera.
+ if (cameraInfo->facing == CAMERA_FACING_EXTERNAL) {
+ cameraInfo->facing = CAMERA_FACING_FRONT;
+ }
}
return rc;
}
@@ -766,6 +773,7 @@
case CAMERA_DEVICE_API_VERSION_3_1:
case CAMERA_DEVICE_API_VERSION_3_2:
case CAMERA_DEVICE_API_VERSION_3_3:
+ case CAMERA_DEVICE_API_VERSION_3_4:
if (effectiveApiLevel == API_1) { // Camera1 API route
sp<ICameraClient> tmp = static_cast<ICameraClient*>(cameraCb.get());
*client = new Camera2Client(cameraService, tmp, packageName, cameraId, facing,
@@ -1611,6 +1619,7 @@
break;
case CAMERA_DEVICE_API_VERSION_3_2:
case CAMERA_DEVICE_API_VERSION_3_3:
+ case CAMERA_DEVICE_API_VERSION_3_4:
ALOGV("%s: Camera id %d uses HAL3.2 or newer, supports api1/api2 directly",
__FUNCTION__, cameraId);
*isSupported = true;
@@ -1708,6 +1717,7 @@
case CAMERA_DEVICE_API_VERSION_3_1:
case CAMERA_DEVICE_API_VERSION_3_2:
case CAMERA_DEVICE_API_VERSION_3_3:
+ case CAMERA_DEVICE_API_VERSION_3_4:
// in support
break;
case CAMERA_DEVICE_API_VERSION_2_0:
@@ -2595,16 +2605,32 @@
write(fd, "\n", 1);
camera3::CameraTraces::dump(fd, args);
- // change logging level
+ // Process dump arguments, if any
int n = args.size();
- for (int i = 0; i + 1 < n; i++) {
- String16 verboseOption("-v");
+ String16 verboseOption("-v");
+ String16 unreachableOption("--unreachable");
+ for (int i = 0; i < n; i++) {
if (args[i] == verboseOption) {
+ // change logging level
+ if (i + 1 >= n) continue;
String8 levelStr(args[i+1]);
int level = atoi(levelStr.string());
result = String8::format("\nSetting log level to %d.\n", level);
setLogLevel(level);
write(fd, result.string(), result.size());
+ } else if (args[i] == unreachableOption) {
+ // Dump memory analysis
+ // TODO - should limit be an argument parameter?
+ UnreachableMemoryInfo info;
+ bool success = GetUnreachableMemory(info, /*limit*/ 10000);
+ if (!success) {
+ dprintf(fd, "\nUnable to dump unreachable memory. "
+ "Try disabling SELinux enforcement.\n");
+ } else {
+ dprintf(fd, "\nDumping unreachable memory:\n");
+ std::string s = info.ToString(/*log_contents*/ true);
+ write(fd, s.c_str(), s.size());
+ }
}
}
}
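
For reference, the new dump path can be exercised with something like `dumpsys media.camera --unreachable` (service name assumed from the camera service registration; it is not shown in this diff). A minimal sketch of the libmemunreachable usage, matching the calls above:

    // Sketch of the --unreachable handler (Android-only; links against
    // libmemunreachable). Collection forks and ptraces the process, which is
    // why SELinux enforcement can make it fail, as the error message notes.
    #include <memunreachable/memunreachable.h>
    #include <cstdio>
    #include <string>
    #include <unistd.h>

    namespace android {

    void dumpUnreachable(int fd) {
        UnreachableMemoryInfo info;
        if (!GetUnreachableMemory(info, /*limit*/ 10000)) {
            dprintf(fd, "Unable to dump unreachable memory.\n");
            return;
        }
        std::string s = info.ToString(/*log_contents*/ true);
        write(fd, s.c_str(), s.size());
    }

    }  // namespace android
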
diff --git a/services/camera/libcameraservice/api1/Camera2Client.cpp b/services/camera/libcameraservice/api1/Camera2Client.cpp
index 4eb7b03..c8e64fe 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.cpp
+++ b/services/camera/libcameraservice/api1/Camera2Client.cpp
@@ -1242,6 +1242,12 @@
ALOGW("%s: Not supported in buffer queue mode.", __FUNCTION__);
}
+void Camera2Client::releaseRecordingFrameHandle(native_handle_t *handle) {
+ (void)handle;
+ ATRACE_CALL();
+ ALOGW("%s: Not supported in buffer queue mode.", __FUNCTION__);
+}
+
status_t Camera2Client::autoFocus() {
ATRACE_CALL();
Mutex::Autolock icl(mBinderSerializationLock);
diff --git a/services/camera/libcameraservice/api1/Camera2Client.h b/services/camera/libcameraservice/api1/Camera2Client.h
index 12ee157..3cb9e4f 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.h
+++ b/services/camera/libcameraservice/api1/Camera2Client.h
@@ -71,6 +71,7 @@
virtual void stopRecording();
virtual bool recordingEnabled();
virtual void releaseRecordingFrame(const sp<IMemory>& mem);
+ virtual void releaseRecordingFrameHandle(native_handle_t *handle);
virtual status_t autoFocus();
virtual status_t cancelAutoFocus();
virtual status_t takePicture(int msgType);
diff --git a/services/camera/libcameraservice/api1/CameraClient.cpp b/services/camera/libcameraservice/api1/CameraClient.cpp
index d2fedf8..266fb03 100644
--- a/services/camera/libcameraservice/api1/CameraClient.cpp
+++ b/services/camera/libcameraservice/api1/CameraClient.cpp
@@ -19,6 +19,7 @@
#include <cutils/properties.h>
#include <gui/Surface.h>
+#include <media/hardware/HardwareAPI.h>
#include "api1/CameraClient.h"
#include "device1/CameraHardwareInterface.h"
@@ -488,6 +489,39 @@
mHardware->releaseRecordingFrame(mem);
}
+void CameraClient::releaseRecordingFrameHandle(native_handle_t *handle) {
+ if (handle == nullptr) return;
+
+ sp<IMemory> dataPtr;
+ {
+ Mutex::Autolock l(mAvailableCallbackBuffersLock);
+ if (!mAvailableCallbackBuffers.empty()) {
+ dataPtr = mAvailableCallbackBuffers.back();
+ mAvailableCallbackBuffers.pop_back();
+ }
+ }
+
+ if (dataPtr == nullptr) {
+ ALOGE("%s: %d: No callback buffer available. Dropping a native handle.", __FUNCTION__,
+ __LINE__);
+ native_handle_close(handle);
+ native_handle_delete(handle);
+ return;
+ } else if (dataPtr->size() != sizeof(VideoNativeHandleMetadata)) {
+ ALOGE("%s: %d: Callback buffer size doesn't match VideoNativeHandleMetadata", __FUNCTION__,
+ __LINE__);
+ native_handle_close(handle);
+ native_handle_delete(handle);
+ return;
+ }
+
+ VideoNativeHandleMetadata *metadata = (VideoNativeHandleMetadata*)(dataPtr->pointer());
+ metadata->eType = kMetadataBufferTypeNativeHandleSource;
+ metadata->pHandle = handle;
+
+ mHardware->releaseRecordingFrame(dataPtr);
+}
+
status_t CameraClient::setVideoBufferMode(int32_t videoBufferMode) {
LOG1("setVideoBufferMode: %d", videoBufferMode);
bool enableMetadataInBuffers = false;
@@ -929,8 +963,28 @@
int32_t msgType, const sp<IMemory>& dataPtr) {
sp<hardware::ICameraClient> c = mRemoteCallback;
mLock.unlock();
- if (c != 0) {
- c->dataCallbackTimestamp(timestamp, msgType, dataPtr);
+ if (c != 0 && dataPtr != nullptr) {
+ native_handle_t* handle = nullptr;
+
+ // Check if dataPtr contains a VideoNativeHandleMetadata.
+ if (dataPtr->size() == sizeof(VideoNativeHandleMetadata)) {
+ VideoNativeHandleMetadata *metadata =
+ (VideoNativeHandleMetadata*)(dataPtr->pointer());
+ if (metadata->eType == kMetadataBufferTypeNativeHandleSource) {
+ handle = metadata->pHandle;
+ }
+ }
+
+ // If dataPtr contains a native handle, send it via recordingFrameHandleCallbackTimestamp.
+ if (handle != nullptr) {
+ {
+ Mutex::Autolock l(mAvailableCallbackBuffersLock);
+ mAvailableCallbackBuffers.push_back(dataPtr);
+ }
+ c->recordingFrameHandleCallbackTimestamp(timestamp, handle);
+ } else {
+ c->dataCallbackTimestamp(timestamp, msgType, dataPtr);
+ }
}
}
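
What the buffer recycling above amounts to: the HAL hands up an IMemory holding a VideoNativeHandleMetadata; the service strips out the native handle, stashes the IMemory in mAvailableCallbackBuffers, and sends only the handle across binder. When the client returns the handle, a stashed buffer is repacked so the HAL sees the same layout it originally sent. A hedged sketch of the two halves (assumes the AOSP struct in media/hardware/HardwareAPI.h; IMemory plumbing and error paths omitted):

    #include <media/hardware/HardwareAPI.h>        // VideoNativeHandleMetadata
    #include <media/hardware/MetadataBufferType.h> // kMetadataBufferTypeNativeHandleSource

    // Outbound (handleCallbackTimestamp): extract the handle if present.
    native_handle_t* unpackHandle(const android::VideoNativeHandleMetadata& m) {
        return (m.eType == android::kMetadataBufferTypeNativeHandleSource)
                ? m.pHandle : nullptr;
    }

    // Inbound (releaseRecordingFrameHandle): repack into a stashed buffer.
    void packHandle(android::VideoNativeHandleMetadata* m, native_handle_t* handle) {
        m->eType = android::kMetadataBufferTypeNativeHandleSource;
        m->pHandle = handle;
    }
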
diff --git a/services/camera/libcameraservice/api1/CameraClient.h b/services/camera/libcameraservice/api1/CameraClient.h
index 603fd17..4f46fc4 100644
--- a/services/camera/libcameraservice/api1/CameraClient.h
+++ b/services/camera/libcameraservice/api1/CameraClient.h
@@ -49,6 +49,7 @@
virtual void stopRecording();
virtual bool recordingEnabled();
virtual void releaseRecordingFrame(const sp<IMemory>& mem);
+ virtual void releaseRecordingFrameHandle(native_handle_t *handle);
virtual status_t autoFocus();
virtual status_t cancelAutoFocus();
virtual status_t takePicture(int msgType);
@@ -148,6 +149,12 @@
// Debugging information
CameraParameters mLatestSetParameters;
+ // mAvailableCallbackBuffers stores the sp<IMemory> buffers the HAL uses to send
+ // VideoNativeHandleMetadata. They are reused to send VideoNativeHandleMetadata back to the
+ // HAL when the client returns a native handle via releaseRecordingFrameHandle.
+ Mutex mAvailableCallbackBuffersLock;
+ std::vector<sp<IMemory>> mAvailableCallbackBuffers;
+
// We need to avoid the deadlock when the incoming command thread and
// the CameraHardwareInterface callback thread both want to grab mLock.
// An extra flag is used to tell the callback thread that it should stop
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index b6c9900..dbec34e 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -75,6 +75,7 @@
Camera2ClientBase(cameraService, remoteCallback, clientPackageName,
cameraId, cameraFacing, clientPid, clientUid, servicePid),
mInputStream(),
+ mStreamingRequestId(REQUEST_ID_NONE),
mRequestIdCounter(0) {
ATRACE_CALL();
@@ -233,7 +234,8 @@
res = STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION,
msg.string());
} else {
- mStreamingRequestList.push_back(submitInfo->mRequestId);
+ Mutex::Autolock idLock(mStreamingRequestIdLock);
+ mStreamingRequestId = submitInfo->mRequestId;
}
} else {
err = mDevice->captureList(metadataRequestList, &(submitInfo->mLastFrameNumber));
@@ -270,17 +272,10 @@
return STATUS_ERROR(CameraService::ERROR_DISCONNECTED, "Camera device no longer alive");
}
- Vector<int>::iterator it, end;
- for (it = mStreamingRequestList.begin(), end = mStreamingRequestList.end();
- it != end; ++it) {
- if (*it == requestId) {
- break;
- }
- }
-
- if (it == end) {
- String8 msg = String8::format("Camera %d: Did not find request ID %d in list of "
- "streaming requests", mCameraId, requestId);
+ Mutex::Autolock idLock(mStreamingRequestIdLock);
+ if (mStreamingRequestId != requestId) {
+ String8 msg = String8::format("Camera %d: Canceling request ID %d doesn't match "
+ "current request ID %d", mCameraId, requestId, mStreamingRequestId);
ALOGE("%s: %s", __FUNCTION__, msg.string());
return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
}
@@ -290,7 +285,7 @@
if (err == OK) {
ALOGV("%s: Camera %d: Successfully cleared streaming request",
__FUNCTION__, mCameraId);
- mStreamingRequestList.erase(it);
+ mStreamingRequestId = REQUEST_ID_NONE;
} else {
res = STATUS_ERROR_FMT(CameraService::ERROR_INVALID_OPERATION,
"Camera %d: Error clearing streaming request: %s (%d)",
@@ -767,7 +762,8 @@
}
// FIXME: Also need to check repeating burst.
- if (!mStreamingRequestList.isEmpty()) {
+ Mutex::Autolock idLock(mStreamingRequestIdLock);
+ if (mStreamingRequestId != REQUEST_ID_NONE) {
String8 msg = String8::format(
"Camera %d: Try to waitUntilIdle when there are active streaming requests",
mCameraId);
@@ -799,7 +795,8 @@
return STATUS_ERROR(CameraService::ERROR_DISCONNECTED, "Camera device no longer alive");
}
- mStreamingRequestList.clear();
+ Mutex::Autolock idLock(mStreamingRequestIdLock);
+ mStreamingRequestId = REQUEST_ID_NONE;
status_t err = mDevice->flush(lastFrameNumber);
if (err != OK) {
res = STATUS_ERROR_FMT(CameraService::ERROR_INVALID_OPERATION,
@@ -982,6 +979,17 @@
}
}
+void CameraDeviceClient::notifyRepeatingRequestError(long lastFrameNumber) {
+ sp<hardware::camera2::ICameraDeviceCallbacks> remoteCb = getRemoteCallback();
+
+ if (remoteCb != 0) {
+ remoteCb->onRepeatingRequestError(lastFrameNumber);
+ }
+
+ Mutex::Autolock idLock(mStreamingRequestIdLock);
+ mStreamingRequestId = REQUEST_ID_NONE;
+}
+
void CameraDeviceClient::notifyIdle() {
// Thread safe. Don't bother locking.
sp<hardware::camera2::ICameraDeviceCallbacks> remoteCb = getRemoteCallback();
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.h b/services/camera/libcameraservice/api2/CameraDeviceClient.h
index 38137a2..d792b7d 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.h
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.h
@@ -160,6 +160,7 @@
const CaptureResultExtras& resultExtras);
virtual void notifyShutter(const CaptureResultExtras& resultExtras, nsecs_t timestamp);
virtual void notifyPrepared(int streamId);
+ virtual void notifyRepeatingRequestError(long lastFrameNumber);
/**
* Interface used by independent components of CameraDeviceClient.
@@ -205,8 +206,10 @@
int32_t id;
} mInputStream;
- // Request ID
- Vector<int> mStreamingRequestList;
+ // Streaming request ID
+ int32_t mStreamingRequestId;
+ Mutex mStreamingRequestIdLock;
+ static const int32_t REQUEST_ID_NONE = -1;
int32_t mRequestIdCounter;
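
Design note: the camera2 API allows at most one repeating request at a time (a new setRepeatingRequest replaces the previous one), so a mutex-guarded scalar is sufficient where the Vector<int> was overkill. A self-contained sketch of the bookkeeping (names mirror the diff; std::mutex stands in for android::Mutex):

    #include <cstdint>
    #include <mutex>

    class StreamingRequestTracker {
        static constexpr int32_t REQUEST_ID_NONE = -1;
        std::mutex mLock;
        int32_t mStreamingRequestId = REQUEST_ID_NONE;
    public:
        void onRepeatingSubmitted(int32_t id) {
            std::lock_guard<std::mutex> l(mLock);
            mStreamingRequestId = id;   // replaces any previous repeating request
        }
        bool cancel(int32_t id) {       // mirrors cancelRequest(): id must match
            std::lock_guard<std::mutex> l(mLock);
            if (mStreamingRequestId != id) return false;
            mStreamingRequestId = REQUEST_ID_NONE;
            return true;
        }
        bool idle() {                   // mirrors the waitUntilIdle() precondition
            std::lock_guard<std::mutex> l(mLock);
            return mStreamingRequestId == REQUEST_ID_NONE;
        }
    };
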
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.cpp b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
index 2cc150d..c0d6da6 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.cpp
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
@@ -306,6 +306,14 @@
}
template <typename TClientBase>
+void Camera2ClientBase<TClientBase>::notifyRepeatingRequestError(long lastFrameNumber) {
+ (void)lastFrameNumber;
+
+ ALOGV("%s: Repeating request was stopped. Last frame number is %ld",
+ __FUNCTION__, lastFrameNumber);
+}
+
+template <typename TClientBase>
int Camera2ClientBase<TClientBase>::getCameraId() const {
return TClientBase::mCameraId;
}
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.h b/services/camera/libcameraservice/common/Camera2ClientBase.h
index 6eea2f4..4f60034 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.h
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.h
@@ -73,6 +73,7 @@
virtual void notifyAutoWhitebalance(uint8_t newState,
int triggerId);
virtual void notifyPrepared(int streamId);
+ virtual void notifyRepeatingRequestError(long lastFrameNumber);
int getCameraId() const;
const sp<CameraDeviceBase>&
diff --git a/services/camera/libcameraservice/common/CameraDeviceBase.h b/services/camera/libcameraservice/common/CameraDeviceBase.h
index d570d4b..35ec531 100644
--- a/services/camera/libcameraservice/common/CameraDeviceBase.h
+++ b/services/camera/libcameraservice/common/CameraDeviceBase.h
@@ -209,6 +209,7 @@
virtual void notifyAutoExposure(uint8_t newState, int triggerId) = 0;
virtual void notifyAutoWhitebalance(uint8_t newState,
int triggerId) = 0;
+ virtual void notifyRepeatingRequestError(long lastFrameNumber) = 0;
protected:
virtual ~NotificationListener();
};
diff --git a/services/camera/libcameraservice/common/CameraModule.cpp b/services/camera/libcameraservice/common/CameraModule.cpp
index f33d1ba..073144c 100644
--- a/services/camera/libcameraservice/common/CameraModule.cpp
+++ b/services/camera/libcameraservice/common/CameraModule.cpp
@@ -29,6 +29,8 @@
ATRACE_CALL();
Vector<int32_t> derivedCharKeys;
+ Vector<int32_t> derivedRequestKeys;
+ Vector<int32_t> derivedResultKeys;
// Keys added in HAL3.3
if (deviceVersion < CAMERA_DEVICE_API_VERSION_3_3) {
Vector<uint8_t> controlModes;
@@ -180,6 +182,9 @@
ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
defaultRange, 2);
derivedCharKeys.push(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE);
+ // Actual request/results will be derived by camera device.
+ derivedRequestKeys.push(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST);
+ derivedResultKeys.push(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST);
}
}
}
@@ -196,19 +201,35 @@
// Add those newly added keys to AVAILABLE_CHARACTERISTICS_KEYS
// This has to be done at this end of this function.
- entry = chars.find(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS);
- Vector<int32_t> availableCharsKeys;
- availableCharsKeys.setCapacity(entry.count + derivedCharKeys.size());
- for (size_t i = 0; i < entry.count; i++) {
- availableCharsKeys.push(entry.data.i32[i]);
+ if (derivedCharKeys.size() > 0) {
+ appendAvailableKeys(
+ chars, ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, derivedCharKeys);
}
- for (size_t i = 0; i < derivedCharKeys.size(); i++) {
- availableCharsKeys.push(derivedCharKeys[i]);
+ if (derivedRequestKeys.size() > 0) {
+ appendAvailableKeys(
+ chars, ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, derivedRequestKeys);
}
- chars.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, availableCharsKeys);
+ if (derivedResultKeys.size() > 0) {
+ appendAvailableKeys(
+ chars, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS, derivedResultKeys);
+ }
return;
}
+void CameraModule::appendAvailableKeys(CameraMetadata &chars,
+ int32_t keyTag, const Vector<int32_t>& appendKeys) {
+ camera_metadata_entry entry = chars.find(keyTag);
+ Vector<int32_t> availableKeys;
+ availableKeys.setCapacity(entry.count + appendKeys.size());
+ for (size_t i = 0; i < entry.count; i++) {
+ availableKeys.push(entry.data.i32[i]);
+ }
+ for (size_t i = 0; i < appendKeys.size(); i++) {
+ availableKeys.push(appendKeys[i]);
+ }
+ chars.update(keyTag, availableKeys);
+}
+
CameraModule::CameraModule(camera_module_t *module) {
if (module == NULL) {
ALOGE("%s: camera hardware module must not be null", __FUNCTION__);
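
The factored-out helper in plain terms: read the existing int32 list for keyTag, append the derived keys, and write the merged list back. A generic stand-in (the real code goes through camera_metadata_entry and CameraMetadata::update()):

    #include <cstdint>
    #include <vector>

    // Merge appendKeys onto the existing list; mirrors appendAvailableKeys().
    void appendAvailableKeys(std::vector<int32_t>& available,
                             const std::vector<int32_t>& appendKeys) {
        available.reserve(available.size() + appendKeys.size());
        available.insert(available.end(), appendKeys.begin(), appendKeys.end());
    }
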
diff --git a/services/camera/libcameraservice/common/CameraModule.h b/services/camera/libcameraservice/common/CameraModule.h
index 36822c7..1a1c274 100644
--- a/services/camera/libcameraservice/common/CameraModule.h
+++ b/services/camera/libcameraservice/common/CameraModule.h
@@ -57,8 +57,10 @@
private:
// Derive camera characteristics keys defined after HAL device version
static void deriveCameraCharacteristicsKeys(uint32_t deviceVersion, CameraMetadata &chars);
+ // Helper function to append available[request|result|chars]Keys
+ static void appendAvailableKeys(CameraMetadata &chars,
+ int32_t keyTag, const Vector<int32_t>& appendKeys);
status_t filterOpenErrorCode(status_t err);
-
camera_module_t *mModule;
KeyedVector<int, camera_info> mCameraInfoMap;
Mutex mCameraInfoLock;
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index 1caf157..0e4e244 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -200,6 +200,14 @@
mDeviceInfo = info.static_camera_characteristics;
mHal3Device = device;
+ // Determine whether we need to derive sensitivity boost values for older devices.
+ // If the post-RAW sensitivity boost range is listed, the post-RAW sensitivity boost
+ // control should be listed as well (with a default value of 100).
+ if (mDeviceVersion < CAMERA_DEVICE_API_VERSION_3_4 &&
+ mDeviceInfo.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE)) {
+ mDerivePostRawSensKey = true;
+ }
+
internalUpdateStatusLocked(STATUS_UNCONFIGURED);
mNextStreamId = 0;
mDummyStreamId = NO_STREAM;
@@ -1310,9 +1318,19 @@
__FUNCTION__, templateId);
return BAD_VALUE;
}
- *request = rawRequest;
+
mRequestTemplateCache[templateId] = rawRequest;
+ // Derive some new keys for backward compatibility
+ if (mDerivePostRawSensKey && !mRequestTemplateCache[templateId].exists(
+ ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
+ int32_t defaultBoost[1] = {100};
+ mRequestTemplateCache[templateId].update(
+ ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
+ defaultBoost, 1);
+ }
+
+ *request = mRequestTemplateCache[templateId];
return OK;
}
@@ -1952,7 +1970,7 @@
if (mIsConstrainedHighSpeedConfiguration) {
pid_t requestThreadTid = mRequestThread->getTid();
res = requestPriority(getpid(), requestThreadTid,
- kConstrainedHighSpeedThreadPriority, true);
+ kConstrainedHighSpeedThreadPriority, /*asynchronous*/ false);
if (res != OK) {
ALOGW("Can't set realtime priority for request processing thread: %s (%d)",
strerror(-res), res);
@@ -2256,6 +2274,15 @@
captureResult.mMetadata.append(collectedPartialResult);
}
+ // Derive some new keys for backward compatibility
+ if (mDerivePostRawSensKey && !captureResult.mMetadata.exists(
+ ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
+ int32_t defaultBoost[1] = {100};
+ captureResult.mMetadata.update(
+ ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
+ defaultBoost, 1);
+ }
+
captureResult.mMetadata.sort();
// Check that there's a timestamp in the result metadata
@@ -2780,6 +2807,11 @@
status_t Camera3Device::RequestThread::clearRepeatingRequests(/*out*/int64_t *lastFrameNumber) {
Mutex::Autolock l(mRequestLock);
+ return clearRepeatingRequestsLocked(lastFrameNumber);
+}
+
+status_t Camera3Device::RequestThread::clearRepeatingRequestsLocked(/*out*/int64_t *lastFrameNumber) {
mRepeatingRequests.clear();
if (lastFrameNumber != NULL) {
*lastFrameNumber = mRepeatingLastFrameNumber;
@@ -2934,6 +2966,22 @@
}
}
+void Camera3Device::RequestThread::checkAndStopRepeatingRequest() {
+ Mutex::Autolock l(mRequestLock);
+ // Check all streams needed by repeating requests are still valid. Otherwise, stop
+ // repeating requests.
+ for (const auto& request : mRepeatingRequests) {
+ for (const auto& s : request->mOutputStreams) {
+ if (s->isAbandoned()) {
+ int64_t lastFrameNumber = 0;
+ clearRepeatingRequestsLocked(&lastFrameNumber);
+ mListener->notifyRepeatingRequestError(lastFrameNumber);
+ return;
+ }
+ }
+ }
+}
+
bool Camera3Device::RequestThread::threadLoop() {
ATRACE_CALL();
status_t res;
@@ -2965,6 +3013,8 @@
if (res == TIMED_OUT) {
// Not a fatal error if getting output buffers time out.
cleanUpFailedRequests(/*sendRequestError*/ true);
+ // Check if any stream is abandoned.
+ checkAndStopRepeatingRequest();
return true;
} else if (res != OK) {
cleanUpFailedRequests(/*sendRequestError*/ false);
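
The two derivation sites above share one rule: for pre-3.4 HALs that advertise ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE, fill in the neutral boost (100, i.e. no extra gain after RAW) wherever a request template or capture result omits the key, so API clients always see it. A generic stand-in for that rule (the real code uses CameraMetadata; the tag constant here is a hypothetical placeholder):

    #include <cstdint>
    #include <map>

    using Metadata = std::map<uint32_t, int32_t>;
    constexpr uint32_t kPostRawSensitivityBoost = 0x1;  // hypothetical tag id

    // Insert the default only when the key is absent, as both hunks above do.
    void derivePostRawBoost(Metadata& md) {
        if (md.find(kPostRawSensitivityBoost) == md.end()) {
            md[kPostRawSensitivityBoost] = 100;  // 100 == 1.0x, no extra gain
        }
    }
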
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index 96ca7b7..0366ef6 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -203,6 +203,10 @@
uint32_t mDeviceVersion;
+ // whether Camera3Device should derive ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST for
+ // backward compatibility. Should not be changed after initialization.
+ bool mDerivePostRawSensKey = false;
+
struct Size {
uint32_t width;
uint32_t height;
@@ -561,6 +565,9 @@
// ERROR state to mark them as not having valid data. mNextRequests will be cleared.
void cleanUpFailedRequests(bool sendRequestError);
+ // Stop the repeating request if any of its output streams is abandoned.
+ void checkAndStopRepeatingRequest();
+
// Pause handling
bool waitIfPaused();
void unpauseForNewRequests();
@@ -574,6 +581,9 @@
// Handle AE precapture trigger cancel for devices <= CAMERA_DEVICE_API_VERSION_3_2.
void handleAePrecaptureCancelRequest(sp<CaptureRequest> request);
+ // Clear repeating requests. Must be called with mRequestLock held.
+ status_t clearRepeatingRequestsLocked(/*out*/ int64_t *lastFrameNumber = NULL);
+
wp<Camera3Device> mParent;
wp<camera3::StatusTracker> mStatusTracker;
camera3_device_t *mHal3Device;
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
index 1e6452f..d2b98e6 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
@@ -157,6 +157,13 @@
if (res != OK) {
ALOGE("%s: Stream %d: Can't dequeue next output buffer: %s (%d)",
__FUNCTION__, mId, strerror(-res), res);
+
+ // Only transition to STATE_ABANDONED from STATE_CONFIGURED. (If it is STATE_PREPARING,
+ // let prepareNextBuffer handle the error.)
+ if (res == NO_INIT && mState == STATE_CONFIGURED) {
+ mState = STATE_ABANDONED;
+ }
+
return res;
}
}
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.cpp b/services/camera/libcameraservice/device3/Camera3Stream.cpp
index 80dce84..96d62d4 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Stream.cpp
@@ -323,6 +323,11 @@
return mState == STATE_PREPARING;
}
+bool Camera3Stream::isAbandoned() const {
+ Mutex::Autolock l(mLock);
+ return mState == STATE_ABANDONED;
+}
+
status_t Camera3Stream::prepareNextBuffer() {
ATRACE_CALL();
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.h b/services/camera/libcameraservice/device3/Camera3Stream.h
index 810383d..0755700 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.h
+++ b/services/camera/libcameraservice/device3/Camera3Stream.h
@@ -95,6 +95,8 @@
* STATE_PREPARING => STATE_CONFIGURED:
* When sufficient prepareNextBuffer calls have been made to allocate
* all stream buffers, or cancelPrepare is called.
+ * STATE_CONFIGURED => STATE_ABANDONED:
+ * When the buffer queue of the stream is abandoned.
*
* Status Tracking:
* Each stream is tracked by StatusTracker as a separate component,
@@ -353,6 +355,11 @@
void removeBufferListener(
const sp<Camera3StreamBufferListener>& listener);
+ /**
* Return whether the buffer queue of the stream is abandoned.
+ */
+ bool isAbandoned() const;
+
protected:
const int mId;
/**
@@ -380,7 +387,8 @@
STATE_IN_CONFIG,
STATE_IN_RECONFIG,
STATE_CONFIGURED,
- STATE_PREPARING
+ STATE_PREPARING,
+ STATE_ABANDONED
} mState;
mutable Mutex mLock;
diff --git a/services/camera/libcameraservice/device3/Camera3StreamInterface.h b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
index 3f7e7a7..6cb7a54 100644
--- a/services/camera/libcameraservice/device3/Camera3StreamInterface.h
+++ b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
@@ -262,6 +262,11 @@
virtual status_t disconnect() = 0;
/**
* Return whether the buffer queue of the stream is abandoned.
+ */
+ virtual bool isAbandoned() const = 0;
+
+ /**
* Debug dump of the stream's state.
*/
virtual void dump(int fd, const Vector<String16> &args) const = 0;
diff --git a/services/camera/libcameraservice/utils/AutoConditionLock.cpp b/services/camera/libcameraservice/utils/AutoConditionLock.cpp
index c8ee965..ed80a95 100644
--- a/services/camera/libcameraservice/utils/AutoConditionLock.cpp
+++ b/services/camera/libcameraservice/utils/AutoConditionLock.cpp
@@ -24,13 +24,15 @@
// Locks manager-owned mutex
AutoConditionLock::AutoConditionLock(const std::shared_ptr<WaitableMutexWrapper>& manager) :
- mManager{manager}, mAutoLock{manager->mMutex} {}
+ mManager{manager}, mAutoLock{manager->mMutex}, mAcquired(false) {}
// Unlocks manager-owned mutex
AutoConditionLock::~AutoConditionLock() {
// Unset the condition and wake everyone up before releasing lock
- mManager->mState = false;
- mManager->mCondition.broadcast();
+ if (mAcquired) {
+ mManager->mState = false;
+ mManager->mCondition.broadcast();
+ }
}
std::unique_ptr<AutoConditionLock> AutoConditionLock::waitAndAcquire(
@@ -59,6 +61,7 @@
// Set the condition and return
manager->mState = true;
+ scopedLock->mAcquired = true;
return scopedLock;
}
@@ -84,6 +87,7 @@
// Set the condition and return
manager->mState = true;
+ scopedLock->mAcquired = true;
return scopedLock;
}
diff --git a/services/camera/libcameraservice/utils/AutoConditionLock.h b/services/camera/libcameraservice/utils/AutoConditionLock.h
index 9a3eafc..b7f167b 100644
--- a/services/camera/libcameraservice/utils/AutoConditionLock.h
+++ b/services/camera/libcameraservice/utils/AutoConditionLock.h
@@ -92,6 +92,7 @@
std::shared_ptr<WaitableMutexWrapper> mManager;
Mutex::Autolock mAutoLock;
+ bool mAcquired;
};
}; // namespace android
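
The AutoConditionLock change distilled: previously a timed-out waitAndAcquire() still ran the destructor of a guard that never acquired the condition, which cleared mState and woke waiters even though another caller held the logical lock; mAcquired makes the release conditional. A minimal self-contained illustration of that RAII pitfall (an assumed reduction, not the real classes):

    // Guard that releases shared state only if it actually acquired it.
    struct Guard {
        bool& state;
        bool acquired = false;          // the fix: track successful acquisition
        explicit Guard(bool& s) : state(s) {}
        ~Guard() {
            if (acquired) {             // without this check, a failed or
                state = false;          // timed-out acquire would stomp the
            }                           // real holder's state on destruction
        }
    };
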
diff --git a/services/mediacodec/mediacodec.rc b/services/mediacodec/mediacodec.rc
index e8df7be..d78e0a4 100644
--- a/services/mediacodec/mediacodec.rc
+++ b/services/mediacodec/mediacodec.rc
@@ -3,3 +3,4 @@
user mediacodec
group camera drmrpc mediadrm
ioprio rt 4
+ writepid /dev/cpuset/foreground/tasks
diff --git a/services/mediadrm/Android.mk b/services/mediadrm/Android.mk
index f6ddf94..8baaf13 100644
--- a/services/mediadrm/Android.mk
+++ b/services/mediadrm/Android.mk
@@ -22,14 +22,15 @@
main_mediadrmserver.cpp
LOCAL_SHARED_LIBRARIES:= \
- libui \
- liblog \
- libutils \
libbinder \
libcutils \
- libstagefright \
- libmediaplayerservice \
+ liblog \
libmedia \
+ libmediadrm \
+ libmediaplayerservice \
+ libstagefright \
+ libui \
+ libutils \
LOCAL_C_INCLUDES := \
frameworks/av/media/libmediaplayerservice \
diff --git a/services/mediadrm/MediaDrmService.cpp b/services/mediadrm/MediaDrmService.cpp
index 36ab8fe6..331c568 100644
--- a/services/mediadrm/MediaDrmService.cpp
+++ b/services/mediadrm/MediaDrmService.cpp
@@ -19,13 +19,13 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "MediaDrmService"
-#include <utils/Log.h>
-#include <binder/IServiceManager.h>
#include "MediaDrmService.h"
-#include "Crypto.h"
-#include "Drm.h"
+#include <binder/IServiceManager.h>
+#include <media/Crypto.h>
+#include <media/Drm.h>
+#include <utils/Log.h>
namespace android {
diff --git a/services/mediadrm/mediadrmserver.rc b/services/mediadrm/mediadrmserver.rc
index 374d24b..359c2cf 100644
--- a/services/mediadrm/mediadrmserver.rc
+++ b/services/mediadrm/mediadrmserver.rc
@@ -3,3 +3,4 @@
user media
group mediadrm drmrpc
ioprio rt 4
+ writepid /dev/cpuset/foreground/tasks
diff --git a/services/mediaextractor/MediaExtractorService.cpp b/services/mediaextractor/MediaExtractorService.cpp
index 0c93af1..4a80166 100644
--- a/services/mediaextractor/MediaExtractorService.cpp
+++ b/services/mediaextractor/MediaExtractorService.cpp
@@ -39,7 +39,7 @@
ret == NULL ? "" : ret->name());
if (ret != NULL) {
- registerMediaExtractor(ret, remoteSource, mime);
+ registerMediaExtractor(ret, localSource, mime);
}
return ret;
diff --git a/services/mediaextractor/mediaextractor.rc b/services/mediaextractor/mediaextractor.rc
index f733a2b..5fc2941 100644
--- a/services/mediaextractor/mediaextractor.rc
+++ b/services/mediaextractor/mediaextractor.rc
@@ -3,3 +3,4 @@
user mediaex
group drmrpc mediadrm
ioprio rt 4
+ writepid /dev/cpuset/foreground/tasks