/*
* Copyright (C) 2013-2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#define LOG_TAG "Camera3-Device"
#define ATRACE_TAG ATRACE_TAG_CAMERA
//#define LOG_NDEBUG 0
//#define LOG_NNDEBUG 0 // Per-frame verbose logging
#ifdef LOG_NNDEBUG
#define ALOGVV(...) ALOGV(__VA_ARGS__)
#else
#define ALOGVV(...) ((void)0)
#endif
// Convenience macros for transient errors and warnings
#define CLOGE(fmt, ...) ALOGE("Camera %s: %s: " fmt, mId.string(), __FUNCTION__, \
##__VA_ARGS__)
#define CLOGW(fmt, ...) ALOGW("Camera %s: %s: " fmt, mId.string(), __FUNCTION__, \
##__VA_ARGS__)
// Convenience macros for transitioning to the error state
#define SET_ERR(fmt, ...) setErrorState( \
"%s: " fmt, __FUNCTION__, \
##__VA_ARGS__)
#define SET_ERR_L(fmt, ...) setErrorStateLocked( \
"%s: " fmt, __FUNCTION__, \
##__VA_ARGS__)
#include <inttypes.h>
#include <utility>
#include <utils/Log.h>
#include <utils/Trace.h>
#include <utils/Timers.h>
#include <cutils/properties.h>
#include <android/hardware/camera2/ICameraDeviceUser.h>
#include "utils/CameraTraces.h"
#include "mediautils/SchedulingPolicyService.h"
#include "device3/Camera3Device.h"
#include "device3/Camera3OutputStream.h"
#include "device3/Camera3InputStream.h"
#include "device3/Camera3DummyStream.h"
#include "device3/Camera3SharedOutputStream.h"
#include "CameraService.h"
#include "utils/CameraThreadState.h"
#include "utils/TraceHFR.h"
#include <algorithm>
#include <tuple>
using namespace android::camera3;
using namespace android::hardware::camera;
using namespace android::hardware::camera::device::V3_2;
namespace android {
Camera3Device::Camera3Device(const String8 &id):
mId(id),
mOperatingMode(NO_MODE),
mIsConstrainedHighSpeedConfiguration(false),
mStatus(STATUS_UNINITIALIZED),
mStatusWaiters(0),
mUsePartialResult(false),
mNumPartialResults(1),
mTimestampOffset(0),
mNextResultFrameNumber(0),
mNextReprocessResultFrameNumber(0),
mNextZslStillResultFrameNumber(0),
mNextShutterFrameNumber(0),
mNextReprocessShutterFrameNumber(0),
mNextZslStillShutterFrameNumber(0),
mListener(NULL),
mVendorTagId(CAMERA_METADATA_INVALID_VENDOR_ID),
mLastTemplateId(-1),
mNeedFixupMonochromeTags(false)
{
ATRACE_CALL();
ALOGV("%s: Created device for camera %s", __FUNCTION__, mId.string());
}
Camera3Device::~Camera3Device()
{
ATRACE_CALL();
ALOGV("%s: Tearing down for camera id %s", __FUNCTION__, mId.string());
disconnectImpl();
}
const String8& Camera3Device::getId() const {
return mId;
}
status_t Camera3Device::initialize(sp<CameraProviderManager> manager, const String8& monitorTags) {
ATRACE_CALL();
Mutex::Autolock il(mInterfaceLock);
Mutex::Autolock l(mLock);
ALOGV("%s: Initializing HIDL device for camera %s", __FUNCTION__, mId.string());
if (mStatus != STATUS_UNINITIALIZED) {
CLOGE("Already initialized!");
return INVALID_OPERATION;
}
if (manager == nullptr) return INVALID_OPERATION;
sp<ICameraDeviceSession> session;
ATRACE_BEGIN("CameraHal::openSession");
status_t res = manager->openSession(mId.string(), this,
/*out*/ &session);
ATRACE_END();
if (res != OK) {
SET_ERR_L("Could not open camera session: %s (%d)", strerror(-res), res);
return res;
}
res = manager->getCameraCharacteristics(mId.string(), &mDeviceInfo);
if (res != OK) {
SET_ERR_L("Could not retrieve camera characteristics: %s (%d)", strerror(-res), res);
session->close();
return res;
}
mSupportNativeZoomRatio = manager->supportNativeZoomRatio(mId.string());
std::vector<std::string> physicalCameraIds;
bool isLogical = manager->isLogicalCamera(mId.string(), &physicalCameraIds);
if (isLogical) {
for (auto& physicalId : physicalCameraIds) {
res = manager->getCameraCharacteristics(
physicalId, &mPhysicalDeviceInfoMap[physicalId]);
if (res != OK) {
SET_ERR_L("Could not retrieve camera %s characteristics: %s (%d)",
physicalId.c_str(), strerror(-res), res);
session->close();
return res;
}
bool usePrecorrectArray =
DistortionMapper::isDistortionSupported(mPhysicalDeviceInfoMap[physicalId]);
if (usePrecorrectArray) {
res = mDistortionMappers[physicalId].setupStaticInfo(
mPhysicalDeviceInfoMap[physicalId]);
if (res != OK) {
SET_ERR_L("Unable to read camera %s's calibration fields for distortion "
"correction", physicalId.c_str());
session->close();
return res;
}
}
mZoomRatioMappers[physicalId] = ZoomRatioMapper(
&mPhysicalDeviceInfoMap[physicalId],
mSupportNativeZoomRatio, usePrecorrectArray);
}
}
std::shared_ptr<RequestMetadataQueue> queue;
auto requestQueueRet = session->getCaptureRequestMetadataQueue(
[&queue](const auto& descriptor) {
queue = std::make_shared<RequestMetadataQueue>(descriptor);
if (!queue->isValid() || queue->availableToWrite() <= 0) {
ALOGE("HAL returns empty request metadata fmq, not use it");
queue = nullptr;
// don't use the queue onwards.
}
});
if (!requestQueueRet.isOk()) {
ALOGE("Transaction error when getting request metadata fmq: %s, not use it",
requestQueueRet.description().c_str());
return DEAD_OBJECT;
}
std::unique_ptr<ResultMetadataQueue>& resQueue = mResultMetadataQueue;
auto resultQueueRet = session->getCaptureResultMetadataQueue(
[&resQueue](const auto& descriptor) {
resQueue = std::make_unique<ResultMetadataQueue>(descriptor);
if (!resQueue->isValid() || resQueue->availableToWrite() <= 0) {
ALOGE("HAL returns empty result metadata fmq, not use it");
resQueue = nullptr;
// Don't use the resQueue onwards.
}
});
if (!resultQueueRet.isOk()) {
ALOGE("Transaction error when getting result metadata queue from camera session: %s",
resultQueueRet.description().c_str());
return DEAD_OBJECT;
}
IF_ALOGV() {
session->interfaceChain([](
::android::hardware::hidl_vec<::android::hardware::hidl_string> interfaceChain) {
ALOGV("Session interface chain:");
for (const auto& iface : interfaceChain) {
ALOGV(" %s", iface.c_str());
}
});
}
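// HAL-level buffer management is only enabled when the HAL advertises the
// HIDL device 3.5 buffer management version in its static metadata.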
camera_metadata_entry bufMgrMode =
mDeviceInfo.find(ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION);
if (bufMgrMode.count > 0) {
mUseHalBufManager = (bufMgrMode.data.u8[0] ==
ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION_HIDL_DEVICE_3_5);
}
camera_metadata_entry_t capabilities = mDeviceInfo.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
for (size_t i = 0; i < capabilities.count; i++) {
uint8_t capability = capabilities.data.u8[i];
if (capability == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_OFFLINE_PROCESSING) {
mSupportOfflineProcessing = true;
}
}
mInterface = new HalInterface(session, queue, mUseHalBufManager, mSupportOfflineProcessing);
std::string providerType;
mVendorTagId = manager->getProviderTagIdLocked(mId.string());
mTagMonitor.initialize(mVendorTagId);
if (!monitorTags.isEmpty()) {
mTagMonitor.parseTagsToMonitor(String8(monitorTags));
}
// Metadata tags need fixup for monochrome camera devices with version
// lower than 3.5.
hardware::hidl_version maxVersion{0,0};
res = manager->getHighestSupportedVersion(mId.string(), &maxVersion);
if (res != OK) {
ALOGE("%s: Error in getting camera device version id: %s (%d)",
__FUNCTION__, strerror(-res), res);
return res;
}
int deviceVersion = HARDWARE_DEVICE_API_VERSION(
maxVersion.get_major(), maxVersion.get_minor());
bool isMonochrome = false;
for (size_t i = 0; i < capabilities.count; i++) {
uint8_t capability = capabilities.data.u8[i];
if (capability == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MONOCHROME) {
isMonochrome = true;
}
}
mNeedFixupMonochromeTags = (isMonochrome && deviceVersion < CAMERA_DEVICE_API_VERSION_3_5);
return initializeCommonLocked();
}
status_t Camera3Device::initializeCommonLocked() {
/** Start up status tracker thread */
mStatusTracker = new StatusTracker(this);
status_t res = mStatusTracker->run(String8::format("C3Dev-%s-Status", mId.string()).string());
if (res != OK) {
SET_ERR_L("Unable to start status tracking thread: %s (%d)",
strerror(-res), res);
mInterface->close();
mStatusTracker.clear();
return res;
}
/** Register in-flight map to the status tracker */
mInFlightStatusId = mStatusTracker->addComponent();
if (mUseHalBufManager) {
res = mRequestBufferSM.initialize(mStatusTracker);
if (res != OK) {
SET_ERR_L("Unable to start request buffer state machine: %s (%d)",
strerror(-res), res);
mInterface->close();
mStatusTracker.clear();
return res;
}
}
/** Create buffer manager */
mBufferManager = new Camera3BufferManager();
Vector<int32_t> sessionParamKeys;
camera_metadata_entry_t sessionKeysEntry = mDeviceInfo.find(
ANDROID_REQUEST_AVAILABLE_SESSION_KEYS);
if (sessionKeysEntry.count > 0) {
sessionParamKeys.insertArrayAt(sessionKeysEntry.data.i32, 0, sessionKeysEntry.count);
}
/** Start up request queue thread */
mRequestThread = new RequestThread(
this, mStatusTracker, mInterface, sessionParamKeys, mUseHalBufManager);
res = mRequestThread->run(String8::format("C3Dev-%s-ReqQueue", mId.string()).string());
if (res != OK) {
SET_ERR_L("Unable to start request queue thread: %s (%d)",
strerror(-res), res);
mInterface->close();
mRequestThread.clear();
return res;
}
mPreparerThread = new PreparerThread();
internalUpdateStatusLocked(STATUS_UNCONFIGURED);
mNextStreamId = 0;
mDummyStreamId = NO_STREAM;
mNeedConfig = true;
mPauseStateNotify = false;
// Measure the clock domain offset between camera and video/hw_composer
camera_metadata_entry timestampSource =
mDeviceInfo.find(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE);
if (timestampSource.count > 0 && timestampSource.data.u8[0] ==
ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME) {
mTimestampOffset = getMonoToBoottimeOffset();
}
// Will the HAL be sending in early partial result metadata?
camera_metadata_entry partialResultsCount =
mDeviceInfo.find(ANDROID_REQUEST_PARTIAL_RESULT_COUNT);
if (partialResultsCount.count > 0) {
mNumPartialResults = partialResultsCount.data.i32[0];
mUsePartialResult = (mNumPartialResults > 1);
}
camera_metadata_entry configs =
mDeviceInfo.find(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS);
for (uint32_t i = 0; i < configs.count; i += 4) {
if (configs.data.i32[i] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED &&
configs.data.i32[i + 3] ==
ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT) {
mSupportedOpaqueInputSizes.add(Size(configs.data.i32[i + 1],
configs.data.i32[i + 2]));
}
}
bool usePrecorrectArray = DistortionMapper::isDistortionSupported(mDeviceInfo);
if (usePrecorrectArray) {
res = mDistortionMappers[mId.c_str()].setupStaticInfo(mDeviceInfo);
if (res != OK) {
SET_ERR_L("Unable to read necessary calibration fields for distortion correction");
return res;
}
}
mZoomRatioMappers[mId.c_str()] = ZoomRatioMapper(&mDeviceInfo,
mSupportNativeZoomRatio, usePrecorrectArray);
if (RotateAndCropMapper::isNeeded(&mDeviceInfo)) {
mRotateAndCropMappers.emplace(mId.c_str(), &mDeviceInfo);
}
return OK;
}
status_t Camera3Device::disconnect() {
return disconnectImpl();
}
status_t Camera3Device::disconnectImpl() {
ATRACE_CALL();
ALOGI("%s: E", __FUNCTION__);
status_t res = OK;
std::vector<wp<Camera3StreamInterface>> streams;
{
Mutex::Autolock il(mInterfaceLock);
nsecs_t maxExpectedDuration = getExpectedInFlightDuration();
{
Mutex::Autolock l(mLock);
if (mStatus == STATUS_UNINITIALIZED) return res;
if (mStatus == STATUS_ACTIVE ||
(mStatus == STATUS_ERROR && mRequestThread != NULL)) {
res = mRequestThread->clearRepeatingRequests();
if (res != OK) {
SET_ERR_L("Can't stop streaming");
// Continue to close device even in case of error
} else {
res = waitUntilStateThenRelock(/*active*/ false, maxExpectedDuration);
if (res != OK) {
SET_ERR_L("Timeout waiting for HAL to drain (% " PRIi64 " ns)",
maxExpectedDuration);
// Continue to close device even in case of error
}
}
}
if (mStatus == STATUS_ERROR) {
CLOGE("Shutting down in an error state");
}
if (mStatusTracker != NULL) {
mStatusTracker->requestExit();
}
if (mRequestThread != NULL) {
mRequestThread->requestExit();
}
streams.reserve(mOutputStreams.size() + (mInputStream != nullptr ? 1 : 0));
for (size_t i = 0; i < mOutputStreams.size(); i++) {
streams.push_back(mOutputStreams[i]);
}
if (mInputStream != nullptr) {
streams.push_back(mInputStream);
}
}
}
// Joining done without holding mLock and mInterfaceLock, otherwise deadlocks may ensue
// as the threads try to access parent state (b/143513518)
if (mRequestThread != NULL && mStatus != STATUS_ERROR) {
// HAL may be in a bad state, so waiting for request thread
// (which may be stuck in the HAL processCaptureRequest call)
// could be dangerous.
// Give up mInterfaceLock here and then lock it again. Could this lead
// to other deadlocks?
mRequestThread->join();
}
{
Mutex::Autolock il(mInterfaceLock);
if (mStatusTracker != NULL) {
mStatusTracker->join();
}
HalInterface* interface;
{
Mutex::Autolock l(mLock);
mRequestThread.clear();
Mutex::Autolock stLock(mTrackerLock);
mStatusTracker.clear();
interface = mInterface.get();
}
// Call close without internal mutex held, as the HAL close may need to
// wait on assorted callbacks, etc., to complete before it can return.
interface->close();
flushInflightRequests();
{
Mutex::Autolock l(mLock);
mInterface->clear();
mOutputStreams.clear();
mInputStream.clear();
mDeletedStreams.clear();
mBufferManager.clear();
internalUpdateStatusLocked(STATUS_UNINITIALIZED);
}
for (auto& weakStream : streams) {
sp<Camera3StreamInterface> stream = weakStream.promote();
if (stream != nullptr) {
ALOGE("%s: Stream %d leaked! strong reference (%d)!",
__FUNCTION__, stream->getId(), stream->getStrongCount() - 1);
}
}
}
ALOGI("%s: X", __FUNCTION__);
return res;
}
// For dumping/debugging only -
// try to acquire a lock a few times, eventually give up to proceed with
// debug/dump operations
bool Camera3Device::tryLockSpinRightRound(Mutex& lock) {
bool gotLock = false;
for (size_t i = 0; i < kDumpLockAttempts; ++i) {
if (lock.tryLock() == NO_ERROR) {
gotLock = true;
break;
} else {
usleep(kDumpSleepDuration);
}
}
return gotLock;
}
Camera3Device::Size Camera3Device::getMaxJpegResolution() const {
int32_t maxJpegWidth = 0, maxJpegHeight = 0;
const int STREAM_CONFIGURATION_SIZE = 4;
const int STREAM_FORMAT_OFFSET = 0;
const int STREAM_WIDTH_OFFSET = 1;
const int STREAM_HEIGHT_OFFSET = 2;
const int STREAM_IS_INPUT_OFFSET = 3;
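// Each stream configuration entry is a (format, width, height, isInput) tuple.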
camera_metadata_ro_entry_t availableStreamConfigs =
mDeviceInfo.find(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS);
if (availableStreamConfigs.count == 0 ||
availableStreamConfigs.count % STREAM_CONFIGURATION_SIZE != 0) {
return Size(0, 0);
}
// Get max jpeg size (area-wise).
for (size_t i=0; i < availableStreamConfigs.count; i+= STREAM_CONFIGURATION_SIZE) {
int32_t format = availableStreamConfigs.data.i32[i + STREAM_FORMAT_OFFSET];
int32_t width = availableStreamConfigs.data.i32[i + STREAM_WIDTH_OFFSET];
int32_t height = availableStreamConfigs.data.i32[i + STREAM_HEIGHT_OFFSET];
int32_t isInput = availableStreamConfigs.data.i32[i + STREAM_IS_INPUT_OFFSET];
if (isInput == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT
&& format == HAL_PIXEL_FORMAT_BLOB &&
(width * height > maxJpegWidth * maxJpegHeight)) {
maxJpegWidth = width;
maxJpegHeight = height;
}
}
return Size(maxJpegWidth, maxJpegHeight);
}
nsecs_t Camera3Device::getMonoToBoottimeOffset() {
// try three times to get the clock offset, choose the one
// with the minimum gap in measurements.
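// Each iteration samples BOOTTIME between two MONOTONIC reads and estimates
// the offset against the midpoint of the two MONOTONIC samples; the attempt
// with the smallest gap between the two reads is least affected by scheduling
// delays, so its estimate is kept.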
const int tries = 3;
nsecs_t bestGap, measured;
for (int i = 0; i < tries; ++i) {
const nsecs_t tmono = systemTime(SYSTEM_TIME_MONOTONIC);
const nsecs_t tbase = systemTime(SYSTEM_TIME_BOOTTIME);
const nsecs_t tmono2 = systemTime(SYSTEM_TIME_MONOTONIC);
const nsecs_t gap = tmono2 - tmono;
if (i == 0 || gap < bestGap) {
bestGap = gap;
measured = tbase - ((tmono + tmono2) >> 1);
}
}
return measured;
}
hardware::graphics::common::V1_0::PixelFormat Camera3Device::mapToPixelFormat(
int frameworkFormat) {
return (hardware::graphics::common::V1_0::PixelFormat) frameworkFormat;
}
DataspaceFlags Camera3Device::mapToHidlDataspace(
android_dataspace dataSpace) {
return dataSpace;
}
BufferUsageFlags Camera3Device::mapToConsumerUsage(
uint64_t usage) {
return usage;
}
StreamRotation Camera3Device::mapToStreamRotation(camera3_stream_rotation_t rotation) {
switch (rotation) {
case CAMERA3_STREAM_ROTATION_0:
return StreamRotation::ROTATION_0;
case CAMERA3_STREAM_ROTATION_90:
return StreamRotation::ROTATION_90;
case CAMERA3_STREAM_ROTATION_180:
return StreamRotation::ROTATION_180;
case CAMERA3_STREAM_ROTATION_270:
return StreamRotation::ROTATION_270;
}
ALOGE("%s: Unknown stream rotation %d", __FUNCTION__, rotation);
return StreamRotation::ROTATION_0;
}
status_t Camera3Device::mapToStreamConfigurationMode(
camera3_stream_configuration_mode_t operationMode, StreamConfigurationMode *mode) {
if (mode == nullptr) return BAD_VALUE;
if (operationMode < CAMERA3_VENDOR_STREAM_CONFIGURATION_MODE_START) {
switch(operationMode) {
case CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE:
*mode = StreamConfigurationMode::NORMAL_MODE;
break;
case CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE:
*mode = StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE;
break;
default:
ALOGE("%s: Unknown stream configuration mode %d", __FUNCTION__, operationMode);
return BAD_VALUE;
}
} else {
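// Vendor-defined configuration modes are passed through unchanged.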
*mode = static_cast<StreamConfigurationMode>(operationMode);
}
return OK;
}
int Camera3Device::mapToFrameworkFormat(
hardware::graphics::common::V1_0::PixelFormat pixelFormat) {
return static_cast<uint32_t>(pixelFormat);
}
android_dataspace Camera3Device::mapToFrameworkDataspace(
DataspaceFlags dataSpace) {
return static_cast<android_dataspace>(dataSpace);
}
uint64_t Camera3Device::mapConsumerToFrameworkUsage(
BufferUsageFlags usage) {
return usage;
}
uint64_t Camera3Device::mapProducerToFrameworkUsage(
BufferUsageFlags usage) {
return usage;
}
ssize_t Camera3Device::getJpegBufferSize(uint32_t width, uint32_t height) const {
// Get max jpeg size (area-wise).
Size maxJpegResolution = getMaxJpegResolution();
if (maxJpegResolution.width == 0) {
ALOGE("%s: Camera %s: Can't find valid available jpeg sizes in static metadata!",
__FUNCTION__, mId.string());
return BAD_VALUE;
}
// Get max jpeg buffer size
ssize_t maxJpegBufferSize = 0;
camera_metadata_ro_entry jpegBufMaxSize = mDeviceInfo.find(ANDROID_JPEG_MAX_SIZE);
if (jpegBufMaxSize.count == 0) {
ALOGE("%s: Camera %s: Can't find maximum JPEG size in static metadata!", __FUNCTION__,
mId.string());
return BAD_VALUE;
}
maxJpegBufferSize = jpegBufMaxSize.data.i32[0];
assert(kMinJpegBufferSize < maxJpegBufferSize);
// Calculate final jpeg buffer size for the given resolution.
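// The size is a linear interpolation between kMinJpegBufferSize and the
// static maximum, scaled by the ratio of the requested area to the maximum
// JPEG area; e.g. a resolution with half the maximum area yields roughly the
// midpoint of the two bounds.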
float scaleFactor = ((float) (width * height)) /
(maxJpegResolution.width * maxJpegResolution.height);
ssize_t jpegBufferSize = scaleFactor * (maxJpegBufferSize - kMinJpegBufferSize) +
kMinJpegBufferSize;
if (jpegBufferSize > maxJpegBufferSize) {
jpegBufferSize = maxJpegBufferSize;
}
return jpegBufferSize;
}
ssize_t Camera3Device::getPointCloudBufferSize() const {
const int FLOATS_PER_POINT=4;
camera_metadata_ro_entry maxPointCount = mDeviceInfo.find(ANDROID_DEPTH_MAX_DEPTH_SAMPLES);
if (maxPointCount.count == 0) {
ALOGE("%s: Camera %s: Can't find maximum depth point cloud size in static metadata!",
__FUNCTION__, mId.string());
return BAD_VALUE;
}
ssize_t maxBytesForPointCloud = sizeof(android_depth_points) +
maxPointCount.data.i32[0] * sizeof(float) * FLOATS_PER_POINT;
return maxBytesForPointCloud;
}
ssize_t Camera3Device::getRawOpaqueBufferSize(int32_t width, int32_t height) const {
const int PER_CONFIGURATION_SIZE = 3;
const int WIDTH_OFFSET = 0;
const int HEIGHT_OFFSET = 1;
const int SIZE_OFFSET = 2;
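// ANDROID_SENSOR_OPAQUE_RAW_SIZE entries are (width, height, size) triples;
// look up the size entry matching the requested dimensions.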
camera_metadata_ro_entry rawOpaqueSizes =
mDeviceInfo.find(ANDROID_SENSOR_OPAQUE_RAW_SIZE);
size_t count = rawOpaqueSizes.count;
if (count == 0 || (count % PER_CONFIGURATION_SIZE)) {
ALOGE("%s: Camera %s: bad opaque RAW size static metadata length(%zu)!",
__FUNCTION__, mId.string(), count);
return BAD_VALUE;
}
for (size_t i = 0; i < count; i += PER_CONFIGURATION_SIZE) {
if (width == rawOpaqueSizes.data.i32[i + WIDTH_OFFSET] &&
height == rawOpaqueSizes.data.i32[i + HEIGHT_OFFSET]) {
return rawOpaqueSizes.data.i32[i + SIZE_OFFSET];
}
}
ALOGE("%s: Camera %s: cannot find size for %dx%d opaque RAW image!",
__FUNCTION__, mId.string(), width, height);
return BAD_VALUE;
}
status_t Camera3Device::dump(int fd, const Vector<String16> &args) {
ATRACE_CALL();
(void)args;
// Try to lock, but continue in case of failure (to avoid blocking in
// deadlocks)
bool gotInterfaceLock = tryLockSpinRightRound(mInterfaceLock);
bool gotLock = tryLockSpinRightRound(mLock);
ALOGW_IF(!gotInterfaceLock,
"Camera %s: %s: Unable to lock interface lock, proceeding anyway",
mId.string(), __FUNCTION__);
ALOGW_IF(!gotLock,
"Camera %s: %s: Unable to lock main lock, proceeding anyway",
mId.string(), __FUNCTION__);
bool dumpTemplates = false;
String16 templatesOption("-t");
int n = args.size();
for (int i = 0; i < n; i++) {
if (args[i] == templatesOption) {
dumpTemplates = true;
}
if (args[i] == TagMonitor::kMonitorOption) {
if (i + 1 < n) {
String8 monitorTags = String8(args[i + 1]);
if (monitorTags == "off") {
mTagMonitor.disableMonitoring();
} else {
mTagMonitor.parseTagsToMonitor(monitorTags);
}
} else {
mTagMonitor.disableMonitoring();
}
}
}
String8 lines;
const char *status =
mStatus == STATUS_ERROR ? "ERROR" :
mStatus == STATUS_UNINITIALIZED ? "UNINITIALIZED" :
mStatus == STATUS_UNCONFIGURED ? "UNCONFIGURED" :
mStatus == STATUS_CONFIGURED ? "CONFIGURED" :
mStatus == STATUS_ACTIVE ? "ACTIVE" :
"Unknown";
lines.appendFormat(" Device status: %s\n", status);
if (mStatus == STATUS_ERROR) {
lines.appendFormat(" Error cause: %s\n", mErrorCause.string());
}
lines.appendFormat(" Stream configuration:\n");
const char *mode =
mOperatingMode == static_cast<int>(StreamConfigurationMode::NORMAL_MODE) ? "NORMAL" :
mOperatingMode == static_cast<int>(
StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE) ? "CONSTRAINED_HIGH_SPEED" :
"CUSTOM";
lines.appendFormat(" Operation mode: %s (%d) \n", mode, mOperatingMode);
if (mInputStream != NULL) {
write(fd, lines.string(), lines.size());
mInputStream->dump(fd, args);
} else {
lines.appendFormat(" No input stream.\n");
write(fd, lines.string(), lines.size());
}
for (size_t i = 0; i < mOutputStreams.size(); i++) {
mOutputStreams[i]->dump(fd,args);
}
if (mBufferManager != NULL) {
lines = String8(" Camera3 Buffer Manager:\n");
write(fd, lines.string(), lines.size());
mBufferManager->dump(fd, args);
}
lines = String8(" In-flight requests:\n");
if (mInFlightMap.size() == 0) {
lines.append(" None\n");
} else {
for (size_t i = 0; i < mInFlightMap.size(); i++) {
InFlightRequest r = mInFlightMap.valueAt(i);
lines.appendFormat(" Frame %d | Timestamp: %" PRId64 ", metadata"
" arrived: %s, buffers left: %d\n", mInFlightMap.keyAt(i),
r.shutterTimestamp, r.haveResultMetadata ? "true" : "false",
r.numBuffersLeft);
}
}
write(fd, lines.string(), lines.size());
if (mRequestThread != NULL) {
mRequestThread->dumpCaptureRequestLatency(fd,
" ProcessCaptureRequest latency histogram:");
}
{
lines = String8(" Last request sent:\n");
write(fd, lines.string(), lines.size());
CameraMetadata lastRequest = getLatestRequestLocked();
lastRequest.dump(fd, /*verbosity*/2, /*indentation*/6);
}
if (dumpTemplates) {
const char *templateNames[CAMERA3_TEMPLATE_COUNT] = {
"TEMPLATE_PREVIEW",
"TEMPLATE_STILL_CAPTURE",
"TEMPLATE_VIDEO_RECORD",
"TEMPLATE_VIDEO_SNAPSHOT",
"TEMPLATE_ZERO_SHUTTER_LAG",
"TEMPLATE_MANUAL",
};
for (int i = 1; i < CAMERA3_TEMPLATE_COUNT; i++) {
camera_metadata_t *templateRequest = nullptr;
mInterface->constructDefaultRequestSettings(
(camera3_request_template_t) i, &templateRequest);
lines = String8::format(" HAL Request %s:\n", templateNames[i-1]);
if (templateRequest == nullptr) {
lines.append(" Not supported\n");
write(fd, lines.string(), lines.size());
} else {
write(fd, lines.string(), lines.size());
dump_indented_camera_metadata(templateRequest,
fd, /*verbosity*/2, /*indentation*/8);
}
free_camera_metadata(templateRequest);
}
}
mTagMonitor.dumpMonitoredMetadata(fd);
if (mInterface->valid()) {
lines = String8(" HAL device dump:\n");
write(fd, lines.string(), lines.size());
mInterface->dump(fd);
}
if (gotLock) mLock.unlock();
if (gotInterfaceLock) mInterfaceLock.unlock();
return OK;
}
const CameraMetadata& Camera3Device::infoPhysical(const String8& physicalId) const {
ALOGVV("%s: E", __FUNCTION__);
if (CC_UNLIKELY(mStatus == STATUS_UNINITIALIZED ||
mStatus == STATUS_ERROR)) {
ALOGW("%s: Access to static info %s!", __FUNCTION__,
mStatus == STATUS_ERROR ?
"when in error state" : "before init");
}
if (physicalId.isEmpty()) {
return mDeviceInfo;
} else {
std::string id(physicalId.c_str());
if (mPhysicalDeviceInfoMap.find(id) != mPhysicalDeviceInfoMap.end()) {
return mPhysicalDeviceInfoMap.at(id);
} else {
ALOGE("%s: Invalid physical camera id %s", __FUNCTION__, physicalId.c_str());
return mDeviceInfo;
}
}
}
const CameraMetadata& Camera3Device::info() const {
String8 emptyId;
return infoPhysical(emptyId);
}
status_t Camera3Device::checkStatusOkToCaptureLocked() {
switch (mStatus) {
case STATUS_ERROR:
CLOGE("Device has encountered a serious error");
return INVALID_OPERATION;
case STATUS_UNINITIALIZED:
CLOGE("Device not initialized");
return INVALID_OPERATION;
case STATUS_UNCONFIGURED:
case STATUS_CONFIGURED:
case STATUS_ACTIVE:
// OK
break;
default:
SET_ERR_L("Unexpected status: %d", mStatus);
return INVALID_OPERATION;
}
return OK;
}
status_t Camera3Device::convertMetadataListToRequestListLocked(
const List<const PhysicalCameraSettingsList> &metadataList,
const std::list<const SurfaceMap> &surfaceMaps,
bool repeating,
RequestList *requestList) {
if (requestList == NULL) {
CLOGE("requestList cannot be NULL.");
return BAD_VALUE;
}
int32_t burstId = 0;
List<const PhysicalCameraSettingsList>::const_iterator metadataIt = metadataList.begin();
std::list<const SurfaceMap>::const_iterator surfaceMapIt = surfaceMaps.begin();
for (; metadataIt != metadataList.end() && surfaceMapIt != surfaceMaps.end();
++metadataIt, ++surfaceMapIt) {
sp<CaptureRequest> newRequest = setUpRequestLocked(*metadataIt, *surfaceMapIt);
if (newRequest == 0) {
CLOGE("Can't create capture request");
return BAD_VALUE;
}
newRequest->mRepeating = repeating;
// Setup burst Id and request Id
newRequest->mResultExtras.burstId = burstId++;
auto requestIdEntry = metadataIt->begin()->metadata.find(ANDROID_REQUEST_ID);
if (requestIdEntry.count == 0) {
CLOGE("RequestID does not exist in metadata");
return BAD_VALUE;
}
newRequest->mResultExtras.requestId = requestIdEntry.data.i32[0];
requestList->push_back(newRequest);
ALOGV("%s: requestId = %" PRId32, __FUNCTION__, newRequest->mResultExtras.requestId);
}
if (metadataIt != metadataList.end() || surfaceMapIt != surfaceMaps.end()) {
ALOGE("%s: metadataList and surfaceMaps are not the same size!", __FUNCTION__);
return BAD_VALUE;
}
// Setup batch size if this is a high speed video recording request.
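// For constrained high-speed sessions, the whole request list is treated as
// one batch when the first request targets a video stream.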
if (mIsConstrainedHighSpeedConfiguration && requestList->size() > 0) {
auto firstRequest = requestList->begin();
for (auto& outputStream : (*firstRequest)->mOutputStreams) {
if (outputStream->isVideoStream()) {
(*firstRequest)->mBatchSize = requestList->size();
break;
}
}
}
return OK;
}
status_t Camera3Device::capture(CameraMetadata &request, int64_t* lastFrameNumber) {
ATRACE_CALL();
List<const PhysicalCameraSettingsList> requestsList;
std::list<const SurfaceMap> surfaceMaps;
convertToRequestList(requestsList, surfaceMaps, request);
return captureList(requestsList, surfaceMaps, lastFrameNumber);
}
void Camera3Device::convertToRequestList(List<const PhysicalCameraSettingsList>& requestsList,
std::list<const SurfaceMap>& surfaceMaps,
const CameraMetadata& request) {
PhysicalCameraSettingsList requestList;
requestList.push_back({std::string(getId().string()), request});
requestsList.push_back(requestList);
SurfaceMap surfaceMap;
camera_metadata_ro_entry streams = request.find(ANDROID_REQUEST_OUTPUT_STREAMS);
// With no surface list passed in, stream and surface will have 1-to-1
// mapping. So the surface index is 0 for each stream in the surfaceMap.
for (size_t i = 0; i < streams.count; i++) {
surfaceMap[streams.data.i32[i]].push_back(0);
}
surfaceMaps.push_back(surfaceMap);
}
status_t Camera3Device::submitRequestsHelper(
const List<const PhysicalCameraSettingsList> &requests,
const std::list<const SurfaceMap> &surfaceMaps,
bool repeating,
/*out*/
int64_t *lastFrameNumber) {
ATRACE_CALL();
Mutex::Autolock il(mInterfaceLock);
Mutex::Autolock l(mLock);
status_t res = checkStatusOkToCaptureLocked();
if (res != OK) {
// error logged by previous call
return res;
}
RequestList requestList;
res = convertMetadataListToRequestListLocked(requests, surfaceMaps,
repeating, /*out*/&requestList);
if (res != OK) {
// error logged by previous call
return res;
}
if (repeating) {
res = mRequestThread->setRepeatingRequests(requestList, lastFrameNumber);
} else {
res = mRequestThread->queueRequestList(requestList, lastFrameNumber);
}
if (res == OK) {
res = waitUntilStateThenRelock(/*active*/true, kActiveTimeout);
if (res != OK) {
SET_ERR_L("Can't transition to active in %f seconds!",
kActiveTimeout/1e9);
}
ALOGV("Camera %s: Capture request %" PRId32 " enqueued", mId.string(),
(*(requestList.begin()))->mResultExtras.requestId);
} else {
CLOGE("Cannot queue request. Impossible.");
return BAD_VALUE;
}
return res;
}
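// HAL buffer management callbacks: package the relevant device state and
// delegate to the shared camera3:: buffer request/return helpers.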
hardware::Return<void> Camera3Device::requestStreamBuffers(
const hardware::hidl_vec<hardware::camera::device::V3_5::BufferRequest>& bufReqs,
requestStreamBuffers_cb _hidl_cb) {
RequestBufferStates states {
mId, mRequestBufferInterfaceLock, mUseHalBufManager, mOutputStreams,
*this, *mInterface, *this};
camera3::requestStreamBuffers(states, bufReqs, _hidl_cb);
return hardware::Void();
}
hardware::Return<void> Camera3Device::returnStreamBuffers(
const hardware::hidl_vec<hardware::camera::device::V3_2::StreamBuffer>& buffers) {
ReturnBufferStates states {
mId, mUseHalBufManager, mOutputStreams, *mInterface};
camera3::returnStreamBuffers(states, buffers);
return hardware::Void();
}
hardware::Return<void> Camera3Device::processCaptureResult_3_4(
const hardware::hidl_vec<
hardware::camera::device::V3_4::CaptureResult>& results) {
// Ideally we should grab mLock, but that can lead to deadlock, and
// it's not super important to get up to date value of mStatus for this
// warning print, hence skipping the lock here
if (mStatus == STATUS_ERROR) {
// Per API contract, HAL should act as closed after device error
// But mStatus can be set to error by framework as well, so just log
// a warning here.
ALOGW("%s: received capture result in error state.", __FUNCTION__);
}
sp<NotificationListener> listener;
{
std::lock_guard<std::mutex> l(mOutputLock);
listener = mListener.promote();
}
if (mProcessCaptureResultLock.tryLock() != OK) {
// This should never happen; it indicates a wrong client implementation
// that doesn't follow the contract. But, we can be tolerant here.
ALOGE("%s: callback overlapped! waiting 1s...",
__FUNCTION__);
if (mProcessCaptureResultLock.timedLock(1000000000 /* 1s */) != OK) {
ALOGE("%s: cannot acquire lock in 1s, dropping results",
__FUNCTION__);
// really don't know what to do, so bail out.
return hardware::Void();
}
}
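// Gather references to all of the state needed while processing results so
// the shared camera3:: result-processing helpers can operate on it directly.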
CaptureOutputStates states {
mId,
mInFlightLock, mLastCompletedRegularFrameNumber,
mLastCompletedReprocessFrameNumber, mLastCompletedZslFrameNumber,
mInFlightMap, mOutputLock, mResultQueue, mResultSignal,
mNextShutterFrameNumber,
mNextReprocessShutterFrameNumber, mNextZslStillShutterFrameNumber,
mNextResultFrameNumber,
mNextReprocessResultFrameNumber, mNextZslStillResultFrameNumber,
mUseHalBufManager, mUsePartialResult, mNeedFixupMonochromeTags,
mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
mResultMetadataQueue, mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
mTagMonitor, mInputStream, mOutputStreams, listener, *this, *this, *mInterface
};
for (const auto& result : results) {
processOneCaptureResultLocked(states, result.v3_2, result.physicalCameraMetadata);
}
mProcessCaptureResultLock.unlock();
return hardware::Void();
}
// Only one processCaptureResult should be called at a time, so
// the locks won't block. The locks are present here simply to enforce this.
hardware::Return<void> Camera3Device::processCaptureResult(
const hardware::hidl_vec<
hardware::camera::device::V3_2::CaptureResult>& results) {
hardware::hidl_vec<hardware::camera::device::V3_4::PhysicalCameraMetadata> noPhysMetadata;
// Ideally we should grab mLock, but that can lead to deadlock, and
// it's not super important to get up to date value of mStatus for this
// warning print, hence skipping the lock here
if (mStatus == STATUS_ERROR) {
// Per API contract, HAL should act as closed after device error
// But mStatus can be set to error by framework as well, so just log
// a warning here.
ALOGW("%s: received capture result in error state.", __FUNCTION__);
}
sp<NotificationListener> listener;
{
std::lock_guard<std::mutex> l(mOutputLock);
listener = mListener.promote();
}
if (mProcessCaptureResultLock.tryLock() != OK) {
// This should never happen; it indicates a wrong client implementation
// that doesn't follow the contract. But, we can be tolerant here.
ALOGE("%s: callback overlapped! waiting 1s...",
__FUNCTION__);
if (mProcessCaptureResultLock.timedLock(1000000000 /* 1s */) != OK) {
ALOGE("%s: cannot acquire lock in 1s, dropping results",
__FUNCTION__);
// really don't know what to do, so bail out.
return hardware::Void();
}
}
CaptureOutputStates states {
mId,
mInFlightLock, mLastCompletedRegularFrameNumber,
mLastCompletedReprocessFrameNumber, mLastCompletedZslFrameNumber,
mInFlightMap, mOutputLock, mResultQueue, mResultSignal,
mNextShutterFrameNumber,
mNextReprocessShutterFrameNumber, mNextZslStillShutterFrameNumber,
mNextResultFrameNumber,
mNextReprocessResultFrameNumber, mNextZslStillResultFrameNumber,
mUseHalBufManager, mUsePartialResult, mNeedFixupMonochromeTags,
mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
mResultMetadataQueue, mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
mTagMonitor, mInputStream, mOutputStreams, listener, *this, *this, *mInterface
};
for (const auto& result : results) {
processOneCaptureResultLocked(states, result, noPhysMetadata);
}
mProcessCaptureResultLock.unlock();
return hardware::Void();
}
hardware::Return<void> Camera3Device::notify(
const hardware::hidl_vec<hardware::camera::device::V3_2::NotifyMsg>& msgs) {
// Ideally we should grab mLock, but that can lead to deadlock, and
// it's not super important to get up to date value of mStatus for this
// warning print, hence skipping the lock here
if (mStatus == STATUS_ERROR) {
// Per API contract, HAL should act as closed after device error
// But mStatus can be set to error by framework as well, so just log
// a warning here.
ALOGW("%s: received notify message in error state.", __FUNCTION__);
}
sp<NotificationListener> listener;
{
std::lock_guard<std::mutex> l(mOutputLock);
listener = mListener.promote();
}
CaptureOutputStates states {
mId,
mInFlightLock, mLastCompletedRegularFrameNumber,
mLastCompletedReprocessFrameNumber, mLastCompletedZslFrameNumber,
mInFlightMap, mOutputLock, mResultQueue, mResultSignal,
mNextShutterFrameNumber,
mNextReprocessShutterFrameNumber, mNextZslStillShutterFrameNumber,
mNextResultFrameNumber,
mNextReprocessResultFrameNumber, mNextZslStillResultFrameNumber,
mUseHalBufManager, mUsePartialResult, mNeedFixupMonochromeTags,
mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
mResultMetadataQueue, mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
mTagMonitor, mInputStream, mOutputStreams, listener, *this, *this, *mInterface
};
for (const auto& msg : msgs) {
camera3::notify(states, msg);
}
return hardware::Void();
}
status_t Camera3Device::captureList(const List<const PhysicalCameraSettingsList> &requestsList,
const std::list<const SurfaceMap> &surfaceMaps,
int64_t *lastFrameNumber) {
ATRACE_CALL();
return submitRequestsHelper(requestsList, surfaceMaps, /*repeating*/false, lastFrameNumber);
}
status_t Camera3Device::setStreamingRequest(const CameraMetadata &request,
int64_t* /*lastFrameNumber*/) {
ATRACE_CALL();
List<const PhysicalCameraSettingsList> requestsList;
std::list<const SurfaceMap> surfaceMaps;
convertToRequestList(requestsList, surfaceMaps, request);
return setStreamingRequestList(requestsList, /*surfaceMap*/surfaceMaps,
/*lastFrameNumber*/NULL);
}
status_t Camera3Device::setStreamingRequestList(
const List<const PhysicalCameraSettingsList> &requestsList,
const std::list<const SurfaceMap> &surfaceMaps, int64_t *lastFrameNumber) {
ATRACE_CALL();
return submitRequestsHelper(requestsList, surfaceMaps, /*repeating*/true, lastFrameNumber);
}
sp<Camera3Device::CaptureRequest> Camera3Device::setUpRequestLocked(
const PhysicalCameraSettingsList &request, const SurfaceMap &surfaceMap) {
status_t res;
if (mStatus == STATUS_UNCONFIGURED || mNeedConfig) {
// This point should only be reached via API1 (API2 must explicitly call configureStreams)
// so unilaterally select normal operating mode.
res = filterParamsAndConfigureLocked(request.begin()->metadata,
CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE);
// Stream configuration failed. Client might try other configurations.
if (res != OK) {
CLOGE("Can't set up streams: %s (%d)", strerror(-res), res);
return NULL;
} else if (mStatus == STATUS_UNCONFIGURED) {
// Stream configuration succeeded but left the device unconfigured (no streams).
CLOGE("No streams configured");
return NULL;
}
}
sp<CaptureRequest> newRequest = createCaptureRequest(request, surfaceMap);
return newRequest;
}
status_t Camera3Device::clearStreamingRequest(int64_t *lastFrameNumber) {
ATRACE_CALL();
Mutex::Autolock il(mInterfaceLock);
Mutex::Autolock l(mLock);
switch (mStatus) {
case STATUS_ERROR:
CLOGE("Device has encountered a serious error");
return INVALID_OPERATION;
case STATUS_UNINITIALIZED:
CLOGE("Device not initialized");
return INVALID_OPERATION;
case STATUS_UNCONFIGURED:
case STATUS_CONFIGURED:
case STATUS_ACTIVE:
// OK
break;
default:
SET_ERR_L("Unexpected status: %d", mStatus);
return INVALID_OPERATION;
}
ALOGV("Camera %s: Clearing repeating request", mId.string());
return mRequestThread->clearRepeatingRequests(lastFrameNumber);
}
status_t Camera3Device::waitUntilRequestReceived(int32_t requestId, nsecs_t timeout) {
ATRACE_CALL();
Mutex::Autolock il(mInterfaceLock);
return mRequestThread->waitUntilRequestProcessed(requestId, timeout);
}
status_t Camera3Device::createInputStream(
uint32_t width, uint32_t height, int format, int *id) {
ATRACE_CALL();
Mutex::Autolock il(mInterfaceLock);
nsecs_t maxExpectedDuration = getExpectedInFlightDuration();
Mutex::Autolock l(mLock);
ALOGV("Camera %s: Creating new input stream %d: %d x %d, format %d",
mId.string(), mNextStreamId, width, height, format);
status_t res;
bool wasActive = false;
switch (mStatus) {
case STATUS_ERROR:
ALOGE("%s: Device has encountered a serious error", __FUNCTION__);
return INVALID_OPERATION;
case STATUS_UNINITIALIZED:
ALOGE("%s: Device not initialized", __FUNCTION__);
return INVALID_OPERATION;
case STATUS_UNCONFIGURED:
case STATUS_CONFIGURED:
// OK
break;
case STATUS_ACTIVE:
ALOGV("%s: Stopping activity to reconfigure streams", __FUNCTION__);
res = internalPauseAndWaitLocked(maxExpectedDuration);
if (res != OK) {
SET_ERR_L("Can't pause captures to reconfigure streams!");
return res;
}
wasActive = true;
break;
default:
SET_ERR_L("%s: Unexpected status: %d", mStatus);
return INVALID_OPERATION;
}
assert(mStatus != STATUS_ACTIVE);
if (mInputStream != 0) {
ALOGE("%s: Cannot create more than 1 input stream", __FUNCTION__);
return INVALID_OPERATION;
}
sp<Camera3InputStream> newStream = new Camera3InputStream(mNextStreamId,
width, height, format);
newStream->setStatusTracker(mStatusTracker);
mInputStream = newStream;
*id = mNextStreamId++;
// Continue captures if active at start
if (wasActive) {
ALOGV("%s: Restarting activity to reconfigure streams", __FUNCTION__);
// Reuse current operating mode and session parameters for new stream config
res = configureStreamsLocked(mOperatingMode, mSessionParams);
if (res != OK) {
ALOGE("%s: Can't reconfigure device for new stream %d: %s (%d)",
__FUNCTION__, mNextStreamId, strerror(-res), res);
return res;
}
internalResumeLocked();
}
ALOGV("Camera %s: Created input stream", mId.string());
return OK;
}
status_t Camera3Device::createStream(sp<Surface> consumer,
uint32_t width, uint32_t height, int format,
android_dataspace dataSpace, camera3_stream_rotation_t rotation, int *id,
const String8& physicalCameraId,
std::vector<int> *surfaceIds, int streamSetId, bool isShared, uint64_t consumerUsage) {
ATRACE_CALL();
if (consumer == nullptr) {
ALOGE("%s: consumer must not be null", __FUNCTION__);
return BAD_VALUE;
}
std::vector<sp<Surface>> consumers;
consumers.push_back(consumer);
return createStream(consumers, /*hasDeferredConsumer*/ false, width, height,
format, dataSpace, rotation, id, physicalCameraId, surfaceIds, streamSetId,
isShared, consumerUsage);
}
status_t Camera3Device::createStream(const std::vector<sp<Surface>>& consumers,
bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
android_dataspace dataSpace, camera3_stream_rotation_t rotation, int *id,
const String8& physicalCameraId,
std::vector<int> *surfaceIds, int streamSetId, bool isShared, uint64_t consumerUsage) {
ATRACE_CALL();
Mutex::Autolock il(mInterfaceLock);
nsecs_t maxExpectedDuration = getExpectedInFlightDuration();
Mutex::Autolock l(mLock);
ALOGV("Camera %s: Creating new stream %d: %d x %d, format %d, dataspace %d rotation %d"
" consumer usage %" PRIu64 ", isShared %d, physicalCameraId %s", mId.string(),
mNextStreamId, width, height, format, dataSpace, rotation, consumerUsage, isShared,
physicalCameraId.string());
status_t res;
bool wasActive = false;
switch (mStatus) {
case STATUS_ERROR:
CLOGE("Device has encountered a serious error");
return INVALID_OPERATION;
case STATUS_UNINITIALIZED:
CLOGE("Device not initialized");
return INVALID_OPERATION;
case STATUS_UNCONFIGURED:
case STATUS_CONFIGURED:
// OK
break;
case STATUS_ACTIVE:
ALOGV("%s: Stopping activity to reconfigure streams", __FUNCTION__);
res = internalPauseAndWaitLocked(maxExpectedDuration);
if (res != OK) {
SET_ERR_L("Can't pause captures to reconfigure streams!");
return res;
}
wasActive = true;
break;
default:
SET_ERR_L("Unexpected status: %d", mStatus);
return INVALID_OPERATION;
}
assert(mStatus != STATUS_ACTIVE);
sp<Camera3OutputStream> newStream;
if (consumers.size() == 0 && !hasDeferredConsumer) {
ALOGE("%s: Number of consumers cannot be smaller than 1", __FUNCTION__);
return BAD_VALUE;
}
if (hasDeferredConsumer && format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
ALOGE("Deferred consumer stream creation only support IMPLEMENTATION_DEFINED format");
return BAD_VALUE;
}
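// BLOB streams need an explicit buffer size: depth point clouds and JPEG APP
// segment streams use their own sizing; all other BLOB streams use the JPEG
// size estimate derived from the static maximum JPEG size.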
if (format == HAL_PIXEL_FORMAT_BLOB) {
ssize_t blobBufferSize;
if (dataSpace == HAL_DATASPACE_DEPTH) {
blobBufferSize = getPointCloudBufferSize();
if (blobBufferSize <= 0) {
SET_ERR_L("Invalid point cloud buffer size %zd", blobBufferSize);
return BAD_VALUE;
}
} else if (dataSpace == static_cast<android_dataspace>(HAL_DATASPACE_JPEG_APP_SEGMENTS)) {
blobBufferSize = width * height;
} else {
blobBufferSize = getJpegBufferSize(width, height);
if (blobBufferSize <= 0) {
SET_ERR_L("Invalid jpeg buffer size %zd", blobBufferSize);
return BAD_VALUE;
}
}
newStream = new Camera3OutputStream(mNextStreamId, consumers[0],
width, height, blobBufferSize, format, dataSpace, rotation,
mTimestampOffset, physicalCameraId, streamSetId);
} else if (format == HAL_PIXEL_FORMAT_RAW_OPAQUE) {
ssize_t rawOpaqueBufferSize = getRawOpaqueBufferSize(width, height);
if (rawOpaqueBufferSize <= 0) {
SET_ERR_L("Invalid RAW opaque buffer size %zd", rawOpaqueBufferSize);
return BAD_VALUE;
}
newStream = new Camera3OutputStream(mNextStreamId, consumers[0],
width, height, rawOpaqueBufferSize, format, dataSpace, rotation,
mTimestampOffset, physicalCameraId, streamSetId);
} else if (isShared) {
newStream = new Camera3SharedOutputStream(mNextStreamId, consumers,
width, height, format, consumerUsage, dataSpace, rotation,
mTimestampOffset, physicalCameraId, streamSetId,
mUseHalBufManager);
} else if (consumers.size() == 0 && hasDeferredConsumer) {
newStream = new Camera3OutputStream(mNextStreamId,
width, height, format, consumerUsage, dataSpace, rotation,
mTimestampOffset, physicalCameraId, streamSetId);
} else {
newStream = new Camera3OutputStream(mNextStreamId, consumers[0],
width, height, format, dataSpace, rotation,
mTimestampOffset, physicalCameraId, streamSetId);
}
size_t consumerCount = consumers.size();
for (size_t i = 0; i < consumerCount; i++) {
int id = newStream->getSurfaceId(consumers[i]);
if (id < 0) {
SET_ERR_L("Invalid surface id");
return BAD_VALUE;
}
if (surfaceIds != nullptr) {
surfaceIds->push_back(id);
}
}
newStream->setStatusTracker(mStatusTracker);
newStream->setBufferManager(mBufferManager);
res = mOutputStreams.add(mNextStreamId, newStream);
if (res < 0) {
SET_ERR_L("Can't add new stream to set: %s (%d)", strerror(-res), res);
return res;
}
*id = mNextStreamId++;
mNeedConfig = true;
// Continue captures if active at start
if (wasActive) {
ALOGV("%s: Restarting activity to reconfigure streams", __FUNCTION__);
// Reuse current operating mode and session parameters for new stream config
res = configureStreamsLocked(mOperatingMode, mSessionParams);
if (res != OK) {
CLOGE("Can't reconfigure device for new stream %d: %s (%d)",
mNextStreamId, strerror(-res), res);
return res;
}
internalResumeLocked();
}
ALOGV("Camera %s: Created new stream", mId.string());
return OK;
}
status_t Camera3Device::getStreamInfo(int id, StreamInfo *streamInfo) {
ATRACE_CALL();
if (nullptr == streamInfo) {
return BAD_VALUE;
}
Mutex::Autolock il(mInterfaceLock);
Mutex::Autolock l(mLock);
switch (mStatus) {
case STATUS_ERROR:
CLOGE("Device has encountered a serious error");
return INVALID_OPERATION;
case STATUS_UNINITIALIZED:
CLOGE("Device not initialized!");
return INVALID_OPERATION;
case STATUS_UNCONFIGURED:
case STATUS_CONFIGURED:
case STATUS_ACTIVE:
// OK
break;
default:
SET_ERR_L("Unexpected status: %d", mStatus);
return INVALID_OPERATION;
}
sp<Camera3StreamInterface> stream = mOutputStreams.get(id);
if (stream == nullptr) {
CLOGE("Stream %d is unknown", id);
return BAD_VALUE;
}
streamInfo->width = stream->getWidth();
streamInfo->height = stream->getHeight();
streamInfo->format = stream->getFormat();
streamInfo->dataSpace = stream->getDataSpace();
streamInfo->formatOverridden = stream->isFormatOverridden();
streamInfo->originalFormat = stream->getOriginalFormat();
streamInfo->dataSpaceOverridden = stream->isDataSpaceOverridden();
streamInfo->originalDataSpace = stream->getOriginalDataSpace();
return OK;
}
status_t Camera3Device::setStreamTransform(int id,
int transform) {
ATRACE_CALL();
Mutex::Autolock il(mInterfaceLock);
Mutex::Autolock l(mLock);
switch (mStatus) {
case STATUS_ERROR:
CLOGE("Device has encountered a serious error");
return INVALID_OPERATION;
case STATUS_UNINITIALIZED:
CLOGE("Device not initialized");
return INVALID_OPERATION;
case STATUS_UNCONFIGURED:
case STATUS_CONFIGURED:
case STATUS_ACTIVE:
// OK
break;
default:
SET_ERR_L("Unexpected status: %d", mStatus);
return INVALID_OPERATION;
}
sp<Camera3OutputStreamInterface> stream = mOutputStreams.get(id);
if (stream == nullptr) {
CLOGE("Stream %d does not exist", id);
return BAD_VALUE;
}
return stream->setTransform(transform);
}
status_t Camera3Device::deleteStream(int id) {
ATRACE_CALL();
Mutex::Autolock il(mInterfaceLock);
Mutex::Autolock l(mLock);
status_t res;
ALOGV("%s: Camera %s: Deleting stream %d", __FUNCTION__, mId.string(), id);
// CameraDevice semantics require device to already be idle before
// deleteStream is called, unlike for createStream.
if (mStatus == STATUS_ACTIVE) {
ALOGW("%s: Camera %s: Device not idle", __FUNCTION__, mId.string());
return -EBUSY;
}
if (mStatus == STATUS_ERROR) {
ALOGW("%s: Camera %s: deleteStream not allowed in ERROR state",
__FUNCTION__, mId.string());
return -EBUSY;
}
sp<Camera3StreamInterface> deletedStream;
sp<Camera3StreamInterface> stream = mOutputStreams.get(id);
if (mInputStream != NULL && id == mInputStream->getId()) {
deletedStream = mInputStream;
mInputStream.clear();
} else {
if (stream == nullptr) {
CLOGE("Stream %d does not exist", id);
return BAD_VALUE;
}
}
// Delete output stream or the output part of a bi-directional stream.
if (stream != nullptr) {
deletedStream = stream;
mOutputStreams.remove(id);
}
// Free up the stream endpoint so that it can be used by some other stream
res = deletedStream->disconnect();
if (res != OK) {
SET_ERR_L("Can't disconnect deleted stream %d", id);
// fall through since we want to still list the stream as deleted.
}
mDeletedStreams.add(deletedStream);
mNeedConfig = true;
return res;
}
status_t Camera3Device::configureStreams(const CameraMetadata& sessionParams, int operatingMode) {
ATRACE_CALL();
ALOGV("%s: E", __FUNCTION__);
Mutex::Autolock il(mInterfaceLock);
Mutex::Autolock l(mLock);
// In case the client doesn't include any session parameter, try a
// speculative configuration using the values from the last cached
// default request.
if (sessionParams.isEmpty() &&
((mLastTemplateId > 0) && (mLastTemplateId < CAMERA3_TEMPLATE_COUNT)) &&
(!mRequestTemplateCache[mLastTemplateId].isEmpty())) {
ALOGV("%s: Speculative session param configuration with template id: %d", __func__,
mLastTemplateId);
return filterParamsAndConfigureLocked(mRequestTemplateCache[mLastTemplateId],
operatingMode);
}
return filterParamsAndConfigureLocked(sessionParams, operatingMode);
}
status_t Camera3Device::filterParamsAndConfigureLocked(const CameraMetadata& sessionParams,
int operatingMode) {
// Filter out any incoming session parameters
const CameraMetadata params(sessionParams);
camera_metadata_entry_t availableSessionKeys = mDeviceInfo.find(
ANDROID_REQUEST_AVAILABLE_SESSION_KEYS);
CameraMetadata filteredParams(availableSessionKeys.count);
camera_metadata_t *meta = const_cast<camera_metadata_t *>(
filteredParams.getAndLock());
set_camera_metadata_vendor_id(meta, mVendorTagId);
filteredParams.unlock(meta);
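// Copy only the tags listed in ANDROID_REQUEST_AVAILABLE_SESSION_KEYS from
// the incoming parameters into the filtered set.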
if (availableSessionKeys.count > 0) {
for (size_t i = 0; i < availableSessionKeys.count; i++) {
camera_metadata_ro_entry entry = params.find(
availableSessionKeys.data.i32[i]);
if (entry.count > 0) {
filteredParams.update(entry);
}
}
}
return configureStreamsLocked(operatingMode, filteredParams);
}
status_t Camera3Device::getInputBufferProducer(
sp<IGraphicBufferProducer> *producer) {
ATRACE_CALL();
Mutex::Autolock il(mInterfaceLock);
Mutex::Autolock l(mLock);
if (producer == NULL) {
return BAD_VALUE;
} else if (mInputStream == NULL) {
return INVALID_OPERATION;
}
return mInputStream->getInputBufferProducer(producer);
}
status_t Camera3Device::createDefaultRequest(int templateId,
CameraMetadata *request) {
ATRACE_CALL();
ALOGV("%s: for template %d", __FUNCTION__, templateId);
if (templateId <= 0 || templateId >= CAMERA3_TEMPLATE_COUNT) {
android_errorWriteWithInfoLog(CameraService::SN_EVENT_LOG_ID, "26866110",
CameraThreadState::getCallingUid(), nullptr, 0);
return BAD_VALUE;
}
Mutex::Autolock il(mInterfaceLock);
{
Mutex::Autolock l(mLock);
switch (mStatus) {
case STATUS_ERROR:
CLOGE("Device has encountered a serious error");
return INVALID_OPERATION;
case STATUS_UNINITIALIZED:
CLOGE("Device is not initialized!");
return INVALID_OPERATION;
case STATUS_UNCONFIGURED:
case STATUS_CONFIGURED:
case STATUS_ACTIVE:
// OK
break;
default:
SET_ERR_L("Unexpected status: %d", mStatus);
return INVALID_OPERATION;
}
if (!mRequestTemplateCache[templateId].isEmpty()) {
*request = mRequestTemplateCache[templateId];
mLastTemplateId = templateId;
return OK;
}
}
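// The HAL default-settings call is made while holding only mInterfaceLock;
// mLock is re-acquired afterwards to validate the result and update the
// template cache.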
camera_metadata_t *rawRequest;
status_t res = mInterface->constructDefaultRequestSettings(
(camera3_request_template_t) templateId, &rawRequest);
{
Mutex::Autolock l(mLock);
if (res == BAD_VALUE) {
ALOGI("%s: template %d is not supported on this camera device",
__FUNCTION__, templateId);
return res;
} else if (res != OK) {
CLOGE("Unable to construct request template %d: %s (%d)",
templateId, strerror(-res), res);
return res;
}
set_camera_metadata_vendor_id(rawRequest, mVendorTagId);
mRequestTemplateCache[templateId].acquire(rawRequest);
// Override the template request with zoomRatioMapper
res = mZoomRatioMappers[mId.c_str()].initZoomRatioInTemplate(
&mRequestTemplateCache[templateId]);
if (res != OK) {
CLOGE("Failed to update zoom ratio for template %d: %s (%d)",
templateId, strerror(-res), res);
return res;
}
// Fill in JPEG_QUALITY if not available
if (!mRequestTemplateCache[templateId].exists(ANDROID_JPEG_QUALITY)) {
static const uint8_t kDefaultJpegQuality = 95;
mRequestTemplateCache[templateId].update(ANDROID_JPEG_QUALITY,
&kDefaultJpegQuality, 1);
}
*request = mRequestTemplateCache[templateId];
mLastTemplateId = templateId;
}
return OK;
}
status_t Camera3Device::waitUntilDrained() {
ATRACE_CALL();
Mutex::Autolock il(mInterfaceLock);
nsecs_t maxExpectedDuration = getExpectedInFlightDuration();
Mutex::Autolock l(mLock);
return waitUntilDrainedLocked(maxExpectedDuration);
}
status_t Camera3Device::waitUntilDrainedLocked(nsecs_t maxExpectedDuration) {
switch (mStatus) {
case STATUS_UNINITIALIZED:
case STATUS_UNCONFIGURED:
ALOGV("%s: Already idle", __FUNCTION__);
return OK;
case STATUS_CONFIGURED:
// To avoid race conditions, check with tracker to be sure
case STATUS_ERROR:
case STATUS_ACTIVE:
// Need to verify shut down
break;
default:
SET_ERR_L("Unexpected status: %d",mStatus);
return INVALID_OPERATION;
}
ALOGV("%s: Camera %s: Waiting until idle (%" PRIi64 "ns)", __FUNCTION__, mId.string(),
maxExpectedDuration);
status_t res = waitUntilStateThenRelock(/*active*/ false, maxExpectedDuration);
if (res != OK) {
SET_ERR_L("Error waiting for HAL to drain: %s (%d)", strerror(-res),
res);
}
return res;
}
void Camera3Device::internalUpdateStatusLocked(Status status) {
mStatus = status;
mRecentStatusUpdates.add(mStatus);
mStatusChanged.broadcast();
}
// Pause to reconfigure
status_t Camera3Device::internalPauseAndWaitLocked(nsecs_t maxExpectedDuration) {
if (mRequestThread.get() != nullptr) {
mRequestThread->setPaused(true);
} else {
return NO_INIT;
}
ALOGV("%s: Camera %s: Internal wait until idle (% " PRIi64 " ns)", __FUNCTION__, mId.string(),
maxExpectedDuration);
status_t res = waitUntilStateThenRelock(/*active*/ false, maxExpectedDuration);
if (res != OK) {
SET_ERR_L("Can't idle device in %f seconds!",
maxExpectedDuration/1e9);
}
return res;
}
// Resume after internalPauseAndWaitLocked
status_t Camera3Device::internalResumeLocked() {
status_t res;
mRequestThread->setPaused(false);
ALOGV("%s: Camera %s: Internal wait until active (% " PRIi64 " ns)", __FUNCTION__, mId.string(),
kActiveTimeout);
res = waitUntilStateThenRelock(/*active*/ true, kActiveTimeout);
if (res != OK) {
SET_ERR_L("Can't transition to active in %f seconds!",
kActiveTimeout/1e9);
}
mPauseStateNotify = false;
return OK;
}
status_t Camera3Device::waitUntilStateThenRelock(bool active, nsecs_t timeout) {
status_t res = OK;
size_t startIndex = 0;
if (mStatusWaiters == 0) {
// Clear the list of recent statuses if there are no existing threads waiting on updates to
// this status list
mRecentStatusUpdates.clear();
} else {
// If other threads are waiting on updates to this status list, set the position of the
// first element that this list will check rather than clearing the list.
startIndex = mRecentStatusUpdates.size();
}
mStatusWaiters++;
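// When the HAL buffer manager is in use and we are waiting for idle, signal a
// pipeline drain to the request thread (if the device is still active) and
// notify the request-buffer state machine of the pending idle wait.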
if (!active && mUseHalBufManager) {
auto streamIds = mOutputStreams.getStreamIds();
if (mStatus == STATUS_ACTIVE) {
mRequestThread->signalPipelineDrain(streamIds);
}
mRequestBufferSM.onWaitUntilIdle();
}
bool stateSeen = false;
do {
if (active == (mStatus == STATUS_ACTIVE)) {
// Desired state is current
break;
}
res = mStatusChanged.waitRelative(mLock, timeout);
if (res != OK) break;
// This should be impossible; if it does happen, it could result in subtle
// deadlocks and invalid state transitions.
LOG_ALWAYS_FATAL_IF(startIndex > mRecentStatusUpdates.size(),
"%s: Skipping status updates in Camera3Device, may result in deadlock.",
__FUNCTION__);
// Encountered desired state since we began waiting
for (size_t i = startIndex; i < mRecentStatusUpdates.size(); i++) {
if (active == (mRecentStatusUpdates[i] == STATUS_ACTIVE) ) {
stateSeen = true;
break;
}
}
} while (!stateSeen);
mStatusWaiters--;
return res;
}
status_t Camera3Device::setNotifyCallback(wp<NotificationListener> listener) {
ATRACE_CALL();
std::lock_guard<std::mutex> l(mOutputLock);
if (listener != NULL && mListener != NULL) {
ALOGW("%s: Replacing old callback listener", __FUNCTION__);
}
mListener = listener;
mRequestThread->setNotificationListener(listener);
mPreparerThread->setNotificationListener(listener);
return OK;
}
bool Camera3Device::willNotify3A() {
return false;
}
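// Block until a capture result is available in mResultQueue or the timeout expires.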
status_t Camera3Device::waitForNextFrame(nsecs_t timeout) {
ATRACE_CALL();
std::unique_lock<std::mutex> l(mOutputLock);
while (mResultQueue.empty()) {
auto st = mResultSignal.wait_for(l, std::chrono::nanoseconds(timeout));
if (st == std::cv_status::timeout) {
return TIMED_OUT;
}
}
return OK;
}
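// Pop the oldest capture result from mResultQueue into *frame; returns NOT_ENOUGH_DATA when
// the queue is empty.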
status_t Camera3Device::getNextResult(CaptureResult *frame) {
ATRACE_CALL();
std::lock_guard<std::mutex> l(mOutputLock);
if (mResultQueue.empty()) {
return NOT_ENOUGH_DATA;
}
if (frame == NULL) {
ALOGE("%s: argument cannot be NULL", __FUNCTION__);
return BAD_VALUE;
}
CaptureResult &result = *(mResultQueue.begin());
frame->mResultExtras = result.mResultExtras;
frame->mMetadata.acquire(result.mMetadata);
frame->mPhysicalMetadatas = std::move(result.mPhysicalMetadatas);
mResultQueue.erase(mResultQueue.begin());
return OK;
}
status_t Camera3Device::triggerAutofocus(uint32_t id) {
ATRACE_CALL();
Mutex::Autolock il(mInterfaceLock);
ALOGV("%s: Triggering autofocus, id %d", __FUNCTION__, id);
// Mix this trigger into the next request, and only the next request.
RequestTrigger trigger[] = {
{
ANDROID_CONTROL_AF_TRIGGER,
ANDROID_CONTROL_AF_TRIGGER_START
},
{
ANDROID_CONTROL_AF_TRIGGER_ID,
static_cast<int32_t>(id)
}
};
return mRequestThread->queueTrigger(trigger,
sizeof(trigger)/sizeof(trigger[0]));
}
status_t Camera3Device::triggerCancelAutofocus(uint32_t id) {
ATRACE_CALL();
Mutex::Autolock il(mInterfaceLock);
ALOGV("%s: Triggering cancel autofocus, id %d", __FUNCTION__, id);
// Mix this trigger into the next request, and only the next request.
RequestTrigger trigger[] = {
{
ANDROID_CONTROL_AF_TRIGGER,
ANDROID_CONTROL_AF_TRIGGER_CANCEL
},
{
ANDROID_CONTROL_AF_TRIGGER_ID,
static_cast<int32_t>(id)
}
};
return mRequestThread->queueTrigger(trigger,
sizeof(trigger)/sizeof(trigger[0]));
}
status_t Camera3Device::triggerPrecaptureMetering(uint32_t id) {
ATRACE_CALL();
Mutex::Autolock il(mInterfaceLock);
ALOGV("%s: Triggering precapture metering, id %d", __FUNCTION__, id);
// Mix this trigger into the next request, and only the next request.
RequestTrigger trigger[] = {
{
ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_START
},
{
ANDROID_CONTROL_AE_PRECAPTURE_ID,
static_cast<int32_t>(id)
}
};
return mRequestThread->queueTrigger(trigger,
sizeof(trigger)/sizeof(trigger[0]));
}
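// Drop all queued capture requests (see b/116514106 for the double-disconnect case) and flush
// any remaining work in the HAL.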
status_t Camera3Device::flush(int64_t *frameNumber) {
ATRACE_CALL();
ALOGV("%s: Camera %s: Flushing all requests", __FUNCTION__, mId.string());
Mutex::Autolock il(mInterfaceLock);
{
Mutex::Autolock l(mLock);
// b/116514106 "disconnect()" can get called twice for the same device. The
// camera device will not be initialized during the second run.
if (mStatus == STATUS_UNINITIALIZED) {
return OK;
}
mRequestThread->clear(/*out*/frameNumber);
}
return mRequestThread->flush();
}
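// Pre-allocate the maximum number of buffers for a stream before it is used in a request.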
status_t Camera3Device::prepare(int streamId) {
return prepare(camera3::Camera3StreamInterface::ALLOCATE_PIPELINE_MAX, streamId);
}
status_t Camera3Device::prepare(int maxCount, int streamId) {
ATRACE_CALL();
ALOGV("%s: Camera %s: Preparing stream %d", __FUNCTION__, mId.string(), streamId);
Mutex::Autolock il(mInterfaceLock);
Mutex::Autolock l(mLock);
sp<Camera3StreamInterface> stream = mOutputStreams.get(streamId);
if (stream == nullptr) {
CLOGE("Stream %d does not exist", streamId);
return BAD_VALUE;
}
if (stream->isUnpreparable() || stream->hasOutstandingBuffers() ) {
CLOGE("Stream %d has already been a request target", streamId);
return BAD_VALUE;
}
if (mRequestThread->isStreamPending(stream)) {
CLOGE("Stream %d is already a target in a pending request", streamId);
return BAD_VALUE;
}
return mPreparerThread->prepare(maxCount, stream);
}
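// Release the buffers held by an idle stream so its memory can be reclaimed; fails if the
// stream still has outstanding buffers or is part of a pending request.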
status_t Camera3Device::tearDown(int streamId) {
ATRACE_CALL();
ALOGV("%s: Camera %s: Tearing down stream %d", __FUNCTION__, mId.string(), streamId);
Mutex::Autolock il(mInterfaceLock);
Mutex::Autolock l(mLock);
sp<Camera3StreamInterface> stream = mOutputStreams.get(streamId);
if (stream == nullptr) {
CLOGE("Stream %d does not exist", streamId);
return BAD_VALUE;
}
if (stream->hasOutstandingBuffers() || mRequestThread->isStreamPending(stream)) {
CLOGE("Stream %d is a target of a in-progress request", streamId);
return BAD_VALUE;
}
return stream->tearDown();
}
status_t Camera3Device::addBufferListenerForStream(int streamId,
wp<Camera3StreamBufferListener> listener) {
ATRACE_CALL();
ALOGV("%s: Camera %s: Adding buffer listener for stream %d", __FUNCTION__, mId.string(), streamId);
Mutex::Autolock il(mInterfaceLock);
Mutex::Autolock l(mLock);
sp<Camera3StreamInterface> stream = mOutputStreams.get(streamId);
if (stream == nullptr) {
CLOGE("Stream %d does not exist", streamId);
return BAD_VALUE;
}
stream->addBufferListener(listener);
return OK;
}
/**
* Methods called by subclasses
*/
void Camera3Device::notifyStatus(bool idle) {
ATRACE_CALL();
{
// Need mLock to safely update state and synchronize to current
// state of methods in flight.
Mutex::Autolock l(mLock);
// We can get various system-idle notices from the status tracker
// while starting up. Only care about them if we've actually sent
// in some requests recently.
if (mStatus != STATUS_ACTIVE && mStatus != STATUS_CONFIGURED) {
return;
}
ALOGV("%s: Camera %s: Now %s, pauseState: %s", __FUNCTION__, mId.string(),
idle ? "idle" : "active", mPauseStateNotify ? "true" : "false");
internalUpdateStatusLocked(idle ? STATUS_CONFIGURED : STATUS_ACTIVE);
// Skip notifying listener if we're doing some user-transparent
// state changes
if (mPauseStateNotify) return;
}
sp<NotificationListener> listener;
{
std::lock_guard<std::mutex> l(mOutputLock);
listener = mListener.promote();
}
if (idle && listener != NULL) {
listener->notifyIdle();
}
}
status_t Camera3Device::setConsumerSurfaces(int streamId,
const std::vector<sp<Surface>>& consumers, std::vector<int> *surfaceIds) {
ATRACE_CALL();
ALOGV("%s: Camera %s: set consumer surface for stream %d",
__FUNCTION__, mId.string(), streamId);
if (surfaceIds == nullptr) {
return BAD_VALUE;
}
Mutex::Autolock il(mInterfaceLock);
Mutex::Autolock l(mLock);
if (consumers.size() == 0) {
CLOGE("No consumer is passed!");
return BAD_VALUE;
}
sp<Camera3OutputStreamInterface> stream = mOutputStreams.get(streamId);
if (stream == nullptr) {
CLOGE("Stream %d is unknown", streamId);
return BAD_VALUE;
}
// isConsumerConfigurationDeferred() returns false once setConsumers() has run, so query it first
bool isDeferred = stream->isConsumerConfigurationDeferred();
status_t res = stream->setConsumers(consumers);
if (res != OK) {
CLOGE("Stream %d set consumer failed (error %d %s) ", streamId, res, strerror(-res));
return res;
}
for (auto &consumer : consumers) {
int id = stream->getSurfaceId(consumer);
if (id < 0) {
CLOGE("Invalid surface id!");
return BAD_VALUE;
}
surfaceIds->push_back(id);
}
if (isDeferred) {
if (!stream->isConfiguring()) {
CLOGE("Stream %d was already fully configured.", streamId);
return INVALID_OPERATION;
}
res = stream->finishConfiguration();
if (res != OK) {
// If finishConfiguration fails due to abandoned surface, do not set
// device to error state.
bool isSurfaceAbandoned =
(res == NO_INIT || res == DEAD_OBJECT) && stream->isAbandoned();
if (!isSurfaceAbandoned) {
SET_ERR_L("Can't finish configuring output stream %d: %s (%d)",
stream->getId(), strerror(-res), res);
}
return res;
}
}
return OK;
}
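// Swap the set of shared surfaces attached to an output stream; rejects the update if any
// surface being removed is still referenced by a pending request.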
status_t Camera3Device::updateStream(int streamId, const std::vector<sp<Surface>> &newSurfaces,
const std::vector<OutputStreamInfo> &outputInfo,
const std::vector<size_t> &removedSurfaceIds, KeyedVector<sp<Surface>, size_t> *outputMap) {
Mutex::Autolock il(mInterfaceLock);
Mutex::Autolock l(mLock);
sp<Camera3OutputStreamInterface> stream = mOutputStreams.get(streamId);
if (stream == nullptr) {
CLOGE("Stream %d is unknown", streamId);
return BAD_VALUE;
}
for (const auto &it : removedSurfaceIds) {
if (mRequestThread->isOutputSurfacePending(streamId, it)) {
CLOGE("Shared surface still part of a pending request!");
return -EBUSY;
}
}
status_t res = stream->updateStream(newSurfaces, outputInfo, removedSurfaceIds, outputMap);
if (res != OK) {
CLOGE("Stream %d failed to update stream (error %d %s) ",
streamId, res, strerror(-res));
if (res == UNKNOWN_ERROR) {
SET_ERR_L("%s: Stream update failed to revert to previous output configuration!",
__FUNCTION__);
}
return res;
}
return res;
}
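// Toggle buffer dropping for the given output stream.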
status_t Camera3Device::dropStreamBuffers(bool dropping, int streamId) {
Mutex::Autolock il(mInterfaceLock);
Mutex::Autolock l(mLock);
sp<Camera3OutputStreamInterface> stream = mOutputStreams.get(streamId);
if (stream == nullptr) {
ALOGE("%s: Stream %d is not found.", __FUNCTION__, streamId);
return BAD_VALUE;
}
return stream->dropBuffers(dropping);
}
/**
* Camera3Device private methods
*/
sp<Camera3Device::CaptureRequest> Camera3Device::createCaptureRequest(
const PhysicalCameraSettingsList &request, const SurfaceMap &surfaceMap) {
ATRACE_CALL();
sp<CaptureRequest> newRequest = new CaptureRequest();
newRequest->mSettingsList = request;
camera_metadata_entry_t inputStreams =
newRequest->mSettingsList.begin()->metadata.find(ANDROID_REQUEST_INPUT_STREAMS);
if (inputStreams.count > 0) {
if (mInputStream == NULL ||
mInputStream->getId() != inputStreams.data.i32[0]) {
CLOGE("Request references unknown input stream %d",
inputStreams.data.u8[0]);
return NULL;
}
if (mInputStream->isConfiguring()) {
SET_ERR_L("%s: input stream %d is not configured!",
__FUNCTION__, mInputStream->getId());
return NULL;
}
// Check if stream prepare is blocking requests.
if (mInputStream->isBlockedByPrepare()) {
CLOGE("Request references an input stream that's being prepared!");
return NULL;
}
newRequest->mInputStream = mInputStream;
newRequest->mSettingsList.begin()->metadata.erase(ANDROID_REQUEST_INPUT_STREAMS);
}
camera_metadata_entry_t streams =
newRequest->mSettingsList.begin()->metadata.find(ANDROID_REQUEST_OUTPUT_STREAMS);
if (streams.count == 0) {
CLOGE("Zero output streams specified!");
return NULL;
}
for (size_t i = 0; i < streams.count; i++) {
sp<Camera3OutputStreamInterface> stream = mOutputStreams.get(streams.data.i32[i]);
if (stream == nullptr) {
CLOGE("Request references unknown stream %d",
streams.data.i32[i]);
return NULL;
}
// It is illegal to include a deferred-consumer output stream in a request
auto iter = surfaceMap.find(streams.data.i32[i]);
if (iter != surfaceMap.end()) {
const std::vector<size_t>& surfaces = iter->second;
for (const auto& surface : surfaces) {
if (stream->isConsumerConfigurationDeferred(surface)) {
CLOGE("Stream %d surface %zu hasn't finished configuration yet "
"due to deferred consumer", stream->getId(), surface);
return NULL;
}
}
newRequest->mOutputSurfaces[streams.data.i32[i]] = surfaces;
}
if (stream->isConfiguring()) {
SET_ERR_L("%s: stream %d is not configured!", __FUNCTION__, stream->getId());
return NULL;
}
// Check if stream prepare is blocking requests.
if (stream->isBlockedByPrepare()) {
CLOGE("Request references an output stream that's being prepared!");
return NULL;
}
newRequest->mOutputStreams.push(stream);
}
newRequest->mSettingsList.begin()->metadata.erase(ANDROID_REQUEST_OUTPUT_STREAMS);
newRequest->mBatchSize = 1;
auto rotateAndCropEntry =
newRequest->mSettingsList.begin()->metadata.find(ANDROID_SCALER_ROTATE_AND_CROP);
if (rotateAndCropEntry.count > 0 &&
rotateAndCropEntry.data.u8[0] == ANDROID_SCALER_ROTATE_AND_CROP_AUTO) {
newRequest->mRotateAndCropAuto = true;
} else {
newRequest->mRotateAndCropAuto = false;
}
auto zoomRatioEntry =
newRequest->mSettingsList.begin()->metadata.find(ANDROID_CONTROL_ZOOM_RATIO);
if (zoomRatioEntry.count > 0 &&
zoomRatioEntry.data.f[0] == 1.0f) {
newRequest->mZoomRatioIs1x = true;
} else {
newRequest->mZoomRatioIs1x = false;
}
return newRequest;
}
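// Abort an in-progress stream configuration and move the device back to STATUS_UNCONFIGURED
// so a later configure attempt can start cleanly.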
void Camera3Device::cancelStreamsConfigurationLocked() {
int res = OK;
if (mInputStream != NULL && mInputStream->isConfiguring()) {
res = mInputStream->cancelConfiguration();
if (res != OK) {
CLOGE("Can't cancel configuring input stream %d: %s (%d)",
mInputStream->getId(), strerror(-res), res);
}
}
for (size_t i = 0; i < mOutputStreams.size(); i++) {
sp<Camera3OutputStreamInterface> outputStream = mOutputStreams[i];
if (outputStream->isConfiguring()) {
res = outputStream->cancelConfiguration();
if (res != OK) {
CLOGE("Can't cancel configuring output stream %d: %s (%d)",
outputStream->getId(), strerror(-res), res);
}
}
}
// Return state to that at start of call, so that future configures
// properly clean things up
internalUpdateStatusLocked(STATUS_UNCONFIGURED);
mNeedConfig = true;
res = mPreparerThread->resume();
if (res != OK) {
ALOGE("%s: Camera %s: Preparer thread failed to resume!", __FUNCTION__, mId.string());
}
}
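// Returns true if the input stream or any output stream has been abandoned by its consumer.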
bool Camera3Device::checkAbandonedStreamsLocked() {
if ((mInputStream.get() != nullptr) && (mInputStream->isAbandoned())) {
return true;
}
for (size_t i = 0; i < mOutputStreams.size(); i++) {
auto stream = mOutputStreams[i];
if ((stream.get() != nullptr) && (stream->isAbandoned())) {
return true;
}
}
return false;
}
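// Pause capture, re-run stream configuration with the supplied session parameters, and resume;
// returns true if reconfiguration succeeded (or was skipped because a stream was abandoned).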
bool Camera3Device::reconfigureCamera(const CameraMetadata& sessionParams, int clientStatusId) {
ATRACE_CALL();
bool ret = false;
Mutex::Autolock il(mInterfaceLock);
nsecs_t maxExpectedDuration = getExpectedInFlightDuration();
Mutex::Autolock l(mLock);
if (checkAbandonedStreamsLocked()) {
ALOGW("%s: Abandoned stream detected, session parameters can't be applied correctly!",
__FUNCTION__);
return true;
}
status_t rc = NO_ERROR;
bool markClientActive = false;
if (mStatus == STATUS_ACTIVE) {
markClientActive = true;
mPauseStateNotify = true;
mStatusTracker->markComponentIdle(clientStatusId, Fence::NO_FENCE);
rc = internalPauseAndWaitLocked(maxExpectedDuration);
}
if (rc == NO_ERROR) {
mNeedConfig = true;
rc = configureStreamsLocked(mOperatingMode, sessionParams, /*notifyRequestThread*/ false);
if (rc == NO_ERROR) {
ret = true;
mPauseStateNotify = false;
// Moving to active state while holding 'mLock' is important.
// There could be pending calls to 'create-/deleteStream' which
// will trigger another stream configuration while the already
// present streams end up with outstanding buffers that will
// not get drained.
internalUpdateStatusLocked(STATUS_ACTIVE);
} else if (rc == DEAD_OBJECT) {
// DEAD_OBJECT can be returned if either the consumer surface is
// abandoned, or the HAL has died.
// - If the HAL has died, configureStreamsLocked call will set
// device to error state,
// - If surface is abandoned, we should not set device to error
// state.
ALOGE("Failed to re-configure camera due to abandoned surface");
} else {
SET_ERR_L("Failed to re-configure camera: %d", rc);
}
} else {
ALOGE("%s: Failed to pause streaming: %d", __FUNCTION__, rc);
}
if (markClientActive) {
mStatusTracker->markComponentActive(clientStatusId);
}
return ret;
}
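// Core stream configuration: builds the camera3_stream_configuration from the current input
// and output streams, hands it to the HAL, and finishes configuration of each stream.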
status_t Camera3Device::configureStreamsLocked(int operatingMode,
const CameraMetadata& sessionParams, bool notifyRequestThread) {
ATRACE_CALL();
status_t res;
if (mStatus != STATUS_UNCONFIGURED && mStatus != STATUS_CONFIGURED) {
CLOGE("Not idle");
return INVALID_OPERATION;
}
if (operatingMode < 0) {
CLOGE("Invalid operating mode: %d", operatingMode);
return BAD_VALUE;
}
bool isConstrainedHighSpeed =
static_cast<int>(StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE) ==
operatingMode;
if (mOperatingMode != operatingMode) {
mNeedConfig = true;
mIsConstrainedHighSpeedConfiguration = isConstrainedHighSpeed;
mOperatingMode = operatingMode;
}
// In case called from configureStreams, abort queued input buffers not belonging to
// any pending requests.
if (mInputStream != NULL && notifyRequestThread) {
while (true) {
camera3_stream_buffer_t inputBuffer;
status_t res = mInputStream->getInputBuffer(&inputBuffer,
/*respectHalLimit*/ false);
if (res != OK) {
// Exhausted acquiring all input buffers.
break;
}
inputBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
res = mInputStream->returnInputBuffer(inputBuffer);
if (res != OK) {
ALOGE("%s: %d: couldn't return input buffer while clearing input queue: "
"%s (%d)", __FUNCTION__, __LINE__, strerror(-res), res);
}
}
}
if (!mNeedConfig) {
ALOGV("%s: Skipping config, no stream changes", __FUNCTION__);
return OK;
}
// Workaround for a spec bug in device HALv3.2 or older: configuring zero streams is not
// allowed, so add a dummy stream instead.
// TODO: Bug: 17321404 for fixing the HAL spec and removing this workaround.
if (mOutputStreams.size() == 0) {
addDummyStreamLocked();
} else {
tryRemoveDummyStreamLocked();
}
// Start configuring the streams
ALOGV("%s: Camera %s: Starting stream configuration", __FUNCTION__, mId.string());
mPreparerThread->pause();
camera3_stream_configuration config;
config.operation_mode = mOperatingMode;
config.num_streams = (mInputStream != NULL) + mOutputStreams.size();
Vector<camera3_stream_t*> streams;
streams.setCapacity(config.num_streams);
std::vector<uint32_t> bufferSizes(config.num_streams, 0);
if (mInputStream != NULL) {
camera3_stream_t *inputStream;
inputStream = mInputStream->startConfiguration();
if (inputStream == NULL) {
CLOGE("Can't start input stream configuration");
cancelStreamsConfigurationLocked();
return INVALID_OPERATION;
}
streams.add(inputStream);
}
for (size_t i = 0; i < mOutputStreams.size(); i++) {
// Don't configure bidi streams twice, nor add them twice to the list
if (mOutputStreams[i].get() ==
static_cast<Camera3StreamInterface*>(mInputStream.get())) {
config.num_streams--;
continue;
}
camera3_stream_t *outputStream;
outputStream = mOutputStreams[i]->startConfiguration();
if (outputStream == NULL) {
CLOGE("Can't start output stream configuration");
cancelStreamsConfigurationLocked();
return INVALID_OPERATION;
}
streams.add(outputStream);
if (outputStream->format == HAL_PIXEL_FORMAT_BLOB) {
size_t k = i + ((mInputStream != nullptr) ? 1 : 0); // Input stream if present should
// always occupy the initial entry.
if (outputStream->data_space == HAL_DATASPACE_V0_JFIF) {
bufferSizes[k] = static_cast<uint32_t>(
getJpegBufferSize(outputStream->width, outputStream->height));
} else if (outputStream->data_space ==
static_cast<android_dataspace>(HAL_DATASPACE_JPEG_APP_SEGMENTS)) {
bufferSizes[k] = outputStream->width * outputStream->height;
} else {
ALOGW("%s: Blob dataSpace %d not supported",
__FUNCTION__, outputStream->data_space);
}
}
}
config.streams = streams.editArray();
// Do the HAL configuration; will potentially touch stream
// max_buffers, usage, and priv fields, as well as data_space and format
// fields for IMPLEMENTATION_DEFINED formats.
const camera_metadata_t *sessionBuffer = sessionParams.getAndLock();
res = mInterface->configureStreams(sessionBuffer, &config, bufferSizes);
sessionParams.unlock(sessionBuffer);
if (res == BAD_VALUE) {
// HAL rejected this set of streams as unsupported, clean up config
// attempt and return to unconfigured state
CLOGE("Set of requested inputs/outputs not supported by HAL");
cancelStreamsConfigurationLocked();
return BAD_VALUE;
} else if (res != OK) {
// Some other kind of error from configure_streams - this is not
// expected
SET_ERR_L("Unable to configure streams with HAL: %s (%d)",
strerror(-res), res);
return res;
}
// Finish all stream configuration immediately.
// TODO: Try to relax this later back to lazy completion, which should be
// faster
if (mInputStream != NULL && mInputStream->isConfiguring()) {
bool streamReConfigured = false;
res = mInputStream->finishConfiguration(&streamReConfigured);
if (res != OK) {
CLOGE("Can't finish configuring input stream %d: %s (%d)",
mInputStream->getId(), strerror(-res), res);
cancelStreamsConfigurationLocked();
if ((res == NO_INIT || res == DEAD_OBJECT) && mInputStream->isAbandoned()) {
return DEAD_OBJECT;
}
return BAD_VALUE;
}
if (streamReConfigured) {
mInterface->onStreamReConfigured(mInputStream->getId());
}
}
for (size_t i = 0; i < mOutputStreams.size(); i++) {
sp<Camera3OutputStreamInterface> outputStream = mOutputStreams[i];
if (outputStream->isConfiguring() && !outputStream->isConsumerConfigurationDeferred()) {
bool streamReConfigured = false;
res = outputStream->finishConfiguration(&streamReConfigured);
if (res != OK) {
CLOGE("Can't finish configuring output stream %d: %s (%d)",
outputStream->getId(), strerror(-res), res);
cancelStreamsConfigurationLocked();
if ((res == NO_INIT || res == DEAD_OBJECT) && outputStream->isAbandoned()) {
return DEAD_OBJECT;
}
return BAD_VALUE;
}
if (streamReConfigured) {
mInterface->onStreamReConfigured(outputStream->getId());
}
}
}
// Request thread needs to know to avoid using repeat-last-settings protocol
// across configure_streams() calls
if (notifyRequestThread) {
mRequestThread->configurationComplete(mIsConstrainedHighSpeedConfiguration, sessionParams);
}
char value[PROPERTY_VALUE_MAX];
property_get("camera.fifo.disable", value, "0");
int32_t disableFifo = atoi(value);
if (disableFifo != 1) {
// Boost priority of request thread to SCHED_FIFO.
pid_t requestThreadTid = mRequestThread->getTid();
res = requestPriority(getpid(), requestThreadTid,
kRequestThreadPriority, /*isForApp*/ false, /*asynchronous*/ false);
if (res != OK) {
ALOGW("Can't set realtime priority for request processing thread: %s (%d)",
strerror(-res), res);
} else {
ALOGD("Set real time priority for request queue thread (tid %d)", requestThreadTid);
}
}
// Update device state
const camera_metadata_t *newSessionParams = sessionParams.getAndLock();
const camera_metadata_t *currentSessionParams = mSessionParams.getAndLock();
bool updateSessionParams = (newSessionParams != currentSessionParams);
sessionParams.unlock(newSessionParams);
mSessionParams.unlock(currentSessionParams);
if (updateSessionParams) {
mSessionParams = sessionParams;
}
mNeedConfig = false;
internalUpdateStatusLocked((mDummyStreamId == NO_STREAM) ?
STATUS_CONFIGURED : STATUS_UNCONFIGURED);
ALOGV("%s: Camera %s: Stream configuration complete", __FUNCTION__, mId.string());
// tear down the deleted streams after configure streams.
mDeletedStreams.clear();
auto rc = mPreparerThread->resume();
if (rc != OK) {
SET_ERR_L("%s: Camera %s: Preparer thread failed to resume!", __FUNCTION__, mId.string());
return rc;
}
if (mDummyStreamId == NO_STREAM) {
mRequestBufferSM.onStreamsConfigured();
}
return OK;
}
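// Create and register a Camera3DummyStream so that a configuration with no real output
// streams can still be sent to the HAL.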
status_t Camera3Device::addDummyStreamLocked() {
ATRACE_CALL();
status_t res;
if (mDummyStreamId != NO_STREAM) {
// Should never be adding a second dummy stream when one is already
// active
SET_ERR_L("%s: Camera %s: A dummy stream already exists!",
__FUNCTION__, mId.string());
return INVALID_OPERATION;
}
ALOGV("%s: Camera %s: Adding a dummy stream", __FUNCTION__, mId.string());
sp<Camera3OutputStreamInterface> dummyStream =
new Camera3DummyStream(mNextStreamId);
res = mOutputStreams.add(mNextStreamId, dummyStream);
if (res < 0) {
SET_ERR_L("Can't add dummy stream to set: %s (%d)", strerror(-res), res);
return res;
}
mDummyStreamId = mNextStreamId;
mNextStreamId++;
return OK;
}
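// Remove the dummy stream once at least one real output stream exists; the disconnected
// stream is kept in mDeletedStreams until the next configure call tears it down.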
status_t Camera3Device::tryRemoveDummyStreamLocked() {
ATRACE_CALL();
status_t res;
if (mDummyStreamId == NO_STREAM) return OK;
if (mOutputStreams.size() == 1) return OK;
ALOGV("%s: Camera %s: Removing the dummy stream", __FUNCTION__, mId.string());
// Ok, have a dummy stream and there's at least one other output stream,
// so remove the dummy
sp<Camera3StreamInterface> deletedStream = mOutputStreams.get(mDummyStreamId);
if (deletedStream == nullptr) {
SET_ERR_L("Dummy stream %d does not appear to exist", mDummyStreamId);
return INVALID_OPERATION;
}
mOutputStreams.remove(mDummyStreamId);
// Free up the stream endpoint so that it can be used by some other stream
res = deletedStream->disconnect();
if (res != OK) {
SET_ERR_L("Can't disconnect deleted dummy stream %d", mDummyStreamId);
// fall through since we want to still list the stream as deleted.
}
mDeletedStreams.add(deletedStream);
mDummyStreamId = NO_STREAM;
return res;
}
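// Error-state helpers: log the failure cause, pause the request thread, transition to
// STATUS_ERROR (first error only), and notify the listener of a device error.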
void Camera3Device::setErrorState(const char *fmt, ...) {
ATRACE_CALL();
Mutex::Autolock l(mLock);
va_list args;
va_start(args, fmt);
setErrorStateLockedV(fmt, args);
va_end(args);
}
void Camera3Device::setErrorStateV(const char *fmt, va_list args) {
ATRACE_CALL();
Mutex::Autolock l(mLock);
setErrorStateLockedV(fmt, args);
}
void Camera3Device::setErrorStateLocked(const char *fmt, ...) {
va_list args;
va_start(args, fmt);
setErrorStateLockedV(fmt, args);
va_end(args);
}
void Camera3Device::setErrorStateLockedV(const char *fmt, va_list args) {
// Print out all error messages to log
String8 errorCause = String8::formatV(fmt, args);
ALOGE("Camera %s: %s", mId.string(), errorCause.string());
// But only do error state transition steps for the first error
if (mStatus == STATUS_ERROR || mStatus == STATUS_UNINITIALIZED) return;
mErrorCause = errorCause;
if (mRequestThread != nullptr) {
mRequestThread->setPaused(true);
}
internalUpdateStatusLocked(STATUS_ERROR);
// Notify upstream about a device error
sp<NotificationListener> listener = mListener.promote();
if (listener != NULL) {
listener->notifyError(hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_DEVICE,
CaptureResultExtras());
}
// Save stack trace. View by dumping it later.
CameraTraces::saveTrace();
// TODO: consider adding errorCause and client pid/procname
}
/**
* In-flight request management
*/
status_t Camera3Device::registerInFlight(uint32_t frameNumber,
int32_t numBuffers, CaptureResultExtras resultExtras, bool hasInput,
bool hasAppCallback, nsecs_t maxExpectedDuration,
std::set<String8>& physicalCameraIds, bool isStillCapture,
bool isZslCapture, bool rotateAndCropAuto, const std::set<std::string>& cameraIdsWithZoom,
const SurfaceMap& outputSurfaces) {
ATRACE_CALL();
std::lock_guard<std::mutex> l(mInFlightLock);
ssize_t res;
res = mInFlightMap.add(frameNumber, InFlightRequest(numBuffers, resultExtras, hasInput,
hasAppCallback, maxExpectedDuration, physicalCameraIds, isStillCapture, isZslCapture,
rotateAndCropAuto, cameraIdsWithZoom, outputSurfaces));
if (res < 0) return res;
if (mInFlightMap.size() == 1) {
// Hold a separate dedicated tracker lock to prevent race with disconnect and also
// avoid a deadlock during reprocess requests.
Mutex::Autolock l(mTrackerLock);
if (mStatusTracker != nullptr) {
mStatusTracker->markComponentActive(mInFlightStatusId);
}
}
mExpectedInflightDuration += maxExpectedDuration;
return OK;
}
void Camera3Device::onInflightEntryRemovedLocked(nsecs_t duration) {
// Indicate idle inFlightMap to the status tracker
if (mInFlightMap.size() == 0) {
mRequestBufferSM.onInflightMapEmpty();
// Hold a separate dedicated tracker lock to prevent race with disconnect and also
// avoid a deadlock during reprocess requests.
Mutex::Autolock l(mTrackerLock);
if (mStatusTracker != nullptr) {
mStatusTracker->markComponentIdle(mInFlightStatusId, Fence::NO_FENCE);
}
}
mExpectedInflightDuration -= duration;
}
void Camera3Device::checkInflightMapLengthLocked() {
// Sanity check - if we have too many in-flight frames with a long total inflight duration,
// something has likely gone wrong. This might still be legitimate if the application sends
// in a long burst of long-exposure requests.
if (mExpectedInflightDuration > kMinWarnInflightDuration) {
if (!mIsConstrainedHighSpeedConfiguration && mInFlightMap.size() > kInFlightWarnLimit) {
CLOGW("In-flight list too large: %zu, total inflight duration %" PRIu64,
mInFlightMap.size(), mExpectedInflightDuration);
} else if (mIsConstrainedHighSpeedConfiguration && mInFlightMap.size() >
kInFlightWarnLimitHighSpeed) {
CLOGW("In-flight list too large for high speed configuration: %zu,"
"total inflight duration %" PRIu64,
mInFlightMap.size(), mExpectedInflightDuration);
}
}
}
void Camera3Device::onInflightMapFlushedLocked() {
mExpectedInflightDuration = 0;
}
void Camera3Device::removeInFlightMapEntryLocked(int idx) {
ATRACE_HFR_CALL();
nsecs_t duration = mInFlightMap.valueAt(idx).maxExpectedDuration;
mInFlightMap.removeItemsAt(idx, 1);
onInflightEntryRemovedLocked(duration);
}
void Camera3Device::flushInflightRequests() {
ATRACE_CALL();
sp<NotificationListener> listener;
{
std::lock_guard<std::mutex> l(mOutputLock);
listener = mListener.promote();
}
FlushInflightReqStates states {
mId, mInFlightLock, mInFlightMap, mUseHalBufManager,
listener, *this, *mInterface, *this};
camera3::flushInflightRequests(states);
}
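// Return a copy of the settings from the most recently submitted capture request.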
CameraMetadata Camera3Device::getLatestRequestLocked() {
ALOGV("%s", __FUNCTION__);
CameraMetadata retVal;
if (mRequestThread != NULL) {
retVal = mRequestThread->getLatestRequest();
}
return retVal;
}
void Camera3Device::monitorMetadata(TagMonitor::eventSource source,
int64_t frameNumber, nsecs_t timestamp, const CameraMetadata& metadata,
const std::unordered_map<std::string, CameraMetadata>& physicalMetadata) {
mTagMonitor.monitorMetadata(source, frameNumber, timestamp, metadata,
physicalMetadata);
}
/**
* HalInterface inner class methods
*/
Camera3Device::HalInterface::HalInterface(
sp<ICameraDeviceSession> &session,
std::shared_ptr<RequestMetadataQueue> queue,
bool useHalBufManager, bool supportOfflineProcessing) :
mHidlSession(session),
mRequestMetadataQueue(queue),
mUseHalBufManager(useHalBufManager),
mIsReconfigurationQuerySupported(true),
mSupportOfflineProcessing(supportOfflineProcessing) {
// Check with hardware service manager if we can downcast these interfaces
// Somewhat expensive, so cache the results at startup
auto castResult_3_6 = device::V3_6::ICameraDeviceSession::castFrom(mHidlSession);
if (castResult_3_6.isOk()) {
mHidlSession_3_6 = castResult_3_6;
}
auto castResult_3_5 = device::V3_5::ICameraDeviceSession::castFrom(mHidlSession);
if (castResult_3_5.isOk()) {
mHidlSession_3_5 = castResult_3_5;
}
auto castResult_3_4 = device::V3_4::ICameraDeviceSession::castFrom(