/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution.
* * Neither the name of The Linux Foundation nor the names of its
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/
#define LOG_TAG "QCamera3HWI"
//#define LOG_NDEBUG 0
#define __STDC_LIMIT_MACROS
// To remove
#include <cutils/properties.h>
// System dependencies
#include <dlfcn.h>
#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#include "utils/Timers.h"
#include "sys/ioctl.h"
#include <time.h>
#include <sync/sync.h>
#include "gralloc_priv.h"
#include <map>
#include <unordered_map>
// Display dependencies
#include "qdMetaData.h"
// Camera dependencies
#include "android/QCamera3External.h"
#include "util/QCameraFlash.h"
#include "QCamera3HWI.h"
#include "QCamera3VendorTags.h"
#include "QCameraTrace.h"
// XML parsing
#include "tinyxml2.h"
#include "HdrPlusClientUtils.h"
extern "C" {
#include "mm_camera_dbg.h"
}
#include "cam_cond.h"
using ::android::hardware::camera::common::V1_0::helper::CameraMetadata;
using namespace android;
namespace qcamera {
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
#define EMPTY_PIPELINE_DELAY 2
// mm_camera has 2 partial results: 3A and final result.
// HDR+ requests have 3 partial results: postview, next request ready, and final result.
#define PARTIAL_RESULT_COUNT 3
#define FRAME_SKIP_DELAY 0
#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)
#define VIDEO_4K_WIDTH 3840
#define VIDEO_4K_HEIGHT 2160
#define MAX_EIS_WIDTH 3840
#define MAX_EIS_HEIGHT 2160
#define MAX_RAW_STREAMS 1
#define MAX_STALLING_STREAMS 1
#define MAX_PROCESSED_STREAMS 3
/* Batch mode is enabled only if the requested FPS is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR (30)
#define DEFAULT_VIDEO_FPS (30.0)
#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
#define MAX_HFR_BATCH_SIZE (8)
#define REGIONS_TUPLE_COUNT 5
// Timeout threshold (in seconds) for detection of missing request buffers
#define MISSING_REQUEST_BUF_TIMEOUT 10
#define MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT 30
#define FLUSH_TIMEOUT 3
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))
#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3 ( CAM_QCOM_FEATURE_DENOISE2D |\
CAM_QCOM_FEATURE_CROP |\
CAM_QCOM_FEATURE_ROTATION |\
CAM_QCOM_FEATURE_SHARPNESS |\
CAM_QCOM_FEATURE_SCALE |\
CAM_QCOM_FEATURE_CAC |\
CAM_QCOM_FEATURE_CDS )
/* Per-configuration size for static metadata length */
#define PER_CONFIGURATION_SIZE_3 (3)
#define TIMEOUT_NEVER -1
/* Face rect indices */
#define FACE_LEFT 0
#define FACE_TOP 1
#define FACE_RIGHT 2
#define FACE_BOTTOM 3
#define FACE_WEIGHT 4
/* Face landmarks indices */
#define LEFT_EYE_X 0
#define LEFT_EYE_Y 1
#define RIGHT_EYE_X 2
#define RIGHT_EYE_Y 3
#define MOUTH_X 4
#define MOUTH_Y 5
#define TOTAL_LANDMARK_INDICES 6
// Max preferred zoom
#define MAX_PREFERRED_ZOOM_RATIO 7.0
// Whether to check for the GPU stride padding, or use the default
//#define CHECK_GPU_PIXEL_ALIGNMENT
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
extern pthread_mutex_t gCamLock;
volatile uint32_t gCamHal3LogLevel = 1;
extern uint8_t gNumCameraSessions;
// Note that this doesn't support concurrent front and back cameras (b/35960155).
// The following Easel related variables must be protected by gHdrPlusClientLock.
std::unique_ptr<EaselManagerClient> gEaselManagerClient;
bool EaselManagerClientOpened = false; // Whether gEaselManagerClient is opened.
int32_t gActiveEaselClient = 0; // The number of active cameras on Easel.
std::unique_ptr<HdrPlusClient> gHdrPlusClient = nullptr;
bool gHdrPlusClientOpening = false; // Whether the HDR+ client is currently being opened.
std::condition_variable gHdrPlusClientOpenCond; // Used to synchronize HDR+ client opening.
bool gEaselProfilingEnabled = false; // Whether Easel profiling is enabled.
bool gExposeEnableZslKey = false; // Whether the HAL makes android.control.enableZsl available.
// Whether Easel is in bypass-only mode. If true, Easel HDR+ won't be enabled.
bool gEaselBypassOnly;
std::mutex gHdrPlusClientLock; // Protects the Easel-related variables above.
const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
{"On", CAM_CDS_MODE_ON},
{"Off", CAM_CDS_MODE_OFF},
{"Auto",CAM_CDS_MODE_AUTO}
};
const QCamera3HardwareInterface::QCameraMap<
camera_metadata_enum_android_video_hdr_mode_t,
cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
{ QCAMERA3_VIDEO_HDR_MODE_OFF, CAM_VIDEO_HDR_MODE_OFF },
{ QCAMERA3_VIDEO_HDR_MODE_ON, CAM_VIDEO_HDR_MODE_ON }
};
const QCamera3HardwareInterface::QCameraMap<
camera_metadata_enum_android_binning_correction_mode_t,
cam_binning_correction_mode_t> QCamera3HardwareInterface::BINNING_CORRECTION_MODES_MAP[] = {
{ QCAMERA3_BINNING_CORRECTION_MODE_OFF, CAM_BINNING_CORRECTION_MODE_OFF },
{ QCAMERA3_BINNING_CORRECTION_MODE_ON, CAM_BINNING_CORRECTION_MODE_ON }
};
const QCamera3HardwareInterface::QCameraMap<
camera_metadata_enum_android_ir_mode_t,
cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
{QCAMERA3_IR_MODE_OFF, CAM_IR_MODE_OFF},
{QCAMERA3_IR_MODE_ON, CAM_IR_MODE_ON},
{QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
};
const QCamera3HardwareInterface::QCameraMap<
camera_metadata_enum_android_control_effect_mode_t,
cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
{ ANDROID_CONTROL_EFFECT_MODE_OFF, CAM_EFFECT_MODE_OFF },
{ ANDROID_CONTROL_EFFECT_MODE_MONO, CAM_EFFECT_MODE_MONO },
{ ANDROID_CONTROL_EFFECT_MODE_NEGATIVE, CAM_EFFECT_MODE_NEGATIVE },
{ ANDROID_CONTROL_EFFECT_MODE_SOLARIZE, CAM_EFFECT_MODE_SOLARIZE },
{ ANDROID_CONTROL_EFFECT_MODE_SEPIA, CAM_EFFECT_MODE_SEPIA },
{ ANDROID_CONTROL_EFFECT_MODE_POSTERIZE, CAM_EFFECT_MODE_POSTERIZE },
{ ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
{ ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
{ ANDROID_CONTROL_EFFECT_MODE_AQUA, CAM_EFFECT_MODE_AQUA }
};
const QCamera3HardwareInterface::QCameraMap<
camera_metadata_enum_android_control_awb_mode_t,
cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
{ ANDROID_CONTROL_AWB_MODE_OFF, CAM_WB_MODE_OFF },
{ ANDROID_CONTROL_AWB_MODE_AUTO, CAM_WB_MODE_AUTO },
{ ANDROID_CONTROL_AWB_MODE_INCANDESCENT, CAM_WB_MODE_INCANDESCENT },
{ ANDROID_CONTROL_AWB_MODE_FLUORESCENT, CAM_WB_MODE_FLUORESCENT },
{ ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
{ ANDROID_CONTROL_AWB_MODE_DAYLIGHT, CAM_WB_MODE_DAYLIGHT },
{ ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
{ ANDROID_CONTROL_AWB_MODE_TWILIGHT, CAM_WB_MODE_TWILIGHT },
{ ANDROID_CONTROL_AWB_MODE_SHADE, CAM_WB_MODE_SHADE }
};
const QCamera3HardwareInterface::QCameraMap<
camera_metadata_enum_android_control_scene_mode_t,
cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
{ ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY, CAM_SCENE_MODE_FACE_PRIORITY },
{ ANDROID_CONTROL_SCENE_MODE_ACTION, CAM_SCENE_MODE_ACTION },
{ ANDROID_CONTROL_SCENE_MODE_PORTRAIT, CAM_SCENE_MODE_PORTRAIT },
{ ANDROID_CONTROL_SCENE_MODE_LANDSCAPE, CAM_SCENE_MODE_LANDSCAPE },
{ ANDROID_CONTROL_SCENE_MODE_NIGHT, CAM_SCENE_MODE_NIGHT },
{ ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
{ ANDROID_CONTROL_SCENE_MODE_THEATRE, CAM_SCENE_MODE_THEATRE },
{ ANDROID_CONTROL_SCENE_MODE_BEACH, CAM_SCENE_MODE_BEACH },
{ ANDROID_CONTROL_SCENE_MODE_SNOW, CAM_SCENE_MODE_SNOW },
{ ANDROID_CONTROL_SCENE_MODE_SUNSET, CAM_SCENE_MODE_SUNSET },
{ ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO, CAM_SCENE_MODE_ANTISHAKE },
{ ANDROID_CONTROL_SCENE_MODE_FIREWORKS, CAM_SCENE_MODE_FIREWORKS },
{ ANDROID_CONTROL_SCENE_MODE_SPORTS, CAM_SCENE_MODE_SPORTS },
{ ANDROID_CONTROL_SCENE_MODE_PARTY, CAM_SCENE_MODE_PARTY },
{ ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT, CAM_SCENE_MODE_CANDLELIGHT },
{ ANDROID_CONTROL_SCENE_MODE_BARCODE, CAM_SCENE_MODE_BARCODE},
{ ANDROID_CONTROL_SCENE_MODE_HDR, CAM_SCENE_MODE_HDR}
};
const QCamera3HardwareInterface::QCameraMap<
camera_metadata_enum_android_control_af_mode_t,
cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
{ ANDROID_CONTROL_AF_MODE_OFF, CAM_FOCUS_MODE_OFF },
{ ANDROID_CONTROL_AF_MODE_OFF, CAM_FOCUS_MODE_FIXED },
{ ANDROID_CONTROL_AF_MODE_AUTO, CAM_FOCUS_MODE_AUTO },
{ ANDROID_CONTROL_AF_MODE_MACRO, CAM_FOCUS_MODE_MACRO },
{ ANDROID_CONTROL_AF_MODE_EDOF, CAM_FOCUS_MODE_EDOF },
{ ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
{ ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO, CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};
const QCamera3HardwareInterface::QCameraMap<
camera_metadata_enum_android_color_correction_aberration_mode_t,
cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
{ ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
CAM_COLOR_CORRECTION_ABERRATION_OFF },
{ ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
CAM_COLOR_CORRECTION_ABERRATION_FAST },
{ ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};
const QCamera3HardwareInterface::QCameraMap<
camera_metadata_enum_android_control_ae_antibanding_mode_t,
cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
{ ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF, CAM_ANTIBANDING_MODE_OFF },
{ ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
{ ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
{ ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};
const QCamera3HardwareInterface::QCameraMap<
camera_metadata_enum_android_control_ae_mode_t,
cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
{ ANDROID_CONTROL_AE_MODE_OFF, CAM_FLASH_MODE_OFF },
{ ANDROID_CONTROL_AE_MODE_ON, CAM_FLASH_MODE_OFF },
{ ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH, CAM_FLASH_MODE_AUTO},
{ ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH, CAM_FLASH_MODE_ON },
{ ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO},
{ ANDROID_CONTROL_AE_MODE_ON_EXTERNAL_FLASH, CAM_FLASH_MODE_OFF }
};
const QCamera3HardwareInterface::QCameraMap<
camera_metadata_enum_android_flash_mode_t,
cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
{ ANDROID_FLASH_MODE_OFF, CAM_FLASH_MODE_OFF },
{ ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
{ ANDROID_FLASH_MODE_TORCH, CAM_FLASH_MODE_TORCH }
};
const QCamera3HardwareInterface::QCameraMap<
camera_metadata_enum_android_statistics_face_detect_mode_t,
cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
{ ANDROID_STATISTICS_FACE_DETECT_MODE_OFF, CAM_FACE_DETECT_MODE_OFF },
{ ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
{ ANDROID_STATISTICS_FACE_DETECT_MODE_FULL, CAM_FACE_DETECT_MODE_FULL }
};
const QCamera3HardwareInterface::QCameraMap<
camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
{ ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
CAM_FOCUS_UNCALIBRATED },
{ ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
CAM_FOCUS_APPROXIMATE },
{ ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
CAM_FOCUS_CALIBRATED }
};
const QCamera3HardwareInterface::QCameraMap<
camera_metadata_enum_android_lens_state_t,
cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
{ ANDROID_LENS_STATE_STATIONARY, CAM_AF_LENS_STATE_STATIONARY},
{ ANDROID_LENS_STATE_MOVING, CAM_AF_LENS_STATE_MOVING}
};
const int32_t available_thumbnail_sizes[] = {0, 0,
176, 144,
240, 144,
256, 144,
240, 160,
256, 154,
240, 240,
320, 240};
const QCamera3HardwareInterface::QCameraMap<
camera_metadata_enum_android_sensor_test_pattern_mode_t,
cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
{ ANDROID_SENSOR_TEST_PATTERN_MODE_OFF, CAM_TEST_PATTERN_OFF },
{ ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR, CAM_TEST_PATTERN_SOLID_COLOR },
{ ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS, CAM_TEST_PATTERN_COLOR_BARS },
{ ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
{ ANDROID_SENSOR_TEST_PATTERN_MODE_PN9, CAM_TEST_PATTERN_PN9 },
{ ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1, CAM_TEST_PATTERN_CUSTOM1},
};
/* Since there is no mapping for all the options, some Android enums are not listed.
 * Also, the order of this list is important: while mapping from HAL to Android, the
 * lookup traverses from lower to higher index, which means that for HAL values that
 * map to multiple Android values, the first match found is selected.
 */
const QCamera3HardwareInterface::QCameraMap<
camera_metadata_enum_android_sensor_reference_illuminant1_t,
cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
{ ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
{ ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
{ ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
{ ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
{ ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
{ ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
{ ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
{ ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
{ ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
{ ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
{ ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
{ ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
{ ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
{ ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
{ ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
{ ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
};
const QCamera3HardwareInterface::QCameraMap<
int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
{ 60, CAM_HFR_MODE_60FPS},
{ 90, CAM_HFR_MODE_90FPS},
{ 120, CAM_HFR_MODE_120FPS},
{ 150, CAM_HFR_MODE_150FPS},
{ 180, CAM_HFR_MODE_180FPS},
{ 210, CAM_HFR_MODE_210FPS},
{ 240, CAM_HFR_MODE_240FPS},
{ 480, CAM_HFR_MODE_480FPS},
};
const QCamera3HardwareInterface::QCameraMap<
qcamera3_ext_instant_aec_mode_t,
cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
{ QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE, CAM_AEC_NORMAL_CONVERGENCE},
{ QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
{ QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE, CAM_AEC_FAST_CONVERGENCE},
};
const QCamera3HardwareInterface::QCameraMap<
qcamera3_ext_exposure_meter_mode_t,
cam_auto_exposure_mode_type> QCamera3HardwareInterface::AEC_MODES_MAP[] = {
{ QCAMERA3_EXP_METER_MODE_FRAME_AVERAGE, CAM_AEC_MODE_FRAME_AVERAGE },
{ QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED, CAM_AEC_MODE_CENTER_WEIGHTED },
{ QCAMERA3_EXP_METER_MODE_SPOT_METERING, CAM_AEC_MODE_SPOT_METERING },
{ QCAMERA3_EXP_METER_MODE_SMART_METERING, CAM_AEC_MODE_SMART_METERING },
{ QCAMERA3_EXP_METER_MODE_USER_METERING, CAM_AEC_MODE_USER_METERING },
{ QCAMERA3_EXP_METER_MODE_SPOT_METERING_ADV, CAM_AEC_MODE_SPOT_METERING_ADV },
{ QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED_ADV, CAM_AEC_MODE_CENTER_WEIGHTED_ADV },
};
const QCamera3HardwareInterface::QCameraMap<
qcamera3_ext_iso_mode_t,
cam_iso_mode_type> QCamera3HardwareInterface::ISO_MODES_MAP[] = {
{ QCAMERA3_ISO_MODE_AUTO, CAM_ISO_MODE_AUTO },
{ QCAMERA3_ISO_MODE_DEBLUR, CAM_ISO_MODE_DEBLUR },
{ QCAMERA3_ISO_MODE_100, CAM_ISO_MODE_100 },
{ QCAMERA3_ISO_MODE_200, CAM_ISO_MODE_200 },
{ QCAMERA3_ISO_MODE_400, CAM_ISO_MODE_400 },
{ QCAMERA3_ISO_MODE_800, CAM_ISO_MODE_800 },
{ QCAMERA3_ISO_MODE_1600, CAM_ISO_MODE_1600 },
{ QCAMERA3_ISO_MODE_3200, CAM_ISO_MODE_3200 },
};
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
.initialize = QCamera3HardwareInterface::initialize,
.configure_streams = QCamera3HardwareInterface::configure_streams,
.register_stream_buffers = NULL,
.construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
.process_capture_request = QCamera3HardwareInterface::process_capture_request,
.get_metadata_vendor_tag_ops = NULL,
.dump = QCamera3HardwareInterface::dump,
.flush = QCamera3HardwareInterface::flush,
.reserved = {0},
};
typedef std::tuple<int32_t, int32_t, int32_t, int32_t> config_entry;
bool operator == (const config_entry & lhs, const config_entry & rhs) {
return (std::get<0> (lhs) == std::get<0> (rhs)) &&
(std::get<1> (lhs) == std::get<1> (rhs)) &&
(std::get<2> (lhs) == std::get<2> (rhs)) &&
(std::get<3> (lhs) == std::get<3> (rhs));
}
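// Hash functor for config_entry: combines the four tuple fields using the
// classic 31-multiplier polynomial hash.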
struct ConfigEntryHash {
std::size_t operator() (config_entry const& entry) const {
size_t result = 1;
size_t hashValue = 31;
result = hashValue*result + std::hash<int> {} (std::get<0>(entry));
result = hashValue*result + std::hash<int> {} (std::get<1>(entry));
result = hashValue*result + std::hash<int> {} (std::get<2>(entry));
result = hashValue*result + std::hash<int> {} (std::get<3>(entry));
return result;
}
};
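// Minimal usage sketch (illustrative only; the map and variable names below
// are hypothetical):
//   std::unordered_map<config_entry, uint32_t, ConfigEntryHash> streamIndexMap;
//   streamIndexMap[config_entry{format, width, height, usage}] = streamIdx;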
// Initialize session IDs to an invalid sentinel value (reset to this on close as well).
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};
static inline void logEaselEvent(const char *tag, const char *event) {
if (CC_UNLIKELY(gEaselProfilingEnabled)) {
struct timespec ts = {};
static const int64_t kMsPerSec = 1000;
static const int64_t kNsPerMs = 1000000;
status_t res = clock_gettime(CLOCK_BOOTTIME, &ts);
if (res != OK) {
ALOGE("[%s] Failed to get boot time for <%s>.", tag, event);
} else {
int64_t now = static_cast<int64_t>(ts.tv_sec) * kMsPerSec + ts.tv_nsec / kNsPerMs;
ALOGI("[%s] %s at %" PRId64 " ms", tag, event, now);
}
}
}
/*===========================================================================
* FUNCTION : QCamera3HardwareInterface
*
* DESCRIPTION: constructor of QCamera3HardwareInterface
*
* PARAMETERS :
* @cameraId : camera ID
*
* RETURN : none
*==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
const camera_module_callbacks_t *callbacks)
: mCameraId(cameraId),
mCameraHandle(NULL),
mCameraInitialized(false),
mCallbackOps(NULL),
mMetadataChannel(NULL),
mPictureChannel(NULL),
mRawChannel(NULL),
mSupportChannel(NULL),
mAnalysisChannel(NULL),
mRawDumpChannel(NULL),
mHdrPlusRawSrcChannel(NULL),
mDummyBatchChannel(NULL),
mDepthChannel(NULL),
mDepthCloudMode(CAM_PD_DATA_SKIP),
mPerfLockMgr(),
mChannelHandle(0),
mFirstConfiguration(true),
mFlush(false),
mFlushPerf(false),
mParamHeap(NULL),
mParameters(NULL),
mPrevParameters(NULL),
m_ISTypeVideo(IS_TYPE_NONE),
m_bIsVideo(false),
m_bIs4KVideo(false),
m_bEisSupportedSize(false),
m_bEisEnable(false),
m_bEis3PropertyEnabled(false),
m_bAVTimerEnabled(false),
m_MobicatMask(0),
mShutterDispatcher(this),
mOutputBufferDispatcher(this),
mMinProcessedFrameDuration(0),
mMinJpegFrameDuration(0),
mMinRawFrameDuration(0),
mExpectedFrameDuration(0),
mExpectedInflightDuration(0),
mMetaFrameCount(0U),
mUpdateDebugLevel(false),
mCallbacks(callbacks),
mCaptureIntent(0),
mCacMode(0),
/* DevCamDebug metadata internal control */
mDevCamDebugMetaEnable(0),
/* DevCamDebug metadata end */
mBatchSize(0),
mToBeQueuedVidBufs(0),
mHFRVideoFps(DEFAULT_VIDEO_FPS),
mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
mStreamConfig(false),
mCommon(),
mFirstFrameNumberInBatch(0),
mNeedSensorRestart(false),
mPreviewStarted(false),
mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
mPDSupported(false),
mPDIndex(0),
mInstantAEC(false),
mResetInstantAEC(false),
mInstantAECSettledFrameNumber(0),
mAecSkipDisplayFrameBound(0),
mInstantAecFrameIdxCount(0),
mLastRequestedLensShadingMapMode(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF),
mLastRequestedFaceDetectMode(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF),
mLastRequestedOisDataMode(ANDROID_STATISTICS_OIS_DATA_MODE_OFF),
mLastRequestedZoomRatio(1.0f),
mCurrFeatureState(0),
mLdafCalibExist(false),
mLastCustIntentFrmNum(-1),
mFirstMetadataCallback(true),
mState(CLOSED),
mIsDeviceLinked(false),
mIsMainCamera(true),
mLinkedCameraId(0),
m_pDualCamCmdHeap(NULL),
m_pDualCamCmdPtr(NULL),
mHdrPlusModeEnabled(false),
mZslEnabled(false),
mEaselMipiStarted(false),
mIsApInputUsedForHdrPlus(false),
mFirstPreviewIntentSeen(false),
m_bSensorHDREnabled(false),
mAfTrigger(),
mSceneDistance(-1),
mLastFocusDistance(0.0)
{
getLogLevel();
mCommon.init(gCamCapability[cameraId]);
mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
#ifndef USE_HAL_3_3
mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_5;
#else
mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
mCameraDevice.common.close = close_camera_device;
mCameraDevice.ops = &mCameraOps;
mCameraDevice.priv = this;
gCamCapability[cameraId]->version = CAM_HAL_V3;
// TODO: hardcode for now until mctl adds support for min_num_pp_bufs
//TBD - Verify whether this hardcoding is needed; check by printing whether mctl fills this to 3
gCamCapability[cameraId]->min_num_pp_bufs = 3;
PTHREAD_COND_INIT(&mBuffersCond);
PTHREAD_COND_INIT(&mRequestCond);
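// Condition variables (used with mMutex) for buffer and request signaling.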
mPendingLiveRequest = 0;
mCurrentRequestId = -1;
pthread_mutex_init(&mMutex, NULL);
for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
mDefaultMetadata[i] = NULL;
// Getting system props of different kinds
char prop[PROPERTY_VALUE_MAX];
memset(prop, 0, sizeof(prop));
property_get("persist.camera.raw.dump", prop, "0");
mEnableRawDump = atoi(prop);
property_get("persist.camera.hal3.force.hdr", prop, "0");
mForceHdrSnapshot = atoi(prop);
if (mEnableRawDump)
LOGD("Raw dump from Camera HAL enabled");
memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
memset(mLdafCalib, 0, sizeof(mLdafCalib));
memset(mEaselFwVersion, 0, sizeof(mEaselFwVersion));
mEaselFwUpdated = false;
memset(prop, 0, sizeof(prop));
property_get("persist.camera.tnr.preview", prop, "0");
m_bTnrPreview = (uint8_t)atoi(prop);
memset(prop, 0, sizeof(prop));
property_get("persist.camera.swtnr.preview", prop, "1");
m_bSwTnrPreview = (uint8_t)atoi(prop);
memset(prop, 0, sizeof(prop));
property_get("persist.camera.tnr.video", prop, "1");
m_bTnrVideo = (uint8_t)atoi(prop);
memset(prop, 0, sizeof(prop));
property_get("persist.camera.avtimer.debug", prop, "0");
m_debug_avtimer = (uint8_t)atoi(prop);
LOGI("AV timer enabled: %d", m_debug_avtimer);
memset(prop, 0, sizeof(prop));
property_get("persist.camera.cacmode.disable", prop, "0");
m_cacModeDisabled = (uint8_t)atoi(prop);
m_bForceInfinityAf = property_get_bool("persist.camera.af.infinity", 0);
m_MobicatMask = (uint8_t)property_get_int32("persist.camera.mobicat", 0);
//Load and read GPU library.
lib_surface_utils = NULL;
LINK_get_surface_pixel_alignment = NULL;
mSurfaceStridePadding = CAM_PAD_TO_64;
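// If enabled, query the Adreno GPU library for its required pixel alignment;
// otherwise keep the default 64-pixel stride padding set above.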
#ifdef CHECK_GPU_PIXEL_ALIGNMENT
lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
if (lib_surface_utils) {
*(void **)&LINK_get_surface_pixel_alignment =
dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
if (LINK_get_surface_pixel_alignment) {
mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
}
dlclose(lib_surface_utils);
}
#endif
mPDIndex = getPDStatIndex(gCamCapability[cameraId]);
mPDSupported = (0 <= mPDIndex);
m60HzZone = is60HzZone();
}
/*===========================================================================
* FUNCTION : ~QCamera3HardwareInterface
*
* DESCRIPTION: destructor of QCamera3HardwareInterface
*
* PARAMETERS : none
*
* RETURN : none
*==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
LOGD("E");
int32_t rc = 0;
// Clean up the Easel error future first to avoid an Easel error arriving during the destructor.
cleanupEaselErrorFuture();
// Disable power hint and enable the perf lock for close camera
mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);
// Close HDR+ client first before destroying HAL.
{
std::unique_lock<std::mutex> l(gHdrPlusClientLock);
finishHdrPlusClientOpeningLocked(l);
closeHdrPlusClientLocked();
}
// unlink of dualcam during close camera
if (mIsDeviceLinked) {
cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
&m_pDualCamCmdPtr->bundle_info;
m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
pthread_mutex_lock(&gCamLock);
if (mIsMainCamera == 1) {
m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
// related session id should be session id of linked session
m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
} else {
m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
}
m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
pthread_mutex_unlock(&gCamLock);
rc = mCameraHandle->ops->set_dual_cam_cmd(
mCameraHandle->camera_handle);
if (rc < 0) {
LOGE("Dualcam: Unlink failed, but still proceed to close");
}
}
/* We need to stop all streams before deleting any stream */
if (mRawDumpChannel) {
mRawDumpChannel->stop();
}
if (mHdrPlusRawSrcChannel) {
mHdrPlusRawSrcChannel->stop();
}
// NOTE: 'camera3_stream_t *' objects are already freed at
// this stage by the framework
for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
it != mStreamInfo.end(); it++) {
QCamera3ProcessingChannel *channel = (*it)->channel;
if (channel) {
channel->stop();
}
}
if (mSupportChannel)
mSupportChannel->stop();
if (mAnalysisChannel) {
mAnalysisChannel->stop();
}
if (mMetadataChannel) {
mMetadataChannel->stop();
}
if (mChannelHandle) {
stopChannelLocked(/*stop_immediately*/false);
}
for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
it != mStreamInfo.end(); it++) {
QCamera3ProcessingChannel *channel = (*it)->channel;
if (channel)
delete channel;
free (*it);
}
if (mSupportChannel) {
delete mSupportChannel;
mSupportChannel = NULL;
}
if (mAnalysisChannel) {
delete mAnalysisChannel;
mAnalysisChannel = NULL;
}
if (mRawDumpChannel) {
delete mRawDumpChannel;
mRawDumpChannel = NULL;
}
if (mHdrPlusRawSrcChannel) {
delete mHdrPlusRawSrcChannel;
mHdrPlusRawSrcChannel = NULL;
}
if (mDummyBatchChannel) {
delete mDummyBatchChannel;
mDummyBatchChannel = NULL;
}
mPictureChannel = NULL;
mDepthChannel = NULL;
if (mMetadataChannel) {
delete mMetadataChannel;
mMetadataChannel = NULL;
}
/* Clean up all channels */
if (mCameraInitialized) {
if(!mFirstConfiguration){
//send the last unconfigure
cam_stream_size_info_t stream_config_info;
memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
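// max_buffers: 0 for 4K video, MAX_VIDEO_BUFFERS for EIS3 video,
// MAX_INFLIGHT_REQUESTS otherwise.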
stream_config_info.buffer_info.max_buffers =
m_bIs4KVideo ? 0 :
m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
clear_metadata_buffer(mParameters);
ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
stream_config_info);
int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
if (rc < 0) {
LOGE("set_parms failed for unconfigure");
}
}
deinitParameters();
}
if (mChannelHandle) {
mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
mChannelHandle);
LOGH("deleting channel %d", mChannelHandle);
mChannelHandle = 0;
}
if (mState != CLOSED)
closeCamera();
for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
req.mPendingBufferList.clear();
}
mPendingBuffersMap.mPendingBuffersInRequest.clear();
for (pendingRequestIterator i = mPendingRequestsList.begin();
i != mPendingRequestsList.end();) {
i = erasePendingRequest(i);
}
for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
if (mDefaultMetadata[i])
free_camera_metadata(mDefaultMetadata[i]);
mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);
pthread_cond_destroy(&mRequestCond);
pthread_cond_destroy(&mBuffersCond);
pthread_mutex_destroy(&mMutex);
LOGD("X");
}
/*===========================================================================
* FUNCTION : erasePendingRequest
*
* DESCRIPTION: function to erase a desired pending request after freeing any
* allocated memory
*
* PARAMETERS :
* @i : iterator pointing to pending request to be erased
*
* RETURN : iterator pointing to the next request
*==========================================================================*/
QCamera3HardwareInterface::pendingRequestIterator
QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
{
if (i->input_buffer != NULL) {
free(i->input_buffer);
i->input_buffer = NULL;
}
if (i->settings != NULL)
free_camera_metadata((camera_metadata_t*)i->settings);
mExpectedInflightDuration -= i->expectedFrameDuration;
if (mExpectedInflightDuration < 0) {
LOGE("Negative expected in-flight duration!");
mExpectedInflightDuration = 0;
}
return mPendingRequestsList.erase(i);
}
/*===========================================================================
* FUNCTION : camEvtHandle
*
* DESCRIPTION: Function registered to mm-camera-interface to handle events
*
* PARAMETERS :
* @camera_handle : interface layer camera handle
* @evt : ptr to event
* @user_data : user data ptr
*
* RETURN : none
*==========================================================================*/
void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
mm_camera_event_t *evt,
void *user_data)
{
QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
if (obj && evt) {
switch(evt->server_event_type) {
case CAM_EVENT_TYPE_DAEMON_DIED:
pthread_mutex_lock(&obj->mMutex);
obj->mState = ERROR;
pthread_mutex_unlock(&obj->mMutex);
LOGE("Fatal, camera daemon died");
break;
case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
LOGD("HAL got request pull from Daemon");
pthread_mutex_lock(&obj->mMutex);
obj->mWokenUpByDaemon = true;
obj->unblockRequestIfNecessary();
pthread_mutex_unlock(&obj->mMutex);
break;
default:
LOGW("Warning: Unhandled event %d",
evt->server_event_type);
break;
}
} else {
LOGE("NULL user_data/evt");
}
}
/*===========================================================================
* FUNCTION : openCamera
*
* DESCRIPTION: open camera
*
* PARAMETERS :
* @hw_device : double ptr for camera device struct
*
* RETURN : int32_t type of status
* NO_ERROR -- success
* non-zero failure code
*==========================================================================*/
int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
{
int rc = 0;
if (mState != CLOSED) {
*hw_device = NULL;
return PERMISSION_DENIED;
}
logEaselEvent("EASEL_STARTUP_LATENCY", "Camera Open");
mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
mCameraId);
if (mCameraHandle) {
LOGE("Failure: Camera already opened");
return ALREADY_EXISTS;
}
{
std::unique_lock<std::mutex> l(gHdrPlusClientLock);
if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice()) {
logEaselEvent("EASEL_STARTUP_LATENCY", "Resume");
if (gActiveEaselClient == 0) {
rc = gEaselManagerClient->resume(this);
if (rc != 0) {
ALOGE("%s: Resuming Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
return rc;
}
mEaselFwUpdated = false;
}
gActiveEaselClient++;
mQCamera3HdrPlusListenerThread = new QCamera3HdrPlusListenerThread(this);
rc = mQCamera3HdrPlusListenerThread->run("QCamera3HdrPlusListenerThread");
if (rc != OK) {
ALOGE("%s: Starting HDR+ client listener thread failed: %s (%d)", __FUNCTION__,
strerror(-rc), rc);
return rc;
}
}
}
rc = openCamera();
if (rc == 0) {
*hw_device = &mCameraDevice.common;
} else {
*hw_device = NULL;
// Suspend Easel because opening camera failed.
{
std::unique_lock<std::mutex> l(gHdrPlusClientLock);
if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice()) {
if (gActiveEaselClient == 1) {
status_t suspendErr = gEaselManagerClient->suspend();
if (suspendErr != 0) {
ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__,
strerror(-suspendErr), suspendErr);
}
}
gActiveEaselClient--;
}
if (mQCamera3HdrPlusListenerThread != nullptr) {
mQCamera3HdrPlusListenerThread->requestExit();
mQCamera3HdrPlusListenerThread->join();
mQCamera3HdrPlusListenerThread = nullptr;
}
}
}
LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
mCameraId, rc);
if (rc == NO_ERROR) {
mState = OPENED;
}
return rc;
}
/*===========================================================================
* FUNCTION : openCamera
*
* DESCRIPTION: open camera
*
* PARAMETERS : none
*
* RETURN : int32_t type of status
* NO_ERROR -- success
* non-zero failure code
*==========================================================================*/
int QCamera3HardwareInterface::openCamera()
{
int rc = 0;
char value[PROPERTY_VALUE_MAX];
KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);
rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
if (rc < 0) {
LOGE("Failed to reserve flash for camera id: %d",
mCameraId);
return UNKNOWN_ERROR;
}
rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
if (rc) {
LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
return rc;
}
if (!mCameraHandle) {
LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
return -ENODEV;
}
rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
camEvtHandle, (void *)this);
if (rc < 0) {
LOGE("Error, failed to register event callback");
/* Not closing camera here since it is already handled in destructor */
return FAILED_TRANSACTION;
}
mExifParams.debug_params =
(mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
if (mExifParams.debug_params) {
memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
} else {
LOGE("Out of Memory. Allocation failed for 3A debug exif params");
return NO_MEMORY;
}
mFirstConfiguration = true;
//Notify display HAL that a camera session is active.
//Avoid doing so during bootup: camera service may open/close cameras at boot
//time during its initialization, and display service internally waits for
//camera service to initialize first when calling this display API, which can
//result in a deadlock. Since boot-time camera open/close calls are made only
//to fetch capabilities, this display BW optimization is not needed then.
//Use the "service.bootanim.exit" property to determine boot status.
property_get("service.bootanim.exit", value, "0");
if (atoi(value) == 1) {
pthread_mutex_lock(&gCamLock);
if (gNumCameraSessions++ == 0) {
setCameraLaunchStatus(true);
}
pthread_mutex_unlock(&gCamLock);
}
//fill the session id needed while linking dual cam
pthread_mutex_lock(&gCamLock);
rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
&sessionId[mCameraId]);
pthread_mutex_unlock(&gCamLock);
if (rc < 0) {
LOGE("Error, failed to get sessiion id");
return UNKNOWN_ERROR;
} else {
//Allocate related cam sync buffer
//this is needed for the payload that goes along with bundling cmd for related
//camera use cases
m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
if(rc != OK) {
rc = NO_MEMORY;
LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
return NO_MEMORY;
}
//Map memory for related cam sync buffer
rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
m_pDualCamCmdHeap->getFd(0),
sizeof(cam_dual_camera_cmd_info_t),
m_pDualCamCmdHeap->getPtr(0));
if(rc < 0) {
LOGE("Dualcam: failed to map Related cam sync buffer");
rc = FAILED_TRANSACTION;
return NO_MEMORY;
}
m_pDualCamCmdPtr =
(cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
}
LOGH("mCameraId=%d",mCameraId);
return NO_ERROR;
}
/*===========================================================================
* FUNCTION : closeCamera
*
* DESCRIPTION: close camera
*
* PARAMETERS : none
*
* RETURN : int32_t type of status
* NO_ERROR -- success
* non-zero failure code
*==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
int rc = NO_ERROR;
char value[PROPERTY_VALUE_MAX];
LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
mCameraId);
// unmap memory for related cam sync buffer
mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
if (NULL != m_pDualCamCmdHeap) {
m_pDualCamCmdHeap->deallocate();
delete m_pDualCamCmdHeap;
m_pDualCamCmdHeap = NULL;
m_pDualCamCmdPtr = NULL;
}
rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
mCameraHandle = NULL;
//reset session id to some invalid id
pthread_mutex_lock(&gCamLock);
sessionId[mCameraId] = 0xDEADBEEF;
pthread_mutex_unlock(&gCamLock);
//Notify display HAL that there is no active camera session
//but avoid calling the same during bootup. Refer to openCamera
//for more details.
property_get("service.bootanim.exit", value, "0");
if (atoi(value) == 1) {
pthread_mutex_lock(&gCamLock);
if (--gNumCameraSessions == 0) {
setCameraLaunchStatus(false);
}
pthread_mutex_unlock(&gCamLock);
}
if (mExifParams.debug_params) {
free(mExifParams.debug_params);
mExifParams.debug_params = NULL;
}
if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
LOGW("Failed to release flash for camera id: %d",
mCameraId);
}
mState = CLOSED;
LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
mCameraId, rc);
{
std::unique_lock<std::mutex> l(gHdrPlusClientLock);
if (EaselManagerClientOpened) {
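// Suspend Easel only when the last active client is closing.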
if (gActiveEaselClient == 1) {
rc = gEaselManagerClient->suspend();
if (rc != 0) {
ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
}
}
gActiveEaselClient--;
}
if (mQCamera3HdrPlusListenerThread != nullptr) {
mQCamera3HdrPlusListenerThread->requestExit();
mQCamera3HdrPlusListenerThread->join();
mQCamera3HdrPlusListenerThread = nullptr;
}
}
return rc;
}
/*===========================================================================
* FUNCTION : initialize
*
* DESCRIPTION: Initialize frameworks callback functions
*
* PARAMETERS :
* @callback_ops : callback function to frameworks
*
* RETURN :
*
*==========================================================================*/
int QCamera3HardwareInterface::initialize(
const struct camera3_callback_ops *callback_ops)
{
ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
int rc;
LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
pthread_mutex_lock(&mMutex);
// Validate current state
switch (mState) {
case OPENED:
/* valid state */
break;
default:
LOGE("Invalid state %d", mState);
rc = -ENODEV;
goto err1;
}
rc = initParameters();
if (rc < 0) {
LOGE("initParamters failed %d", rc);
goto err1;
}
mCallbackOps = callback_ops;
mChannelHandle = mCameraHandle->ops->add_channel(
mCameraHandle->camera_handle, NULL, NULL, this);
if (mChannelHandle == 0) {
LOGE("add_channel failed");
rc = -ENOMEM;
pthread_mutex_unlock(&mMutex);
return rc;
}
pthread_mutex_unlock(&mMutex);
mCameraInitialized = true;
mState = INITIALIZED;
LOGI("X");
return 0;
err1:
pthread_mutex_unlock(&mMutex);
return rc;
}
/*===========================================================================
* FUNCTION : validateStreamDimensions
*
* DESCRIPTION: Check whether the requested stream configurations are among those advertised
*
* PARAMETERS :
* @cameraId : cameraId
* @stream_list : streams to be configured
*
* RETURN :
*
*==========================================================================*/
int QCamera3HardwareInterface::validateStreamDimensions(uint32_t cameraId,
camera3_stream_configuration_t *streamList)
{
int rc = NO_ERROR;
size_t count = 0;
uint32_t depthWidth = 0;
uint32_t depthHeight = 0;
auto pDIndex = getPDStatIndex(gCamCapability[cameraId]);
bool pDSupported = (0 <= pDIndex);
if (pDSupported) {
depthWidth = gCamCapability[cameraId]->raw_meta_dim[pDIndex].width;
depthHeight = gCamCapability[cameraId]->raw_meta_dim[pDIndex].height;
}
camera3_stream_t *inputStream = NULL;
/*
 * Loop through all streams to find the input stream, if one exists.
 */
for (size_t i = 0; i< streamList->num_streams; i++) {
if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
if (inputStream != NULL) {
LOGE("Error, Multiple input streams requested");
return -EINVAL;
}
inputStream = streamList->streams[i];
}
}
/*
* Loop through all streams requested in configuration
* Check if unsupported sizes have been requested on any of them
*/
for (size_t j = 0; j < streamList->num_streams; j++) {
bool sizeFound = false;
camera3_stream_t *newStream = streamList->streams[j];
uint32_t rotatedHeight = newStream->height;
uint32_t rotatedWidth = newStream->width;
if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
(newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
rotatedHeight = newStream->width;
rotatedWidth = newStream->height;
}
/*
 * Sizes differ for each stream format; check against the
 * appropriate table.
 */
switch (newStream->format) {
case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
case HAL_PIXEL_FORMAT_RAW10:
if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
(ANDROID_SCALER_AVAILABLE_FORMATS_RAW16 == newStream->format) &&
pDSupported) {
if ((depthWidth == newStream->width) &&
(depthHeight == newStream->height)) {
sizeFound = true;
}
break;
}
count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
for (size_t i = 0; i < count; i++) {
if ((gCamCapability[cameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
(gCamCapability[cameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
sizeFound = true;
break;
}
}
break;
case HAL_PIXEL_FORMAT_BLOB:
if ((newStream->data_space == HAL_DATASPACE_DEPTH) &&
pDSupported) {
// As per spec, depth cloud should be sample count / 16
uint32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
if ((depthSamplesCount == newStream->width) &&
(1 == newStream->height)) {
sizeFound = true;
}
break;
}
count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
/* Verify set size against generated sizes table */
for (size_t i = 0; i < count; i++) {
if (((int32_t)rotatedWidth ==
gCamCapability[cameraId]->picture_sizes_tbl[i].width) &&
((int32_t)rotatedHeight ==
gCamCapability[cameraId]->picture_sizes_tbl[i].height)) {
sizeFound = true;
break;
}
}
break;
case HAL_PIXEL_FORMAT_Y8:
case HAL_PIXEL_FORMAT_YCbCr_420_888:
case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
default:
if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
|| newStream->stream_type == CAMERA3_STREAM_INPUT
|| IS_USAGE_ZSL(newStream->usage)) {
if (((int32_t)rotatedWidth ==
gCamCapability[cameraId]->active_array_size.width) &&
((int32_t)rotatedHeight ==
gCamCapability[cameraId]->active_array_size.height)) {
sizeFound = true;
break;
}
/* We could potentially break here to enforce that a ZSL stream
 * set from the framework is always full active array size,
 * but it is not clear from the spec whether the framework will
 * always follow that. We also have logic to override to full
 * array size, so keep the logic lenient for now.
 */
}
count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
MAX_SIZES_CNT);
for (size_t i = 0; i < count; i++) {
if (((int32_t)rotatedWidth ==
gCamCapability[cameraId]->picture_sizes_tbl[i].width) &&
((int32_t)rotatedHeight ==
gCamCapability[cameraId]->picture_sizes_tbl[i].height)) {
sizeFound = true;
break;
}
}
break;
} /* End of switch(newStream->format) */
/* We error out even if a single stream has an unsupported size set */
if (!sizeFound) {
LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
rotatedWidth, rotatedHeight, newStream->format,
gCamCapability[cameraId]->active_array_size.width,
gCamCapability[cameraId]->active_array_size.height);
rc = -EINVAL;
break;
}
} /* End of for each stream */
return rc;
}
/*===========================================================================
* FUNCTION : validateUsageFlags
*
* DESCRIPTION: Check if the configuration usage flags map to the same internal format.
*
* PARAMETERS :
* @cameraId : camera id.
* @stream_list : streams to be configured
*
* RETURN :
* NO_ERROR if the usage flags are supported
* error code if usage flags are not supported
*
*==========================================================================*/
int QCamera3HardwareInterface::validateUsageFlags(uint32_t cameraId,
const camera3_stream_configuration_t* streamList)
{
for (size_t j = 0; j < streamList->num_streams; j++) {
const camera3_stream_t *newStream = streamList->streams[j];
if (newStream->format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
(newStream->stream_type != CAMERA3_STREAM_OUTPUT &&
newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL)) {
continue;
}
// Here we only care whether it's EIS3 or not
char is_type_value[PROPERTY_VALUE_MAX];
property_get("persist.camera.is_type", is_type_value, "4");
cam_is_type_t isType = atoi(is_type_value) == IS_TYPE_EIS_3_0 ? IS_TYPE_EIS_3_0 : IS_TYPE_NONE;
if (gCamCapability[cameraId]->position == CAM_POSITION_FRONT ||
streamList->operation_mode ==
CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
isType = IS_TYPE_NONE;
bool isVideo = IS_USAGE_VIDEO(newStream->usage);
bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
bool isZSL = IS_USAGE_ZSL(newStream->usage);
bool forcePreviewUBWC = true;
if (isVideo && !QCameraCommon::isVideoUBWCEnabled()) {
forcePreviewUBWC = false;
}
cam_format_t videoFormat = QCamera3Channel::getStreamDefaultFormat(
CAM_STREAM_TYPE_VIDEO, newStream->width, newStream->height, forcePreviewUBWC, isType);
cam_format_t previewFormat = QCamera3Channel::getStreamDefaultFormat(
CAM_STREAM_TYPE_PREVIEW, newStream->width, newStream->height, forcePreviewUBWC, isType);
cam_format_t zslFormat = QCamera3Channel::getStreamDefaultFormat(
CAM_STREAM_TYPE_SNAPSHOT, newStream->width, newStream->height, forcePreviewUBWC, isType);
// Color space for this camera device is guaranteed to be ITU_R_601_FR.
// So color spaces will always match.
// Check whether underlying formats of shared streams match.
if (isVideo && isPreview && videoFormat != previewFormat) {
LOGE("Combined video and preview usage flag is not supported");
return -EINVAL;
}
if (isPreview && isZSL && previewFormat != zslFormat) {
LOGE("Combined preview and zsl usage flag is not supported");
return -EINVAL;
}
if (isVideo && isZSL && videoFormat != zslFormat) {
LOGE("Combined video and zsl usage flag is not supported");
return -EINVAL;
}
}
return NO_ERROR;
}
/*===========================================================================
* FUNCTION : validateUsageFlagsForEis
*
* DESCRIPTION: Check if the configuration usage flags conflict with EIS
*
* PARAMETERS :
* @bEisEnable : Flag indicating that EIS is enabled.
* @bEisSupportedSize : Flag indicating that there is a preview/video stream
* within the EIS supported size.
* @stream_list : streams to be configured
*
* RETURN :
* NO_ERROR if the usage flags are supported
* error code if usage flags are not supported
*
*==========================================================================*/
int QCamera3HardwareInterface::validateUsageFlagsForEis(bool bEisEnable, bool bEisSupportedSize,
const camera3_stream_configuration_t* streamList)
{
for (size_t j = 0; j < streamList->num_streams; j++) {
const camera3_stream_t *newStream = streamList->streams[j];
bool isVideo = IS_USAGE_VIDEO(newStream->usage);
bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
// Because EIS is "hard-coded" for certain use cases, and the current
// implementation doesn't support shared preview and video on the same
// stream, return failure if EIS is forced on.
if (isPreview && isVideo && bEisEnable && bEisSupportedSize) {
LOGE("Combined video and preview usage flag is not supported due to EIS");
return -EINVAL;
}
}
return NO_ERROR;
}
/*==============================================================================
* FUNCTION : isSupportChannelNeeded
*
* DESCRIPTION: Simple heuristic to determine if a support channel is needed
*
* PARAMETERS :
* @stream_list : streams to be configured
* @stream_config_info : the config info for streams to be configured
*
* RETURN : Boolean true/false decision
*
*==========================================================================*/
bool QCamera3HardwareInterface::isSupportChannelNeeded(
camera3_stream_configuration_t *streamList,
cam_stream_size_info_t stream_config_info)
{
uint32_t i;
bool pprocRequested = false;
/* Check for conditions where PProc pipeline does not have any streams*/
for (i = 0; i < stream_config_info.num_streams; i++) {
if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
pprocRequested = true;
break;
}
}
if (!pprocRequested)
return true;
/* Dummy stream needed if only raw or jpeg streams present */
for (i = 0; i < streamList->num_streams; i++) {
switch(streamList->streams[i]->format) {
case HAL_PIXEL_FORMAT_RAW_OPAQUE:
case HAL_PIXEL_FORMAT_RAW10:
case HAL_PIXEL_FORMAT_RAW16:
case HAL_PIXEL_FORMAT_BLOB:
break;
default:
return false;
}
}
return true;
}
/*==============================================================================
* FUNCTION : getSensorModeInfo
*
* DESCRIPTION: Get sensor mode information based on the current stream configuration
*
* PARAMETERS :
* @sensorModeInfo : sensor mode information (output)
*
* RETURN : int32_t type of status
* NO_ERROR -- success
* non-zero failure code
*
*==========================================================================*/
int32_t QCamera3HardwareInterface::getSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
{
int32_t rc = NO_ERROR;
cam_dimension_t max_dim = {0, 0};
for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
}
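// Push the max requested dimension to the backend so it can select a
// sensor mode, then query the chosen mode back.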
clear_metadata_buffer(mParameters);
rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
max_dim);
if (rc != NO_ERROR) {
LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
return rc;
}
rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
if (rc != NO_ERROR) {
LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
return rc;
}
clear_metadata_buffer(mParameters);
ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO);
rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
mParameters);
if (rc != NO_ERROR) {
LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
return rc;
}
READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO, sensorModeInfo);
LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
"raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
sensorModeInfo.num_raw_bits);
return rc;
}
/*==============================================================================
* FUNCTION : getCurrentSensorModeInfo
*
* DESCRIPTION: Get sensor mode information that is currently selected.
*
* PARAMETERS :
* @sensorModeInfo : sensor mode information (output)
*
* RETURN : int32_t type of status
* NO_ERROR -- success
* non-zero failure code
*
*==========================================================================*/
int32_t QCamera3HardwareInterface::getCurrentSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
{
int32_t rc = NO_ERROR;
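// This query reuses mParameters, so cache its current contents and
// restore them afterwards.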
metadata_buffer_t *cachedParameters = (metadata_buffer_t *) malloc(sizeof(metadata_buffer_t));
if (nullptr == cachedParameters) {
return NO_MEMORY;
}
memcpy(cachedParameters, mParameters, sizeof(metadata_buffer_t));
clear_metadata_buffer(mParameters);
ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO);
rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
mParameters);
if (rc != NO_ERROR) {
LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
free(cachedParameters);
return rc;
}
READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO, sensorModeInfo);
LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
"raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
sensorModeInfo.num_raw_bits);
memcpy(mParameters, cachedParameters, sizeof(metadata_buffer_t));
free(cachedParameters);
return rc;
}
/*==============================================================================
* FUNCTION : addToPPFeatureMask
*
* DESCRIPTION: add additional features to pp feature mask based on
* stream type and usecase
*
* PARAMETERS :
* @stream_format : stream type for feature mask
* @stream_idx : stream idx within postprocess_mask list to change
*
* RETURN : None
*
*==========================================================================*/
void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
uint32_t stream_idx)
{
char feature_mask_value[PROPERTY_VALUE_MAX];
cam_feature_mask_t feature_mask;
int args_converted;
int property_len;
/* Get feature mask from property */
#ifdef _LE_CAMERA_
char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
property_len = property_get("persist.camera.hal3.feature",
feature_mask_value, swtnr_feature_mask_value);
#else
property_len = property_get("persist.camera.hal3.feature",
feature_mask_value, "0");
#endif
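// The property value may be given in hex ("0x...") or decimal.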
if ((property_len > 2) && (feature_mask_value[0] == '0') &&
(feature_mask_value[1] == 'x')) {
args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
} else {
args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
}
if (1 != args_converted) {
feature_mask = 0;
LOGE("Wrong feature mask %s", feature_mask_value);
return;
}
switch (stream_format) {
case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
/* Add LLVD to pp feature mask only if video hint is enabled */
if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
mStreamConfigInfo.postprocess_mask[stream_idx]
|= CAM_QTI_FEATURE_SW_TNR;
LOGH("Added SW TNR to pp feature mask");
} else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
mStreamConfigInfo.postprocess_mask[stream_idx]
|= CAM_QCOM_FEATURE_LLVD;
LOGH("Added LLVD SeeMore to pp feature mask");
}
if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
}
if ((m_bIsVideo) && (gCamCapability[mCameraId]->qcom_supported_feature_mask &
CAM_QTI_FEATURE_BINNING_CORRECTION)) {
mStreamConfigInfo.postprocess_mask[stream_idx] |=
CAM_QTI_FEATURE_BINNING_CORRECTION;
}
break;
}
default:
break;
}
LOGD("PP feature mask %llx",
mStreamConfigInfo.postprocess_mask[stream_idx]);
}
/*==============================================================================
* FUNCTION : updateFpsInPreviewBuffer
*
* DESCRIPTION: update FPS information in preview buffer.
*
* PARAMETERS :
* @metadata : pointer to metadata buffer
* @frame_number: frame_number to look for in pending buffer list
*
* RETURN : None
*
*==========================================================================*/
void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
uint32_t frame_number)
{
// Mark all pending buffers for this particular request
// with corresponding framerate information
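// Note: presumably this lets the display side align the panel refresh rate
// with the camera frame rate; UPDATE_REFRESH_RATE and setMetaData() come from
// the qdMetaData display interface, and the value written is the max fps of
// the currently reported AEC fps range.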
for (List<PendingBuffersInRequest>::iterator req =
mPendingBuffersMap.mPendingBuffersInRequest.begin();
req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
if (req->frame_number != frame_number)
continue;
for (List<PendingBufferInfo>::iterator j =
req->mPendingBufferList.begin();
j != req->mPendingBufferList.end(); j++) {
QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
if (channel->getStreamTypeMask() &
(1U << CAM_STREAM_TYPE_PREVIEW)) {
IF_META_AVAILABLE(cam_fps_range_t, float_range,
CAM_INTF_PARM_FPS_RANGE, metadata) {
typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
struct private_handle_t *priv_handle =
(struct private_handle_t *)(*(j->buffer));
setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
}
}
}
}
}
/*==============================================================================
* FUNCTION : updateTimeStampInPendingBuffers
*
* DESCRIPTION: update timestamp in display metadata for all pending buffers
* of a frame number
*
* PARAMETERS :
* @frame_number: frame number whose pending buffers will receive the timestamp
* @timestamp : timestamp to be set
*
* RETURN : None
*
*==========================================================================*/
void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
uint32_t frameNumber, nsecs_t timestamp)
{
for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
// WAR: save the av_timestamp to the next frame
if(req->frame_number == frameNumber + 1) {
req->av_timestamp = timestamp;
}
if (req->frame_number != frameNumber)
continue;
for (auto k = req->mPendingBufferList.begin();
k != req->mPendingBufferList.end(); k++ ) {
// WAR: update timestamp when it's not VT usecase
QCamera3Channel *channel = (QCamera3Channel *)k->stream->priv;
if (!((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask() &&
m_bAVTimerEnabled)) {
struct private_handle_t *priv_handle =
(struct private_handle_t *) (*(k->buffer));
setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
}
}
}
}
/*===========================================================================
* FUNCTION : configureStreams
*
* DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
* and output streams.
*
* PARAMETERS :
* @stream_list : streams to be configured
*
* RETURN : int type of status
* NO_ERROR -- success
* non-zero failure code
*
*==========================================================================*/
int QCamera3HardwareInterface::configureStreams(
camera3_stream_configuration_t *streamList)
{
ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
int rc = 0;
// Acquire perfLock before configure streams
mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
rc = configureStreamsPerfLocked(streamList);
mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
return rc;
}
/*===========================================================================
* FUNCTION : validateStreamCombination
*
* DESCRIPTION: Validate a given stream combination.
*
* PARAMETERS :
* @cameraId : camera Id.
* @stream_list : stream combination to be validated.
* @status : validation status.
*
* RETURN : int32_t type of status
* NO_ERROR -- success
* non-zero failure code
*==========================================================================*/
int32_t QCamera3HardwareInterface::validateStreamCombination(uint32_t cameraId,
camera3_stream_configuration_t *streamList /*in*/, StreamValidateStatus *status /*out*/)
{
bool isJpeg = false;
bool bJpegExceeds4K = false;
bool bJpegOnEncoder = false;
uint32_t width_ratio;
uint32_t height_ratio;
size_t rawStreamCnt = 0;
size_t stallStreamCnt = 0;
size_t processedStreamCnt = 0;
size_t pdStatCount = 0;
size_t numYuv888OnEncoder = 0;
cam_dimension_t jpegSize = {0, 0};
camera3_stream_t *zslStream = nullptr;
uint32_t maxEisWidth = 0;
uint32_t maxEisHeight = 0;
if (status == nullptr) {
LOGE("NULL stream status");
return BAD_VALUE;
}
// Sanity check stream_list
if (streamList == NULL) {
LOGE("NULL stream configuration");
return BAD_VALUE;
}
if (streamList->streams == NULL) {
LOGE("NULL stream list");
return BAD_VALUE;
}
if (streamList->num_streams < 1) {
LOGE("Bad number of streams requested: %d",
streamList->num_streams);
return BAD_VALUE;
}
if (streamList->num_streams >= MAX_NUM_STREAMS) {
LOGE("Maximum number of streams %d exceeded: %d",
MAX_NUM_STREAMS, streamList->num_streams);
return BAD_VALUE;
}
auto rc = validateUsageFlags(cameraId, streamList);
if (rc != NO_ERROR) {
return rc;
}
rc = validateStreamDimensions(cameraId, streamList);
if (rc == NO_ERROR) {
rc = validateStreamRotations(streamList);
}
if (rc != NO_ERROR) {
LOGE("Invalid stream configuration requested!");
return rc;
}
size_t count = IS_TYPE_MAX;
count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
for (size_t i = 0; i < count; i++) {
if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
(gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
status->bEisSupported = true;
break;
}
}
if (status->bEisSupported) {
maxEisWidth = MAX_EIS_WIDTH;
maxEisHeight = MAX_EIS_HEIGHT;
}
status->maxViewfinderSize = gCamCapability[cameraId]->max_viewfinder_size;
status->largeYuv888Size = {0, 0};
/* stream configurations */
for (size_t i = 0; i < streamList->num_streams; i++) {
camera3_stream_t *newStream = streamList->streams[i];
LOGI("stream[%d] type = %d, format = %d, width = %d, "
"height = %d, rotation = %d, usage = 0x%x",
i, newStream->stream_type, newStream->format,
newStream->width, newStream->height, newStream->rotation,
newStream->usage);
if (newStream->stream_type == CAMERA3_STREAM_INPUT){
status->isZsl = true;
status->inputStream = newStream;
}
if (IS_USAGE_ZSL(newStream->usage)) {
if (zslStream != nullptr) {
LOGE("Multiple input/reprocess streams requested!");
return BAD_VALUE;
}
zslStream = newStream;
}
if ((newStream->format == HAL_PIXEL_FORMAT_BLOB) &&
(newStream->data_space != HAL_DATASPACE_DEPTH)) {
isJpeg = true;
jpegSize.width = newStream->width;
jpegSize.height = newStream->height;
if (newStream->width > VIDEO_4K_WIDTH ||
newStream->height > VIDEO_4K_HEIGHT)
bJpegExceeds4K = true;
}
if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
(IS_USAGE_PREVIEW(newStream->usage) || IS_USAGE_VIDEO(newStream->usage))) {
if (IS_USAGE_VIDEO(newStream->usage)) {
status->bIsVideo = true;
// In HAL3 we can have multiple different video streams.
// The variables video width and height are used below as
// dimensions of the biggest of them
if (status->videoWidth < newStream->width ||
status->videoHeight < newStream->height) {
status->videoWidth = newStream->width;
status->videoHeight = newStream->height;
}
if ((VIDEO_4K_WIDTH <= newStream->width) &&
(VIDEO_4K_HEIGHT <= newStream->height)) {
status->bIs4KVideo = true;
}
}
status->bEisSupportedSize &= (newStream->width <= maxEisWidth) &&
(newStream->height <= maxEisHeight);
}
if (newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
switch (newStream->format) {
case HAL_PIXEL_FORMAT_BLOB:
if (newStream->data_space == HAL_DATASPACE_DEPTH) {
status->depthPresent = true;
break;
}
stallStreamCnt++;
if (isOnEncoder(status->maxViewfinderSize, newStream->width,
newStream->height)) {
status->numStreamsOnEncoder++;
bJpegOnEncoder = true;
}
width_ratio = CEIL_DIVISION(gCamCapability[cameraId]->active_array_size.width,
newStream->width);
height_ratio = CEIL_DIVISION(gCamCapability[cameraId]->active_array_size.height,
newStream->height);
FATAL_IF(gCamCapability[cameraId]->max_downscale_factor == 0,
"FATAL: max_downscale_factor cannot be zero");
if ( (width_ratio > gCamCapability[cameraId]->max_downscale_factor) ||
(height_ratio > gCamCapability[cameraId]->max_downscale_factor)) {
LOGH("Setting small jpeg size flag to true");
status->bSmallJpegSize = true;
}
break;
case HAL_PIXEL_FORMAT_RAW10:
case HAL_PIXEL_FORMAT_RAW_OPAQUE:
case HAL_PIXEL_FORMAT_RAW16:
rawStreamCnt++;
if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
(HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
pdStatCount++;
}
break;
case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
processedStreamCnt++;
if (isOnEncoder(status->maxViewfinderSize, newStream->width,
newStream->height)) {
if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
!IS_USAGE_ZSL(newStream->usage)) {
status->commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
}
status->numStreamsOnEncoder++;
}
break;
case HAL_PIXEL_FORMAT_YCbCr_420_888:
case HAL_PIXEL_FORMAT_Y8:
processedStreamCnt++;
if (isOnEncoder(status->maxViewfinderSize, newStream->width,
newStream->height)) {
// If Yuv888/Y8 size is not greater than 4K, set feature mask
// to SUPERSET so that it support concurrent request on
// YUV and JPEG.
if (newStream->width <= VIDEO_4K_WIDTH &&
newStream->height <= VIDEO_4K_HEIGHT) {
status->commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
}
if (newStream->format == HAL_PIXEL_FORMAT_Y8) {
status->bY80OnEncoder = true;
}
status->numStreamsOnEncoder++;
numYuv888OnEncoder++;
status->largeYuv888Size.width = newStream->width;
status->largeYuv888Size.height = newStream->height;
}
break;
default:
LOGE("not a supported format 0x%x", newStream->format);
return BAD_VALUE;
}
}
}
if (validateUsageFlagsForEis(status->bEisSupported, status->bEisSupportedSize, streamList) !=
NO_ERROR) {
return BAD_VALUE;
}
/* Check if num_streams is sane */
if (stallStreamCnt > MAX_STALLING_STREAMS ||
rawStreamCnt > MAX_RAW_STREAMS ||
processedStreamCnt > MAX_PROCESSED_STREAMS) {
LOGE("Invalid stream configu: stall: %d, raw: %d, processed %d",
stallStreamCnt, rawStreamCnt, processedStreamCnt);
return BAD_VALUE;
}
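// Illustrative accounting (hypothetical configuration): a classic
// preview + video + JPEG setup contributes two processed streams (preview
// and video) and one stalling stream (the BLOB/JPEG output), which stays
// within the MAX_PROCESSED_STREAMS and MAX_STALLING_STREAMS limits above.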
/* Check whether we have zsl stream or 4k video case */
if (status->isZsl && status->bIs4KVideo) {
LOGE("Currently invalid configuration ZSL & 4K Video!");
return BAD_VALUE;
}
/* Check if stream sizes are sane */
if (status->numStreamsOnEncoder > 2) {
LOGE("Number of streams on ISP encoder path exceeds limits of 2");
return BAD_VALUE;
} else if (1 < status->numStreamsOnEncoder){
status->bUseCommonFeatureMask = true;
LOGH("Multiple streams above max viewfinder size, common mask needed");
}
/* Check if BLOB size is greater than 4k in 4k recording case */
if (status->bIs4KVideo && bJpegExceeds4K) {
LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
return BAD_VALUE;
}
if ((streamList->operation_mode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
status->depthPresent) {
LOGE("HAL doesn't support depth streams in HFR mode!");
return BAD_VALUE;
}
// When JPEG and preview streams share VFE output, CPP will not apply CAC2
// on JPEG stream. So disable such configurations to ensure CAC2 is applied.
// Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
// is not true. Otherwise testMandatoryOutputCombinations will fail with following
// configurations:
// {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
// {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
// (These two configurations will not have CAC2 enabled even in HQ modes.)
if (!status->isZsl && bJpegOnEncoder && bJpegExceeds4K && status->bUseCommonFeatureMask) {
ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
__func__);
return BAD_VALUE;
}
// If a JPEG stream is present, a YUV 888 stream is on the encoder path, and
// that YUV stream is strictly larger than the JPEG size, set the common
// postprocess mask to NONE, so that we can take advantage of postproc bypass.
if (numYuv888OnEncoder && isOnEncoder(status->maxViewfinderSize,
jpegSize.width, jpegSize.height) &&
status->largeYuv888Size.width > jpegSize.width &&
status->largeYuv888Size.height > jpegSize.height) {
status->bYuv888OverrideJpeg = true;
} else if (!isJpeg && status->numStreamsOnEncoder > 1) {
status->commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
}
LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
status->maxViewfinderSize.width, status->maxViewfinderSize.height, status->isZsl,
status->bUseCommonFeatureMask, status->commonFeatureMask);
LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
status->numStreamsOnEncoder, processedStreamCnt, stallStreamCnt,
status->bSmallJpegSize);
if (1 < pdStatCount) {
LOGE("HAL doesn't support multiple PD streams");
return BAD_VALUE;
}
if ((streamList->operation_mode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
(1 == pdStatCount)) {
LOGE("HAL doesn't support PD streams in HFR mode!");
return -EINVAL;
}
return NO_ERROR;
}
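/* Minimal caller sketch (illustrative only; the real call site is
* configureStreamsPerfLocked() below). Field names follow camera3.h:
*
*   camera3_stream_t preview = {};
*   preview.stream_type = CAMERA3_STREAM_OUTPUT;
*   preview.format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
*   preview.width = 1920;
*   preview.height = 1080;
*   camera3_stream_t *streams[] = { &preview };
*   camera3_stream_configuration_t cfg = {};
*   cfg.num_streams = 1;
*   cfg.streams = streams;
*   StreamValidateStatus streamStatus;
*   int32_t ret = validateStreamCombination(cameraId, &cfg, &streamStatus);
*   // ret == NO_ERROR when the combination is acceptable.
*/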
/*===========================================================================
* FUNCTION : configureStreamsPerfLocked
*
* DESCRIPTION: configureStreams while perfLock is held.
*
* PARAMETERS :
* @stream_list : streams to be configured
*
* RETURN : int32_t type of status
* NO_ERROR -- success
* non-zero failure code
*==========================================================================*/
int QCamera3HardwareInterface::configureStreamsPerfLocked(
camera3_stream_configuration_t *streamList)
{
ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
StreamValidateStatus streamStatus;
auto rc = validateStreamCombination(mCameraId, streamList, &streamStatus);
if (NO_ERROR != rc) {
return rc;
}
mOpMode = streamList->operation_mode;
LOGD("mOpMode: %d", mOpMode);
// Disable HDR+ if it's enabled.
{
std::unique_lock<std::mutex> l(gHdrPlusClientLock);
finishHdrPlusClientOpeningLocked(l);
disableHdrPlusModeLocked();
}
/* First invalidate all the streams in mStreamInfo;
* if they appear again, they will be validated */
for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
it != mStreamInfo.end(); it++) {
QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
if (channel) {
channel->stop();
}
(*it)->status = INVALID;
}
if (mRawDumpChannel) {
mRawDumpChannel->stop();
delete mRawDumpChannel;
mRawDumpChannel = NULL;
}
if (mHdrPlusRawSrcChannel) {
mHdrPlusRawSrcChannel->stop();
delete mHdrPlusRawSrcChannel;
mHdrPlusRawSrcChannel = NULL;
}
if (mSupportChannel)
mSupportChannel->stop();
if (mAnalysisChannel) {
mAnalysisChannel->stop();
}
if (mMetadataChannel) {
/* If mStreamInfo is not empty, the metadata stream exists */
mMetadataChannel->stop();
}
if (mChannelHandle) {
stopChannelLocked(/*stop_immediately*/false);
}
pthread_mutex_lock(&mMutex);
mPictureChannel = NULL;
// Check state
switch (mState) {
case INITIALIZED:
case CONFIGURED:
case STARTED:
/* valid state */
break;
default:
LOGE("Invalid state %d", mState);
pthread_mutex_unlock(&mMutex);
return -ENODEV;
}
/* Cache the stream combination validation results */
m_bIs4KVideo = streamStatus.bIs4KVideo;
m_bIsVideo = streamStatus.bIsVideo;
m_bEisSupported = streamStatus.bEisSupported;
m_bEisSupportedSize = streamStatus.bEisSupportedSize;
m_bTnrEnabled = false;
m_bVideoHdrEnabled = false;
cam_dimension_t previewSize = {0, 0};
cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
/*EIS configuration*/
uint8_t eis_prop_set;
// Initialize all instant AEC related variables
mInstantAEC = false;
mResetInstantAEC = false;
mInstantAECSettledFrameNumber = 0;
mAecSkipDisplayFrameBound = 0;
mInstantAecFrameIdxCount = 0;
mCurrFeatureState = 0;
mStreamConfig = true;
m_bAVTimerEnabled = false;
memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
/* EIS setprop control */
char eis_prop[PROPERTY_VALUE_MAX];
memset(eis_prop, 0, sizeof(eis_prop));
property_get("persist.camera.eis.enable", eis_prop, "1");
eis_prop_set = (uint8_t)atoi(eis_prop);
m_bEisEnable = eis_prop_set && m_bEisSupported &&
(mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
(gCamCapability[mCameraId]->position == CAM_POSITION_BACK ||
gCamCapability[mCameraId]->position == CAM_POSITION_BACK_AUX);
LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d",
m_bEisEnable, eis_prop_set, m_bEisSupported);
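// Illustrative: since the default is "1", EIS is on whenever the sensor
// position and operation mode allow it; it can be force-disabled at runtime
// with: adb shell setprop persist.camera.eis.enable 0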
uint8_t forceEnableTnr = 0;
char tnr_prop[PROPERTY_VALUE_MAX];
memset(tnr_prop, 0, sizeof(tnr_prop));
property_get("debug.camera.tnr.forceenable", tnr_prop, "0");
forceEnableTnr = (uint8_t)atoi(tnr_prop);
/* Logic to enable/disable TNR based on specific config size/etc.*/
if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
(mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
m_bTnrEnabled = true;
else if (forceEnableTnr)
m_bTnrEnabled = true;
char videoHdrProp[PROPERTY_VALUE_MAX];
memset(videoHdrProp, 0, sizeof(videoHdrProp));
property_get("persist.camera.hdr.video", videoHdrProp, "0");
uint8_t hdr_mode_prop = (uint8_t)atoi(videoHdrProp);
if (hdr_mode_prop == 1 && m_bIsVideo &&
mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
m_bVideoHdrEnabled = true;
else
m_bVideoHdrEnabled = false;
camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
for (size_t i = 0; i < streamList->num_streams; i++) {
camera3_stream_t *newStream = streamList->streams[i];
LOGH("newStream type = %d, stream format = %d "
"stream size : %d x %d, stream rotation = %d",
newStream->stream_type, newStream->format,
newStream->width, newStream->height, newStream->rotation);
//if the stream is in the mStreamList validate it
bool stream_exists = false;
for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
it != mStreamInfo.end(); it++) {
if ((*it)->stream == newStream) {
QCamera3ProcessingChannel *channel =
(QCamera3ProcessingChannel*)(*it)->stream->priv;
stream_exists = true;
if (channel)
delete channel;
(*it)->status = VALID;
(*it)->stream->priv = NULL;
(*it)->channel = NULL;
}
}
if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
//new stream
stream_info_t* stream_info;
stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
if (!stream_info) {
LOGE("Could not allocate stream info");
rc = -ENOMEM;
pthread_mutex_unlock(&mMutex);
return rc;
}
stream_info->stream = newStream;
stream_info->status = VALID;
stream_info->channel = NULL;
stream_info->id = i; // ID will be re-assigned in cleanAndSortStreamInfo().
mStreamInfo.push_back(stream_info);
}
/* Covers Opaque ZSL and API1 F/W ZSL */
if (IS_USAGE_ZSL(newStream->usage)
|| newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
if (zslStream != NULL) {
LOGE("Multiple input/reprocess streams requested!");
pthread_mutex_unlock(&mMutex);
return BAD_VALUE;
}
zslStream = newStream;
}
/* Covers YUV reprocess */
if (streamStatus.inputStream != NULL) {
if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
&& ((newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
&& streamStatus.inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888)
|| (newStream->format == HAL_PIXEL_FORMAT_Y8
&& streamStatus.inputStream->format == HAL_PIXEL_FORMAT_Y8))
&& streamStatus.inputStream->width == newStream->width
&& streamStatus.inputStream->height == newStream->height) {
if (zslStream != NULL) {
/* This scenario indicates that multiple YUV streams with the same size
* as the input stream have been requested. Since the zsl stream handle
* is used solely to override the size of streams which share h/w
* streams, we just make a guess here as to which stream is the ZSL
* stream; this will be refactored once we have generic logic for
* streams sharing encoder output.
*/
LOGH("Warning, Multiple ip/reprocess streams requested!");
}
zslStream = newStream;
}
}
}
/* If a zsl stream is set, we know that we have configured at least one input or
bidirectional stream */
if (NULL != zslStream) {
mInputStreamInfo.dim.width = (int32_t)zslStream->width;
mInputStreamInfo.dim.height = (int32_t)zslStream->height;
mInputStreamInfo.format = zslStream->format;
mInputStreamInfo.usage = zslStream->usage;
LOGD("Input stream configured! %d x %d, format %d, usage %d",
mInputStreamInfo.dim.width,
mInputStreamInfo.dim.height,
mInputStreamInfo.format, mInputStreamInfo.usage);
}
cleanAndSortStreamInfo();
if (mMetadataChannel) {
delete mMetadataChannel;
mMetadataChannel = NULL;
}
if (mSupportChannel) {
delete mSupportChannel;
mSupportChannel = NULL;
}
if (mAnalysisChannel) {
delete mAnalysisChannel;
mAnalysisChannel = NULL;
}
if (mDummyBatchChannel) {
delete mDummyBatchChannel;
mDummyBatchChannel = NULL;
}
if (mDepthChannel) {
mDepthChannel = NULL;
}
mDepthCloudMode = CAM_PD_DATA_SKIP;
mShutterDispatcher.clear();
mOutputBufferDispatcher.clear();
char is_type_value[PROPERTY_VALUE_MAX];
property_get("persist.camera.is_type", is_type_value, "4");
m_bEis3PropertyEnabled = (atoi(is_type_value) == IS_TYPE_EIS_3_0);
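// Note: the default value "4" presumably corresponds to IS_TYPE_EIS_3_0 in
// the cam_is_type_t enum (inferred from the comparison above), making
// EIS 3.0 the default IS type unless persist.camera.is_type is overridden.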
char property_value[PROPERTY_VALUE_MAX];
property_get("persist.camera.gzoom.at", property_value, "0");
int goog_zoom_at = atoi(property_value);
bool is_goog_zoom_video_enabled = ((goog_zoom_at & 1) > 0) &&
gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
bool is_goog_zoom_preview_enabled = ((goog_zoom_at & 2) > 0) &&
gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
property_get("persist.camera.gzoom.4k", property_value, "0");
bool is_goog_zoom_4k_enabled = (atoi(property_value) > 0);
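// Illustrative: persist.camera.gzoom.at is a bitmask; bit 0 enables Google
// zoom on the video stream and bit 1 on the preview stream, so e.g.
//   adb shell setprop persist.camera.gzoom.at 3
// enables both (back camera only, per the position checks above).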
//Create metadata channel and initialize it
cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
gCamCapability[mCameraId]->color_arrangement);
mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
mChannelHandle, mCameraHandle->ops, captureResultCb,
setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
if (mMetadataChannel == NULL) {
LOGE("failed to allocate metadata channel");
rc = -ENOMEM;
pthread_mutex_unlock(&mMutex);
return rc;
}
mMetadataChannel->enableDepthData(streamStatus.depthPresent);
rc = mMetadataChannel->initialize(IS_TYPE_NONE);
if (rc < 0) {
LOGE("metadata channel initialization failed");
delete mMetadataChannel;
mMetadataChannel = NULL;
pthread_mutex_unlock(&mMutex);
return rc;
}
cam_feature_mask_t zsl_ppmask = CAM_QCOM_FEATURE_NONE;
bool isRawStreamRequested = false;
bool onlyRaw = true;
// Keep track of preview/video streams indices.
// There could be more than one preview streams, but only one video stream.
int32_t video_stream_idx = -1;
int32_t preview_stream_idx[streamList->num_streams];
size_t preview_stream_cnt = 0;
bool previewTnr[streamList->num_streams];
memset(previewTnr, 0, sizeof(bool) * streamList->num_streams);
bool isFront = gCamCapability[mCameraId]->position == CAM_POSITION_FRONT;
// Loop through once to determine preview TNR conditions before creating channels.
for (size_t i = 0; i < streamList->num_streams; i++) {
camera3_stream_t *newStream = streamList->streams[i];
uint32_t stream_usage = newStream->usage;
if (newStream->stream_type == CAMERA3_STREAM_OUTPUT &&
newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)
video_stream_idx = (int32_t)i;
else
preview_stream_idx[preview_stream_cnt++] = (int32_t)i;
}
}
// By default, preview stream TNR is disabled.
// Enable TNR to the preview stream if all conditions below are satisfied:
// 1. preview resolution == video resolution.
// 2. video stream TNR is enabled.
// 3. EIS 2.0 is in use, or this is the front camera (which would not use EIS 3.0 even if it is set).
for (size_t i = 0; i < preview_stream_cnt && video_stream_idx != -1; i++) {
camera3_stream_t *video_stream = streamList->streams[video_stream_idx];
camera3_stream_t *preview_stream = streamList->streams[preview_stream_idx[i]];
if (m_bTnrEnabled && m_bTnrVideo &&
(isFront || (atoi(is_type_value) == IS_TYPE_EIS_2_0)) &&
video_stream->width == preview_stream->width &&
video_stream->height == preview_stream->height) {
previewTnr[preview_stream_idx[i]] = true;
}
}
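// Illustrative example (hypothetical sizes): with video TNR enabled on a
// back camera running EIS 2.0, a 1920x1080 preview paired with a 1920x1080
// video stream gets previewTnr set, while a 1280x720 preview next to the
// same video stream does not, since the resolutions differ.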
memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
/* Allocate channel objects for the requested streams */
for (size_t i = 0; i < streamList->num_streams; i++) {
camera3_stream_t *newStream = streamList->streams[i];
uint32_t stream_usage = newStream->usage;
mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
struct camera_info *p_info = NULL;
pthread_mutex_lock(&gCamLock);
p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
pthread_mutex_unlock(&gCamLock);
if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
|| IS_USAGE_ZSL(newStream->usage)) &&
newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
onlyRaw = false; // There is non-raw stream - bypass flag if set
mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
if (isOnEncoder(streamStatus.maxViewfinderSize, newStream->width, newStream->height)) {
if (streamStatus.bUseCommonFeatureMask)
zsl_ppmask = streamStatus.commonFeatureMask;
else
zsl_ppmask = CAM_QCOM_FEATURE_NONE;
} else {
if (streamStatus.numStreamsOnEncoder > 0)
zsl_ppmask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
else
zsl_ppmask = CAM_QCOM_FEATURE_NONE;
}
mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = zsl_ppmask;
} else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
onlyRaw = false; // There is non-raw stream - bypass flag if set
LOGH("Input stream configured, reprocess config");
} else {
//for non zsl streams find out the format
switch (newStream->format) {
case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
{
onlyRaw = false; // There is non-raw stream - bypass flag if set
mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
/* add additional features to pp feature mask */
addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
mStreamConfigInfo.num_streams);
if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
CAM_STREAM_TYPE_VIDEO;
if (m_bTnrEnabled && m_bTnrVideo) {
mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
CAM_QCOM_FEATURE_CPP_TNR;
//TNR and CDS are mutually exclusive. So reset CDS from feature mask
mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
~CAM_QCOM_FEATURE_CDS;
}
if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
CAM_QTI_FEATURE_PPEISCORE;
}
if (is_goog_zoom_video_enabled && (is_goog_zoom_4k_enabled || !m_bIs4KVideo)) {
mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
CAM_QCOM_FEATURE_GOOG_ZOOM;
}
} else {
mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
CAM_STREAM_TYPE_PREVIEW;
if (m_bTnrEnabled && (previewTnr[i] || m_bTnrPreview)) {
mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
CAM_QCOM_FEATURE_CPP_TNR;
//TNR and CDS are mutually exclusive. So reset CDS from feature mask
mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
~CAM_QCOM_FEATURE_CDS;
}
if(!m_bSwTnrPreview) {
mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
~CAM_QTI_FEATURE_SW_TNR;
}
if (is_goog_zoom_preview_enabled) {
mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
CAM_QCOM_FEATURE_GOOG_ZOOM;
}
padding_info.width_padding = mSurfaceStridePadding;
padding_info.height_padding = CAM_PAD_TO_2;
previewSize.width = (int32_t)newStream->width;
previewSize.height = (int32_t)newStream->height;
}
if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
(newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
newStream->height;
mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
newStream->width;
}
}
break;
case HAL_PIXEL_FORMAT_YCbCr_420_888:
case HAL_PIXEL_FORMAT_Y8:
onlyRaw = false; // There is non-raw stream - bypass flag if set
mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
if (isOnEncoder(streamStatus.maxViewfinderSize, newStream->width,
newStream->height)) {
if (streamStatus.bUseCommonFeatureMask)
mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
streamStatus.commonFeatureMask;
else
mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
CAM_QCOM_FEATURE_NONE;
} else {
mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
}
break;
case HAL_PIXEL_FORMAT_BLOB:
onlyRaw = false; // There is non-raw stream - bypass flag if set
mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
// No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
if ((m_bIs4KVideo && !streamStatus.isZsl) ||
(streamStatus.bSmallJpegSize && !streamStatus.isZsl)) {
mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
/* Remove rotation if it is not supported
for 4K LiveVideo snapshot case (online processing) */
if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask &
CAM_QCOM_FEATURE_ROTATION)) {
mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
&= ~CAM_QCOM_FEATURE_ROTATION;
}
} else {
if (streamStatus.bUseCommonFeatureMask &&
isOnEncoder(streamStatus.maxViewfinderSize, newStream->width,
newStream->height)) {
mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
streamStatus.commonFeatureMask;
} else {
mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
}
}
if (streamStatus.isZsl) {
if (zslStream) {
mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
(int32_t)zslStream->width;
mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
(int32_t)zslStream->height;
mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
zsl_ppmask;
} else {
LOGE("Error, No ZSL stream identified");
pthread_mutex_unlock(&mMutex);
return -EINVAL;
}
} else if (m_bIs4KVideo) {
mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
(int32_t) streamStatus.videoWidth;
mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
(int32_t) streamStatus.videoHeight;
} else if (streamStatus.bYuv888OverrideJpeg) {
mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
(int32_t) streamStatus.largeYuv888Size.width;
mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
(int32_t) streamStatus.largeYuv888Size.height;
}
break;
case HAL_PIXEL_FORMAT_RAW_OPAQUE:
case HAL_PIXEL_FORMAT_RAW16:
case HAL_PIXEL_FORMAT_RAW10:
mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
isRawStreamRequested = true;
if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
(HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
mStreamConfigInfo.sub_format_type[mStreamConfigInfo.num_streams] =
gCamCapability[mCameraId]->sub_fmt[mPDIndex];
mStreamConfigInfo.format[mStreamConfigInfo.num_streams] =
gCamCapability[mCameraId]->supported_meta_raw_fmts[mPDIndex];
mStreamConfigInfo.dt[mStreamConfigInfo.num_streams] =
gCamCapability[mCameraId]->dt[mPDIndex];
mStreamConfigInfo.vc[mStreamConfigInfo.num_streams] =
gCamCapability[mCameraId]->vc[mPDIndex];
}
break;
default:
onlyRaw = false; // There is non-raw stream - bypass flag if set
mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
break;
}
}
setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
(cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
gCamCapability[mCameraId]->color_arrangement);
if (newStream->priv == NULL) {
//New stream, construct channel
switch (newStream->stream_type) {
case CAMERA3_STREAM_INPUT:
newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
break;
case CAMERA3_STREAM_BIDIRECTIONAL:
newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
GRALLOC_USAGE_HW_CAMERA_WRITE;
break;
case CAMERA3_STREAM_OUTPUT:
/* For video encoding stream, set read/write rarely
* flag so that they may be set to un-cached */
if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
newStream->usage |=
(GRALLOC_USAGE_SW_READ_RARELY |
GRALLOC_USAGE_SW_WRITE_RARELY |
GRALLOC_USAGE_HW_CAMERA_WRITE);
else if (IS_USAGE_ZSL(newStream->usage))
{
LOGD("ZSL usage flag skipping");
}
else if (newStream == zslStream
|| (newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888 ||
newStream->format == HAL_PIXEL_FORMAT_Y8)) {
newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
} else
newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
break;
default:
LOGE("Invalid stream_type %d", newStream->stream_type);
break;
}
bool forcePreviewUBWC = true;
if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
QCamera3ProcessingChannel *channel = NULL;
switch (newStream->format) {
case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
if ((newStream->usage &
private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
(streamList->operation_mode ==
CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
) {
channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
mChannelHandle, mCameraHandle->ops, captureResultCb,
setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
this,
newStream,
(cam_stream_type_t)
mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
mMetadataChannel,
0); //heap buffers are not required for HFR video channel
if (channel == NULL) {
LOGE("allocation of channel failed");
pthread_mutex_unlock(&mMutex);
return -ENOMEM;
}
//channel->getNumBuffers() will return 0 here so use
//MAX_INFLIGHT_HFR_REQUESTS
newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
newStream->priv = channel;
LOGI("num video buffers in HFR mode: %d",
MAX_INFLIGHT_HFR_REQUESTS);
} else {
/* Copy stream contents in HFR preview only case to create
* dummy batch channel so that sensor streaming is in
* HFR mode */
if (!m_bIsVideo && (streamList->operation_mode ==
CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
mDummyBatchStream = *newStream;
mDummyBatchStream.usage = GRALLOC_USAGE_HW_VIDEO_ENCODER;
}
int bufferCount = MAX_INFLIGHT_REQUESTS;
if (mStreamConfigInfo.type[mStreamConfigInfo.num_streams] ==
CAM_STREAM_TYPE_VIDEO) {
if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
// WAR: 4K video can only run <=30fps, reduce the buffer count.
bufferCount = m_bIs4KVideo ?
MAX_30FPS_VIDEO_BUFFERS : MAX_VIDEO_BUFFERS;
}
}
channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
mChannelHandle, mCameraHandle->ops, captureResultCb,
setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
this,
newStream,
(cam_stream_type_t)
mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
mMetadataChannel,
bufferCount);
if (channel == NULL) {
LOGE("allocation of channel failed");
pthread_mutex_unlock(&mMutex);
return -ENOMEM;
}
/* disable UBWC for preview, though supported,
* to take advantage of CPP duplication */
if (m_bIsVideo && (!QCameraCommon::isVideoUBWCEnabled()) &&
(previewSize.width == (int32_t) streamStatus.videoWidth)&&
(previewSize.height == (int32_t) streamStatus.videoHeight)){
forcePreviewUBWC = false;
}
channel->setUBWCEnabled(forcePreviewUBWC);
/* When goog_zoom is linked to the preview or video stream,
* disable ubwc to the linked stream */
if ((mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &
CAM_QCOM_FEATURE_GOOG_ZOOM) != 0) {
channel->setUBWCEnabled(false);
}
newStream->max_buffers = channel->getNumBuffers();
newStream->priv = channel;
}
break;
case HAL_PIXEL_FORMAT_YCbCr_420_888:
case HAL_PIXEL_FORMAT_Y8: {
channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
mChannelHandle,
mCameraHandle->ops, captureResultCb,
setBufferErrorStatus, &padding_info,
this,
newStream,
(cam_stream_type_t)
mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
mMetadataChannel);
if (channel == NULL) {
LOGE("allocation of YUV channel failed");
pthread_mutex_unlock(&mMutex);
return -ENOMEM;
}
newStream->max_buffers = channel->getNumBuffers();
newStream->priv = channel;
break;
}
case HAL_PIXEL_FORMAT_RAW_OPAQUE:
case HAL_PIXEL_FORMAT_RAW16:
case HAL_PIXEL_FORMAT_RAW10: {
bool isRAW16 = ((newStream->format == HAL_PIXEL_FORMAT_RAW16) &&
(HAL_DATASPACE_DEPTH != newStream->data_space));
mRawChannel = new QCamera3RawChannel(
mCameraHandle->camera_handle, mChannelHandle,
mCameraHandle->ops, captureResultCb,
setBufferErrorStatus, &padding_info,
this, newStream,
mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
mMetadataChannel, isRAW16);
if (mRawChannel == NULL) {
LOGE("allocation of raw channel failed");
pthread_mutex_unlock(&mMutex);
return -ENOMEM;
}
newStream->max_buffers = mRawChannel->getNumBuffers();
newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
break;
}
case HAL_PIXEL_FORMAT_BLOB:
if (newStream->data_space == HAL_DATASPACE_DEPTH) {
mDepthChannel = new QCamera3DepthChannel(
mCameraHandle->camera_handle, mChannelHandle,
mCameraHandle->ops, NULL, NULL, &padding_info,
0, this, MAX_INFLIGHT_REQUESTS, newStream,
mMetadataChannel);
if (NULL == mDepthChannel) {
LOGE("Allocation of depth channel failed");
pthread_mutex_unlock(&mMutex);
return NO_MEMORY;
}
newStream->priv = mDepthChannel;
newStream->max_buffers = MAX_INFLIGHT_REQUESTS;
} else {
// Max live snapshot inflight buffer is 1. This is to mitigate
// frame drop issues for video snapshot. The more buffers being
// allocated, the more frame drops there are.
mPictureChannel = new QCamera3PicChannel(
mCameraHandle->camera_handle, mChannelHandle,
mCameraHandle->ops, captureResultCb,
setBufferErrorStatus, &padding_info, this, newStream,
mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
m_bIs4KVideo, streamStatus.isZsl, streamStatus.bY80OnEncoder,
mMetadataChannel, (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
if (mPictureChannel == NULL) {
LOGE("allocation of channel failed");
pthread_mutex_unlock(&mMutex);
return -ENOMEM;
}
newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
newStream->max_buffers = mPictureChannel->getNumBuffers();
mPictureChannel->overrideYuvSize(
mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
}
break;
default:
LOGE("not a supported format 0x%x", newStream->format);
pthread_mutex_unlock(&mMutex);
return -EINVAL;
}
} else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
} else {
LOGE("Error, Unknown stream type");
pthread_mutex_unlock(&mMutex);
return -EINVAL;
}
QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
if (channel != NULL && QCamera3Channel::isUBWCEnabled()) {
// Here we only care whether it's EIS3 or not
cam_is_type_t isType = m_bEis3PropertyEnabled ? IS_TYPE_EIS_3_0 : IS_TYPE_NONE;
if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
isType = IS_TYPE_NONE;
cam_format_t fmt = QCamera3Channel::getStreamDefaultFormat(
mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
newStream->width, newStream->height, forcePreviewUBWC, isType);
if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
}
}
for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
it != mStreamInfo.end(); it++) {
if ((*it)->stream == newStream) {
(*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
break;
}
}
} else {
// Channel already exists for this stream
// Do nothing for now
}
padding_info = gCamCapability[mCameraId]->padding_info;
/* Do not add entries for input & depth streams in metastream info
* since there is no real stream associated with them
*/
if ((newStream->stream_type != CAMERA3_STREAM_INPUT) &&
!((newStream->data_space == HAL_DATASPACE_DEPTH) &&
(newStream->format == HAL_PIXEL_FORMAT_BLOB))) {
mStreamConfigInfo.num_streams++;
}
}
// Let buffer dispatcher know the configured streams.
mOutputBufferDispatcher.configureStreams(streamList);
if (mOpMode != QCAMERA3_VENDOR_STREAM_CONFIGURATION_RAW_ONLY_MODE) {
onlyRaw = false;
}
// Create analysis stream all the time, even when h/w support is not available
if (!onlyRaw) {
cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
cam_analysis_info_t analysisInfo;
int32_t ret = NO_ERROR;
ret = mCommon.getAnalysisInfo(
FALSE,
analysisFeatureMask,