CameraITS: Add reprocess noise reduction test
Add doReprocessCapture() to submit regular capture requests and use
the regular capture requests as the input for submitting reprocess
capture requests.
Add a test to verify noise reduction modes work as expected for
reprocess requests.
Bug: 20920487
Change-Id: I8c28ca103991c4b84d081c182d81e548c9c3b2e1
diff --git a/apps/CameraITS/pymodules/its/caps.py b/apps/CameraITS/pymodules/its/caps.py
index 70bb2ca..b6d398f 100644
--- a/apps/CameraITS/pymodules/its/caps.py
+++ b/apps/CameraITS/pymodules/its/caps.py
@@ -283,6 +283,30 @@
"android.shading.availableModes") and \
0 in props["android.shading.availableModes"]
+def yuv_reprocess(props):
+ """Returns whether a device supports YUV reprocessing.
+
+ Args:
+ props: Camera properties object.
+
+ Returns:
+ Boolean.
+ """
+ return props.has_key("android.request.availableCapabilities") and \
+ 7 in props["android.request.availableCapabilities"]
+
+def private_reprocess(props):
+ """Returns whether a device supports PRIVATE reprocessing.
+
+ Args:
+ props: Camera properties object.
+
+ Returns:
+ Boolean.
+ """
+ return props.has_key("android.request.availableCapabilities") and \
+ 4 in props["android.request.availableCapabilities"]
+
class __UnitTest(unittest.TestCase):
"""Run a suite of unit tests on this module.
"""
diff --git a/apps/CameraITS/pymodules/its/device.py b/apps/CameraITS/pymodules/its/device.py
index e396483..8773335 100644
--- a/apps/CameraITS/pymodules/its/device.py
+++ b/apps/CameraITS/pymodules/its/device.py
@@ -359,7 +359,7 @@
raise its.error.Error('3A failed to converge')
return ae_sens, ae_exp, awb_gains, awb_transform, af_dist
- def do_capture(self, cap_request, out_surfaces=None):
+ def do_capture(self, cap_request, out_surfaces=None, reprocess_format=None):
"""Issue capture request(s), and read back the image(s) and metadata.
The main top-level function for capturing one or more images using the
@@ -378,6 +378,18 @@
surface. At most one output surface can be specified for a given format,
and raw+dng, raw10+dng, and raw+raw10 are not supported as combinations.
+ If reprocess_format is not None, for each request, an intermediate
+ buffer of the given reprocess_format will be captured from camera and
+ the intermediate buffer will be reprocessed to the output surfaces. The
+ following settings will be turned off when capturing the intermediate
+ buffer and will be applied when reprocessing the intermediate buffer.
+ 1. android.noiseReduction.mode
+ 2. android.edge.mode
+ 3. android.reprocess.effectiveExposureFactor
+
+        Supported reprocess formats are "yuv" and "private". Supported output
+        surface formats when reprocessing is enabled are "yuv" and "jpeg".
+
Example of a single capture request:
{
@@ -449,6 +461,8 @@
will be converted to JSON and sent to the device.
out_surfaces: (Optional) specifications of the output image formats
and sizes to use for each capture.
+ reprocess_format: (Optional) The reprocessing format. If not None,
+ reprocessing will be enabled.
Returns:
An object, list of objects, or list of lists of objects, where each
@@ -460,7 +474,11 @@
* metadata: the capture result object (Python dictionary).
"""
cmd = {}
- cmd["cmdName"] = "doCapture"
+ if reprocess_format != None:
+ cmd["cmdName"] = "doReprocessCapture"
+ cmd["reprocessFormat"] = reprocess_format
+ else:
+ cmd["cmdName"] = "doCapture"
if not isinstance(cap_request, list):
cmd["captureRequests"] = [cap_request]
else:
diff --git a/apps/CameraITS/tests/scene1/test_reprocess_noise_reduction.py b/apps/CameraITS/tests/scene1/test_reprocess_noise_reduction.py
new file mode 100644
index 0000000..e9240ba
--- /dev/null
+++ b/apps/CameraITS/tests/scene1/test_reprocess_noise_reduction.py
@@ -0,0 +1,112 @@
+# Copyright 2015 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import its.image
+import its.caps
+import its.device
+import its.objects
+import its.target
+import math
+import matplotlib
+import matplotlib.pyplot
+import os.path
+import pylab
+
+def main():
+ """Test that the android.noiseReduction.mode param is applied when set for
+ reprocessing requests.
+
+ Capture reprocessed images with the camera dimly lit. Uses a high analog
+ gain to ensure the captured image is noisy.
+
+ Captures three reprocessed images, for NR off, "fast", and "high quality".
+ Also captures a reprocessed image with low gain and NR off, and uses the
+ variance of this as the baseline.
+ """
+
+ NAME = os.path.basename(__file__).split(".")[0]
+
+ with its.device.ItsSession() as cam:
+ props = cam.get_camera_properties()
+
+ its.caps.skip_unless(its.caps.compute_target_exposure(props) and
+ its.caps.per_frame_control(props) and
+ (its.caps.yuv_reprocess(props) or
+ its.caps.private_reprocess(props)))
+
+ reprocess_formats = []
+ if (its.caps.yuv_reprocess(props)):
+ reprocess_formats.append("yuv")
+ if (its.caps.private_reprocess(props)):
+ reprocess_formats.append("private")
+
+ for reprocess_format in reprocess_formats:
+ # List of variances for R, G, B.
+ variances = []
+ nr_modes_reported = []
+
+ # NR mode 0 with low gain
+ e, s = its.target.get_target_exposure_combos(cam)["minSensitivity"]
+ req = its.objects.manual_capture_request(s, e)
+ req["android.noiseReduction.mode"] = 0
+
+ # Test reprocess_format->JPEG reprocessing
+ # TODO: Switch to reprocess_format->YUV when YUV reprocessing is
+ # supported.
+ size = its.objects.get_available_output_sizes("jpg", props)[0]
+ out_surface = {"width":size[0], "height":size[1], "format":"jpg"}
+ cap = cam.do_capture(req, out_surface, reprocess_format)
+ img = its.image.decompress_jpeg_to_rgb_image(cap["data"])
+ its.image.write_image(img, "%s_low_gain_fmt=jpg.jpg" % (NAME))
+ tile = its.image.get_image_patch(img, 0.45, 0.45, 0.1, 0.1)
+ ref_variance = its.image.compute_image_variances(tile)
+ print "Ref variances:", ref_variance
+
+ for nr_mode in range(3):
+ # NR modes 0, 1, 2 with high gain
+ e, s = its.target.get_target_exposure_combos(cam) \
+ ["maxSensitivity"]
+ req = its.objects.manual_capture_request(s, e)
+ req["android.noiseReduction.mode"] = nr_mode
+ cap = cam.do_capture(req, out_surface, reprocess_format)
+ nr_modes_reported.append(
+ cap["metadata"]["android.noiseReduction.mode"])
+
+ img = its.image.decompress_jpeg_to_rgb_image(cap["data"])
+ its.image.write_image(
+ img, "%s_high_gain_nr=%d_fmt=jpg.jpg" % (NAME, nr_mode))
+ tile = its.image.get_image_patch(img, 0.45, 0.45, 0.1, 0.1)
+ # Get the variances for R, G, and B channels
+ variance = its.image.compute_image_variances(tile)
+ variances.append(
+ [variance[chan] / ref_variance[chan] for chan in range(3)])
+ print "Variances with NR mode [0,1,2]:", variances
+
+ # Draw a plot.
+ for nr_mode in range(3):
+ pylab.plot(range(3), variances[nr_mode], "rgb"[nr_mode])
+ matplotlib.pyplot.savefig("%s_plot_%s_variances.png" %
+ (NAME, reprocess_format))
+
+ assert(nr_modes_reported == [0,1,2])
+
+ # Check that the variance of the NR=0 image is higher than for the
+ # NR=1 and NR=2 images.
+ for j in range(3):
+ for i in range(1,3):
+ assert(variances[i][j] < variances[0][j])
+
+if __name__ == '__main__':
+ main()
+
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/camera/its/ItsService.java b/apps/CtsVerifier/src/com/android/cts/verifier/camera/its/ItsService.java
index 58b51a5..85b8a18 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/camera/its/ItsService.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/camera/its/ItsService.java
@@ -30,6 +30,7 @@
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.DngCreator;
import android.hardware.camera2.TotalCaptureResult;
+import android.hardware.camera2.params.InputConfiguration;
import android.hardware.camera2.params.MeteringRectangle;
import android.hardware.Sensor;
import android.hardware.SensorEvent;
@@ -37,6 +38,7 @@
import android.hardware.SensorManager;
import android.media.Image;
import android.media.ImageReader;
+import android.media.ImageWriter;
import android.net.Uri;
import android.os.ConditionVariable;
import android.os.Handler;
@@ -67,7 +69,6 @@
import java.math.BigInteger;
import java.net.ServerSocket;
import java.net.Socket;
-import java.net.SocketTimeoutException;
import java.nio.ByteBuffer;
import java.nio.charset.Charset;
import java.security.MessageDigest;
@@ -78,6 +79,7 @@
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.LinkedBlockingDeque;
+import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
@@ -121,7 +123,8 @@
private BlockingStateCallback mCameraListener = null;
private CameraDevice mCamera = null;
private CameraCaptureSession mSession = null;
- private ImageReader[] mCaptureReaders = null;
+ private ImageReader[] mOutputImageReaders = null;
+ private ImageReader mInputImageReader = null;
private CameraCharacteristics mCameraCharacteristics = null;
private Vibrator mVibrator = null;
@@ -547,6 +550,8 @@
doVibrate(cmdObj);
} else if ("getCameraIds".equals(cmdObj.getString("cmdName"))) {
doGetCameraIds();
+ } else if ("doReprocessCapture".equals(cmdObj.getString("cmdName"))) {
+ doReprocessCapture(cmdObj);
} else {
throw new ItsException("Unknown command: " + cmd);
}
@@ -766,18 +771,42 @@
}
}
- private void prepareCaptureReader(int[] widths, int[] heights, int formats[], int numSurfaces) {
- if (mCaptureReaders != null) {
- for (int i = 0; i < mCaptureReaders.length; i++) {
- if (mCaptureReaders[i] != null) {
- mCaptureReaders[i].close();
+ private void prepareImageReaders(Size[] outputSizes, int[] outputFormats, Size inputSize,
+ int inputFormat, int maxInputBuffers) {
+ closeImageReaders();
+ mOutputImageReaders = new ImageReader[outputSizes.length];
+ for (int i = 0; i < outputSizes.length; i++) {
+ // Check if the output image reader can be shared with the input image reader.
+ if (outputSizes[i].equals(inputSize) && outputFormats[i] == inputFormat) {
+ mOutputImageReaders[i] = ImageReader.newInstance(outputSizes[i].getWidth(),
+ outputSizes[i].getHeight(), outputFormats[i],
+ MAX_CONCURRENT_READER_BUFFERS + maxInputBuffers);
+ mInputImageReader = mOutputImageReaders[i];
+ } else {
+ mOutputImageReaders[i] = ImageReader.newInstance(outputSizes[i].getWidth(),
+ outputSizes[i].getHeight(), outputFormats[i],
+ MAX_CONCURRENT_READER_BUFFERS);
+ }
+ }
+
+ if (inputSize != null && mInputImageReader == null) {
+ mInputImageReader = ImageReader.newInstance(inputSize.getWidth(), inputSize.getHeight(),
+ inputFormat, maxInputBuffers);
+ }
+ }
+
+ private void closeImageReaders() {
+ if (mOutputImageReaders != null) {
+ for (int i = 0; i < mOutputImageReaders.length; i++) {
+ if (mOutputImageReaders[i] != null) {
+ mOutputImageReaders[i].close();
+ mOutputImageReaders[i] = null;
}
}
}
- mCaptureReaders = new ImageReader[numSurfaces];
- for (int i = 0; i < numSurfaces; i++) {
- mCaptureReaders[i] = ImageReader.newInstance(widths[i], heights[i], formats[i],
- MAX_CONCURRENT_READER_BUFFERS);
+ if (mInputImageReader != null) {
+ mInputImageReader.close();
+ mInputImageReader = null;
}
}
@@ -788,18 +817,17 @@
// 3A happens on full-res frames.
Size sizes[] = ItsUtils.getYuvOutputSizes(mCameraCharacteristics);
- int widths[] = new int[1];
- int heights[] = new int[1];
- int formats[] = new int[1];
- widths[0] = sizes[0].getWidth();
- heights[0] = sizes[0].getHeight();
- formats[0] = ImageFormat.YUV_420_888;
- int width = widths[0];
- int height = heights[0];
+ int outputFormats[] = new int[1];
+ outputFormats[0] = ImageFormat.YUV_420_888;
+ Size[] outputSizes = new Size[1];
+ outputSizes[0] = sizes[0];
+ int width = outputSizes[0].getWidth();
+ int height = outputSizes[0].getHeight();
- prepareCaptureReader(widths, heights, formats, 1);
+ prepareImageReaders(outputSizes, outputFormats, /*inputSize*/null, /*inputFormat*/0,
+ /*maxInputBuffers*/0);
List<Surface> outputSurfaces = new ArrayList<Surface>(1);
- outputSurfaces.add(mCaptureReaders[0].getSurface());
+ outputSurfaces.add(mOutputImageReaders[0].getSurface());
BlockingSessionCallback sessionListener = new BlockingSessionCallback();
mCamera.createCaptureSession(outputSurfaces, sessionListener, mCameraHandler);
mSession = sessionListener.waitAndGetSession(TIMEOUT_IDLE_MS);
@@ -807,7 +835,7 @@
// Add a listener that just recycles buffers; they aren't saved anywhere.
ImageReader.OnImageAvailableListener readerListener =
createAvailableListenerDropper(mCaptureCallback);
- mCaptureReaders[0].setOnImageAvailableListener(readerListener, mSaveHandlers[0]);
+ mOutputImageReaders[0].setOnImageAvailableListener(readerListener, mSaveHandlers[0]);
// Get the user-specified regions for AE, AWB, AF.
// Note that the user specifies normalized [x,y,w,h], which is converted below
@@ -953,7 +981,7 @@
triggeredAF = true;
}
- req.addTarget(mCaptureReaders[0].getSurface());
+ req.addTarget(mOutputImageReaders[0].getSurface());
mIssuedRequest3A = true;
mSession.capture(req.build(), mCaptureResultListener, mResultHandler);
@@ -991,16 +1019,96 @@
}
}
+ /**
+ * Parse jsonOutputSpecs to get output surface sizes and formats. Create input and output
+ * image readers for the parsed output surface sizes, output formats, and the given input
+ * size and format.
+ */
+ private void prepareImageReadersWithOutputSpecs(JSONArray jsonOutputSpecs, Size inputSize,
+ int inputFormat, int maxInputBuffers) throws ItsException {
+ Size outputSizes[];
+ int outputFormats[];
+ int numSurfaces = 0;
+
+ if (jsonOutputSpecs != null) {
+ try {
+ numSurfaces = jsonOutputSpecs.length();
+ if (numSurfaces > MAX_NUM_OUTPUT_SURFACES) {
+ throw new ItsException("Too many output surfaces");
+ }
+
+ outputSizes = new Size[numSurfaces];
+ outputFormats = new int[numSurfaces];
+ for (int i = 0; i < numSurfaces; i++) {
+ // Get the specified surface.
+ JSONObject surfaceObj = jsonOutputSpecs.getJSONObject(i);
+ String sformat = surfaceObj.optString("format");
+ Size sizes[];
+ if ("yuv".equals(sformat) || "".equals(sformat)) {
+ // Default to YUV if no format is specified.
+ outputFormats[i] = ImageFormat.YUV_420_888;
+ sizes = ItsUtils.getYuvOutputSizes(mCameraCharacteristics);
+ } else if ("jpg".equals(sformat) || "jpeg".equals(sformat)) {
+ outputFormats[i] = ImageFormat.JPEG;
+ sizes = ItsUtils.getJpegOutputSizes(mCameraCharacteristics);
+ } else if ("raw".equals(sformat)) {
+ outputFormats[i] = ImageFormat.RAW_SENSOR;
+ sizes = ItsUtils.getRaw16OutputSizes(mCameraCharacteristics);
+ } else if ("raw10".equals(sformat)) {
+ outputFormats[i] = ImageFormat.RAW10;
+ sizes = ItsUtils.getRaw10OutputSizes(mCameraCharacteristics);
+ } else if ("raw12".equals(sformat)) {
+ outputFormats[i] = ImageFormat.RAW12;
+ sizes = ItsUtils.getRaw12OutputSizes(mCameraCharacteristics);
+ } else if ("dng".equals(sformat)) {
+ outputFormats[i] = ImageFormat.RAW_SENSOR;
+ sizes = ItsUtils.getRaw16OutputSizes(mCameraCharacteristics);
+ mCaptureRawIsDng = true;
+ } else {
+ throw new ItsException("Unsupported format: " + sformat);
+ }
+ // If the size is omitted, then default to the largest allowed size for the
+ // format.
+ int width = surfaceObj.optInt("width");
+ int height = surfaceObj.optInt("height");
+ if (width <= 0) {
+ if (sizes == null || sizes.length == 0) {
+ throw new ItsException(String.format(
+ "Zero stream configs available for requested format: %s",
+ sformat));
+ }
+ width = sizes[0].getWidth();
+ }
+ if (height <= 0) {
+ height = sizes[0].getHeight();
+ }
+
+ outputSizes[i] = new Size(width, height);
+ }
+ } catch (org.json.JSONException e) {
+ throw new ItsException("JSON error", e);
+ }
+ } else {
+ // No surface(s) specified at all.
+ // Default: a single output surface which is full-res YUV.
+ Size sizes[] = ItsUtils.getYuvOutputSizes(mCameraCharacteristics);
+ numSurfaces = 1;
+
+ outputSizes = new Size[1];
+ outputFormats = new int[1];
+ outputSizes[0] = sizes[0];
+ outputFormats[0] = ImageFormat.YUV_420_888;
+ }
+
+ prepareImageReaders(outputSizes, outputFormats, inputSize, inputFormat, maxInputBuffers);
+ }
+
private void doCapture(JSONObject params) throws ItsException {
try {
// Parse the JSON to get the list of capture requests.
List<CaptureRequest.Builder> requests = ItsSerializer.deserializeRequestList(
mCamera, params);
- // Set the output surface(s) and listeners.
- int widths[] = new int[MAX_NUM_OUTPUT_SURFACES];
- int heights[] = new int[MAX_NUM_OUTPUT_SURFACES];
- int formats[] = new int[MAX_NUM_OUTPUT_SURFACES];
int numSurfaces = 0;
try {
mCountRawOrDng.set(0);
@@ -1013,70 +1121,14 @@
mCaptureResults = new CaptureResult[requests.size()];
JSONArray jsonOutputSpecs = ItsUtils.getOutputSpecs(params);
- if (jsonOutputSpecs != null) {
- numSurfaces = jsonOutputSpecs.length();
- if (numSurfaces > MAX_NUM_OUTPUT_SURFACES) {
- throw new ItsException("Too many output surfaces");
- }
- for (int i = 0; i < numSurfaces; i++) {
- // Get the specified surface.
- JSONObject surfaceObj = jsonOutputSpecs.getJSONObject(i);
- String sformat = surfaceObj.optString("format");
- Size sizes[];
- if ("yuv".equals(sformat) || "".equals(sformat)) {
- // Default to YUV if no format is specified.
- formats[i] = ImageFormat.YUV_420_888;
- sizes = ItsUtils.getYuvOutputSizes(mCameraCharacteristics);
- } else if ("jpg".equals(sformat) || "jpeg".equals(sformat)) {
- formats[i] = ImageFormat.JPEG;
- sizes = ItsUtils.getJpegOutputSizes(mCameraCharacteristics);
- } else if ("raw".equals(sformat)) {
- formats[i] = ImageFormat.RAW_SENSOR;
- sizes = ItsUtils.getRaw16OutputSizes(mCameraCharacteristics);
- } else if ("raw10".equals(sformat)) {
- formats[i] = ImageFormat.RAW10;
- sizes = ItsUtils.getRaw10OutputSizes(mCameraCharacteristics);
- } else if ("raw12".equals(sformat)) {
- formats[i] = ImageFormat.RAW12;
- sizes = ItsUtils.getRaw12OutputSizes(mCameraCharacteristics);
- } else if ("dng".equals(sformat)) {
- formats[i] = ImageFormat.RAW_SENSOR;
- sizes = ItsUtils.getRaw16OutputSizes(mCameraCharacteristics);
- mCaptureRawIsDng = true;
- } else {
- throw new ItsException("Unsupported format: " + sformat);
- }
- // If the size is omitted, then default to the largest allowed size for the
- // format.
- widths[i] = surfaceObj.optInt("width");
- heights[i] = surfaceObj.optInt("height");
- if (widths[i] <= 0) {
- if (sizes == null || sizes.length == 0) {
- throw new ItsException(String.format(
- "Zero stream configs available for requested format: %s",
- sformat));
- }
- widths[i] = sizes[0].getWidth();
- }
- if (heights[i] <= 0) {
- heights[i] = sizes[0].getHeight();
- }
- }
- } else {
- // No surface(s) specified at all.
- // Default: a single output surface which is full-res YUV.
- Size sizes[] =
- ItsUtils.getYuvOutputSizes(mCameraCharacteristics);
- numSurfaces = 1;
- widths[0] = sizes[0].getWidth();
- heights[0] = sizes[0].getHeight();
- formats[0] = ImageFormat.YUV_420_888;
- }
- prepareCaptureReader(widths, heights, formats, numSurfaces);
+ prepareImageReadersWithOutputSpecs(jsonOutputSpecs, /*inputSize*/null,
+ /*inputFormat*/0, /*maxInputBuffers*/0);
+ numSurfaces = mOutputImageReaders.length;
+
List<Surface> outputSurfaces = new ArrayList<Surface>(numSurfaces);
for (int i = 0; i < numSurfaces; i++) {
- outputSurfaces.add(mCaptureReaders[i].getSurface());
+ outputSurfaces.add(mOutputImageReaders[i].getSurface());
}
BlockingSessionCallback sessionListener = new BlockingSessionCallback();
mCamera.createCaptureSession(outputSurfaces, sessionListener, mCameraHandler);
@@ -1085,7 +1137,8 @@
for (int i = 0; i < numSurfaces; i++) {
ImageReader.OnImageAvailableListener readerListener =
createAvailableListener(mCaptureCallback);
- mCaptureReaders[i].setOnImageAvailableListener(readerListener,mSaveHandlers[i]);
+ mOutputImageReaders[i].setOnImageAvailableListener(readerListener,
+ mSaveHandlers[i]);
}
// Plan for how many callbacks need to be received throughout the duration of this
@@ -1096,8 +1149,6 @@
} catch (CameraAccessException e) {
throw new ItsException("Error configuring outputs", e);
- } catch (org.json.JSONException e) {
- throw new ItsException("JSON error", e);
}
// Initiate the captures.
@@ -1109,7 +1160,7 @@
CaptureRequest.Builder req = requests.get(i);
for (int j = 0; j < numSurfaces; j++) {
- req.addTarget(mCaptureReaders[j].getSurface());
+ req.addTarget(mOutputImageReaders[j].getSurface());
}
mSession.capture(req.build(), mCaptureResultListener, mResultHandler);
}
@@ -1135,6 +1186,181 @@
}
}
+ /**
+ * Perform reprocess captures.
+ *
+     * It takes captureRequests in a JSON object and performs capture requests in two steps:
+     * a regular capture request to get the reprocess input, and a reprocess capture request
+     * to get the reprocess outputs.
+ *
+ * Regular capture requests:
+ * 1. For each capture request in the JSON object, create a full-size capture request with
+ * the settings in the JSON object.
+ * 2. Remember and clear noise reduction, edge enhancement, and effective exposure factor
+ * from the regular capture requests. (Those settings will be used for reprocess requests.)
+ * 3. Submit the regular capture requests.
+ *
+ * Reprocess capture requests:
+ * 4. Wait for the regular capture results and use them to create reprocess capture requests.
+ * 5. Wait for the regular capture output images and queue them to the image writer.
+ * 6. Set the noise reduction, edge enhancement, and effective exposure factor from #2.
+ * 7. Submit the reprocess capture requests.
+ *
+ * The output images and results for the regular capture requests won't be written to socket.
+ * The output images and results for the reprocess capture requests will be written to socket.
+ */
+ private void doReprocessCapture(JSONObject params) throws ItsException {
+ ImageWriter imageWriter = null;
+ ArrayList<Integer> noiseReductionModes = new ArrayList<>();
+ ArrayList<Integer> edgeModes = new ArrayList<>();
+ ArrayList<Float> effectiveExposureFactors = new ArrayList<>();
+
+ mCountRawOrDng.set(0);
+ mCountJpg.set(0);
+ mCountYuv.set(0);
+ mCountRaw10.set(0);
+ mCountRaw12.set(0);
+ mCountCapRes.set(0);
+ mCaptureRawIsDng = false;
+
+ try {
+ // Parse the JSON to get the list of capture requests.
+ List<CaptureRequest.Builder> inputRequests =
+ ItsSerializer.deserializeRequestList(mCamera, params);
+
+ // Prepare the image readers for reprocess input and reprocess outputs.
+ int inputFormat = getReprocessInputFormat(params);
+ Size inputSize = ItsUtils.getMaxOutputSize(mCameraCharacteristics, inputFormat);
+ JSONArray jsonOutputSpecs = ItsUtils.getOutputSpecs(params);
+ prepareImageReadersWithOutputSpecs(jsonOutputSpecs, inputSize, inputFormat,
+ inputRequests.size());
+
+ // Prepare a reprocessable session.
+ int numOutputSurfaces = mOutputImageReaders.length;
+ InputConfiguration inputConfig = new InputConfiguration(inputSize.getWidth(),
+ inputSize.getHeight(), inputFormat);
+ List<Surface> outputSurfaces = new ArrayList<Surface>();
+ boolean addSurfaceForInput = true;
+ for (int i = 0; i < numOutputSurfaces; i++) {
+ outputSurfaces.add(mOutputImageReaders[i].getSurface());
+ if (mOutputImageReaders[i] == mInputImageReader) {
+ // If input and one of the outputs share the same image reader, avoid
+ // adding the same surfaces twice.
+ addSurfaceForInput = false;
+ }
+ }
+
+ if (addSurfaceForInput) {
+ // Besides the output surfaces specified in JSON object, add an additional one
+ // for reprocess input.
+ outputSurfaces.add(mInputImageReader.getSurface());
+ }
+
+ BlockingSessionCallback sessionListener = new BlockingSessionCallback();
+ mCamera.createReprocessableCaptureSession(inputConfig, outputSurfaces, sessionListener,
+ mCameraHandler);
+ mSession = sessionListener.waitAndGetSession(TIMEOUT_IDLE_MS);
+
+ // Create an image writer for reprocess input.
+ Surface inputSurface = mSession.getInputSurface();
+ imageWriter = ImageWriter.newInstance(inputSurface, inputRequests.size());
+
+ // Set up input reader listener and capture callback listener to get
+ // reprocess input buffers and the results in order to create reprocess capture
+ // requests.
+ ImageReaderListenerWaiter inputReaderListener = new ImageReaderListenerWaiter();
+ mInputImageReader.setOnImageAvailableListener(inputReaderListener, mSaveHandlers[0]);
+
+ CaptureCallbackWaiter captureCallbackWaiter = new CaptureCallbackWaiter();
+ // Prepare the reprocess input request
+            for (CaptureRequest.Builder inputRequest : inputRequests) {
+                // Remember and clear noise reduction, edge enhancement, and effective exposure
+                // factors.
+                noiseReductionModes.add(inputRequest.get(CaptureRequest.NOISE_REDUCTION_MODE));
+                edgeModes.add(inputRequest.get(CaptureRequest.EDGE_MODE));
+                effectiveExposureFactors.add(inputRequest.get(
+                        CaptureRequest.REPROCESS_EFFECTIVE_EXPOSURE_FACTOR));
+
+                inputRequest.set(CaptureRequest.NOISE_REDUCTION_MODE,
+                        CaptureRequest.NOISE_REDUCTION_MODE_OFF);
+                inputRequest.set(CaptureRequest.EDGE_MODE, CaptureRequest.EDGE_MODE_OFF);
+                inputRequest.set(CaptureRequest.REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, null);
+                inputRequest.addTarget(mInputImageReader.getSurface());
+                mSession.capture(inputRequest.build(), captureCallbackWaiter, mResultHandler);
+            }
+
+ // Wait for reprocess input images
+ ArrayList<CaptureRequest.Builder> reprocessOutputRequests = new ArrayList<>();
+ for (int i = 0; i < inputRequests.size(); i++) {
+ TotalCaptureResult result =
+ captureCallbackWaiter.getResult(TIMEOUT_CALLBACK * 1000);
+ reprocessOutputRequests.add(mCamera.createReprocessCaptureRequest(result));
+ imageWriter.queueInputImage(inputReaderListener.getImage(TIMEOUT_CALLBACK * 1000));
+ }
+
+ // Start performing reprocess captures.
+
+ mCaptureResults = new CaptureResult[inputRequests.size()];
+
+ // Prepare reprocess capture requests.
+ for (int i = 0; i < numOutputSurfaces; i++) {
+ ImageReader.OnImageAvailableListener outputReaderListener =
+ createAvailableListener(mCaptureCallback);
+ mOutputImageReaders[i].setOnImageAvailableListener(outputReaderListener,
+ mSaveHandlers[i]);
+ }
+
+ // Initiate the captures.
+ for (int i = 0; i < reprocessOutputRequests.size(); i++) {
+ CaptureRequest.Builder req = reprocessOutputRequests.get(i);
+ for (ImageReader outputImageReader : mOutputImageReaders) {
+ req.addTarget(outputImageReader.getSurface());
+ }
+
+ req.set(CaptureRequest.NOISE_REDUCTION_MODE, noiseReductionModes.get(i));
+ req.set(CaptureRequest.EDGE_MODE, edgeModes.get(i));
+ req.set(CaptureRequest.REPROCESS_EFFECTIVE_EXPOSURE_FACTOR,
+ effectiveExposureFactors.get(i));
+
+ mSession.capture(req.build(), mCaptureResultListener, mResultHandler);
+ }
+
+ // Plan for how many callbacks need to be received throughout the duration of this
+ // sequence of capture requests. There is one callback per image surface, and one
+ // callback for the CaptureResult, for each capture.
+ int numCaptures = reprocessOutputRequests.size();
+ mCountCallbacksRemaining.set(numCaptures * (numOutputSurfaces + 1));
+
+ // Make sure all callbacks have been hit (wait until captures are done).
+ // If no timeouts are received after a timeout, then fail.
+ int currentCount = mCountCallbacksRemaining.get();
+ while (currentCount > 0) {
+ try {
+ Thread.sleep(TIMEOUT_CALLBACK*1000);
+ } catch (InterruptedException e) {
+ throw new ItsException("Timeout failure", e);
+ }
+ int newCount = mCountCallbacksRemaining.get();
+ if (newCount == currentCount) {
+ throw new ItsException(
+ "No callback received within timeout");
+ }
+ currentCount = newCount;
+ }
+ } catch (android.hardware.camera2.CameraAccessException e) {
+ throw new ItsException("Access error: ", e);
+ } finally {
+ closeImageReaders();
+ if (mSession != null) {
+ mSession.close();
+ mSession = null;
+ }
+ if (imageWriter != null) {
+ imageWriter.close();
+ }
+ }
+ }
+
@Override
public final void onSensorChanged(SensorEvent event) {
synchronized(mEventLock) {
@@ -1364,7 +1590,7 @@
int count = mCountCapRes.getAndIncrement();
mCaptureResults[count] = result;
mSocketRunnableObj.sendResponseCaptureResult(mCameraCharacteristics,
- request, result, mCaptureReaders);
+ request, result, mOutputImageReaders);
mCountCallbacksRemaining.decrementAndGet();
}
} catch (ItsException e) {
@@ -1380,4 +1606,93 @@
Logt.e(TAG, "Script error: capture failed");
}
};
+
+ private class CaptureCallbackWaiter extends CameraCaptureSession.CaptureCallback {
+ private final LinkedBlockingQueue<TotalCaptureResult> mResultQueue =
+ new LinkedBlockingQueue<>();
+
+ @Override
+ public void onCaptureStarted(CameraCaptureSession session, CaptureRequest request,
+ long timestamp, long frameNumber) {
+ }
+
+ @Override
+ public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request,
+ TotalCaptureResult result) {
+            try {
+                mResultQueue.put(result);
+            } catch (InterruptedException e) {
+                throw new UnsupportedOperationException(
+                        "Can't handle InterruptedException in onCaptureCompleted");
+            }
+ }
+
+ @Override
+ public void onCaptureFailed(CameraCaptureSession session, CaptureRequest request,
+ CaptureFailure failure) {
+ Logt.e(TAG, "Script error: capture failed");
+ }
+
+ public TotalCaptureResult getResult(long timeoutMs) throws ItsException {
+ TotalCaptureResult result;
+ try {
+ result = mResultQueue.poll(timeoutMs, TimeUnit.MILLISECONDS);
+ } catch (InterruptedException e) {
+ throw new ItsException(e);
+ }
+
+            if (result == null) {
+                throw new ItsException("Getting a capture result timed out after " + timeoutMs +
+                        "ms");
+            }
+
+ return result;
+ }
+ }
+
+ private static class ImageReaderListenerWaiter implements ImageReader.OnImageAvailableListener {
+ private final LinkedBlockingQueue<Image> mImageQueue = new LinkedBlockingQueue<>();
+
+ @Override
+ public void onImageAvailable(ImageReader reader) {
+ try {
+ mImageQueue.put(reader.acquireNextImage());
+ } catch (InterruptedException e) {
+ throw new UnsupportedOperationException(
+ "Can't handle InterruptedException in onImageAvailable");
+ }
+ }
+
+ public Image getImage(long timeoutMs) throws ItsException {
+ Image image;
+ try {
+ image = mImageQueue.poll(timeoutMs, TimeUnit.MILLISECONDS);
+ } catch (InterruptedException e) {
+ throw new ItsException(e);
+ }
+
+ if (image == null) {
+ throw new ItsException("Getting an image timed out after " + timeoutMs +
+ "ms");
+ }
+ return image;
+ }
+ }
+
+ private int getReprocessInputFormat(JSONObject params) throws ItsException {
+ String reprocessFormat;
+ try {
+ reprocessFormat = params.getString("reprocessFormat");
+ } catch (org.json.JSONException e) {
+ throw new ItsException("Error parsing reprocess format: " + e);
+ }
+
+ if (reprocessFormat.equals("yuv")) {
+ return ImageFormat.YUV_420_888;
+ } else if (reprocessFormat.equals("private")) {
+ return ImageFormat.PRIVATE;
+ }
+
+        throw new ItsException("Unknown reprocess format: " + reprocessFormat);
+ }
}
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/camera/its/ItsUtils.java b/apps/CtsVerifier/src/com/android/cts/verifier/camera/its/ItsUtils.java
index b09b90c..fddee4d 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/camera/its/ItsUtils.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/camera/its/ItsUtils.java
@@ -116,6 +116,11 @@
return getOutputSizes(ccs, ImageFormat.YUV_420_888);
}
+ public static Size getMaxOutputSize(CameraCharacteristics ccs, int format)
+ throws ItsException {
+ return getMaxSize(getOutputSizes(ccs, format));
+ }
+
private static Size[] getOutputSizes(CameraCharacteristics ccs, int format)
throws ItsException {
StreamConfigurationMap configMap = ccs.get(
@@ -126,6 +131,22 @@
return configMap.getOutputSizes(format);
}
+ private static Size getMaxSize(Size[] sizes) {
+ if (sizes == null || sizes.length == 0) {
+ throw new IllegalArgumentException("sizes was empty");
+ }
+
+ Size maxSize = sizes[0];
+ for (int i = 1; i < sizes.length; i++) {
+ if (sizes[i].getWidth() * sizes[i].getHeight() >
+ maxSize.getWidth() * maxSize.getHeight()) {
+ maxSize = sizes[i];
+ }
+ }
+
+ return maxSize;
+ }
+
public static byte[] getDataFromImage(Image image)
throws ItsException {
int format = image.getFormat();