/*
* Copyright 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package android.hardware.camera2;
import android.annotation.IntRange;
import android.annotation.NonNull;
import android.graphics.Bitmap;
import android.graphics.Color;
import android.graphics.ImageFormat;
import android.hardware.camera2.impl.CameraMetadataNative;
import android.location.Location;
import android.media.ExifInterface;
import android.media.Image;
import android.os.SystemClock;
import android.util.Log;
import android.util.Size;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Locale;
import java.util.TimeZone;
/**
* The {@link DngCreator} class provides functions to write raw pixel data as a DNG file.
*
* <p>
* This class is designed to be used with the {@link android.graphics.ImageFormat#RAW_SENSOR}
* buffers available from {@link android.hardware.camera2.CameraDevice}, or with Bayer-type raw
* pixel data that is otherwise generated by an application. The DNG metadata tags will be
* generated from a {@link android.hardware.camera2.CaptureResult} object or set directly.
* </p>
*
* <p>
* The DNG file format is a cross-platform file format that is used to store pixel data from
* camera sensors with minimal pre-processing applied. DNG files allow for pixel data to be
* defined in a user-defined colorspace, and have associated metadata that allow for this
* pixel data to be converted to the standard CIE XYZ colorspace during post-processing.
* </p>
*
* <p>
* For more information on the DNG file format and associated metadata, please refer to the
* <a href=
* "https://wwwimages2.adobe.com/content/dam/Adobe/en/products/photoshop/pdfs/dng_spec_1.4.0.0.pdf">
* Adobe DNG 1.4.0.0 specification</a>.
* </p>
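*
* <p>
* A minimal usage sketch, assuming an application has already captured a
* {@link android.graphics.ImageFormat#RAW_SENSOR} {@link android.media.Image} ({@code rawImage})
* together with its {@link CaptureResult} ({@code result}), the camera's
* {@link CameraCharacteristics} ({@code characteristics}), and a destination
* {@link java.io.File} ({@code dngFile}); all of these names are illustrative:
* </p>
* <pre>{@code
* try (DngCreator dngCreator = new DngCreator(characteristics, result);
*      OutputStream output = new FileOutputStream(dngFile)) {
*     dngCreator.writeImage(output, rawImage);
* }
* }</pre>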
*/
public final class DngCreator implements AutoCloseable {
private static final String TAG = "DngCreator";
/**
* Create a new DNG object.
*
* <p>
* It is not necessary to call any set methods to write a well-formatted DNG file.
* </p>
* <p>
* DNG metadata tags will be generated from the corresponding parameters in the
* {@link android.hardware.camera2.CaptureResult} object.
* </p>
* <p>
* For best quality DNG files, it is strongly recommended that lens shading map output is
* enabled if supported. See {@link CaptureRequest#STATISTICS_LENS_SHADING_MAP_MODE}.
* </p>
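* <p>
* For example, lens shading map output could be requested on the {@link CaptureRequest} used
* for the raw capture; this is a sketch in which {@code builder} is an assumed
* {@link CaptureRequest.Builder} for that capture:
* </p>
* <pre>{@code
* builder.set(CaptureRequest.STATISTICS_LENS_SHADING_MAP_MODE,
*         CameraMetadata.STATISTICS_LENS_SHADING_MAP_MODE_ON);
* }</pre>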
* @param characteristics an object containing the static
* {@link android.hardware.camera2.CameraCharacteristics}.
* @param metadata a metadata object to generate tags from.
*/
public DngCreator(@NonNull CameraCharacteristics characteristics,
@NonNull CaptureResult metadata) {
if (characteristics == null || metadata == null) {
throw new IllegalArgumentException("Null argument to DngCreator constructor");
}
// Find current time in milliseconds since 1970
long currentTime = System.currentTimeMillis();
// Assume that sensor timestamp has that timebase to start
long timeOffset = 0;
int timestampSource = characteristics.get(
CameraCharacteristics.SENSOR_INFO_TIMESTAMP_SOURCE);
if (timestampSource == CameraCharacteristics.SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME) {
// This means the same timebase as SystemClock.elapsedRealtime(),
// which is CLOCK_BOOTTIME
timeOffset = currentTime - SystemClock.elapsedRealtime();
} else if (timestampSource == CameraCharacteristics.SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN) {
// This means the same timebase as SystemClock.uptimeMillis(),
// which is CLOCK_MONOTONIC
timeOffset = currentTime - SystemClock.uptimeMillis();
} else {
// Unexpected time source - treat as CLOCK_MONOTONIC
Log.w(TAG, "Sensor timestamp source is unexpected: " + timestampSource);
timeOffset = currentTime - SystemClock.uptimeMillis();
}
// Find the capture time in milliseconds since epoch (the sensor timestamp is in nanoseconds)
Long timestamp = metadata.get(CaptureResult.SENSOR_TIMESTAMP);
long captureTime = currentTime;
if (timestamp != null) {
captureTime = timestamp / 1000000 + timeOffset;
}
// Create this fresh each time since the time zone may change while a long-running application
// is active.
final DateFormat dateTimeStampFormat =
new SimpleDateFormat(TIFF_DATETIME_FORMAT, Locale.US);
dateTimeStampFormat.setTimeZone(TimeZone.getDefault());
// Format for metadata
String formattedCaptureTime = dateTimeStampFormat.format(captureTime);
nativeInit(characteristics.getNativeCopy(), metadata.getNativeCopy(),
formattedCaptureTime);
}
/**
* Set the orientation value to write.
*
* <p>
* This will be written as the TIFF "Orientation" tag {@code (0x0112)}.
* Calling this will override any prior settings for this tag.
* </p>
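* <p>
* As a sketch, a hypothetical {@code rotationDegrees} value (the clockwise rotation needed to
* display the stored image upright) could be mapped to one of the constants listed below:
* </p>
* <pre>{@code
* int exifOrientation;
* switch (rotationDegrees) {
*     case 90:  exifOrientation = ExifInterface.ORIENTATION_ROTATE_90;  break;
*     case 180: exifOrientation = ExifInterface.ORIENTATION_ROTATE_180; break;
*     case 270: exifOrientation = ExifInterface.ORIENTATION_ROTATE_270; break;
*     default:  exifOrientation = ExifInterface.ORIENTATION_NORMAL;     break;
* }
* dngCreator.setOrientation(exifOrientation);
* }</pre>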
*
* @param orientation the orientation value to set, one of:
* <ul>
* <li>{@link android.media.ExifInterface#ORIENTATION_NORMAL}</li>
* <li>{@link android.media.ExifInterface#ORIENTATION_FLIP_HORIZONTAL}</li>
* <li>{@link android.media.ExifInterface#ORIENTATION_ROTATE_180}</li>
* <li>{@link android.media.ExifInterface#ORIENTATION_FLIP_VERTICAL}</li>
* <li>{@link android.media.ExifInterface#ORIENTATION_TRANSPOSE}</li>
* <li>{@link android.media.ExifInterface#ORIENTATION_ROTATE_90}</li>
* <li>{@link android.media.ExifInterface#ORIENTATION_TRANSVERSE}</li>
* <li>{@link android.media.ExifInterface#ORIENTATION_ROTATE_270}</li>
* </ul>
* @return this {@link #DngCreator} object.
*/
@NonNull
public DngCreator setOrientation(int orientation) {
if (orientation < ExifInterface.ORIENTATION_UNDEFINED ||
orientation > ExifInterface.ORIENTATION_ROTATE_270) {
throw new IllegalArgumentException("Orientation " + orientation +
" is not a valid EXIF orientation value");
}
// ExifInterface and TIFF/EP spec differ on definition of
// "Unknown" orientation; other values map directly
if (orientation == ExifInterface.ORIENTATION_UNDEFINED) {
orientation = TAG_ORIENTATION_UNKNOWN;
}
nativeSetOrientation(orientation);
return this;
}
/**
* Set the thumbnail image.
*
* <p>
* Pixel data will be converted to a Baseline TIFF RGB image, with 8 bits per color channel.
* The alpha channel will be discarded. Thumbnail images with a dimension larger than
* {@link #MAX_THUMBNAIL_DIMENSION} will be rejected.
* </p>
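* <p>
* For example, a full-resolution {@link android.graphics.Bitmap} (an assumed
* {@code fullBitmap}) could be scaled down to fit before being set as the thumbnail; a sketch:
* </p>
* <pre>{@code
* float scale = Math.min(1.0f, (float) DngCreator.MAX_THUMBNAIL_DIMENSION /
*         Math.max(fullBitmap.getWidth(), fullBitmap.getHeight()));
* Bitmap thumbnail = Bitmap.createScaledBitmap(fullBitmap,
*         Math.max(1, Math.round(scale * fullBitmap.getWidth())),
*         Math.max(1, Math.round(scale * fullBitmap.getHeight())),
*         true); // filter = true for better downscaling quality
* dngCreator.setThumbnail(thumbnail);
* }</pre>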
*
* @param pixels a {@link android.graphics.Bitmap} of pixel data.
* @return this {@link #DngCreator} object.
* @throws java.lang.IllegalArgumentException if the given thumbnail image has a dimension
* larger than {@link #MAX_THUMBNAIL_DIMENSION}.
*/
@NonNull
public DngCreator setThumbnail(@NonNull Bitmap pixels) {
if (pixels == null) {
throw new IllegalArgumentException("Null argument to setThumbnail");
}
int width = pixels.getWidth();
int height = pixels.getHeight();
if (width > MAX_THUMBNAIL_DIMENSION || height > MAX_THUMBNAIL_DIMENSION) {
throw new IllegalArgumentException("Thumbnail dimensions width,height (" + width +
"," + height + ") too large, dimensions must be smaller than " +
MAX_THUMBNAIL_DIMENSION);
}
ByteBuffer rgbBuffer = convertToRGB(pixels);
nativeSetThumbnail(rgbBuffer, width, height);
return this;
}
/**
* Set the thumbnail image.
*
* <p>
* Pixel data is interpreted as a {@link android.graphics.ImageFormat#YUV_420_888} image.
* Thumbnail images with a dimension larger than {@link #MAX_THUMBNAIL_DIMENSION} will be
* rejected.
* </p>
*
* @param pixels an {@link android.media.Image} object with the format
* {@link android.graphics.ImageFormat#YUV_420_888}.
* @return this {@link #DngCreator} object.
* @throws java.lang.IllegalArgumentException if the given thumbnail image has a dimension
* larger than {@link #MAX_THUMBNAIL_DIMENSION}.
*/
@NonNull
public DngCreator setThumbnail(@NonNull Image pixels) {
if (pixels == null) {
throw new IllegalArgumentException("Null argument to setThumbnail");
}
int format = pixels.getFormat();
if (format != ImageFormat.YUV_420_888) {
throw new IllegalArgumentException("Unsupported Image format " + format);
}
int width = pixels.getWidth();
int height = pixels.getHeight();
if (width > MAX_THUMBNAIL_DIMENSION || height > MAX_THUMBNAIL_DIMENSION) {
throw new IllegalArgumentException("Thumbnail dimensions width,height (" + width +
"," + height + ") too large, dimensions must be smaller than " +
MAX_THUMBNAIL_DIMENSION);
}
ByteBuffer rgbBuffer = convertToRGB(pixels);
nativeSetThumbnail(rgbBuffer, width, height);
return this;
}
/**
* Set image location metadata.
*
* <p>
* The given location object must contain at least a valid time, latitude, and longitude
* (equivalent to the values returned by {@link android.location.Location#getTime()},
* {@link android.location.Location#getLatitude()}, and
* {@link android.location.Location#getLongitude()} methods).
* </p>
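* <p>
* For example, a last known GPS fix could be attached directly; a sketch in which
* {@code locationManager} is an assumed {@link android.location.LocationManager} instance and
* permission handling is omitted:
* </p>
* <pre>{@code
* Location location = locationManager.getLastKnownLocation(LocationManager.GPS_PROVIDER);
* if (location != null) {
*     dngCreator.setLocation(location);
* }
* }</pre>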
*
* @param location an {@link android.location.Location} object to set.
* @return this {@link #DngCreator} object.
*
* @throws java.lang.IllegalArgumentException if the given location object doesn't
* contain enough information to set location metadata.
*/
@NonNull
public DngCreator setLocation(@NonNull Location location) {
if (location == null) {
throw new IllegalArgumentException("Null location passed to setLocation");
}
double latitude = location.getLatitude();
double longitude = location.getLongitude();
long time = location.getTime();
int[] latTag = toExifLatLong(latitude);
int[] longTag = toExifLatLong(longitude);
String latRef = latitude >= 0 ? GPS_LAT_REF_NORTH : GPS_LAT_REF_SOUTH;
String longRef = longitude >= 0 ? GPS_LONG_REF_EAST : GPS_LONG_REF_WEST;
String dateTag = sExifGPSDateStamp.format(time);
mGPSTimeStampCalendar.setTimeInMillis(time);
int[] timeTag = new int[] { mGPSTimeStampCalendar.get(Calendar.HOUR_OF_DAY), 1,
mGPSTimeStampCalendar.get(Calendar.MINUTE), 1,
mGPSTimeStampCalendar.get(Calendar.SECOND), 1 };
nativeSetGpsTags(latTag, latRef, longTag, longRef, dateTag, timeTag);
return this;
}
/**
* Set the user description string to write.
*
* <p>
* This is equivalent to setting the TIFF "ImageDescription" tag {@code (0x010E)}.
* </p>
*
* @param description the user description string.
* @return this {@link #DngCreator} object.
*/
@NonNull
public DngCreator setDescription(@NonNull String description) {
if (description == null) {
throw new IllegalArgumentException("Null description passed to setDescription.");
}
nativeSetDescription(description);
return this;
}
/**
* Write the {@link android.graphics.ImageFormat#RAW_SENSOR} pixel data to a DNG file with
* the currently configured metadata.
*
* <p>
* Raw pixel data must have 16 bits per pixel, and the input must contain at least
* {@code offset + 2 * width * height} bytes. The width and height of
* the input are taken from the width and height set in the {@link DngCreator} metadata tags,
* and will typically be equal to the width and height of
* {@link CameraCharacteristics#SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE}. Prior to
* API level 23, this was always the same as
* {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE}.
* The pixel layout in the input is determined from the reported color filter arrangement (CFA)
* set in {@link CameraCharacteristics#SENSOR_INFO_COLOR_FILTER_ARRANGEMENT}. If insufficient
* metadata is available to write a well-formatted DNG file, an
* {@link java.lang.IllegalStateException} will be thrown.
* </p>
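* <p>
* For example, the expected dimensions can be taken from the pre-correction active array size;
* a sketch in which {@code rawInput}, {@code dngOutput}, and {@code characteristics} are
* assumed to already exist:
* </p>
* <pre>{@code
* Rect activeArray = characteristics.get(
*         CameraCharacteristics.SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE);
* Size rawSize = new Size(activeArray.width(), activeArray.height());
* dngCreator.writeInputStream(dngOutput, rawSize, rawInput, 0); // no leading offset
* }</pre>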
*
* @param dngOutput an {@link java.io.OutputStream} to write the DNG file to.
* @param size the {@link Size} of the image to write, in pixels.
* @param pixels an {@link java.io.InputStream} of pixel data to write.
* @param offset the offset of the raw image in bytes. This indicates how many bytes will
* be skipped in the input before any pixel data is read.
*
* @throws IOException if an error was encountered in the input or output stream.
* @throws java.lang.IllegalStateException if not enough metadata information has been
* set to write a well-formatted DNG file.
* @throws java.lang.IllegalArgumentException if the size passed in does not match the
*                                            width and height of the raw image, or if the
*                                            size or offset is otherwise invalid.
*/
public void writeInputStream(@NonNull OutputStream dngOutput, @NonNull Size size,
@NonNull InputStream pixels, @IntRange(from=0) long offset) throws IOException {
if (dngOutput == null) {
throw new IllegalArgumentException("Null dngOutput passed to writeInputStream");
} else if (size == null) {
throw new IllegalArgumentException("Null size passed to writeInputStream");
} else if (pixels == null) {
throw new IllegalArgumentException("Null pixels passed to writeInputStream");
} else if (offset < 0) {
throw new IllegalArgumentException("Negative offset passed to writeInputStream");
}
int width = size.getWidth();
int height = size.getHeight();
if (width <= 0 || height <= 0) {
throw new IllegalArgumentException("Size with invalid width, height: (" + width + "," +
height + ") passed to writeInputStream");
}
nativeWriteInputStream(dngOutput, pixels, width, height, offset);
}
/**
* Write the {@link android.graphics.ImageFormat#RAW_SENSOR} pixel data to a DNG file with
* the currently configured metadata.
*
* <p>
* Raw pixel data must have 16 bits per pixel, and the input must contain at least
* {@code offset + 2 * width * height} bytes. The width and height of
* the input are taken from the width and height set in the {@link DngCreator} metadata tags,
* and will typically be equal to the width and height of
* {@link CameraCharacteristics#SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE}. Prior to
* API level 23, this was always the same as
* {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE}.
* The pixel layout in the input is determined from the reported color filter arrangement (CFA)
* set in {@link CameraCharacteristics#SENSOR_INFO_COLOR_FILTER_ARRANGEMENT}. If insufficient
* metadata is available to write a well-formatted DNG file, an
* {@link java.lang.IllegalStateException} will be thrown.
* </p>
*
* <p>
* Any mark or limit set on the given {@link java.nio.ByteBuffer} is ignored, and will be
* cleared by this method.
* </p>
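* <p>
* For example, a buffer holding a full raw frame must contain at least
* {@code 2 * width * height} bytes; a sketch in which {@code rawSize} and {@code dngOutput}
* are assumed to already exist:
* </p>
* <pre>{@code
* ByteBuffer rawBuffer = ByteBuffer.allocateDirect(
*         2 * rawSize.getWidth() * rawSize.getHeight()); // 16 bits per pixel
* // ... fill rawBuffer with RAW_SENSOR pixel data ...
* dngCreator.writeByteBuffer(dngOutput, rawSize, rawBuffer, 0); // no leading offset
* }</pre>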
*
* @param dngOutput an {@link java.io.OutputStream} to write the DNG file to.
* @param size the {@link Size} of the image to write, in pixels.
* @param pixels an {@link java.nio.ByteBuffer} of pixel data to write.
* @param offset the offset of the raw image in bytes. This indicates how many bytes will
* be skipped in the input before any pixel data is read.
*
* @throws IOException if an error was encountered in the input or output stream.
* @throws java.lang.IllegalStateException if not enough metadata information has been
* set to write a well-formatted DNG file.
*/
public void writeByteBuffer(@NonNull OutputStream dngOutput, @NonNull Size size,
@NonNull ByteBuffer pixels, @IntRange(from=0) long offset)
throws IOException {
if (dngOutput == null) {
throw new IllegalArgumentException("Null dngOutput passed to writeByteBuffer");
} else if (size == null) {
throw new IllegalArgumentException("Null size passed to writeByteBuffer");
} else if (pixels == null) {
throw new IllegalArgumentException("Null pixels passed to writeByteBuffer");
} else if (offset < 0) {
throw new IllegalArgumentException("Negative offset passed to writeByteBuffer");
}
int width = size.getWidth();
int height = size.getHeight();
writeByteBuffer(width, height, pixels, dngOutput, DEFAULT_PIXEL_STRIDE,
width * DEFAULT_PIXEL_STRIDE, offset);
}
/**
* Write the pixel data to a DNG file with the currently configured metadata.
*
* <p>
* For this method to succeed, the {@link android.media.Image} input must contain
* {@link android.graphics.ImageFormat#RAW_SENSOR} pixel data, otherwise an
* {@link java.lang.IllegalArgumentException} will be thrown.
* </p>
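* <p>
* For example, an {@link android.media.ImageReader} configured for
* {@link android.graphics.ImageFormat#RAW_SENSOR} output can supply the image; a sketch in
* which {@code rawSize}, {@code dngOutput}, and {@code dngCreator} are assumed to already
* exist:
* </p>
* <pre>{@code
* ImageReader reader = ImageReader.newInstance(rawSize.getWidth(), rawSize.getHeight(),
*         ImageFormat.RAW_SENSOR, 1); // maxImages = 1
* // ... add reader.getSurface() to the capture session and capture one frame ...
* try (Image rawImage = reader.acquireNextImage()) {
*     dngCreator.writeImage(dngOutput, rawImage);
* }
* }</pre>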
*
* @param dngOutput an {@link java.io.OutputStream} to write the DNG file to.
* @param pixels an {@link android.media.Image} to write.
*
* @throws java.io.IOException if an error was encountered in the output stream.
* @throws java.lang.IllegalArgumentException if an image with an unsupported format was used.
* @throws java.lang.IllegalStateException if not enough metadata information has been
* set to write a well-formatted DNG file.
*/
public void writeImage(@NonNull OutputStream dngOutput, @NonNull Image pixels)
throws IOException {
if (dngOutput == null) {
throw new IllegalArgumentException("Null dngOutput to writeImage");
} else if (pixels == null) {
throw new IllegalArgumentException("Null pixels to writeImage");
}
int format = pixels.getFormat();
if (format != ImageFormat.RAW_SENSOR) {
throw new IllegalArgumentException("Unsupported image format " + format);
}
Image.Plane[] planes = pixels.getPlanes();
if (planes == null || planes.length <= 0) {
throw new IllegalArgumentException("Image with no planes passed to writeImage");
}
ByteBuffer buf = planes[0].getBuffer();
writeByteBuffer(pixels.getWidth(), pixels.getHeight(), buf, dngOutput,
planes[0].getPixelStride(), planes[0].getRowStride(), 0);
}
@Override
public void close() {
nativeDestroy();
}
/**
* Max width or height dimension for thumbnails.
*/
public static final int MAX_THUMBNAIL_DIMENSION = 256; // max pixel dimension for TIFF/EP
@Override
protected void finalize() throws Throwable {
try {
close();
} finally {
super.finalize();
}
}
private static final String GPS_LAT_REF_NORTH = "N";
private static final String GPS_LAT_REF_SOUTH = "S";
private static final String GPS_LONG_REF_EAST = "E";
private static final String GPS_LONG_REF_WEST = "W";
private static final String GPS_DATE_FORMAT_STR = "yyyy:MM:dd";
private static final String TIFF_DATETIME_FORMAT = "yyyy:MM:dd HH:mm:ss";
private static final DateFormat sExifGPSDateStamp =
new SimpleDateFormat(GPS_DATE_FORMAT_STR, Locale.US);
private final Calendar mGPSTimeStampCalendar = Calendar
.getInstance(TimeZone.getTimeZone("UTC"));
static {
sExifGPSDateStamp.setTimeZone(TimeZone.getTimeZone("UTC"));
}
private static final int DEFAULT_PIXEL_STRIDE = 2; // bytes per sample
private static final int BYTES_PER_RGB_PIX = 3; // bytes per pixel
// TIFF tag values needed to map between public API and TIFF spec
private static final int TAG_ORIENTATION_UNKNOWN = 9;
/**
* Offset, rowStride, and pixelStride are given in bytes. Height and width are given in pixels.
*/
private void writeByteBuffer(int width, int height, ByteBuffer pixels, OutputStream dngOutput,
int pixelStride, int rowStride, long offset) throws IOException {
if (width <= 0 || height <= 0) {
throw new IllegalArgumentException("Image with invalid width, height: (" + width + "," +
height + ") passed to write");
}
long capacity = pixels.capacity();
long totalSize = ((long) rowStride) * height + offset;
if (capacity < totalSize) {
throw new IllegalArgumentException("Image size " + capacity +
" is too small (must be larger than " + totalSize + ")");
}
int minRowStride = pixelStride * width;
if (minRowStride > rowStride) {
throw new IllegalArgumentException("Invalid image pixel stride, row byte width " +
minRowStride + " is too large, expecting " + rowStride);
}
pixels.clear(); // Reset mark and limit
nativeWriteImage(dngOutput, width, height, pixels, rowStride, pixelStride, offset,
pixels.isDirect());
pixels.clear();
}
/**
* Convert a single YUV pixel to RGB.
*/
private static void yuvToRgb(byte[] yuvData, int outOffset, /*out*/byte[] rgbOut) {
final int COLOR_MAX = 255;
float y = yuvData[0] & 0xFF; // Y channel
float cb = yuvData[1] & 0xFF; // U channel
float cr = yuvData[2] & 0xFF; // V channel
// convert YUV -> RGB (from JFIF's "Conversion to and from RGB" section)
float r = y + 1.402f * (cr - 128);
float g = y - 0.34414f * (cb - 128) - 0.71414f * (cr - 128);
float b = y + 1.772f * (cb - 128);
// clamp to [0,255]
rgbOut[outOffset] = (byte) Math.max(0, Math.min(COLOR_MAX, r));
rgbOut[outOffset + 1] = (byte) Math.max(0, Math.min(COLOR_MAX, g));
rgbOut[outOffset + 2] = (byte) Math.max(0, Math.min(COLOR_MAX, b));
}
/**
* Convert a single {@link Color} pixel to RGB.
*/
private static void colorToRgb(int color, int outOffset, /*out*/byte[] rgbOut) {
rgbOut[outOffset] = (byte) Color.red(color);
rgbOut[outOffset + 1] = (byte) Color.green(color);
rgbOut[outOffset + 2] = (byte) Color.blue(color);
// Discards Alpha
}
/**
* Generate a direct RGB {@link ByteBuffer} from a YUV420_888 {@link Image}.
*/
private static ByteBuffer convertToRGB(Image yuvImage) {
// TODO: Optimize this with renderscript intrinsic.
int width = yuvImage.getWidth();
int height = yuvImage.getHeight();
ByteBuffer buf = ByteBuffer.allocateDirect(BYTES_PER_RGB_PIX * width * height);
Image.Plane yPlane = yuvImage.getPlanes()[0];
Image.Plane uPlane = yuvImage.getPlanes()[1];
Image.Plane vPlane = yuvImage.getPlanes()[2];
ByteBuffer yBuf = yPlane.getBuffer();
ByteBuffer uBuf = uPlane.getBuffer();
ByteBuffer vBuf = vPlane.getBuffer();
yBuf.rewind();
uBuf.rewind();
vBuf.rewind();
int yRowStride = yPlane.getRowStride();
int vRowStride = vPlane.getRowStride();
int uRowStride = uPlane.getRowStride();
int yPixStride = yPlane.getPixelStride();
int vPixStride = vPlane.getPixelStride();
int uPixStride = uPlane.getPixelStride();
byte[] yuvPixel = { 0, 0, 0 };
byte[] yFullRow = new byte[yPixStride * (width - 1) + 1];
byte[] uFullRow = new byte[uPixStride * (width / 2 - 1) + 1];
byte[] vFullRow = new byte[vPixStride * (width / 2 - 1) + 1];
byte[] finalRow = new byte[BYTES_PER_RGB_PIX * width];
for (int i = 0; i < height; i++) {
int halfH = i / 2;
yBuf.position(yRowStride * i);
yBuf.get(yFullRow);
uBuf.position(uRowStride * halfH);
uBuf.get(uFullRow);
vBuf.position(vRowStride * halfH);
vBuf.get(vFullRow);
for (int j = 0; j < width; j++) {
int halfW = j / 2;
yuvPixel[0] = yFullRow[yPixStride * j];
yuvPixel[1] = uFullRow[uPixStride * halfW];
yuvPixel[2] = vFullRow[vPixStride * halfW];
yuvToRgb(yuvPixel, j * BYTES_PER_RGB_PIX, /*out*/finalRow);
}
buf.put(finalRow);
}
yBuf.rewind();
uBuf.rewind();
vBuf.rewind();
buf.rewind();
return buf;
}
/**
* Generate a direct RGB {@link ByteBuffer} from a {@link Bitmap}.
*/
private static ByteBuffer convertToRGB(Bitmap argbBitmap) {
// TODO: Optimize this.
int width = argbBitmap.getWidth();
int height = argbBitmap.getHeight();
ByteBuffer buf = ByteBuffer.allocateDirect(BYTES_PER_RGB_PIX * width * height);
int[] pixelRow = new int[width];
byte[] finalRow = new byte[BYTES_PER_RGB_PIX * width];
for (int i = 0; i < height; i++) {
argbBitmap.getPixels(pixelRow, /*offset*/0, /*stride*/width, /*x*/0, /*y*/i,
/*width*/width, /*height*/1);
for (int j = 0; j < width; j++) {
colorToRgb(pixelRow[j], j * BYTES_PER_RGB_PIX, /*out*/finalRow);
}
buf.put(finalRow);
}
buf.rewind();
return buf;
}
/**
* Convert coordinate to EXIF GPS tag format.
*/
private static int[] toExifLatLong(double value) {
// convert to the format dd/1 mm/1 ssss/100
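// e.g. an input of 37.4230 becomes approximately {37, 1, 25, 1, 2280, 100},
// i.e. 37 degrees, 25 minutes, 22.80 seconds (illustrative values)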
value = Math.abs(value);
int degrees = (int) value;
value = (value - degrees) * 60;
int minutes = (int) value;
value = (value - minutes) * 6000;
int seconds = (int) value;
return new int[] { degrees, 1, minutes, 1, seconds, 100 };
}
/**
* This field is used by native code, do not access or modify.
*/
private long mNativeContext;
private static native void nativeClassInit();
private synchronized native void nativeInit(CameraMetadataNative nativeCharacteristics,
CameraMetadataNative nativeResult,
String captureTime);
private synchronized native void nativeDestroy();
private synchronized native void nativeSetOrientation(int orientation);
private synchronized native void nativeSetDescription(String description);
private synchronized native void nativeSetGpsTags(int[] latTag, String latRef, int[] longTag,
String longRef, String dateTag,
int[] timeTag);
private synchronized native void nativeSetThumbnail(ByteBuffer buffer, int width, int height);
private synchronized native void nativeWriteImage(OutputStream out, int width, int height,
ByteBuffer rawBuffer, int rowStride,
int pixStride, long offset, boolean isDirect)
throws IOException;
private synchronized native void nativeWriteInputStream(OutputStream out, InputStream rawStream,
int width, int height, long offset)
throws IOException;
static {
nativeClassInit();
}
}