Camera2/3: Implement full-color NV21 output.

Needed for full CTS compliance; the existing grayscale-only output also
makes it impossible to debug color plane issues on the emulator.
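
For reference, the new path is a 6-bit fixed-point approximation of the
full-range JFIF RGB->YUV transform, with integer coefficients that are
approximately the JFIF weights scaled by 64. A rough floating-point
sketch of the per-pixel math (illustrative only; rgbToYuv and its
signature are not part of this change):

    #include <stdint.h>

    // Illustrative only: the transform the fixed-point code in
    // Sensor.cpp approximates, using full-range JFIF coefficients.
    static void rgbToYuv(uint8_t r, uint8_t g, uint8_t b,
                         uint8_t *y, uint8_t *u, uint8_t *v) {
        *y = (uint8_t)( 0.299f * r + 0.587f * g + 0.114f * b);
        *u = (uint8_t)(-0.169f * r - 0.331f * g + 0.500f * b + 128);
        *v = (uint8_t)( 0.500f * r - 0.419f * g - 0.081f * b + 128);
    }

Keeping the coefficients in fixed point avoids per-pixel floating-point
math in the sensor readout loop.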

Bug: 8949720
Change-Id: Iea9a1a8508b2c05c2fd81bb4d2c729986e1184af
diff --git a/camera/fake-pipeline2/Sensor.cpp b/camera/fake-pipeline2/Sensor.cpp
index 91fe9ab..aa54ef4 100644
--- a/camera/fake-pipeline2/Sensor.cpp
+++ b/camera/fake-pipeline2/Sensor.cpp
@@ -484,37 +484,63 @@
 
 void Sensor::captureNV21(uint8_t *img, uint32_t gain, uint32_t stride) {
     float totalGain = gain/100.0 * kBaseGainFactor;
+    // Using fixed-point math with 6 bits of fractional precision.
     // In fixed-point math, calculate total scaling from electrons to 8bpp
-    int scale64x = 64 * totalGain * 255 / kMaxRawValue;
+    const int scale64x = 64 * totalGain * 255 / kMaxRawValue;
+    // In fixed-point math, saturation point of sensor after gain
+    const int saturationPoint = 64 * 255;
+    // Fixed-point coefficients for the RGB->YUV conversion, based on the
+    // JFIF RGB->YUV transform. The Cb/Cr offset is scaled by 64x twice
+    // since it is applied post-multiply.
+    const int rgbToY[]  = {19, 37, 7};
+    const int rgbToCb[] = {-10,-21, 32, 524288};
+    const int rgbToCr[] = {32,-26, -5, 524288};
+    // Scale back to 8bpp non-fixed-point
+    const int scaleOut = 64;
+    const int scaleOutSq = scaleOut * scaleOut; // after multiplies
 
-    // TODO: Make full-color
     uint32_t inc = kResolution[0] / stride;
     uint32_t outH = kResolution[1] / inc;
-    for (unsigned int y = 0, outY = 0, outUV = outH;
-         y < kResolution[1]; y+=inc, outY++, outUV ) {
+    for (unsigned int y = 0, outY = 0;
+         y < kResolution[1]; y+=inc, outY++) {
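+        // NV21 layout: a full-resolution Y plane followed by a half-height
+        // plane of interleaved V/U samples; the VU row at buffer row
+        // (outH + outY / 2) is shared by two output Y rows.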
         uint8_t *pxY = img + outY * stride;
+        uint8_t *pxVU = img + (outH + outY / 2) * stride;
         mScene.setReadoutPixel(0,y);
-        for (unsigned int x = 0; x < kResolution[0]; x+=inc) {
-            uint32_t rCount, gCount, bCount;
+        for (unsigned int outX = 0; outX < stride; outX++) {
+            int32_t rCount, gCount, bCount;
             // TODO: Perfect demosaicing is a cheat
             const uint32_t *pixel = mScene.getPixelElectrons();
             rCount = pixel[Scene::R]  * scale64x;
+            rCount = rCount < saturationPoint ? rCount : saturationPoint;
             gCount = pixel[Scene::Gr] * scale64x;
+            gCount = gCount < saturationPoint ? gCount : saturationPoint;
             bCount = pixel[Scene::B]  * scale64x;
-            uint32_t avg = (rCount + gCount + bCount) / 3;
-            *pxY++ = avg < 255*64 ? avg / 64 : 255;
+            bCount = bCount < saturationPoint ? bCount : saturationPoint;
+
+            *pxY++ = (rgbToY[0] * rCount +
+                    rgbToY[1] * gCount +
+                    rgbToY[2] * bCount) / scaleOutSq;
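+            // 4:2:0 subsampling: emit one interleaved V/U pair per
+            // 2x2 block of output pixels.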
+            if (outY % 2 == 0 && outX % 2 == 0) {
+                *pxVU++ = (rgbToCr[0] * rCount +
+                        rgbToCr[1] * gCount +
+                        rgbToCr[2] * bCount +
+                        rgbToCr[3]) / scaleOutSq;
+                *pxVU++ = (rgbToCb[0] * rCount +
+                        rgbToCb[1] * gCount +
+                        rgbToCb[2] * bCount +
+                        rgbToCb[3]) / scaleOutSq;
+            }
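+            // When the scene is being decimated to the output size
+            // (inc > 1), skip the remaining source pixels in this block.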
             for (unsigned int j = 1; j < inc; j++)
                 mScene.getPixelElectrons();
         }
     }
-    for (unsigned int y = 0, outY = outH; y < kResolution[1]/2; y+=inc, outY++) {
-        uint8_t *px = img + outY * stride;
-        for (unsigned int x = 0; x < kResolution[0]; x+=inc) {
-            // UV to neutral
-            *px++ = 128;
-            *px++ = 128;
-        }
-    }
     ALOGVV("NV21 sensor image captured");
 }
 
diff --git a/camera/fake-pipeline2/Sensor.h b/camera/fake-pipeline2/Sensor.h
index 33a8861..b485844 100644
--- a/camera/fake-pipeline2/Sensor.h
+++ b/camera/fake-pipeline2/Sensor.h
@@ -18,6 +18,8 @@
  * This class is a simple simulation of a typical CMOS cellphone imager chip,
  * which outputs 12-bit Bayer-mosaic raw images.
  *
+ * Unlike most real image sensors, this one's native color space is linear sRGB.
+ *
  * The sensor is abstracted as operating as a pipeline 3 stages deep;
  * conceptually, each frame to be captured goes through these three stages. The
  * processing step for the sensor is marked off by vertical sync signals, which