Audio Latency Tests

Add two tests to gauge audio latency. The cold latency test measures
how quickly data can be read from a freshly created AudioRecord. The
warm latency test measures how long it takes to record a sound after
creating an AudioRecord and an AudioTrack and letting them run for a
couple of seconds.

Change-Id: I06c4990d811ad6815cc1b0d185fab6c45c510f25
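
For reference, below is a minimal, illustrative sketch of the measurement
pattern the cold latency test relies on: time how long startRecording()
takes to move a fresh AudioRecord into the recording state. It assumes the
verifier's configuration (16 kHz, mono, 16-bit PCM); the class and method
names here are hypothetical and do not appear in this change.

    // Illustrative sketch only, not part of this change.
    // Requires the android.permission.RECORD_AUDIO permission.
    import android.media.AudioFormat;
    import android.media.AudioRecord;
    import android.media.MediaRecorder;

    public class ColdLatencySketch {
        /** @return milliseconds from startRecording() to RECORDSTATE_RECORDING, or -1 on error. */
        public static long measureColdStartLatencyMs() {
            final int sampleRate = 16000; // mirrors AudioQualityVerifierActivity.SAMPLE_RATE
            final int bufferSize = AudioRecord.getMinBufferSize(sampleRate,
                    AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
            if (bufferSize < 0) {
                return -1; // getMinBufferSize() reported an error
            }
            AudioRecord record = new AudioRecord(MediaRecorder.AudioSource.VOICE_RECOGNITION,
                    sampleRate, AudioFormat.CHANNEL_IN_MONO,
                    AudioFormat.ENCODING_PCM_16BIT, bufferSize);
            try {
                if (record.getState() != AudioRecord.STATE_INITIALIZED) {
                    return -1; // hardware failed to initialize
                }
                long start = System.currentTimeMillis();
                record.startRecording();
                // Poll until the hardware actually enters the recording state.
                while (record.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING) {
                    Thread.yield();
                }
                return System.currentTimeMillis() - start;
            } finally {
                if (record.getRecordingState() == AudioRecord.RECORDSTATE_RECORDING) {
                    record.stop();
                }
                record.release();
            }
        }
    }

In the experiment itself, the value obtained this way is compared against
MAXIMUM_LATENCY_ALLOWED_MS (500 ms) after the hardware has been given a few
seconds to shut down following an earlier recording.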
diff --git a/apps/CtsVerifier/res/values/strings.xml b/apps/CtsVerifier/res/values/strings.xml
index dc6cf20..dfc41f8 100644
--- a/apps/CtsVerifier/res/values/strings.xml
+++ b/apps/CtsVerifier/res/values/strings.xml
@@ -170,6 +170,8 @@
     <string name="aq_linearity_exp">Gain linearity test</string>
     <string name="aq_overflow_exp">Overflow check</string>
     <string name="aq_bias_exp">Bias measurement</string>
+    <string name="aq_cold_latency">Cold recording latency</string>
+    <string name="aq_warm_latency">Warm recording latency</string>
     
     <!-- Experiment outcomes -->
     <string name="aq_fail">Fail</string>
@@ -191,4 +193,16 @@
     <string name="aq_level_report">RMS = %1$.0f, target = %2$.0f\nTolerance = %3$.1f%%\nDuration = %4$.1fs</string>
     <string name="aq_spectrum_report_error">Cannot perform test.\nCheck volume is sufficiently high?</string>
     <string name="aq_spectrum_report_normal">RMS deviation = %1$.2f\nMax allowed deviation = %2$.1f</string>
+    <string name="aq_cold_latency_report">Latency = %1$dms, maximum allowed = %2$dms</string>
+    <string name="aq_warm_latency_report_error">RMS = %1$.0f, target = %2$.0f</string>
+    <string name="aq_warm_latency_report_normal">Latency = %1$dms</string>
+    
+    <!-- General experiment messages -->    
+    <string name="aq_audiorecord_buffer_size_error">Error getting minimum AudioRecord buffer size: %1$d</string>
+    <string name="aq_audiotrack_buffer_size_error">Error getting minimum AudioTrack buffer size: %1$d</string>
+    <string name="aq_init_audiorecord_error">Error initializing AudioRecord instance</string>
+    <string name="aq_init_audiotrack_error">Error initializing AudioTrack instance</string>
+    <string name="aq_recording_error">Error reading data from AudioRecord instance</string>
+    <string name="aq_exception_error">Exception thrown during test: %1$s</string>
+
 </resources>
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/audioquality/AudioQualityVerifierActivity.java b/apps/CtsVerifier/src/com/android/cts/verifier/audioquality/AudioQualityVerifierActivity.java
index fd84cd3..7d7c16d 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/audioquality/AudioQualityVerifierActivity.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/audioquality/AudioQualityVerifierActivity.java
@@ -45,8 +45,8 @@
 /**
  * Main UI for the Android Audio Quality Verifier.
  */
-public class AudioQualityVerifierActivity extends PassFailButtons.Activity implements View.OnClickListener,
-        OnItemClickListener {
+public class AudioQualityVerifierActivity extends PassFailButtons.Activity
+        implements View.OnClickListener, OnItemClickListener {
     public static final String TAG = "AudioQualityVerifier";
 
     public static final int SAMPLE_RATE = 16000;
@@ -90,6 +90,8 @@
 
     private boolean mRunningExperiment;
 
+    private BroadcastReceiver mReceiver;
+
     @Override
     public void onCreate(Bundle savedInstanceState) {
         super.onCreate(savedInstanceState);
@@ -118,7 +120,7 @@
 
         mExperiments = VerifierExperiments.getExperiments(this);
 
-        BroadcastReceiver receiver = new BroadcastReceiver() {
+        mReceiver = new BroadcastReceiver() {
             @Override
             public void onReceive(Context context, Intent intent) {
                 experimentReplied(intent);
@@ -127,7 +129,7 @@
         IntentFilter filter = new IntentFilter();
         filter.addAction(ACTION_EXP_STARTED);
         filter.addAction(ACTION_EXP_FINISHED);
-        registerReceiver(receiver, filter);
+        registerReceiver(mReceiver, filter);
 
         fillAdapter();
         mList.setAdapter(mAdapter);
@@ -139,6 +141,7 @@
     public void onResume() {
         super.onResume();
         mAdapter.notifyDataSetChanged(); // Update List UI
+        setVolumeControlStream(AudioManager.STREAM_MUSIC);
         checkNotSilent();
     }
 
@@ -275,4 +278,10 @@
         }
         mAdapter.notifyDataSetChanged();
     }
+
+    @Override
+    protected void onDestroy() {
+        super.onDestroy();
+        unregisterReceiver(mReceiver);
+    }
 }
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/audioquality/BackgroundAudio.java b/apps/CtsVerifier/src/com/android/cts/verifier/audioquality/BackgroundAudio.java
index e22d596..e55f9b7 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/audioquality/BackgroundAudio.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/audioquality/BackgroundAudio.java
@@ -50,7 +50,7 @@
         final int minHardwareBufferSize =
                 AudioTrack.getMinBufferSize(AudioQualityVerifierActivity.SAMPLE_RATE,
                         AudioFormat.CHANNEL_OUT_MONO, AudioQualityVerifierActivity.AUDIO_FORMAT);
-        mBufferSize = Math.max(minHardwareBufferSize, minBufferSize);
+        mBufferSize = Utils.getAudioTrackBufferSize(minBufferSize);
         Log.i(TAG, "minBufferSize = " + minBufferSize + ", minHWSize = " + minHardwareBufferSize
                 + ", bufferSize = " + mBufferSize);
 
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/audioquality/CalibrateVolumeActivity.java b/apps/CtsVerifier/src/com/android/cts/verifier/audioquality/CalibrateVolumeActivity.java
index c6cf34a..98e8cd1 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/audioquality/CalibrateVolumeActivity.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/audioquality/CalibrateVolumeActivity.java
@@ -159,10 +159,7 @@
 
             final int minBufferSize = (BUFFER_TIME * AudioQualityVerifierActivity.SAMPLE_RATE *
                     AudioQualityVerifierActivity.BYTES_PER_SAMPLE) / 1000;
-            final int minHardwareBufferSize = AudioRecord.getMinBufferSize(
-                    AudioQualityVerifierActivity.SAMPLE_RATE, AudioFormat.CHANNEL_IN_MONO,
-                    AudioQualityVerifierActivity.AUDIO_FORMAT);
-            final int bufferSize = Math.max(minHardwareBufferSize, minBufferSize);
+            final int bufferSize = Utils.getAudioRecordBufferSize(minBufferSize);
 
             mRecord = new AudioRecord(MediaRecorder.AudioSource.VOICE_RECOGNITION,
                     AudioQualityVerifierActivity.SAMPLE_RATE, AudioFormat.CHANNEL_IN_MONO,
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/audioquality/Utils.java b/apps/CtsVerifier/src/com/android/cts/verifier/audioquality/Utils.java
index 5774782..704b1df 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/audioquality/Utils.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/audioquality/Utils.java
@@ -18,6 +18,7 @@
 
 import android.content.Context;
 import android.media.AudioFormat;
+import android.media.AudioRecord;
 import android.media.AudioTrack;
 import android.os.Environment;
 import android.util.Log;
@@ -39,6 +40,38 @@
     public static final ByteOrder BYTE_ORDER = ByteOrder.LITTLE_ENDIAN;
 
     /**
+     * @param minBufferSize the minimum buffer size requested by the caller, in bytes
+     * @return the buffer size to use, or a negative {@link AudioTrack} ERROR value
+     */
+    public static int getAudioTrackBufferSize(int minBufferSize) {
+        int minHardwareBufferSize = AudioTrack.getMinBufferSize(
+                AudioQualityVerifierActivity.SAMPLE_RATE,
+                AudioFormat.CHANNEL_OUT_MONO,
+                AudioQualityVerifierActivity.AUDIO_FORMAT);
+        if (minHardwareBufferSize < 0) {
+            return minHardwareBufferSize;
+        } else {
+            return Math.max(minHardwareBufferSize, minBufferSize);
+        }
+    }
+
+    /**
+     * @param minBufferSize the minimum buffer size requested by the caller, in bytes
+     * @return the buffer size to use, or a negative {@link AudioRecord} ERROR value
+     */
+    public static int getAudioRecordBufferSize(int minBufferSize) {
+        int minHardwareBufferSize = AudioRecord.getMinBufferSize(
+                AudioQualityVerifierActivity.SAMPLE_RATE,
+                AudioFormat.CHANNEL_IN_MONO,
+                AudioQualityVerifierActivity.AUDIO_FORMAT);
+        if (minHardwareBufferSize < 0) {
+            return minHardwareBufferSize;
+        } else {
+            return Math.max(minHardwareBufferSize, minBufferSize);
+        }
+    }
+
+    /**
      *  Time delay.
      *
      *  @param ms time in milliseconds to pause for
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/audioquality/VerifierExperiments.java b/apps/CtsVerifier/src/com/android/cts/verifier/audioquality/VerifierExperiments.java
index f800907..044bde9 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/audioquality/VerifierExperiments.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/audioquality/VerifierExperiments.java
@@ -17,11 +17,13 @@
 package com.android.cts.verifier.audioquality;
 
 import com.android.cts.verifier.audioquality.experiments.BiasExperiment;
+import com.android.cts.verifier.audioquality.experiments.ColdLatencyExperiment;
 import com.android.cts.verifier.audioquality.experiments.OverflowExperiment;
 import com.android.cts.verifier.audioquality.experiments.GainLinearityExperiment;
 import com.android.cts.verifier.audioquality.experiments.GlitchExperiment;
 import com.android.cts.verifier.audioquality.experiments.SoundLevelExperiment;
 import com.android.cts.verifier.audioquality.experiments.SpectrumShapeExperiment;
+import com.android.cts.verifier.audioquality.experiments.WarmLatencyExperiment;
 
 import android.content.Context;
 
@@ -47,7 +49,8 @@
             mExperiments.add(new SpectrumShapeExperiment());
             mExperiments.add(new GlitchExperiment(0));
             mExperiments.add(new GlitchExperiment(7));
-            // mExperiments.add(new VoiceRecognitionExperiment());
+            mExperiments.add(new ColdLatencyExperiment());
+            mExperiments.add(new WarmLatencyExperiment());
             for (Experiment exp : mExperiments) {
                 exp.init(context);
             }
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/audioquality/experiments/ColdLatencyExperiment.java b/apps/CtsVerifier/src/com/android/cts/verifier/audioquality/experiments/ColdLatencyExperiment.java
new file mode 100644
index 0000000..648e6cb
--- /dev/null
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/audioquality/experiments/ColdLatencyExperiment.java
@@ -0,0 +1,253 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.cts.verifier.audioquality.experiments;
+
+import com.android.cts.verifier.R;
+import com.android.cts.verifier.audioquality.AudioQualityVerifierActivity;
+import com.android.cts.verifier.audioquality.Experiment;
+import com.android.cts.verifier.audioquality.Utils;
+
+import android.content.Context;
+import android.media.AudioFormat;
+import android.media.AudioRecord;
+import android.media.MediaRecorder.AudioSource;
+
+import java.util.concurrent.Callable;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.Future;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
+
+/**
+ * {@link Experiment} that measures how long it takes for an initialized
+ * {@link AudioRecord} object to enter the recording state.
+ */
+public class ColdLatencyExperiment extends Experiment {
+
+    /**
+     * Rough latency amounts observed:
+     *
+     * N1 2.3.4: 350 ms
+     * NS 2.3.4: 250 ms
+     * Xoom 3.1: 100 ms
+     */
+    private static final int MAXIMUM_LATENCY_ALLOWED_MS = 500;
+
+    /** Enough time to say a short phrase usually entered as a voice command. */
+    private static final int BUFFER_TIME_MS = 25 * 1000;
+
+    /** Milliseconds to pause while repeatedly checking the recording state. */
+    private static final int DELAY_MS = 10;
+
+    /** Milliseconds to record before turning off the recording. */
+    private static final int RECORDING_DELAY_MS = 3000;
+
+    /** Milliseconds to pause after the first recording so the audio hardware can shut down. */
+    private static final int LATENCY_CHECK_DELAY_MS = 5000;
+
+    private static final int TEST_TIMEOUT_SECONDS = 10;
+
+    public ColdLatencyExperiment() {
+        super(true);
+    }
+
+    @Override
+    protected String lookupName(Context context) {
+        return context.getString(R.string.aq_cold_latency);
+    }
+
+    @Override
+    public void run() {
+        ExecutorService executor = Executors.newCachedThreadPool();
+        RecordingTask recordingTask = new RecordingTask(RECORDING_DELAY_MS);
+
+        try {
+            // 1. Start recording for a couple seconds.
+            Future<Long> recordingFuture = executor.submit(recordingTask);
+            long recordTime = recordingFuture.get(RECORDING_DELAY_MS * 2, TimeUnit.MILLISECONDS);
+            if (recordTime < 0) {
+                setScore(getString(R.string.aq_fail));
+                return;
+            }
+
+            // 2. Wait a bit for the audio hardware to shut down.
+            long startTime = System.currentTimeMillis();
+            while (System.currentTimeMillis() - startTime < LATENCY_CHECK_DELAY_MS) {
+                Utils.delay(DELAY_MS);
+            }
+
+            // 3. Now measure the latency by starting up the hardware again.
+            long latency = getLatency();
+            if (latency < 0) {
+                setScore(getString(R.string.aq_fail));
+            } else {
+                setScore(latency < MAXIMUM_LATENCY_ALLOWED_MS
+                        ? getString(R.string.aq_pass)
+                        : getString(R.string.aq_fail));
+                setReport(String.format(getString(R.string.aq_cold_latency_report), latency,
+                        MAXIMUM_LATENCY_ALLOWED_MS));
+            }
+        } catch (InterruptedException e) {
+            Thread.currentThread().interrupt();
+            setScore(getString(R.string.aq_fail));
+        } catch (ExecutionException e) {
+            setScore(getString(R.string.aq_fail));
+        } catch (TimeoutException e) {
+            setScore(getString(R.string.aq_fail));
+        } finally {
+            recordingTask.stopRecording();
+            executor.shutdown();
+            mTerminator.terminate(false);
+        }
+    }
+
+    @Override
+    public int getTimeout() {
+        return TEST_TIMEOUT_SECONDS;
+    }
+
+    /** Task that records for a given length of time. */
+    private class RecordingTask implements Callable<Long> {
+
+        private static final int READ_TIME = 25;
+
+        private final long mRecordMs;
+
+        private final int mSamplesToRead;
+
+        private final byte[] mBuffer;
+
+        private boolean mKeepRecording = true;
+
+        public RecordingTask(long recordMs) {
+            this.mRecordMs = recordMs;
+            this.mSamplesToRead = (READ_TIME * AudioQualityVerifierActivity.SAMPLE_RATE) / 1000;
+            this.mBuffer = new byte[mSamplesToRead * AudioQualityVerifierActivity.BYTES_PER_SAMPLE];
+        }
+
+        public Long call() throws Exception {
+            int minBufferSize = BUFFER_TIME_MS / 1000
+                    * AudioQualityVerifierActivity.SAMPLE_RATE
+                    * AudioQualityVerifierActivity.BYTES_PER_SAMPLE;
+            int bufferSize = Utils.getAudioRecordBufferSize(minBufferSize);
+            if (bufferSize < 0) {
+                setReport(String.format(getString(R.string.aq_audiorecord_buffer_size_error), bufferSize));
+                return -1L;
+            }
+
+            AudioRecord record = null;
+            try {
+                record = new AudioRecord(AudioSource.VOICE_RECOGNITION,
+                        AudioQualityVerifierActivity.SAMPLE_RATE,
+                        AudioFormat.CHANNEL_IN_MONO,
+                        AudioQualityVerifierActivity.AUDIO_FORMAT,
+                        bufferSize);
+
+                if (record.getState() != AudioRecord.STATE_INITIALIZED) {
+                    setReport(getString(R.string.aq_init_audiorecord_error));
+                    return -2L;
+                }
+
+                record.startRecording();
+                while (record.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING) {
+                    // Wait until we can start recording...
+                    Utils.delay(DELAY_MS);
+                }
+
+                long startTime = System.currentTimeMillis();
+                int maxBytes = mSamplesToRead * AudioQualityVerifierActivity.BYTES_PER_SAMPLE;
+                while (true) {
+                    synchronized (this) {
+                        if (!mKeepRecording) {
+                            break;
+                        }
+                    }
+                    int numBytesRead = record.read(mBuffer, 0, maxBytes);
+                    if (numBytesRead < 0) {
+                        setReport(getString(R.string.aq_recording_error));
+                        return -3L;
+                    } else if (System.currentTimeMillis() - startTime >= mRecordMs) {
+                        return System.currentTimeMillis() - startTime;
+                    }
+                }
+
+                return -4L;
+            } finally {
+                if (record != null) {
+                    if (record.getRecordingState() == AudioRecord.RECORDSTATE_RECORDING) {
+                        record.stop();
+                    }
+                    record.release();
+                    record = null;
+                }
+            }
+        }
+
+        public void stopRecording() {
+            synchronized (this) {
+                mKeepRecording = false;
+            }
+        }
+    }
+
+    /**
+     * @return latency in milliseconds between starting to record and entering the
+     *         recording state, or -1 if an error occurred
+     */
+    private long getLatency() {
+        int minBufferSize = BUFFER_TIME_MS / 1000
+                * AudioQualityVerifierActivity.SAMPLE_RATE
+                * AudioQualityVerifierActivity.BYTES_PER_SAMPLE;
+        int bufferSize = Utils.getAudioRecordBufferSize(minBufferSize);
+        if (bufferSize < 0) {
+            setReport(String.format(getString(R.string.aq_audiorecord_buffer_size_error),
+                    bufferSize));
+            return -1;
+        }
+
+        AudioRecord record = null;
+        try {
+            record = new AudioRecord(AudioSource.VOICE_RECOGNITION,
+                    AudioQualityVerifierActivity.SAMPLE_RATE, AudioFormat.CHANNEL_IN_MONO,
+                    AudioQualityVerifierActivity.AUDIO_FORMAT, bufferSize);
+
+            if (record.getState() != AudioRecord.STATE_INITIALIZED) {
+                setReport(getString(R.string.aq_init_audiorecord_error));
+                return -1;
+            }
+
+            long startTime = System.currentTimeMillis();
+            record.startRecording();
+            while (record.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING) {
+                Utils.delay(DELAY_MS);
+            }
+            long endTime = System.currentTimeMillis();
+
+            return endTime - startTime;
+        } finally {
+            if (record != null) {
+                if (record.getRecordingState() == AudioRecord.RECORDSTATE_RECORDING) {
+                    record.stop();
+                }
+                record.release();
+                record = null;
+            }
+        }
+    }
+}
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/audioquality/experiments/LoopbackExperiment.java b/apps/CtsVerifier/src/com/android/cts/verifier/audioquality/experiments/LoopbackExperiment.java
index f4a1e1f..16039cb 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/audioquality/experiments/LoopbackExperiment.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/audioquality/experiments/LoopbackExperiment.java
@@ -131,10 +131,7 @@
         public void run() {
             final int minBufferSize = AudioQualityVerifierActivity.SAMPLE_RATE
                     * AudioQualityVerifierActivity.BYTES_PER_SAMPLE;
-            final int minHardwareBufferSize = AudioRecord.getMinBufferSize(
-                    AudioQualityVerifierActivity.SAMPLE_RATE,
-                    AudioFormat.CHANNEL_IN_MONO, AudioQualityVerifierActivity.AUDIO_FORMAT);
-            final int bufferSize = Math.max(minHardwareBufferSize, minBufferSize);
+            final int bufferSize = Utils.getAudioRecordBufferSize(minBufferSize);
 
             mRecord = new AudioRecord(MediaRecorder.AudioSource.VOICE_RECOGNITION,
                     AudioQualityVerifierActivity.SAMPLE_RATE, AudioFormat.CHANNEL_IN_MONO,
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/audioquality/experiments/WarmLatencyExperiment.java b/apps/CtsVerifier/src/com/android/cts/verifier/audioquality/experiments/WarmLatencyExperiment.java
new file mode 100644
index 0000000..88aaf8c
--- /dev/null
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/audioquality/experiments/WarmLatencyExperiment.java
@@ -0,0 +1,337 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.cts.verifier.audioquality.experiments;
+
+import com.android.cts.verifier.R;
+import com.android.cts.verifier.audioquality.AudioQualityVerifierActivity;
+import com.android.cts.verifier.audioquality.Experiment;
+import com.android.cts.verifier.audioquality.Native;
+import com.android.cts.verifier.audioquality.Utils;
+
+import android.content.Context;
+import android.media.AudioFormat;
+import android.media.AudioManager;
+import android.media.AudioRecord;
+import android.media.AudioTrack;
+import android.media.MediaRecorder.AudioSource;
+
+import java.util.concurrent.Callable;
+import java.util.concurrent.CyclicBarrier;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.Future;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
+
+/**
+ * {@link Experiment} that measures how long it takes for a stimulus emitted
+ * by a warmed up {@link AudioTrack} to be recorded by a warmed up
+ * {@link AudioRecord} instance.
+ */
+public class WarmLatencyExperiment extends Experiment {
+
+    /** Milliseconds to wait before playing the sound. */
+    private static final int DELAY_TIME = 2000;
+
+    /** Target RMS value to detect before quitting the experiment. */
+    private static final float TARGET_RMS = 4000;
+
+    /** Target latency to react to the sound. */
+    private static final long TARGET_LATENCY_MS = 200;
+
+    private static final int CHANNEL_IN_CONFIG = AudioFormat.CHANNEL_IN_MONO;
+    private static final int CHANNEL_OUT_CONFIG = AudioFormat.CHANNEL_OUT_MONO;
+    private static final float FREQ = 625.0f;
+    private static final int DURATION = 1;
+    private static final int OUTPUT_AMPL = 5000;
+    private static final float RAMP = 0.0f;
+    private static final int BUFFER_TIME_MS = 100;
+    private static final int READ_TIME = 25;
+
+    public WarmLatencyExperiment() {
+        super(true);
+    }
+
+    @Override
+    protected String lookupName(Context context) {
+        return context.getString(R.string.aq_warm_latency);
+    }
+
+    @Override
+    public void run() {
+        ExecutorService executor = Executors.newFixedThreadPool(2);
+        CyclicBarrier barrier = new CyclicBarrier(2);
+        PlaybackTask playbackTask = new PlaybackTask(barrier);
+        RecordingTask recordingTask = new RecordingTask(barrier);
+
+        Future<Long> playbackTimeFuture = executor.submit(playbackTask);
+        Future<Long> recordTimeFuture = executor.submit(recordingTask);
+
+        try {
+            // Get the time when the sound is detected or throw an exception...
+            long recordTime = recordTimeFuture.get(DELAY_TIME * 2, TimeUnit.MILLISECONDS);
+
+            // Stop the playback now since the sound was detected. Get the time playback started.
+            playbackTask.stopPlaying();
+            long playbackTime = playbackTimeFuture.get();
+
+            if (recordTime == -1 || playbackTime == -1) {
+                setScore(getString(R.string.aq_fail));
+            } else {
+                long latency = recordTime - playbackTime;
+                setScore(latency < TARGET_LATENCY_MS
+                        ? getString(R.string.aq_pass)
+                        : getString(R.string.aq_fail));
+                setReport(String.format(getString(R.string.aq_warm_latency_report_normal),
+                        latency));
+            }
+        } catch (InterruptedException e) {
+            setExceptionReport(e);
+        } catch (ExecutionException e) {
+            setExceptionReport(e);
+        } catch (TimeoutException e) {
+            setScore(getString(R.string.aq_fail));
+            setReport(String.format(getString(R.string.aq_warm_latency_report_error),
+                    recordingTask.getLastRms(), TARGET_RMS));
+        } finally {
+            playbackTask.stopPlaying();
+            recordingTask.stopRecording();
+            mTerminator.terminate(false);
+        }
+    }
+
+    private void setExceptionReport(Exception e) {
+        setScore(getString(R.string.aq_fail));
+        setReport(String.format(getString(R.string.aq_exception_error), e.getClass().getName()));
+    }
+
+    @Override
+    public int getTimeout() {
+        return 10; // seconds
+    }
+
+    /**
+     * Task that plays a sinusoid after playing silence for a couple of seconds.
+     * Returns the playback start time.
+     */
+    private class PlaybackTask implements Callable<Long> {
+
+        private final byte[] mData;
+
+        private final int mBufferSize;
+
+        private final CyclicBarrier mReadyBarrier;
+
+        private int mPosition;
+
+        private boolean mKeepPlaying = true;
+
+        public PlaybackTask(CyclicBarrier barrier) {
+            this.mData = getAudioData();
+            this.mBufferSize = getBufferSize();
+            this.mReadyBarrier = barrier;
+        }
+
+        private byte[] getAudioData() {
+            short[] sinusoid = mNative.generateSinusoid(FREQ, DURATION,
+                    AudioQualityVerifierActivity.SAMPLE_RATE, OUTPUT_AMPL, RAMP);
+            return Utils.shortToByteArray(sinusoid);
+        }
+
+        private int getBufferSize() {
+            int minBufferSize = (BUFFER_TIME_MS * AudioQualityVerifierActivity.SAMPLE_RATE
+                    * AudioQualityVerifierActivity.BYTES_PER_SAMPLE) / 1000;
+            return Utils.getAudioTrackBufferSize(minBufferSize);
+        }
+
+        public Long call() throws Exception {
+            if (mBufferSize < 0) {
+                setReport(String.format(getString(R.string.aq_audiotrack_buffer_size_error), mBufferSize));
+                return -1L;
+            }
+
+            AudioTrack track = null;
+            try {
+                track = new AudioTrack(AudioManager.STREAM_MUSIC,
+                        AudioQualityVerifierActivity.SAMPLE_RATE, CHANNEL_OUT_CONFIG,
+                        AudioQualityVerifierActivity.AUDIO_FORMAT, mBufferSize,
+                        AudioTrack.MODE_STREAM);
+
+                if (track.getState() != AudioTrack.STATE_INITIALIZED) {
+                    setReport(getString(R.string.aq_init_audiotrack_error));
+                    return -1L;
+                }
+
+                track.play();
+                while (track.getPlayState() != AudioTrack.PLAYSTATE_PLAYING) {
+                    // Wait until we've started playing...
+                }
+
+                // Wait until the recording thread has started and is recording...
+                mReadyBarrier.await(1, TimeUnit.SECONDS);
+
+                long time = System.currentTimeMillis();
+                while (System.currentTimeMillis() - time < DELAY_TIME) {
+                    synchronized (this) {
+                        if (!mKeepPlaying) {
+                            break;
+                        }
+                    }
+                    // Play nothing...
+                }
+
+                long playTime = System.currentTimeMillis();
+                writeAudio(track);
+                while (true) {
+                    synchronized (this) {
+                        if (!mKeepPlaying) {
+                            break;
+                        }
+                    }
+                    writeAudio(track);
+                }
+
+                return playTime;
+            } finally {
+                if (track != null) {
+                    if (track.getPlayState() == AudioTrack.PLAYSTATE_PLAYING) {
+                        track.stop();
+                    }
+                    track.release();
+                    track = null;
+                }
+            }
+        }
+
+        private void writeAudio(AudioTrack track) {
+            int length = mData.length;
+            int writeBytes = Math.min(mBufferSize, length - mPosition);
+            int numBytesWritten = track.write(mData, mPosition, writeBytes);
+            if (numBytesWritten < 0) {
+                throw new IllegalStateException("Couldn't write any data to the track!");
+            } else {
+                mPosition += numBytesWritten;
+                if (mPosition == length) {
+                    mPosition = 0;
+                }
+            }
+        }
+
+        public void stopPlaying() {
+            synchronized (this) {
+                mKeepPlaying = false;
+            }
+        }
+    }
+
+    /** Task that records until detecting a sound of the target RMS. Returns the detection time. */
+    private class RecordingTask implements Callable<Long> {
+
+        private final int mSamplesToRead;
+
+        private final byte[] mBuffer;
+
+        private final CyclicBarrier mBarrier;
+
+        private boolean mKeepRecording = true;
+
+        private float mLastRms = 0.0f;
+
+        public RecordingTask(CyclicBarrier barrier) {
+            this.mSamplesToRead = (READ_TIME * AudioQualityVerifierActivity.SAMPLE_RATE) / 1000;
+            this.mBuffer = new byte[mSamplesToRead * AudioQualityVerifierActivity.BYTES_PER_SAMPLE];
+            this.mBarrier = barrier;
+        }
+
+        public Long call() throws Exception {
+            int minBufferSize = (BUFFER_TIME_MS * AudioQualityVerifierActivity.SAMPLE_RATE
+                    * AudioQualityVerifierActivity.BYTES_PER_SAMPLE) / 1000;
+            int bufferSize = Utils.getAudioRecordBufferSize(minBufferSize);
+            if (bufferSize < 0) {
+                setReport(String.format(
+                        getString(R.string.aq_audiorecord_buffer_size_error), bufferSize));
+                return -1L;
+            }
+
+            long recordTime = -1;
+            AudioRecord record = null;
+            try {
+                record = new AudioRecord(AudioSource.VOICE_RECOGNITION,
+                        AudioQualityVerifierActivity.SAMPLE_RATE, CHANNEL_IN_CONFIG,
+                        AudioQualityVerifierActivity.AUDIO_FORMAT, bufferSize);
+
+                if (record.getState() != AudioRecord.STATE_INITIALIZED) {
+                    setReport(getString(R.string.aq_init_audiorecord_error));
+                    return -1L;
+                }
+
+                record.startRecording();
+                while (record.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING) {
+                    // Wait until we can start recording...
+                }
+
+                // Wait until the playback thread has started and is playing...
+                mBarrier.await(1, TimeUnit.SECONDS);
+
+                int maxBytes = mSamplesToRead * AudioQualityVerifierActivity.BYTES_PER_SAMPLE;
+                while (true) {
+                    synchronized (this) {
+                        if (!mKeepRecording) {
+                            break;
+                        }
+                    }
+                    int numBytesRead = record.read(mBuffer, 0, maxBytes);
+                    if (numBytesRead < 0) {
+                        setReport(getString(R.string.aq_recording_error));
+                        return -1L;
+                    } else if (numBytesRead > 2) {
+                        // TODO: Could be improved to use a sliding window?
+                        short[] samples = Utils.byteToShortArray(mBuffer, 0, numBytesRead);
+                        float[] results = mNative.measureRms(samples,
+                                AudioQualityVerifierActivity.SAMPLE_RATE, -1.0f);
+                        mLastRms = results[Native.MEASURE_RMS_RMS];
+                        if (mLastRms >= TARGET_RMS) {
+                            recordTime = System.currentTimeMillis();
+                            break;
+                        }
+                    }
+                }
+
+                return recordTime;
+            } finally {
+                if (record != null) {
+                    if (record.getRecordingState() == AudioRecord.RECORDSTATE_RECORDING) {
+                        record.stop();
+                    }
+                    record.release();
+                    record = null;
+                }
+            }
+        }
+
+        public float getLastRms() {
+            return mLastRms;
+        }
+
+        public void stopRecording() {
+            synchronized (this) {
+                mKeepRecording = false;
+            }
+        }
+    }
+}