[automerger skipped] DO NOT MERGE - Merge pi-dev@5234907 into stage-aosp-master
am: 1debe26835 -s ours
am skip reason: subject contains skip directive

Change-Id: I2cc4eec7d46cef0b5120e620b4b9c8df575acd91
diff --git a/LoopbackApp/.gitignore b/LoopbackApp/.gitignore
index 4d893e9..ab266d3 100644
--- a/LoopbackApp/.gitignore
+++ b/LoopbackApp/.gitignore
@@ -3,3 +3,4 @@
 local.properties
 build/*
 .gradle/*
+**/.externalNativeBuild
diff --git a/LoopbackApp/Android.mk b/LoopbackApp/Android.mk
deleted file mode 100644
index b8a8165..0000000
--- a/LoopbackApp/Android.mk
+++ /dev/null
@@ -1 +0,0 @@
-include $(call all-makefiles-under, app/src/main)
diff --git a/LoopbackApp/app/build.gradle b/LoopbackApp/app/build.gradle
index ff69f81..a315418 100644
--- a/LoopbackApp/app/build.gradle
+++ b/LoopbackApp/app/build.gradle
@@ -1,37 +1,38 @@
-apply plugin: 'com.android.model.application'
+/**
+ * The default build is with the CMake plugin.
+ * If the Android.mk build system is required, src/main/cpp/Android.mk is
+ * workable; just add hooks to this file to replace CMake at the "cmake"
+ * location.
+ */
 
-model {
-    android {
-        compileSdkVersion = 23
-        buildToolsVersion = "25.0"
+apply plugin: 'com.android.application'
 
-        defaultConfig.with {
-            applicationId = "org.drrickorang.loopback"
-            minSdkVersion.apiLevel = 11
-            targetSdkVersion.apiLevel = 23
+android {
+        compileSdkVersion 28
+
+        defaultConfig {
+            applicationId 'org.drrickorang.loopback'
+            minSdkVersion 14
+            targetSdkVersion  26
+            externalNativeBuild.cmake {
+                arguments "-DANDROID_STL=c++_static"
+            }
         }
-        ndk {
-            moduleName "loopback"
-            cppFlags.addAll "-I${project.rootDir}/app/src/main/jni".toString(), "-g"
-            CFlags.addAll "-I${project.rootDir}/app/src/main/jni".toString()
 
-            ldLibs.addAll "OpenSLES", "log"
-        }
+	externalNativeBuild {
+	    cmake {
+	        path 'src/main/cpp/CMakeLists.txt'
+	    }
+	}
+
         buildTypes {
             release {
                 minifyEnabled false
                 proguardFiles.add file('proguard.cfg')
             }
-            debug {
-                ndk {
-                    debuggable true
-                }
-            }
-        }    }
-
-
+    }
 }
 
 dependencies {
-    compile 'com.android.support:appcompat-v7:23.0.1'
+    implementation 'androidx.appcompat:appcompat:1.0.0-rc02'
 }
diff --git a/LoopbackApp/app/src/main/Android.mk b/LoopbackApp/app/src/main/Android.mk_
similarity index 85%
rename from LoopbackApp/app/src/main/Android.mk
rename to LoopbackApp/app/src/main/Android.mk_
index d25b808..78b835a 100644
--- a/LoopbackApp/app/src/main/Android.mk
+++ b/LoopbackApp/app/src/main/Android.mk_
@@ -17,7 +17,9 @@
 LOCAL_USE_AAPT2 := true
 
 LOCAL_STATIC_ANDROID_LIBRARIES := \
-    android-support-v4
+    androidx.legacy_legacy-support-v4
+
+LOCAL_SDK_VERSION := current
 
 include $(BUILD_PACKAGE)
 
diff --git a/LoopbackApp/app/src/main/AndroidManifest.xml b/LoopbackApp/app/src/main/AndroidManifest.xml
index 29aa1c9..df6fdc7 100644
--- a/LoopbackApp/app/src/main/AndroidManifest.xml
+++ b/LoopbackApp/app/src/main/AndroidManifest.xml
@@ -23,8 +23,8 @@
     xmlns:android="http://schemas.android.com/apk/res/android"
     package="org.drrickorang.loopback"
 
-    android:versionCode="19"
-    android:versionName="0.9.75">
+    android:versionCode="27"
+    android:versionName="0.10.5">
 
     <uses-permission android:name="android.permission.RECORD_AUDIO"/>
     <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/>
diff --git a/LoopbackApp/app/src/main/cpp/Android.bp b/LoopbackApp/app/src/main/cpp/Android.bp
new file mode 100644
index 0000000..3d78846
--- /dev/null
+++ b/LoopbackApp/app/src/main/cpp/Android.bp
@@ -0,0 +1,35 @@
+cc_library_shared {
+    name: "libloopback",
+    srcs: [
+        "sles.cpp",
+        "byte_buffer.c",
+        "jni_native.c",
+        "loopback.c",
+        "audio_utils/atomic.c",
+        "audio_utils/fifo.c",
+        "audio_utils/roundup.c",
+        "lb2/loopback_test.cpp",
+        "lb2/sound_system_echo.cpp",
+        "lb2/test_context.cpp",
+        "lb2/loopback2.cpp",
+        "lb2/sound_system_aaudio.cpp",
+        "lb2/oboe/src/aaudio/AAudioLoader.cpp",
+    ],
+    include_dirs: [
+        "frameworks/wilhelm/include",
+        "frameworks/av/media/libaaudio/include",
+    ],
+    shared_libs: [
+        "libOpenSLES",
+        "libdl",
+        "liblog",
+        "libandroid",
+    ],
+    //LOCAL_LDFLAGS += -Wl,--hash-style=sysv
+    //LOCAL_CFLAGS := -DSTDC_HEADERS
+    cppflags: [
+        "-fexceptions",
+    ],
+    cpp_std: "c++11",
+    c_std: "c11",
+}
diff --git a/LoopbackApp/app/src/main/cpp/CMakeLists.txt b/LoopbackApp/app/src/main/cpp/CMakeLists.txt
new file mode 100644
index 0000000..f1b87a9
--- /dev/null
+++ b/LoopbackApp/app/src/main/cpp/CMakeLists.txt
@@ -0,0 +1,57 @@
+#
+# Copyright (C) The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+cmake_minimum_required(VERSION 3.4.1)
+project(loopback LANGUAGES C CXX)
+
+# set up common compile options
+add_library(${PROJECT_NAME} SHARED
+    sles.cpp
+    byte_buffer.c
+    jni_native.c
+    loopback.c
+    audio_utils/atomic.c
+    audio_utils/fifo.c
+    audio_utils/roundup.c
+    lb2/loopback_test.cpp
+    lb2/sound_system_echo.cpp
+    lb2/test_context.cpp
+    lb2/loopback2.cpp
+    lb2/sound_system_aaudio.cpp
+    lb2/oboe/src/aaudio/AAudioLoader.cpp )
+
+target_include_directories(${PROJECT_NAME} PRIVATE ${CMAKE_SOURCE_DIR} frameworks/wilhelm/include)
+
+set_target_properties(${PROJECT_NAME}
+  PROPERTIES
+    CXX_STANDARD 11
+    CXX_STANDARD_REQUIRED YES
+    CXX_EXTENSIONS NO
+)
+
+target_compile_options(${PROJECT_NAME} PRIVATE -fexceptions -Wall)
+#target_compile_options(${PROJECT_NAME} -DSTDC_HEADERS)
+
+set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -std=c11")
+
+# set(CMAKE_SHARED_LINKER_FLAGS, "${CMAKE_SHARED_LINKER_FLAGS} --hash-style=sysv")
+# Export ANativeActivity_onCreate(),
+# Refer to: https://github.com/android-ndk/ndk/issues/381.
+# set_target_properties(${PROJECT_NAME}
+#   PROPERTIES  LINK_FLAGS "-u ANativeActivity_onCreate")
+
+# add lib dependencies
+target_link_libraries(${PROJECT_NAME} PRIVATE  OpenSLES dl log android)
diff --git a/LoopbackApp/app/src/main/jni/audio_utils/atomic.c b/LoopbackApp/app/src/main/cpp/audio_utils/atomic.c
similarity index 96%
rename from LoopbackApp/app/src/main/jni/audio_utils/atomic.c
rename to LoopbackApp/app/src/main/cpp/audio_utils/atomic.c
index b76b1f4..e1f1e87 100644
--- a/LoopbackApp/app/src/main/jni/audio_utils/atomic.c
+++ b/LoopbackApp/app/src/main/cpp/audio_utils/atomic.c
@@ -34,7 +34,8 @@
     return atomic_exchange(a, value);
 }
 
-bool android_atomic_compare_exchange(int32_t* expect, int32_t desire, volatile const int32_t* addr) {
+bool android_atomic_compare_exchange(int32_t* expect, int32_t desire,
+        volatile const int32_t* addr) {
     volatile atomic_int_least32_t* a = (volatile atomic_int_least32_t*) addr;
     return atomic_compare_exchange_weak(a, expect, desire);
 }
diff --git a/LoopbackApp/app/src/main/jni/audio_utils/atomic.h b/LoopbackApp/app/src/main/cpp/audio_utils/atomic.h
similarity index 100%
rename from LoopbackApp/app/src/main/jni/audio_utils/atomic.h
rename to LoopbackApp/app/src/main/cpp/audio_utils/atomic.h
diff --git a/LoopbackApp/app/src/main/jni/audio_utils/fifo.c b/LoopbackApp/app/src/main/cpp/audio_utils/fifo.c
similarity index 100%
rename from LoopbackApp/app/src/main/jni/audio_utils/fifo.c
rename to LoopbackApp/app/src/main/cpp/audio_utils/fifo.c
diff --git a/LoopbackApp/app/src/main/jni/audio_utils/fifo.h b/LoopbackApp/app/src/main/cpp/audio_utils/fifo.h
similarity index 100%
rename from LoopbackApp/app/src/main/jni/audio_utils/fifo.h
rename to LoopbackApp/app/src/main/cpp/audio_utils/fifo.h
diff --git a/LoopbackApp/app/src/main/jni/audio_utils/roundup.c b/LoopbackApp/app/src/main/cpp/audio_utils/roundup.c
similarity index 100%
rename from LoopbackApp/app/src/main/jni/audio_utils/roundup.c
rename to LoopbackApp/app/src/main/cpp/audio_utils/roundup.c
diff --git a/LoopbackApp/app/src/main/jni/audio_utils/roundup.h b/LoopbackApp/app/src/main/cpp/audio_utils/roundup.h
similarity index 100%
rename from LoopbackApp/app/src/main/jni/audio_utils/roundup.h
rename to LoopbackApp/app/src/main/cpp/audio_utils/roundup.h
diff --git a/LoopbackApp/app/src/main/cpp/byte_buffer.c b/LoopbackApp/app/src/main/cpp/byte_buffer.c
new file mode 100644
index 0000000..bf6ffd4
--- /dev/null
+++ b/LoopbackApp/app/src/main/cpp/byte_buffer.c
@@ -0,0 +1,58 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "byte_buffer.h"
+
+#include <stdatomic.h>
+#include <string.h>
+
+typedef _Atomic int32_t writer_pos_t;
+
+ssize_t byteBuffer_write(byte_buffer_t byteBuffer, size_t byteBufferSize,
+        const char *srcBuffer, size_t frameCount, int channels) {
+    // byteBufferSize is in bytes
+    const size_t dataSectionSize = byteBufferSize - sizeof(writer_pos_t);
+    writer_pos_t *rear_ptr = (writer_pos_t*)(byteBuffer + dataSectionSize);
+    writer_pos_t rear = *rear_ptr;
+    // rear should not exceed 2^31 - 1, or else overflow will happen
+
+    size_t frameSize = channels * sizeof(short); // sample format is fixed at 16-bit (short)
+    int32_t maxLengthInShort = dataSectionSize / frameSize;
+    // mask the upper bits to get the correct position in the pipe
+    writer_pos_t tempRear = rear & (maxLengthInShort - 1);
+    size_t part1 = maxLengthInShort - tempRear;
+
+    if (part1 > frameCount) {
+        part1 = frameCount;
+    }
+
+    if (part1 > 0) {
+        memcpy(byteBuffer + (tempRear * frameSize), srcBuffer,
+               part1 * frameSize);
+
+        size_t part2 = frameCount - part1;
+        if (part2 > 0) {
+            memcpy(byteBuffer, (srcBuffer + (part1 * frameSize)),
+                   part2 * frameSize);
+        }
+    }
+
+    // increase value of rear using the strongest memory ordering
+    // (since it's being read by Java we can't control the ordering
+    // used by the other side).
+    atomic_store(rear_ptr, rear + frameCount);
+    return frameCount;
+}
diff --git a/LoopbackApp/app/src/main/cpp/byte_buffer.h b/LoopbackApp/app/src/main/cpp/byte_buffer.h
new file mode 100644
index 0000000..66ed985
--- /dev/null
+++ b/LoopbackApp/app/src/main/cpp/byte_buffer.h
@@ -0,0 +1,40 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _Included_org_drrickorang_loopback_byte_buffer
+#define _Included_org_drrickorang_loopback_byte_buffer
+
+#include <sys/types.h>
+
+// Introduce a dedicated type because the destination buffer
+// is special, and needs to be obtained from the Java side.
+typedef char* byte_buffer_t;
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+// Writes data to a ByteBuffer for consumption on the Java side
+// via PipeByteBuffer class. The function assumes sample size being "short".
+// Returns the actual number of frames written.
+ssize_t byteBuffer_write(byte_buffer_t byteBuffer, size_t byteBufferSize,
+        const char *srcBuffer, size_t frameCount, int channels);
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif  // _Included_org_drrickorang_loopback_byte_buffer
diff --git a/LoopbackApp/app/src/main/cpp/jni_native.c b/LoopbackApp/app/src/main/cpp/jni_native.c
new file mode 100644
index 0000000..1eeae38
--- /dev/null
+++ b/LoopbackApp/app/src/main/cpp/jni_native.c
@@ -0,0 +1,235 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "jni_native.h"
+
+#include <stdlib.h>
+
+#include <android/log.h>
+
+#include "loopback.h"
+
+#define LOG_TAG "jni_native"
+
+static int nativeEngineFromThreadType(int threadType) {
+    switch (threadType) {
+        case AUDIO_THREAD_TYPE_NATIVE_SLES: return NATIVE_ENGINE_SLES;
+        case AUDIO_THREAD_TYPE_NATIVE_AAUDIO: return NATIVE_ENGINE_AAUDIO;
+    }
+    __android_log_print(ANDROID_LOG_ERROR, LOG_TAG,
+            "unsupported thread type %d", threadType);
+    return -1;
+}
+
+JNIEXPORT jobject JNICALL
+Java_org_drrickorang_loopback_NativeAudioThread_nativeComputeDefaultSettings
+(JNIEnv *env, jobject obj __unused, jint bytesPerFrame, jint threadType, jint performanceMode) {
+    int engine = nativeEngineFromThreadType(threadType);
+    if (engine == -1) return NULL;
+    int samplingRate, playerBufferFrameCount, recorderBufferFrameCount;
+    if (sEngines[engine].computeDefaultSettings(performanceMode, &samplingRate,
+                    &playerBufferFrameCount, &recorderBufferFrameCount) == STATUS_SUCCESS) {
+        jclass cls = (*env)->FindClass(env, "org/drrickorang/loopback/TestSettings");
+        jmethodID methodID = (*env)->GetMethodID(env, cls, "<init>", "(III)V");
+        jobject testSettings = (*env)->NewObject(env, cls, methodID,
+                samplingRate,
+                playerBufferFrameCount * bytesPerFrame,
+                recorderBufferFrameCount * bytesPerFrame);
+        return testSettings;
+    } else {
+        return NULL;
+    }
+}
+
+JNIEXPORT jlong JNICALL Java_org_drrickorang_loopback_NativeAudioThread_nativeInit
+  (JNIEnv *env, jobject obj __unused, jint threadType, jint samplingRate, jint frameCount,
+   jint micSource, jint performanceMode,
+   jint testType, jdouble frequency1, jobject byteBuffer, jshortArray loopbackTone,
+   jint maxRecordedLateCallbacks, jint ignoreFirstFrames) {
+
+    int engine = nativeEngineFromThreadType(threadType);
+    if (engine == -1) return 0;
+
+    native_engine_instance_t *pInstance =
+            (native_engine_instance_t*) malloc(sizeof(native_engine_instance_t));
+    if (pInstance == NULL) {
+        __android_log_print(ANDROID_LOG_ERROR, LOG_TAG,
+                "failed to allocate a native engine instance");
+        return 0;
+    }
+    void *pContext = NULL;
+
+    char *byteBufferPtr = (*env)->GetDirectBufferAddress(env, byteBuffer);
+    int byteBufferLength = (*env)->GetDirectBufferCapacity(env, byteBuffer);
+
+    short *loopbackToneArray = (*env)->GetShortArrayElements(env, loopbackTone, 0);
+
+    if (sEngines[engine].init(&pContext, samplingRate, frameCount, micSource,
+                 performanceMode,
+                 testType, frequency1, byteBufferPtr, byteBufferLength,
+                 loopbackToneArray, maxRecordedLateCallbacks, ignoreFirstFrames) != STATUS_FAIL) {
+        pInstance->context = pContext;
+        pInstance->methods = &sEngines[engine];
+        return (long) pInstance;
+    }
+
+    free(pInstance);
+    return 0;
+}
+
+
+JNIEXPORT jint JNICALL Java_org_drrickorang_loopback_NativeAudioThread_nativeProcessNext
+(JNIEnv *env, jobject obj __unused, jlong handle, jdoubleArray samplesArray,
+jlong offset) {
+    native_engine_instance_t *pInstance = (native_engine_instance_t*) handle;
+
+    long maxSamples = (*env)->GetArrayLength(env, samplesArray);
+    double *pSamples = (*env)->GetDoubleArrayElements(env, samplesArray, 0);
+
+    long availableSamples = maxSamples-offset;
+    double *pCurrentSample = pSamples+offset;
+
+    __android_log_print(ANDROID_LOG_INFO, LOG_TAG,
+            "jni nativeProcessNext currentSample %p, availableSamples %ld ",
+            pCurrentSample, availableSamples);
+
+    int samplesRead = pInstance->methods->processNext(
+            pInstance->context, pCurrentSample, availableSamples);
+    return samplesRead;
+}
+
+
+JNIEXPORT jint JNICALL Java_org_drrickorang_loopback_NativeAudioThread_nativeDestroy
+  (JNIEnv *env __unused, jobject obj __unused, jlong handle) {
+    native_engine_instance_t *pInstance = (native_engine_instance_t*) handle;
+    int status = pInstance->methods->destroy(&pInstance->context);
+    free(pInstance);
+    return status;
+}
+
+
+JNIEXPORT jintArray JNICALL
+        Java_org_drrickorang_loopback_NativeAudioThread_nativeGetRecorderBufferPeriod
+  (JNIEnv *env, jobject obj __unused, jlong handle) {
+    native_engine_instance_t *pInstance = (native_engine_instance_t*) handle;
+    int* recorderBufferPeriod = pInstance->methods->getRecorderBufferPeriod(
+            pInstance->context);
+
+    // get the length = RANGE
+    jintArray result = (*env)->NewIntArray(env, RANGE);
+    (*env)->SetIntArrayRegion(env, result, 0, RANGE, recorderBufferPeriod);
+
+    return result;
+}
+
+
+JNIEXPORT jint JNICALL
+        Java_org_drrickorang_loopback_NativeAudioThread_nativeGetRecorderMaxBufferPeriod
+  (JNIEnv *env __unused, jobject obj __unused, jlong handle) {
+    native_engine_instance_t *pInstance = (native_engine_instance_t*) handle;
+    int recorderMaxBufferPeriod = pInstance->methods->getRecorderMaxBufferPeriod(
+            pInstance->context);
+
+    return recorderMaxBufferPeriod;
+}
+
+
+JNIEXPORT jdouble JNICALL
+        Java_org_drrickorang_loopback_NativeAudioThread_nativeGetRecorderVarianceBufferPeriod
+        (JNIEnv *env __unused, jobject obj __unused, jlong handle) {
+    native_engine_instance_t *pInstance = (native_engine_instance_t*) handle;
+    int64_t result = pInstance->methods->getRecorderVarianceBufferPeriod(pInstance->context);
+    // variance has units ns^2 so we have to square the conversion factor
+    double scaled = (double) result / ((double) NANOS_PER_MILLI * (double) NANOS_PER_MILLI);
+    return scaled;
+}
+
+
+JNIEXPORT jintArray
+JNICALL Java_org_drrickorang_loopback_NativeAudioThread_nativeGetPlayerBufferPeriod
+  (JNIEnv *env __unused, jobject obj __unused, jlong handle) {
+    native_engine_instance_t *pInstance = (native_engine_instance_t*) handle;
+    int* playerBufferPeriod = pInstance->methods->getPlayerBufferPeriod(pInstance->context);
+
+    jintArray result = (*env)->NewIntArray(env, RANGE);
+    (*env)->SetIntArrayRegion(env, result, 0, RANGE, playerBufferPeriod);
+
+    return result;
+}
+
+
+JNIEXPORT jint JNICALL
+        Java_org_drrickorang_loopback_NativeAudioThread_nativeGetPlayerMaxBufferPeriod
+  (JNIEnv *env __unused, jobject obj __unused, jlong handle) {
+    native_engine_instance_t *pInstance = (native_engine_instance_t*) handle;
+    int playerMaxBufferPeriod = pInstance->methods->getPlayerMaxBufferPeriod(pInstance->context);
+
+    return playerMaxBufferPeriod;
+}
+
+
+JNIEXPORT jdouble JNICALL
+Java_org_drrickorang_loopback_NativeAudioThread_nativeGetPlayerVarianceBufferPeriod
+        (JNIEnv *env __unused, jobject obj __unused, jlong handle) {
+    native_engine_instance_t *pInstance = (native_engine_instance_t*) handle;
+    int64_t result = pInstance->methods->getPlayerVarianceBufferPeriod(pInstance->context);
+    // variance has units ns^2 so we have to square the conversion factor
+    double scaled = (double) result / ((double) NANOS_PER_MILLI * (double) NANOS_PER_MILLI);
+    return scaled;
+}
+
+
+jobject getCallbackTimes(JNIEnv *env, callbackTimeStamps *callbacks, short expectedBufferPeriod){
+    jintArray timeStamps = (*env)->NewIntArray(env, callbacks->index);
+    (*env)->SetIntArrayRegion(env, timeStamps, 0, callbacks->index, callbacks->timeStampsMs);
+
+    jshortArray callbackLengths = (*env)->NewShortArray(env, callbacks->index);
+    (*env)->SetShortArrayRegion(env, callbackLengths, 0, callbacks->index,
+                                callbacks->callbackDurations);
+
+    jclass cls = (*env)->FindClass(env, "org/drrickorang/loopback/BufferCallbackTimes");
+    jmethodID methodID = (*env)->GetMethodID(env, cls, "<init>", "([I[SZS)V");
+    jobject callbackTimes=(*env)->NewObject(env,cls, methodID, timeStamps, callbackLengths,
+                                            callbacks->exceededCapacity, expectedBufferPeriod);
+    return callbackTimes;
+}
+
+JNIEXPORT jobject
+JNICALL Java_org_drrickorang_loopback_NativeAudioThread_nativeGetPlayerCallbackTimeStamps
+        (JNIEnv *env, jobject obj __unused, jlong handle) {
+    native_engine_instance_t *pInstance = (native_engine_instance_t*) handle;
+    callbackTimeStamps *pTSs;
+    int expectedBufferPeriod = pInstance->methods->getPlayerTimeStampsAndExpectedBufferPeriod(
+            pInstance->context, &pTSs);
+    return getCallbackTimes(env, pTSs, expectedBufferPeriod);
+}
+
+JNIEXPORT jobject
+JNICALL Java_org_drrickorang_loopback_NativeAudioThread_nativeGetRecorderCallbackTimeStamps
+        (JNIEnv *env, jobject obj __unused, jlong handle) {
+    native_engine_instance_t *pInstance = (native_engine_instance_t*) handle;
+    callbackTimeStamps *pTSs;
+    int expectedBufferPeriod = pInstance->methods->getRecorderTimeStampsAndExpectedBufferPeriod(
+            pInstance->context, &pTSs);
+    return getCallbackTimes(env, pTSs, expectedBufferPeriod);
+}
+
+JNIEXPORT jint
+JNICALL Java_org_drrickorang_loopback_NativeAudioThread_nativeGetCaptureRank
+        (JNIEnv *env __unused, jobject obj __unused, jlong handle) {
+    native_engine_instance_t *pInstance = (native_engine_instance_t*) handle;
+    return pInstance->methods->getCaptureRank(pInstance->context);
+}
diff --git a/LoopbackApp/app/src/main/jni/jni_sles.h b/LoopbackApp/app/src/main/cpp/jni_native.h
similarity index 62%
rename from LoopbackApp/app/src/main/jni/jni_sles.h
rename to LoopbackApp/app/src/main/cpp/jni_native.h
index f25bd52..55a8042 100644
--- a/LoopbackApp/app/src/main/jni/jni_sles.h
+++ b/LoopbackApp/app/src/main/cpp/jni_native.h
@@ -14,56 +14,61 @@
  * limitations under the License.
  */
 
-#include <jni.h>
-
 #ifndef _Included_org_drrickorang_loopback_jni
 #define _Included_org_drrickorang_loopback_jni
+
+#include <jni.h>
+
 #ifdef __cplusplus
 extern "C" {
 #endif
 
 
 ////////////////////////
-////SLE
-JNIEXPORT jlong JNICALL Java_org_drrickorang_loopback_NativeAudioThread_slesInit
-  (JNIEnv *, jobject, jint, jint, jint, jint, jint, jdouble, jobject byteBuffer,
+JNIEXPORT jobject JNICALL
+Java_org_drrickorang_loopback_NativeAudioThread_nativeComputeDefaultSettings
+(JNIEnv *, jobject, jint bytesPerFrame, jint threadType, jint performanceMode);
+
+JNIEXPORT jlong JNICALL Java_org_drrickorang_loopback_NativeAudioThread_nativeInit
+  (JNIEnv *, jobject, jint, jint, jint, jint, jint, jint, jdouble, jobject byteBuffer,
    jshortArray loopbackTone, jint maxRecordedLateCallbacks, jint ignoreFirstFrames);
 
-JNIEXPORT jint JNICALL Java_org_drrickorang_loopback_NativeAudioThread_slesProcessNext
+JNIEXPORT jint JNICALL Java_org_drrickorang_loopback_NativeAudioThread_nativeProcessNext
   (JNIEnv *, jobject, jlong, jdoubleArray, jlong);
 
-JNIEXPORT jint JNICALL Java_org_drrickorang_loopback_NativeAudioThread_slesDestroy
+JNIEXPORT jint JNICALL Java_org_drrickorang_loopback_NativeAudioThread_nativeDestroy
   (JNIEnv *, jobject, jlong);
 
 JNIEXPORT jintArray JNICALL
-        Java_org_drrickorang_loopback_NativeAudioThread_slesGetRecorderBufferPeriod
+        Java_org_drrickorang_loopback_NativeAudioThread_nativeGetRecorderBufferPeriod
   (JNIEnv *, jobject, jlong);
 
 JNIEXPORT jint JNICALL
-        Java_org_drrickorang_loopback_NativeAudioThread_slesGetRecorderMaxBufferPeriod
+        Java_org_drrickorang_loopback_NativeAudioThread_nativeGetRecorderMaxBufferPeriod
   (JNIEnv *, jobject, jlong);
 
 JNIEXPORT jdouble JNICALL
-        Java_org_drrickorang_loopback_NativeAudioThread_slesGetRecorderVarianceBufferPeriod
+        Java_org_drrickorang_loopback_NativeAudioThread_nativeGetRecorderVarianceBufferPeriod
   (JNIEnv *, jobject, jlong);
 
 JNIEXPORT jintArray JNICALL
-        Java_org_drrickorang_loopback_NativeAudioThread_slesGetPlayerBufferPeriod
+        Java_org_drrickorang_loopback_NativeAudioThread_nativeGetPlayerBufferPeriod
   (JNIEnv *, jobject, jlong);
 
 JNIEXPORT jint JNICALL
-        Java_org_drrickorang_loopback_NativeAudioThread_slesGetPlayerMaxBufferPeriod
+        Java_org_drrickorang_loopback_NativeAudioThread_nativeGetPlayerMaxBufferPeriod
   (JNIEnv *, jobject, jlong);
 
 JNIEXPORT jdouble JNICALL
-        Java_org_drrickorang_loopback_NativeAudioThread_slesGetPlayerVarianceBufferPeriod
+        Java_org_drrickorang_loopback_NativeAudioThread_nativeGetPlayerVarianceBufferPeriod
   (JNIEnv *, jobject, jlong);
 
 JNIEXPORT jint JNICALL
-        Java_org_drrickorang_loopback_NativeAudioThread_slesGetCaptureRank
+        Java_org_drrickorang_loopback_NativeAudioThread_nativeGetCaptureRank
   (JNIEnv *, jobject, jlong);
 
 #ifdef __cplusplus
 }
 #endif
+
 #endif //_Included_org_drrickorang_loopback_jni
diff --git a/LoopbackApp/app/src/main/cpp/lb2/audio_buffer.h b/LoopbackApp/app/src/main/cpp/lb2/audio_buffer.h
new file mode 100644
index 0000000..3cb8912
--- /dev/null
+++ b/LoopbackApp/app/src/main/cpp/lb2/audio_buffer.h
@@ -0,0 +1,233 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef LB2_AUDIO_BUFFER_H_
+#define LB2_AUDIO_BUFFER_H_
+
+#include <algorithm>
+#include <functional>
+#include <memory>
+#include <string.h>
+
+#include <android/log.h>
+
+#include "lb2/sample.h"
+#include "lb2/util.h"
+
+// Implements sample / frame / byte count conversions. Not to be used directly.
+template<class T>
+class CountsConverter {
+  public:
+    size_t getDataSize() const { return getSampleCount() * sizeof(T); }
+    size_t getFrameCount() const { return mFrameCount; }
+    size_t getFrameSize() const { return mChannelCount * sizeof(T); }
+    size_t getSampleCount() const { return mFrameCount * mChannelCount; }
+    int getChannelCount() const { return mChannelCount; }
+
+  protected:
+    CountsConverter(size_t frameCount, int channelCount) :
+            mFrameCount(frameCount), mChannelCount(channelCount) {}
+    CountsConverter(const CountsConverter<T>&) = default;
+    CountsConverter(CountsConverter<T>&&) = default;
+    CountsConverter<T>& operator=(const CountsConverter<T>&) = default;
+    CountsConverter<T>& operator=(CountsConverter<T>&&) = default;
+
+  private:
+    // Fields are logically const, but can be overwritten during an object assignment.
+    size_t mFrameCount;
+    int mChannelCount;
+};
+
+// Implements the common parts of AudioBuffer and AudioBufferView.
+// Not to be used directly.
+//
+// Although AudioBuffer could be considered as an extension of AudioBufferView,
+// they have different copy/move semantics, and thus AudioBuffer
+// doesn't satisfy Liskov Substitution Principle. That's why these classes are
+// implemented as siblings instead, with an implicit conversion constructor of
+// AudioBufferView from AudioBuffer.
+template<class T>
+class AudioBufferBase : public CountsConverter<T> {
+  public:
+    void clear() { memset(mData, 0, CountsConverter<T>::getDataSize()); }
+    T* getData() const { return mData; }
+    T* getFrameAt(int offsetInFrames) const {
+        return mData + offsetInFrames * CountsConverter<T>::getChannelCount();
+    }
+
+  protected:
+    static constexpr size_t npos = static_cast<size_t>(-1);
+
+    AudioBufferBase(T* const data, size_t frameCount, int channelCount)
+            : CountsConverter<T>(frameCount, channelCount), mData(data) {}
+    AudioBufferBase(const AudioBufferBase<T>&) = default;
+    AudioBufferBase(AudioBufferBase<T>&&) = default;
+    AudioBufferBase<T>& operator=(const AudioBufferBase<T>&) = default;
+    AudioBufferBase<T>& operator=(AudioBufferBase<T>&&) = default;
+
+    AudioBufferBase<T> getView(int offsetInFrames, size_t lengthInFrames) const {
+        if (offsetInFrames < 0) {
+            __android_log_assert("assert", "lb2", "Negative buffer offset %d", offsetInFrames);
+        }
+        if (lengthInFrames > CountsConverter<T>::getFrameCount() - offsetInFrames) {
+            lengthInFrames = CountsConverter<T>::getFrameCount() - offsetInFrames;
+        }
+        return AudioBufferBase<T>(
+                getFrameAt(offsetInFrames), lengthInFrames, CountsConverter<T>::getChannelCount());
+    }
+
+  private:
+    // Fields are logically const, but can be overwritten during an object assignment.
+    T* mData;
+};
+
+template<class T> class AudioBufferView;
+
+// Container for PCM audio data, allocates the data buffer via 'new' and owns it.
+// Allows modification of the data. Does not support copying,
+// move only. For passing audio data around it's recommended
+// to use instances of AudioBufferView class instead.
+template<class T>
+class AudioBuffer : public AudioBufferBase<T> {
+  public:
+    // Null AudioBuffer constructor.
+    constexpr AudioBuffer(): AudioBufferBase<T>(nullptr, 0, 1), mBuffer() {}
+    AudioBuffer(size_t frameCount, int channelCount)
+            : AudioBufferBase<T>(new T[frameCount * channelCount], frameCount, channelCount),
+            mBuffer(AudioBufferBase<T>::getData()) {
+        AudioBufferBase<T>::clear();
+    }
+    AudioBuffer(const AudioBuffer<T>&) = delete;
+    AudioBuffer(AudioBuffer<T>&&) = default;
+    AudioBuffer<T>& operator=(const AudioBuffer<T>&) = delete;
+    AudioBuffer<T>& operator=(AudioBuffer<T>&&) = default;
+
+    AudioBufferView<T> getView(
+            int offsetInFrames = 0, size_t lengthInFrames = AudioBufferBase<T>::npos) const {
+        return AudioBufferBase<T>::getView(offsetInFrames, lengthInFrames);
+    }
+
+  private:
+    std::unique_ptr<T[]> mBuffer;
+};
+
+// Lightweight view into the PCM audio data provided by AudioBuffer.
+// AudioBufferView does *not* own buffer memory. Data can be modified
+// via the view. Thanks to its small size, should be passed by value.
+template<class T>
+class AudioBufferView : public AudioBufferBase<T> {
+  public:
+    AudioBufferView(T* const data, size_t frameCount, int channelCount)
+            : AudioBufferBase<T>(data, frameCount, channelCount) {}
+    // Implicit conversion from AudioBufferBase.
+    AudioBufferView(const AudioBufferBase<T>& b)
+            : AudioBufferBase<T>(b.getData(), b.getFrameCount(), b.getChannelCount()) {}
+    AudioBufferView(const AudioBufferView<T>&) = default;
+    AudioBufferView(AudioBufferView<T>&&) = default;
+    AudioBufferView<T>& operator=(const AudioBufferView<T>&) = default;
+    AudioBufferView<T>& operator=(AudioBufferView<T>&&) = default;
+
+    AudioBufferView<T> getView(
+            int offsetInFrames = 0, size_t lengthInFrames = AudioBufferBase<T>::npos) const {
+        return AudioBufferBase<T>::getView(offsetInFrames, lengthInFrames);
+    }
+};
+
+
+template<class S, class D>
+inline void convertAudioBufferViewType(AudioBufferView<S> src, AudioBufferView<D> dst) {
+    if (src.getChannelCount() != dst.getChannelCount()) {
+        __android_log_assert("assert", "lb2", "Buffer channel counts differ: %d != %d",
+                src.getChannelCount(), dst.getChannelCount());
+    }
+    if (src.getSampleCount() != dst.getSampleCount()) {
+        __android_log_assert("assert", "lb2", "Buffer sample counts differ: %lld != %lld",
+                (long long)src.getSampleCount(), (long long)dst.getSampleCount());
+    }
+    for (size_t i = 0; i < src.getSampleCount(); ++i) {
+        dst.getData()[i] = convertSampleType(src.getData()[i]);
+    }
+}
+
+template<class T>
+inline void forEachFrame(AudioBufferView<T> src, AudioBufferView<T> dst,
+        std::function<void(T* srcFrame, T* dstFrame)> op) {
+    T *srcData = src.getData();
+    T *dstData = dst.getData();
+    for (size_t i = 0;
+             i < std::min(src.getFrameCount(), dst.getFrameCount());
+             ++i, srcData += src.getChannelCount(), dstData += dst.getChannelCount()) {
+        op(srcData, dstData);
+    }
+}
+
+// Copies audio buffers data frame by frame. Initially fills the
+// destination buffer with zeroes. Ignores extra channels in the
+// source buffer.
+template<class T>
+inline void strideCopyAudioBufferViewData(AudioBufferView<T> src, AudioBufferView<T> dst) {
+    dst.clear();
+    forEachFrame<T>(src, dst,
+            [&](T* srcFrame, T* dstFrame) {
+                memcpy(dstFrame, srcFrame, std::min(src.getFrameSize(), dst.getFrameSize()));
+            });
+}
+
+// Copies audio buffers data frame by frame. If there are more
+// channels in the destination buffer than in the source buffer, the source
+// buffer content is duplicated to the extra channels until the entire frame
+// gets filled. E.g. if the source buffer has two channels, and the destination
+// buffer has five, then each frame of the destination buffer will be filled
+// as follows: 12121.
+// If the destination buffer has more frames than the source, the extra frames
+// are zeroed out.
+template<class T>
+inline void fillCopyAudioBufferViewData(AudioBufferView<T> src, AudioBufferView<T> dst) {
+    dst.clear();
+    const int srcFrameCopies = wholeMultiplier(dst.getChannelCount(), src.getChannelCount());
+    // A temporary buffer allowing to avoid dealing with copying a fraction of the source frame.
+    T srcFramePatch[srcFrameCopies * src.getChannelCount()];
+    forEachFrame<T>(src, dst,
+            [&](T* srcFrame, T* dstFrame) {
+               // Fill the temporary buffer with copies of the source frame.
+               T* patch = srcFramePatch;
+               for (int j = 0; j < srcFrameCopies; ++j, patch += src.getChannelCount()) {
+                   memcpy(patch, srcFrame, src.getFrameSize());
+               }
+               memcpy(dstFrame, srcFramePatch, dst.getFrameSize());
+            });
+}
+
+
+// Copies audio data between the AudioBufferViews of the same type.
+// Any missing audio data in the source buffer (not enough frames, or less
+// channels) is filled with zeroes in the destination buffer.
+template<class T>
+inline void copyAudioBufferViewData(AudioBufferView<T> src, AudioBufferView<T> dst) {
+    if (src.getChannelCount() == dst.getChannelCount()) {
+        size_t framesToCopy = std::min(src.getFrameCount(), dst.getFrameCount());
+        if (framesToCopy > 0) {
+            memcpy(dst.getData(), src.getData(), framesToCopy * dst.getFrameSize());
+        }
+        if (dst.getFrameCount() > framesToCopy) {
+            dst.getView(framesToCopy).clear();
+        }
+    } else {
+        fillCopyAudioBufferViewData(src, dst);
+    }
+}
+
+#endif  // LB2_AUDIO_BUFFER_H_
diff --git a/LoopbackApp/app/src/main/cpp/lb2/logging.h b/LoopbackApp/app/src/main/cpp/lb2/logging.h
new file mode 100644
index 0000000..9a6bc35
--- /dev/null
+++ b/LoopbackApp/app/src/main/cpp/lb2/logging.h
@@ -0,0 +1,54 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef LB2_LOGGING_H_
+#define LB2_LOGGING_H_
+
+#ifndef LOG_TAG
+#define LOG_TAG "lb2"
+#endif
+
+#include <android/log.h>
+#define ALOGD(...) __android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, __VA_ARGS__)
+#define ALOGV(...) __android_log_print(ANDROID_LOG_VERBOSE, LOG_TAG, __VA_ARGS__)
+#define ALOGI(...) __android_log_print(ANDROID_LOG_INFO, LOG_TAG, __VA_ARGS__)
+#define ALOGW(...) __android_log_print(ANDROID_LOG_WARN, LOG_TAG, __VA_ARGS__)
+#define ALOGE(...) __android_log_print(ANDROID_LOG_ERROR, LOG_TAG, __VA_ARGS__)
+#define ALOGF(...) __android_log_assert("assert", LOG_TAG, __VA_ARGS__)
+
+#include <android/trace.h>
+#define PASTE(x, y) x ## y
+#define ATRACE_NAME(name) ScopedTrace PASTE(___tracer, __LINE__) (name)
+#define ATRACE_CALL() ATRACE_NAME(__func__)
+
+struct ScopedTrace {
+    ScopedTrace(const char* name) {
+#if __ANDROID_API__ >= 23
+        ATrace_beginSection(name);
+#else
+        (void)name;
+#endif
+    }
+    ScopedTrace(const ScopedTrace&) = delete;
+    ScopedTrace& operator=(const ScopedTrace&) = delete;
+    ~ScopedTrace() {
+#if __ANDROID_API__ >= 23
+        ATrace_endSection();
+#endif
+    }
+};
+
+#endif  // LB2_LOGGING_H_
diff --git a/LoopbackApp/app/src/main/cpp/lb2/loopback2.cpp b/LoopbackApp/app/src/main/cpp/lb2/loopback2.cpp
new file mode 100644
index 0000000..228dc40
--- /dev/null
+++ b/LoopbackApp/app/src/main/cpp/lb2/loopback2.cpp
@@ -0,0 +1,163 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <memory>
+
+#include <android/log.h>
+
+#include "lb2/logging.h"
+#include "lb2/loopback2.h"
+#include "lb2/loopback_test.h"
+#include "lb2/sound_system_aaudio.h"
+#include "lb2/sound_system_echo.h"
+
+// The Java layer always uses "mono" mode for native tests.
+static constexpr int CHANNEL_COUNT = 1;
+
+struct LbData {
+    std::unique_ptr<TestContext> testContext;
+    std::unique_ptr<SoundSystem> soundSys;
+    std::unique_ptr<LoopbackTest> currentTest;
+};
+
+int lb2ComputeDefaultSettings(int performanceMode, int *samplingRate,
+             int *playerBufferFrameCount, int *recorderBufferFrameCount) {
+    SoundSystemAAudio ss;
+    return ss.probeDefaultSettings(static_cast<PerformanceMode>(performanceMode),
+            samplingRate, playerBufferFrameCount, recorderBufferFrameCount) ?
+            STATUS_SUCCESS : STATUS_FAIL;
+}
+
+int lb2Init(void **ppLbData, int samplingRate, int frameCount, int /*micSource*/,
+        int performanceMode, int testType, double frequency1, char* byteBufferPtr,
+        int byteBufferLength, short* loopbackTone, int /*maxRecordedLateCallbacks*/,
+        int ignoreFirstFrames) {
+    *ppLbData = nullptr;
+    std::unique_ptr<LbData> lbData(new LbData());  // will auto-release in case init fails.
+    switch (testType) {
+        case TEST_TYPE_LATENCY:
+            lbData->testContext.reset(new LatencyTestContext(
+                            static_cast<PerformanceMode>(performanceMode), frameCount,
+                            CHANNEL_COUNT, samplingRate, ignoreFirstFrames, loopbackTone));
+            break;
+        case TEST_TYPE_BUFFER_PERIOD: {
+            // TODO: Get rid of ByteBuffer.
+            static_assert(
+                    sizeof(sample_t) == sizeof(short), "byteBuffer only supports short samples");
+            AudioBufferView<sample_t> byteBuffer(
+                    reinterpret_cast<sample_t*>(byteBufferPtr), byteBufferLength, CHANNEL_COUNT);
+            lbData->testContext.reset(new GlitchTestContext(
+                            static_cast<PerformanceMode>(performanceMode), frameCount,
+                            CHANNEL_COUNT, samplingRate, frequency1, std::move(byteBuffer)));
+            break;
+        }
+        default:
+            ALOGE("Invalid test type: %d", testType);
+            return STATUS_FAIL;
+    }
+    // TODO: Implement switching from the Java side.
+    lbData->soundSys.reset(new SoundSystemAAudio(lbData->testContext.get()));
+    // lbData->soundSys.reset(new SoundSystemEcho(lbData->testContext.get()));
+    switch (testType) {
+        case TEST_TYPE_LATENCY:
+            lbData->currentTest.reset(new LatencyTest(
+                            lbData->soundSys.get(),
+                            static_cast<LatencyTestContext*>(lbData->testContext.get())));
+            break;
+        case TEST_TYPE_BUFFER_PERIOD:
+            lbData->currentTest.reset(new GlitchTest(
+                            lbData->soundSys.get(),
+                            static_cast<GlitchTestContext*>(lbData->testContext.get())));
+            break;
+    }
+    if (!lbData->currentTest->init()) return STATUS_FAIL;
+    *ppLbData = lbData.release();
+    return STATUS_SUCCESS;
+}
+
+int lb2ProcessNext(void *pLbData, double *pSamples, long maxSamples) {
+    if (pLbData == nullptr) return 0;
+    LbData *lbData = static_cast<LbData*>(pLbData);
+    return lbData->currentTest->collectRecording(
+            AudioBufferView<double>(pSamples, maxSamples / CHANNEL_COUNT, CHANNEL_COUNT));
+}
+
+int lb2Destroy(void **ppCtx) {
+    LbData** ppLbData = reinterpret_cast<LbData**>(ppCtx);
+    if (ppLbData != nullptr) {
+        delete *ppLbData;
+        *ppLbData = nullptr;
+        return STATUS_SUCCESS;
+    } else {
+        return STATUS_FAIL;
+    }
+}
+
+int* lb2GetRecorderBufferPeriod(void*) {
+    static int *bufferPeriod = new int[1002]();
+    return bufferPeriod;
+}
+
+int lb2GetRecorderMaxBufferPeriod(void*) {
+    return 0;
+}
+
+int64_t lb2GetRecorderVarianceBufferPeriod(void*) {
+    return 0;
+}
+
+int* lb2GetPlayerBufferPeriod(void*) {
+    static int *bufferPeriod = new int[1002]();
+    return bufferPeriod;
+}
+
+int lb2GetPlayerMaxBufferPeriod(void*) {
+    return 0;
+}
+
+int64_t lb2GetPlayerVarianceBufferPeriod(void*) {
+    return 0;
+}
+
+int lb2GetCaptureRank(void*) {
+    return 0;
+}
+
+int lb2GetPlayerTimeStampsAndExpectedBufferPeriod(void*, callbackTimeStamps **ppTSs) {
+    static callbackTimeStamps tss = {
+        new int[10],               //int* timeStampsMs
+        new short[10],             //short* callbackDurations
+        0,                         //short index
+        {0,0},                     //struct timespec startTime;
+        0,                         //int capacity
+        false                      //bool exceededCapacity
+    };
+    *ppTSs = &tss;
+    return 0;
+}
+
+int lb2GetRecorderTimeStampsAndExpectedBufferPeriod(void*, callbackTimeStamps **ppTSs) {
+    static callbackTimeStamps tss = {
+        new int[10],               //int* timeStampsMs
+        new short[10],             //short* callbackDurations
+        0,                         //short index
+        {0,0},                     //struct timespec startTime;
+        0,                         //int capacity
+        false                      //bool exceededCapacity
+    };
+    *ppTSs = &tss;
+    return 0;
+}
diff --git a/LoopbackApp/app/src/main/cpp/lb2/loopback2.h b/LoopbackApp/app/src/main/cpp/lb2/loopback2.h
new file mode 100644
index 0000000..512c72e
--- /dev/null
+++ b/LoopbackApp/app/src/main/cpp/lb2/loopback2.h
@@ -0,0 +1,48 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _Included_org_drrickorang_loopback_lb2_loopback2
+#define _Included_org_drrickorang_loopback_lb2_loopback2
+
+#include "loopback.h"
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+int lb2ComputeDefaultSettings(int performanceMode, int *samplingRate,
+             int *playerBufferFrameCount, int *recorderBufferFrameCount);
+int lb2Init(void ** ppCtx, int samplingRate, int frameCount, int micSource,
+             int performanceMode,
+             int testType, double frequency1, char* byteBufferPtr, int byteBufferLength,
+             short* loopbackTone, int maxRecordedLateCallbacks, int ignoreFirstFrames);
+int lb2Destroy(void ** ppCtx);
+int lb2ProcessNext(void *pCtx, double *pSamples, long maxSamples);
+int* lb2GetRecorderBufferPeriod(void *pCtx);
+int lb2GetRecorderMaxBufferPeriod(void *pCtx);
+int64_t lb2GetRecorderVarianceBufferPeriod(void *pCtx);
+int* lb2GetPlayerBufferPeriod(void *pCtx);
+int lb2GetPlayerMaxBufferPeriod(void *pCtx);
+int64_t lb2GetPlayerVarianceBufferPeriod(void *pCtx);
+int lb2GetCaptureRank(void *pCtx);
+int lb2GetPlayerTimeStampsAndExpectedBufferPeriod(void *pCtx, callbackTimeStamps **ppTSs);
+int lb2GetRecorderTimeStampsAndExpectedBufferPeriod(void *pCtx, callbackTimeStamps **ppTSs);
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif  // _Included_org_drrickorang_loopback_lb2_loopback2
diff --git a/LoopbackApp/app/src/main/cpp/lb2/loopback_test.cpp b/LoopbackApp/app/src/main/cpp/lb2/loopback_test.cpp
new file mode 100644
index 0000000..9e9b1a7
--- /dev/null
+++ b/LoopbackApp/app/src/main/cpp/lb2/loopback_test.cpp
@@ -0,0 +1,184 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "lb2/loopback_test.h"
+
+#include <chrono>
+#include <thread>
+
+#include "byte_buffer.h"
+#include "lb2/logging.h"
+#include "lb2/util.h"
+
+constexpr size_t LoopbackTest::COLLECTION_PERIOD_MS;
+
+LoopbackTest::LoopbackTest(SoundSystem* soundSys, TestContext* testCtx) :
+        mSoundSys(soundSys),
+        mReadBuffer(testCtx->createAudioBuffer()),
+        mTestCtx(testCtx),
+        mRecordingFifoData(new sample_t[RECORDING_FIFO_FRAMES * testCtx->getChannelCount()]) {
+    audio_utils_fifo_init(
+            &mRecordingFifo,
+            RECORDING_FIFO_FRAMES,
+            mTestCtx->getFrameSize(),
+            mRecordingFifoData.get());
+}
+
+LoopbackTest::~LoopbackTest() {
+    audio_utils_fifo_deinit(&mRecordingFifo);
+}
+
+bool LoopbackTest::init() {
+    return true;
+}
+
+int LoopbackTest::collectRecording(AudioBufferView<double> buffer) {
+    int framesRead = 0;
+    AudioBuffer<sample_t> readBuffer(mTestCtx->createAudioBuffer());
+
+    for (size_t i = 0; i < COLLECTION_LOOPS; ++i) {
+        std::this_thread::sleep_for(std::chrono::milliseconds(COLLECTION_PERIOD_MS));
+        if (i != 0) {
+            readBuffer.clear();
+        }
+        while (framesRead <= static_cast<int>(buffer.getFrameCount())) {
+            // Note that we always read in mTestCtx->getFrameCount() chunks.
+            // This is how the legacy version works, but it's not clear whether
+            // this is correct, since some data from the fifo may be lost
+            // if the size of the buffer provided by Java isn't a multiple of
+            // getFrameCount().
+            ssize_t actualFrames = audio_utils_fifo_read(
+                    &mRecordingFifo, readBuffer.getData(), readBuffer.getFrameCount());
+            if (actualFrames <= 0) break;
+            AudioBufferView<double> dst = buffer.getView(framesRead, actualFrames);
+            convertAudioBufferViewType(readBuffer.getView(0, dst.getFrameCount()), dst);
+            framesRead += actualFrames;
+        }
+    }
+    return framesRead * mTestCtx->getChannelCount();
+}
+
+void LoopbackTest::receiveRecording(size_t framesRead) {
+    ssize_t actualFrames =
+            audio_utils_fifo_write(&mRecordingFifo, mReadBuffer.getData(), framesRead);
+    if (actualFrames >= 0 && static_cast<size_t>(actualFrames) != framesRead) {
+        ALOGW("recording pipe problem (expected %lld): %lld",
+                (long long)framesRead, (long long)actualFrames);
+    } else if (actualFrames < 0) {
+        ALOGW("pipe write returned negative value: %lld", (long long)actualFrames);
+    }
+}
+
+
+LatencyTest::LatencyTest(SoundSystem* soundSys, LatencyTestContext* testCtx)
+        : LoopbackTest(soundSys, testCtx),
+          //mTestCtx(testCtx),
+          mDrainInput(true),
+          mInputFramesToDiscard(testCtx->getInputFramesToDiscard()),
+          mInitialSilenceFrameCount(wholeMultiplier(
+                          testCtx->getSamplingRateHz() * INITIAL_SILENCE_MS, MS_PER_SECOND)),
+          mInjectImpulseNextFramePos(0),
+          mImpulse(testCtx->getImpulse()) {
+}
+
+LatencyTest::~LatencyTest() {
+    mSoundSys->shutdown();
+}
+
+bool LatencyTest::init() {
+    if (!LoopbackTest::init()) return false;
+    return mSoundSys->init(std::bind(&LatencyTest::writeCallback, this, std::placeholders::_1));
+}
+
+AudioBufferView<sample_t> LatencyTest::writeCallback(size_t expectedFrames) {
+    // Always perform a read operation first since the read buffer is always
+    // filling in. But depending on the conditions, the read data is either
+    // completely discarded, or being sent to the Java layer, and may in addition
+    // be written back to the output.
+    //
+    // There are strange side effects on Pixel 2 if the app is trying to read
+    // too much data, so always read only as many frames as we can currently write.
+    // See b/68003241.
+    AudioBufferView<sample_t> readBuffer = mReadBuffer.getView(0, expectedFrames);
+    ssize_t framesRead = mSoundSys->readAudio(readBuffer);
+    // ALOGV("Read %lld frames of %lld",
+    //         (long long)framesRead, (long long)readBuffer.getFrameCount());
+    if (mInputFramesToDiscard > 0 || mInitialSilenceFrameCount > 0) {
+        if (mInputFramesToDiscard > 0) {
+            mInputFramesToDiscard -= framesRead;
+        } else {
+            if (framesRead > 0) {
+                receiveRecording(framesRead);
+            }
+            mInitialSilenceFrameCount -= expectedFrames;
+        }
+    } else if (mDrainInput) {
+        if (mSoundSys->drainInput()) {
+            mDrainInput = false;
+        }
+    } else {
+        if (framesRead > 0) {
+            receiveRecording(framesRead);
+        }
+        if (mInjectImpulseNextFramePos >= 0) {
+            ALOGV("Injecting impulse from pos %d", mInjectImpulseNextFramePos);
+            AudioBufferView<sample_t> impulseChunk =
+                    mImpulse.getView(mInjectImpulseNextFramePos, expectedFrames);
+            mInjectImpulseNextFramePos += impulseChunk.getFrameCount();
+            if (mInjectImpulseNextFramePos >= static_cast<int>(mImpulse.getFrameCount())) {
+                mInjectImpulseNextFramePos = -1;
+            }
+            return impulseChunk;
+        } else if (framesRead > 0) {
+            return readBuffer.getView(0, framesRead);
+        }
+    }
+    return AudioBuffer<sample_t>();
+}
+
+
+GlitchTest::GlitchTest(SoundSystem* soundSys, GlitchTestContext* testCtx)
+        : LoopbackTest(soundSys, testCtx),
+          mTestCtx(testCtx) {
+}
+
+GlitchTest::~GlitchTest() {
+    mSoundSys->shutdown();
+}
+
+bool GlitchTest::init() {
+    if (!LoopbackTest::init()) return false;
+    return mSoundSys->init(std::bind(&GlitchTest::writeCallback, this, std::placeholders::_1));
+}
+
+AudioBufferView<sample_t> GlitchTest::writeCallback(size_t expectedFrames) {
+    ssize_t framesRead = mSoundSys->readAudio(mReadBuffer);
+    if (framesRead > 0) {
+        receiveRecording(framesRead);
+        ssize_t bbResult = byteBuffer_write(
+                reinterpret_cast<char*>(mTestCtx->getByteBuffer().getData()),
+                mTestCtx->getByteBuffer().getFrameCount(),
+                reinterpret_cast<const char*>(mReadBuffer.getData()),
+                framesRead, mTestCtx->getChannelCount());
+        if (bbResult >= 0 && bbResult < framesRead) {
+            ALOGW("ByteBuffer only consumed %lld bytes from %lld",
+                    (long long)bbResult, (long long)framesRead);
+        } else if (bbResult < 0) {
+            ALOGW("ByteBuffer error: %lld", (long long)bbResult);
+        }
+    }
+    return mTestCtx->getNextImpulse(expectedFrames);
+}
diff --git a/LoopbackApp/app/src/main/cpp/lb2/loopback_test.h b/LoopbackApp/app/src/main/cpp/lb2/loopback_test.h
new file mode 100644
index 0000000..06a25ae
--- /dev/null
+++ b/LoopbackApp/app/src/main/cpp/lb2/loopback_test.h
@@ -0,0 +1,103 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef LB2_LOOPBACK_TEST_H_
+#define LB2_LOOPBACK_TEST_H_
+
+#include <atomic>
+#include <memory>
+
+#include <audio_utils/fifo.h>
+
+#include "lb2/audio_buffer.h"
+#include "lb2/sound_system.h"
+#include "lb2/test_context.h"
+
+// Generic test interface. The test is driven by the write callback
+// of the sound system and periodic polling via 'collectRecording'
+// method.
+class LoopbackTest {
+  public:
+    LoopbackTest(SoundSystem* soundSys, TestContext* testCtx);
+    LoopbackTest(const LoopbackTest&) = delete;
+    LoopbackTest& operator=(const LoopbackTest&) = delete;
+    virtual ~LoopbackTest();
+
+    virtual bool init();
+    virtual int collectRecording(AudioBufferView<double> buffer);
+
+  protected:
+    // This method is called on the sound system callback thread.
+    void receiveRecording(size_t framesRead);
+
+    SoundSystem* mSoundSys;
+    AudioBuffer<sample_t> mReadBuffer;
+
+  private:
+    static constexpr size_t RECORDING_FIFO_FRAMES = 65536;
+    static constexpr size_t COLLECTION_LOOPS = 10;
+    static constexpr size_t COLLECTION_PERIOD_MS = 100;
+
+    TestContext* mTestCtx;
+    std::unique_ptr<sample_t[]> mRecordingFifoData;
+    struct audio_utils_fifo mRecordingFifo;
+};
+
+
+// Latency test implementation. Using the parameters from the test
+// context, first it drains the audio system read queue, then injects
+// provided impulse, and then copies read audio input to output.
+class LatencyTest : public LoopbackTest {
+  public:
+    LatencyTest(SoundSystem* soundSys, LatencyTestContext* testCtx);
+    LatencyTest(const LatencyTest&) = delete;
+    LatencyTest& operator=(const LatencyTest&) = delete;
+    virtual ~LatencyTest();
+
+    bool init() override;
+
+  private:
+    static constexpr size_t INITIAL_SILENCE_MS = 240;  // Approx. as in the legacy version.
+
+    AudioBufferView<sample_t> writeCallback(size_t expectedFrames);
+
+    //LatencyTestContext* mTestCtx;
+    bool mDrainInput;
+    int mInputFramesToDiscard;
+    int mInitialSilenceFrameCount;
+    int mInjectImpulseNextFramePos;
+    AudioBufferView<sample_t> mImpulse;
+};
+
+
+// Glitch test implementation. Writes the test signal to output,
+// and reads back input.
+class GlitchTest : public LoopbackTest {
+  public:
+    GlitchTest(SoundSystem* soundSys, GlitchTestContext* testCtx);
+    GlitchTest(const GlitchTest&) = delete;
+    GlitchTest& operator=(const GlitchTest&) = delete;
+    virtual ~GlitchTest();
+
+    bool init() override;
+
+  private:
+    AudioBufferView<sample_t> writeCallback(size_t expectedFrames);
+
+    GlitchTestContext* mTestCtx;
+};
+
+#endif  // LB2_LOOPBACK_TEST_H_
diff --git a/LoopbackApp/app/src/main/cpp/lb2/oboe/src/aaudio/AAudioLoader.cpp b/LoopbackApp/app/src/main/cpp/lb2/oboe/src/aaudio/AAudioLoader.cpp
new file mode 100644
index 0000000..1c7be2f
--- /dev/null
+++ b/LoopbackApp/app/src/main/cpp/lb2/oboe/src/aaudio/AAudioLoader.cpp
@@ -0,0 +1,194 @@
+/*
+ * Copyright 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "lb2/oboe/src/aaudio/AAudioLoader.h"
+
+#include <dlfcn.h>
+
+#include "lb2/logging.h"
+
+#define LIB_AAUDIO_NAME "libaaudio.so"
+
+AAudioLoader::~AAudioLoader() {
+    close(); // TODO dangerous from a destructor, require caller to close()
+}
+
+AAudioLoader* AAudioLoader::getInstance() {
+    static AAudioLoader instance;
+    return &instance;
+}
+
+int AAudioLoader::open() {
+    if (mLibHandle != nullptr) {
+        return 0;
+    }
+    mLibHandle = dlopen(LIB_AAUDIO_NAME, 0);
+    if (mLibHandle == nullptr) {
+        ALOGI("AAudioLoader::open() could not find " LIB_AAUDIO_NAME);
+        return -1; // TODO review return code
+    } else {
+        ALOGD("AAudioLoader():  dlopen(%s) returned %p", LIB_AAUDIO_NAME, mLibHandle);
+    }
+
+    // Load all the function pointers.
+    createStreamBuilder = (aaudio_result_t (*)(AAudioStreamBuilder **builder))
+            dlsym(mLibHandle, "AAudio_createStreamBuilder");
+
+    builder_openStream = (aaudio_result_t (*)(AAudioStreamBuilder *builder,
+                                              AAudioStream **stream))
+            dlsym(mLibHandle, "AAudioStreamBuilder_openStream");
+
+    builder_setChannelCount    = load_V_PBI("AAudioStreamBuilder_setChannelCount");
+    if (builder_setChannelCount == nullptr) {
+        // Use old alias if needed.
+        builder_setChannelCount    = load_V_PBI("AAudioStreamBuilder_setSamplesPerFrame");
+    }
+
+    builder_setBufferCapacityInFrames = load_V_PBI("AAudioStreamBuilder_setBufferCapacityInFrames");
+    builder_setDeviceId        = load_V_PBI("AAudioStreamBuilder_setDeviceId");
+    builder_setDirection       = load_V_PBI("AAudioStreamBuilder_setDirection");
+    builder_setFormat          = load_V_PBI("AAudioStreamBuilder_setFormat");
+    builder_setFramesPerDataCallback = load_V_PBI("AAudioStreamBuilder_setFramesPerDataCallback");
+    builder_setSharingMode     = load_V_PBI("AAudioStreamBuilder_setSharingMode");
+    builder_setPerformanceMode     = load_V_PBI("AAudioStreamBuilder_setPerformanceMode");
+    builder_setSampleRate      = load_V_PBI("AAudioStreamBuilder_setSampleRate");
+
+    builder_delete             = load_I_PB("AAudioStreamBuilder_delete");
+
+    stream_getFormat = (aaudio_format_t (*)(AAudioStream *stream))
+            dlsym(mLibHandle, "AAudioStream_getFormat");
+
+    builder_setDataCallback = (void (*)(AAudioStreamBuilder *builder,
+                                        AAudioStream_dataCallback callback,
+                                        void *userData))
+            dlsym(mLibHandle, "AAudioStreamBuilder_setDataCallback");
+
+    builder_setErrorCallback = (void (*)(AAudioStreamBuilder *builder,
+                                        AAudioStream_errorCallback callback,
+                                        void *userData))
+            dlsym(mLibHandle, "AAudioStreamBuilder_setErrorCallback");
+
+    stream_read = (aaudio_result_t (*)(AAudioStream *stream,
+                                       void *buffer,
+                                       int32_t numFrames,
+                                       int64_t timeoutNanoseconds))
+            dlsym(mLibHandle, "AAudioStream_read");
+
+    stream_write = (aaudio_result_t (*)(AAudioStream *stream,
+                                        const void *buffer,
+                                        int32_t numFrames,
+                                        int64_t timeoutNanoseconds))
+            dlsym(mLibHandle, "AAudioStream_write");
+
+
+    stream_waitForStateChange = (aaudio_result_t (*)(AAudioStream *stream,
+                                                 aaudio_stream_state_t inputState,
+                                                 aaudio_stream_state_t *nextState,
+                                                 int64_t timeoutNanoseconds))
+            dlsym(mLibHandle, "AAudioStream_waitForStateChange");
+
+
+    stream_getTimestamp = (aaudio_result_t (*)(AAudioStream *stream,
+                                           clockid_t clockid,
+                                           int64_t *framePosition,
+                                           int64_t *timeNanoseconds))
+            dlsym(mLibHandle, "AAudioStream_getTimestamp");
+
+    stream_getChannelCount    = load_I_PS("AAudioStream_getChannelCount");
+    if (stream_getChannelCount == nullptr) {
+        // Use old alias if needed.
+        stream_getChannelCount    = load_I_PS("AAudioStream_getSamplesPerFrame");
+    }
+
+    stream_close              = load_I_PS("AAudioStream_close");
+
+    stream_getBufferSize      = load_I_PS("AAudioStream_getBufferSizeInFrames");
+    stream_getDeviceId        = load_I_PS("AAudioStream_getDeviceId");
+    stream_getDirection       = load_I_PS("AAudioStream_getDirection");
+    stream_getBufferCapacity  = load_I_PS("AAudioStream_getBufferCapacityInFrames");
+    stream_getFramesPerBurst  = load_I_PS("AAudioStream_getFramesPerBurst");
+    stream_getFramesRead      = load_L_PS("AAudioStream_getFramesRead");
+    stream_getFramesWritten   = load_L_PS("AAudioStream_getFramesWritten");
+    stream_getPerformanceMode = load_I_PS("AAudioStream_getPerformanceMode");
+    stream_getSampleRate      = load_I_PS("AAudioStream_getSampleRate");
+    stream_getSharingMode     = load_I_PS("AAudioStream_getSharingMode");
+    stream_getState           = load_I_PS("AAudioStream_getState");
+    stream_getXRunCount       = load_I_PS("AAudioStream_getXRunCount");
+
+    stream_requestStart       = load_I_PS("AAudioStream_requestStart");
+    stream_requestPause       = load_I_PS("AAudioStream_requestPause");
+    stream_requestFlush       = load_I_PS("AAudioStream_requestFlush");
+    stream_requestStop        = load_I_PS("AAudioStream_requestStop");
+
+    stream_setBufferSize      = load_I_PSI("AAudioStream_setBufferSizeInFrames");
+
+    convertResultToText       = load_PC_I("AAudio_convertResultToText");
+    convertStreamStateToText  = load_PC_I("AAudio_convertStreamStateToText");
+
+    return 0;
+}
+
+int AAudioLoader::close() {
+    if (mLibHandle != nullptr) {
+        dlclose(mLibHandle);
+        mLibHandle = nullptr;
+    }
+    return 0;
+}
+
+static void AAudioLoader_check(void *proc, const char *functionName) {
+    if (proc == nullptr) {
+        ALOGE("AAudioLoader could not find %s", functionName);
+    } else {
+        ALOGV("AAudioLoader(): dlsym(%s) succeeded.", functionName);
+    }
+}
+
+AAudioLoader::signature_PC_I AAudioLoader::load_PC_I(const char *functionName) {
+    signature_PC_I proc = (signature_PC_I) dlsym(mLibHandle, functionName);
+    AAudioLoader_check((void *)proc, functionName);
+    return proc;
+}
+
+AAudioLoader::signature_V_PBI AAudioLoader::load_V_PBI(const char *functionName) {
+    signature_V_PBI proc = (signature_V_PBI) dlsym(mLibHandle, functionName);
+    AAudioLoader_check((void *)proc, functionName);
+    return proc;
+}
+
+AAudioLoader::signature_I_PSI AAudioLoader::load_I_PSI(const char *functionName) {
+    signature_I_PSI proc = (signature_I_PSI) dlsym(mLibHandle, functionName);
+    AAudioLoader_check((void *)proc, functionName);
+    return proc;
+}
+
+AAudioLoader::signature_I_PS AAudioLoader::load_I_PS(const char *functionName) {
+    signature_I_PS proc = (signature_I_PS) dlsym(mLibHandle, functionName);
+    AAudioLoader_check((void *)proc, functionName);
+    return proc;
+}
+
+AAudioLoader::signature_L_PS AAudioLoader::load_L_PS(const char *functionName) {
+    signature_L_PS proc = (signature_L_PS) dlsym(mLibHandle, functionName);
+    AAudioLoader_check((void *)proc, functionName);
+    return proc;
+}
+
+AAudioLoader::signature_I_PB AAudioLoader::load_I_PB(const char *functionName) {
+    signature_I_PB proc = (signature_I_PB) dlsym(mLibHandle, functionName);
+    AAudioLoader_check((void *)proc, functionName);
+    return proc;
+}
diff --git a/LoopbackApp/app/src/main/cpp/lb2/oboe/src/aaudio/AAudioLoader.h b/LoopbackApp/app/src/main/cpp/lb2/oboe/src/aaudio/AAudioLoader.h
new file mode 100644
index 0000000..8346a09
--- /dev/null
+++ b/LoopbackApp/app/src/main/cpp/lb2/oboe/src/aaudio/AAudioLoader.h
@@ -0,0 +1,160 @@
+/*
+ * Copyright 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef OBOE_AAUDIO_LOADER_H_
+#define OBOE_AAUDIO_LOADER_H_
+
+#include <aaudio/AAudio.h>
+
+/**
+ * The AAudio API was not available in early versions of Android.
+ * To avoid linker errors, we dynamically link with the functions by name using dlsym().
+ * On older versions this linkage will safely fail.
+ */
+class AAudioLoader {
+  public:
+    // Use signatures for common functions.
+    typedef const char * (*signature_PC_I)(int32_t);
+    typedef int32_t (*signature_I_I)(int32_t);
+    typedef int32_t (*signature_I_II)(int32_t, int32_t);
+    typedef int32_t (*signature_I_IPI)(int32_t, int32_t *);
+    typedef int32_t (*signature_I_IIPI)(int32_t, int32_t, int32_t *);
+
+    typedef int32_t (*signature_I_PB)(AAudioStreamBuilder *);  // AAudioStreamBuilder_delete()
+    // AAudioStreamBuilder_setSampleRate()
+    typedef void    (*signature_V_PBI)(AAudioStreamBuilder *, int32_t);
+
+    typedef int32_t (*signature_I_PS)(AAudioStream *);  // AAudioStream_getSampleRate()
+    typedef int64_t (*signature_L_PS)(AAudioStream *);  // AAudioStream_getFramesRead()
+    // AAudioStream_setBufferSizeInFrames()
+    typedef int32_t (*signature_I_PSI)(AAudioStream *, int32_t);
+
+    static AAudioLoader* getInstance(); // singleton
+
+    /**
+     * Open the AAudio shared library and load the function pointers.
+     * This can be called multiple times.
+     * It should only be called from one thread.
+     *
+     * @return 0 if successful or negative error.
+     */
+    int open();
+
+    /**
+     * Close the AAudio shared library.
+     * This can be called multiple times.
+     * It should only be called from one thread.
+     *
+     * The open() and close() do not nest. Calling close() once will always close the library.
+     * The destructor will call close() so you don't need to.
+     *
+     * @return 0 if successful or negative error.
+     */
+    int close();
+
+    // Function pointers into the AAudio shared library.
+    aaudio_result_t (*createStreamBuilder)(AAudioStreamBuilder **builder);
+
+    aaudio_result_t  (*builder_openStream)(AAudioStreamBuilder *builder,
+                                           AAudioStream **stream);
+
+    signature_V_PBI builder_setBufferCapacityInFrames;
+    signature_V_PBI builder_setChannelCount;
+    signature_V_PBI builder_setDeviceId;
+    signature_V_PBI builder_setDirection;
+    signature_V_PBI builder_setFormat;
+    signature_V_PBI builder_setFramesPerDataCallback;
+    signature_V_PBI builder_setPerformanceMode;
+    signature_V_PBI builder_setSampleRate;
+    signature_V_PBI builder_setSharingMode;
+
+    void (*builder_setDataCallback)(AAudioStreamBuilder *builder,
+                                    AAudioStream_dataCallback callback,
+                                    void *userData);
+
+    void (*builder_setErrorCallback)(AAudioStreamBuilder *builder,
+                                    AAudioStream_errorCallback callback,
+                                    void *userData);
+
+    signature_I_PB  builder_delete;
+
+    aaudio_format_t (*stream_getFormat)(AAudioStream *stream);
+
+    aaudio_result_t (*stream_read)(AAudioStream* stream,
+                                   void *buffer,
+                                   int32_t numFrames,
+                                   int64_t timeoutNanoseconds);
+
+    aaudio_result_t (*stream_write)(AAudioStream *stream,
+                                   const void *buffer,
+                                   int32_t numFrames,
+                                   int64_t timeoutNanoseconds);
+
+    aaudio_result_t (*stream_waitForStateChange)(AAudioStream *stream,
+                                                 aaudio_stream_state_t inputState,
+                                                 aaudio_stream_state_t *nextState,
+                                                 int64_t timeoutNanoseconds);
+
+    aaudio_result_t (*stream_getTimestamp)(AAudioStream *stream,
+                                          clockid_t clockid,
+                                          int64_t *framePosition,
+                                          int64_t *timeNanoseconds);
+
+    signature_I_PS   stream_close;
+
+    signature_I_PS   stream_getChannelCount;
+    signature_I_PS   stream_getDeviceId;
+    signature_I_PS   stream_getDirection;
+    signature_I_PS   stream_getBufferSize;
+    signature_I_PS   stream_getBufferCapacity;
+    signature_I_PS   stream_getFramesPerBurst;
+    signature_I_PS   stream_getState;
+    signature_I_PS   stream_getPerformanceMode;
+    signature_I_PS   stream_getSampleRate;
+    signature_I_PS   stream_getSharingMode;
+    signature_I_PS   stream_getXRunCount;
+
+    signature_I_PSI  stream_setBufferSize;
+    signature_I_PS   stream_requestStart;
+    signature_I_PS   stream_requestPause;
+    signature_I_PS   stream_requestFlush;
+    signature_I_PS   stream_requestStop;
+
+    signature_L_PS   stream_getFramesRead;
+    signature_L_PS   stream_getFramesWritten;
+
+    signature_PC_I   convertResultToText;
+    signature_PC_I   convertStreamStateToText;
+
+    // TODO add any missing AAudio functions.
+
+  private:
+    AAudioLoader() {}
+    ~AAudioLoader();
+
+    // Load function pointers for specific signatures.
+    signature_PC_I   load_PC_I(const char *name);
+
+    signature_V_PBI  load_V_PBI(const char *name);
+    signature_I_PB   load_I_PB(const char *name);
+    signature_I_PS   load_I_PS(const char *name);
+    signature_L_PS   load_L_PS(const char *name);
+    signature_I_PSI  load_I_PSI(const char *name);
+
+    void *mLibHandle = nullptr;
+};
+
+#endif //OBOE_AAUDIO_LOADER_H_
diff --git a/LoopbackApp/app/src/main/cpp/lb2/sample.h b/LoopbackApp/app/src/main/cpp/lb2/sample.h
new file mode 100644
index 0000000..4687169
--- /dev/null
+++ b/LoopbackApp/app/src/main/cpp/lb2/sample.h
@@ -0,0 +1,39 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef LB2_SAMPLE_H_
+#define LB2_SAMPLE_H_
+
+#include <cmath>
+#include <limits>
+
+using sample_t = int16_t;  // For flexibility. May change to a float type if needed.
+
+static_assert(std::is_integral<sample_t>::value,
+        "FULL_SAMPLE_SCALE assumes sample values are of integer type");
+// FIXME: Should we switch to using floats, the maximum value will be 1.0.
+constexpr double FULL_SAMPLE_SCALE = std::numeric_limits<sample_t>::max() + 1;
+
+inline double convertSampleType(sample_t s) {
+    static_assert(std::numeric_limits<sample_t>::is_signed, "sample value is assumed to be signed");
+    return s / FULL_SAMPLE_SCALE;
+}
+
+inline sample_t convertSampleType(double d) {
+    return std::trunc(d * FULL_SAMPLE_SCALE);
+}
+
+#endif  // LB2_SAMPLE_H_
diff --git a/LoopbackApp/app/src/main/cpp/lb2/sound_system.h b/LoopbackApp/app/src/main/cpp/lb2/sound_system.h
new file mode 100644
index 0000000..8ea6ec4
--- /dev/null
+++ b/LoopbackApp/app/src/main/cpp/lb2/sound_system.h
@@ -0,0 +1,61 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef LB2_SOUND_SYSTEM_H_
+#define LB2_SOUND_SYSTEM_H_
+
+#include <functional>
+
+#include "lb2/audio_buffer.h"
+#include "lb2/test_context.h"  // for PerformanceMode
+
+// Interface for sound systems.
+// It is assumed that "pull" model (callback) is used for providing
+// sound data to the system, and "push" model (sync read) is used
+// for sound input.
+class SoundSystem {
+  public:
+    // The memory region pointed by this buffer must remain
+    // valid until the write callback is called the next time,
+    // or until 'shutdown' is called.
+    using WriteCallback = std::function<AudioBufferView<sample_t>(size_t expectedFrames)>;
+
+    SoundSystem() = default;
+    SoundSystem(const SoundSystem&) = delete;
+    SoundSystem& operator=(const SoundSystem&) = delete;
+    virtual ~SoundSystem() {}
+
+    // Probes the output hardware for the recommended parameters for input
+    // and output streams. Returns 'false' if probing is impossible or has failed.
+    // Note that this is a separate use case for the sound system. After commencing
+    // probing, the instance of the sound system used for probing must be shut down.
+    virtual bool probeDefaultSettings(PerformanceMode /*performanceMode*/, int* /*samplingRate*/,
+            int* /*playerBufferFrameCount*/, int* /*recorderBufferFrameCount*/) { return false; }
+    // Initializes the sound system for the regular testing scenario.
+    // Returns 'true' if initialization was successful, 'false' otherwise.
+    virtual bool init(WriteCallback callback) = 0;
+    // Make sure the buffer of the input stream is empty, so fresh audio data
+    // can be received immediately on the next call to 'readAudio'.
+    // Returns 'true' if there were no errors, 'false' otherwise.
+    virtual bool drainInput() = 0;
+    // Reads from audio input into the provided buffer. A non-negative result value
+    // indicates success, a negative return value indicates an error.
+    virtual ssize_t readAudio(AudioBufferView<sample_t> buffer) = 0;
+    // Shuts the sound system down.
+    virtual void shutdown() = 0;
+};
+
+#endif  // LB2_SOUND_SYSTEM_H_
diff --git a/LoopbackApp/app/src/main/cpp/lb2/sound_system_aaudio.cpp b/LoopbackApp/app/src/main/cpp/lb2/sound_system_aaudio.cpp
new file mode 100644
index 0000000..9fe99f7
--- /dev/null
+++ b/LoopbackApp/app/src/main/cpp/lb2/sound_system_aaudio.cpp
@@ -0,0 +1,484 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "lb2/sound_system_aaudio.h"
+
+#include <aaudio/AAudio.h>
+
+#define LOG_TAG "ss_aaudio"
+#include "lb2/logging.h"
+#include "lb2/oboe/src/aaudio/AAudioLoader.h"
+#include "lb2/util.h"
+
+namespace {
+
+class Stream {
+  public:
+    explicit Stream(AAudioStream *stream);
+    Stream(const Stream&) = delete;
+    Stream& operator=(const Stream&) = delete;
+    ~Stream();
+
+    int getChannelCount() const { return mChannelCount; }
+    int getFramesPerBurst() const { return mFramesPerBurst; }
+    int getSamplingRateHz();
+    ssize_t read(AudioBufferView<sample_t> buffer);
+    bool setBufferFrameCount(int numFrames);
+    bool start();
+    bool stop();
+
+  private:
+    AAudioLoader *mAAudio;
+    AAudioStream *mAAStream;
+    const int mChannelCount;
+    const int mFramesPerBurst;
+};
+
+Stream::Stream(AAudioStream *stream)
+        : mAAudio(AAudioLoader::getInstance()),
+          mAAStream(stream),
+          mChannelCount(mAAudio->stream_getChannelCount(stream)),
+          mFramesPerBurst(mAAudio->stream_getFramesPerBurst(stream)) {
+    ALOGV("Created stream, channel count %d, frames per burst: %d",
+            mChannelCount, mFramesPerBurst);
+}
+
+Stream::~Stream() {
+    aaudio_result_t result = mAAudio->stream_close(mAAStream);
+    if (result != AAUDIO_OK) {
+        ALOGE("Failed to close stream %s (%d)", mAAudio->convertResultToText(result), result);
+    }
+}
+
+int Stream::getSamplingRateHz() {
+    return mAAudio->stream_getSampleRate(mAAStream);
+}
+
+ssize_t Stream::read(AudioBufferView<sample_t> buffer) {
+    ATRACE_CALL();
+    aaudio_result_t result = mAAudio->stream_read(
+            mAAStream, buffer.getData(), buffer.getFrameCount(), 0 /* timeout */);
+    if (result < 0) {
+        ALOGE("Failed to read from the stream %s (%d)",
+                mAAudio->convertResultToText(result), result);
+    }
+    return result;
+}
+
+bool Stream::setBufferFrameCount(int numFrames) {
+    aaudio_result_t result = mAAudio->stream_setBufferSize(mAAStream, numFrames);
+    if (result < 0) {
+        ALOGE("Failed to set frame buffer size to %d frames: %s (%d)",
+                numFrames, mAAudio->convertResultToText(result), result);
+    }
+    return result >= 0;
+}
+
+bool Stream::start() {
+    aaudio_result_t result = mAAudio->stream_requestStart(mAAStream);
+    if (result != AAUDIO_OK) {
+        ALOGE("Failed to start the stream %s (%d)", mAAudio->convertResultToText(result), result);
+        return false;
+    }
+    return true;
+}
+
+bool Stream::stop() {
+    aaudio_result_t result = mAAudio->stream_requestStop(mAAStream);
+    if (result != AAUDIO_OK) {
+        ALOGE("Failed to stop the stream %s (%d)", mAAudio->convertResultToText(result), result);
+        return false;
+    }
+    return true;
+}
+
+
+class StreamBuilder {
+  public:
+    explicit StreamBuilder(AAudioStreamBuilder *builder);
+    StreamBuilder(const StreamBuilder&) = delete;
+    StreamBuilder& operator=(const StreamBuilder&) = delete;
+    ~StreamBuilder();
+
+    std::unique_ptr<Stream> makeStream();
+    void setCallbacks(AAudioStream_dataCallback dataCb,
+            AAudioStream_errorCallback errorCb,
+            void *userData) {
+        mAAudio->builder_setDataCallback(mAABuilder, dataCb, userData);
+        mAAudio->builder_setErrorCallback(mAABuilder, errorCb, userData);
+    }
+    void setChannelCount(int32_t channelCount) {
+        mAAudio->builder_setChannelCount(mAABuilder, channelCount);
+    }
+    void setDirection(aaudio_direction_t direction) {
+        mAAudio->builder_setDirection(mAABuilder, direction);
+    }
+    void setFormat(aaudio_format_t format) {
+        mAAudio->builder_setFormat(mAABuilder, format);
+    }
+    void setPerformanceMode(aaudio_performance_mode_t mode) {
+        mAAudio->builder_setPerformanceMode(mAABuilder, mode);
+    }
+    void setSampleRate(int32_t sampleRate) {
+        mAAudio->builder_setSampleRate(mAABuilder, sampleRate);
+    }
+    void setSharingMode(aaudio_sharing_mode_t sharingMode) {
+        mAAudio->builder_setSharingMode(mAABuilder, sharingMode);
+    }
+
+  private:
+    AAudioLoader *mAAudio;
+    AAudioStreamBuilder *mAABuilder;
+};
+
+StreamBuilder::StreamBuilder(AAudioStreamBuilder *builder)
+        : mAAudio(AAudioLoader::getInstance()),
+          mAABuilder(builder) {
+}
+
+StreamBuilder::~StreamBuilder() {
+    aaudio_result_t result = mAAudio->builder_delete(mAABuilder);
+    if (result != AAUDIO_OK) {
+        ALOGE("Failed to delete stream builder %s (%d)",
+                mAAudio->convertResultToText(result), result);
+    }
+}
+
+std::unique_ptr<Stream> StreamBuilder::makeStream() {
+    AAudioStream *stream = nullptr;
+    aaudio_result_t result = mAAudio->builder_openStream(mAABuilder, &stream);
+    if (result != AAUDIO_OK || stream == nullptr) {
+        ALOGE("Failed to create stream %s (%d) %p",
+                mAAudio->convertResultToText(result), result, stream);
+        return nullptr;
+    }
+    return std::unique_ptr<Stream>(new Stream(stream));
+}
+
+std::unique_ptr<StreamBuilder> makeStreamBuilder() {
+    AAudioStreamBuilder *builder = nullptr;
+    aaudio_result_t result = AAudioLoader::getInstance()->createStreamBuilder(&builder);
+    if (result != AAUDIO_OK || builder == nullptr) {
+        ALOGE("Failed to create stream builder %s (%d) %p",
+                AAudioLoader::getInstance()->convertResultToText(result), result, builder);
+        return nullptr;
+    }
+    return std::unique_ptr<StreamBuilder>(new StreamBuilder(builder));
+}
+
+aaudio_performance_mode_t getAAudioPerfMode(PerformanceMode performanceMode) {
+    switch (performanceMode) {
+        case PerformanceMode::NONE: return AAUDIO_PERFORMANCE_MODE_NONE;
+        case PerformanceMode::DEFAULT:  // The testing mode we should use by default is low latency.
+        case PerformanceMode::LATENCY:
+        case PerformanceMode::LATENCY_EFFECTS: return AAUDIO_PERFORMANCE_MODE_LOW_LATENCY;
+        case PerformanceMode::POWER_SAVING: return AAUDIO_PERFORMANCE_MODE_POWER_SAVING;
+    }
+    ALOGE("Invalid performance mode value %d", static_cast<int>(performanceMode));
+    return AAUDIO_PERFORMANCE_MODE_NONE;
+}
+
+int calculateBufferSizeInFrames(int burstSizeInFrames, int bufferSizeMs, int samplingRateHz) {
+    const int desiredBufferSizeInFrames = wholeMultiplier(
+            bufferSizeMs * samplingRateHz, MS_PER_SECOND);
+    // Figure out how many bursts we need to cover the desired buffer size completely, and multiply
+    // that number by the burst size.
+    return wholeMultiplier(desiredBufferSizeInFrames, burstSizeInFrames) * burstSizeInFrames;
+}
+
+
+class Player {
+  public:
+    using ErrorCallback = std::function<void(aaudio_result_t)>;
+
+    Player() {}
+    Player(const Player&) = delete;
+    Player& operator=(const Player&) = delete;
+    ~Player() { shutdown(); }
+
+    bool probeDefaults(
+            PerformanceMode performanceMode, int *samplingRate, int *playerBufferFrameCount);
+    bool init(const TestContext *testCtx,
+            SoundSystem::WriteCallback writeClb,
+            ErrorCallback errorClb);
+    void shutdown();
+
+  private:
+    // Output stream buffer size in milliseconds. Larger values increase
+    // latency, but reduce possibility of glitching. AAudio operates in
+    // 2ms "bursts" by default (controlled by "aaudio.hw_burst_min_usec"
+    // system property), so 4 ms is 2 bursts--"double buffering".
+    // TODO: May actually read the property value to derive this
+    //       value, but property reading isn't exposed in NDK.
+    static constexpr int MINIMUM_STREAM_BUFFER_SIZE_MS = 4;
+
+    static aaudio_data_callback_result_t aaudioDataCallback(AAudioStream *stream,
+            void *userData,
+            void *audioData,
+            int32_t numFrames);
+    static void aaudioErrorCallback(AAudioStream *stream,
+            void *userData,
+            aaudio_result_t error);
+
+    std::unique_ptr<StreamBuilder> createBuilder(PerformanceMode performanceMode);
+
+    const TestContext *mTestCtx;
+    std::unique_ptr<Stream> mStream;
+    SoundSystem::WriteCallback mWriteCallback;
+    ErrorCallback mErrorCallback;
+};
+
+std::unique_ptr<StreamBuilder> Player::createBuilder(PerformanceMode performanceMode) {
+    std::unique_ptr<StreamBuilder> builder = makeStreamBuilder();
+    if (builder) {
+        builder->setDirection(AAUDIO_DIRECTION_OUTPUT);
+        builder->setSharingMode(AAUDIO_SHARING_MODE_EXCLUSIVE);
+        builder->setPerformanceMode(getAAudioPerfMode(performanceMode));
+        static_assert(sizeof(sample_t) == sizeof(int16_t), "sample format must be int16");
+        builder->setFormat(AAUDIO_FORMAT_PCM_I16);
+        builder->setCallbacks(&Player::aaudioDataCallback, &Player::aaudioErrorCallback, this);
+    }
+    return builder;
+}
+
+bool Player::probeDefaults(
+        PerformanceMode performanceMode, int *samplingRate, int *playerBufferFrameCount) {
+    std::unique_ptr<StreamBuilder> builder = createBuilder(performanceMode);
+    if (!builder) return false;
+    mStream = builder->makeStream();
+    if (!mStream) return false;
+    *samplingRate = mStream->getSamplingRateHz();
+    *playerBufferFrameCount = calculateBufferSizeInFrames(
+            mStream->getFramesPerBurst(), MINIMUM_STREAM_BUFFER_SIZE_MS, *samplingRate);
+    return true;
+}
+
+bool Player::init(const TestContext *testCtx,
+        SoundSystem::WriteCallback writeClb,
+        ErrorCallback errorClb) {
+    mTestCtx = testCtx;
+    std::unique_ptr<StreamBuilder> builder = createBuilder(testCtx->getPerformanceMode());
+    if (!builder) return false;
+    // Do not set channel count, because AAudio doesn't perform channel count conversion
+    // in the exclusive mode.
+    builder->setSampleRate(testCtx->getSamplingRateHz());
+    mStream = builder->makeStream();
+    if (!mStream) return false;
+    mStream->setBufferFrameCount(testCtx->getFrameCount());
+    mWriteCallback = writeClb;
+    mErrorCallback = errorClb;
+    return mStream->start();
+}
+
+void Player::shutdown() {
+    if (mStream) {
+        mStream->stop();
+        mStream.reset();
+    }
+}
+
+aaudio_data_callback_result_t Player::aaudioDataCallback(AAudioStream* /*stream*/,
+        void *userData,
+        void *audioData,
+        int32_t numFrames) {
+    ATRACE_CALL();
+    Player *self = static_cast<Player*>(userData);
+    AudioBufferView<sample_t> outputWave = self->mWriteCallback(numFrames);
+    if (outputWave.getFrameCount() > static_cast<size_t>(numFrames)) {
+        ALOGW("Output wave has more frames than callback allows: %lld > %d",
+                (long long)outputWave.getFrameCount(), numFrames);
+    }
+
+    copyAudioBufferViewData(outputWave,
+            AudioBufferView<sample_t>(static_cast<sample_t*>(audioData),
+                    numFrames, self->mStream->getChannelCount()));
+
+    return AAUDIO_CALLBACK_RESULT_CONTINUE;
+}
+
+void Player::aaudioErrorCallback(AAudioStream* /*stream*/,
+        void *userData,
+        aaudio_result_t error) {
+    Player *self = static_cast<Player*>(userData);
+    self->mErrorCallback(error);
+}
+
+
+class Recorder {
+  public:
+    Recorder() {}
+    Recorder(const Recorder&) = delete;
+    Recorder& operator=(const Recorder&) = delete;
+    ~Recorder() { shutdown(); }
+
+    bool probeDefaults(
+            PerformanceMode performanceMode, int *samplingRate, int *recorderBufferFrameCount);
+    bool init(const TestContext *testCtx);
+    bool drain();
+    ssize_t read(AudioBufferView<sample_t> buffer);
+    void shutdown();
+
+  private:
+    // The input stream buffer size in milliseconds. For the input, buffer
+    // size affects latency less than for the output stream (at least in MMAP mode),
+    // because the app normally drains the input buffer, keeping its fill level low.
+    // Using twice the size of the Player buffer as an educated guess.
+    static constexpr int MINIMUM_STREAM_BUFFER_SIZE_MS = 8;
+
+    std::unique_ptr<StreamBuilder> createBuilder(PerformanceMode performanceMode);
+
+    const TestContext *mTestCtx;
+    std::unique_ptr<Stream> mStream;
+    std::unique_ptr<AudioBuffer<sample_t>> mConversionBuffer;
+};
+
+std::unique_ptr<StreamBuilder> Recorder::createBuilder(PerformanceMode performanceMode) {
+    std::unique_ptr<StreamBuilder> builder = makeStreamBuilder();
+    if (builder) {
+        builder->setDirection(AAUDIO_DIRECTION_INPUT);
+        builder->setSharingMode(AAUDIO_SHARING_MODE_EXCLUSIVE);
+        builder->setPerformanceMode(getAAudioPerfMode(performanceMode));
+        static_assert(sizeof(sample_t) == sizeof(int16_t), "sample format must be int16");
+        builder->setFormat(AAUDIO_FORMAT_PCM_I16);
+    }
+    return builder;
+}
+
+bool Recorder::probeDefaults(
+        PerformanceMode performanceMode, int *samplingRate, int *recorderBufferFrameCount) {
+    std::unique_ptr<StreamBuilder> builder = createBuilder(performanceMode);
+    if (!builder) return false;
+    mStream = builder->makeStream();
+    if (!mStream) return false;
+    *samplingRate = mStream->getSamplingRateHz();
+    *recorderBufferFrameCount = calculateBufferSizeInFrames(
+            mStream->getFramesPerBurst(), MINIMUM_STREAM_BUFFER_SIZE_MS, *samplingRate);
+    return true;
+}
+
+bool Recorder::init(const TestContext *testCtx) {
+    mTestCtx = testCtx;
+    std::unique_ptr<StreamBuilder> builder = createBuilder(testCtx->getPerformanceMode());
+    if (!builder) return false;
+    builder->setChannelCount(testCtx->getChannelCount());
+    builder->setSampleRate(testCtx->getSamplingRateHz());
+    mStream = builder->makeStream();
+    if (!mStream) return false;
+    if (mStream->getChannelCount() != mTestCtx->getChannelCount()) {
+        mConversionBuffer.reset(new AudioBuffer<sample_t>(
+                        mTestCtx->getFrameCount(), mStream->getChannelCount()));
+    }
+    mStream->setBufferFrameCount(testCtx->getFrameCount());
+    return mStream->start();
+}
+
+bool Recorder::drain() {
+    ATRACE_CALL();
+    AudioBuffer<sample_t> drainBuffer(mStream->getFramesPerBurst(), mStream->getChannelCount());
+    ssize_t framesRead;
+    do {
+        framesRead = mStream->read(drainBuffer);
+        if (framesRead < 0) return false;
+    } while (framesRead > 0);
+    return true;
+}
+
+ssize_t Recorder::read(AudioBufferView<sample_t> buffer) {
+    if (!mConversionBuffer) {
+        return mStream->read(buffer);
+    } else {
+        ssize_t result = mStream->read(mConversionBuffer->getView(0, buffer.getFrameCount()));
+        if (result <= 0) return result;
+
+        size_t framesRead = result;
+        copyAudioBufferViewData(mConversionBuffer->getView(0, framesRead), buffer);
+        return framesRead;
+    }
+}
+
+void Recorder::shutdown() {
+    if (mStream) {
+        mStream->stop();
+        mStream.reset();
+    }
+}
+
+
+}  // namespace
+
+struct SoundSystemAAudio::Impl {
+    Impl() : lastError(AAUDIO_OK) {}
+    Impl(const Impl&) = delete;
+    Impl& operator=(const Impl&) = delete;
+
+    void errorCallback(aaudio_result_t error) {
+        lastError = error;
+        ALOGE("Error callback received %s (%d)",
+                AAudioLoader::getInstance()->convertResultToText(error), error);
+    }
+
+    Player player;
+    Recorder recorder;
+    std::atomic<aaudio_result_t> lastError;
+};
+
+SoundSystemAAudio::SoundSystemAAudio()
+        : mTestCtx(nullptr), mImpl(new Impl()) {
+}
+
+SoundSystemAAudio::SoundSystemAAudio(const TestContext *testCtx)
+        : mTestCtx(testCtx), mImpl(new Impl()) {
+}
+
+SoundSystemAAudio::~SoundSystemAAudio() {
+    shutdown();
+}
+
+bool SoundSystemAAudio::probeDefaultSettings(PerformanceMode performanceMode, int *samplingRate,
+        int *playerBufferFrameCount, int *recorderBufferFrameCount) {
+    return (AAudioLoader::getInstance()->open() == 0)
+            && mImpl->recorder.probeDefaults(
+                    performanceMode, samplingRate, recorderBufferFrameCount)
+            && mImpl->player.probeDefaults(performanceMode, samplingRate, playerBufferFrameCount);
+}
+
+bool SoundSystemAAudio::init(WriteCallback callback) {
+    if (!mTestCtx) {
+        ALOGF("Attempting to use SoundSystemAAudio probing instance for testing!");
+    }
+    return (AAudioLoader::getInstance()->open() == 0)
+            && mImpl->recorder.init(mTestCtx)
+            && mImpl->player.init(
+                    mTestCtx,
+                    callback,
+                    std::bind(&Impl::errorCallback, mImpl.get(), std::placeholders::_1));
+}
+
+bool SoundSystemAAudio::drainInput() {
+    if (mImpl->lastError != AAUDIO_OK) return false;
+    return mImpl->recorder.drain();
+}
+
+ssize_t SoundSystemAAudio::readAudio(AudioBufferView<sample_t> buffer) {
+    if (mImpl->lastError != AAUDIO_OK) return -1;
+    return mImpl->recorder.read(buffer);
+}
+
+void SoundSystemAAudio::shutdown() {
+    mImpl->player.shutdown();
+    mImpl->recorder.shutdown();
+    AAudioLoader::getInstance()->close();
+}
diff --git a/LoopbackApp/app/src/main/cpp/lb2/sound_system_aaudio.h b/LoopbackApp/app/src/main/cpp/lb2/sound_system_aaudio.h
new file mode 100644
index 0000000..d6fb902
--- /dev/null
+++ b/LoopbackApp/app/src/main/cpp/lb2/sound_system_aaudio.h
@@ -0,0 +1,50 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef LB2_SOUND_SYSTEM_AAUDIO_H_
+#define LB2_SOUND_SYSTEM_AAUDIO_H_
+
+#include <memory>
+
+#include "lb2/sound_system.h"
+#include "lb2/test_context.h"
+
+// Implementation of a sound system via AAudio API.
+class SoundSystemAAudio : public SoundSystem {
+  public:
+    // Default constructor--for probing.
+    SoundSystemAAudio();
+    // Constructor with a test context--for testing.
+    explicit SoundSystemAAudio(const TestContext *testCtx);
+    SoundSystemAAudio(const SoundSystemAAudio&) = delete;
+    SoundSystemAAudio& operator=(const SoundSystemAAudio&) = delete;
+    virtual ~SoundSystemAAudio();
+
+    bool probeDefaultSettings(PerformanceMode performanceMode, int *samplingRate,
+            int *playerBufferFrameCount, int *recorderBufferFrameCount) override;
+    bool init(WriteCallback callback) override;
+    bool drainInput() override;
+    ssize_t readAudio(AudioBufferView<sample_t> buffer) override;
+    void shutdown() override;
+
+  private:
+    struct Impl;  // AAudio-specific details.
+
+    const TestContext* mTestCtx;
+    const std::unique_ptr<Impl> mImpl;
+};
+
+#endif  // LB2_SOUND_SYSTEM_AAUDIO_H_
diff --git a/LoopbackApp/app/src/main/cpp/lb2/sound_system_echo.cpp b/LoopbackApp/app/src/main/cpp/lb2/sound_system_echo.cpp
new file mode 100644
index 0000000..1717494
--- /dev/null
+++ b/LoopbackApp/app/src/main/cpp/lb2/sound_system_echo.cpp
@@ -0,0 +1,94 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "lb2/sound_system_echo.h"
+
+#include <chrono>
+#include <functional>
+
+#define LOG_TAG "ss_echo"
+#include "lb2/logging.h"
+#include "lb2/util.h"
+
+SoundSystemEcho::SoundSystemEcho(const TestContext *testCtx)
+        : mTestCtx(testCtx),
+          mFifoData(new sample_t[testCtx->getSampleCount()]),
+          mMsecPerBuffer(calculateMsecPerBuffer(testCtx)),
+          mThreadRunning(false) {
+    audio_utils_fifo_init(
+            &mFifo,
+            mTestCtx->getFrameCount(),
+            mTestCtx->getFrameSize(),
+            mFifoData.get());
+}
+
+SoundSystemEcho::~SoundSystemEcho() {
+    shutdown();
+    audio_utils_fifo_deinit(&mFifo);
+}
+
+int SoundSystemEcho::calculateMsecPerBuffer(const TestContext *testCtx) {
+    return wholeMultiplier(MS_PER_SECOND * testCtx->getFrameCount(), testCtx->getSamplingRateHz());
+}
+
+void SoundSystemEcho::startThread() {
+    mThreadRunning = true;
+    mThread.reset(new std::thread(std::bind(&SoundSystemEcho::threadLoop, this)));
+}
+
+void SoundSystemEcho::stopThread() {
+    mThreadRunning = false;
+    mThread->join();
+    mThread.reset();
+}
+
+void SoundSystemEcho::threadLoop() {
+    while (mThreadRunning) {
+        AudioBufferView<sample_t> buffer = mWriteCallback(mTestCtx->getFrameCount());
+        // The FIFO will cut the data if it exceeds the buffer size.
+        audio_utils_fifo_write(&mFifo, buffer.getData(), buffer.getFrameCount());
+        std::this_thread::sleep_for(std::chrono::milliseconds(mMsecPerBuffer));
+    }
+}
+
+bool SoundSystemEcho::init(WriteCallback callback) {
+    if (mThreadRunning) {
+        shutdown();
+    }
+    mWriteCallback = callback;
+    startThread();
+    return true;
+}
+
+bool SoundSystemEcho::drainInput() {
+    AudioBuffer<sample_t> drainBuffer(
+            audio_utils_fifo_availToRead(&mFifo), mTestCtx->getChannelCount());
+    return audio_utils_fifo_read(&mFifo, drainBuffer.getData(), drainBuffer.getFrameCount()) >= 0;
+}
+
+ssize_t SoundSystemEcho::readAudio(AudioBufferView<sample_t> buffer) {
+    std::this_thread::sleep_for(std::chrono::milliseconds(mMsecPerBuffer));
+    ssize_t result = audio_utils_fifo_read(&mFifo, buffer.getData(), buffer.getFrameCount());
+    if (result != 0) return result;
+    buffer.clear();
+    return buffer.getFrameCount();
+}
+
+void SoundSystemEcho::shutdown() {
+    if (!mThreadRunning) return;
+    stopThread();
+    mWriteCallback = nullptr;
+}
diff --git a/LoopbackApp/app/src/main/cpp/lb2/sound_system_echo.h b/LoopbackApp/app/src/main/cpp/lb2/sound_system_echo.h
new file mode 100644
index 0000000..9935a6c
--- /dev/null
+++ b/LoopbackApp/app/src/main/cpp/lb2/sound_system_echo.h
@@ -0,0 +1,59 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef LB2_SOUND_SYSTEM_ECHO_H_
+#define LB2_SOUND_SYSTEM_ECHO_H_
+
+#include <atomic>
+#include <memory>
+#include <thread>
+
+#include <audio_utils/fifo.h>
+
+#include "lb2/sound_system.h"
+#include "lb2/test_context.h"
+
+// Simplest implementation of a sound system that echoes written data
+// back to the reader. This represents an ideal model of a physical loopback dongle.
+class SoundSystemEcho : public SoundSystem {
+  public:
+    SoundSystemEcho(const TestContext *testCtx);
+    SoundSystemEcho(const SoundSystemEcho&) = delete;
+    SoundSystemEcho& operator=(const SoundSystemEcho&) = delete;
+    virtual ~SoundSystemEcho();
+
+    bool init(WriteCallback callback) override;
+    bool drainInput() override;
+    ssize_t readAudio(AudioBufferView<sample_t> buffer) override;
+    void shutdown() override;
+
+  private:
+    static int calculateMsecPerBuffer(const TestContext *testCtx);
+
+    void startThread();
+    void stopThread();
+    void threadLoop();
+
+    const TestContext* mTestCtx;
+    std::unique_ptr<sample_t[]> mFifoData;
+    struct audio_utils_fifo mFifo;
+    const int mMsecPerBuffer;
+    WriteCallback mWriteCallback;      // accessed by mThread
+    std::atomic<bool> mThreadRunning;  // accessed by mThread
+    std::unique_ptr<std::thread> mThread;
+};
+
+#endif  // LB2_SOUND_SYSTEM_ECHO_H_
diff --git a/LoopbackApp/app/src/main/cpp/lb2/test_context.cpp b/LoopbackApp/app/src/main/cpp/lb2/test_context.cpp
new file mode 100644
index 0000000..0e1985b
--- /dev/null
+++ b/LoopbackApp/app/src/main/cpp/lb2/test_context.cpp
@@ -0,0 +1,36 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "lb2/test_context.h"
+
+#include <math.h>
+#include <cmath>
+
+AudioBufferView<sample_t> GlitchTestContext::getNextImpulse(size_t frameCount) {
+    constexpr double TWO_PI = 2.0 * M_PI;
+    auto sineBuffer = mSineBuffer.getView(0, frameCount);
+    for (size_t i = 0; i < sineBuffer.getFrameCount(); ++i) {
+        sample_t s = convertSampleType(std::sin(mPhaseRad) * SIGNAL_AMPLITUDE);
+        sample_t *d = sineBuffer.getFrameAt(i);
+        for (int j = 0; j < getChannelCount(); ++j) {
+            *d++ = s;
+        }
+
+        mPhaseRad += TWO_PI * mPhaseIncrementPerFrame;
+        while (mPhaseRad > TWO_PI) mPhaseRad -= TWO_PI;
+    }
+    return sineBuffer;
+}
diff --git a/LoopbackApp/app/src/main/cpp/lb2/test_context.h b/LoopbackApp/app/src/main/cpp/lb2/test_context.h
new file mode 100644
index 0000000..4909600
--- /dev/null
+++ b/LoopbackApp/app/src/main/cpp/lb2/test_context.h
@@ -0,0 +1,123 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef LB2_TEST_CONTEXT_H_
+#define LB2_TEST_CONTEXT_H_
+
+#include <memory>
+
+#include <SLES/OpenSLES.h>  // for SLuint... types used by performance mode consts
+#include <SLES/OpenSLES_AndroidConfiguration.h>
+
+#include "lb2/audio_buffer.h"
+
+// The Java side uses the same numbers as OpenSL ES, and '-1' for default,
+// see LoopbackApplication.java.
+enum class PerformanceMode {
+    DEFAULT = -1,
+    NONE = SL_ANDROID_PERFORMANCE_NONE,
+    LATENCY = SL_ANDROID_PERFORMANCE_LATENCY,
+    LATENCY_EFFECTS = SL_ANDROID_PERFORMANCE_LATENCY_EFFECTS,
+    POWER_SAVING = SL_ANDROID_PERFORMANCE_POWER_SAVING
+};
+
+// Generic context describing test parameters.
+// Made non-copyable because descendants can contain buffers.
+class TestContext : public CountsConverter<sample_t> {
+  public:
+    TestContext(PerformanceMode perfMode,
+            int testFrameCount,
+            int channelCount,
+            int samplingRateHz)
+            : CountsConverter<sample_t>(testFrameCount, channelCount),
+              mPerfMode(perfMode),
+              mSamplingRateHz(samplingRateHz) {}
+    TestContext(const TestContext&) = delete;
+    TestContext& operator=(const TestContext&) = delete;
+
+    // Allocates an audio buffer with the size enough to hold audio test data.
+    AudioBuffer<sample_t> createAudioBuffer() const {
+        return AudioBuffer<sample_t>(getFrameCount(), getChannelCount());
+    }
+    PerformanceMode getPerformanceMode() const { return mPerfMode; }
+    int getSamplingRateHz() const { return mSamplingRateHz; }
+
+  private:
+    const PerformanceMode mPerfMode;
+    const int mSamplingRateHz;
+};
+
+
+// Context describing latency test parameters.
+// Carries test impulse data, but doesn't own it.
+// The size of the impulse is assumed to be 1 frame buffer.
+class LatencyTestContext : public TestContext {
+  public:
+    LatencyTestContext(PerformanceMode perfMode,
+            int testFrameCount,
+            int channelCount,
+            int samplingRateHz,
+            int inputFramesToDiscard,
+            sample_t *impulse)
+            : TestContext(perfMode, testFrameCount, channelCount, samplingRateHz),
+              mInputFramesToDiscard(inputFramesToDiscard),
+              mImpulse(impulse, testFrameCount, channelCount) {}
+    LatencyTestContext(const LatencyTestContext&) = delete;
+    LatencyTestContext& operator=(const LatencyTestContext&) = delete;
+
+    int getInputFramesToDiscard() const { return mInputFramesToDiscard; }
+    AudioBufferView<sample_t> getImpulse() const { return mImpulse; }
+
+  private:
+    const int mInputFramesToDiscard;
+    const AudioBufferView<sample_t> mImpulse;
+};
+
+
+// Context describing glitch test parameters.
+// Generates test signal. Since the period of the test signal
+// is not necessarily aligned with the test buffer size,
+// the operation of getting next impulse piece is not idempotent.
+class GlitchTestContext : public TestContext {
+  public:
+    GlitchTestContext(PerformanceMode perfMode,
+            int testFrameCount,
+            int channelCount,
+            int samplingRateHz,
+            double signalFrequencyHz,
+            AudioBufferView<sample_t> byteBuffer)
+            : TestContext(perfMode, testFrameCount, channelCount, samplingRateHz),
+              mByteBuffer(byteBuffer),
+              mPhaseIncrementPerFrame(signalFrequencyHz / samplingRateHz),
+              mSineBuffer(createAudioBuffer()),
+              mPhaseRad(0) {}
+    GlitchTestContext(const GlitchTestContext&) = delete;
+    GlitchTestContext& operator=(const GlitchTestContext&) = delete;
+
+    const AudioBufferView<sample_t>& getByteBuffer() const { return mByteBuffer; }
+    AudioBufferView<sample_t> getNextImpulse(size_t frameCount);  // non-idempotent
+
+  private:
+    static constexpr double SIGNAL_AMPLITUDE = 0.8;
+
+    const AudioBufferView<sample_t> mByteBuffer;
+    const double mPhaseIncrementPerFrame;
+    AudioBuffer<sample_t> mSineBuffer;
+    double mPhaseRad;
+};
+
+
+#endif  // LB2_TEST_CONTEXT_H_
diff --git a/LoopbackApp/app/src/main/cpp/lb2/util.h b/LoopbackApp/app/src/main/cpp/lb2/util.h
new file mode 100644
index 0000000..5229da7
--- /dev/null
+++ b/LoopbackApp/app/src/main/cpp/lb2/util.h
@@ -0,0 +1,31 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef LB2_UTIL_H_
+#define LB2_UTIL_H_
+
+// TODO: move all to audio utilities
+
+constexpr int MS_PER_SECOND = 1000;
+
+// Assuming the arguments to be positive numbers, returns
+// a value 'm' such that 'part' * 'm' >= 'whole'.
+inline int wholeMultiplier(int whole, int part) {
+    // part * ((whole - 1) / part + 1) = whole - 1 + part >= whole, if part > 0
+    return (whole - 1) / part + 1;
+}
+
+#endif  // LB2_UTIL_H_
diff --git a/LoopbackApp/app/src/main/cpp/loopback.c b/LoopbackApp/app/src/main/cpp/loopback.c
new file mode 100644
index 0000000..46cbad2
--- /dev/null
+++ b/LoopbackApp/app/src/main/cpp/loopback.c
@@ -0,0 +1,53 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "lb2/loopback2.h"
+#include "loopback_sles.h"
+
+native_engine_t sEngines[NATIVE_ENGINE_COUNT] = {
+    // NATIVE_ENGINE_SLES
+    {
+        slesComputeDefaultSettings,
+        slesInit,
+        slesDestroy,
+        slesProcessNext,
+        slesGetRecorderBufferPeriod,
+        slesGetRecorderMaxBufferPeriod,
+        slesGetRecorderVarianceBufferPeriod,
+        slesGetPlayerBufferPeriod,
+        slesGetPlayerMaxBufferPeriod,
+        slesGetPlayerVarianceBufferPeriod,
+        slesGetCaptureRank,
+        slesGetPlayerTimeStampsAndExpectedBufferPeriod,
+        slesGetRecorderTimeStampsAndExpectedBufferPeriod
+    },
+    // NATIVE_ENGINE_AAUDIO
+    {
+        lb2ComputeDefaultSettings,
+        lb2Init,
+        lb2Destroy,
+        lb2ProcessNext,
+        lb2GetRecorderBufferPeriod,
+        lb2GetRecorderMaxBufferPeriod,
+        lb2GetRecorderVarianceBufferPeriod,
+        lb2GetPlayerBufferPeriod,
+        lb2GetPlayerMaxBufferPeriod,
+        lb2GetPlayerVarianceBufferPeriod,
+        lb2GetCaptureRank,
+        lb2GetPlayerTimeStampsAndExpectedBufferPeriod,
+        lb2GetRecorderTimeStampsAndExpectedBufferPeriod
+    }
+};
diff --git a/LoopbackApp/app/src/main/cpp/loopback.h b/LoopbackApp/app/src/main/cpp/loopback.h
new file mode 100644
index 0000000..45c80b7
--- /dev/null
+++ b/LoopbackApp/app/src/main/cpp/loopback.h
@@ -0,0 +1,93 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _Included_org_drrickorang_loopback_loopback
+#define _Included_org_drrickorang_loopback_loopback
+
+#include <stdbool.h>
+#include <time.h>
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+typedef struct {
+    int* timeStampsMs;          // Array of milliseconds since first callback
+    short* callbackDurations;   // Array of milliseconds between callback and previous callback
+    short index;                // Current write position
+    struct timespec startTime;  // Time of first callback {seconds,nanoseconds}
+    int capacity;               // Total number of callback times/lengths that can be recorded
+    bool exceededCapacity;      // Set only if late callbacks come after array is full
+} callbackTimeStamps;
+
+#define NANOS_PER_SECOND 1000000000
+#define NANOS_PER_MILLI 1000000
+#define MILLIS_PER_SECOND 1000
+
+enum STATUS_ENUM {
+    STATUS_SUCCESS = 0,
+    STATUS_FAIL = 1
+};
+
+enum JAVA_CONSTANTS_ENUM {
+    // Must match constant 'range' in BufferPeriod.java
+    RANGE = 1002,
+    // Must match constants in Constant.java
+    TEST_TYPE_LATENCY = 222,
+    TEST_TYPE_BUFFER_PERIOD = 223,
+    AUDIO_THREAD_TYPE_JAVA = 0,
+    AUDIO_THREAD_TYPE_NATIVE_SLES = 1,
+    AUDIO_THREAD_TYPE_NATIVE_AAUDIO = 2,
+};
+
+typedef struct {
+    int (*computeDefaultSettings)(int performanceMode, int *samplingRate,
+            int *playerBufferFrameCount, int *recorderBufferFrameCount);
+    int (*init)(void **ppCtx, int samplingRate, int frameCount, int micSource,
+            int performanceMode,
+            int testType, double frequency1, char* byteBufferPtr, int byteBufferLength,
+            short* loopbackTone, int maxRecordedLateCallbacks, int ignoreFirstFrames);
+    int (*destroy)(void **ppCtx);
+    int (*processNext)(void *pCtx, double *pSamples, long maxSamples);
+    int* (*getRecorderBufferPeriod)(void *pCtx);
+    int (*getRecorderMaxBufferPeriod)(void *pCtx);
+    int64_t (*getRecorderVarianceBufferPeriod)(void *pCtx);
+    int* (*getPlayerBufferPeriod)(void *pCtx);
+    int (*getPlayerMaxBufferPeriod)(void *pCtx);
+    int64_t (*getPlayerVarianceBufferPeriod)(void *pCtx);
+    int (*getCaptureRank)(void *pCtx);
+    int (*getPlayerTimeStampsAndExpectedBufferPeriod)(void *pCtx, callbackTimeStamps **ppTSs);
+    int (*getRecorderTimeStampsAndExpectedBufferPeriod)(void *pCtx, callbackTimeStamps **ppTSs);
+} native_engine_t;
+
+typedef struct {
+    void *context;
+    native_engine_t *methods;
+} native_engine_instance_t;
+
+enum NATIVE_ENGINE_ENUM {
+    NATIVE_ENGINE_SLES = 0,
+    NATIVE_ENGINE_AAUDIO = 1,
+    NATIVE_ENGINE_COUNT = NATIVE_ENGINE_AAUDIO + 1
+};
+
+extern native_engine_t sEngines[NATIVE_ENGINE_COUNT];
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif  // _Included_org_drrickorang_loopback_loopback
diff --git a/LoopbackApp/app/src/main/cpp/loopback_sles.h b/LoopbackApp/app/src/main/cpp/loopback_sles.h
new file mode 100644
index 0000000..b2c9b53
--- /dev/null
+++ b/LoopbackApp/app/src/main/cpp/loopback_sles.h
@@ -0,0 +1,48 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _Included_org_drrickorang_loopback_loopback_sles
+#define _Included_org_drrickorang_loopback_loopback_sles
+
+#include "loopback.h"
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+int slesComputeDefaultSettings(int performanceMode, int *samplingRate,
+             int *playerBufferFrameCount, int *recorderBufferFrameCount);
+int slesInit(void ** ppCtx, int samplingRate, int frameCount, int micSource,
+             int performanceMode,
+             int testType, double frequency1, char* byteBufferPtr, int byteBufferLength,
+             short* loopbackTone, int maxRecordedLateCallbacks, int ignoreFirstFrames);
+int slesDestroy(void ** ppCtx);
+int slesProcessNext(void *pCtx, double *pSamples, long maxSamples);
+int* slesGetRecorderBufferPeriod(void *pCtx);
+int slesGetRecorderMaxBufferPeriod(void *pCtx);
+int64_t slesGetRecorderVarianceBufferPeriod(void *pCtx);
+int* slesGetPlayerBufferPeriod(void *pCtx);
+int slesGetPlayerMaxBufferPeriod(void *pCtx);
+int64_t slesGetPlayerVarianceBufferPeriod(void *pCtx);
+int slesGetCaptureRank(void *pCtx);
+int slesGetPlayerTimeStampsAndExpectedBufferPeriod(void *pCtx, callbackTimeStamps **ppTSs);
+int slesGetRecorderTimeStampsAndExpectedBufferPeriod(void *pCtx, callbackTimeStamps **ppTSs);
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif  // _Included_org_drrickorang_loopback_loopback_sles
diff --git a/LoopbackApp/app/src/main/jni/sles.cpp b/LoopbackApp/app/src/main/cpp/sles.cpp
similarity index 87%
rename from LoopbackApp/app/src/main/jni/sles.cpp
rename to LoopbackApp/app/src/main/cpp/sles.cpp
index 159269b..9a07a44 100644
--- a/LoopbackApp/app/src/main/jni/sles.cpp
+++ b/LoopbackApp/app/src/main/cpp/sles.cpp
@@ -14,7 +14,6 @@
  * limitations under the License.
  */
 
-
 // FIXME taken from OpenSLES_AndroidConfiguration.h
 #define SL_ANDROID_KEY_PERFORMANCE_MODE  ((const SLchar*) "androidPerformanceMode")
 
@@ -30,20 +29,37 @@
 #include <cmath>
 #include "sles.h"
 #include "audio_utils/atomic.h"
-#include <stdio.h>
-#include <assert.h>
+#include "byte_buffer.h"
 #include <unistd.h>
 #include <string.h>
 
-int slesInit(sles_data ** ppSles, int samplingRate, int frameCount, int micSource,
+static int slesCreateServer(sles_data *pSles, int samplingRate, int frameCount, int micSource,
+        int performanceMode,
+        int testType, double frequency1, char* byteBufferPtr, int byteBufferLength,
+        short* loopbackTone, int maxRecordedLateCallbacks, int ignoreFirstFrames);
+static int slesDestroyServer(sles_data *pSles);
+
+static void initBufferStats(bufferStats *stats);
+static void collectBufferPeriod(bufferStats *stats, bufferStats *fdpStats,
+        callbackTimeStamps *timeStamps, short expectedBufferPeriod);
+static bool updateBufferStats(bufferStats *stats, int64_t diff_in_nano, int expectedBufferPeriod);
+static void recordTimeStamp(callbackTimeStamps *timeStamps,
+        int64_t callbackDuration, int64_t timeStamp);
+
+int slesComputeDefaultSettings(int /*performanceMode*/, int* /*samplingRate*/,
+            int* /*playerBufferFrameCount*/, int* /*recorderBufferFrameCount*/) {
+    // For OpenSL ES, these parameters can be determined by NativeAudioThread itself.
+    return STATUS_FAIL;
+}
+
+int slesInit(void ** ppCtx, int samplingRate, int frameCount, int micSource,
              int performanceMode,
              int testType, double frequency1, char* byteBufferPtr, int byteBufferLength,
              short* loopbackTone, int maxRecordedLateCallbacks, int ignoreFirstFrames) {
-    int status = SLES_FAIL;
+    sles_data ** ppSles = (sles_data**) ppCtx;
+    int status = STATUS_FAIL;
     if (ppSles != NULL) {
-        sles_data * pSles = (sles_data*) malloc(sizeof(sles_data));
-
-        memset(pSles, 0, sizeof(sles_data));
+        sles_data * pSles = (sles_data*) calloc(1, sizeof(sles_data));
 
         SLES_PRINTF("pSles malloc %zu bytes at %p", sizeof(sles_data), pSles);
         //__android_log_print(ANDROID_LOG_INFO, "sles_jni",
@@ -63,8 +79,9 @@
 
     return status;
 }
-int slesDestroy(sles_data ** ppSles) {
-    int status = SLES_FAIL;
+int slesDestroy(void ** ppCtx) {
+    sles_data ** ppSles = (sles_data**)ppCtx;
+    int status = STATUS_FAIL;
     if (ppSles != NULL) {
         slesDestroyServer(*ppSles);
 
@@ -74,20 +91,22 @@
             free(*ppSles);
             *ppSles = 0;
         }
-        status = SLES_SUCCESS;
+        status = STATUS_SUCCESS;
     }
     return status;
 }
 
-#define ASSERT_EQ(x, y) do { if ((x) == (y)) ; else { fprintf(stderr, "0x%x != 0x%x\n", \
-    (unsigned) (x), (unsigned) (y)); assert((x) == (y)); } } while (0)
+#define ASSERT(x) do { if(!(x)) { __android_log_assert("assert", "sles_jni", \
+                    "ASSERTION FAILED: " #x); } } while (0)
+#define ASSERT_EQ(x, y) do { if ((x) == (y)) ; else __android_log_assert("assert", "sles_jni", \
+                    "ASSERTION FAILED: 0x%x != 0x%x\n", (unsigned) (x), (unsigned) (y)); } while (0)
 
 // Called after audio recorder fills a buffer with data, then we can read from this filled buffer
 static void recorderCallback(SLAndroidSimpleBufferQueueItf caller __unused, void *context) {
     sles_data *pSles = (sles_data*) context;
     if (pSles != NULL) {
-        collectBufferPeriod(&pSles->recorderBufferStats, NULL /*fdpStats*/, &pSles->recorderTimeStamps,
-                            pSles->expectedBufferPeriod);
+        collectBufferPeriod(&pSles->recorderBufferStats, NULL /*fdpStats*/,
+                            &pSles->recorderTimeStamps, pSles->expectedBufferPeriod);
 
         //__android_log_print(ANDROID_LOG_INFO, "sles_jni", "in recorderCallback");
         SLresult result;
@@ -95,9 +114,9 @@
         //ee  SLES_PRINTF("<R");
 
         // We should only be called when a recording buffer is done
-        assert(pSles->rxFront <= pSles->rxBufCount);
-        assert(pSles->rxRear <= pSles->rxBufCount);
-        assert(pSles->rxFront != pSles->rxRear);
+        ASSERT(pSles->rxFront <= pSles->rxBufCount);
+        ASSERT(pSles->rxRear <= pSles->rxBufCount);
+        ASSERT(pSles->rxFront != pSles->rxRear);
         char *buffer = pSles->rxBuffers[pSles->rxFront]; //pSles->rxBuffers stores the data recorded
 
 
@@ -136,7 +155,8 @@
             }
         } else if (pSles->testType == TEST_TYPE_BUFFER_PERIOD) {
             if (pSles->fifo2Buffer != NULL) {
-                ssize_t actual = byteBuffer_write(pSles, buffer, (size_t) pSles->bufSizeInFrames);
+                ssize_t actual = byteBuffer_write(pSles->byteBufferPtr, pSles->byteBufferLength,
+                        buffer, (size_t) pSles->bufSizeInFrames, pSles->channels);
 
                 //FIXME should log errors using other methods instead of printing to terminal
                 if (actual != (ssize_t) pSles->bufSizeInFrames) {
@@ -159,7 +179,7 @@
         if (rxRearNext > pSles->rxBufCount) {
             rxRearNext = 0;
         }
-        assert(rxRearNext != pSles->rxFront);
+        ASSERT(rxRearNext != pSles->rxFront);
         pSles->rxBuffers[pSles->rxRear] = buffer;
         pSles->rxRear = rxRearNext;
 
@@ -171,47 +191,9 @@
 }
 
 
-// Write "count" amount of short from buffer to pSles->byteBufferPtr. This byteBuffer will read by
-// java code.
-ssize_t byteBuffer_write(sles_data *pSles, char *buffer, size_t count) {
-    // bytebufferSize is in byte
-    int32_t rear; // rear should not exceed 2^31 - 1, or else overflow will happen
-    memcpy(&rear, (char *) (pSles->byteBufferPtr + pSles->byteBufferLength - 4), sizeof(rear));
-
-    size_t frameSize = pSles->channels * sizeof(short); // only one channel
-    int32_t maxLengthInShort = (pSles->byteBufferLength - 4) / frameSize;
-    // mask the upper bits to get the correct position in the pipe
-    int32_t tempRear = rear & (maxLengthInShort - 1);
-    size_t part1 = maxLengthInShort - tempRear;
-
-    if (part1 > count) {
-        part1 = count;
-    }
-
-    if (part1 > 0) {
-        memcpy(pSles->byteBufferPtr + (tempRear * frameSize), buffer,
-               part1 * frameSize);
-
-        size_t part2 = count - part1;
-        if (part2 > 0) {
-            memcpy(pSles->byteBufferPtr, (buffer + (part1 * frameSize)),
-                   part2 * frameSize);
-        }
-
-        //TODO do we need something similar to the below function call?
-        //android_atomic_release_store(audio_utils_fifo_sum(fifo, fifo->mRear, availToWrite),
-        //        &fifo->mRear);
-    }
-
-    // increase value of rear
-    int32_t* rear2 = (int32_t *) (pSles->byteBufferPtr + pSles->byteBufferLength - 4);
-    *rear2 += count;
-    return count;
-}
-
 // Calculate nanosecond difference between two timespec structs from clock_gettime(CLOCK_MONOTONIC)
 // tv_sec [0, max time_t] , tv_nsec [0, 999999999]
-int64_t diffInNano(struct timespec previousTime, struct timespec currentTime) {
+static int64_t diffInNano(struct timespec previousTime, struct timespec currentTime) {
     return (int64_t) (currentTime.tv_sec - previousTime.tv_sec) * (int64_t) NANOS_PER_SECOND +
             currentTime.tv_nsec - previousTime.tv_nsec;
 }
@@ -227,9 +209,9 @@
         //ee  SLES_PRINTF("<P");
 
         // Get the buffer that just finished playing
-        assert(pSles->txFront <= pSles->txBufCount);
-        assert(pSles->txRear <= pSles->txBufCount);
-        assert(pSles->txFront != pSles->txRear);
+        ASSERT(pSles->txFront <= pSles->txBufCount);
+        ASSERT(pSles->txRear <= pSles->txBufCount);
+        ASSERT(pSles->txFront != pSles->txRear);
         char *buffer = pSles->txBuffers[pSles->txFront];
         if (++pSles->txFront > pSles->txBufCount) {
             pSles->txFront = 0;
@@ -244,7 +226,8 @@
                 if (availToRead < pSles->bufSizeInFrames * 2) {
                     break;
                 }
-                ssize_t actual = audio_utils_fifo_read(&pSles->fifo, buffer, pSles->bufSizeInFrames);
+                ssize_t actual = audio_utils_fifo_read(&pSles->fifo, buffer,
+                        pSles->bufSizeInFrames);
                 if (actual > 0) {
                     discardedInputFrames += actual;
                 }
@@ -327,13 +310,13 @@
         ASSERT_EQ(SL_RESULT_SUCCESS, result);
 
         // Update our model of the player queue
-        assert(pSles->txFront <= pSles->txBufCount);
-        assert(pSles->txRear <= pSles->txBufCount);
+        ASSERT(pSles->txFront <= pSles->txBufCount);
+        ASSERT(pSles->txRear <= pSles->txBufCount);
         SLuint32 txRearNext = pSles->txRear + 1;
         if (txRearNext > pSles->txBufCount) {
             txRearNext = 0;
         }
-        assert(txRearNext != pSles->txFront);
+        ASSERT(txRearNext != pSles->txFront);
         pSles->txBuffers[pSles->txRear] = buffer;
         pSles->txRear = txRearNext;
 
@@ -341,7 +324,7 @@
 }
 
 // Used to set initial values for the bufferStats struct before values can be recorded.
-void initBufferStats(bufferStats *stats) {
+static void initBufferStats(bufferStats *stats) {
     stats->buffer_period = new int[RANGE](); // initialized to zeros
     stats->previous_time = {0,0};
     stats->current_time = {0,0};
@@ -356,8 +339,8 @@
 
 // Called in the beginning of playerCallback() to collect the interval between each callback.
 // fdpStats is either NULL or a pointer to the buffer statistics for the full-duplex partner.
-void collectBufferPeriod(bufferStats *stats, bufferStats *fdpStats, callbackTimeStamps *timeStamps,
-                         short expectedBufferPeriod) {
+static void collectBufferPeriod(bufferStats *stats, bufferStats *fdpStats,
+        callbackTimeStamps *timeStamps, short expectedBufferPeriod) {
     clock_gettime(CLOCK_MONOTONIC, &(stats->current_time));
 
     if (timeStamps->startTime.tv_sec == 0 && timeStamps->startTime.tv_nsec == 0) {
@@ -385,8 +368,8 @@
 
 // Records an outlier given the duration in nanoseconds and the number of nanoseconds
 // between it and the start of the test.
-void recordTimeStamp(callbackTimeStamps *timeStamps,
-                     int64_t callbackDuration, int64_t timeStamp) {
+static void recordTimeStamp(callbackTimeStamps *timeStamps,
+        int64_t callbackDuration, int64_t timeStamp) {
     if (timeStamps->exceededCapacity) {
         return;
     }
@@ -403,7 +386,7 @@
     }
 }
 
-void atomicSetIfGreater(volatile int32_t *addr, int32_t val) {
+static void atomicSetIfGreater(volatile int32_t *addr, int32_t val) {
     // TODO: rewrite this to avoid the need for unbounded spinning
     int32_t old;
     do {
@@ -413,7 +396,7 @@
 }
 
 // Updates the stats being collected about buffer periods. Returns true if this is an outlier.
-bool updateBufferStats(bufferStats *stats, int64_t diff_in_nano, int expectedBufferPeriod) {
+static bool updateBufferStats(bufferStats *stats, int64_t diff_in_nano, int expectedBufferPeriod) {
     stats->measurement_count++;
 
     // round up to nearest millisecond
@@ -449,11 +432,11 @@
     return diff_in_milli > expectedBufferPeriod + LATE_CALLBACK_OUTLIER_THRESHOLD;
 }
 
-int slesCreateServer(sles_data *pSles, int samplingRate, int frameCount, int micSource,
-                     int performanceMode,
-                     int testType, double frequency1, char *byteBufferPtr, int byteBufferLength,
-                     short *loopbackTone, int maxRecordedLateCallbacks, int ignoreFirstFrames) {
-    int status = SLES_FAIL;
+static int slesCreateServer(sles_data *pSles, int samplingRate, int frameCount, int micSource,
+        int performanceMode,
+        int testType, double frequency1, char *byteBufferPtr, int byteBufferLength,
+        short *loopbackTone, int maxRecordedLateCallbacks, int ignoreFirstFrames) {
+    int status = STATUS_FAIL;
 
     if (pSles != NULL) {
 
@@ -513,6 +496,7 @@
         //        char **freeBuffers;
 
         // Buffer indices
+#if 0
         pSles->rxFront;    // oldest recording
         pSles->rxRear;     // next to be recorded
         pSles->txFront;    // oldest playing
@@ -521,9 +505,12 @@
         pSles->freeRear;   // next to be freed
 
         pSles->fifo; //(*)
+#endif
         pSles->fifo2Buffer = NULL;  //this fifo is for sending data to java code (to plot it)
+#if 0
         pSles->recorderBufferQueue;
         pSles->playerBufferQueue;
+#endif
 
 
 
@@ -534,12 +521,12 @@
 
         // Initialize free buffers
         pSles->freeBuffers = (char **) calloc(pSles->freeBufCount + 1, sizeof(char *));
-        SLES_PRINTF("  calloc freeBuffers %zu bytes at %p",pSles->freeBufCount + 1,
+        SLES_PRINTF("  calloc freeBuffers %llu bytes at %p", (long long)pSles->freeBufCount + 1,
                     pSles->freeBuffers);
         unsigned j;
         for (j = 0; j < pSles->freeBufCount; ++j) {
             pSles->freeBuffers[j] = (char *) malloc(pSles->bufSizeInBytes);
-            SLES_PRINTF(" buff%d malloc %zu bytes at %p",j, pSles->bufSizeInBytes,
+            SLES_PRINTF(" buff%d malloc %llu bytes at %p",j, (long long)pSles->bufSizeInBytes,
                         pSles->freeBuffers[j]);
         }
         pSles->freeFront = 0;
@@ -548,13 +535,15 @@
 
         // Initialize record queue
         pSles->rxBuffers = (char **) calloc(pSles->rxBufCount + 1, sizeof(char *));
-        SLES_PRINTF("  calloc rxBuffers %zu bytes at %p",pSles->rxBufCount + 1, pSles->rxBuffers);
+        SLES_PRINTF("  calloc rxBuffers %llu bytes at %p", (long long)pSles->rxBufCount + 1,
+                pSles->rxBuffers);
         pSles->rxFront = 0;
         pSles->rxRear = 0;
 
         // Initialize play queue
         pSles->txBuffers = (char **) calloc(pSles->txBufCount + 1, sizeof(char *));
-        SLES_PRINTF("  calloc txBuffers %zu bytes at %p",pSles->txBufCount + 1, pSles->txBuffers);
+        SLES_PRINTF("  calloc txBuffers %llu bytes at %p", (long long)pSles->txBufCount + 1,
+                pSles->txBuffers);
         pSles->txFront = 0;
         pSles->txRear = 0;
 
@@ -621,7 +610,9 @@
         SLresult result;
 
         // create engine
+#if 0
         pSles->engineObject;
+#endif
         result = slCreateEngine(&(pSles->engineObject), 0, NULL, 0, NULL, NULL);
         ASSERT_EQ(SL_RESULT_SUCCESS, result);
         result = (*(pSles->engineObject))->Realize(pSles->engineObject, SL_BOOLEAN_FALSE);
@@ -632,7 +623,9 @@
         ASSERT_EQ(SL_RESULT_SUCCESS, result);
 
         // create output mix
+#if 0
         pSles->outputmixObject;
+#endif
         result = (*engineEngine)->CreateOutputMix(engineEngine, &(pSles->outputmixObject), 0, NULL,
                 NULL);
         ASSERT_EQ(SL_RESULT_SUCCESS, result);
@@ -668,8 +661,6 @@
         result = (*engineEngine)->CreateAudioPlayer(engineEngine, &(pSles->playerObject),
                 &audiosrc, &audiosnk, 2, ids_tx, flags_tx);
         if (SL_RESULT_CONTENT_UNSUPPORTED == result) {
-            fprintf(stderr, "Could not create audio player (result %x), check sample rate\n",
-                    result);
             SLES_PRINTF("ERROR: Could not create audio player (result %x), check sample rate\n",
                                                      result);
             goto cleanup;
@@ -710,7 +701,7 @@
         for (j = 0; j < pSles->txBufCount; ++j) {
 
             // allocate a free buffer
-            assert(pSles->freeFront != pSles->freeRear);
+            ASSERT(pSles->freeFront != pSles->freeRear);
             char *buffer = pSles->freeBuffers[pSles->freeFront];
             if (++pSles->freeFront > pSles->freeBufCount) {
                 pSles->freeFront = 0;
@@ -721,7 +712,7 @@
             if (txRearNext > pSles->txBufCount) {
                 txRearNext = 0;
             }
-            assert(txRearNext != pSles->txFront);
+            ASSERT(txRearNext != pSles->txFront);
             pSles->txBuffers[pSles->txRear] = buffer;
             pSles->txRear = txRearNext;
             result = (*(pSles->playerBufferQueue))->Enqueue(pSles->playerBufferQueue,
@@ -758,9 +749,7 @@
             result = (*engineEngine)->CreateAudioRecorder(engineEngine, &(pSles->recorderObject),
                     &audiosrc, &audiosnk, 2, ids_rx, flags_rx);
             if (SL_RESULT_SUCCESS != result) {
-                fprintf(stderr, "Could not create audio recorder (result %x), "
-                        "check sample rate and channel count\n", result);
-                status = SLES_FAIL;
+                status = STATUS_FAIL;
 
                 SLES_PRINTF("ERROR: Could not create audio recorder (result %x), "
                              "check sample rate and channel count\n", result);
@@ -814,7 +803,7 @@
         for (j = 0; j < pSles->rxBufCount; ++j) {
 
             // allocate a free buffer
-            assert(pSles->freeFront != pSles->freeRear);
+            ASSERT(pSles->freeFront != pSles->freeRear);
             char *buffer = pSles->freeBuffers[pSles->freeFront];
             if (++pSles->freeFront > pSles->freeBufCount) {
                 pSles->freeFront = 0;
@@ -825,7 +814,7 @@
             if (rxRearNext > pSles->rxBufCount) {
                 rxRearNext = 0;
             }
-            assert(rxRearNext != pSles->rxFront);
+            ASSERT(rxRearNext != pSles->rxFront);
             pSles->rxBuffers[pSles->rxRear] = buffer;
             pSles->rxRear = rxRearNext;
             result = (*(pSles->recorderBufferQueue))->Enqueue(pSles->recorderBufferQueue,
@@ -840,20 +829,19 @@
 
 
         // Tear down the objects and exit
-        status = SLES_SUCCESS;
+        status = STATUS_SUCCESS;
         cleanup:
 
         SLES_PRINTF("Finished initialization with status: %d", status);
 
-        int xx = 1;
-
     }
     return status;
 }
 
 // Read data from fifo2Buffer and store into pSamples.
-int slesProcessNext(sles_data *pSles, double *pSamples, long maxSamples) {
-    //int status = SLES_FAIL;
+int slesProcessNext(void *pCtx, double *pSamples, long maxSamples) {
+    //int status = STATUS_FAIL;
+    sles_data *pSles = (sles_data*)pCtx;
 
     SLES_PRINTF("slesProcessNext: pSles = %p, currentSample: %p,  maxSamples = %ld",
                 pSles, pSamples, maxSamples);
@@ -915,8 +903,8 @@
 }
 
 
-int slesDestroyServer(sles_data *pSles) {
-    int status = SLES_FAIL;
+static int slesDestroyServer(sles_data *pSles) {
+    int status = STATUS_FAIL;
 
      SLES_PRINTF("Start slesDestroyServer: pSles = %p", pSles);
 
@@ -1007,38 +995,45 @@
         }
 
 
-        status = SLES_SUCCESS;
+        status = STATUS_SUCCESS;
     }
     SLES_PRINTF("End slesDestroyServer: status = %d", status);
     return status;
 }
 
 
-int* slesGetRecorderBufferPeriod(sles_data *pSles) {
+int* slesGetRecorderBufferPeriod(void *pCtx) {
+    sles_data *pSles = (sles_data*)pCtx;
     return pSles->recorderBufferStats.buffer_period;
 }
 
-int slesGetRecorderMaxBufferPeriod(sles_data *pSles) {
+int slesGetRecorderMaxBufferPeriod(void *pCtx) {
+    sles_data *pSles = (sles_data*)pCtx;
     return pSles->recorderBufferStats.max_buffer_period;
 }
 
-int64_t slesGetRecorderVarianceBufferPeriod(sles_data *pSles) {
+int64_t slesGetRecorderVarianceBufferPeriod(void *pCtx) {
+    sles_data *pSles = (sles_data*)pCtx;
     return pSles->recorderBufferStats.var;
 }
 
-int* slesGetPlayerBufferPeriod(sles_data *pSles) {
+int* slesGetPlayerBufferPeriod(void *pCtx) {
+    sles_data *pSles = (sles_data*)pCtx;
     return pSles->playerBufferStats.buffer_period;
 }
 
-int slesGetPlayerMaxBufferPeriod(sles_data *pSles) {
+int slesGetPlayerMaxBufferPeriod(void *pCtx) {
+    sles_data *pSles = (sles_data*)pCtx;
     return pSles->playerBufferStats.max_buffer_period;
 }
 
-int64_t slesGetPlayerVarianceBufferPeriod(sles_data *pSles) {
+int64_t slesGetPlayerVarianceBufferPeriod(void *pCtx) {
+    sles_data *pSles = (sles_data*)pCtx;
     return pSles->playerBufferStats.var;
 }
 
-int slesGetCaptureRank(sles_data *pSles) {
+int slesGetCaptureRank(void *pCtx) {
+    sles_data *pSles = (sles_data*)pCtx;
     // clear the capture flags since they're being handled now
     int recorderRank = android_atomic_exchange(0, &pSles->recorderBufferStats.captureRank);
     int playerRank = android_atomic_exchange(0, &pSles->playerBufferStats.captureRank);
@@ -1049,3 +1044,15 @@
         return playerRank;
     }
 }
+
+int slesGetPlayerTimeStampsAndExpectedBufferPeriod(void *pCtx, callbackTimeStamps **ppTSs) {
+    sles_data *pSles = (sles_data*)pCtx;
+    *ppTSs = &pSles->playerTimeStamps;
+    return pSles->expectedBufferPeriod;
+}
+
+int slesGetRecorderTimeStampsAndExpectedBufferPeriod(void *pCtx, callbackTimeStamps **ppTSs) {
+    sles_data *pSles = (sles_data*)pCtx;
+    *ppTSs = &pSles->recorderTimeStamps;
+    return pSles->expectedBufferPeriod;
+}
diff --git a/LoopbackApp/app/src/main/cpp/sles.h b/LoopbackApp/app/src/main/cpp/sles.h
new file mode 100644
index 0000000..af82eb9
--- /dev/null
+++ b/LoopbackApp/app/src/main/cpp/sles.h
@@ -0,0 +1,113 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _Included_org_drrickorang_loopback_sles
+#define _Included_org_drrickorang_loopback_sles
+
+#include <SLES/OpenSLES.h>
+#include <SLES/OpenSLES_Android.h>
+#include <pthread.h>
+#include <android/log.h>
+#include <jni.h>
+#include <stdbool.h>
+
+//struct audio_utils_fifo;
+#define SLES_PRINTF(...)  __android_log_print(ANDROID_LOG_INFO, "sles_jni", __VA_ARGS__);
+
+#include <audio_utils/fifo.h>
+
+#include "loopback_sles.h"
+
+typedef struct {
+    int* buffer_period;
+    struct timespec previous_time;
+    struct timespec current_time;
+    int buffer_count;
+    int max_buffer_period;
+
+    volatile int32_t captureRank;   // Set > 0 when the callback requests a systrace/bug report
+
+    int measurement_count; // number of measurements which were actually recorded
+    int64_t SDM; // sum of squares of deviations from the expected mean
+    int64_t var; // variance in nanoseconds^2
+} bufferStats;
+
+//TODO fix this
+typedef struct {
+    SLuint32 rxBufCount;     // -r#
+    SLuint32 txBufCount;     // -t#
+    SLuint32 bufSizeInFrames;  // -f#
+    SLuint32 channels;       // -c#
+    SLuint32 sampleRate; // -s#
+    SLuint32 exitAfterSeconds; // -e#
+    SLuint32 freeBufCount;   // calculated
+    SLuint32 bufSizeInBytes; // calculated
+    int injectImpulse; // -i#i
+    size_t totalDiscardedInputFrames;   // total number of input frames discarded
+    int ignoreFirstFrames;
+
+    // Storage area for the buffer queues
+    char **rxBuffers;
+    char **txBuffers;
+    char **freeBuffers;
+
+    // Buffer indices
+    SLuint32 rxFront;    // oldest recording
+    SLuint32 rxRear;     // next to be recorded
+    SLuint32 txFront;    // oldest playing
+    SLuint32 txRear;     // next to be played
+    SLuint32 freeFront;  // oldest free
+    SLuint32 freeRear;   // next to be freed
+
+    struct audio_utils_fifo fifo;   // jitter buffer between recorder and player callbacks,
+                                    // to mitigate unpredictable phase difference between these,
+                                    // or even concurrent callbacks on two CPU cores
+    struct audio_utils_fifo fifo2;  // For sending data to java code (to plot it)
+    short *fifo2Buffer;
+    short *fifoBuffer;
+    SLAndroidSimpleBufferQueueItf recorderBufferQueue;
+    SLBufferQueueItf playerBufferQueue;
+
+    //other things that belong here
+    SLObjectItf playerObject;
+    SLObjectItf recorderObject;
+    SLObjectItf outputmixObject;
+    SLObjectItf engineObject;
+
+    bufferStats recorderBufferStats;
+    bufferStats playerBufferStats;
+
+    int testType;
+    double frequency1;
+    double bufferTestPhase1;
+    int count;
+    char* byteBufferPtr;
+    int byteBufferLength;
+
+    short* loopbackTone;
+
+    callbackTimeStamps recorderTimeStamps;
+    callbackTimeStamps playerTimeStamps;
+    short expectedBufferPeriod;
+} sles_data;
+
+// how late in ms a callback must be to trigger a systrace/bugreport
+#define LATE_CALLBACK_CAPTURE_THRESHOLD 4
+#define LATE_CALLBACK_OUTLIER_THRESHOLD 1
+#define BUFFER_PERIOD_DISCARD 10
+#define BUFFER_PERIOD_DISCARD_FULL_DUPLEX_PARTNER 2
+
+#endif //_Included_org_drrickorang_loopback_sles
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/AboutActivity.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/AboutActivity.java
index 83dfdfb..77e5fb4 100644
--- a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/AboutActivity.java
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/AboutActivity.java
@@ -28,7 +28,6 @@
 
 public class AboutActivity extends Activity {
 
-
     public void onCreate(Bundle savedInstanceState) {
         super.onCreate(savedInstanceState);
 
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/AudioFileOutput.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/AudioFileOutput.java
index e8b44a7..b4b8b2f 100644
--- a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/AudioFileOutput.java
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/AudioFileOutput.java
@@ -29,6 +29,7 @@
 
 /**
  * This class is used to save the results to a .wav file.
+ * FIXME Should save data in original resolution instead of converting to 16-bit PCM.
  */
 
 public class AudioFileOutput {
@@ -144,11 +145,11 @@
     }
 
 
-    private void writeDataBuffer(double [] data, int startIndex, int end) {
+    private void writeDataBuffer(double[] data, int startIndex, int end) {
         if (mOutputStream != null) {
             try {
                 int bufferSize = 1024; //blocks of 1024 samples
-                byte [] buffer = new byte[bufferSize * 2];
+                byte[] buffer = new byte[bufferSize * 2];
 
                 for (int ii = startIndex; ii < end; ii += bufferSize) {
                     //clear buffer
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/AudioTestService.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/AudioTestService.java
index 329d62b..7b25d61 100644
--- a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/AudioTestService.java
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/AudioTestService.java
@@ -17,7 +17,10 @@
 package org.drrickorang.loopback;
 
 import android.app.Notification;
+import android.app.NotificationChannel;
+import android.app.NotificationManager;
 import android.app.Service;
+import android.content.Context;
 import android.content.Intent;
 import android.os.Build;
 import android.os.IBinder;
@@ -33,9 +36,11 @@
 
 public class AudioTestService extends Service {
     private static final String TAG = "AudioTestService";
+    private static final String CHANNEL_ID = "AudioTestChannel";
+    private static final int NOTIFICATION_ID = 1400;
 
     private final IBinder mBinder = new AudioTestBinder();
-
+    private NotificationChannel mNotificationChannel;
 
     @Override
     public void onCreate() {
@@ -57,18 +62,30 @@
      * and restarted after a while.
      */
     private void runAsForegroundService() {
-        int notificationId = 1400;
-        Notification.Builder builder = new Notification.Builder(this)
-                .setSmallIcon(R.drawable.ic_launcher).setContentTitle("Loopback App")
-                .setContentText("Please disregard me.");
-        Notification notification;
-        if (Build.VERSION.SDK_INT < Build.VERSION_CODES.JELLY_BEAN) {
-            notification = builder.getNotification();
-        } else {
-            notification = builder.build();
+        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
+            mNotificationChannel = new NotificationChannel(
+                    CHANNEL_ID,
+                    getString(R.string.notificationText),
+                    NotificationManager.IMPORTANCE_LOW);
+            NotificationManager notificationManager =
+                    (NotificationManager) getSystemService(Context.NOTIFICATION_SERVICE);
+            notificationManager.createNotificationChannel(mNotificationChannel);
         }
 
-        startForeground(notificationId, notification);
+        Notification.Builder builder = new Notification.Builder(this)
+                .setSmallIcon(R.drawable.ic_launcher).setContentTitle(getString(R.string.app_name))
+                .setContentText(getString(R.string.notificationText));
+        if (mNotificationChannel != null) {
+            builder.setChannelId(CHANNEL_ID);
+        }
+        Notification notification;
+        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN) {
+            notification = builder.build();
+        } else {
+            notification = builder.getNotification();
+        }
+
+        startForeground(NOTIFICATION_ID, notification);
     }
 
 
@@ -82,6 +99,11 @@
     @Override
     public void onDestroy() {
         log("Service onDestroy");
+        if (mNotificationChannel != null) {
+            NotificationManager notificationManager =
+                    (NotificationManager) getSystemService(Context.NOTIFICATION_SERVICE);
+            notificationManager.deleteNotificationChannel(CHANNEL_ID);
+        }
     }
 
 
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/BufferCallbackTimes.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/BufferCallbackTimes.java
index 28e8c76..43eb5e7 100644
--- a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/BufferCallbackTimes.java
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/BufferCallbackTimes.java
@@ -26,7 +26,8 @@
  * Maintains and returns pairs of callback timestamps (in milliseconds since beginning of test) and
  * lengths (milliseconds between a callback and the previous callback).
  */
-public class BufferCallbackTimes implements Iterable<BufferCallbackTimes.BufferCallback>, Parcelable {
+public class BufferCallbackTimes implements Iterable<BufferCallbackTimes.BufferCallback>,
+        Parcelable {
     private final int[] mTimeStamps;
     private final short[] mCallbackDurations;
     private final short mExpectedBufferPeriod;
@@ -166,4 +167,5 @@
             callbackDuration = cd;
         }
     }
+
 }
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/BufferPeriod.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/BufferPeriod.java
index 97ab6ad..a6455e0 100644
--- a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/BufferPeriod.java
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/BufferPeriod.java
@@ -43,6 +43,7 @@
     private int       mMaxBufferPeriod = 0;
 
     private int       mCount = 0;
+    // Must match constant 'RANGE' in jni/loopback.h
     private final int range = 1002; // store counts for 0ms to 1000ms, and for > 1000ms
     private int       mExpectedBufferPeriod = 0;
 
@@ -121,7 +122,7 @@
     }
 
     public void prepareMemberObjects(int maxRecords, int expectedBufferPeriod,
-                                     CaptureHolder captureHolder){
+                                     CaptureHolder captureHolder) {
         mCallbackTimes = new BufferCallbackTimes(maxRecords, expectedBufferPeriod);
         mCaptureHolder = captureHolder;
         mExpectedBufferPeriod = expectedBufferPeriod;
@@ -139,7 +140,7 @@
         return mMaxBufferPeriod;
     }
 
-    public BufferCallbackTimes getCallbackTimes(){
+    public BufferCallbackTimes getCallbackTimes() {
         return mCallbackTimes;
     }
 
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/CaptureHolder.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/CaptureHolder.java
index 99143f2..be3590d 100644
--- a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/CaptureHolder.java
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/CaptureHolder.java
@@ -58,7 +58,7 @@
     private final boolean mIsCapturingBugreports;
     private final int mCaptureCapacity;
     private CaptureThread mCaptureThread;
-    private volatile CapturedState mCapturedStates[];
+    private final CapturedState mCapturedStates[];
     private WaveDataRingBuffer mWaveDataBuffer;
 
     //for creating AudioFileOutput objects
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/CatchEventsEditText.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/CatchEventsEditText.java
index b8b3f8f..4f36712 100644
--- a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/CatchEventsEditText.java
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/CatchEventsEditText.java
@@ -72,4 +72,5 @@
         }
         return super.onKeyPreIme(keyCode, event);
     }
-}
\ No newline at end of file
+
+}
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/Constant.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/Constant.java
index f132e3f..fd1dbec 100644
--- a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/Constant.java
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/Constant.java
@@ -27,12 +27,18 @@
     public static final int    MILLIS_PER_SECOND = 1000;
     public static final int    SECONDS_PER_HOUR = 3600;
 
+    // Must match constants in jni/loopback.h
     public static final int LOOPBACK_PLUG_AUDIO_THREAD_TEST_TYPE_LATENCY = 222;
     public static final int LOOPBACK_PLUG_AUDIO_THREAD_TEST_TYPE_BUFFER_PERIOD = 223;
     public static final int LOOPBACK_PLUG_AUDIO_THREAD_TEST_TYPE_CALIBRATION = 224;
 
+    // Keys for CTS Loopback invocation
+    public static final String KEY_CTSINVOCATION = "CTS-Test";
+    public static final String KEY_NUMITERATIONS = "NumIterations";
+
     public static final int AUDIO_THREAD_TYPE_JAVA = 0;
-    public static final int AUDIO_THREAD_TYPE_NATIVE = 1;
+    public static final int AUDIO_THREAD_TYPE_NATIVE_SLES = 1;
+    public static final int AUDIO_THREAD_TYPE_NATIVE_AAUDIO = 2;
 
     public static final int BYTES_PER_SHORT = 2;
     public static final int SHORTS_PER_INT = 2;
@@ -64,6 +70,9 @@
     // Settings Activity and ADB constants
     public static final int SAMPLING_RATE_MAX = 48000;
     public static final int SAMPLING_RATE_MIN = 8000;
+    public static final int CORRELATION_BLOCK_SIZE_MAX = 8192;
+    public static final int CORRELATION_BLOCK_SIZE_MIN = 2048;
+    public static final int DEFAULT_CORRELATION_BLOCK_SIZE = 4096;
     public static final int PLAYER_BUFFER_FRAMES_MAX = 8000;
     public static final int PLAYER_BUFFER_FRAMES_MIN = 16;
     public static final int RECORDER_BUFFER_FRAMES_MAX = 8000;
@@ -82,7 +91,6 @@
     public static final int MAX_IGNORE_FIRST_FRAMES = SAMPLING_RATE_MAX * 3 / 10;
     public static final int DEFAULT_IGNORE_FIRST_FRAMES = 0;
 
-
     // Controls size of pre allocated timestamp arrays
     public static final int MAX_RECORDED_LATE_CALLBACKS_PER_SECOND = 2;
     // Ignore first few buffer callback periods
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/Correlation.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/Correlation.java
index 6c59bd9..510a82b 100644
--- a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/Correlation.java
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/Correlation.java
@@ -29,10 +29,10 @@
 public class Correlation implements Parcelable {
     private static final String TAG = "Correlation";
 
-    private int       mBlockSize = 4096;
+    private int       mBlockSize = Constant.DEFAULT_CORRELATION_BLOCK_SIZE;
     private int       mSamplingRate;
-    private double [] mDataDownsampled = new double [mBlockSize];
-    private double [] mDataAutocorrelated = new double[mBlockSize];
+    private double [] mDataDownsampled;
+    private double [] mDataAutocorrelated;
 
     public double mEstimatedLatencySamples = 0;
     public double mEstimatedLatencyMs = 0;
@@ -46,18 +46,19 @@
 
     public Correlation() {
         // Default constructor for when no data will be restored
+
     }
 
     public void init(int blockSize, int samplingRate) {
-        mBlockSize = blockSize;
+        setBlockSize(blockSize);
         mSamplingRate = samplingRate;
     }
 
-
     public void computeCorrelation(double [] data, int samplingRate) {
         log("Started Auto Correlation for data with " + data.length + " points");
         mSamplingRate = samplingRate;
-
+        mDataDownsampled = new double [mBlockSize];
+        mDataAutocorrelated = new double[mBlockSize];
         downsampleData(data, mDataDownsampled, mAmplitudeThreshold);
 
         //correlation vector
@@ -123,7 +124,13 @@
         mDataIsValid = false;
     }
 
+    public void setBlockSize(int blockSize) {
+        mBlockSize = clamp(blockSize, Constant.CORRELATION_BLOCK_SIZE_MIN,
+                Constant.CORRELATION_BLOCK_SIZE_MAX);
+    }
+
     private boolean downsampleData(double [] data, double [] dataDownsampled, double threshold) {
+        log("Correlation block size used in down sample: " + mBlockSize);
 
         boolean status;
         for (int i = 0; i < mBlockSize; i++) {
@@ -190,6 +197,20 @@
         return status;
     }
 
+    /**
+     * Returns value if value is within inclusive bounds min through max
+     * otherwise returns min or max according to if value is less than or greater than the range
+     */
+    // TODO move to audio_utils
+    private int clamp(int value, int min, int max) {
+
+        if (max < min) throw new UnsupportedOperationException("min must be <= max");
+
+        if (value < min) return min;
+        else if (value > max) return max;
+        else return value;
+    }
+
     @Override
     public int describeContents() {
         return 0;
@@ -200,7 +221,7 @@
     public void writeToParcel(Parcel dest, int flags) {
         Bundle bundle = new Bundle();
         bundle.putBoolean("mDataIsValid", mDataIsValid);
-        if(mDataIsValid) {
+        if (mDataIsValid) {
             bundle.putDouble("mEstimatedLatencySamples", mEstimatedLatencySamples);
             bundle.putDouble("mEstimatedLatencyMs", mEstimatedLatencyMs);
             bundle.putDouble("mEstimatedLatencyConfidence", mEstimatedLatencyConfidence);
@@ -214,7 +235,7 @@
     private Correlation(Parcel in) {
         Bundle bundle = in.readBundle(getClass().getClassLoader());
         mDataIsValid = bundle.getBoolean("mDataIsValid");
-        if(mDataIsValid) {
+        if (mDataIsValid) {
             mEstimatedLatencySamples    = bundle.getDouble("mEstimatedLatencySamples");
             mEstimatedLatencyMs         = bundle.getDouble("mEstimatedLatencyMs");
             mEstimatedLatencyConfidence = bundle.getDouble("mEstimatedLatencyConfidence");
@@ -237,4 +258,5 @@
     private static void log(String msg) {
         Log.v(TAG, msg);
     }
+
 }
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/FFT.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/FFT.java
index e69efb0..6fe7b33 100644
--- a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/FFT.java
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/FFT.java
@@ -33,12 +33,8 @@
 
     FFT(int FFTSamplingSize) {
         mFFTSamplingSize = FFTSamplingSize;
-        setUpFFT();
-    }
 
-
-    /** This function is only called in constructor to set up variables needed for computing FFT. */
-    private void setUpFFT() {
+        // set up variables needed for computing FFT
         m = (int) (Math.log(mFFTSamplingSize) / Math.log(2));
 
         // Make sure n is a power of 2
@@ -109,4 +105,5 @@
             }
         }
     }
+
 }
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/GlitchAndCallbackHeatMapView.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/GlitchAndCallbackHeatMapView.java
index de24e81..c323241 100644
--- a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/GlitchAndCallbackHeatMapView.java
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/GlitchAndCallbackHeatMapView.java
@@ -495,4 +495,5 @@
             );
         }
     }
+
 }
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/GlitchDetectionThread.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/GlitchDetectionThread.java
index e52c116..398e319 100644
--- a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/GlitchDetectionThread.java
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/GlitchDetectionThread.java
@@ -157,6 +157,7 @@
 
 
     /** convert samples in shortBuffer to double, then copy into doubleBuffer. */
+    // TODO move to audio_utils
     private void bufferShortToDouble(short[] shortBuffer, double[] doubleBuffer) {
         double temp;
         for (int i = 0; i < shortBuffer.length; i++) {
@@ -221,22 +222,22 @@
         mFFTCount++;
     }
 
-    private void checkGlitchConcentration(){
+    private void checkGlitchConcentration() {
 
         final int recordedGlitch = mGlitches[mGlitchesIndex-1];
-        if (recordedGlitch - mLastGlitchCaptureAttempt <= COOLDOWN_WINDOW){
+        if (recordedGlitch - mLastGlitchCaptureAttempt <= COOLDOWN_WINDOW) {
             return;
         }
 
         final int windowBegin = recordedGlitch - GLITCH_CONCENTRATION_WINDOW_SIZE;
 
         int numGlitches = 0;
-        for (int index = mGlitchesIndex-1; index >= 0 && mGlitches[index] >= windowBegin; --index){
+        for (int index = mGlitchesIndex-1; index >= 0 && mGlitches[index] >= windowBegin; --index) {
             ++numGlitches;
         }
 
         int captureResponse = mCaptureHolder.captureState(numGlitches);
-        if (captureResponse != CaptureHolder.NEW_CAPTURE_IS_LEAST_INTERESTING){
+        if (captureResponse != CaptureHolder.NEW_CAPTURE_IS_LEAST_INTERESTING) {
             mLastGlitchCaptureAttempt = recordedGlitch;
         }
 
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/GlitchesStringBuilder.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/GlitchesStringBuilder.java
index 535d991..8437dfe 100644
--- a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/GlitchesStringBuilder.java
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/GlitchesStringBuilder.java
@@ -30,6 +30,10 @@
 public class GlitchesStringBuilder {
     private static final String TAG = "GlitchesStringBuilder";
 
+    private GlitchesStringBuilder() {
+        // not instantiable
+        throw new RuntimeException("not reachable");
+    }
 
     public static String getGlitchString(int fftsamplingsize, int FFTOverlapSamples,
                                          int[] glitchesData, int samplingRate,
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/HistogramView.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/HistogramView.java
index 1055168..31e987b 100644
--- a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/HistogramView.java
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/HistogramView.java
@@ -87,7 +87,8 @@
         fillCanvas(canvas, this.getRight(), this.getBottom());
     }
 
-    public void fillCanvas(Canvas canvas, int right, int bottom){
+    // also called in LoopbackActivity.java
+    void fillCanvas(Canvas canvas, int right, int bottom) {
         canvas.drawColor(Color.GRAY);
 
         if (mData == null || mData.length == 0) {
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/LoadThread.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/LoadThread.java
index 9c98c2e..142dabb 100644
--- a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/LoadThread.java
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/LoadThread.java
@@ -36,7 +36,7 @@
         log("Entering load thread");
         long count = 0;
         mIsRunning = true;
-        while(mIsRunning) {
+        while (mIsRunning) {
             count++;
         }
 
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/LoopbackActivity.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/LoopbackActivity.java
index d3acd03..2549624 100644
--- a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/LoopbackActivity.java
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/LoopbackActivity.java
@@ -19,6 +19,8 @@
 import android.Manifest;
 import android.app.Activity;
 import android.app.DialogFragment;
+import android.app.Fragment;
+import android.app.FragmentManager;
 import android.content.ComponentName;
 import android.content.Context;
 import android.content.Intent;
@@ -27,17 +29,21 @@
 import android.database.Cursor;
 import android.graphics.Bitmap;
 import android.graphics.Canvas;
+import android.hardware.usb.UsbDevice;
+import android.hardware.usb.UsbManager;
+import android.media.AudioDeviceInfo;
 import android.media.AudioManager;
 import android.net.Uri;
 import android.os.Build;
 import android.os.Bundle;
 import android.os.Handler;
 import android.os.IBinder;
+import android.os.Looper;
 import android.os.Message;
 import android.os.ParcelFileDescriptor;
 import android.provider.MediaStore;
-import android.support.v4.app.ActivityCompat;
-import android.support.v4.content.ContextCompat;
+import androidx.core.app.ActivityCompat;
+import androidx.core.content.ContextCompat;
 import android.text.format.DateFormat;
 import android.util.Log;
 import android.view.Gravity;
@@ -57,7 +63,9 @@
 import java.io.FileDescriptor;
 import java.io.FileOutputStream;
 import java.util.Arrays;
+import java.util.HashMap;
 import java.util.Locale;
+import java.util.Map;
 
 
 /**
@@ -95,6 +103,7 @@
     private static final int BUFFER_TEST_ENDED = 303;
     private static final int CALIBRATION_STARTED = 304;
     private static final int CALIBRATION_ENDED = 305;
+    private static final int NO_TEST_ACTIVE = 306;
 
     // 0-100 controls compression rate, currently ignore because PNG format is being used
     private static final int EXPORTED_IMAGE_QUALITY = 100;
@@ -117,11 +126,12 @@
     private TextView mTextViewCurrentLevel;
     private TextView mTextViewResultSummary;
 
-    private int          mTestType;
-    private double []    mWaveData;    // this is where we store the data for the wave plot
-    private Correlation  mCorrelation = new Correlation();
-    private BufferPeriod mRecorderBufferPeriod = new BufferPeriod();
-    private BufferPeriod mPlayerBufferPeriod = new BufferPeriod();
+    private static final String TAG_RETAINED_FRAGMENT = "RetainedFragment";
+    private RetainedFragment mRetainedFragment;
+    private int              mTestType;
+    private Correlation      mCorrelation = new Correlation();
+    private BufferPeriod     mRecorderBufferPeriod = new BufferPeriod();
+    private BufferPeriod     mPlayerBufferPeriod = new BufferPeriod();
 
     // for native buffer period
     private int[]  mNativeRecorderBufferPeriodArray;
@@ -135,6 +145,7 @@
 
     private static final String INTENT_SAMPLING_FREQUENCY = "SF";
     private static final String INTENT_CHANNEL_INDEX = "CI";
+    private static final String INTENT_CORRELATION_BLOCK_SIZE = "BS";
     private static final String INTENT_FILENAME = "FileName";
     private static final String INTENT_RECORDER_BUFFER = "RecorderBuffer";
     private static final String INTENT_PLAYER_BUFFER = "PlayerBuffer";
@@ -150,9 +161,10 @@
     private static final String INTENT_ENABLE_WAVCAPTURE = "CaptureWavs";
     private static final String INTENT_NUM_CAPTURES = "NumCaptures";
     private static final String INTENT_WAV_DURATION = "WavDuration";
+    private static final String INTENT_USB_AUDIO_ROUTE = "USB";
 
     // for running the test using adb command
-    private boolean mIntentRunning = false; // if it is running triggered by intent with parameters
+    private volatile boolean mIntentRunning; // if it is running triggered by intent with parameters
     private String  mIntentFileName;
 
     // Note: these values should only be assigned in restartAudioSystem()
@@ -177,6 +189,10 @@
     private int     mBufferTestDurationInSeconds;
     private int     mBufferTestWavePlotDurationInSeconds;
 
+    // CTS Test Flag
+    private boolean mIsCTSTest;
+    private int     mCtsNumIterations;
+
     // threads that load CPUs
     private LoadThread[]     mLoadThreads;
 
@@ -195,10 +211,11 @@
         }
     };
 
-    private Handler mMessageHandler = new Handler() {
+    private Handler mMessageHandler = new Handler(Looper.getMainLooper()) {
         public void handleMessage(Message msg) {
             super.handleMessage(msg);
             switch (msg.what) {
+
             case LoopbackAudioThread.LOOPBACK_AUDIO_THREAD_MESSAGE_LATENCY_REC_STARTED:
                 log("got message java latency test started!!");
                 showToast("Java Latency Test Started");
@@ -206,21 +223,23 @@
                 refreshState();
                 refreshPlots();
                 break;
+
             case LoopbackAudioThread.LOOPBACK_AUDIO_THREAD_MESSAGE_LATENCY_REC_ERROR:
                 log("got message java latency test rec can't start!!");
-                showToast("Java Latency Test Recording Error. Please try again");
+                showToastImportant("Java Latency Test Recording Error. Please try again");
                 refreshState();
                 stopAudioTestThreads();
                 mIntentRunning = false;
                 refreshSoundLevelBar();
                 break;
+
             case LoopbackAudioThread.LOOPBACK_AUDIO_THREAD_MESSAGE_LATENCY_REC_STOP:
             case LoopbackAudioThread.LOOPBACK_AUDIO_THREAD_MESSAGE_LATENCY_REC_COMPLETE:
                 if (mAudioThread != null) {
-                    mWaveData = mAudioThread.getWaveData();
+                    mRetainedFragment.setWaveData(mAudioThread.getWaveData());
                     mRecorderCallbackTimes = mRecorderBufferPeriod.getCallbackTimes();
                     mPlayerCallbackTimes = mPlayerBufferPeriod.getCallbackTimes();
-                    mCorrelation.computeCorrelation(mWaveData, mSamplingRate);
+                    mCorrelation.computeCorrelation(mRetainedFragment.getWaveData(), mSamplingRate);
                     log("got message java latency rec complete!!");
                     refreshPlots();
                     refreshState();
@@ -242,6 +261,7 @@
                 }
                 refreshSoundLevelBar();
                 break;
+
             case LoopbackAudioThread.LOOPBACK_AUDIO_THREAD_MESSAGE_BUFFER_REC_STARTED:
                 log("got message java buffer test rec started!!");
                 showToast("Java Buffer Test Started");
@@ -250,18 +270,20 @@
                 refreshPlots();
                 mBufferTestStartTime = System.currentTimeMillis();
                 break;
+
             case LoopbackAudioThread.LOOPBACK_AUDIO_THREAD_MESSAGE_BUFFER_REC_ERROR:
                 log("got message java buffer test rec can't start!!");
-                showToast("Java Buffer Test Recording Error. Please try again");
+                showToastImportant("Java Buffer Test Recording Error. Please try again");
                 refreshState();
                 stopAudioTestThreads();
                 mIntentRunning = false;
                 refreshSoundLevelBar();
                 break;
+
             case LoopbackAudioThread.LOOPBACK_AUDIO_THREAD_MESSAGE_BUFFER_REC_STOP:
             case LoopbackAudioThread.LOOPBACK_AUDIO_THREAD_MESSAGE_BUFFER_REC_COMPLETE:
                 if (mAudioThread != null) {
-                    mWaveData = mAudioThread.getWaveData();
+                    mRetainedFragment.setWaveData(mAudioThread.getWaveData());
                     mGlitchesData = mAudioThread.getAllGlitches();
                     mGlitchingIntervalTooLong = mAudioThread.getGlitchingIntervalTooLong();
                     mFFTSamplingSize = mAudioThread.getFFTSamplingSize();
@@ -293,13 +315,15 @@
                 }
                 refreshSoundLevelBar();
                 break;
+
             case NativeAudioThread.LOOPBACK_NATIVE_AUDIO_THREAD_MESSAGE_LATENCY_REC_STARTED:
-                log("got message native latency test rec started!!");
                 showToast("Native Latency Test Started");
+                log("got message native latency test rec started!!");
                 resetResults();
                 refreshState();
                 refreshPlots();
                 break;
+
             case NativeAudioThread.LOOPBACK_NATIVE_AUDIO_THREAD_MESSAGE_BUFFER_REC_STARTED:
                 log("got message native buffer test rec started!!");
                 showToast("Native Buffer Test Started");
@@ -308,20 +332,23 @@
                 refreshPlots();
                 mBufferTestStartTime = System.currentTimeMillis();
                 break;
+
             case NativeAudioThread.LOOPBACK_NATIVE_AUDIO_THREAD_MESSAGE_LATENCY_REC_ERROR:
                 log("got message native latency test rec can't start!!");
-                showToast("Native Latency Test Recording Error. Please try again");
+                showToastImportant("Native Latency Test Recording Error. Please try again");
                 refreshState();
                 mIntentRunning = false;
                 refreshSoundLevelBar();
                 break;
+
             case NativeAudioThread.LOOPBACK_NATIVE_AUDIO_THREAD_MESSAGE_BUFFER_REC_ERROR:
                 log("got message native buffer test rec can't start!!");
-                showToast("Native Buffer Test Recording Error. Please try again");
+                showToastImportant("Native Buffer Test Recording Error. Please try again");
                 refreshState();
                 mIntentRunning = false;
                 refreshSoundLevelBar();
                 break;
+
             case NativeAudioThread.LOOPBACK_NATIVE_AUDIO_THREAD_MESSAGE_BUFFER_REC_STOP:
             case NativeAudioThread.LOOPBACK_NATIVE_AUDIO_THREAD_MESSAGE_LATENCY_REC_STOP:
             case NativeAudioThread.LOOPBACK_NATIVE_AUDIO_THREAD_MESSAGE_BUFFER_REC_COMPLETE:
@@ -333,7 +360,7 @@
                     mGlitchingIntervalTooLong = mNativeAudioThread.getGlitchingIntervalTooLong();
                     mFFTSamplingSize = mNativeAudioThread.getNativeFFTSamplingSize();
                     mFFTOverlapSamples = mNativeAudioThread.getNativeFFTOverlapSamples();
-                    mWaveData = mNativeAudioThread.getWaveData();
+                    mRetainedFragment.setWaveData(mNativeAudioThread.getWaveData());
                     mNativeRecorderBufferPeriodArray = mNativeAudioThread.getRecorderBufferPeriod();
                     mNativeRecorderMaxBufferPeriod =
                             mNativeAudioThread.getRecorderMaxBufferPeriod();
@@ -348,7 +375,8 @@
 
                     if (msg.what != NativeAudioThread.
                             LOOPBACK_NATIVE_AUDIO_THREAD_MESSAGE_BUFFER_REC_COMPLETE) {
-                        mCorrelation.computeCorrelation(mWaveData, mSamplingRate);
+                        mCorrelation.computeCorrelation(mRetainedFragment.getWaveData(),
+                                mSamplingRate);
                     }
 
                     log("got message native buffer test rec complete!!");
@@ -363,7 +391,7 @@
                                 LOOPBACK_NATIVE_AUDIO_THREAD_MESSAGE_BUFFER_REC_COMPLETE_ERRORS:
                         case NativeAudioThread.
                                 LOOPBACK_NATIVE_AUDIO_THREAD_MESSAGE_LATENCY_REC_COMPLETE_ERRORS:
-                        showToast("Native Test Completed with Fatal Errors");
+                        showToastImportant("Native Test Completed with Fatal Errors");
                         break;
                         case NativeAudioThread.LOOPBACK_NATIVE_AUDIO_THREAD_MESSAGE_BUFFER_REC_STOP:
                         case NativeAudioThread.
@@ -388,6 +416,7 @@
                 }  // mNativeAudioThread != null
                 refreshSoundLevelBar();
                 break;
+
             default:
                 log("Got message:" + msg.what);
                 break;
@@ -395,6 +424,7 @@
 
             // Control UI elements visibility specific to latency or buffer/glitch test
             switch (msg.what) {
+
                 // Latency test started
                 case LoopbackAudioThread.LOOPBACK_AUDIO_THREAD_MESSAGE_LATENCY_REC_STARTED:
                 case NativeAudioThread.LOOPBACK_NATIVE_AUDIO_THREAD_MESSAGE_LATENCY_REC_STARTED:
@@ -411,6 +441,13 @@
                 case NativeAudioThread.
                         LOOPBACK_NATIVE_AUDIO_THREAD_MESSAGE_LATENCY_REC_COMPLETE_ERRORS:
                     setTransportButtonsState(LATENCY_TEST_ENDED);
+                    if (mIsCTSTest) {
+                        Intent intent = getIntent();
+                        intent.putExtra("RoundTripTime", mCorrelation.mEstimatedLatencyMs);
+                        intent.putExtra("Confidence", mCorrelation.mEstimatedLatencyConfidence);
+                        setResult(RESULT_OK, intent);
+                        finish();
+                    }
                     break;
 
                 // Buffer test started
@@ -440,10 +477,32 @@
                 case CALIBRATION_ENDED:
                     setTransportButtonsState(CALIBRATION_ENDED);
                     break;
+
+                default:
+                    log("Got message:" + msg.what);
+                    break;
             }
         }
     };
 
+    public static class RetainedFragment extends Fragment {
+        private double[] mWaveData;    // this is where we store the data for the wave plot
+
+        // this method is only called once for this fragment
+        @Override
+        public void onCreate(Bundle savedInstanceState) {
+            super.onCreate(savedInstanceState);
+            setRetainInstance(true);
+        }
+
+        public void setWaveData(double[] waveData) {
+            this.mWaveData = waveData;
+        }
+
+        public double[] getWaveData() {
+            return mWaveData;
+        }
+    }
 
     @Override
     public void onCreate(Bundle savedInstanceState) {
@@ -453,12 +512,21 @@
         View view = getLayoutInflater().inflate(R.layout.main_activity, null);
         setContentView(view);
 
+        // find the retained fragment on activity restarts
+        FragmentManager fm = getFragmentManager();
+        mRetainedFragment = (RetainedFragment) fm.findFragmentByTag(TAG_RETAINED_FRAGMENT);
+        // create the fragment and its data the first time
+        if (mRetainedFragment == null) {
+            mRetainedFragment = new RetainedFragment();
+            fm.beginTransaction().add(mRetainedFragment, TAG_RETAINED_FRAGMENT).commit();
+        }
+
         // TODO: Write script to file at more appropriate time, from settings activity or intent
         // TODO: Respond to failure with more than just a toast
-        if (hasWriteFilePermission()){
+        if (hasWriteFilePermission()) {
             boolean successfulWrite = AtraceScriptsWriter.writeScriptsToFile(this);
-            if(!successfulWrite) {
-                showToast("Unable to write loopback_listener script to device");
+            if (!successfulWrite) {
+                showToastImportant("Unable to write loopback_listener script to device");
             }
         } else {
             requestWriteFilePermission(PERMISSIONS_REQUEST_WRITE_EXTERNAL_STORAGE_SCRIPT);
@@ -498,7 +566,7 @@
         mTextViewResultSummary = (TextView) findViewById(R.id.resultSummary);
         refreshSoundLevelBar();
 
-        if(savedInstanceState != null) {
+        if (savedInstanceState != null) {
             restoreInstanceState(savedInstanceState);
         }
 
@@ -506,6 +574,8 @@
             requestRecordAudioPermission(PERMISSIONS_REQUEST_RECORD_AUDIO_LATENCY);
         }
 
+        setTransportButtonsState(NO_TEST_ACTIVE);
+
         applyIntent(getIntent());
     }
 
@@ -517,8 +587,7 @@
         boolean bound = bindService(audioTestIntent, mServiceConnection, Context.BIND_AUTO_CREATE);
         if (bound) {
             log("Successfully bound to service!");
-        }
-        else {
+        } else {
             log("Failed to bind service!");
         }
     }
@@ -549,6 +618,13 @@
      */
     private void applyIntent(Intent intent) {
         Bundle b = intent.getExtras();
+
+        if (b != null) {
+            for (String key: b.keySet()) {
+                Log.d (TAG, key + " is a key in the bundle");
+            }
+        }
+
         if (b != null && !mIntentRunning) {
             // adb shell am start -n org.drrickorang.loopback/.LoopbackActivity
             // --ei SF 48000 --es FileName test1 --ei RecorderBuffer 512 --ei PlayerBuffer 512
@@ -574,6 +650,24 @@
                 return;
             }
 
+            if (b.containsKey(INTENT_AUDIO_THREAD)) {
+                int newAudioThreadType = b.getInt(INTENT_AUDIO_THREAD);
+                if (newAudioThreadType != getApp().getAudioThreadType()) {
+                    getApp().setAudioThreadType(newAudioThreadType);
+                    getApp().computeDefaults();
+                }
+                mIntentRunning = true;
+            }
+
+            if (b.containsKey(INTENT_PERFORMANCE_MODE)) {
+                int newPerformanceMode = b.getInt(INTENT_PERFORMANCE_MODE);
+                if (newPerformanceMode != getApp().getPerformanceMode()) {
+                    getApp().setPerformanceMode(newPerformanceMode);
+                    getApp().computeDefaults();
+                }
+                mIntentRunning = true;
+            }
+
             if (b.containsKey(INTENT_BUFFER_TEST_DURATION)) {
                 getApp().setBufferTestDuration(b.getInt(INTENT_BUFFER_TEST_DURATION));
                 mIntentRunning = true;
@@ -584,6 +678,11 @@
                 mIntentRunning = true;
             }
 
+            if (b.containsKey(INTENT_CORRELATION_BLOCK_SIZE)) {
+                mCorrelation.setBlockSize(b.getInt(INTENT_CORRELATION_BLOCK_SIZE));
+                mIntentRunning = true;
+            }
+
             if (b.containsKey(INTENT_CHANNEL_INDEX)) {
                 getApp().setChannelIndex(b.getInt(INTENT_CHANNEL_INDEX));
                 mChannelIndex = b.getInt(INTENT_CHANNEL_INDEX);
@@ -607,21 +706,11 @@
                 mIntentRunning = true;
             }
 
-            if (b.containsKey(INTENT_AUDIO_THREAD)) {
-                getApp().setAudioThreadType(b.getInt(INTENT_AUDIO_THREAD));
-                mIntentRunning = true;
-            }
-
             if (b.containsKey(INTENT_MIC_SOURCE)) {
                 getApp().setMicSource(b.getInt(INTENT_MIC_SOURCE));
                 mIntentRunning = true;
             }
 
-            if (b.containsKey(INTENT_PERFORMANCE_MODE)) {
-                getApp().setPerformanceMode(b.getInt(INTENT_PERFORMANCE_MODE));
-                mIntentRunning = true;
-            }
-
             if (b.containsKey(INTENT_IGNORE_FIRST_FRAMES)) {
                 getApp().setIgnoreFirstFrames(b.getInt(INTENT_IGNORE_FIRST_FRAMES));
                 mIntentRunning = true;
@@ -664,6 +753,10 @@
                 mIntentRunning = true;
             }
 
+            if (b.containsKey(INTENT_USB_AUDIO_ROUTE)) {
+                waitForUsbRoute();
+            }
+
             if (mIntentRunning || b.containsKey(INTENT_TEST_TYPE)) {
                 // run tests with provided or default parameters
                 refreshState();
@@ -680,6 +773,10 @@
                         break;
                     case Constant.LOOPBACK_PLUG_AUDIO_THREAD_TEST_TYPE_LATENCY:
                     default:
+                        if (b.containsKey(Constant.KEY_CTSINVOCATION)) {
+                            mIsCTSTest = true;
+                            mCtsNumIterations = b.getInt(Constant.KEY_NUMITERATIONS);
+                        }
                         startLatencyTest();
                         break;
                 }
@@ -733,16 +830,25 @@
     protected void onResume() {
         super.onResume();
         log("on resume called");
+        setTransportButtonsState(NO_TEST_ACTIVE);
     }
 
 
-    @Override
-    protected void onPause() {
-        super.onPause();
-    }
+//    @Override
+//    protected void onPause() {
+//        super.onPause();
+//        // this means that this activity will not be recreated now, user is leaving it
+//        // or the activity is otherwise finishing
+//        if(isFinishing()) {
+//            FragmentManager fm = getFragmentManager();
+//            // we will not need this fragment anymore, this may also be a good place to signal
+//            // to the retained fragment object to perform its own cleanup.
+//            fm.beginTransaction().remove(mRetainedFragment).commit();
+//        }
+//    }
 
     @Override
-    public boolean onCreateOptionsMenu(Menu menu){
+    public boolean onCreateOptionsMenu(Menu menu) {
         MenuInflater inflater = getMenuInflater();
         inflater.inflate(R.menu.tool_bar_menu, menu);
         return true;
@@ -772,6 +878,9 @@
                     showToast("Test in progress... please wait");
                 }
                 return true;
+
+            default:
+                break;
         }
 
         return super.onOptionsItemSelected(item);
@@ -825,7 +934,9 @@
 
         // select java or native audio thread
         int micSourceMapped;
+
         switch (mAudioThreadType) {
+
         case Constant.AUDIO_THREAD_TYPE_JAVA:
             micSourceMapped = getApp().mapMicSource(Constant.AUDIO_THREAD_TYPE_JAVA, mMicSource);
 
@@ -844,7 +955,8 @@
                     expectedPlayerBufferPeriod, mCaptureHolder);
 
             mAudioThread = new LoopbackAudioThread(mSamplingRate, mPlayerBufferSizeInBytes,
-                          mRecorderBufferSizeInBytes, micSourceMapped, /* no performance mode */ mRecorderBufferPeriod,
+                          mRecorderBufferSizeInBytes, micSourceMapped,
+                          /* no performance mode */ mRecorderBufferPeriod,
                           mPlayerBufferPeriod, mTestType, mBufferTestDurationInSeconds,
                           mBufferTestWavePlotDurationInSeconds, getApplicationContext(),
                           mChannelIndex, mCaptureHolder);
@@ -852,13 +964,16 @@
             mAudioThread.mSessionId = sessionId;
             mAudioThread.start();
             break;
-        case Constant.AUDIO_THREAD_TYPE_NATIVE:
-            micSourceMapped = getApp().mapMicSource(Constant.AUDIO_THREAD_TYPE_NATIVE, mMicSource);
+        case Constant.AUDIO_THREAD_TYPE_NATIVE_SLES:
+        case Constant.AUDIO_THREAD_TYPE_NATIVE_AAUDIO:
+            micSourceMapped = getApp().mapMicSource(Constant.AUDIO_THREAD_TYPE_NATIVE_SLES,
+                    mMicSource);
             int performanceModeMapped = getApp().mapPerformanceMode(mPerformanceMode);
             // Note: mRecorderBufferSizeInBytes will not actually be used, since recorder buffer
             // size = player buffer size in native mode
-            mNativeAudioThread = new NativeAudioThread(mSamplingRate, mPlayerBufferSizeInBytes,
-                                mRecorderBufferSizeInBytes, micSourceMapped, performanceModeMapped, mTestType,
+            mNativeAudioThread = new NativeAudioThread(mAudioThreadType, mSamplingRate,
+                                mPlayerBufferSizeInBytes, mRecorderBufferSizeInBytes,
+                                micSourceMapped, performanceModeMapped, mTestType,
                                 mBufferTestDurationInSeconds, mBufferTestWavePlotDurationInSeconds,
                                 mIgnoreFirstFrames, mCaptureHolder);
             mNativeAudioThread.setMessageHandler(mMessageHandler);
@@ -918,13 +1033,32 @@
         playerBufferPeriod.resetRecord();
     }
 
+    private boolean canPerformBufferTest() {
+        // Retrieve the thread type from the app, because mAudioThreadType
+        // only gets populated during the start of the test.
+        switch (getApp().getAudioThreadType()) {
+            case Constant.AUDIO_THREAD_TYPE_JAVA:
+            case Constant.AUDIO_THREAD_TYPE_NATIVE_SLES:
+                return true;
+        }
+        // Buffer test isn't yet implemented for AAudio.
+        return false;
+    }
 
-    private void setTransportButtonsState(int state){
+    private void setTransportButtonsState(int state) {
         Button latencyStart = (Button) findViewById(R.id.buttonStartLatencyTest);
         Button bufferStart = (Button) findViewById(R.id.buttonStartBufferTest);
         Button calibrationStart = (Button) findViewById(R.id.buttonCalibrateSoundLevel);
 
+        boolean canEnableBufferTest = canPerformBufferTest();
+
         switch (state) {
+            case NO_TEST_ACTIVE:
+                latencyStart.setEnabled(true);
+                bufferStart.setEnabled(canEnableBufferTest);
+                calibrationStart.setEnabled(true);
+                break;
+
             case LATENCY_TEST_STARTED:
                 findViewById(R.id.zoomAndSaveControlPanel).setVisibility(View.INVISIBLE);
                 mTextViewResultSummary.setText("");
@@ -939,7 +1073,7 @@
                 findViewById(R.id.zoomAndSaveControlPanel).setVisibility(View.VISIBLE);
                 latencyStart.setCompoundDrawablesWithIntrinsicBounds(
                         R.drawable.ic_play_arrow, 0, 0, 0);
-                bufferStart.setEnabled(true);
+                bufferStart.setEnabled(canEnableBufferTest);
                 calibrationStart.setEnabled(true);
                 break;
 
@@ -967,7 +1101,7 @@
                 findViewById(R.id.zoomAndSaveControlPanel).setVisibility(View.INVISIBLE);
                 findViewById(R.id.resultSummary).setVisibility(View.INVISIBLE);
                 findViewById(R.id.glitchReportPanel).setVisibility(View.INVISIBLE);
-                bufferStart.setCompoundDrawablesWithIntrinsicBounds(
+                calibrationStart.setCompoundDrawablesWithIntrinsicBounds(
                         R.drawable.ic_stop, 0, 0, 0);
                 latencyStart.setEnabled(false);
                 bufferStart.setEnabled(false);
@@ -978,12 +1112,15 @@
                 findViewById(R.id.zoomAndSaveControlPanel).setVisibility(View.VISIBLE);
                 findViewById(R.id.resultSummary).setVisibility(View.VISIBLE);
                 findViewById(R.id.glitchReportPanel).setVisibility(View.VISIBLE);
-                bufferStart.setCompoundDrawablesWithIntrinsicBounds(
+                calibrationStart.setCompoundDrawablesWithIntrinsicBounds(
                         R.drawable.ic_play_arrow, 0, 0, 0);
                 latencyStart.setEnabled(true);
-                bufferStart.setEnabled(true);
+                bufferStart.setEnabled(canEnableBufferTest);
                 calibrationStart.setEnabled(true);
                 break;
+
+            default:
+                break;
         }
     }
 
@@ -1015,7 +1152,8 @@
 
         showToast("Calibrating sound level...");
         final SoundLevelCalibration calibration =
-                new SoundLevelCalibration(getApp().getSamplingRate(),
+                new SoundLevelCalibration(getApp().getAudioThreadType(),
+                        getApp().getSamplingRate(),
                         getApp().getPlayerBufferSizeInBytes(),
                         getApp().getRecorderBufferSizeInBytes(),
                         getApp().getMicSource(), getApp().getPerformanceMode(), this);
@@ -1096,11 +1234,14 @@
                         mAudioThread.runTest();
                     }
                     break;
-                case Constant.AUDIO_THREAD_TYPE_NATIVE:
+                case Constant.AUDIO_THREAD_TYPE_NATIVE_SLES:
+                case Constant.AUDIO_THREAD_TYPE_NATIVE_AAUDIO:
                     if (mNativeAudioThread != null) {
                         mNativeAudioThread.runTest();
                     }
                     break;
+                default:
+                    break;
             }
         }
     };
@@ -1122,6 +1263,14 @@
 
 
     private void startBufferTest() {
+        // In the interactive mode the buffer test button should be disabled
+        // if the buffer test isn't supported, but the function can also be invoked
+        // via an intent.
+        if (!canPerformBufferTest()) {
+            showToastImportant("Buffer test is not supported with this thread type");
+            log("Buffer test is not supported with this thread type");
+            return;
+        }
 
         if (!isBusy()) {
             mBarMasterLevel.setEnabled(false);
@@ -1140,11 +1289,14 @@
                     mAudioThread.runBufferTest();
                 }
                 break;
-            case Constant.AUDIO_THREAD_TYPE_NATIVE:
+            case Constant.AUDIO_THREAD_TYPE_NATIVE_SLES:
+            case Constant.AUDIO_THREAD_TYPE_NATIVE_AAUDIO:
                 if (mNativeAudioThread != null) {
                     mNativeAudioThread.runBufferTest();
                 }
                 break;
+            default:
+                break;
             }
         } else {
             int duration = 0;
@@ -1152,11 +1304,14 @@
             case Constant.AUDIO_THREAD_TYPE_JAVA:
                 duration = mAudioThread.getDurationInSeconds();
                 break;
-            case Constant.AUDIO_THREAD_TYPE_NATIVE:
+            case Constant.AUDIO_THREAD_TYPE_NATIVE_SLES:
+            case Constant.AUDIO_THREAD_TYPE_NATIVE_AAUDIO:
                 duration = mNativeAudioThread.getDurationInSeconds();
                 break;
+            default:
+                break;
             }
-            showToast("Long-run Test in progress, in total should take " +
+            showToastImportant("Long-run Test in progress, in total should take " +
                     Integer.toString(duration) + "s, please wait");
         }
     }
@@ -1251,7 +1406,7 @@
         startActivityForResult(FilenameIntent, RequestCode);
     }
 
-    private String getFileNamePrefix(){
+    private String getFileNamePrefix() {
         if (mIntentFileName != null && !mIntentFileName.isEmpty()) {
             return mIntentFileName;
         } else {
@@ -1283,10 +1438,13 @@
             bufferPeriodArray = mRecorderBufferPeriod.getBufferPeriodArray();
             maxBufferPeriod = mRecorderBufferPeriod.getMaxBufferPeriod();
             break;
-        case Constant.AUDIO_THREAD_TYPE_NATIVE:
+        case Constant.AUDIO_THREAD_TYPE_NATIVE_SLES:
+        case Constant.AUDIO_THREAD_TYPE_NATIVE_AAUDIO:
             bufferPeriodArray = mNativeRecorderBufferPeriodArray;
             maxBufferPeriod = mNativeRecorderMaxBufferPeriod;
             break;
+        default:
+            break;
         }
         saveBufferPeriod(Uri.parse(FILE_SAVE_PATH + fileName + "_recorderBufferPeriod.txt"),
                 bufferPeriodArray, maxBufferPeriod);
@@ -1302,10 +1460,13 @@
             bufferPeriodArray = mPlayerBufferPeriod.getBufferPeriodArray();
             maxBufferPeriod = mPlayerBufferPeriod.getMaxBufferPeriod();
             break;
-        case Constant.AUDIO_THREAD_TYPE_NATIVE:
+        case Constant.AUDIO_THREAD_TYPE_NATIVE_SLES:
+        case Constant.AUDIO_THREAD_TYPE_NATIVE_AAUDIO:
             bufferPeriodArray = mNativePlayerBufferPeriodArray;
             maxBufferPeriod = mNativePlayerMaxBufferPeriod;
             break;
+        default:
+            break;
         }
         saveBufferPeriod(Uri.parse(FILE_SAVE_PATH + fileName + "_playerBufferPeriod.txt")
                 , bufferPeriodArray, maxBufferPeriod);
@@ -1333,23 +1494,27 @@
 
         if (resultCode == Activity.RESULT_OK) {
             switch (requestCode) {
+
             case SAVE_TO_WAVE_REQUEST:
                 log("got SAVE TO WAV intent back!");
                 if (resultData != null) {
                     saveToWaveFile(resultData.getData());
                 }
                 break;
+
             case SAVE_TO_PNG_REQUEST:
                 log("got SAVE TO PNG intent back!");
                 if (resultData != null) {
                     saveScreenShot(resultData.getData());
                 }
                 break;
+
             case SAVE_TO_TXT_REQUEST:
                 if (resultData != null) {
                     saveTextToFile(resultData.getData(), getReport().toString());
                 }
                 break;
+
             case SAVE_RECORDER_BUFFER_PERIOD_TO_TXT_REQUEST:
                 if (resultData != null) {
                     int[] bufferPeriodArray = null;
@@ -1359,7 +1524,8 @@
                         bufferPeriodArray = mRecorderBufferPeriod.getBufferPeriodArray();
                         maxBufferPeriod = mRecorderBufferPeriod.getMaxBufferPeriod();
                         break;
-                    case Constant.AUDIO_THREAD_TYPE_NATIVE:
+                    case Constant.AUDIO_THREAD_TYPE_NATIVE_SLES:
+                    case Constant.AUDIO_THREAD_TYPE_NATIVE_AAUDIO:
                         bufferPeriodArray = mNativeRecorderBufferPeriodArray;
                         maxBufferPeriod = mNativeRecorderMaxBufferPeriod;
                         break;
@@ -1367,6 +1533,7 @@
                     saveBufferPeriod(resultData.getData(), bufferPeriodArray, maxBufferPeriod);
                 }
                 break;
+
             case SAVE_PLAYER_BUFFER_PERIOD_TO_TXT_REQUEST:
                 if (resultData != null) {
                     int[] bufferPeriodArray = null;
@@ -1376,7 +1543,8 @@
                         bufferPeriodArray = mPlayerBufferPeriod.getBufferPeriodArray();
                         maxBufferPeriod = mPlayerBufferPeriod.getMaxBufferPeriod();
                         break;
-                    case Constant.AUDIO_THREAD_TYPE_NATIVE:
+                    case Constant.AUDIO_THREAD_TYPE_NATIVE_SLES:
+                    case Constant.AUDIO_THREAD_TYPE_NATIVE_AAUDIO:
                         bufferPeriodArray = mNativePlayerBufferPeriodArray;
                         maxBufferPeriod = mNativePlayerMaxBufferPeriod;
                         break;
@@ -1384,6 +1552,7 @@
                     saveBufferPeriod(resultData.getData(), bufferPeriodArray, maxBufferPeriod);
                 }
                 break;
+
             case SAVE_RECORDER_BUFFER_PERIOD_TO_PNG_REQUEST:
                 if (resultData != null) {
                     int[] bufferPeriodArray = null;
@@ -1393,7 +1562,8 @@
                             bufferPeriodArray = mRecorderBufferPeriod.getBufferPeriodArray();
                             maxBufferPeriod = mRecorderBufferPeriod.getMaxBufferPeriod();
                             break;
-                        case Constant.AUDIO_THREAD_TYPE_NATIVE:
+                        case Constant.AUDIO_THREAD_TYPE_NATIVE_SLES:
+                        case Constant.AUDIO_THREAD_TYPE_NATIVE_AAUDIO:
                             bufferPeriodArray = mNativeRecorderBufferPeriodArray;
                             maxBufferPeriod = mNativeRecorderMaxBufferPeriod;
                             break;
@@ -1401,6 +1571,7 @@
                     saveHistogram(resultData.getData(), bufferPeriodArray, maxBufferPeriod);
                 }
                 break;
+
             case SAVE_PLAYER_BUFFER_PERIOD_TO_PNG_REQUEST:
                 if (resultData != null) {
                     int[] bufferPeriodArray = null;
@@ -1410,7 +1581,8 @@
                             bufferPeriodArray = mPlayerBufferPeriod.getBufferPeriodArray();
                             maxBufferPeriod = mPlayerBufferPeriod.getMaxBufferPeriod();
                             break;
-                        case Constant.AUDIO_THREAD_TYPE_NATIVE:
+                        case Constant.AUDIO_THREAD_TYPE_NATIVE_SLES:
+                        case Constant.AUDIO_THREAD_TYPE_NATIVE_AAUDIO:
                             bufferPeriodArray = mNativePlayerBufferPeriodArray;
                             maxBufferPeriod = mNativePlayerMaxBufferPeriod;
                             break;
@@ -1418,36 +1590,44 @@
                     saveHistogram(resultData.getData(), bufferPeriodArray, maxBufferPeriod);
                 }
                 break;
+
             case SAVE_PLAYER_BUFFER_PERIOD_TIMES_TO_TXT_REQUEST:
                 if (resultData != null) {
                     saveTextToFile(resultData.getData(),
                             mPlayerCallbackTimes.toString());
                 }
                 break;
+
             case SAVE_RECORDER_BUFFER_PERIOD_TIMES_TO_TXT_REQUEST:
                 if (resultData != null) {
                     saveTextToFile(resultData.getData(),
                             mRecorderCallbackTimes.toString());
                 }
                 break;
+
             case SAVE_GLITCH_OCCURRENCES_TO_TEXT_REQUEST:
                 if (resultData != null) {
                     saveGlitchOccurrences(resultData.getData(), mGlitchesData);
                 }
                 break;
+
             case SAVE_GLITCH_AND_CALLBACK_HEATMAP_REQUEST:
                 if (resultData != null && mGlitchesData != null && mRecorderCallbackTimes != null
-                        & mPlayerCallbackTimes != null){
+                        && mPlayerCallbackTimes != null) {
                     saveHeatMap(resultData.getData(), mRecorderCallbackTimes, mPlayerCallbackTimes,
                             GlitchesStringBuilder.getGlitchMilliseconds(mFFTSamplingSize,
                                     mFFTOverlapSamples, mGlitchesData, mSamplingRate),
                             mGlitchingIntervalTooLong, mBufferTestElapsedSeconds,
                             resultData.getData().toString());
                 }
+                break;
             case SETTINGS_ACTIVITY_REQUEST:
                 log("return from new settings!");
-
                 break;
+
+            default:
+                break;
+
             }
         }
     }
@@ -1476,7 +1656,7 @@
         mPlayerCallbackTimes = null;
         mRecorderCallbackTimes = null;
         mGlitchesData = null;
-        mWaveData = null;
+        mRetainedFragment.setWaveData(null);
     }
 
 
@@ -1537,7 +1717,8 @@
                 RecorderBufferPeriodIntent.putExtra("recorderBufferPeriodMax",
                         mRecorderBufferPeriod.getMaxBufferPeriod());
                 break;
-            case Constant.AUDIO_THREAD_TYPE_NATIVE:
+            case Constant.AUDIO_THREAD_TYPE_NATIVE_SLES:
+            case Constant.AUDIO_THREAD_TYPE_NATIVE_AAUDIO:
                 RecorderBufferPeriodIntent.putExtra("recorderBufferPeriodArray",
                         mNativeRecorderBufferPeriodArray);
                 RecorderBufferPeriodIntent.putExtra("recorderBufferPeriodMax",
@@ -1566,7 +1747,8 @@
                 PlayerBufferPeriodIntent.putExtra("playerBufferPeriodMax",
                         mPlayerBufferPeriod.getMaxBufferPeriod());
                 break;
-            case Constant.AUDIO_THREAD_TYPE_NATIVE:
+            case Constant.AUDIO_THREAD_TYPE_NATIVE_SLES:
+            case Constant.AUDIO_THREAD_TYPE_NATIVE_AAUDIO:
                 PlayerBufferPeriodIntent.putExtra("playerBufferPeriodArray",
                         mNativePlayerBufferPeriodArray);
                 PlayerBufferPeriodIntent.putExtra("playerBufferPeriodMax",
@@ -1599,9 +1781,9 @@
                         mGlitchingIntervalTooLong, estimateNumberOfGlitches(mGlitchesData)));
 
                 // display pop up window, dismissible with back button
-                popUp.showAtLocation(findViewById(R.id.linearLayoutMain), Gravity.TOP, 0, 0);
+                popUp.showAtLocation((View) findViewById(R.id.linearLayoutMain), Gravity.TOP, 0, 0);
             } else {
-                showToast("Please run the buffer test to get data");
+                showToastImportant("Please run the buffer test to get data");
             }
 
         } else {
@@ -1627,9 +1809,9 @@
                 reportText.setText(getReport().toString());
 
                 // display pop up window, dismissible with back button
-                popUp.showAtLocation(findViewById(R.id.linearLayoutMain), Gravity.TOP, 0, 0);
+                popUp.showAtLocation((View) findViewById(R.id.linearLayoutMain), Gravity.TOP, 0, 0);
             } else {
-                showToast("Please run the tests to get data");
+                showToastImportant("Please run the tests to get data");
             }
 
         } else {
@@ -1657,10 +1839,10 @@
                                 mGlitchingIntervalTooLong, mBufferTestElapsedSeconds,
                                 getResources().getString(R.string.heatTitle)));
 
-                popUp.showAtLocation(findViewById(R.id.linearLayoutMain), Gravity.TOP, 0, 0);
+                popUp.showAtLocation((View) findViewById(R.id.linearLayoutMain), Gravity.TOP, 0, 0);
 
             } else {
-                showToast("Please run the tests to get data");
+                showToastImportant("Please run the tests to get data");
             }
 
         } else {
@@ -1669,13 +1851,22 @@
     }
 
     /** Redraw the plot according to mWaveData */
-    void refreshPlots() {
-        mWavePlotView.setData(mWaveData, mSamplingRate);
+    private void refreshPlots() {
+        mWavePlotView.setData(mRetainedFragment.getWaveData(), mSamplingRate);
         mWavePlotView.redraw();
     }
 
+    static String audioThreadTypeToString(int audioThreadType) {
+        switch (audioThreadType) {
+            case Constant.AUDIO_THREAD_TYPE_JAVA: return "JAVA";
+            case Constant.AUDIO_THREAD_TYPE_NATIVE_SLES: return "NATIVE (SLES)";
+            case Constant.AUDIO_THREAD_TYPE_NATIVE_AAUDIO: return "NATIVE (AAUDIO)";
+        }
+        return "DEFAULT";
+    }
+
     /** Refresh the text on the main activity that shows the app states and audio settings. */
-    void refreshState() {
+    private void refreshState() {
         log("refreshState!");
         refreshSoundLevelBar();
 
@@ -1694,13 +1885,13 @@
         case Constant.AUDIO_THREAD_TYPE_JAVA:
             s.append(" Play Frames: " ).append(playerFrames);
             s.append(" Record Frames: ").append(recorderFrames);
-            s.append(" Audio: JAVA");
             break;
-        case Constant.AUDIO_THREAD_TYPE_NATIVE:
+        case Constant.AUDIO_THREAD_TYPE_NATIVE_SLES:
+        case Constant.AUDIO_THREAD_TYPE_NATIVE_AAUDIO:
             s.append(" Frames: ").append(playerFrames);
-            s.append(" Audio: NATIVE");
             break;
         }
+        s.append(" Audio: ").append(audioThreadTypeToString(mAudioThreadType));
 
         // mic source
         String micSourceName = getApp().getMicSourceString(mMicSource);
@@ -1758,12 +1949,27 @@
         Log.v(TAG, msg);
     }
 
-
     public void showToast(final String msg) {
+        doShowToast(msg, false);
+    }
+
+    public void showToastImportant(final String msg) {
+        doShowToast(msg, true);
+    }
+
+    private void doShowToast(final String msg, boolean isImportant) {
+        // If launched from an intent, do not show unimportant toasts.
+        // Intents are typically used by scripts, which run actions at high
+        // rate. On some versions of Android this causes the toasts to
+        // queue up, and they continue displaying long after the app has
+        // finished the actions.
+        if (mIntentRunning && !isImportant) {
+            return;
+        }
         // Make sure UI manipulations are only done on the UI thread
-        LoopbackActivity.this.runOnUiThread(new Runnable() {
+        runOnUiThread(new Runnable() {
             public void run() {
-                Toast toast = Toast.makeText(getApplicationContext(), msg, Toast.LENGTH_LONG);
+                Toast toast = Toast.makeText(getApplicationContext(), msg, Toast.LENGTH_SHORT);
                 toast.setGravity(Gravity.CENTER_VERTICAL | Gravity.CENTER_HORIZONTAL, 10, 10);
                 toast.show();
             }
@@ -1778,11 +1984,12 @@
 
 
     /** Save a .wav file of the wave plot on the main activity. */
-    void saveToWaveFile(Uri uri) {
-        if (mWaveData != null && mWaveData.length > 0) {
+    private void saveToWaveFile(Uri uri) {
+        double[] waveData = mRetainedFragment.getWaveData();
+        if (waveData != null && waveData.length > 0) {
             AudioFileOutput audioFileOutput = new AudioFileOutput(getApplicationContext(), uri,
                                                                   mSamplingRate);
-            boolean status = audioFileOutput.writeData(mWaveData);
+            boolean status = audioFileOutput.writeData(waveData);
             if (status) {
                 String wavFileAbsolutePath = getPath(uri);
                 // for some devices getPath fails
@@ -1794,7 +2001,7 @@
                 }
                 showToast("Finished exporting wave File " + wavFileAbsolutePath);
             } else {
-                showToast("Something failed saving wave file");
+                showToastImportant("Something failed saving wave file");
             }
 
         }
@@ -1802,7 +2009,7 @@
 
 
     /** Save a screenshot of the main activity. */
-    void saveScreenShot(Uri uri) {
+    private void saveScreenShot(Uri uri) {
         ParcelFileDescriptor parcelFileDescriptor = null;
         FileOutputStream outputStream;
         try {
@@ -1900,7 +2107,7 @@
      * Save a .txt file of the given buffer period's data.
      * First column is time, second column is count.
      */
-    void saveBufferPeriod(Uri uri, int[] bufferPeriodArray, int maxBufferPeriod) {
+    private void saveBufferPeriod(Uri uri, int[] bufferPeriodArray, int maxBufferPeriod) {
         ParcelFileDescriptor parcelFileDescriptor = null;
         FileOutputStream outputStream;
         if (bufferPeriodArray != null) {
@@ -1941,7 +2148,7 @@
     }
 
     /** Save a .txt file of various test results. */
-    void saveTextToFile(Uri uri, String outputText) {
+    private void saveTextToFile(Uri uri, String outputText) {
         ParcelFileDescriptor parcelFileDescriptor = null;
         FileOutputStream outputStream;
         try {
@@ -1981,15 +2188,7 @@
                 Constant.BYTES_PER_FRAME + endline);
         sb.append(INTENT_AUDIO_THREAD + " = " + mAudioThreadType + endline);
 
-        String audioType = "unknown";
-        switch (mAudioThreadType) {
-            case Constant.AUDIO_THREAD_TYPE_JAVA:
-                audioType = "JAVA";
-                break;
-            case Constant.AUDIO_THREAD_TYPE_NATIVE:
-                audioType = "NATIVE";
-                break;
-        }
+        String audioType = audioThreadTypeToString(mAudioThreadType);
         sb.append(INTENT_AUDIO_THREAD + "_String = " + audioType + endline);
 
         sb.append(INTENT_MIC_SOURCE + " = " + mMicSource + endline);
@@ -1998,6 +2197,7 @@
         sb.append(INTENT_AUDIO_LEVEL + " = " + mSoundLevel + endline);
 
         switch (mTestType) {
+
             case Constant.LOOPBACK_PLUG_AUDIO_THREAD_TEST_TYPE_LATENCY:
                 sb.append(INTENT_IGNORE_FIRST_FRAMES + " = " + mIgnoreFirstFrames + endline);
                 if (mCorrelation.isValid()) {
@@ -2013,6 +2213,7 @@
                 sb.append(String.format("Average = %.4f", mCorrelation.mAverage) + endline);
                 sb.append(String.format("RMS = %.4f", mCorrelation.mRms) + endline);
                 break;
+
             case Constant.LOOPBACK_PLUG_AUDIO_THREAD_TEST_TYPE_BUFFER_PERIOD:
                 sb.append("Buffer Test Duration (s) = " + mBufferTestDurationInSeconds + endline);
 
@@ -2026,7 +2227,8 @@
                         recorderBufferDataMax = mRecorderBufferPeriod.getMaxBufferPeriod();
                         recorderBufferDataStdDev = mRecorderBufferPeriod.getStdDevBufferPeriod();
                         break;
-                    case Constant.AUDIO_THREAD_TYPE_NATIVE:
+                    case Constant.AUDIO_THREAD_TYPE_NATIVE_SLES:
+                    case Constant.AUDIO_THREAD_TYPE_NATIVE_AAUDIO:
                         recorderBufferData = mNativeRecorderBufferPeriodArray;
                         recorderBufferDataMax = mNativeRecorderMaxBufferPeriod;
                         recorderBufferDataStdDev = mNativeRecorderStdDevBufferPeriod;
@@ -2041,7 +2243,7 @@
                             usefulDataRange);
                     PerformanceMeasurement measurement = new PerformanceMeasurement(
                             mRecorderCallbackTimes.getExpectedBufferPeriod(), usefulBufferData);
-                    float recorderPercentAtExpected =
+                    double recorderPercentAtExpected =
                             measurement.percentBufferPeriodsAtExpected();
                     double benchmark = measurement.computeWeightedBenchmark();
                     int outliers = measurement.countOutliers();
@@ -2074,7 +2276,8 @@
                         playerBufferDataMax = mPlayerBufferPeriod.getMaxBufferPeriod();
                         playerBufferDataStdDev = mPlayerBufferPeriod.getStdDevBufferPeriod();
                         break;
-                    case Constant.AUDIO_THREAD_TYPE_NATIVE:
+                    case Constant.AUDIO_THREAD_TYPE_NATIVE_SLES:
+                    case Constant.AUDIO_THREAD_TYPE_NATIVE_AAUDIO:
                         playerBufferData = mNativePlayerBufferPeriodArray;
                         playerBufferDataMax = mNativePlayerMaxBufferPeriod;
                         playerBufferDataStdDev = mNativePlayerStdDevBufferPeriod;
@@ -2091,7 +2294,7 @@
                             usefulDataRange);
                     PerformanceMeasurement measurement = new PerformanceMeasurement(
                             mPlayerCallbackTimes.getExpectedBufferPeriod(), usefulBufferData);
-                    float playerPercentAtExpected = measurement.percentBufferPeriodsAtExpected();
+                    double playerPercentAtExpected = measurement.percentBufferPeriodsAtExpected();
                     double benchmark = measurement.computeWeightedBenchmark();
                     int outliers = measurement.countOutliers();
                     sb.append("Player Buffer Periods At Expected = "
@@ -2113,8 +2316,8 @@
                 }
                 // report glitches per hour
                 int numberOfGlitches = estimateNumberOfGlitches(mGlitchesData);
-                float testDurationInHours = mBufferTestElapsedSeconds
-                        / (float) Constant.SECONDS_PER_HOUR;
+                double testDurationInHours = mBufferTestElapsedSeconds
+                        / (double) Constant.SECONDS_PER_HOUR;
 
                 // Report Glitches Per Hour if sufficient data available, ie at least half an hour
                 if (testDurationInHours >= .5) {
@@ -2232,7 +2435,7 @@
      * Check whether we have the RECORD_AUDIO permission
      * @return true if we do
      */
-    private boolean hasRecordAudioPermission(){
+    private boolean hasRecordAudioPermission() {
         boolean hasPermission = (ContextCompat.checkSelfPermission(this,
                 Manifest.permission.RECORD_AUDIO) == PackageManager.PERMISSION_GRANTED);
 
@@ -2243,7 +2446,7 @@
     /**
      * Requests the RECORD_AUDIO permission from the user
      */
-    private void requestRecordAudioPermission(int requestCode){
+    private void requestRecordAudioPermission(int requestCode) {
 
         String requiredPermission = Manifest.permission.RECORD_AUDIO;
 
@@ -2252,8 +2455,8 @@
         if (ActivityCompat.shouldShowRequestPermissionRationale(this,
                 requiredPermission)) {
 
-            showToast("This app needs to record audio through the microphone to test the device's "+
-                    "performance");
+            showToastImportant("This app needs to record audio through the microphone to test "+
+                    "the device's performance");
         }
 
         // request the permission.
@@ -2316,8 +2519,6 @@
     }
 
     private void restoreInstanceState(Bundle in) {
-        mWaveData = in.getDoubleArray("mWaveData");
-
         mTestType = in.getInt("mTestType");
         mMicSource = in.getInt("mMicSource");
         mAudioThreadType = in.getInt("mAudioThreadType");
@@ -2330,7 +2531,7 @@
         mTestStartTimeString = in.getString("mTestStartTimeString");
 
         mGlitchesData = in.getIntArray("mGlitchesData");
-        if(mGlitchesData != null) {
+        if (mGlitchesData != null) {
             mGlitchingIntervalTooLong = in.getBoolean("mGlitchingIntervalTooLong");
             mFFTSamplingSize = in.getInt("mFFTSamplingSize");
             mFFTOverlapSamples = in.getInt("mFFTOverlapSamples");
@@ -2343,7 +2544,7 @@
             findViewById(R.id.glitchReportPanel).setVisibility(View.VISIBLE);
         }
 
-        if(mWaveData != null) {
+        if (mRetainedFragment.getWaveData() != null) {
             mCorrelation = in.getParcelable("mCorrelation");
             mPlayerBufferPeriod = in.getParcelable("mPlayerBufferPeriod");
             mRecorderBufferPeriod = in.getParcelable("mRecorderBufferPeriod");
@@ -2355,7 +2556,7 @@
             mNativeRecorderBufferPeriodArray = in.getIntArray("mNativeRecorderBufferPeriodArray");
             mNativeRecorderMaxBufferPeriod = in.getInt("mNativeRecorderMaxBufferPeriod");
 
-            mWavePlotView.setData(mWaveData, mSamplingRate);
+            mWavePlotView.setData(mRetainedFragment.getWaveData(), mSamplingRate);
             refreshState();
             findViewById(R.id.zoomAndSaveControlPanel).setVisibility(View.VISIBLE);
             findViewById(R.id.resultSummary).setVisibility(View.VISIBLE);
@@ -2365,8 +2566,6 @@
     @Override
     protected void onSaveInstanceState(Bundle out) {
         super.onSaveInstanceState(out);
-        // TODO: keep larger pieces of data in a fragment to speed up response to rotation
-        out.putDoubleArray("mWaveData", mWaveData);
 
         out.putInt("mTestType", mTestType);
         out.putInt("mMicSource", mMicSource);
@@ -2399,4 +2598,69 @@
         out.putInt("mBufferTestDurationInSeconds", mBufferTestDurationInSeconds);
         out.putInt("mBufferTestWavePlotDurationInSeconds", mBufferTestWavePlotDurationInSeconds);
     }
+
+    private void waitForUsbRoute() {
+        log("Start checking for USB Route connection");
+        AudioManager am = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
+        long startTime = System.currentTimeMillis();
+        int iter = 0;
+        while (true) {
+            if (System.currentTimeMillis() - startTime > 15 * 1000) {
+                log("15 Seconds has elapsed before USB_AUDIO_ROUTE is detected, continue test.");
+                break;
+            }
+            iter++;
+            AudioDeviceInfo[] devices;
+            boolean usb_available = false;
+            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) {
+                devices = am.getDevices(AudioManager.GET_DEVICES_INPUTS);
+                for (AudioDeviceInfo devInfo : devices) {
+
+                    if (devInfo.getType() != AudioDeviceInfo.TYPE_BUILTIN_MIC && devInfo.getType() != AudioDeviceInfo.TYPE_TELEPHONY) {
+                        log(" USB Check iteration: " + String.valueOf(iter));
+                        log(" USB Check get type: " + String.valueOf(devInfo.getType()));
+                    }
+                    if (devInfo.getType() == AudioDeviceInfo.TYPE_USB_DEVICE ||
+                            devInfo.getType() == AudioDeviceInfo.TYPE_USB_HEADSET) {
+                        log(" USB Headset detected, continue test");
+                        usb_available = true;
+                        break;
+                    }
+                }
+
+            } else {
+                log("This system version does not support USB Audio Route check, continue test");
+                break;
+            }
+
+            log(" USB-> Check MediaRoute");
+            UsbManager manager = (UsbManager) getSystemService(Context.USB_SERVICE);
+            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB_MR1) {
+                HashMap<String, UsbDevice> usbDevices = manager.getDeviceList();
+                for (Map.Entry<String, UsbDevice> entry : usbDevices.entrySet()) {
+                    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
+                        if (entry.getValue().getProductName().contains("USB Audio")) {
+                            log(" USB Headset detected inside UsbManager, continue test");
+                            usb_available = true;
+                            log(" USB list: key " + entry.getKey() + " and value: " + String.valueOf(entry.getValue()));
+                            break;
+                        }
+                    }
+                }
+
+            }
+            if (usb_available) {
+                long elapsed = System.currentTimeMillis() - startTime;
+                log("USB detection takes " + String.valueOf(elapsed) + " ms");
+                break;
+            }
+            try {
+                Thread.sleep(500);
+            } catch (InterruptedException e) {
+                log("Got interrupted during USB Audio Route check");
+                e.printStackTrace();
+            }
+        }
+    }
+
 }
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/LoopbackApplication.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/LoopbackApplication.java
index f38ef5f..cd888b3 100644
--- a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/LoopbackApplication.java
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/LoopbackApplication.java
@@ -20,10 +20,6 @@
 import android.content.Context;
 import android.content.pm.PackageManager;
 import android.content.res.Configuration;
-import android.media.AudioFormat;
-import android.media.AudioManager;
-import android.media.AudioRecord;
-import android.media.AudioTrack;
 import android.media.MediaRecorder;
 import android.os.Build;
 import android.util.Log;
@@ -38,10 +34,9 @@
     private static final String TAG = "LoopbackApplication";
 
     // here defines all the initial setting values, some get modified in ComputeDefaults()
-    private int mSamplingRate = 48000;
+    private TestSettings mSettings = new TestSettings(48000 /*samplingRate*/,
+            0 /*playerBufferSizeInBytes*/, 0 /*recorderBuffSizeInBytes*/);
     private int mChannelIndex = -1;
-    private int mPlayerBufferSizeInBytes = 0; // for both native and java
-    private int mRecorderBuffSizeInBytes = 0; // for both native and java
     private int mAudioThreadType = Constant.AUDIO_THREAD_TYPE_JAVA; //0:Java, 1:Native (JNI)
     private int mMicSource = 3; //maps to MediaRecorder.AudioSource.VOICE_RECOGNITION;
     private int mPerformanceMode = -1; // DEFAULT
@@ -56,8 +51,11 @@
     private int mNumStateCaptures = Constant.DEFAULT_NUM_CAPTURES;
 
     public void setDefaults() {
+        // Prefer SLES until buffer test is implemented for AAudio.
         if (isSafeToUseSles()) {
-            mAudioThreadType = Constant.AUDIO_THREAD_TYPE_NATIVE;
+            mAudioThreadType = Constant.AUDIO_THREAD_TYPE_NATIVE_SLES;
+        } else if (isSafeToUseAAudio()) {
+            mAudioThreadType = Constant.AUDIO_THREAD_TYPE_NATIVE_AAUDIO;
         } else {
             mAudioThreadType = Constant.AUDIO_THREAD_TYPE_JAVA;
         }
@@ -66,11 +64,11 @@
     }
 
     int getSamplingRate() {
-        return mSamplingRate;
+        return mSettings.getSamplingRate();
     }
 
     void setSamplingRate(int samplingRate) {
-        mSamplingRate = clamp(samplingRate, Constant.SAMPLING_RATE_MIN, Constant.SAMPLING_RATE_MAX);
+        mSettings.setSamplingRate(samplingRate);
     }
 
     int getChannelIndex() { return mChannelIndex; }
@@ -83,9 +81,12 @@
 
 
     void setAudioThreadType(int audioThreadType) {
-        if (isSafeToUseSles() && audioThreadType != Constant.AUDIO_THREAD_TYPE_JAVA) {
-            //safe to use native and Java thread not selected
-            mAudioThreadType = Constant.AUDIO_THREAD_TYPE_NATIVE;
+        if (isSafeToUseAAudio() && audioThreadType == Constant.AUDIO_THREAD_TYPE_NATIVE_AAUDIO) {
+            mAudioThreadType = Constant.AUDIO_THREAD_TYPE_NATIVE_AAUDIO;
+        } else if (isSafeToUseSles() && (
+                        audioThreadType == Constant.AUDIO_THREAD_TYPE_NATIVE_SLES ||
+                        audioThreadType == Constant.AUDIO_THREAD_TYPE_NATIVE_AAUDIO)) {
+            mAudioThreadType = Constant.AUDIO_THREAD_TYPE_NATIVE_SLES;
         } else {
             mAudioThreadType = Constant.AUDIO_THREAD_TYPE_JAVA;
         }
@@ -130,7 +131,7 @@
                 }
                 break;
             }
-        } else if (threadType == Constant.AUDIO_THREAD_TYPE_NATIVE) {
+        } else if (threadType == Constant.AUDIO_THREAD_TYPE_NATIVE_SLES) {
             // FIXME taken from OpenSLES_AndroidConfiguration.h
             switch (source) {
             default:
@@ -161,6 +162,7 @@
                 }
                 break;
             }
+            // Doesn't matter for AUDIO_THREAD_TYPE_NATIVE_AAUDIO.
         }
 
         return mappedSource;
@@ -219,7 +221,9 @@
     }
 
 
-    void setPerformanceMode(int performanceMode) { mPerformanceMode = performanceMode; }
+    void setPerformanceMode(int performanceMode) {
+        mPerformanceMode = performanceMode;
+    }
 
     int getIgnoreFirstFrames() {
         return mIgnoreFirstFrames;
@@ -230,24 +234,21 @@
     }
 
     int getPlayerBufferSizeInBytes() {
-        return mPlayerBufferSizeInBytes;
+        return mSettings.getPlayerBufferSizeInBytes();
     }
 
-
     void setPlayerBufferSizeInBytes(int playerBufferSizeInBytes) {
-        mPlayerBufferSizeInBytes = clamp(playerBufferSizeInBytes, Constant.PLAYER_BUFFER_FRAMES_MIN,
-                Constant.PLAYER_BUFFER_FRAMES_MAX);
+        mSettings.setPlayerBufferSizeInBytes(playerBufferSizeInBytes);
     }
 
 
     int getRecorderBufferSizeInBytes() {
-        return mRecorderBuffSizeInBytes;
+        return mSettings.getRecorderBufferSizeInBytes();
     }
 
 
     void setRecorderBufferSizeInBytes(int recorderBufferSizeInBytes) {
-        mRecorderBuffSizeInBytes = clamp(recorderBufferSizeInBytes,
-                Constant.RECORDER_BUFFER_FRAMES_MIN, Constant.RECORDER_BUFFER_FRAMES_MAX);
+        mSettings.setRecorderBufferSizeInBytes(recorderBufferSizeInBytes);
     }
 
 
@@ -257,7 +258,7 @@
 
 
     void setBufferTestDuration(int bufferTestDurationInSeconds) {
-        mBufferTestDurationInSeconds = clamp(bufferTestDurationInSeconds,
+        mBufferTestDurationInSeconds = Utilities.clamp(bufferTestDurationInSeconds,
                 Constant.BUFFER_TEST_DURATION_SECONDS_MIN,
                 Constant.BUFFER_TEST_DURATION_SECONDS_MAX);
     }
@@ -269,7 +270,7 @@
 
 
     void setBufferTestWavePlotDuration(int bufferTestWavePlotDurationInSeconds) {
-        mBufferTestWavePlotDurationInSeconds = clamp(bufferTestWavePlotDurationInSeconds,
+        mBufferTestWavePlotDurationInSeconds = Utilities.clamp(bufferTestWavePlotDurationInSeconds,
                 Constant.BUFFER_TEST_WAVE_PLOT_DURATION_SECONDS_MIN,
                 Constant.BUFFER_TEST_WAVE_PLOT_DURATION_SECONDS_MAX);
     }
@@ -279,15 +280,16 @@
     }
 
     void setNumberOfLoadThreads(int numberOfLoadThreads) {
-        mNumberOfLoadThreads = clamp(numberOfLoadThreads, Constant.MIN_NUM_LOAD_THREADS,
+        mNumberOfLoadThreads = Utilities.clamp(numberOfLoadThreads, Constant.MIN_NUM_LOAD_THREADS,
                 Constant.MAX_NUM_LOAD_THREADS);
     }
 
-    public void setNumberOfCaptures (int num){
-        mNumStateCaptures = clamp(num, Constant.MIN_NUM_CAPTURES, Constant.MAX_NUM_CAPTURES);
+    public void setNumberOfCaptures (int num) {
+        mNumStateCaptures = Utilities.clamp(num, Constant.MIN_NUM_CAPTURES,
+                Constant.MAX_NUM_CAPTURES);
     }
 
-    public void setCaptureSysTraceEnabled (boolean enabled){
+    public void setCaptureSysTraceEnabled (boolean enabled) {
         mCaptureSysTraceEnabled = enabled;
     }
 
@@ -295,7 +297,7 @@
         mCaptureBugreportEnabled = enabled;
     }
 
-    public void setCaptureWavsEnabled (boolean enabled){
+    public void setCaptureWavsEnabled(boolean enabled) {
         mCaptureWavSnippetsEnabled = enabled;
     }
 
@@ -327,60 +329,27 @@
         return mCaptureWavSnippetsEnabled;
     }
 
-    /**
-     * Returns value if value is within inclusive bounds min through max
-     * otherwise returns min or max according to if value is less than or greater than the range
-     */
-    private int clamp(int value, int min, int max) {
-
-        if (max < min) throw new UnsupportedOperationException("min must be <= max");
-
-        if (value < min) return min;
-        else if (value > max) return max;
-        else return value;
-    }
-
 
     /** Compute Default audio settings. */
     public void computeDefaults() {
-        int samplingRate = AudioTrack.getNativeOutputSampleRate(AudioManager.STREAM_MUSIC);
-        setSamplingRate(samplingRate);
-
-        if (mAudioThreadType == Constant.AUDIO_THREAD_TYPE_NATIVE) {
-
-            int minBufferSizeInFrames;
-            if (isSafeToUseGetProperty()) {
-                AudioManager am = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
-                String value = am.getProperty(AudioManager.PROPERTY_OUTPUT_FRAMES_PER_BUFFER);
-                minBufferSizeInFrames = Integer.parseInt(value);
-            } else {
-                minBufferSizeInFrames = 1024;
-                log("On button test micSource Name: ");
-            }
-            int minBufferSizeInBytes = Constant.BYTES_PER_FRAME * minBufferSizeInFrames;
-
-            setPlayerBufferSizeInBytes(minBufferSizeInBytes);
-            setRecorderBufferSizeInBytes(minBufferSizeInBytes);
+        if (mAudioThreadType == Constant.AUDIO_THREAD_TYPE_NATIVE_SLES ||
+            mAudioThreadType == Constant.AUDIO_THREAD_TYPE_NATIVE_AAUDIO) {
+            mSettings = NativeAudioThread.computeDefaultSettings(
+                    this, mAudioThreadType, mPerformanceMode);
         } else {
-            int minPlayerBufferSizeInBytes = AudioTrack.getMinBufferSize(samplingRate,
-                    AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT);
-            setPlayerBufferSizeInBytes(minPlayerBufferSizeInBytes);
-
-            int minRecorderBufferSizeInBytes =  AudioRecord.getMinBufferSize(samplingRate,
-                    AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
-            setRecorderBufferSizeInBytes(minRecorderBufferSizeInBytes);
+            mSettings = LoopbackAudioThread.computeDefaultSettings();
         }
-
     }
 
 
     String getSystemInfo() {
         String info = null;
         try {
-            int versionCode = getApplicationContext().getPackageManager().getPackageInfo(
-                              getApplicationContext().getPackageName(), 0).versionCode;
-            String versionName = getApplicationContext().getPackageManager().getPackageInfo(
-                                 getApplicationContext().getPackageName(), 0).versionName;
+            Context context = getApplicationContext();
+            android.content.pm.PackageInfo packageInfo = context.getPackageManager().getPackageInfo(
+                    context.getPackageName(), 0);
+            int versionCode = packageInfo.versionCode;
+            String versionName = packageInfo.versionName;
             info = "App ver. " + versionCode + "." + versionName + " | " + Build.MODEL + " | " +
                     Build.FINGERPRINT;
         } catch (PackageManager.NameNotFoundException e) {
@@ -391,22 +360,23 @@
     }
 
 
-    /** Check if it's safe to use Open SLES. */
-    boolean isSafeToUseSles() {
-        return  Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD;
+    /** Check if it's safe to use OpenSL ES. */
+    static boolean isSafeToUseSles() {
+        return Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD;
+    }
+
+    /** Check if it's safe to use AAudio. */
+    static boolean isSafeToUseAAudio() {
+        return Build.VERSION.SDK_INT >= Build.VERSION_CODES.O;
     }
 
 
-    /** Check if it's safe to use getProperty(). */
-    boolean isSafeToUseGetProperty() {
-        return  Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1;
-    }
-
-
+/*
     @Override
     public void onConfigurationChanged(Configuration newConfig) {
         super.onConfigurationChanged(newConfig);
     }
+*/
 
 
     @Override
@@ -417,19 +387,24 @@
     }
 
 
+/*
     @Override
     public void onLowMemory() {
         super.onLowMemory();
     }
+*/
 
 
+/*
     @Override
     public void onTerminate() {
         super.onTerminate();
     }
+*/
 
 
     private static void log(String msg) {
         Log.v(TAG, msg);
     }
+
 }
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/LoopbackAudioThread.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/LoopbackAudioThread.java
index b4c3b3a..12291ee 100644
--- a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/LoopbackAudioThread.java
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/LoopbackAudioThread.java
@@ -20,6 +20,7 @@
 import android.media.AudioDeviceInfo;
 import android.media.AudioFormat;
 import android.media.AudioManager;
+import android.media.AudioRecord;
 import android.media.AudioTrack;
 import android.media.MediaRecorder;
 import android.os.Build;
@@ -79,6 +80,15 @@
     private final CaptureHolder mCaptureHolder;
     private boolean        mIsAdjustingSoundLevel = true; // only used in buffer test
 
+    public static TestSettings computeDefaultSettings() {
+        int samplingRate = AudioTrack.getNativeOutputSampleRate(AudioManager.STREAM_MUSIC);
+        int minPlayerBufferSizeInBytes = AudioTrack.getMinBufferSize(samplingRate,
+                AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT);
+        int minRecorderBufferSizeInBytes = AudioRecord.getMinBufferSize(samplingRate,
+                AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
+        return new TestSettings(samplingRate, minPlayerBufferSizeInBytes,
+                minRecorderBufferSizeInBytes);
+    }
 
     public LoopbackAudioThread(int samplingRate, int playerBufferInBytes, int recorderBufferInBytes,
                                int micSource, BufferPeriod recorderBufferPeriod,
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/NativeAudioThread.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/NativeAudioThread.java
index 95d5899..d2ed18b 100644
--- a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/NativeAudioThread.java
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/NativeAudioThread.java
@@ -19,9 +19,13 @@
 import java.nio.ByteBuffer;
 import java.util.Arrays;
 
-import android.util.Log;
+import android.content.Context;
+import android.media.AudioManager;
+import android.media.AudioTrack;
+import android.os.Build;
 import android.os.Handler;
 import android.os.Message;
+import android.util.Log;
 
 
 /**
@@ -50,6 +54,7 @@
     public double[] mSamples; // store samples that will be shown on WavePlotView
     int             mSamplesIndex;
 
+    private int mThreadType;
     private int mTestType;
     private int mSamplingRate;
     private int mMinPlayerBufferSizeInBytes = 0;
@@ -85,11 +90,36 @@
     private PipeByteBuffer        mPipeByteBuffer;
     private GlitchDetectionThread mGlitchDetectionThread;
 
+    /** Check if it's safe to use getProperty(). */
+    static boolean isSafeToUseGetProperty() {
+        return Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1;
+    }
 
-    public NativeAudioThread(int samplingRate, int playerBufferInBytes, int recorderBufferInBytes,
-                             int micSource, int performanceMode, int testType, int bufferTestDurationInSeconds,
+    public static TestSettings computeDefaultSettings(Context context,
+            int threadType, int performanceMode) {
+        TestSettings nativeResult = nativeComputeDefaultSettings(
+                Constant.BYTES_PER_FRAME, threadType, performanceMode);
+        if (nativeResult != null) {
+            return nativeResult;
+        }
+
+        int samplingRate = AudioTrack.getNativeOutputSampleRate(AudioManager.STREAM_MUSIC);
+        int minBufferSizeInFrames = 1024;
+        if (isSafeToUseGetProperty()) {
+            AudioManager am = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
+            String value = am.getProperty(AudioManager.PROPERTY_OUTPUT_FRAMES_PER_BUFFER);
+            minBufferSizeInFrames = Integer.parseInt(value);
+        }
+        int minBufferSizeInBytes = Constant.BYTES_PER_FRAME * minBufferSizeInFrames;
+        return new TestSettings(samplingRate, minBufferSizeInBytes, minBufferSizeInBytes);
+    }
+
+    public NativeAudioThread(int threadType, int samplingRate, int playerBufferInBytes,
+                             int recorderBufferInBytes, int micSource, int performanceMode,
+                             int testType, int bufferTestDurationInSeconds,
                              int bufferTestWavePlotDurationInSeconds, int ignoreFirstFrames,
                              CaptureHolder captureHolder) {
+        mThreadType = threadType;
         mSamplingRate = samplingRate;
         mMinPlayerBufferSizeInBytes = playerBufferInBytes;
         mMinRecorderBuffSizeInBytes = recorderBufferInBytes;
@@ -104,6 +134,7 @@
     }
 
     public NativeAudioThread(NativeAudioThread old) {
+        mThreadType = old.mThreadType;
         mSamplingRate = old.mSamplingRate;
         mMinPlayerBufferSizeInBytes = old.mMinPlayerBufferSizeInBytes;
         mMinRecorderBuffSizeInBytes = old.mMinRecorderBuffSizeInBytes;
@@ -130,25 +161,28 @@
 
 
     //jni calls
-    public native long  slesInit(int samplingRate, int frameCount, int micSource,
+    public static native TestSettings nativeComputeDefaultSettings(
+            int bytesPerFrame, int threadType, int performanceMode);
+    public native long  nativeInit(int threadType,
+                                 int samplingRate, int frameCount, int micSource,
                                  int performanceMode,
                                  int testType, double frequency1, ByteBuffer byteBuffer,
                                  short[] sincTone, int maxRecordedLateCallbacks,
                                  int ignoreFirstFrames);
-    public native int   slesProcessNext(long sles_data, double[] samples, long offset);
-    public native int   slesDestroy(long sles_data);
+    public native int   nativeProcessNext(long nativeHandle, double[] samples, long offset);
+    public native int   nativeDestroy(long nativeHandle);
 
     // to get buffer period data
-    public native int[]  slesGetRecorderBufferPeriod(long sles_data);
-    public native int    slesGetRecorderMaxBufferPeriod(long sles_data);
-    public native double slesGetRecorderVarianceBufferPeriod(long sles_data);
-    public native int[]  slesGetPlayerBufferPeriod(long sles_data);
-    public native int    slesGetPlayerMaxBufferPeriod(long sles_data);
-    public native double slesGetPlayerVarianceBufferPeriod(long sles_data);
-    public native BufferCallbackTimes slesGetPlayerCallbackTimeStamps(long sles_data);
-    public native BufferCallbackTimes slesGetRecorderCallbackTimeStamps(long sles_data);
+    public native int[]  nativeGetRecorderBufferPeriod(long nativeHandle);
+    public native int    nativeGetRecorderMaxBufferPeriod(long nativeHandle);
+    public native double nativeGetRecorderVarianceBufferPeriod(long nativeHandle);
+    public native int[]  nativeGetPlayerBufferPeriod(long nativeHandle);
+    public native int    nativeGetPlayerMaxBufferPeriod(long nativeHandle);
+    public native double nativeGetPlayerVarianceBufferPeriod(long nativeHandle);
+    public native BufferCallbackTimes nativeGetPlayerCallbackTimeStamps(long nativeHandle);
+    public native BufferCallbackTimes nativeGetRecorderCallbackTimeStamps(long nativeHandle);
 
-    public native int slesGetCaptureRank(long sles_data);
+    public native int nativeGetCaptureRank(long nativeHandle);
 
 
     public void run() {
@@ -174,12 +208,13 @@
             mMessageHandler.sendMessage(msg);
         }
 
-        //generate sinc tone use for loopback test
+        // generate windowed tone used for loopback test
         short loopbackTone[] = new short[mMinPlayerBufferSizeInBytes / Constant.BYTES_PER_FRAME];
         if (mTestType == Constant.LOOPBACK_PLUG_AUDIO_THREAD_TEST_TYPE_LATENCY) {
             ToneGeneration sincToneGen = new RampedSineTone(mSamplingRate,
                     Constant.LOOPBACK_FREQUENCY);
-            sincToneGen.generateTone(loopbackTone, loopbackTone.length);
+            int sincLength = Math.min(Constant.LOOPBACK_SAMPLE_FRAMES, loopbackTone.length);
+            sincToneGen.generateTone(loopbackTone, sincLength);
         }
 
         log(String.format("about to init, sampling rate: %d, buffer:%d", mSamplingRate,
@@ -188,15 +223,15 @@
         // mPipeByteBuffer is only used in buffer test
         mPipeByteBuffer = new PipeByteBuffer(Constant.MAX_SHORTS);
         long startTimeMs = System.currentTimeMillis();
-        long sles_data = slesInit(mSamplingRate,
+        long nativeHandle = nativeInit(mThreadType, mSamplingRate,
                 mMinPlayerBufferSizeInBytes / Constant.BYTES_PER_FRAME, mMicSource,
                 mPerformanceMode, mTestType,
                 mFrequency1, mPipeByteBuffer.getByteBuffer(), loopbackTone,
                 mBufferTestDurationInSeconds * Constant.MAX_RECORDED_LATE_CALLBACKS_PER_SECOND,
                 mIgnoreFirstFrames);
-        log(String.format("sles_data = 0x%X", sles_data));
+        log(String.format("nativeHandle = 0x%X", nativeHandle));
 
-        if (sles_data == 0) {
+        if (nativeHandle == 0) {
             //notify error!!
             log(" ERROR at JNI initialization");
             if (mMessageHandler != null) {
@@ -235,7 +270,7 @@
                 // retrieve native recorder's recorded data
                 for (int ii = 0; ii < latencyTestDurationInSeconds; ii++) {
                     log(String.format("block %d...", ii));
-                    int samplesRead = slesProcessNext(sles_data, mSamples, offset);
+                    int samplesRead = nativeProcessNext(nativeHandle, mSamples, offset);
                     totalSamplesRead += samplesRead;
                     offset += samplesRead;
                     log(" [" + ii + "] jni samples read:" + samplesRead +
@@ -253,7 +288,7 @@
                     if (mIsRequestStop) {
                         break;
                     } else {
-                        int rank = slesGetCaptureRank(sles_data);
+                        int rank = nativeGetCaptureRank(nativeHandle);
                         if (rank > 0) {
                             //log("Late callback detected");
                             mCaptureHolder.captureState(rank);
@@ -274,15 +309,17 @@
             }
 
             // collect buffer period data
-            mRecorderBufferPeriod = slesGetRecorderBufferPeriod(sles_data);
-            mRecorderMaxBufferPeriod = slesGetRecorderMaxBufferPeriod(sles_data);
-            mRecorderStdDevBufferPeriod = Math.sqrt(slesGetRecorderVarianceBufferPeriod(sles_data));
-            mPlayerBufferPeriod = slesGetPlayerBufferPeriod(sles_data);
-            mPlayerMaxBufferPeriod = slesGetPlayerMaxBufferPeriod(sles_data);
-            mPlayerStdDevBufferPeriod = Math.sqrt(slesGetPlayerVarianceBufferPeriod(sles_data));
+            mRecorderBufferPeriod = nativeGetRecorderBufferPeriod(nativeHandle);
+            mRecorderMaxBufferPeriod = nativeGetRecorderMaxBufferPeriod(nativeHandle);
+            mRecorderStdDevBufferPeriod = Math.sqrt(nativeGetRecorderVarianceBufferPeriod(
+                    nativeHandle));
+            mPlayerBufferPeriod = nativeGetPlayerBufferPeriod(nativeHandle);
+            mPlayerMaxBufferPeriod = nativeGetPlayerMaxBufferPeriod(nativeHandle);
+            mPlayerStdDevBufferPeriod = Math.sqrt(nativeGetPlayerVarianceBufferPeriod(
+                    nativeHandle));
 
-            mPlayerCallbackTimes = slesGetPlayerCallbackTimeStamps(sles_data);
-            mRecorderCallbackTimes = slesGetRecorderCallbackTimeStamps(sles_data);
+            mPlayerCallbackTimes = nativeGetPlayerCallbackTimeStamps(nativeHandle);
+            mRecorderCallbackTimes = nativeGetRecorderCallbackTimeStamps(nativeHandle);
 
             // get glitches data only for buffer test
             if (mTestType == Constant.LOOPBACK_PLUG_AUDIO_THREAD_TEST_TYPE_BUFFER_PERIOD) {
@@ -296,7 +333,7 @@
                 mCaptureHolder.captureState(0);
             }
 
-            runDestroy(sles_data);
+            runDestroy(nativeHandle);
 
             final int maxTry = 20;
             int tryCount = 0;
@@ -373,18 +410,17 @@
     }
 
 
-    private void runDestroy(final long sles_data) {
+    private void runDestroy(final long localNativeHandle) {
         isDestroying = true;
 
         //start thread
-        final long local_sles_data = sles_data;
         Thread thread = new Thread(new Runnable() {
             public void run() {
                 isDestroying = true;
                 log("**Start runnable destroy");
 
-                int status = slesDestroy(local_sles_data);
-                log(String.format("**End runnable destroy sles delete status: %d", status));
+                int status = nativeDestroy(localNativeHandle);
+                log(String.format("**End runnable destroy native delete status: %d", status));
                 isDestroying = false;
             }
         });
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/PerformanceMeasurement.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/PerformanceMeasurement.java
index 35c5e18..266e06b 100644
--- a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/PerformanceMeasurement.java
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/PerformanceMeasurement.java
@@ -20,8 +20,8 @@
 
 
 /**
- * This class is used to automatically the audio performance according to recorder/player buffer
- * period.
+ * This class is used to automatically measure the audio performance according to recorder/player
+ * buffer period.
  */
 
 public class PerformanceMeasurement {
@@ -109,7 +109,7 @@
      * Determine percent of Buffer Period Callbacks that occurred at the expected time
      * Returns a value between 0 and 1
      */
-    public float percentBufferPeriodsAtExpected() {
+    public double percentBufferPeriodsAtExpected() {
         int occurrenceNearExpectedBufferPeriod = 0;
         // indicate how many buckets around mExpectedBufferPeriod do we want to add to the count
         int acceptableOffset = 2;
@@ -119,7 +119,7 @@
         for (int i = start; i <= end; i++) {
             occurrenceNearExpectedBufferPeriod += mBufferData[i];
         }
-        return ((float) occurrenceNearExpectedBufferPeriod) / mTotalOccurrence;
+        return ((double) occurrenceNearExpectedBufferPeriod) / mTotalOccurrence;
     }
 
 
@@ -222,6 +222,7 @@
     /**
      * Calculate the mean of int array "data". In this array, data[i] = x means there are
      * x occurrences of value i.
+     * TODO move to audio_utils
      */
     private double computeMean(int[] data) {
         int count = 0;
@@ -246,6 +247,7 @@
     /**
      * Calculate the standard deviation of int array "data". In this array, data[i] = x means
      * there are x occurrences of value i.
+     * TODO move to audio_utils
      */
     private double computeStandardDeviation(int[] data, double mean) {
         double sumDeviation = 0;
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/Pipe.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/Pipe.java
index 8eb1214..23addac 100644
--- a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/Pipe.java
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/Pipe.java
@@ -26,15 +26,13 @@
 
     protected int       mSamplesOverrun;
     protected int       mOverruns;
-    protected final int mMaxValues;   // always in power of two
-
+    protected final int mMaxValues;   // always a power of two
 
     /** maxSamples must be >= 2. */
     public Pipe(int maxSamples) {
         mMaxValues = Utilities.roundup(maxSamples); // round up to the nearest power of 2
     }
 
-
     /**
      * Read at most "count" number of samples into array "buffer", starting from index "offset".
      * If the available samples to read is smaller than count, just read as much as it can and
@@ -42,11 +40,10 @@
      */
     public abstract int read(short[] buffer, int offset, int count);
 
-
     /** Return the amount of samples available to read. */
     public abstract int availableToRead();
 
-
     /** Clear the pipe. */
     public abstract void flush();
+
 }
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/PipeByteBuffer.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/PipeByteBuffer.java
index 7c95aaf..306bbfc 100644
--- a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/PipeByteBuffer.java
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/PipeByteBuffer.java
@@ -28,6 +28,7 @@
  * read(), which converts data in ByteBuffer into shorts.
  * Data in the pipe are stored in the ByteBuffer array "mByteBuffer".
  * The write side of a pipe permits overruns; flow control is the caller's responsibility.
+ * TODO move to audio_utils
  */
 
 public class PipeByteBuffer extends Pipe {
@@ -63,7 +64,7 @@
 
     /**
      * Convert data in mByteBuffer into short, and put them into "buffer".
-     * Note: rear and mFront are keep in terms of number of short instead of number of byte.
+     * Note: rear and mFront are kept in terms of number of shorts instead of number of bytes.
      */
     @Override
     public int read(short[] buffer, int offset, int requiredSamples) {
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/RampedSineTone.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/RampedSineTone.java
index dc0227f..16d8fed 100644
--- a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/RampedSineTone.java
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/RampedSineTone.java
@@ -18,7 +18,8 @@
 
 /**
  * Creates a tone that can be injected (and then looped back) in the Latency test.
- * The generated tone is a sine wave whose amplitude linearly increases than decreases
+ * The generated tone is a sine wave whose amplitude linearly increases and then decreases,
+ * that is, it has a triangular window.
  */
 public class RampedSineTone extends SineWaveTone {
 
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/RecorderRunnable.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/RecorderRunnable.java
index 8c3c7a1..e42ee74 100644
--- a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/RecorderRunnable.java
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/RecorderRunnable.java
@@ -150,7 +150,7 @@
             e.printStackTrace();
             return false;
         } finally {
-            if (mRecorder == null){
+            if (mRecorder == null) {
                 return false;
             } else if (mRecorder.getState() != AudioRecord.STATE_INITIALIZED) {
                 mRecorder.release();
@@ -217,7 +217,7 @@
             e.printStackTrace();
             return false;
         } finally {
-            if (mRecorder == null){
+            if (mRecorder == null) {
                 return false;
             } else if (mRecorder.getState() != AudioRecord.STATE_INITIALIZED) {
                 mRecorder.release();
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/SettingsActivity.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/SettingsActivity.java
index 1167a25..f807727 100644
--- a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/SettingsActivity.java
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/SettingsActivity.java
@@ -18,6 +18,7 @@
 
 import android.app.Activity;
 import android.content.Intent;
+import android.graphics.Color;
 import android.os.Bundle;
 import android.util.Log;
 import android.view.Gravity;
@@ -88,7 +89,8 @@
         ArrayAdapter<CharSequence> adapterPerformanceMode = ArrayAdapter.createFromResource(this,
                 R.array.performance_mode_array, android.R.layout.simple_spinner_item);
         // Specify the layout to use when the list of choices appears
-        adapterPerformanceMode.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
+        adapterPerformanceMode.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item
+                );
         // Apply the adapter to the spinner
         mSpinnerPerformanceMode.setAdapter(adapterPerformanceMode);
         //set current value
@@ -113,16 +115,33 @@
         //spinner native
         int audioThreadType = getApp().getAudioThreadType();
         mSpinnerAudioThreadType = (Spinner) findViewById(R.id.spinnerAudioThreadType);
-        ArrayAdapter<CharSequence> adapter2 = ArrayAdapter.createFromResource(this,
-                R.array.audioThreadType_array, android.R.layout.simple_spinner_item);
+        ArrayAdapter<CharSequence> adapterThreadType = new ArrayAdapter<CharSequence>(this,
+                android.R.layout.simple_spinner_item,
+                getResources().getTextArray(R.array.audioThreadType_array)) {
+            @Override
+            public boolean isEnabled(int position) {
+                switch (position) {
+                    case Constant.AUDIO_THREAD_TYPE_JAVA: return true;
+                    case Constant.AUDIO_THREAD_TYPE_NATIVE_SLES: return getApp().isSafeToUseSles();
+                    case Constant.AUDIO_THREAD_TYPE_NATIVE_AAUDIO:
+                        return getApp().isSafeToUseAAudio();
+                }
+                return false;
+            }
+            @Override
+            public View getDropDownView(int position, View convertView, ViewGroup parent) {
+                TextView mTextView = (TextView)super.getDropDownView(position, convertView, parent);
+                // TODO: Use theme colors
+                mTextView.setTextColor(isEnabled(position) ? Color.BLACK : Color.GRAY);
+                return mTextView;
+            }
+        };
         // Specify the layout to use when the list of choices appears
-        adapter2.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
+        adapterThreadType.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
         // Apply the adapter to the spinner
-        mSpinnerAudioThreadType.setAdapter(adapter2);
+        mSpinnerAudioThreadType.setAdapter(adapterThreadType);
         //set current value
         mSpinnerAudioThreadType.setSelection(audioThreadType, false);
-        if (!getApp().isSafeToUseSles())
-            mSpinnerAudioThreadType.setEnabled(false);
         mSpinnerAudioThreadType.setOnItemSelectedListener(this);
 
         mSpinnerChannelIndex = (Spinner) findViewById(R.id.spinnerChannelIndex);
@@ -180,7 +199,7 @@
                 getApp().setPlayerBufferSizeInBytes(value * Constant.BYTES_PER_FRAME);
                 int audioThreadType = mSpinnerAudioThreadType.getSelectedItemPosition();
                 // in native mode, recorder buffer size = player buffer size
-                if (audioThreadType == Constant.AUDIO_THREAD_TYPE_NATIVE) {
+                if (audioThreadType == Constant.AUDIO_THREAD_TYPE_NATIVE_SLES) {
                     getApp().setRecorderBufferSizeInBytes(value * Constant.BYTES_PER_FRAME);
                     mRecorderBufferUI.setValue(value);
                 }
@@ -280,9 +299,25 @@
         finish();
     }
 
+    private boolean canPerformBufferTest() {
+        switch (getApp().getAudioThreadType()) {
+            case Constant.AUDIO_THREAD_TYPE_JAVA:
+            case Constant.AUDIO_THREAD_TYPE_NATIVE_SLES:
+                return true;
+        }
+        // Buffer test isn't yet implemented for AAudio.
+        return false;
+    }
 
     private void refresh() {
+        mSpinnerMicSource.setEnabled(
+                getApp().getAudioThreadType() == Constant.AUDIO_THREAD_TYPE_JAVA ||
+                getApp().getAudioThreadType() == Constant.AUDIO_THREAD_TYPE_NATIVE_SLES);
+
+        boolean bufferTestEnabled = canPerformBufferTest();
+        mBufferTestDurationUI.setEnabled(bufferTestEnabled);
         mBufferTestDurationUI.setValue(getApp().getBufferTestDuration());
+        mWavePlotDurationUI.setEnabled(bufferTestEnabled);
         mWavePlotDurationUI.setValue(getApp().getBufferTestWavePlotDuration());
 
         mPlayerBufferUI.setValue(getApp().getPlayerBufferSizeInBytes() / Constant.BYTES_PER_FRAME);
@@ -290,7 +325,8 @@
                 getApp().getRecorderBufferSizeInBytes() / Constant.BYTES_PER_FRAME);
 
         mRecorderBufferUI.setEnabled(
-                getApp().getAudioThreadType() == Constant.AUDIO_THREAD_TYPE_JAVA);
+                getApp().getAudioThreadType() == Constant.AUDIO_THREAD_TYPE_JAVA ||
+                getApp().getAudioThreadType() == Constant.AUDIO_THREAD_TYPE_NATIVE_AAUDIO);
 
         int samplingRate = getApp().getSamplingRate();
         String currentValue = String.valueOf(samplingRate);
@@ -353,6 +389,7 @@
         case R.id.spinnerPerformanceMode:
             int performanceMode = mSpinnerPerformanceMode.getSelectedItemPosition() - 1;
             getApp().setPerformanceMode(performanceMode);
+            getApp().computeDefaults();
             setSettingsHaveChanged();
             log("performanceMode:" + performanceMode);
             refresh();
@@ -362,7 +399,7 @@
 
     @Override
     public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
-        if (buttonView.getId() == mWavCaptureToggleButton.getId()){
+        if (buttonView.getId() == mWavCaptureToggleButton.getId()) {
             getApp().setCaptureWavsEnabled(isChecked);
         } else if (buttonView.getId() == mSystraceToggleButton.getId()) {
             getApp().setCaptureSysTraceEnabled(isChecked);
@@ -400,7 +437,7 @@
         }
 
         // display pop up window, dismissible with back button
-        popUp.showAtLocation(findViewById(R.id.settingsMainLayout), Gravity.TOP, 0, 0);
+        popUp.showAtLocation((View) findViewById(R.id.settingsMainLayout), Gravity.TOP, 0, 0);
     }
 
         /** Called when the user clicks the button */
@@ -423,7 +460,7 @@
 
 //    private void computeDefaults() {
 //
-////        if (getApp().getAudioThreadType() == LoopbackApplication.AUDIO_THREAD_TYPE_JAVA) {
+////        if (getApp().getAudioThreadType() == LoopbackApplication.AUDIO_THREAD_TYPE_JAVA)
 ////            mNumberPickerRecorderBuffer.setEnabled(true);
 ////        else
 ////            mNumberPickerRecorderBuffer.setEnabled(false);
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/SineWaveTone.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/SineWaveTone.java
index 186d847..bad9c30 100644
--- a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/SineWaveTone.java
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/SineWaveTone.java
@@ -21,6 +21,7 @@
  * This class generates a sine wave with given frequency and samplingRate.
  * It keeps a member variable "mPhase", so as it continually be called, it will continue to generate
  * the next section of the sine wave.
+ * TODO move to audio_utils
  */
 
 public class SineWaveTone extends ToneGeneration {
@@ -45,7 +46,7 @@
 
             mPhase += mPhaseIncrement;
             // insert glitches if mIsGlitchEnabled == true, and insert it for every second
-            if (mIsGlitchEnabled & (mCount % mSamplingRate == 0)) {
+            if (mIsGlitchEnabled && (mCount % mSamplingRate == 0)) {
                 mPhase += mPhaseIncrement;
             }
 
@@ -66,7 +67,7 @@
 
             mPhase += mPhaseIncrement;
             // insert glitches if mIsGlitchEnabled == true, and insert it for every second
-            if (mIsGlitchEnabled & (mCount % mSamplingRate == 0)) {
+            if (mIsGlitchEnabled && (mCount % mSamplingRate == 0)) {
                 mPhase += mPhaseIncrement;
             }
 
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/SoundLevelCalibration.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/SoundLevelCalibration.java
index ed70a09..52a436b 100644
--- a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/SoundLevelCalibration.java
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/SoundLevelCalibration.java
@@ -18,6 +18,7 @@
 
 import android.content.Context;
 import android.media.AudioManager;
+import android.os.Build;
 import android.os.Handler;
 import android.os.Looper;
 import android.util.Log;
@@ -26,8 +27,8 @@
     private static final int SECONDS_PER_LEVEL = 1;
     private static final int MAX_STEPS = 15; // The maximum number of levels that should be tried
     private static final double CRITICAL_RATIO = 0.41; // Ratio of input over output amplitude at
-                                                      // which the feedback loop neither decays nor
-                                                      // grows (determined experimentally)
+                                                       // which the feedback loop neither decays nor
+                                                       // grows (determined experimentally)
     private static final String TAG = "SoundLevelCalibration";
 
     private NativeAudioThread mNativeAudioThread = null;
@@ -51,15 +52,15 @@
         }
     }
 
-    SoundLevelCalibration(int samplingRate, int playerBufferSizeInBytes,
-                                 int recorderBufferSizeInBytes, int micSource, int performanceMode, Context context) {
+    SoundLevelCalibration(int threadType, int samplingRate, int playerBufferSizeInBytes,
+            int recorderBufferSizeInBytes, int micSource, int performanceMode, Context context) {
 
         // TODO: Allow capturing wave data without doing glitch detection.
         CaptureHolder captureHolder = new CaptureHolder(0, "", false, false, false, context,
                 samplingRate);
         // TODO: Run for less than 1 second.
-        mNativeAudioThread = new NativeAudioThread(samplingRate, playerBufferSizeInBytes,
-                recorderBufferSizeInBytes, micSource, performanceMode,
+        mNativeAudioThread = new NativeAudioThread(threadType, samplingRate,
+                playerBufferSizeInBytes, recorderBufferSizeInBytes, micSource, performanceMode,
                 Constant.LOOPBACK_PLUG_AUDIO_THREAD_TEST_TYPE_BUFFER_PERIOD, SECONDS_PER_LEVEL,
                 SECONDS_PER_LEVEL, 0, captureHolder);
         mAudioManager = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
@@ -70,7 +71,16 @@
         final int maxLevel = mAudioManager.getStreamMaxVolume(AudioManager.STREAM_MUSIC);
         int levelBottom = 0;
         int levelTop = maxLevel + 1;
-        while(levelTop - levelBottom > 1) {
+
+        // The ratio of 0.36 seems to correctly calibrate with the Mir dongle on Taimen and Walleye,
+        // but it does not work with the Mir dongle on devices with a 3.5mm jack. Using
+        // CRITICAL_RATIO leads to a correct calibration when plugging the loopback dongle into
+        // a 3.5mm jack directly.
+        // TODO: Find a better solution that, if possible, doesn't involve querying device names.
+        final double ratio = (Build.DEVICE.equals("walleye")
+                              || Build.DEVICE.equals("taimen")) ? 0.36 : CRITICAL_RATIO;
+
+        while (levelTop - levelBottom > 1) {
             int level = (levelBottom + levelTop) / 2;
             Log.d(TAG, "setting level to " + level);
             setVolume(level);
@@ -79,7 +89,7 @@
             mNativeAudioThread = new NativeAudioThread(mNativeAudioThread); // generate fresh thread
             Log.d(TAG, "calibrate: at sound level " + level + " volume was " + amplitude);
 
-            if (amplitude < Constant.SINE_WAVE_AMPLITUDE * CRITICAL_RATIO) {
+            if (amplitude < Constant.SINE_WAVE_AMPLITUDE * ratio) {
                 levelBottom = level;
             } else {
                 levelTop = level;
@@ -104,6 +114,7 @@
     }
 
     // TODO: Only gives accurate results for an undistorted sine wave. Check for distortion.
+    // TODO move to audio_utils
     private static double averageAmplitude(double[] data) {
         if (data == null || data.length == 0) {
             return 0; // no data is present
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/TestSettings.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/TestSettings.java
new file mode 100644
index 0000000..df8a4e8
--- /dev/null
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/TestSettings.java
@@ -0,0 +1,59 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.drrickorang.loopback;
+
+// Object to store the settings of the test that can be computed
+// automatically by the test threads.
+public class TestSettings {
+    public TestSettings(int samplingRate, int playerBufferSizeInBytes,
+            int recorderBuffSizeInBytes) {
+        mSamplingRate = samplingRate;
+        mPlayerBufferSizeInBytes = playerBufferSizeInBytes;
+        mRecorderBuffSizeInBytes = recorderBuffSizeInBytes;
+    }
+
+    public int getSamplingRate() {
+        return mSamplingRate;
+    }
+
+    public int getPlayerBufferSizeInBytes() {
+        return mPlayerBufferSizeInBytes;
+    }
+
+    public int getRecorderBufferSizeInBytes() {
+        return mRecorderBuffSizeInBytes;
+    }
+
+    public void setSamplingRate(int samplingRate) {
+        mSamplingRate = Utilities.clamp(samplingRate,
+                Constant.SAMPLING_RATE_MIN, Constant.SAMPLING_RATE_MAX);
+    }
+
+    public void setPlayerBufferSizeInBytes(int playerBufferSizeInBytes) {
+        mPlayerBufferSizeInBytes = Utilities.clamp(playerBufferSizeInBytes,
+                Constant.PLAYER_BUFFER_FRAMES_MIN, Constant.PLAYER_BUFFER_FRAMES_MAX);
+    }
+
+    public void setRecorderBufferSizeInBytes(int recorderBufferSizeInBytes) {
+        mRecorderBuffSizeInBytes = Utilities.clamp(recorderBufferSizeInBytes,
+                Constant.RECORDER_BUFFER_FRAMES_MIN, Constant.RECORDER_BUFFER_FRAMES_MAX);
+    }
+
+    private int mSamplingRate;
+    private int mPlayerBufferSizeInBytes;
+    private int mRecorderBuffSizeInBytes;
+}
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/TwoSineWavesTone.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/TwoSineWavesTone.java
index 27083cf..6445162 100644
--- a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/TwoSineWavesTone.java
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/TwoSineWavesTone.java
@@ -23,18 +23,21 @@
  * it will continue to generate the next section of the sine wave.
  */
 
+/*
 public class TwoSineWavesTone extends ToneGeneration {
     private int          mCount; // counts the total samples produced.
     private double       mPhase1; // current phase associated with mFrequency1
     private double       mPhase2; // current phase associated with mFrequency2
     private final double mPhaseIncrement1; // phase incrementation associated with mFrequency1
     private final double mPhaseIncrement2; // phase incrementation associated with mFrequency2
+*/
 
 
     /**
      * Currently, this class is never used, but it can be used in the future to create a different
      * kind of wave when running the test.
      */
+/*
     public TwoSineWavesTone(int samplingRate, double frequency1, double frequency2) {
         super(samplingRate);
         mCount = 0;
@@ -55,7 +58,7 @@
             mPhase2 += mPhaseIncrement2;
 
             // insert glitches for every second if mIsGlitchEnabled == true.
-            if (mIsGlitchEnabled & (mCount % mSamplingRate == 0)) {
+            if (mIsGlitchEnabled && (mCount % mSamplingRate == 0)) {
                 mPhase1 += mPhaseIncrement1;
                 mPhase2 += mPhaseIncrement2;
             }
@@ -83,7 +86,7 @@
             mPhase1 += mPhaseIncrement1;
             mPhase2 += mPhaseIncrement2;
             // insert glitches if mIsGlitchEnabled == true, and insert it for every second
-            if (mIsGlitchEnabled & (mCount % mSamplingRate == 0)) {
+            if (mIsGlitchEnabled && (mCount % mSamplingRate == 0)) {
                 mPhase1 += mPhaseIncrement1;
                 mPhase2 += mPhaseIncrement2;
             }
@@ -107,3 +110,4 @@
         mPhase2 = 0;
     }
 }
+*/
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/Utilities.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/Utilities.java
index 15928bf..dd5925e 100644
--- a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/Utilities.java
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/Utilities.java
@@ -19,6 +19,7 @@
 
 /**
  * This class contains functions that can be reused in different classes.
+ * TODO move to audio_utils
  */
 
 public class Utilities {
@@ -55,4 +56,17 @@
         return rounded;
     }
 
+
+    /**
+     * Returns value clamped to the inclusive range [min, max]: values below min
+     * return min, values above max return max, and in-range values are returned unchanged.
+     */
+    public static int clamp(int value, int min, int max) {
+
+        if (max < min) throw new UnsupportedOperationException("min must be <= max");
+
+        if (value < min) return min;
+        else if (value > max) return max;
+        else return value;
+    }
 }
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/WaveDataRingBuffer.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/WaveDataRingBuffer.java
index ee47238..641ef25 100644
--- a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/WaveDataRingBuffer.java
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/WaveDataRingBuffer.java
@@ -49,7 +49,7 @@
         return mLoadedDeck.getWaveRecord();
     }
 
-    private void SwapDecks() {
+    private void swapDecks() {
         WaveDeck temp = mShelvedDeck;
         mShelvedDeck = mLoadedDeck;
         mLoadedDeck = temp;
@@ -61,7 +61,7 @@
      **/
     public synchronized ReadableWaveDeck getWaveDeck() {
         if (!mShelvedDeck.isBeingRead()) {
-            SwapDecks();
+            swapDecks();
             mShelvedDeck.readyForRead();
             mLoadedDeck.reset();
             return mShelvedDeck;
@@ -143,12 +143,12 @@
         }
 
         /** Make buffer available for new recording **/
-        public void reset() {
+        private void reset() {
             mIndex = 0;
             mArrayFull = false;
         }
 
-        public boolean isBeingRead() {
+        private boolean isBeingRead() {
             return mIsBeingRead;
         }
 
@@ -170,4 +170,5 @@
             return successfulWrite;
         }
     }
+
 }
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/WavePlotView.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/WavePlotView.java
index 71b31c5..923c3f5 100644
--- a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/WavePlotView.java
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/WavePlotView.java
@@ -45,8 +45,8 @@
     private double [] mValuesArray;  //top points to plot
     private double [] mValuesArray2; //bottom
 
-    private double [] mInsetArray;
-    private double [] mInsetArray2;
+    private double[]  mInsetArray;
+    private double[]  mInsetArray2;
     private int       mInsetSize = 20;
 
     private double mZoomFactorX = 1.0; //1:1  1 sample / point .  Note: Point != pixel.
@@ -402,12 +402,12 @@
     }
 
 
-    void resetArray() {
+    private void resetArray() {
         Arrays.fill(mValuesArray, 0);
         Arrays.fill(mValuesArray2, 0);
     }
 
-    void refreshView() {
+    private void refreshView() {
         double maxZoom = getMaxZoomOut();
         setZoom(maxZoom);
         setOffset(0, false);
@@ -415,7 +415,7 @@
         refreshGraph();
     }
 
-    void computeInset() {
+    private void computeInset() {
         if (mBigDataArray != null) {
             int sampleCount = mBigDataArray.length;
             double pointsPerSample = (double) mInsetSize / sampleCount;
@@ -470,7 +470,7 @@
     }
 
 
-    void computeViewArray(double zoomFactorX, int sampleOffset) {
+    private void computeViewArray(double zoomFactorX, int sampleOffset) {
         //zoom factor: how many samples per point. 1.0 = 1.0 samples per point
         // sample offset in samples.
         if (zoomFactorX < 1.0)
@@ -519,6 +519,7 @@
     }
 
 
+    // FIXME why not public?
     void setData(double[] dataVector, int sampleRate) {
         if (sampleRate < 1)
             throw new IllegalArgumentException("sampleRate must be a positive integer");
@@ -531,6 +532,7 @@
         }
     }
 
+    // also called in LoopbackActivity
     void redraw() {
         invalidate();
     }
@@ -550,7 +552,7 @@
         @Override
         public boolean onDown(MotionEvent event) {
             Log.d(DEBUG_TAG, "onDown: " + event.toString() + " " + TAG);
-            if(!mScroller.isFinished()) {
+            if (!mScroller.isFinished()) {
                 mScroller.forceFinished(true);
                 refreshGraph();
             }
@@ -600,13 +602,13 @@
             setOffset(0, false);
             refreshGraph();
         }
-    }
+
+    }   // MyGestureListener
 
     private class MyScaleGestureListener extends ScaleGestureDetector.SimpleOnScaleGestureListener {
-        private static final String DEBUG_TAG = "MyScaleGestureListener";
+        //private static final String DEBUG_TAG = "MyScaleGestureListener";
         int focusSample = 0;
 
-
         @Override
         public boolean onScaleBegin(ScaleGestureDetector detector) {
             focusSample = (int) (detector.getFocusX() * getZoom()) + mCurrentOffset;
@@ -623,7 +625,8 @@
             refreshGraph();
             return true;
         }
-    }
+
+    }   // MyScaleGestureListener
 
     private static void log(String msg) {
         Log.v(TAG, msg);
diff --git a/LoopbackApp/app/src/main/jni/Android.mk b/LoopbackApp/app/src/main/jni/Android.mk
deleted file mode 100644
index ef0c829..0000000
--- a/LoopbackApp/app/src/main/jni/Android.mk
+++ /dev/null
@@ -1,27 +0,0 @@
-LOCAL_PATH := $(call my-dir)
-
-include $(CLEAR_VARS)
-
-LOCAL_MODULE      := libloopback
-LOCAL_MODULE_TAGS := optional
-LOCAL_SRC_FILES   := \
-	sles.cpp \
-	jni_sles.c \
-	audio_utils/atomic.c \
-	audio_utils/fifo.c \
-	audio_utils/roundup.c
-LOCAL_C_INCLUDES := \
-        frameworks/wilhelm/include
-
-LOCAL_SHARED_LIBRARIES := \
-	libOpenSLES \
-	liblog \
-    libandroid
-
-LOCAL_LDLIBS += -lOpenSLES -llog -landroid
-#LOCAL_PRELINK_MODULE := false
-
-#LOCAL_LDFLAGS += -Wl,--hash-style=sysv
-#LOCAL_CFLAGS := -DSTDC_HEADERS
-
-include $(BUILD_SHARED_LIBRARY)
diff --git a/LoopbackApp/app/src/main/jni/jni_sles.c b/LoopbackApp/app/src/main/jni/jni_sles.c
deleted file mode 100644
index 0417252..0000000
--- a/LoopbackApp/app/src/main/jni/jni_sles.c
+++ /dev/null
@@ -1,178 +0,0 @@
-/*
- * Copyright (C) 2015 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include <android/log.h>
-#include "sles.h"
-#include "jni_sles.h"
-#include <stdio.h>
-
-
-JNIEXPORT jlong JNICALL Java_org_drrickorang_loopback_NativeAudioThread_slesInit
-  (JNIEnv *env, jobject obj __unused, jint samplingRate, jint frameCount, jint micSource,
-   jint performanceMode,
-   jint testType, jdouble frequency1, jobject byteBuffer, jshortArray loopbackTone,
-   jint maxRecordedLateCallbacks, jint ignoreFirstFrames) {
-
-    sles_data * pSles = NULL;
-
-    char* byteBufferPtr = (*env)->GetDirectBufferAddress(env, byteBuffer);
-    int byteBufferLength = (*env)->GetDirectBufferCapacity(env, byteBuffer);
-
-    short* loopbackToneArray = (*env)->GetShortArrayElements(env, loopbackTone, 0);
-
-    if (slesInit(&pSles, samplingRate, frameCount, micSource,
-                 performanceMode,
-                 testType, frequency1, byteBufferPtr, byteBufferLength,
-                 loopbackToneArray, maxRecordedLateCallbacks, ignoreFirstFrames) != SLES_FAIL) {
-        return (long) pSles;
-    }
-
-    // FIXME This should be stored as a (long) field in the object,
-    // so that incorrect Java code could not synthesize a bad sles pointer.
-    return 0;
-}
-
-
-JNIEXPORT jint JNICALL Java_org_drrickorang_loopback_NativeAudioThread_slesProcessNext
-(JNIEnv *env __unused, jobject obj __unused, jlong sles, jdoubleArray samplesArray, jlong offset) {
-    sles_data * pSles = (sles_data*) (size_t) sles;
-
-    long maxSamples = (*env)->GetArrayLength(env, samplesArray);
-    double *pSamples = (*env)->GetDoubleArrayElements(env, samplesArray, 0);
-
-    long availableSamples = maxSamples-offset;
-    double *pCurrentSample = pSamples+offset;
-
-    SLES_PRINTF("jni slesProcessNext pSles:%p, currentSample %p, availableSamples %ld ",
-                pSles, pCurrentSample, availableSamples);
-
-    int samplesRead = slesProcessNext(pSles, pCurrentSample, availableSamples);
-    return samplesRead;
-}
-
-
-JNIEXPORT jint JNICALL Java_org_drrickorang_loopback_NativeAudioThread_slesDestroy
-  (JNIEnv *env __unused, jobject obj __unused, jlong sles) {
-    sles_data * pSles = (sles_data*) (size_t) sles;
-    int status = slesDestroy(&pSles);
-    return status;
-}
-
-
-JNIEXPORT jintArray JNICALL
-        Java_org_drrickorang_loopback_NativeAudioThread_slesGetRecorderBufferPeriod
-  (JNIEnv *env, jobject obj __unused, jlong sles) {
-    sles_data * pSles = (sles_data*) (size_t) sles;
-    int* recorderBufferPeriod = slesGetRecorderBufferPeriod(pSles);
-
-    // get the length = RANGE
-    jintArray result = (*env)->NewIntArray(env, RANGE);
-    (*env)->SetIntArrayRegion(env, result, 0, RANGE, recorderBufferPeriod);
-
-    return result;
-}
-
-
-JNIEXPORT jint JNICALL
-        Java_org_drrickorang_loopback_NativeAudioThread_slesGetRecorderMaxBufferPeriod
-  (JNIEnv *env __unused, jobject obj __unused, jlong sles) {
-    sles_data * pSles = (sles_data*) (size_t) sles;
-    int recorderMaxBufferPeriod = slesGetRecorderMaxBufferPeriod(pSles);
-
-    return recorderMaxBufferPeriod;
-}
-
-
-JNIEXPORT jdouble JNICALL
-        Java_org_drrickorang_loopback_NativeAudioThread_slesGetRecorderVarianceBufferPeriod
-        (JNIEnv *env __unused, jobject obj __unused, jlong sles) {
-    sles_data *pSles = (sles_data *) (size_t) sles;
-    int64_t result = slesGetRecorderVarianceBufferPeriod(pSles);
-    // variance has units ns^2 so we have to square the conversion factor
-    double scaled = (double) result / ((double) NANOS_PER_MILLI * (double) NANOS_PER_MILLI);
-    return scaled;
-}
-
-
-JNIEXPORT jintArray
-JNICALL Java_org_drrickorang_loopback_NativeAudioThread_slesGetPlayerBufferPeriod
-  (JNIEnv *env __unused, jobject obj __unused, jlong sles) {
-    sles_data * pSles = (sles_data*) (size_t) sles;
-    int* playerBufferPeriod = slesGetPlayerBufferPeriod(pSles);
-
-    jintArray result = (*env)->NewIntArray(env, RANGE);
-    (*env)->SetIntArrayRegion(env, result, 0, RANGE, playerBufferPeriod);
-
-    return result;
-}
-
-
-JNIEXPORT jint JNICALL
-        Java_org_drrickorang_loopback_NativeAudioThread_slesGetPlayerMaxBufferPeriod
-  (JNIEnv *env __unused, jobject obj __unused, jlong sles) {
-    sles_data * pSles = (sles_data*) (size_t) sles;
-    int playerMaxBufferPeriod = slesGetPlayerMaxBufferPeriod(pSles);
-
-    return playerMaxBufferPeriod;
-}
-
-
-JNIEXPORT jdouble JNICALL
-Java_org_drrickorang_loopback_NativeAudioThread_slesGetPlayerVarianceBufferPeriod
-        (JNIEnv *env __unused, jobject obj __unused, jlong sles) {
-    sles_data *pSles = (sles_data *) (size_t) sles;
-    int64_t result = slesGetPlayerVarianceBufferPeriod(pSles);
-    // variance has units ns^2 so we have to square the conversion factor
-    double scaled = (double) result / ((double) NANOS_PER_MILLI * (double) NANOS_PER_MILLI);
-    return scaled;
-}
-
-
-jobject getCallbackTimes(JNIEnv *env, callbackTimeStamps *callbacks, short expectedBufferPeriod){
-    jintArray timeStamps = (*env)->NewIntArray(env, callbacks->index);
-    (*env)->SetIntArrayRegion(env, timeStamps, 0, callbacks->index, callbacks->timeStampsMs);
-
-    jshortArray callbackLengths = (*env)->NewShortArray(env, callbacks->index);
-    (*env)->SetShortArrayRegion(env, callbackLengths, 0, callbacks->index,
-                                callbacks->callbackDurations);
-
-    jclass cls = (*env)->FindClass(env, "org/drrickorang/loopback/BufferCallbackTimes");
-    jmethodID methodID = (*env)->GetMethodID(env, cls, "<init>", "([I[SZS)V");
-    jobject callbackTimes=(*env)->NewObject(env,cls, methodID, timeStamps, callbackLengths,
-                                            callbacks->exceededCapacity, expectedBufferPeriod);
-    return callbackTimes;
-}
-
-JNIEXPORT jobject
-JNICALL Java_org_drrickorang_loopback_NativeAudioThread_slesGetPlayerCallbackTimeStamps
-        (JNIEnv *env, jobject obj __unused, jlong sles) {
-    sles_data * pSles = (sles_data*) (size_t) sles;
-    return getCallbackTimes(env, &(pSles->playerTimeStamps), pSles->expectedBufferPeriod);
-}
-
-JNIEXPORT jobject
-JNICALL Java_org_drrickorang_loopback_NativeAudioThread_slesGetRecorderCallbackTimeStamps
-        (JNIEnv *env, jobject obj __unused, jlong sles) {
-    sles_data * pSles = (sles_data*) (size_t) sles;
-    return getCallbackTimes(env, &(pSles->recorderTimeStamps), pSles->expectedBufferPeriod);
-}
-
-JNIEXPORT jint
-JNICALL Java_org_drrickorang_loopback_NativeAudioThread_slesGetCaptureRank
-        (JNIEnv *env __unused, jobject obj __unused, jlong sles) {
-    sles_data * pSles = (sles_data*) (size_t) sles;
-    return slesGetCaptureRank(pSles);
-}
diff --git a/LoopbackApp/app/src/main/jni/sles.h b/LoopbackApp/app/src/main/jni/sles.h
deleted file mode 100644
index c176656..0000000
--- a/LoopbackApp/app/src/main/jni/sles.h
+++ /dev/null
@@ -1,174 +0,0 @@
-/*
- * Copyright (C) 2015 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include <SLES/OpenSLES.h>
-#include <SLES/OpenSLES_Android.h>
-#include <pthread.h>
-#include <android/log.h>
-#include <jni.h>
-#include <stdbool.h>
-
-#ifndef _Included_org_drrickorang_loopback_sles
-#define _Included_org_drrickorang_loopback_sles
-
-//struct audio_utils_fifo;
-#define SLES_PRINTF(...)  __android_log_print(ANDROID_LOG_INFO, "sles_jni", __VA_ARGS__);
-
-
-#ifdef __cplusplus
-extern "C" {
-#endif
-#include <audio_utils/fifo.h>
-
-typedef struct {
-    int* timeStampsMs;          // Array of milliseconds since first callback
-    short* callbackDurations;   // Array of milliseconds between callback and previous callback
-    short index;                // Current write position
-    struct timespec startTime;  // Time of first callback {seconds,nanoseconds}
-    int capacity;               // Total number of callback times/lengths that can be recorded
-    bool exceededCapacity;      // Set only if late callbacks come after array is full
-} callbackTimeStamps;
-
-typedef struct {
-    int* buffer_period;
-    struct timespec previous_time;
-    struct timespec current_time;
-    int buffer_count;
-    int max_buffer_period;
-
-    volatile int32_t captureRank;   // Set > 0 when the callback requests a systrace/bug report
-
-    int measurement_count; // number of measurements which were actually recorded
-    int64_t SDM; // sum of squares of deviations from the expected mean
-    int64_t var; // variance in nanoseconds^2
-} bufferStats;
-
-//TODO fix this
-typedef struct {
-    SLuint32 rxBufCount;     // -r#
-    SLuint32 txBufCount;     // -t#
-    SLuint32 bufSizeInFrames;  // -f#
-    SLuint32 channels;       // -c#
-    SLuint32 sampleRate; // -s#
-    SLuint32 exitAfterSeconds; // -e#
-    SLuint32 freeBufCount;   // calculated
-    SLuint32 bufSizeInBytes; // calculated
-    int injectImpulse; // -i#i
-    size_t totalDiscardedInputFrames;   // total number of input frames discarded
-    int ignoreFirstFrames;
-
-    // Storage area for the buffer queues
-    char **rxBuffers;
-    char **txBuffers;
-    char **freeBuffers;
-
-    // Buffer indices
-    SLuint32 rxFront;    // oldest recording
-    SLuint32 rxRear;     // next to be recorded
-    SLuint32 txFront;    // oldest playing
-    SLuint32 txRear;     // next to be played
-    SLuint32 freeFront;  // oldest free
-    SLuint32 freeRear;   // next to be freed
-
-    struct audio_utils_fifo fifo;   // jitter buffer between recorder and player callbacks,
-                                    // to mitigate unpredictable phase difference between these,
-                                    // or even concurrent callbacks on two CPU cores
-    struct audio_utils_fifo fifo2;  // For sending data to java code (to plot it)
-    short *fifo2Buffer;
-    short *fifoBuffer;
-    SLAndroidSimpleBufferQueueItf recorderBufferQueue;
-    SLBufferQueueItf playerBufferQueue;
-
-    //other things that belong here
-    SLObjectItf playerObject;
-    SLObjectItf recorderObject;
-    SLObjectItf outputmixObject;
-    SLObjectItf engineObject;
-
-    bufferStats recorderBufferStats;
-    bufferStats playerBufferStats;
-
-    int testType;
-    double frequency1;
-    double bufferTestPhase1;
-    int count;
-    char* byteBufferPtr;
-    int byteBufferLength;
-
-    short* loopbackTone;
-
-    callbackTimeStamps recorderTimeStamps;
-    callbackTimeStamps playerTimeStamps;
-    short expectedBufferPeriod;
-} sles_data;
-
-#define NANOS_PER_SECOND 1000000000
-#define NANOS_PER_MILLI 1000000
-#define MILLIS_PER_SECOND 1000
-
-// how late in ms a callback must be to trigger a systrace/bugreport
-#define LATE_CALLBACK_CAPTURE_THRESHOLD 4
-#define LATE_CALLBACK_OUTLIER_THRESHOLD 1
-#define BUFFER_PERIOD_DISCARD 10
-#define BUFFER_PERIOD_DISCARD_FULL_DUPLEX_PARTNER 2
-
-enum {
-    SLES_SUCCESS = 0,
-    SLES_FAIL = 1,
-    RANGE = 1002,
-    TEST_TYPE_LATENCY = 222,
-    TEST_TYPE_BUFFER_PERIOD = 223
-} SLES_STATUS_ENUM;
-
-int slesInit(sles_data ** ppSles, int samplingRate, int frameCount, int micSource,
-             int performanceMode,
-             int testType, double frequency1, char* byteBufferPtr, int byteBufferLength,
-             short* loopbackTone, int maxRecordedLateCallbacks, int ignoreFirstFrames);
-
-//note the double pointer to properly free the memory of the structure
-int slesDestroy(sles_data ** ppSles);
-
-
-///full
-int slesFull(sles_data *pSles);
-
-int slesCreateServer(sles_data *pSles, int samplingRate, int frameCount, int micSource,
-                     int performanceMode,
-                     int testType, double frequency1, char* byteBufferPtr, int byteBufferLength,
-                     short* loopbackTone, int maxRecordedLateCallbacks, int ignoreFirstFrames);
-int slesProcessNext(sles_data *pSles, double *pSamples, long maxSamples);
-int slesDestroyServer(sles_data *pSles);
-int* slesGetRecorderBufferPeriod(sles_data *pSles);
-int slesGetRecorderMaxBufferPeriod(sles_data *pSles);
-int64_t slesGetRecorderVarianceBufferPeriod(sles_data *pSles);
-int* slesGetPlayerBufferPeriod(sles_data *pSles);
-int slesGetPlayerMaxBufferPeriod(sles_data *pSles);
-int64_t slesGetPlayerVarianceBufferPeriod(sles_data *pSles);
-int slesGetCaptureRank(sles_data *pSles);
-
-void initBufferStats(bufferStats *stats);
-void collectBufferPeriod(bufferStats *stats, bufferStats *fdpStats, callbackTimeStamps *timeStamps,
-                         short expectedBufferPeriod);
-bool updateBufferStats(bufferStats *stats, int64_t diff_in_nano, int expectedBufferPeriod);
-void recordTimeStamp(callbackTimeStamps *timeStamps,
-                     int64_t callbackDuration, int64_t timeStamp);
-
-ssize_t byteBuffer_write(sles_data *pSles, char *buffer, size_t count);
-
-#ifdef __cplusplus
-}
-#endif
-#endif //_Included_org_drrickorang_loopback_sles
diff --git a/LoopbackApp/app/src/main/res/raw/loopback_listener b/LoopbackApp/app/src/main/res/raw/loopback_listener
index a29b0c9..5974b1d 100644
--- a/LoopbackApp/app/src/main/res/raw/loopback_listener
+++ b/LoopbackApp/app/src/main/res/raw/loopback_listener
@@ -10,6 +10,7 @@
 BUGREPORT_SUFFIX="_bugreport.txt.gz";
 TERMINATE_SIGNAL="QUIT";
 SIGNAL_FILE="/sdcard/Loopback/loopback_signal"
+APP="-a org.drrickorang.loopback"
 TRACE_CATEGORIES="sched audio $@"
 BUFFER_KB="8000"
 
@@ -25,7 +26,7 @@
 
 # Begin an asynchronous systrace writing into a circular buffer of size BUFFER_KB
 echo "LOOPBACK LISTENER: starting trace"
-atrace --async_start -z -c -b $BUFFER_KB $TRACE_CATEGORIES
+atrace --async_start -z -c -b $BUFFER_KB $APP $TRACE_CATEGORIES
 echo " "
 
 # Remove signal file erroneously left behind from previous tests
@@ -50,7 +51,7 @@
                 case $filename in
                 *$SYSTRACE_SUFFIX)
                     echo "LOOPBACK LISTENER: dumping systrace to file $filename"
-                    atrace --async_dump -z -c -b $BUFFER_KB $TRACE_CATEGORIES > $filename
+                    atrace --async_dump -z -c -b $BUFFER_KB $APP $TRACE_CATEGORIES > $filename
                     ;;
 
                 *$BUGREPORT_SUFFIX)
diff --git a/LoopbackApp/app/src/main/res/values/strings.xml b/LoopbackApp/app/src/main/res/values/strings.xml
index bb9d69f..fdae707 100644
--- a/LoopbackApp/app/src/main/res/values/strings.xml
+++ b/LoopbackApp/app/src/main/res/values/strings.xml
@@ -76,6 +76,7 @@
         https://goo.gl/dxcw0d\n\n\n
         adb parameters:  all parameters are optional. If not specified, defaults will be used.\n
         -ei SF \t\t\t\t\t\t\t\t\t\t\t\t ####\t\t sampling frequency \n
+        -ei BS \t\t\t\t\t\t\t\t\t\t\t\t ####\t\t down sample block size \n
         -es Filename \t\t\t\t\t\t\t ssss\t\t\t output filename \n
         -ei MicSource \t\t\t\t\t\t\t ####\t\t microphone source\n
         \t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t 0: DEFAULT\n
@@ -85,7 +86,8 @@
         \t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t 4: VOICE_COMMUNICATION\n
         -ei AudioThread \t\t\t\t\t ####\t\t Audio Thread Type\n
         \t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t 0: Java\n
-        \t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t 1: Native (JNI)\n
+        \t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t 1: Native (SLES)\n
+        \t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t 2: Native (AAudio)\n
         -ei AudioLevel \t\t\t\t\t\t ####\t\t Audio Level [0:15]\n
         -ei RecorderBuffer \t\t\t\t ####\t\t Recorder Buffer Frames\n
         -ei PlayerBuffer \t\t\t\t\t\t ####\t\t Player Buffer Frames\n
@@ -120,7 +122,8 @@
     <!-- spinnerAudioThreadType Options -->
     <string-array name="audioThreadType_array">
         <item>Java</item>
-        <item>native (JNI)</item>
+        <item>native (SLES)</item>
+        <item>native (AAudio)</item>
     </string-array>
 
     <string name="labelChannelIndex">Channel Index</string>
@@ -190,4 +193,6 @@
         that it thinks is optimal. It has only been tested with the loopback plug and may fail
         completely in open air.
     </string>
+
+    <string name="notificationText">Please disregard me.</string>
 </resources>
diff --git a/LoopbackApp/build.gradle b/LoopbackApp/build.gradle
index f1cc249..b32f870 100644
--- a/LoopbackApp/build.gradle
+++ b/LoopbackApp/build.gradle
@@ -1,15 +1,17 @@
 // Top-level build file where you can add configuration options common to all sub-projects/modules.
 buildscript {
     repositories {
+        google()
         jcenter()
     }
     dependencies {
-        classpath 'com.android.tools.build:gradle-experimental:0.9.0'
+        classpath 'com.android.tools.build:gradle:3.1.3'
     }
 }
 
 allprojects {
     repositories {
+        google()
         jcenter()
     }
 }
diff --git a/LoopbackApp/gradle/wrapper/gradle-wrapper.properties b/LoopbackApp/gradle/wrapper/gradle-wrapper.properties
index dde259e..1eead36 100644
--- a/LoopbackApp/gradle/wrapper/gradle-wrapper.properties
+++ b/LoopbackApp/gradle/wrapper/gradle-wrapper.properties
@@ -1,6 +1,6 @@
-#Tue Mar 21 12:29:44 PDT 2017
+#Wed Apr 18 09:02:00 PDT 2018
 distributionBase=GRADLE_USER_HOME
 distributionPath=wrapper/dists
 zipStoreBase=GRADLE_USER_HOME
 zipStorePath=wrapper/dists
-distributionUrl=https\://services.gradle.org/distributions/gradle-3.3-all.zip
+distributionUrl=https\://services.gradle.org/distributions/gradle-4.4-all.zip