Merge remote-tracking branch 'origin/upstream-master'
am: 8ea137d108

Change-Id: Ib12d60716b77917710d90fafbb73b5ac193475e2
diff --git a/LoopbackApp/.gitignore b/LoopbackApp/.gitignore
new file mode 100644
index 0000000..4d893e9
--- /dev/null
+++ b/LoopbackApp/.gitignore
@@ -0,0 +1,5 @@
+.idea/*
+*.iml
+local.properties
+build/*
+.gradle/*
diff --git a/LoopbackApp/AUTHORS b/LoopbackApp/AUTHORS
new file mode 100644
index 0000000..6ca313d
--- /dev/null
+++ b/LoopbackApp/AUTHORS
@@ -0,0 +1,9 @@
+# This is the official list of authors for copyright purposes.
+# This file is distinct from the CONTRIBUTORS files.
+# See the latter for an explanation.
+
+# Names should be added to this file as:
+# Name or Organization <email address>
+# The email address is not required for organizations.
+
+Google Inc.
diff --git a/LoopbackApp/Android.mk b/LoopbackApp/Android.mk
new file mode 100644
index 0000000..b8a8165
--- /dev/null
+++ b/LoopbackApp/Android.mk
@@ -0,0 +1 @@
+include $(call all-makefiles-under, app/src/main)
diff --git a/LoopbackApp/CONTRIBUTING b/LoopbackApp/CONTRIBUTING
new file mode 100644
index 0000000..88f06d0
--- /dev/null
+++ b/LoopbackApp/CONTRIBUTING
@@ -0,0 +1,25 @@
+Want to contribute? Great! First, read this page (including the small print at the end).
+
+### Before you contribute
+Before we can use your code, you must sign the
+[Google Individual Contributor License
+Agreement](https://developers.google.com/open-source/cla/individual?csw=1)
+(CLA), which you can do online. The CLA is necessary mainly because you own the
+copyright to your changes, even after your contribution becomes part of our
+codebase, so we need your permission to use and distribute your code. We also
+need to be sure of various other things—for instance that you'll tell us if you
+know that your code infringes on other people's patents. You don't have to sign
+the CLA until after you've submitted your code for review and a member has
+approved it, but you must do it before we can put your code into our codebase.
+Before you start working on a larger contribution, you should get in touch with
+us first through the issue tracker with your idea so that we can help out and
+possibly guide you. Coordinating up front makes it much easier to avoid
+frustration later on.
+
+### Code reviews
+All submissions, including submissions by project members, require review. We
+use GitHub pull requests for this purpose.
+
+### The small print
+Contributions made by corporations are covered by a different agreement than
+the one above, the Software Grant and Corporate Contributor License Agreement.
diff --git a/LoopbackApp/CONTRIBUTORS b/LoopbackApp/CONTRIBUTORS
new file mode 100644
index 0000000..b294e50
--- /dev/null
+++ b/LoopbackApp/CONTRIBUTORS
@@ -0,0 +1,10 @@
+# People who have agreed to one of the CLAs and can contribute patches.
+# The AUTHORS file lists the copyright holders; this file
+# lists people.  For example, Google employees are listed here
+# but not in AUTHORS, because Google holds the copyright.
+#
+# https://developers.google.com/open-source/cla/individual
+# https://developers.google.com/open-source/cla/corporate
+#
+# Names should be added to this file as:
+#     Name <email address>
diff --git a/LoopbackApp/LICENSE b/LoopbackApp/LICENSE
new file mode 100644
index 0000000..d645695
--- /dev/null
+++ b/LoopbackApp/LICENSE
@@ -0,0 +1,202 @@
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!)  The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright [yyyy] [name of copyright owner]
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
diff --git a/LoopbackApp/PrivacyPolicy.md b/LoopbackApp/PrivacyPolicy.md
new file mode 100644
index 0000000..8cd8404
--- /dev/null
+++ b/LoopbackApp/PrivacyPolicy.md
@@ -0,0 +1,8 @@
+## Loopback Android App Privacy Policy
+
+This file is required in order to comply with Google Play requirements for apps using sensitive permissions:
+https://play.google.com/about/privacy-security/additional-requirements/
+
+The Loopback app needs the Record Audio permission in order to measure round-trip audio latency.
+
+The Loopback app processes all data locally on the Android device and does not transmit any information via the Internet.
diff --git a/LoopbackApp/README b/LoopbackApp/README
new file mode 100644
index 0000000..cf9661e
--- /dev/null
+++ b/LoopbackApp/README
@@ -0,0 +1,5 @@
+Audio latency testing app using the Dr. Rick O'Rang audio loopback dongle.
+
+References
+https://source.android.com/devices/audio/loopback.html
+https://source.android.com/devices/audio/latency_measure.html#loopback
diff --git a/LoopbackApp/app/.gitignore b/LoopbackApp/app/.gitignore
new file mode 100644
index 0000000..796b96d
--- /dev/null
+++ b/LoopbackApp/app/.gitignore
@@ -0,0 +1 @@
+/build
diff --git a/LoopbackApp/app/build.gradle b/LoopbackApp/app/build.gradle
new file mode 100644
index 0000000..ff69f81
--- /dev/null
+++ b/LoopbackApp/app/build.gradle
@@ -0,0 +1,37 @@
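+// Note: this module uses the experimental component-model Android Gradle plugin
+// ('com.android.model.application'); its DSL (model { android { ... } }) differs from
+// the standard 'com.android.application' plugin DSL.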
+apply plugin: 'com.android.model.application'
+
+model {
+    android {
+        compileSdkVersion = 23
+        buildToolsVersion = "25.0"
+
+        defaultConfig.with {
+            applicationId = "org.drrickorang.loopback"
+            minSdkVersion.apiLevel = 11
+            targetSdkVersion.apiLevel = 23
+        }
+        ndk {
+            moduleName "loopback"
+            cppFlags.addAll "-I${project.rootDir}/app/src/main/jni".toString(), "-g"
+            CFlags.addAll "-I${project.rootDir}/app/src/main/jni".toString()
+
+            ldLibs.addAll "OpenSLES", "log"
+        }
+        buildTypes {
+            release {
+                minifyEnabled false
+                proguardFiles.add file('proguard.cfg')
+            }
+            debug {
+                ndk {
+                    debuggable true
+                }
+            }
+        }
+    }
+}
+
+dependencies {
+    compile 'com.android.support:appcompat-v7:23.0.1'
+}
diff --git a/LoopbackApp/app/src/main/Android.mk b/LoopbackApp/app/src/main/Android.mk
new file mode 100644
index 0000000..23b50d2
--- /dev/null
+++ b/LoopbackApp/app/src/main/Android.mk
@@ -0,0 +1,26 @@
+LOCAL_PATH := $(call my-dir)
+
+include $(CLEAR_VARS)
+
+LOCAL_MODULE_TAGS := tests
+
+LOCAL_SRC_FILES := $(call all-subdir-java-files)
+
+LOCAL_JNI_SHARED_LIBRARIES := libloopback
+
+LOCAL_PACKAGE_NAME := Loopback
+
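+# Sign with the platform certificate so that signature-level permissions
+# (such as CAPTURE_AUDIO_OUTPUT) can be granted on platform builds.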
+LOCAL_CERTIFICATE := platform
+
+LOCAL_RESOURCE_DIR := $(LOCAL_PATH)/res \
+    frameworks/support/v7/appcompat/res
+
+LOCAL_AAPT_FLAGS := --auto-add-overlay \
+    --extra-packages android.support.v4
+
+LOCAL_STATIC_JAVA_LIBRARIES := \
+    android-support-v4
+
+include $(BUILD_PACKAGE)
+
+include $(call all-makefiles-under,$(LOCAL_PATH))
diff --git a/LoopbackApp/app/src/main/AndroidManifest.xml b/LoopbackApp/app/src/main/AndroidManifest.xml
new file mode 100644
index 0000000..a057e02
--- /dev/null
+++ b/LoopbackApp/app/src/main/AndroidManifest.xml
@@ -0,0 +1,97 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2014 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+
+<!-- Declare the contents of this Android application.  The namespace
+     attribute brings in the Android platform namespace, and the package
+     supplies a unique name for the application.  When writing your
+     own application, the package name must be changed from "com.example.*"
+     to come from a domain that you own or have control over. -->
+<manifest
+    xmlns:android="http://schemas.android.com/apk/res/android"
+    package="org.drrickorang.loopback"
+
+    android:versionCode="19"
+    android:versionName="0.9.75">
+
+    <uses-permission android:name="android.permission.RECORD_AUDIO"/>
+    <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/>
+    <uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE"/>
+    <uses-permission android:name="android.permission.STORAGE" />
+    <uses-permission android:name="android.permission.CAPTURE_AUDIO_OUTPUT" />
+    <uses-permission android:name="android.permission.VIBRATE" />
+
+    <application
+        android:label="@string/app_name"
+        android:icon="@drawable/ic_launcher"
+        android:name="LoopbackApplication">
+        <activity
+            android:name="org.drrickorang.loopback.LoopbackActivity"
+            android:theme="@android:style/Theme.Holo.Light"
+            android:configChanges="orientation|keyboardHidden|screenLayout"
+            android:launchMode="singleTop"
+            android:screenOrientation="portrait">
+            <intent-filter>
+                <action android:name="android.intent.action.MAIN"/>
+                <category android:name="android.intent.category.LAUNCHER"/>
+            </intent-filter>
+        </activity>
+
+        <service android:name=".AudioTestService" />
+
+        <activity
+            android:name="org.drrickorang.loopback.SettingsActivity"
+            android:parentActivityName="org.drrickorang.loopback.LoopbackActivity"
+            android:theme="@android:style/Theme.Holo.Light"
+            android:configChanges="orientation|screenLayout"
+            android:windowSoftInputMode="stateAlwaysHidden"
+            >
+            <meta-data
+                android:name="android.support.PARENT_ACTIVITY"
+                android:value="org.drrickorang.loopback.LoopbackActivity"/>
+        </activity>
+
+        <activity
+            android:name="org.drrickorang.loopback.AboutActivity"
+            android:label="About"
+            android:parentActivityName="org.drrickorang.loopback.LoopbackActivity"
+            android:theme="@android:style/Theme.Holo.Light">
+            <meta-data
+                android:name="android.support.PARENT_ACTIVITY"
+                android:value="org.drrickorang.loopback.LoopbackActivity" />
+        </activity>
+
+        <activity
+            android:name=".RecorderBufferPeriodActivity"
+            android:label="Recorder Buffer Period Histogram"
+            android:parentActivityName="org.drrickorang.loopback.LoopbackActivity"
+            android:theme="@android:style/Theme.Holo.Light">
+            <meta-data
+                android:name="android.support.PARENT_ACTIVITY"
+                android:value="org.drrickorang.loopback.LoopbackActivity" />
+        </activity>
+
+        <activity
+            android:name=".PlayerBufferPeriodActivity"
+            android:label="Player Buffer Period Histogram"
+            android:parentActivityName="org.drrickorang.loopback.LoopbackActivity"
+            android:theme="@android:style/Theme.Holo.Light">
+            <meta-data
+                android:name="android.support.PARENT_ACTIVITY"
+                android:value="org.drrickorang.loopback.LoopbackActivity" />
+        </activity>
+
+    </application>
+</manifest>
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/AboutActivity.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/AboutActivity.java
new file mode 100644
index 0000000..83dfdfb
--- /dev/null
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/AboutActivity.java
@@ -0,0 +1,40 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.drrickorang.loopback;
+
+import android.app.Activity;
+import android.os.Bundle;
+import android.view.View;
+import android.widget.TextView;
+
+
+/**
+ * This activity shows information related to this application.
+ */
+
+public class AboutActivity extends Activity {
+
+
+    public void onCreate(Bundle savedInstanceState) {
+        super.onCreate(savedInstanceState);
+
+        // FIXME spaces in xml not showing up as expected, so the displayed text may look unaligned
+        View view = getLayoutInflater().inflate(R.layout.about_activity, null);
+        setContentView(view);
+    }
+
+}
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/AtraceScriptsWriter.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/AtraceScriptsWriter.java
new file mode 100644
index 0000000..7cfb74a
--- /dev/null
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/AtraceScriptsWriter.java
@@ -0,0 +1,88 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.drrickorang.loopback;
+
+import android.content.Context;
+import android.util.Log;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+
+/**
+ *  Places the loopback_listener shell script on device storage.
+ */
+public class AtraceScriptsWriter {
+
+    private static final String TAG = "AtraceScriptsWriter";
+    private static final String LISTENER_SCRIPT_LOCATION =
+            CaptureHolder.DIRECTORY + "/loopback_listener";
+
+    /** Writes scripts to device storage; returns true on a successful write. */
+    public static boolean writeScriptsToFile(Context ctx) {
+        try {
+            File file = new File(CaptureHolder.DIRECTORY);
+
+            // Create a directory for script and signal file
+            if (!file.exists()) {
+                if (file.mkdir()) {
+                    Log.d(TAG, "writeScriptsToFile: Loopback folder created");
+                } else {
+                    Log.e(TAG, "writeScriptsToFile: Failed to create folder!");
+                    return false;
+                }
+            }
+            // Check for writable directory that already existed or after creating
+            if (!file.isDirectory() || !file.canWrite()) {
+                Log.d(TAG, "writeScriptsToFile: " + CaptureHolder.DIRECTORY
+                        + (!file.isDirectory() ? "is not a directory " : "")
+                        + (!file.canWrite() ? "is not writable" : ""));
+                return false;
+            }
+            copyResToFile(ctx, R.raw.loopback_listener, LISTENER_SCRIPT_LOCATION);
+        } catch (IOException e) {
+            Log.e(TAG, "Unable to write script to file", e);
+            return false;
+        }
+        return true;
+    }
+
+    private static void copyResToFile(Context ctx, int resId, String targetFile)
+            throws IOException {
+        InputStream inputStream = ctx.getResources().openRawResource(resId);
+        OutputStream outputStream = new FileOutputStream(targetFile);
+        copy(inputStream, outputStream);
+        outputStream.close();
+        inputStream.close();
+    }
+
+
+    private static int copy(InputStream input, OutputStream output) throws IOException {
+        final int BYTES_TO_READ = 2048;
+        byte[] buffer = new byte[BYTES_TO_READ];
+        int total = 0;
+        int n;
+        while ((n = input.read(buffer)) != -1) {
+            output.write(buffer, 0, n);
+            total = total + n;
+        }
+        return total;
+    }
+
+}
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/AudioFileOutput.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/AudioFileOutput.java
new file mode 100644
index 0000000..e8b44a7
--- /dev/null
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/AudioFileOutput.java
@@ -0,0 +1,182 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.drrickorang.loopback;
+
+import java.io.FileDescriptor;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.util.Arrays;
+
+import android.content.Context;
+import android.net.Uri;
+import android.os.ParcelFileDescriptor;
+import android.util.Log;
+
+
+/**
+ * This class is used to save the results to a .wav file.
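+ *
+ * Typical use (illustrative): construct with a destination Uri and sampling rate, then call
+ * writeData() or writeRingBufferData() with samples in the range [-1.0, 1.0]; samples are
+ * written as 16-bit little-endian PCM.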
+ */
+
+public class AudioFileOutput {
+    private static final String TAG = "AudioFileOutput";
+
+    private Uri              mUri;
+    private Context          mContext;
+    private FileOutputStream mOutputStream;
+    private final int        mSamplingRate;
+
+
+    public AudioFileOutput(Context context, Uri uri, int samplingRate) {
+        mContext = context;
+        mUri = uri;
+        mSamplingRate = samplingRate;
+    }
+
+
+    public boolean writeData(double[] data) {
+        return writeRingBufferData(data, 0, data.length);
+    }
+
+    /**
+     * Writes recorded wav data to file
+     *  endIndex <= startIndex:  Writes [startIndex, data.length) then [0, endIndex)
+     *  endIndex > startIndex :  Writes [startIndex, endIndex)
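+     *  Example (hypothetical values): data.length = 8, startIndex = 6, endIndex = 2
+     *  writes indices 6, 7, 0, 1 (4 samples).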
+     * Returns true on successful write to file
+     */
+    public boolean writeRingBufferData(double[] data, int startIndex, int endIndex) {
+
+        boolean status = false;
+        ParcelFileDescriptor parcelFileDescriptor = null;
+        try {
+            parcelFileDescriptor =
+                    mContext.getContentResolver().openFileDescriptor(mUri, "w");
+            FileDescriptor fileDescriptor = parcelFileDescriptor.getFileDescriptor();
+            mOutputStream = new FileOutputStream(fileDescriptor);
+            log("Done creating output stream");
+            int sampleCount = endIndex - startIndex;
+            if (sampleCount <= 0) {
+                sampleCount += data.length;
+            }
+            writeHeader(sampleCount);
+
+            if (endIndex > startIndex) {
+                writeDataBuffer(data, startIndex, endIndex);
+            } else {
+                writeDataBuffer(data, startIndex, data.length);
+                writeDataBuffer(data, 0, endIndex);
+            }
+
+            mOutputStream.close();
+            status = true;
+            parcelFileDescriptor.close();
+        } catch (Exception e) {
+            mOutputStream = null;
+            log("Failed to open wavefile" + e);
+        } finally {
+            try {
+                if (parcelFileDescriptor != null) {
+                    parcelFileDescriptor.close();
+                }
+            } catch (Exception e) {
+                e.printStackTrace();
+                log("Error closing ParcelFile Descriptor");
+            }
+        }
+        return status;
+    }
+
+    private void writeHeader(int samples) {
+        if (mOutputStream != null) {
+            try {
+                int channels = 1;
+                int blockAlignment = 2;
+                int bitsPerSample = 16;
+                byte[] chunkSize = new byte[4];
+                byte[] dataSize = new byte[4];
+                int tempChunkSize =  (samples * 2) + 36;
+                chunkSize[3] = (byte) (tempChunkSize >> 24);
+                chunkSize[2] = (byte) (tempChunkSize >> 16);
+                chunkSize[1] = (byte) (tempChunkSize >> 8);
+                chunkSize[0] = (byte) tempChunkSize;
+                int tempDataSize  = samples * 2;
+                dataSize[3] = (byte) (tempDataSize >> 24);
+                dataSize[2] = (byte) (tempDataSize >> 16);
+                dataSize[1] = (byte) (tempDataSize >> 8);
+                dataSize[0] = (byte) tempDataSize;
+
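+                // Canonical 44-byte PCM WAV header layout (multi-byte fields little-endian):
+                //   bytes  0-3  "RIFF"         bytes  4-7  chunk size (36 + data size)
+                //   bytes  8-11 "WAVE"         bytes 12-15 "fmt "
+                //   bytes 16-19 fmt size (16)  bytes 20-21 audio format (1 = PCM)
+                //   bytes 22-23 channels       bytes 24-27 sample rate
+                //   bytes 28-31 byte rate      bytes 32-33 block align
+                //   bytes 34-35 bits/sample    bytes 36-39 "data"
+                //   bytes 40-43 data size (samples * 2 for 16-bit mono)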
+                byte[] header = new byte[] {
+                    'R', 'I', 'F', 'F',
+                    chunkSize[0], chunkSize[1], chunkSize[2], chunkSize[3],
+                    'W', 'A', 'V', 'E',
+                    'f', 'm', 't', ' ',
+                    16, 0, 0, 0,
+                    1, 0,   // PCM
+                    (byte) channels, 0,   // number of channels
+                    (byte) mSamplingRate, (byte) (mSamplingRate >> 8), 0, 0,    // sample rate
+                    (byte) (mSamplingRate * channels * blockAlignment),
+                    (byte) ((mSamplingRate * channels * blockAlignment) >> 8),
+                    (byte) ((mSamplingRate * channels * blockAlignment) >> 16),
+                    0, // byte rate = sample rate * block alignment
+                    (byte) (channels * blockAlignment),
+                    0,   // block alignment
+                    (byte) bitsPerSample,
+                    0,  // bits per sample
+                    'd', 'a', 't', 'a',
+                    dataSize[0], dataSize[1], dataSize[2], dataSize[3],
+                };
+                mOutputStream.write(header);
+                log("Done writing header");
+            } catch (IOException e) {
+                Log.e(TAG, "Error writing header " + e);
+            }
+        }
+    }
+
+
+    private void writeDataBuffer(double [] data, int startIndex, int end) {
+        if (mOutputStream != null) {
+            try {
+                int bufferSize = 1024; //blocks of 1024 samples
+                byte [] buffer = new byte[bufferSize * 2];
+
+                for (int ii = startIndex; ii < end; ii += bufferSize) {
+                    //clear buffer
+                    Arrays.fill(buffer, (byte) 0);
+                    int bytesUsed = 0;
+                    for (int jj = 0; jj < bufferSize; jj++) {
+                        int index = ii + jj;
+                        if (index >= end)
+                            break;
+                        int value = (int) Math.round(data[index] * Short.MAX_VALUE);
+                        byte ba = (byte) (0xFF & (value >> 8));  //little-endian
+                        byte bb = (byte) (0xFF & (value));
+                        buffer[(jj * 2) + 1] = ba;
+                        buffer[jj * 2]   = bb;
+                        bytesUsed += 2;
+                    }
+                    mOutputStream.write(buffer, 0, bytesUsed);
+                }
+                log("Done writing data");
+            } catch (IOException e) {
+                Log.e(TAG, "Error writing data " + e);
+            }
+        }
+    }
+
+
+    private static void log(String msg) {
+        Log.v(TAG, msg);
+    }
+
+}
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/AudioTestService.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/AudioTestService.java
new file mode 100644
index 0000000..329d62b
--- /dev/null
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/AudioTestService.java
@@ -0,0 +1,103 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.drrickorang.loopback;
+
+import android.app.Notification;
+import android.app.Service;
+import android.content.Intent;
+import android.os.Build;
+import android.os.IBinder;
+import android.os.Binder;
+import android.util.Log;
+
+
+/**
+ * This Service is created during the first onStart() of the activity.
+ * Threads needed for the test are created under this Service.
+ * At the end of the test, this Service passes the test results back to LoopbackActivity.
+ */
+
+public class AudioTestService extends Service {
+    private static final String TAG = "AudioTestService";
+
+    private final IBinder mBinder = new AudioTestBinder();
+
+
+    @Override
+    public void onCreate() {
+        runAsForegroundService();
+        log("Audio Test Service created!");
+    }
+
+
+    @Override
+    public int onStartCommand(Intent intent, int flags, int startId) {
+        log("Service onStartCommand: " + startId);
+        //runAsForegroundService();
+        return Service.START_NOT_STICKY;
+    }
+
+
+    /**
+     * This method runs the Service as a foreground Service so that it won't be killed
+     * and restarted after a while.
+     */
+    private void runAsForegroundService() {
+        int notificationId = 1400;
+        Notification.Builder builder = new Notification.Builder(this)
+                .setSmallIcon(R.drawable.ic_launcher).setContentTitle("Loopback App")
+                .setContentText("Please disregard me.");
+        Notification notification;
+        if (Build.VERSION.SDK_INT < Build.VERSION_CODES.JELLY_BEAN) {
+            notification = builder.getNotification();
+        } else {
+            notification = builder.build();
+        }
+
+        startForeground(notificationId, notification);
+    }
+
+
+    @Override
+    public IBinder onBind(Intent intent) {
+        log("Service onBind");
+        return mBinder;
+    }
+
+
+    @Override
+    public void onDestroy() {
+        log("Service onDestroy");
+    }
+
+
+    private static void log(String msg) {
+        Log.v(TAG, msg);
+    }
+
+
+    /**
+     * This class is only used by AudioTestService to create a binder that passes the
+     * AudioTestService back to LoopbackActivity.
+     */
+    public class AudioTestBinder extends Binder {
+        AudioTestService getService() {
+            return AudioTestService.this;
+        }
+    }
+
+}
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/BufferCallbackTimes.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/BufferCallbackTimes.java
new file mode 100644
index 0000000..28e8c76
--- /dev/null
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/BufferCallbackTimes.java
@@ -0,0 +1,169 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.drrickorang.loopback;
+
+import android.os.Bundle;
+import android.os.Parcel;
+import android.os.Parcelable;
+
+import java.util.Iterator;
+
+/**
+ * Maintains and returns pairs of callback timestamps (in milliseconds since beginning of test) and
+ * lengths (milliseconds between a callback and the previous callback).
+ */
+public class BufferCallbackTimes implements Iterable<BufferCallbackTimes.BufferCallback>, Parcelable {
+    private final int[] mTimeStamps;
+    private final short[] mCallbackDurations;
+    private final short mExpectedBufferPeriod;
+    private boolean mExceededCapacity;
+    private int mIndex;
+
+    public BufferCallbackTimes(int maxRecords, int expectedBufferPeriod) {
+        mIndex = 0;
+        mTimeStamps = new int[maxRecords];
+        mCallbackDurations = new short[maxRecords];
+        mExceededCapacity = false;
+        mExpectedBufferPeriod = (short) expectedBufferPeriod;
+    }
+
+    /**
+     * Instantiates an iterable object from already recorded callback times and lengths;
+     * used for callbacks recorded by the native SLES callback functions.
+     *
+     * exceededCapacity should be set to true only when late callbacks were observed but could
+     * not be recorded because the allocated arrays were already at capacity.
+     */
+    public BufferCallbackTimes(int[] timeStamps, short[] callbackDurations,
+                               boolean exceededCapacity, short expectedBufferPeriod) {
+        mTimeStamps = timeStamps;
+        mCallbackDurations = callbackDurations;
+        mExceededCapacity = exceededCapacity;
+        mIndex = mTimeStamps.length;
+        mExpectedBufferPeriod = expectedBufferPeriod;
+    }
+
+    /** Records the length of a late/early callback and the time it occurred. Used by the Java thread. */
+    public void recordCallbackTime(int timeStamp, short callbackLength) {
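+        // Only record callbacks whose duration differs from the expected buffer period
+        // (allowing expected + 1 ms for rounding); on-time callbacks are not stored.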
+        if (!mExceededCapacity && callbackLength != mExpectedBufferPeriod
+                && callbackLength != mExpectedBufferPeriod + 1) {
+            //only marked as exceeded if attempting to record a late callback after arrays full
+            if (mIndex == mTimeStamps.length) {
+                mExceededCapacity = true;
+                return;
+            }
+            mTimeStamps[mIndex] = timeStamp;
+            mCallbackDurations[mIndex] = callbackLength;
+            mIndex++;
+        }
+    }
+
+    @Override
+    public String toString() {
+        StringBuilder sb = new StringBuilder();
+        for (BufferCallback callback : this) {
+            sb.append(callback.timeStamp);
+            sb.append(",");
+            sb.append(callback.callbackDuration);
+            sb.append("\n");
+        }
+        return sb.toString();
+    }
+
+    // True only if arrays are full and recording more late or early callbacks is attempted.
+    public boolean isCapacityExceeded() {
+        return mExceededCapacity;
+    }
+
+    public int getNumLateOrEarlyCallbacks() {
+        return mIndex;
+    }
+
+    public short getExpectedBufferPeriod() {
+        return mExpectedBufferPeriod;
+    }
+
+    @Override
+    public Iterator<BufferCallback> iterator() {
+        return new Iterator<BufferCallback>() {
+            int mIteratorIndex = 0;
+
+            @Override
+            public boolean hasNext() {
+                return mIteratorIndex < mIndex;
+            }
+
+            @Override
+            public BufferCallback next() {
+                return new BufferCallback(mTimeStamps[mIteratorIndex],
+                        mCallbackDurations[mIteratorIndex++]);
+            }
+
+            @Override
+            public void remove() {
+                throw new UnsupportedOperationException("Buffer Time Stamps are Immutable");
+            }
+        };
+    }
+
+    @Override
+    public int describeContents() {
+        return 0;
+    }
+
+    @Override
+    public void writeToParcel(Parcel dest, int flags) {
+        Bundle out = new Bundle();
+        out.putIntArray("mTimeStamps", mTimeStamps);
+        out.putShortArray("mCallbackDurations", mCallbackDurations);
+        out.putShort("mExpectedBufferPeriod", mExpectedBufferPeriod);
+        out.putBoolean("mExceededCapacity", mExceededCapacity);
+        out.putInt("mIndex", mIndex);
+        dest.writeBundle(out);
+    }
+
+    private BufferCallbackTimes(Parcel source) {
+        Bundle in = source.readBundle(getClass().getClassLoader());
+        mTimeStamps = in.getIntArray("mTimeStamps");
+        mCallbackDurations = in.getShortArray("mCallbackDurations");
+        mExpectedBufferPeriod = in.getShort("mExpectedBufferPeriod");
+        mExceededCapacity = in.getBoolean("mExceededCapacity");
+        mIndex = in.getInt("mIndex");
+    }
+
+    public static final Parcelable.Creator<BufferCallbackTimes> CREATOR
+             = new Parcelable.Creator<BufferCallbackTimes>() {
+         public BufferCallbackTimes createFromParcel(Parcel in) {
+             return new BufferCallbackTimes(in);
+         }
+
+         public BufferCallbackTimes[] newArray(int size) {
+             return new BufferCallbackTimes[size];
+         }
+     };
+
+    /** Wrapper for iteration over timestamp and length pairs */
+    public class BufferCallback {
+        public final int timeStamp;
+        public final short callbackDuration;
+
+        BufferCallback(final int ts, final short cd) {
+            timeStamp = ts;
+            callbackDuration = cd;
+        }
+    }
+}
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/BufferPeriod.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/BufferPeriod.java
new file mode 100644
index 0000000..97ab6ad
--- /dev/null
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/BufferPeriod.java
@@ -0,0 +1,186 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.drrickorang.loopback;
+
+import android.os.Bundle;
+import android.os.Parcel;
+import android.os.Parcelable;
+import android.util.Log;
+
+import java.util.Arrays;
+
+
+/**
+ * This class records the buffer period of the audio player or recorder when in Java mode.
+ * Currently the accuracy is 1 ms.
+ */
+
+// TODO: for native mode, use a scale more accurate than the current 1 ms
+public class BufferPeriod implements Parcelable {
+    private static final String TAG = "BufferPeriod";
+
+    private long mStartTimeNs = 0;  // first time collectBufferPeriod() is called
+    private long mPreviousTimeNs = 0;
+    private long mCurrentTimeNs = 0;
+
+    private int       mMeasurements = 0;
+    private long      mVar;     // variance in nanoseconds^2
+    private long      mSDM = 0; // sum of squares of deviations from the expected mean
+    private int       mMaxBufferPeriod = 0;
+
+    private int       mCount = 0;
+    private final int range = 1002; // store counts for 0ms to 1000ms, and for > 1000ms
+    private int       mExpectedBufferPeriod = 0;
+
+    private int[] mBufferPeriod = new int[range];
+    private BufferCallbackTimes mCallbackTimes;
+    private CaptureHolder mCaptureHolder;
+
+    public BufferPeriod() {
+        // Default constructor for when no data will be restored
+    }
+
+    /**
+     * For player, this function is called before every AudioTrack.write().
+     * For recorder, this function is called after every AudioRecord.read() with read > 0.
+     */
+    public void collectBufferPeriod() {
+        mCurrentTimeNs = System.nanoTime();
+        mCount++;
+
+        // if mPreviousTimeNs == 0, this is the first time this function is called
+        if (mPreviousTimeNs == 0) {
+            mStartTimeNs = mCurrentTimeNs;
+        }
+
+        if (mPreviousTimeNs != 0 && mCount > Constant.BUFFER_PERIOD_DISCARD) {
+            mMeasurements++;
+
+            long diffInNano = mCurrentTimeNs - mPreviousTimeNs;
+            // diffInMilli is rounded up
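+            // (ceiling division: e.g. a diff of 5,000,001 ns becomes 6 ms)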
+            int diffInMilli = (int) ((diffInNano + Constant.NANOS_PER_MILLI - 1) /
+                                      Constant.NANOS_PER_MILLI);
+
+            long timeStampInNano = mCurrentTimeNs - mStartTimeNs;
+            int timeStampInMilli = (int) ((timeStampInNano + Constant.NANOS_PER_MILLI - 1) /
+                                           Constant.NANOS_PER_MILLI);
+
+            if (diffInMilli > mMaxBufferPeriod) {
+                mMaxBufferPeriod = diffInMilli;
+            }
+
+            // from 0 ms to 1000 ms, plus a sum of all occurrences > 1000ms
+            if (diffInMilli >= (range - 1)) {
+                mBufferPeriod[range - 1]++;
+            } else if (diffInMilli >= 0) {
+                mBufferPeriod[diffInMilli]++;
+            } else { // for diffInMilli < 0
+                log("Having negative BufferPeriod.");
+            }
+
+            long delta = diffInNano - (long) mExpectedBufferPeriod * Constant.NANOS_PER_MILLI;
+            mSDM += delta * delta;
+            if (mCount > 1) {
+                mVar = mSDM / mMeasurements;
+            }
+
+            mCallbackTimes.recordCallbackTime(timeStampInMilli, (short) diffInMilli);
+
+            // If diagnosing specific Java thread callback behavior set a conditional here and use
+            // mCaptureHolder.captureState(rank); to capture systraces and bugreport and/or wav file
+        }
+
+        mPreviousTimeNs = mCurrentTimeNs;
+    }
+
+
+    /** Resets all variables; called when starting a new buffer period record. */
+    public void resetRecord() {
+        mPreviousTimeNs = 0;
+        mCurrentTimeNs = 0;
+        Arrays.fill(mBufferPeriod, 0);
+        mMaxBufferPeriod = 0;
+        mMeasurements = 0;
+        mExpectedBufferPeriod = 0;
+        mCount = 0;
+        mCallbackTimes = null;
+    }
+
+    public void prepareMemberObjects(int maxRecords, int expectedBufferPeriod,
+                                     CaptureHolder captureHolder){
+        mCallbackTimes = new BufferCallbackTimes(maxRecords, expectedBufferPeriod);
+        mCaptureHolder = captureHolder;
+        mExpectedBufferPeriod = expectedBufferPeriod;
+    }
+
+    public int[] getBufferPeriodArray() {
+        return mBufferPeriod;
+    }
+
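+    /**
+     * Returns the standard deviation in ms, computed from squared deviations of each buffer
+     * period from the expected period (not from the sample mean).
+     */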
+    public double getStdDevBufferPeriod() {
+        return Math.sqrt(mVar) / (double) Constant.NANOS_PER_MILLI;
+    }
+
+    public int getMaxBufferPeriod() {
+        return mMaxBufferPeriod;
+    }
+
+    public BufferCallbackTimes getCallbackTimes(){
+        return mCallbackTimes;
+    }
+
+    @Override
+    public int describeContents() {
+        return 0;
+    }
+
+    // Only save values which represent the results. Any ongoing timing would not give useful
+    // results after a save/restore.
+    @Override
+    public void writeToParcel(Parcel dest, int flags) {
+        Bundle out = new Bundle();
+        out.putInt("mMaxBufferPeriod", mMaxBufferPeriod);
+        out.putIntArray("mBufferPeriod", mBufferPeriod);
+        out.putInt("mExpectedBufferPeriod", mExpectedBufferPeriod);
+        out.putParcelable("mCallbackTimes", mCallbackTimes);
+        dest.writeBundle(out);
+    }
+
+    private BufferPeriod(Parcel source) {
+        Bundle in = source.readBundle(getClass().getClassLoader());
+        mMaxBufferPeriod = in.getInt("mMaxBufferPeriod");
+        mBufferPeriod = in.getIntArray("mBufferPeriod");
+        mExpectedBufferPeriod = in.getInt("mExpectedBufferPeriod");
+        mCallbackTimes = in.getParcelable("mCallbackTimes");
+    }
+
+    public static final Parcelable.Creator<BufferPeriod> CREATOR
+             = new Parcelable.Creator<BufferPeriod>() {
+         public BufferPeriod createFromParcel(Parcel in) {
+             return new BufferPeriod(in);
+         }
+
+         public BufferPeriod[] newArray(int size) {
+             return new BufferPeriod[size];
+         }
+     };
+
+    private static void log(String msg) {
+        Log.v(TAG, msg);
+    }
+
+}
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/CaptureHolder.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/CaptureHolder.java
new file mode 100644
index 0000000..99143f2
--- /dev/null
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/CaptureHolder.java
@@ -0,0 +1,326 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.drrickorang.loopback;
+
+import android.content.Context;
+import android.net.Uri;
+import android.util.Log;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.io.PrintWriter;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * Captures systrace, bugreport, and wav snippets. Capable of receiving capture requests from
+ * multiple threads and maintains a queue of the most interesting records.
+ */
+public class CaptureHolder {
+
+    private static final String TAG = "CAPTURE";
+    public static final String STORAGE = "/sdcard/";
+    public static final String DIRECTORY = STORAGE + "Loopback";
+    private static final String SIGNAL_FILE = DIRECTORY + "/loopback_signal";
+    // These suffixes are used to tell the listener script what types of data to collect.
+    // They MUST match the definitions in the script file.
+    private static final String SYSTRACE_SUFFIX = ".trace";
+    private static final String BUGREPORT_SUFFIX = "_bugreport.txt.gz";
+
+    private static final String WAV_SUFFIX = ".wav";
+    private static final String TERMINATE_SIGNAL = "QUIT";
+
+    // Status codes returned by captureState
+    public static final int NEW_CAPTURE_IS_LEAST_INTERESTING = -1;
+    public static final int CAPTURE_ALREADY_IN_PROGRESS = 0;
+    public static final int STATE_CAPTURED = 1;
+    public static final int CAPTURING_DISABLED = 2;
+
+    private final String mFileNamePrefix;
+    private final long mStartTimeMS;
+    private final boolean mIsCapturingWavs;
+    private final boolean mIsCapturingSystraces;
+    private final boolean mIsCapturingBugreports;
+    private final int mCaptureCapacity;
+    private CaptureThread mCaptureThread;
+    private volatile CapturedState mCapturedStates[];
+    private WaveDataRingBuffer mWaveDataBuffer;
+
+    //for creating AudioFileOutput objects
+    private final Context mContext;
+    private final int mSamplingRate;
+
+    public CaptureHolder(int captureCapacity, String fileNamePrefix, boolean captureWavs,
+                         boolean captureSystraces, boolean captureBugreports, Context context,
+                         int samplingRate) {
+        mCaptureCapacity = captureCapacity;
+        mFileNamePrefix = fileNamePrefix;
+        mIsCapturingWavs = captureWavs;
+        mIsCapturingSystraces = captureSystraces;
+        mIsCapturingBugreports = captureBugreports;
+        mStartTimeMS = System.currentTimeMillis();
+        mCapturedStates = new CapturedState[mCaptureCapacity];
+        mContext = context;
+        mSamplingRate = samplingRate;
+    }
+
+    public void setWaveDataBuffer(WaveDataRingBuffer waveDataBuffer) {
+        mWaveDataBuffer = waveDataBuffer;
+    }
+
+    /**
+     * Launches a thread to capture a systrace/bugreport and/or wav snippet and insert it into the
+     * collection. Returns immediately if capturing is not enabled or a capture thread is already
+     * running. If the newly requested capture is determined to be less interesting than all
+     * previous captures, it returns without running the capture thread.
+     *
+     * Can be called from both GlitchDetectionThread and the SLES/Java buffer callbacks.
+     * The rank parameter and the time of capture are used by getIndexOfLeastInterestingCapture
+     * to determine which records to delete when at capacity. Rank could therefore represent
+     * glitchiness or callback behaviour, and the comparisons will need to be adjusted based on
+     * testing priorities.
+     *
+     * Please note that calling this from an audio thread could cause glitches because this
+     * synchronized method blocks. Additionally, capturing a systrace and bugreport and writing
+     * them to disk will likely have an effect on audio performance.
+     */
+    public synchronized int captureState(int rank) {
+
+        if (!isCapturing()) {
+            Log.d(TAG, "captureState: Capturing state not enabled");
+            return CAPTURING_DISABLED;
+        }
+
+        if (mCaptureThread != null && mCaptureThread.getState() != Thread.State.TERMINATED) {
+            // Capture already in progress
+            Log.d(TAG, "captureState: Capture thread already running");
+            mCaptureThread.updateRank(rank);
+            return CAPTURE_ALREADY_IN_PROGRESS;
+        }
+
+        long timeFromTestStartMS = System.currentTimeMillis() - mStartTimeMS;
+        long hours = TimeUnit.MILLISECONDS.toHours(timeFromTestStartMS);
+        long minutes = TimeUnit.MILLISECONDS.toMinutes(timeFromTestStartMS) -
+                TimeUnit.HOURS.toMinutes(TimeUnit.MILLISECONDS.toHours(timeFromTestStartMS));
+        long seconds = TimeUnit.MILLISECONDS.toSeconds(timeFromTestStartMS) -
+                TimeUnit.MINUTES.toSeconds(TimeUnit.MILLISECONDS.toMinutes(timeFromTestStartMS));
+        String timeString = String.format("%02dh%02dm%02ds", hours, minutes, seconds);
+
+        String fileNameBase = STORAGE + mFileNamePrefix + '_' + timeString;
+        CapturedState cs = new CapturedState(fileNameBase, timeFromTestStartMS, rank);
+
+        int indexOfLeastInteresting = getIndexOfLeastInterestingCapture(cs);
+        if (indexOfLeastInteresting == NEW_CAPTURE_IS_LEAST_INTERESTING) {
+            Log.d(TAG, "captureState: All Previously captured states were more interesting than" +
+                    " requested capture");
+            return NEW_CAPTURE_IS_LEAST_INTERESTING;
+        }
+
+        mCaptureThread = new CaptureThread(cs, indexOfLeastInteresting);
+        mCaptureThread.start();
+
+        return STATE_CAPTURED;
+    }
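+    // Illustrative usage (a sketch; the caller and variable names here are hypothetical, not an
+    // existing call site in this file): a glitch detector might request a capture after a bad
+    // interval, using the number of glitches observed as the rank so that glitchier intervals
+    // displace less interesting ones:
+    //
+    //     int result = mCaptureHolder.captureState(glitchesInLastInterval);
+    //     if (result == CaptureHolder.CAPTURE_ALREADY_IN_PROGRESS) {
+    //         // the in-flight capture's rank was bumped instead of starting a new capture
+    //     }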
+
+    /**
+     * Sends a signal to the listener script telling it to terminate and stop atrace
+     **/
+    public void stopLoopbackListenerScript() {
+        if (mCaptureThread == null || !mCaptureThread.stopLoopbackListenerScript()) {
+            // The capture thread is unable to execute this operation.
+            stopLoopbackListenerScriptImpl();
+        }
+    }
+
+    static void stopLoopbackListenerScriptImpl() {
+        try {
+            OutputStream outputStream = new FileOutputStream(SIGNAL_FILE);
+            outputStream.write(TERMINATE_SIGNAL.getBytes());
+            outputStream.close();
+        } catch (IOException e) {
+            e.printStackTrace();
+        }
+
+        Log.d(TAG, "stopLoopbackListenerScript: Signaled Listener Script to exit");
+    }
+
+    /**
+     * Currently returns the recorded state with the lowest glitch count (rank).
+     * Alternate criteria can be established here and in the captureState rank parameter.
+     *
+     * Returns -1 (NEW_CAPTURE_IS_LEAST_INTERESTING) if the candidate is the least interesting,
+     * otherwise returns the index of the record to replace.
+     */
+    private int getIndexOfLeastInterestingCapture(CapturedState candidateCS) {
+        CapturedState leastInteresting = candidateCS;
+        int index = NEW_CAPTURE_IS_LEAST_INTERESTING;
+        for (int i = 0; i < mCapturedStates.length; i++) {
+            if (mCapturedStates[i] == null) {
+                // Array is not yet at capacity, insert in next available position
+                return i;
+            }
+            if (mCapturedStates[i].rank < leastInteresting.rank) {
+                index = i;
+                leastInteresting = mCapturedStates[i];
+            }
+        }
+        return index;
+    }
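+    // Worked example (assumed values): with a capacity of 3 holding captures of ranks {5, 2, 7},
+    // a candidate of rank 4 replaces index 1 (rank 2), while a candidate of rank 1 yields
+    // NEW_CAPTURE_IS_LEAST_INTERESTING and is dropped.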
+
+    public boolean isCapturing() {
+        return mIsCapturingWavs || mIsCapturingSystraces || mIsCapturingBugreports;
+    }
+
+    /**
+     * Data struct for the filenames of previously captured results. Rank and time of capture can
+     * be used to determine position in the rolling queue.
+     */
+    private class CapturedState {
+        public final String fileNameBase;
+        public final long timeFromStartOfTestMS;
+        public int rank;
+
+        public CapturedState(String fileNameBase, long timeFromStartOfTestMS, int rank) {
+            this.fileNameBase = fileNameBase;
+            this.timeFromStartOfTestMS = timeFromStartOfTestMS;
+            this.rank = rank;
+        }
+
+        @Override
+        public String toString() {
+            return "CapturedState { fileName:" + fileNameBase + ", Rank:" + rank + "}";
+        }
+    }
+
+    private class CaptureThread extends Thread {
+
+        private CapturedState mNewCapturedState;
+        private int mIndexToPlace;
+        private boolean mIsRunning;
+        private boolean mSignalScriptToQuit;
+
+        /**
+         * Creates a new thread with a capture state struct for the captured systrace, bugreport and wav
+         **/
+        public CaptureThread(CapturedState cs, int indexToPlace) {
+            mNewCapturedState = cs;
+            mIndexToPlace = indexToPlace;
+            setName("CaptureThread");
+            setPriority(Thread.MIN_PRIORITY);
+        }
+
+        @Override
+        public void run() {
+            synchronized (this) {
+                mIsRunning = true;
+            }
+
+            // Write names of desired captures to signal file, signalling
+            // the listener script to write systrace and/or bugreport to those files
+            if (mIsCapturingSystraces || mIsCapturingBugreports) {
+                Log.d(TAG, "CaptureThread: signaling listener to write to:" +
+                        mNewCapturedState.fileNameBase + "*");
+                try {
+                    PrintWriter writer = new PrintWriter(SIGNAL_FILE);
+                    // mNewCapturedState.fileNameBase is the path and basename of the state files.
+                    // Each suffix is used to tell the listener script to record that type of data.
+                    if (mIsCapturingSystraces) {
+                        writer.println(mNewCapturedState.fileNameBase + SYSTRACE_SUFFIX);
+                    }
+                    if (mIsCapturingBugreports) {
+                        writer.println(mNewCapturedState.fileNameBase + BUGREPORT_SUFFIX);
+                    }
+                    writer.close();
+                } catch (IOException e) {
+                    e.printStackTrace();
+                }
+            }
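+            // For illustration (assuming both capture types are enabled and a fileNameBase of
+            // ".../prefix_00h05m30s"), the signal file would then contain two lines:
+            //     .../prefix_00h05m30s.trace
+            //     .../prefix_00h05m30s_bugreport.txt.gz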
+
+            // Write wav if member mWaveDataBuffer has been set
+            if (mIsCapturingWavs && mWaveDataBuffer != null) {
+                Log.d(TAG, "CaptureThread: begin Writing wav data to file");
+                WaveDataRingBuffer.ReadableWaveDeck deck = mWaveDataBuffer.getWaveDeck();
+                if (deck != null) {
+                    AudioFileOutput audioFile = new AudioFileOutput(mContext,
+                            Uri.parse("file://mnt" + mNewCapturedState.fileNameBase
+                                    + WAV_SUFFIX),
+                            mSamplingRate);
+                    boolean success = deck.writeToFile(audioFile);
+                    Log.d(TAG, "CaptureThread: wav data written successfully: " + success);
+                }
+            }
+
+            // Wait for the systrace and bugreport captures to finish;
+            // the loopback listener script signals completion by deleting the signal file
+            if (mIsCapturingSystraces || mIsCapturingBugreports) {
+                File signalFile = new File(SIGNAL_FILE);
+                while (signalFile.exists()) {
+                    try {
+                        sleep(100);
+                    } catch (InterruptedException e) {
+                        e.printStackTrace();
+                    }
+                }
+            }
+
+            // Delete least interesting if necessary and insert new capture in list
+            String[] suffixes = {SYSTRACE_SUFFIX, BUGREPORT_SUFFIX, WAV_SUFFIX};
+            if (mCapturedStates[mIndexToPlace] != null) {
+                Log.d(TAG, "Deleting capture: " + mCapturedStates[mIndexToPlace]);
+                for (String suffix : suffixes) {
+                    File oldFile = new File(mCapturedStates[mIndexToPlace].fileNameBase + suffix);
+                    boolean deleted = oldFile.delete();
+                    if (!deleted) {
+                        Log.d(TAG, "Delete old capture: " + oldFile.toString() +
+                                (oldFile.exists() ? " unable to delete" : " was not present"));
+                    }
+                }
+            }
+            Log.d(TAG, "Adding capture to list: " + mNewCapturedState);
+            mCapturedStates[mIndexToPlace] = mNewCapturedState;
+
+            // Log captured states
+            StringBuilder log = new StringBuilder("Captured states:");
+            for (CapturedState cs : mCapturedStates) log.append("\n....").append(cs);
+            Log.d(TAG, log.toString());
+
+            synchronized (this) {
+                if (mSignalScriptToQuit) {
+                    CaptureHolder.stopLoopbackListenerScriptImpl();
+                    mSignalScriptToQuit = false;
+                }
+                mIsRunning = false;
+            }
+            Log.d(TAG, "Completed capture thread terminating");
+        }
+
+        // Sets the rank of the current capture to rank if it is greater than the current value
+        public synchronized void updateRank(int rank) {
+            mNewCapturedState.rank = Math.max(mNewCapturedState.rank, rank);
+        }
+
+        public synchronized boolean stopLoopbackListenerScript() {
+            if (mIsRunning) {
+                mSignalScriptToQuit = true;
+                return true;
+            } else {
+                return false;
+            }
+        }
+    }
+}
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/CatchEventsEditText.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/CatchEventsEditText.java
new file mode 100644
index 0000000..b8b3f8f
--- /dev/null
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/CatchEventsEditText.java
@@ -0,0 +1,75 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.drrickorang.loopback;
+
+import android.content.Context;
+import android.util.AttributeSet;
+import android.view.KeyEvent;
+import android.view.inputmethod.EditorInfo;
+import android.widget.EditText;
+import android.widget.TextView;
+
+/**
+ *  Provides a callback for both soft-keyboard dismissal and the confirm-submission button
+ */
+public class CatchEventsEditText extends EditText implements TextView.OnEditorActionListener {
+
+    public interface EditTextEventListener {
+        public void textEdited(EditText v);
+    }
+
+    private EditTextEventListener mEditListener;
+
+    public CatchEventsEditText(Context context) {
+        super(context);
+        setOnEditorActionListener(this);
+    }
+
+    public CatchEventsEditText(Context context, AttributeSet attrs) {
+        super(context, attrs);
+        setOnEditorActionListener(this);
+    }
+
+    public CatchEventsEditText(Context context, AttributeSet attrs, int defStyleAttr) {
+        super(context, attrs, defStyleAttr);
+        setOnEditorActionListener(this);
+    }
+
+    public void setEditTextEvenListener(EditTextEventListener listener) {
+        mEditListener = listener;
+    }
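+    // Illustrative usage (a sketch; the view id and listener body are hypothetical):
+    //
+    //     CatchEventsEditText field = (CatchEventsEditText) findViewById(R.id.some_field);
+    //     field.setEditTextEvenListener(new CatchEventsEditText.EditTextEventListener() {
+    //         @Override
+    //         public void textEdited(EditText v) {
+    //             // parse v.getText() and apply the new setting here
+    //         }
+    //     });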
+
+    @Override
+    public boolean onEditorAction(TextView v, int actionId, KeyEvent event) {
+        if ((event != null && (event.getKeyCode() == KeyEvent.KEYCODE_ENTER))
+                || (actionId == EditorInfo.IME_ACTION_DONE)) {
+            mEditListener.textEdited(this);
+        }
+        // Necessary to return false even when the event is handled, so that the soft keyboard is
+        // dismissed. This differs from click-listener chains, where the first listener to handle
+        // the event returns true.
+        return false;
+    }
+
+    @Override
+    public boolean onKeyPreIme(int keyCode, KeyEvent event) {
+        if (event.getKeyCode() == KeyEvent.KEYCODE_BACK
+                && event.getAction() == KeyEvent.ACTION_UP) {
+            mEditListener.textEdited(this);
+        }
+        return super.onKeyPreIme(keyCode, event);
+    }
+}
\ No newline at end of file
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/Constant.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/Constant.java
new file mode 100644
index 0000000..f132e3f
--- /dev/null
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/Constant.java
@@ -0,0 +1,90 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.drrickorang.loopback;
+
+
+/**
+ * This file stores constants that are used across multiple files.
+ */
+
+public class Constant {
+    public static final double TWO_PI = 2.0 * Math.PI;
+    public static final long   NANOS_PER_MILLI = 1000000;
+    public static final int    MILLIS_PER_SECOND = 1000;
+    public static final int    SECONDS_PER_HOUR = 3600;
+
+    public static final int LOOPBACK_PLUG_AUDIO_THREAD_TEST_TYPE_LATENCY = 222;
+    public static final int LOOPBACK_PLUG_AUDIO_THREAD_TEST_TYPE_BUFFER_PERIOD = 223;
+    public static final int LOOPBACK_PLUG_AUDIO_THREAD_TEST_TYPE_CALIBRATION = 224;
+
+    public static final int AUDIO_THREAD_TYPE_JAVA = 0;
+    public static final int AUDIO_THREAD_TYPE_NATIVE = 1;
+
+    public static final int BYTES_PER_SHORT = 2;
+    public static final int SHORTS_PER_INT = 2;
+    // FIXME Assumes 16-bit and mono, will not work for other bit depths or multi-channel.
+    public static final int BYTES_PER_FRAME = 2;    // bytes per sample
+
+    // prime numbers that don't overlap with FFT frequencies
+    public static final double PRIME_FREQUENCY_1 = 703.0;
+    public static final double PRIME_FREQUENCY_2 = 719.0;
+
+    // amplitude for ToneGeneration
+    public static final double SINE_WAVE_AMPLITUDE = 0.8;
+    public static final double TWO_SINE_WAVES_AMPLITUDE = 0.4;
+
+    // the number used to configure PipeShort/PipeByteBuffer
+    public static final int MAX_SHORTS = 65536;
+
+    // used to indicate that a value is currently unknown
+    public static final int UNKNOWN = -1;
+
+    // used when joining a thread
+    public static final int JOIN_WAIT_TIME_MS = 1000;
+
+    // Loopback on Java thread test audio tone constants
+    public static final int LOOPBACK_SAMPLE_FRAMES = 300;
+    public static final double LOOPBACK_AMPLITUDE = 0.95;
+    public static final int LOOPBACK_FREQUENCY = 4000;
+
+    // Settings Activity and ADB constants
+    public static final int SAMPLING_RATE_MAX = 48000;
+    public static final int SAMPLING_RATE_MIN = 8000;
+    public static final int PLAYER_BUFFER_FRAMES_MAX = 8000;
+    public static final int PLAYER_BUFFER_FRAMES_MIN = 16;
+    public static final int RECORDER_BUFFER_FRAMES_MAX = 8000;
+    public static final int RECORDER_BUFFER_FRAMES_MIN = 16;
+    public static final int BUFFER_TEST_DURATION_SECONDS_MAX = 36000;
+    public static final int BUFFER_TEST_DURATION_SECONDS_MIN = 1;
+    public static final int BUFFER_TEST_WAVE_PLOT_DURATION_SECONDS_MAX = 120;
+    public static final int BUFFER_TEST_WAVE_PLOT_DURATION_SECONDS_MIN = 1;
+    public static final int MAX_NUM_LOAD_THREADS = 20;
+    public static final int MIN_NUM_LOAD_THREADS = 0;
+    public static final int MIN_NUM_CAPTURES = 1;
+    public static final int MAX_NUM_CAPTURES = 100;
+    public static final int DEFAULT_NUM_CAPTURES = 5;
+    public static final int MIN_IGNORE_FIRST_FRAMES = 0;
+    // impulse happens after 300 ms and shouldn't be ignored
+    public static final int MAX_IGNORE_FIRST_FRAMES = SAMPLING_RATE_MAX * 3 / 10;
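+    // e.g. 48000 * 3 / 10 = 14400 frames, i.e. 300 ms at the maximum sampling rate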
+    public static final int DEFAULT_IGNORE_FIRST_FRAMES = 0;
+
+
+    // Controls the size of pre-allocated timestamp arrays
+    public static final int MAX_RECORDED_LATE_CALLBACKS_PER_SECOND = 2;
+    // Ignore first few buffer callback periods
+    public static final int BUFFER_PERIOD_DISCARD = 10;
+}
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/Correlation.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/Correlation.java
new file mode 100644
index 0000000..6c59bd9
--- /dev/null
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/Correlation.java
@@ -0,0 +1,240 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.drrickorang.loopback;
+
+import android.os.Bundle;
+import android.os.Parcel;
+import android.os.Parcelable;
+import android.util.Log;
+
+
+/**
+ * This class is used to automatically estimate latency and its confidence.
+ */
+
+public class Correlation implements Parcelable {
+    private static final String TAG = "Correlation";
+
+    private int       mBlockSize = 4096;
+    private int       mSamplingRate;
+    private double [] mDataDownsampled = new double [mBlockSize];
+    private double [] mDataAutocorrelated = new double[mBlockSize];
+
+    public double mEstimatedLatencySamples = 0;
+    public double mEstimatedLatencyMs = 0;
+    public double mEstimatedLatencyConfidence = 0.0;
+    public double mAverage = 0.0;
+    public double mRms = 0.0;
+
+    private double mAmplitudeThreshold = 0.001;  // 0.001 = -60 dB noise
+
+    private boolean mDataIsValid = false; // Used to mark that computed latency information is available
+
+    public Correlation() {
+        // Default constructor for when no data will be restored
+    }
+
+    public void init(int blockSize, int samplingRate) {
+        mBlockSize = blockSize;
+        mSamplingRate = samplingRate;
+    }
+
+
+    public void computeCorrelation(double [] data, int samplingRate) {
+        log("Started Auto Correlation for data with " + data.length + " points");
+        mSamplingRate = samplingRate;
+
+        downsampleData(data, mDataDownsampled, mAmplitudeThreshold);
+
+        //correlation vector
+        autocorrelation(mDataDownsampled, mDataAutocorrelated);
+
+
+        int N = data.length; //all samples available
+        double groupSize =  (double) N / mBlockSize;  //samples per downsample point.
+
+        double maxValue = 0;
+        int maxIndex = -1;
+
+        double minLatencyMs = 8; //min latency expected. This algorithm should be improved.
+        int minIndex = (int) (0.5 + minLatencyMs * mSamplingRate / (groupSize * 1000));
+
+        double average = 0;
+        double rms = 0;
+
+        //find max
+        for (int i = minIndex; i < mDataAutocorrelated.length; i++) {
+            average += mDataAutocorrelated[i];
+            rms += mDataAutocorrelated[i] * mDataAutocorrelated[i];
+            if (mDataAutocorrelated[i] > maxValue) {
+                maxValue = mDataAutocorrelated[i];
+                maxIndex = i;
+            }
+        }
+
+        rms = Math.sqrt(rms / mDataAutocorrelated.length);
+        average = average / mDataAutocorrelated.length;
+        log(String.format(" Maxvalue %f, max Index : %d/%d (%d)  minIndex = %d", maxValue, maxIndex,
+                          mDataAutocorrelated.length, data.length, minIndex));
+        log(String.format("  average : %.3f  rms: %.3f", average, rms));
+
+        mAverage = average;
+        mRms = rms;
+
+        mEstimatedLatencyConfidence = 0.0;
+        if (average > 0) {
+            double factor = 3.0;
+
+            double raw = (rms - average) / (factor * average);
+            log(String.format("Raw: %.3f", raw));
+            mEstimatedLatencyConfidence = Math.max(Math.min(raw, 1.0), 0.0);
+        }
+        log(String.format(" ****Confidence: %.2f", mEstimatedLatencyConfidence));
+
+        mEstimatedLatencySamples = maxIndex * groupSize;
+        mEstimatedLatencyMs = mEstimatedLatencySamples * 1000 / mSamplingRate;
+        log(String.format(" latencySamples: %.2f  %.2f ms", mEstimatedLatencySamples,
+                          mEstimatedLatencyMs));
+
+        mDataIsValid = mEstimatedLatencyMs > 0.0001;
+    }
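+    // Illustrative usage (a sketch; the caller and the recorded buffer are assumed, not defined
+    // in this file):
+    //
+    //     Correlation correlation = new Correlation();
+    //     correlation.init(4096, 48000);                        // block size and sampling rate
+    //     correlation.computeCorrelation(recordedData, 48000);
+    //     if (correlation.isValid()) {
+    //         double latencyMs = correlation.mEstimatedLatencyMs;
+    //         double confidence = correlation.mEstimatedLatencyConfidence;  // 0.0 to 1.0
+    //     }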
+
+    // Called by LoopbackActivity before displaying latency test results
+    public boolean isValid() {
+        return mDataIsValid;
+    }
+
+    // Called at beginning of new test
+    public void invalidate() {
+        mDataIsValid = false;
+    }
+
+    private boolean downsampleData(double [] data, double [] dataDownsampled, double threshold) {
+
+        boolean status;
+        for (int i = 0; i < mBlockSize; i++) {
+            dataDownsampled[i] = 0;
+        }
+
+        int N = data.length; //all samples available
+        double groupSize =  (double) N / mBlockSize;
+
+        int ignored = 0;
+
+        int currentIndex = 0;
+        double nextGroup = groupSize;
+        for (int i = 0; i < N && currentIndex < mBlockSize; i++) {
+
+            if (i > nextGroup) { //advanced to next group.
+                currentIndex++;
+                nextGroup += groupSize;
+            }
+
+            if (currentIndex >= mBlockSize) {
+                break;
+            }
+
+            double value =  Math.abs(data[i]);
+            if (value >= threshold) {
+                dataDownsampled[currentIndex] += value;
+            } else {
+                ignored++;
+            }
+        }
+
+        log(String.format(" Threshold: %.3f, ignored:%d/%d (%%.2f)",
+                threshold, ignored, N, (double) ignored/(double)N));
+
+        status = true;
+        return status;
+    }
+
+
+    private boolean autocorrelation(double [] data, double [] dataOut) {
+        boolean status = false;
+
+        double sumsquared = 0;
+        int N = data.length;
+        for (int i = 0; i < N; i++) {
+            double value = data[i];
+            sumsquared += value * value;
+        }
+
+        if (sumsquared > 0) {
+            //correlate (not circular correlation)
+            for (int i = 0; i < N; i++) {
+                dataOut[i] = 0;
+                for (int j = 0; j < N - i; j++) {
+
+                    dataOut[i] += data[j] * data[i + j];
+                }
+                dataOut[i] = dataOut[i] / sumsquared;
+            }
+            status = true;
+        }
+
+        return status;
+    }
+
+    @Override
+    public int describeContents() {
+        return 0;
+    }
+
+    // Store the results before this object is destroyed
+    @Override
+    public void writeToParcel(Parcel dest, int flags) {
+        Bundle bundle = new Bundle();
+        bundle.putBoolean("mDataIsValid", mDataIsValid);
+        if(mDataIsValid) {
+            bundle.putDouble("mEstimatedLatencySamples", mEstimatedLatencySamples);
+            bundle.putDouble("mEstimatedLatencyMs", mEstimatedLatencyMs);
+            bundle.putDouble("mEstimatedLatencyConfidence", mEstimatedLatencyConfidence);
+            bundle.putDouble("mAverage", mAverage);
+            bundle.putDouble("mRms", mRms);
+        }
+        dest.writeBundle(bundle);
+    }
+
+    // Restore the results which were previously calculated
+    private Correlation(Parcel in) {
+        Bundle bundle = in.readBundle(getClass().getClassLoader());
+        mDataIsValid = bundle.getBoolean("mDataIsValid");
+        if(mDataIsValid) {
+            mEstimatedLatencySamples    = bundle.getDouble("mEstimatedLatencySamples");
+            mEstimatedLatencyMs         = bundle.getDouble("mEstimatedLatencyMs");
+            mEstimatedLatencyConfidence = bundle.getDouble("mEstimatedLatencyConfidence");
+            mAverage                    = bundle.getDouble("mAverage");
+            mRms                        = bundle.getDouble("mRms");
+        }
+    }
+
+    public static final Parcelable.Creator<Correlation> CREATOR
+            = new Parcelable.Creator<Correlation>() {
+        public Correlation createFromParcel(Parcel in) {
+            return new Correlation(in);
+        }
+
+        public Correlation[] newArray(int size) {
+            return new Correlation[size];
+        }
+    };
+
+    private static void log(String msg) {
+        Log.v(TAG, msg);
+    }
+}
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/FFT.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/FFT.java
new file mode 100644
index 0000000..e69efb0
--- /dev/null
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/FFT.java
@@ -0,0 +1,112 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.drrickorang.loopback;
+
+
+/**
+ * This class computes the FFT of input data.
+ * Note: this code originally comes from another project, so there are actually multiple copies
+ * of it. The copies should be merged at some point. Also, no renaming has been done yet, but the
+ * naming should be cleaned up once all copies are merged.
+ */
+
+public class FFT {
+    private int       m;
+    private double[]  cos;   // precomputed cosine tables for FFT
+    private double[]  sin;   // precomputed sine tables for FFT
+    private final int mFFTSamplingSize;
+
+
+    FFT(int FFTSamplingSize) {
+        mFFTSamplingSize = FFTSamplingSize;
+        setUpFFT();
+    }
+
+
+    /** This function is only called in the constructor to set up the tables needed for computing the FFT. */
+    private void setUpFFT() {
+        m = (int) (Math.log(mFFTSamplingSize) / Math.log(2));
+
+        // Make sure n is a power of 2
+        if (mFFTSamplingSize != (1 << m))
+            throw new RuntimeException("FFT sampling size must be power of 2");
+
+        // precomputed tables
+        cos = new double[mFFTSamplingSize / 2];
+        sin = new double[mFFTSamplingSize / 2];
+
+        for (int i = 0; i < mFFTSamplingSize / 2; i++) {
+            cos[i] = Math.cos(-2 * Math.PI * i / mFFTSamplingSize);
+            sin[i] = Math.sin(-2 * Math.PI * i / mFFTSamplingSize);
+        }
+    }
+
+
+    /**
+     * Do FFT, and store the result's real part to "x", imaginary part to "y".
+     */
+    public void fft(double[] x, double[] y, int sign) {
+        int i, j, k, n1, n2, a;
+        double c, s, t1, t2;
+
+        // Bit-reverse
+        j = 0;
+        n2 = mFFTSamplingSize / 2;
+        for (i = 1; i < mFFTSamplingSize - 1; i++) {
+            n1 = n2;
+            while (j >= n1) {
+                j = j - n1;
+                n1 = n1 / 2;
+            }
+            j = j + n1;
+
+            if (i < j) {
+                t1 = x[i];
+                x[i] = x[j];
+                x[j] = t1;
+                t1 = y[i];
+                y[i] = y[j];
+                y[j] = t1;
+            }
+        }
+
+        // FFT
+        n1 = 0;
+        n2 = 1;
+
+        for (i = 0; i < m; i++) {
+            n1 = n2;
+            n2 = n2 + n2;
+            a = 0;
+
+            for (j = 0; j < n1; j++) {
+                c = cos[a];
+                s = sign * sin[a];
+                a += 1 << (m - i - 1);
+
+                for (k = j; k < mFFTSamplingSize; k = k + n2) {
+                    t1 = c * x[k + n1] - s * y[k + n1];
+                    t2 = s * x[k + n1] + c * y[k + n1];
+                    x[k + n1] = x[k] - t1;
+                    y[k + n1] = y[k] - t2;
+                    x[k] = x[k] + t1;
+                    y[k] = y[k] + t2;
+                }
+            }
+        }
+    }
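+    // Illustrative usage (a sketch; the input array and the forward-transform sign convention
+    // sign = 1 are assumptions of this example):
+    //
+    //     int n = 4096;                                        // must be a power of 2
+    //     FFT fft = new FFT(n);
+    //     double[] re = java.util.Arrays.copyOf(samples, n);   // real part, transformed in place
+    //     double[] im = new double[n];                         // imaginary part starts at zero
+    //     fft.fft(re, im, 1);
+    //     double magnitude0 = Math.sqrt(re[0] * re[0] + im[0] * im[0]);  // bin magnitudes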
+}
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/GlitchAndCallbackHeatMapView.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/GlitchAndCallbackHeatMapView.java
new file mode 100644
index 0000000..de24e81
--- /dev/null
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/GlitchAndCallbackHeatMapView.java
@@ -0,0 +1,498 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.drrickorang.loopback;
+
+import android.content.Context;
+import android.graphics.Bitmap;
+import android.graphics.Canvas;
+import android.graphics.Color;
+import android.graphics.Paint;
+import android.graphics.Rect;
+import android.graphics.RectF;
+import android.view.View;
+import android.widget.LinearLayout.LayoutParams;
+
+/**
+ * Creates a heat map graphic for glitches and callback durations over the time period of the test.
+ * An instantiated view is used for displaying the heat map on an Android device; the static
+ * methods can be used without an instantiated view to draw the graph onto a canvas for export as
+ * an image file.
+ */
+public class GlitchAndCallbackHeatMapView extends View {
+
+    private final BufferCallbackTimes mPlayerCallbackTimes;
+    private final BufferCallbackTimes mRecorderCallbackTimes;
+    private final int[] mGlitchTimes;
+    private boolean mGlitchesExceededCapacity;
+    private final int mTestDurationSeconds;
+    private final String mTitle;
+
+    private static final int MILLIS_PER_SECOND = 1000;
+    private static final int SECONDS_PER_MINUTE = 60;
+    private static final int MINUTES_PER_HOUR = 60;
+    private static final int SECONDS_PER_HOUR = 3600;
+
+    private static final int LABEL_SIZE = 36;
+    private static final int TITLE_SIZE = 80;
+    private static final int LINE_WIDTH = 5;
+    private static final int INNER_MARGIN = 20;
+    private static final int OUTER_MARGIN = 60;
+    private static final int COLOR_LEGEND_AREA_WIDTH = 250;
+    private static final int COLOR_LEGEND_WIDTH = 75;
+    private static final int EXCEEDED_LEGEND_WIDTH = 150;
+    private static final int MAX_DURATION_FOR_SECONDS_BUCKET = 240;
+    private static final int NUM_X_AXIS_TICKS = 9;
+    private static final int NUM_LEGEND_LABELS = 5;
+    private static final int TICK_SIZE = 30;
+
+    private static final int MAX_COLOR = 0xFF0D47A1; // Dark Blue
+    private static final int START_COLOR = Color.WHITE;
+    private static final float LOG_FACTOR = 2.0f; // >=1 Higher value creates a more linear curve
+
+    public GlitchAndCallbackHeatMapView(Context context, BufferCallbackTimes recorderCallbackTimes,
+                                        BufferCallbackTimes playerCallbackTimes, int[] glitchTimes,
+                                        boolean glitchesExceededCapacity, int testDurationSeconds,
+                                        String title) {
+        super(context);
+
+        mRecorderCallbackTimes = recorderCallbackTimes;
+        mPlayerCallbackTimes = playerCallbackTimes;
+        mGlitchTimes = glitchTimes;
+        mGlitchesExceededCapacity = glitchesExceededCapacity;
+        mTestDurationSeconds = testDurationSeconds;
+        mTitle = title;
+
+        setLayoutParams(new LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT));
+        setWillNotDraw(false);
+    }
+
+    @Override
+    protected void onDraw(Canvas canvas) {
+        super.onDraw(canvas);
+        Bitmap bmpResult = Bitmap.createBitmap(canvas.getHeight(), canvas.getWidth(),
+                Bitmap.Config.ARGB_8888);
+        // Provide a rotated canvas to the fillCanvas method
+        Canvas tmpCanvas = new Canvas(bmpResult);
+        fillCanvas(tmpCanvas, mRecorderCallbackTimes, mPlayerCallbackTimes, mGlitchTimes,
+                mGlitchesExceededCapacity, mTestDurationSeconds, mTitle);
+        tmpCanvas.translate(-1 * tmpCanvas.getWidth(), 0);
+        tmpCanvas.rotate(-90, tmpCanvas.getWidth(), 0);
+        // Display the landscape-oriented image on the Android device
+        canvas.drawBitmap(bmpResult, tmpCanvas.getMatrix(), new Paint(Paint.ANTI_ALIAS_FLAG));
+    }
+
+    /**
+     * Draw a heat map of callbacks and glitches for display on Android device or for export as png
+     */
+    public static void fillCanvas(final Canvas canvas,
+                                  final BufferCallbackTimes recorderCallbackTimes,
+                                  final BufferCallbackTimes playerCallbackTimes,
+                                  final int[] glitchTimes, final boolean glitchesExceededCapacity,
+                                  final int testDurationSeconds, final String title) {
+
+        final Paint heatPaint = new Paint(Paint.ANTI_ALIAS_FLAG);
+        heatPaint.setStyle(Paint.Style.FILL);
+
+        final Paint textPaint = new Paint(Paint.ANTI_ALIAS_FLAG);
+        textPaint.setColor(Color.BLACK);
+        textPaint.setTextSize(LABEL_SIZE);
+        textPaint.setTextAlign(Paint.Align.CENTER);
+
+        final Paint titlePaint = new Paint(Paint.ANTI_ALIAS_FLAG);
+        titlePaint.setColor(Color.BLACK);
+        titlePaint.setTextAlign(Paint.Align.CENTER);
+        titlePaint.setTextSize(TITLE_SIZE);
+
+        final Paint linePaint = new Paint(Paint.ANTI_ALIAS_FLAG);
+        linePaint.setColor(Color.BLACK);
+        linePaint.setStyle(Paint.Style.STROKE);
+        linePaint.setStrokeWidth(LINE_WIDTH);
+
+        final Paint colorPaint = new Paint(Paint.ANTI_ALIAS_FLAG);
+        colorPaint.setStyle(Paint.Style.STROKE);
+
+        ColorInterpolator colorInter = new ColorInterpolator(START_COLOR, MAX_COLOR);
+
+        Rect textBounds = new Rect();
+        titlePaint.getTextBounds(title, 0, title.length(), textBounds);
+        Rect titleArea = new Rect(0, OUTER_MARGIN, canvas.getWidth(),
+                OUTER_MARGIN + textBounds.height());
+
+        Rect bottomLegendArea = new Rect(0, canvas.getHeight() - LABEL_SIZE - OUTER_MARGIN,
+                canvas.getWidth(), canvas.getHeight() - OUTER_MARGIN);
+
+        int graphWidth = canvas.getWidth() - COLOR_LEGEND_AREA_WIDTH - OUTER_MARGIN * 3;
+        int graphHeight = (bottomLegendArea.top - titleArea.bottom - OUTER_MARGIN * 3) / 2;
+
+        Rect callbackHeatArea = new Rect(0, 0, graphWidth, graphHeight);
+        callbackHeatArea.offsetTo(OUTER_MARGIN, titleArea.bottom + OUTER_MARGIN);
+
+        Rect glitchHeatArea = new Rect(0, 0, graphWidth, graphHeight);
+        glitchHeatArea.offsetTo(OUTER_MARGIN, callbackHeatArea.bottom + OUTER_MARGIN);
+
+        final int bucketSize =
+                testDurationSeconds < MAX_DURATION_FOR_SECONDS_BUCKET ? 1 : SECONDS_PER_MINUTE;
+
+        String units = testDurationSeconds < MAX_DURATION_FOR_SECONDS_BUCKET ? "Second" : "Minute";
+        String glitchLabel = "Glitches Per " + units;
+        String callbackLabel = "Maximum Callback Duration(ms) Per " + units;
+
+        // Create White background
+        canvas.drawColor(Color.WHITE);
+
+        // Label Graph
+        canvas.drawText(title, titleArea.left + titleArea.width() / 2, titleArea.bottom,
+                titlePaint);
+
+        // Callback Graph /////////////
+        // label callback graph
+        Rect graphArea = new Rect(callbackHeatArea);
+        graphArea.left += LABEL_SIZE + INNER_MARGIN;
+        graphArea.bottom -= LABEL_SIZE;
+        graphArea.top += LABEL_SIZE + INNER_MARGIN;
+        canvas.drawText(callbackLabel, graphArea.left + graphArea.width() / 2,
+                graphArea.top - INNER_MARGIN, textPaint);
+
+        int labelX = graphArea.left - INNER_MARGIN;
+        int labelY = graphArea.top + graphArea.height() / 4;
+        canvas.save();
+        canvas.rotate(-90, labelX, labelY);
+        canvas.drawText("Recorder", labelX, labelY, textPaint);
+        canvas.restore();
+        labelY = graphArea.bottom - graphArea.height() / 4;
+        canvas.save();
+        canvas.rotate(-90, labelX, labelY);
+        canvas.drawText("Player", labelX, labelY, textPaint);
+        canvas.restore();
+
+        // draw callback heat graph
+        CallbackGraphData recorderData =
+                new CallbackGraphData(recorderCallbackTimes, bucketSize, testDurationSeconds);
+        CallbackGraphData playerData =
+                new CallbackGraphData(playerCallbackTimes, bucketSize, testDurationSeconds);
+        int maxCallbackValue = Math.max(recorderData.getMax(), playerData.getMax());
+
+        drawHeatMap(canvas, recorderData.getBucketedCallbacks(), maxCallbackValue, colorInter,
+                recorderCallbackTimes.isCapacityExceeded(), recorderData.getLastFilledIndex(),
+                new Rect(graphArea.left + LINE_WIDTH, graphArea.top,
+                        graphArea.right - LINE_WIDTH, graphArea.centerY()));
+        drawHeatMap(canvas, playerData.getBucketedCallbacks(), maxCallbackValue, colorInter,
+                playerCallbackTimes.isCapacityExceeded(), playerData.getLastFilledIndex(),
+                new Rect(graphArea.left + LINE_WIDTH, graphArea.centerY(),
+                        graphArea.right - LINE_WIDTH, graphArea.bottom));
+
+        drawTimeTicks(canvas, testDurationSeconds, bucketSize, callbackHeatArea.bottom,
+                graphArea.bottom, graphArea.left, graphArea.width(), textPaint, linePaint);
+
+        // draw graph border
+        canvas.drawRect(graphArea, linePaint);
+
+        // Callback Legend //////////////
+        if (maxCallbackValue > 0) {
+            Rect legendArea = new Rect(graphArea);
+            legendArea.left = graphArea.right + OUTER_MARGIN * 2;
+            legendArea.right = legendArea.left + COLOR_LEGEND_WIDTH;
+            drawColorLegend(canvas, maxCallbackValue, colorInter, linePaint, textPaint, legendArea);
+        }
+
+
+        // Glitch Graph /////////////
+        // label Glitch graph
+        graphArea.bottom = glitchHeatArea.bottom - LABEL_SIZE;
+        graphArea.top = glitchHeatArea.top + LABEL_SIZE + INNER_MARGIN;
+        canvas.drawText(glitchLabel, graphArea.left + graphArea.width() / 2,
+                graphArea.top - INNER_MARGIN, textPaint);
+
+        // draw glitch heat graph
+        int[] bucketedGlitches = new int[(testDurationSeconds + bucketSize - 1) / bucketSize];
+        int lastFilledGlitchBucket = bucketGlitches(glitchTimes, bucketSize * MILLIS_PER_SECOND,
+                bucketedGlitches);
+        int maxGlitchValue = 0;
+        for (int totalGlitch : bucketedGlitches) {
+            maxGlitchValue = Math.max(totalGlitch, maxGlitchValue);
+        }
+        drawHeatMap(canvas, bucketedGlitches, maxGlitchValue, colorInter,
+                glitchesExceededCapacity, lastFilledGlitchBucket,
+                new Rect(graphArea.left + LINE_WIDTH, graphArea.top,
+                        graphArea.right - LINE_WIDTH, graphArea.bottom));
+
+        drawTimeTicks(canvas, testDurationSeconds, bucketSize,
+                graphArea.bottom + INNER_MARGIN + LABEL_SIZE, graphArea.bottom, graphArea.left,
+                graphArea.width(), textPaint, linePaint);
+
+        // draw graph border
+        canvas.drawRect(graphArea, linePaint);
+
+        // Glitch Legend //////////////
+        if (maxGlitchValue > 0) {
+            Rect legendArea = new Rect(graphArea);
+            legendArea.left = graphArea.right + OUTER_MARGIN * 2;
+            legendArea.right = legendArea.left + COLOR_LEGEND_WIDTH;
+
+            drawColorLegend(canvas, maxGlitchValue, colorInter, linePaint, textPaint, legendArea);
+        }
+
+        // Draw legend for exceeded capacity
+        if (playerCallbackTimes.isCapacityExceeded() || recorderCallbackTimes.isCapacityExceeded()
+                || glitchesExceededCapacity) {
+            RectF exceededArea = new RectF(graphArea.left, bottomLegendArea.top,
+                    graphArea.left + EXCEEDED_LEGEND_WIDTH, bottomLegendArea.bottom);
+            drawExceededMarks(canvas, exceededArea);
+            canvas.drawRect(exceededArea, linePaint);
+            textPaint.setTextAlign(Paint.Align.LEFT);
+            canvas.drawText(" = No Data Available, Recording Capacity Exceeded",
+                    exceededArea.right + INNER_MARGIN, bottomLegendArea.bottom, textPaint);
+            textPaint.setTextAlign(Paint.Align.CENTER);
+        }
+
+    }
+
+    /**
+     * Finds the total number of glitches per minute or second bucket.
+     * Returns the index of the last minute or second bucket with a recorded glitch.
+     */
+    private static int bucketGlitches(int[] glitchTimes, int bucketSizeMS, int[] bucketedGlitches) {
+        int bucketIndex = 0;
+
+        for (int glitchMS : glitchTimes) {
+            bucketIndex = glitchMS / bucketSizeMS;
+            bucketedGlitches[bucketIndex]++;
+        }
+
+        return bucketIndex;
+    }
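+    // Worked example (assumed input): with bucketSizeMS = 1000 and glitchTimes of
+    // {1500, 1700, 64000} ms, bucketedGlitches[1] becomes 2, bucketedGlitches[64] becomes 1,
+    // and the method returns 64.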
+
+    private static void drawHeatMap(Canvas canvas, int[] bucketedValues, int maxValue,
+                                    ColorInterpolator colorInter, boolean capacityExceeded,
+                                    int lastFilledIndex, Rect graphArea) {
+        Paint colorPaint = new Paint(Paint.ANTI_ALIAS_FLAG);
+        colorPaint.setStyle(Paint.Style.FILL);
+        float rectWidth = (float) graphArea.width() / bucketedValues.length;
+        RectF colorRect = new RectF(graphArea.left, graphArea.top, graphArea.left + rectWidth,
+                graphArea.bottom);
+
+        // Values are log scaled to a value between 0 and 1 using the following formula:
+        // (log(value + 1) / log(max + 1))^2
+        // Data is typically concentrated around the extreme high and low values. This log scale
+        // keeps low values visible, and the exponent makes the curve slightly more linear so that
+        // the color gradients remain distinguishable.
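+        // For instance, with max = 100 a bucket value of 10 maps to
+        // (ln(11) / ln(101))^2 ≈ 0.27, rather than the 0.1 a linear scale would give.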
+
+        float logMax = (float) Math.log(maxValue + 1);
+
+        for (int i = 0; i <= lastFilledIndex; ++i) {
+            colorPaint.setColor(colorInter.getInterColor(
+                    (float) Math.pow((Math.log(bucketedValues[i] + 1) / logMax), LOG_FACTOR)));
+            canvas.drawRect(colorRect, colorPaint);
+            colorRect.offset(rectWidth, 0);
+        }
+
+        if (capacityExceeded) {
+            colorRect.right = graphArea.right;
+            drawExceededMarks(canvas, colorRect);
+        }
+    }
+
+    private static void drawColorLegend(Canvas canvas, int maxValue, ColorInterpolator colorInter,
+                                        Paint linePaint, Paint textPaint, Rect legendArea) {
+        Paint colorPaint = new Paint(Paint.ANTI_ALIAS_FLAG);
+        colorPaint.setStyle(Paint.Style.STROKE);
+        colorPaint.setStrokeWidth(1);
+        textPaint.setTextAlign(Paint.Align.LEFT);
+
+        float logMax = (float) Math.log(legendArea.height() + 1);
+        for (int y = legendArea.bottom; y >= legendArea.top; --y) {
+            float inter = (float) Math.pow(
+                    (Math.log(legendArea.bottom - y + 1) / logMax), LOG_FACTOR);
+            colorPaint.setColor(colorInter.getInterColor(inter));
+            canvas.drawLine(legendArea.left, y, legendArea.right, y, colorPaint);
+        }
+
+        int tickSpacing = (maxValue + NUM_LEGEND_LABELS - 1) / NUM_LEGEND_LABELS;
+        for (int i = 0; i < maxValue; i += tickSpacing) {
+            float yPos = legendArea.bottom - (((float) i / maxValue) * legendArea.height());
+            canvas.drawText(Integer.toString(i), legendArea.right + INNER_MARGIN,
+                    yPos + LABEL_SIZE / 2, textPaint);
+            canvas.drawLine(legendArea.right, yPos, legendArea.right - TICK_SIZE, yPos,
+                    linePaint);
+        }
+        canvas.drawText(Integer.toString(maxValue), legendArea.right + INNER_MARGIN,
+                legendArea.top + LABEL_SIZE / 2, textPaint);
+
+        canvas.drawRect(legendArea, linePaint);
+        textPaint.setTextAlign(Paint.Align.CENTER);
+    }
+
+    private static void drawTimeTicks(Canvas canvas, int testDurationSeconds, int bucketSizeSeconds,
+                                      int textYPos, int tickYPos, int startXPos, int width,
+                                      Paint textPaint, Paint linePaint) {
+
+        int secondsPerTick;
+
+        if (bucketSizeSeconds == SECONDS_PER_MINUTE) {
+            secondsPerTick = (((testDurationSeconds / SECONDS_PER_MINUTE) + NUM_X_AXIS_TICKS - 1) /
+                    NUM_X_AXIS_TICKS) * SECONDS_PER_MINUTE;
+        } else {
+            secondsPerTick = (testDurationSeconds + NUM_X_AXIS_TICKS - 1) / NUM_X_AXIS_TICKS;
+        }
+
+        for (int seconds = 0; seconds <= testDurationSeconds - secondsPerTick;
+             seconds += secondsPerTick) {
+            float xPos = startXPos + (((float) seconds / testDurationSeconds) * width);
+
+            if (bucketSizeSeconds == SECONDS_PER_MINUTE) {
+                canvas.drawText(String.format("%dh:%02dm", seconds / SECONDS_PER_HOUR,
+                                (seconds / SECONDS_PER_MINUTE) % MINUTES_PER_HOUR),
+                        xPos, textYPos, textPaint);
+            } else {
+                canvas.drawText(String.format("%dm:%02ds", seconds / SECONDS_PER_MINUTE,
+                                seconds % SECONDS_PER_MINUTE),
+                        xPos, textYPos, textPaint);
+            }
+
+            canvas.drawLine(xPos, tickYPos, xPos, tickYPos - TICK_SIZE, linePaint);
+        }
+
+        //Draw total duration marking on right side of graph
+        if (bucketSizeSeconds == SECONDS_PER_MINUTE) {
+            canvas.drawText(
+                    String.format("%dh:%02dm", testDurationSeconds / SECONDS_PER_HOUR,
+                            (testDurationSeconds / SECONDS_PER_MINUTE) % MINUTES_PER_HOUR),
+                    startXPos + width, textYPos, textPaint);
+        } else {
+            canvas.drawText(
+                    String.format("%dm:%02ds", testDurationSeconds / SECONDS_PER_MINUTE,
+                            testDurationSeconds % SECONDS_PER_MINUTE),
+                    startXPos + width, textYPos, textPaint);
+        }
+    }
+
+    /**
+     * Draw hash marks across a given rectangle, used to indicate no data available for that
+     * time period
+     */
+    private static void drawExceededMarks(Canvas canvas, RectF rect) {
+
+        final float LINE_WIDTH = 8;
+        final int STROKE_COLOR = Color.GRAY;
+        final float STROKE_OFFSET = LINE_WIDTH * 3; //space between lines
+
+        Paint strikePaint = new Paint(Paint.ANTI_ALIAS_FLAG);
+        strikePaint.setColor(STROKE_COLOR);
+        strikePaint.setStyle(Paint.Style.STROKE);
+        strikePaint.setStrokeWidth(LINE_WIDTH);
+
+        canvas.save();
+        canvas.clipRect(rect);
+
+        float startY = rect.bottom + STROKE_OFFSET;
+        float endY = rect.top - STROKE_OFFSET;
+        float startX = rect.left - rect.height();  //creates a 45 degree angle
+        float endX = rect.left;
+
+        for (; startX < rect.right; startX += STROKE_OFFSET, endX += STROKE_OFFSET) {
+            canvas.drawLine(startX, startY, endX, endY, strikePaint);
+        }
+
+        canvas.restore();
+    }
+
+    private static class CallbackGraphData {
+
+        private int[] mBucketedCallbacks;
+        private int mLastFilledIndex;
+
+        /**
+         * Fills buckets with maximum callback duration per minute or second
+         */
+        CallbackGraphData(BufferCallbackTimes callbackTimes, int bucketSizeSeconds,
+                          int testDurationSeconds) {
+            mBucketedCallbacks =
+                    new int[(testDurationSeconds + bucketSizeSeconds - 1) / bucketSizeSeconds];
+            int bucketSizeMS = bucketSizeSeconds * MILLIS_PER_SECOND;
+            int bucketIndex = 0;
+            for (BufferCallbackTimes.BufferCallback callback : callbackTimes) {
+
+                bucketIndex = callback.timeStamp / bucketSizeMS;
+                if (callback.callbackDuration > mBucketedCallbacks[bucketIndex]) {
+                    mBucketedCallbacks[bucketIndex] = callback.callbackDuration;
+                }
+
+                // Original callback bucketing strategy: callbacks within a second/minute were added
+                // together in an attempt to capture the total amount of lateness within a time
+                // period. May become useful for debugging specific problems at some later date.
+                /*if (callback.callbackDuration > callbackTimes.getExpectedBufferPeriod()) {
+                    bucketedCallbacks[bucketIndex] += callback.callbackDuration;
+                }*/
+            }
+            mLastFilledIndex = bucketIndex;
+        }
+
+        public int getMax() {
+            int maxCallbackValue = 0;
+            for (int bucketValue : mBucketedCallbacks) {
+                maxCallbackValue = Math.max(maxCallbackValue, bucketValue);
+            }
+            return maxCallbackValue;
+        }
+
+        public int[] getBucketedCallbacks() {
+            return mBucketedCallbacks;
+        }
+
+        public int getLastFilledIndex() {
+            return mLastFilledIndex;
+        }
+    }
+
+    private static class ColorInterpolator {
+
+        private final int mAlphaStart;
+        private final int mAlphaRange;
+        private final int mRedStart;
+        private final int mRedRange;
+        private final int mGreenStart;
+        private final int mGreenRange;
+        private final int mBlueStart;
+        private final int mBlueRange;
+
+        public ColorInterpolator(int startColor, int endColor) {
+            mAlphaStart = Color.alpha(startColor);
+            mAlphaRange = Color.alpha(endColor) - mAlphaStart;
+
+            mRedStart = Color.red(startColor);
+            mRedRange = Color.red(endColor) - mRedStart;
+
+            mGreenStart = Color.green(startColor);
+            mGreenRange = Color.green(endColor) - mGreenStart;
+
+            mBlueStart = Color.blue(startColor);
+            mBlueRange = Color.blue(endColor) - mBlueStart;
+        }
+
+        /**
+         * Takes a float between 0 and 1 and returns a color int interpolated between the start
+         * and end colors passed to the constructor
+         **/
+        public int getInterColor(float input) {
+
+            return Color.argb(
+                    mAlphaStart + (int) (input * mAlphaRange),
+                    mRedStart + (int) (input * mRedRange),
+                    mGreenStart + (int) (input * mGreenRange),
+                    mBlueStart + (int) (input * mBlueRange)
+            );
+        }
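+        // Worked example with this file's START_COLOR (white) and MAX_COLOR (0xFF0D47A1):
+        // input 0.5 yields Color.argb(255, 255 - 121, 255 - 92, 255 - 47) = 0xFF86A3D0.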
+    }
+}
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/GlitchDetectionThread.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/GlitchDetectionThread.java
new file mode 100644
index 0000000..e52c116
--- /dev/null
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/GlitchDetectionThread.java
@@ -0,0 +1,318 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.drrickorang.loopback;
+
+import android.util.Log;
+
+import java.util.Arrays;
+
+
+/**
+ * This thread is responsible for detecting glitches in the samples.
+ */
+
+public class GlitchDetectionThread extends Thread {
+    private static final String TAG = "GlitchDetectionThread";
+    // the acceptable difference between the expected center of mass and what we actually get
+    private static final double mAcceptablePercentDifference = 0.02; // change this if necessary
+
+    // Measured in FFT samples
+    private static final int GLITCH_CONCENTRATION_WINDOW_SIZE = 1500; // approx 30 seconds at 48kHz
+    private static final int COOLDOWN_WINDOW = 4500; // approx 90 seconds at 48kHz
+
+    private boolean mIsRunning; // condition must be true for the thread to run
+    private short   mShortBuffer[]; // keep the data read from Pipe
+    private int     mShortBufferIndex = 0;
+    private Pipe    mPipe;
+    private static int mThreadSleepDurationMs;
+
+    private double  mDoubleBuffer[]; // keep the data used for FFT calculation
+    private boolean mIsFirstFFT = true; // whether or not it's the first FFT calculation
+
+    private WaveDataRingBuffer mWaveDataRing; // Record last n seconds of wave data
+
+    private final double  mFrequency1;
+    private final double  mFrequency2; //currently not used
+    private final int     mSamplingRate;
+    private final int     mFFTSamplingSize;   // amount of samples used to perform a FFT
+    private final int     mFFTOverlapSamples; // amount of overlapped samples used between two FFTs
+    private final int     mNewSamplesPerFFT;  // amount of new samples (not from last FFT) in a FFT
+    private double  mCenterOfMass;  // expected center of mass of samples
+
+    private final int[]   mGlitches;  // each entry is the index of the FFT in which a glitch was found
+    private int     mGlitchesIndex;
+    private int     mFFTCount; // store the current number of FFT performed
+    private FFT     mFFT;
+    private boolean mGlitchingIntervalTooLong = false; // true if mGlitches is full
+
+    // Pre-Allocated buffers for glitch detection process
+    private final double[] mFFTResult;
+    private final double[] mCurrentSamples;
+    private final double[] mImagArray;
+
+    // Used for captured SysTrace dumps
+    private CaptureHolder mCaptureHolder;
+    private int mLastGlitchCaptureAttempt = 0;
+
+    GlitchDetectionThread(double frequency1, double frequency2, int samplingRate,
+          int FFTSamplingSize, int FFTOverlapSamples, int bufferTestDurationInSeconds,
+          int bufferTestWavePlotDurationInSeconds, Pipe pipe, CaptureHolder captureHolder) {
+        mPipe = pipe;
+        mFrequency1 = frequency1;
+        mFrequency2 = frequency2;
+        mFFTSamplingSize = FFTSamplingSize;
+        mFFTOverlapSamples = FFTOverlapSamples;
+        mNewSamplesPerFFT = mFFTSamplingSize - mFFTOverlapSamples;
+        mSamplingRate = samplingRate;
+        mIsRunning = true;
+
+        mShortBuffer = new short[mFFTSamplingSize];
+        mDoubleBuffer = new double[mFFTSamplingSize];
+        mWaveDataRing = new WaveDataRingBuffer(mSamplingRate * bufferTestWavePlotDurationInSeconds);
+
+        final int acceptableGlitchingIntervalsPerSecond = 10;
+        mGlitches = new int[bufferTestDurationInSeconds * acceptableGlitchingIntervalsPerSecond];
+        mGlitchesIndex = 0;
+        mFFTCount = 0;
+
+        mFFTResult = new double[mFFTSamplingSize/2];
+        mCurrentSamples = new double[mFFTSamplingSize];
+        mImagArray = new double[mFFTSamplingSize];
+
+        mFFT = new FFT(mFFTSamplingSize);
+        computeExpectedCenterOfMass();
+
+        setName("Loopback_GlitchDetection");
+
+        mCaptureHolder = captureHolder;
+        mCaptureHolder.setWaveDataBuffer(mWaveDataRing);
+
+        mThreadSleepDurationMs = FFTOverlapSamples * Constant.MILLIS_PER_SECOND / mSamplingRate;
+        if (mThreadSleepDurationMs < 1) {
+            mThreadSleepDurationMs = 1; // sleeps at least 1ms
+        }
+    }
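+    // For illustration (values assumed, not taken from this file): with a 48 kHz sampling rate
+    // and an overlap of 1024 samples, the thread sleeps roughly 1024 * 1000 / 48000 ≈ 21 ms
+    // between reads.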
+
+
+    public void run() {
+        while (mIsRunning) {
+            int requiredRead;
+            int actualRead;
+
+            requiredRead = mFFTSamplingSize - mShortBufferIndex;
+            actualRead = mPipe.read(mShortBuffer, mShortBufferIndex, requiredRead);
+
+            if (actualRead > 0) {
+                mShortBufferIndex += actualRead;
+            }
+
+            if (actualRead == Pipe.OVERRUN) {
+                log("There's an overrun");
+            }
+
+            // Once we have enough data, we can do an FFT on it. Note that two consecutive FFTs
+            // share mFFTOverlapSamples samples.
+            if (mShortBufferIndex == mFFTSamplingSize) {
+                bufferShortToDouble(mShortBuffer, mDoubleBuffer);
+
+                // copy the data in mDoubleBuffer into mWaveDataRing
+                if (mIsFirstFFT) {
+                    // on the first FFT, copy the whole mDoubleBuffer into the ring buffer
+                    mWaveDataRing.writeWaveData(mDoubleBuffer, 0, mFFTSamplingSize);
+                    mIsFirstFFT = false;
+                } else {
+                    mWaveDataRing.writeWaveData(mDoubleBuffer, mFFTOverlapSamples,
+                            mNewSamplesPerFFT);
+                }
+
+                detectGlitches();
+                // move the newest mFFTOverlapSamples samples to the front of the array; they are
+                // reused in the next FFT
+                System.arraycopy(mShortBuffer, mNewSamplesPerFFT, mShortBuffer,
+                                 0, mFFTOverlapSamples);
+                mShortBufferIndex = mFFTOverlapSamples;
+            } else {
+                try {
+                    sleep(mThreadSleepDurationMs);
+                } catch (InterruptedException e) {
+                    e.printStackTrace();
+                }
+            }
+        }
+
+    }
+
+
+    /** Convert the samples in shortBuffer to normalized doubles and store them in doubleBuffer. */
+    private void bufferShortToDouble(short[] shortBuffer, double[] doubleBuffer) {
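+        // Scaling by 1 / Short.MAX_VALUE maps 16-bit PCM into roughly [-1, 1]: for example,
+        // a sample of 16384 becomes ~0.5 and Short.MIN_VALUE becomes ~-1.00003.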
+        double temp;
+        for (int i = 0; i < shortBuffer.length; i++) {
+            temp = (double) shortBuffer[i];
+            temp *= (1.0 / Short.MAX_VALUE);
+            doubleBuffer[i] = temp;
+        }
+    }
+
+
+    /** Should be called by another thread to stop this thread. */
+    public void requestStop() {
+        mIsRunning = false;
+        interrupt();
+    }
+
+
+    /**
+     * Use the data in mDoubleBuffer to do glitch detection since we know what
+     * data we are expecting.
+     */
+    private void detectGlitches() {
+        double centerOfMass;
+
+        // retrieve a copy of recorded wave data for manipulating and analyzing
+        System.arraycopy(mDoubleBuffer, 0, mCurrentSamples, 0, mDoubleBuffer.length);
+
+        Utilities.hanningWindow(mCurrentSamples);
+
+        double width = (double) mSamplingRate / mCurrentSamples.length;
+        computeFFT(mCurrentSamples, mFFTResult);     // gives an array of sampleSize / 2
+        final double threshold = 0.1;
+
+        // for all elements in the FFT result that are smaller than threshold,
+        // eliminate them as they are probably noise
+        for (int j = 0; j < mFFTResult.length; j++) {
+            if (mFFTResult[j] < threshold) {
+                mFFTResult[j] = 0;
+            }
+        }
+
+        // calculate the center of mass of sample's FFT
+        centerOfMass = computeCenterOfMass(mFFTResult, width);
+        double difference = (Math.abs(centerOfMass - mCenterOfMass) / mCenterOfMass);
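+        // Illustrative example: if the expected center of mass is 1000 Hz and the measured one is
+        // 1200 Hz, difference = |1200 - 1000| / 1000 = 0.2; the current FFT is recorded as a glitch
+        // when this exceeds mAcceptablePercentDifference, or when the input is pure silence.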
+        if (mGlitchesIndex >= mGlitches.length) {
+            // we just want to show this log once and set the flag once.
+            if (!mGlitchingIntervalTooLong) {
+                log("Not enough room to store glitches!");
+                mGlitchingIntervalTooLong = true;
+            }
+        } else {
+            // centerOfMass == -1 if the wave we get is silence.
+            if (difference > mAcceptablePercentDifference || centerOfMass == -1) {
+                // Glitch Detected
+                mGlitches[mGlitchesIndex] = mFFTCount;
+                mGlitchesIndex++;
+                if (mCaptureHolder.isCapturing()) {
+                    checkGlitchConcentration();
+                }
+            }
+        }
+        mFFTCount++;
+    }
+
+    private void checkGlitchConcentration(){
+
+        final int recordedGlitch = mGlitches[mGlitchesIndex-1];
+        if (recordedGlitch - mLastGlitchCaptureAttempt <= COOLDOWN_WINDOW){
+            return;
+        }
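+        // Count the glitches that fall inside the most recent GLITCH_CONCENTRATION_WINDOW_SIZE FFT
+        // intervals; the cooldown reference is only advanced when the CaptureHolder does not reject
+        // the new capture as the least interesting one.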
+
+        final int windowBegin = recordedGlitch - GLITCH_CONCENTRATION_WINDOW_SIZE;
+
+        int numGlitches = 0;
+        for (int index = mGlitchesIndex-1; index >= 0 && mGlitches[index] >= windowBegin; --index){
+            ++numGlitches;
+        }
+
+        int captureResponse = mCaptureHolder.captureState(numGlitches);
+        if (captureResponse != CaptureHolder.NEW_CAPTURE_IS_LEAST_INTERESTING){
+            mLastGlitchCaptureAttempt = recordedGlitch;
+        }
+
+    }
+
+    /** Compute the center of mass of fftResults. Width is the width of each beam. */
+    private double computeCenterOfMass(double[] fftResult, double width) {
+        int length = fftResult.length;
+        double weightedSum = 0;
+        double totalWeight = 0;
+        for (int i = 0; i < length; i++) {
+            weightedSum += fftResult[i] * i;
+            totalWeight += fftResult[i];
+        }
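+        // centerOfMass = width * sum(i * fftResult[i]) / sum(fftResult[i]); since width is
+        // samplingRate / N this approximates the spectral centroid in Hz. Illustrative example:
+        // bins [0, 0, 2, 1] with width = 46.875 Hz give 46.875 * (2*2 + 3*1) / 3 ≈ 109.4 Hz.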
+
+        // totalWeight can be zero because low-magnitude bins are zeroed out as noise,
+        // so if the captured wave is silence every bin gets eliminated.
+        if (totalWeight == 0) {
+            return -1;
+        }
+
+        return (weightedSum * width) / totalWeight;
+    }
+
+
+    /** Compute the FFT magnitudes of "src" and store them in "dst" (of length src.length / 2). */
+    private void computeFFT(double[] src, double[] dst) {
+        Arrays.fill(mImagArray, 0);
+        mFFT.fft(src, mImagArray, 1);    // fft() works in place, overwriting src (real) and mImagArray (imaginary)
+
+
+        for (int i = 0; i < (src.length / 2); i++) {
+            dst[i] = Math.sqrt(src[i] * src[i] + mImagArray[i] * mImagArray[i]);
+        }
+
+    }
+
+
+    /** Compute the center of mass expected when the samples are a glitch-free sine wave at mFrequency1. */
+    private void computeExpectedCenterOfMass() {
+        SineWaveTone sineWaveTone = new SineWaveTone(mSamplingRate, mFrequency1);
+        double[] sineWave = new double[mFFTSamplingSize];
+        double centerOfMass;
+        double[] sineFFTResult = new double[mFFTSamplingSize/2];
+
+        sineWaveTone.generateTone(sineWave, mFFTSamplingSize);
+        Utilities.hanningWindow(sineWave);
+        double width = (double) mSamplingRate / sineWave.length;
+
+        computeFFT(sineWave, sineFFTResult);     // gives an array of size mFFTSamplingSize / 2
+        centerOfMass = computeCenterOfMass(sineFFTResult, width);
+        mCenterOfMass = centerOfMass;
+        log("the expected center of mass: " + Double.toString(mCenterOfMass));
+    }
+
+
+    public double[] getWaveData() {
+        return mWaveDataRing.getWaveRecord();
+    }
+
+
+    public boolean getGlitchingIntervalTooLong() {
+        return mGlitchingIntervalTooLong;
+    }
+
+
+    public int[] getGlitches() {
+        //return a copy of recorded glitches in an array sized to hold only recorded glitches
+        int[] output = new int[mGlitchesIndex];
+        System.arraycopy(mGlitches, 0, output, 0, mGlitchesIndex);
+        return output;
+    }
+
+
+    private static void log(String msg) {
+        Log.v(TAG, msg);
+    }
+
+}
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/GlitchesStringBuilder.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/GlitchesStringBuilder.java
new file mode 100644
index 0000000..535d991
--- /dev/null
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/GlitchesStringBuilder.java
@@ -0,0 +1,107 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.drrickorang.loopback;
+
+import android.util.Log;
+
+
+/**
+ * Creates a list of time intervals where glitches occurred.
+ */
+
+public class GlitchesStringBuilder {
+    private static final String TAG = "GlitchesStringBuilder";
+
+
+    public static String getGlitchString(int fftsamplingsize, int FFTOverlapSamples,
+                                         int[] glitchesData, int samplingRate,
+                                         boolean glitchingIntervalTooLong, int numberOfGlitches) {
+        int newSamplesPerFFT = fftsamplingsize - FFTOverlapSamples;
+
+        // the time span of new samples for a single FFT in ms
+        double newSamplesInMs = ((double) newSamplesPerFFT / samplingRate) *
+                                Constant.MILLIS_PER_SECOND;
+        log("newSamplesInMs: " + Double.toString(newSamplesInMs));
+
+        // the time span of all samples for a single FFT in ms
+        double allSamplesInMs = ((double) fftsamplingsize / samplingRate) *
+                                Constant.MILLIS_PER_SECOND;
+        log("allSamplesInMs: " + Double.toString(allSamplesInMs));
+
+        StringBuilder listOfGlitches = new StringBuilder();
+        listOfGlitches.append("Total Glitching Interval too long: " +
+                glitchingIntervalTooLong + "\n");
+        listOfGlitches.append("Estimated number of glitches: " + numberOfGlitches + "\n");
+        listOfGlitches.append("List of glitching intervals: \n");
+
+        for (int i = 0; i < glitchesData.length; i++) {
+            int timeInMs; // starting time of glitches
+            //append the time of glitches to "listOfGlitches"
+            timeInMs = (int) (glitchesData[i] * newSamplesInMs); // round down
+            listOfGlitches.append(timeInMs + "~" + (timeInMs + (int) allSamplesInMs) + "ms\n");
+        }
+
+        return listOfGlitches.toString();
+    }
+
+    /** Generate a string of glitch start times in ms, one per line. */
+    public static String getGlitchStringForFile(int fftSamplingSize, int FFTOverlapSamples,
+                                                int[] glitchesData, int samplingRate) {
+        int newSamplesPerFFT = fftSamplingSize - FFTOverlapSamples;
+
+        // the time span of new samples for a single FFT in ms
+        double newSamplesInMs = ((double) newSamplesPerFFT / samplingRate) *
+                Constant.MILLIS_PER_SECOND;
+
+        StringBuilder listOfGlitches = new StringBuilder();
+
+        for (int i = 0; i < glitchesData.length; i++) {
+            int timeInMs; // starting time of glitches
+            //append the time of glitches to "listOfGlitches"
+            timeInMs = (int) (glitchesData[i] * newSamplesInMs); // round down
+            listOfGlitches.append(timeInMs + "\n");
+        }
+
+        return listOfGlitches.toString();
+    }
+
+    /** Generate an array of glitch start times in ms. */
+    public static int[] getGlitchMilliseconds(int fftSamplingSize, int FFTOverlapSamples,
+                                                int[] glitchesData, int samplingRate) {
+        int[] glitchMilliseconds = new int[glitchesData.length];
+        int newSamplesPerFFT = fftSamplingSize - FFTOverlapSamples;
+
+        // the time span of new samples for a single FFT in ms
+        double newSamplesInMs = ((double) newSamplesPerFFT / samplingRate) *
+                Constant.MILLIS_PER_SECOND;
+
+        for (int i = 0; i < glitchesData.length; i++) {
+            glitchMilliseconds[i] = (int) (glitchesData[i] * newSamplesInMs); // round down
+        }
+
+        return glitchMilliseconds;
+    }
+
+    private static void log(String msg) {
+        Log.v(TAG, msg);
+    }
+
+}
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/HistogramView.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/HistogramView.java
new file mode 100644
index 0000000..1055168
--- /dev/null
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/HistogramView.java
@@ -0,0 +1,354 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.drrickorang.loopback;
+
+import android.content.Context;
+import android.graphics.Canvas;
+import android.graphics.Color;
+import android.graphics.Paint;
+import android.graphics.Rect;
+import android.util.AttributeSet;
+import android.util.Log;
+import android.view.View;
+
+
+/**
+ * This is the histogram used to show recorder/player buffer period.
+ */
+
+public class HistogramView extends View {
+    private static final String TAG = "HistogramView";
+
+
+    private Paint mHistPaint;
+    private Paint mTextPaint;
+    private Paint mLinePaint;
+    private Paint mXLabelPaint;
+
+    private int[] mData; // data for buffer period
+    private int[] mDisplayData; // modified data that is used to draw histogram
+    private int mMaxBufferPeriod = 0;
+    // number of x-axis labels excluding the last x-axis label
+    private int mNumberOfXLabel = 5;  // mNumberOfXLabel must be > 0
+
+    private final int mYAxisBase = 10; // base of y-axis log scale
+    private final int mYLabelSize = 40;
+    private final int mXLabelSize = 40;
+    private final int mLineWidth = 3;
+    private final int mMaxNumberOfBeams = 202; // the maximum number of beams to display on the screen
+
+    // Note: to use a base other than 10, the way the x labels are displayed must also change;
+    // it is currently half-hardcoded.
+    private final int mBucketBase = 10; // the base used when grouping beams into buckets
+
+
+    public HistogramView(Context context, AttributeSet attrs) {
+        super(context, attrs);
+        initPaints();
+    }
+
+
+    /** Initialize all the Paint objects. */
+    private void initPaints() {
+        mHistPaint = new Paint(Paint.ANTI_ALIAS_FLAG);
+        mHistPaint.setStyle(Paint.Style.FILL);
+        mHistPaint.setColor(Color.BLUE);
+
+        mTextPaint = new Paint(Paint.ANTI_ALIAS_FLAG);
+        mTextPaint.setColor(Color.BLACK);
+        mTextPaint.setTextSize(mYLabelSize);
+
+        mXLabelPaint = new Paint(Paint.ANTI_ALIAS_FLAG);
+        mXLabelPaint.setColor(Color.BLACK);
+        mXLabelPaint.setTextSize(mXLabelSize);
+
+        mLinePaint = new Paint(Paint.ANTI_ALIAS_FLAG);
+        mLinePaint.setColor(Color.BLACK);
+        mLinePaint.setStrokeWidth(mLineWidth);
+    }
+
+    @Override
+    protected void onDraw(Canvas canvas) {
+        super.onDraw(canvas);
+        fillCanvas(canvas, this.getRight(), this.getBottom());
+    }
+
+    public void fillCanvas(Canvas canvas, int right, int bottom){
+        canvas.drawColor(Color.GRAY);
+
+        if (mData == null || mData.length == 0) {
+            return;
+        }
+
+        int arrayLength = mData.length;
+        boolean exceedBufferPeriodRange;
+        if (mMaxBufferPeriod != 0) {
+            final int extraYMargin = 5; // the extra margin between y labels and y-axis
+            final int beamInterval = 2; // separate each beam in the histogram by such amount
+            int range; // the number of beams that will be displayed on the histogram
+            if (mMaxBufferPeriod > arrayLength - 1) {
+                range = arrayLength;
+                exceedBufferPeriodRange = true;
+            } else {
+                range = mMaxBufferPeriod + 1;
+                exceedBufferPeriodRange = false;
+            }
+
+            if (range == 0) {
+                return;
+            }
+
+            boolean isUsingDisplayData = false;
+            int oldRange = range;
+            int interval = 1;
+
+            // if there are more beams than allowed to be displayed on screen,
+            // put beams into buckets
+            if (range > mMaxNumberOfBeams) {
+                isUsingDisplayData = true;
+                int bucketOrder = 0;
+                if (exceedBufferPeriodRange) { // there should be one extra beam for 101+
+                    range -= 2;
+                    while (range > mMaxNumberOfBeams - 2) {
+                        range /= mBucketBase;
+                        bucketOrder++;
+                    }
+                    range += 2; // assuming always XXX1+, not something like 0~473, 474+.
+
+                } else {
+                    range--;
+                    int temp = range;
+                    while (range > mMaxNumberOfBeams - 2) {
+                        range /= mBucketBase;
+                        bucketOrder++;
+                    }
+
+                    if ((temp % mBucketBase) != 0) {
+                        range += 2;
+                    } else {
+                        range++;
+                    }
+                }
+
+                interval = (int) Math.pow(mBucketBase, bucketOrder);
+                mDisplayData = new int[mMaxNumberOfBeams];
+                mDisplayData[0] = mData[0];
+
+                // putting data into buckets.
+                for (int i = 1; i < (range - 1); i++) {
+                    for (int j = (((i - 1) * interval) + 1); (j <= (i * interval)); j++) {
+                        mDisplayData[i] += mData[j];
+                    }
+                }
+
+                if (exceedBufferPeriodRange) {
+                    mDisplayData[range - 1] = mData[oldRange - 1];
+                } else {
+                    for (int i = (((range - 2) * interval) + 1); i < oldRange; i++) {
+                        mDisplayData[range - 1] += mData[i];
+                    }
+                }
+            } else {
+                mDisplayData = mData;
+            }
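+            // Bucketing example (illustrative values): with mMaxBufferPeriod = 1000 and enough
+            // array entries, the 1001 buffer-period values collapse into 101 beams with
+            // interval = 10: beam 0 for 0 ms, then 1-10 ms, 11-20 ms, ..., 991-1000 ms.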
+
+            // find the maximum count among all buffer period bins
+            int maxBufferPeriodFreq = 0;
+            for (int i = 1; i < range; i++) {
+                if (mDisplayData[i] > maxBufferPeriodFreq) {
+                    maxBufferPeriodFreq = mDisplayData[i];
+                }
+            }
+
+            if (maxBufferPeriodFreq == 0) {
+                return;
+            }
+
+            // find the smallest order such that mYAxisBase^order >= maxBufferPeriodFreq
+            int order = (int) Math.ceil((Math.log10(maxBufferPeriodFreq)) /
+                        (Math.log10(mYAxisBase)));
+            float height = ((float) (bottom - mXLabelSize - mLineWidth) / (order + 1));
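+            // Log-scale example (illustrative): maxBufferPeriodFreq = 523 gives order = 3, so the
+            // y labels are 0, 1, 10, 100, 1000 and a beam with count 523 is drawn
+            // log10(523) + 1 ≈ 3.72 "units" tall out of order + 1 = 4.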
+
+            // y labels
+            String[] yLabels = new String[order + 2]; // store {"0", "1", "10", ...} for base = 10
+            yLabels[0] = "0";
+            int yStartPoint = bottom - mXLabelSize - mLineWidth;
+            canvas.drawText(yLabels[0], 0, yStartPoint, mTextPaint);
+            int currentValue = 1;
+            for (int i = 1; i < yLabels.length; i++) {
+                yLabels[i] = Integer.toString(currentValue);
+                // the label is drawn "mYLabelSize" pixels lower than its tick position
+                canvas.drawText(yLabels[i], 0, yStartPoint - (i * height) + mYLabelSize,
+                        mTextPaint);
+                currentValue *= mYAxisBase;
+            }
+
+            // draw x axis
+            canvas.drawLine(0, bottom - mXLabelSize, right, bottom - mXLabelSize, mLinePaint);
+
+            // draw y axis
+            int yMargin = getTextWidth(yLabels[order + 1], mTextPaint);
+            canvas.drawLine(yMargin + extraYMargin, bottom, yMargin + extraYMargin,
+                    0, mLinePaint);
+
+            // width of each beam in the histogram
+            float width = ((float) (right - yMargin - extraYMargin - mLineWidth -
+                          (range * beamInterval)) / range);
+
+            // draw x labels
+            String lastXLabel;
+            int xLabelInterval;
+            int xStartPoint = yMargin + extraYMargin + mLineWidth;  // position of first beam
+            String[] xLabels;
+
+            // mNumberOfXLabel includes "0" but excludes the last label, which sits at the last beam;
+            // if mNumberOfXLabel exceeds the number of beams to be drawn, reduce it
+            if (mNumberOfXLabel - 1 > range - 2) {
+                mNumberOfXLabel = range - 1;
+            }
+
+            if (!isUsingDisplayData) { // in this case each beam represents one buffer period value
+                if ((range - 2) < mNumberOfXLabel) {
+                    xLabelInterval = 1;
+                } else {
+                    xLabelInterval = (range - 2) / mNumberOfXLabel;
+                }
+
+                xLabels = new String[mNumberOfXLabel];
+                xLabels[0] = "0";       // first label is for 0
+                canvas.drawText(xLabels[0], yMargin + extraYMargin + mLineWidth, bottom,
+                        mXLabelPaint);
+
+                float xLabelLineStartX;
+                float xLabelLineStartY;
+                int xLabelLineLength = 10;
+                for (int i = 1; i < mNumberOfXLabel; i++) {
+                    xLabelLineStartX = xStartPoint +
+                                       (xLabelInterval * i * (width + beamInterval));
+                    xLabels[i] = Integer.toString(i * xLabelInterval);
+                    canvas.drawText(xLabels[i], xLabelLineStartX, bottom, mXLabelPaint);
+
+                    //add a vertical line to indicate label's corresponding beams
+                    xLabelLineStartY = bottom - mXLabelSize;
+                    canvas.drawLine(xLabelLineStartX, xLabelLineStartY, xLabelLineStartX,
+                                    xLabelLineStartY - xLabelLineLength, mLinePaint);
+                }
+
+                // last label is for the last beam
+                if (exceedBufferPeriodRange) {
+                    lastXLabel = Integer.toString(range - 1) + "+";
+                } else {
+                    lastXLabel = Integer.toString(range - 1);
+                }
+
+                canvas.drawText(lastXLabel, right - getTextWidth(lastXLabel, mXLabelPaint) - 1,
+                        bottom, mXLabelPaint);
+
+            } else {    // in this case each beam represents a range of buffer periods
+                // if mNumberOfXLabel exceeds the number of beams, decrease mNumberOfXLabel
+                if ((range - 2) < mNumberOfXLabel) {
+                    xLabelInterval = 1;
+                } else {
+                    xLabelInterval = (range - 2) / mNumberOfXLabel;
+                }
+
+                xLabels = new String[mNumberOfXLabel];
+                xLabels[0] = "0";       // first label is for 0ms
+                canvas.drawText(xLabels[0], yMargin + extraYMargin + mLineWidth, bottom,
+                        mXLabelPaint);
+
+                // draw all the middle labels
+                for (int i = 1; i < mNumberOfXLabel; i++) {
+                    xLabels[i] = Integer.toString((i * xLabelInterval) - 1) + "1-" +
+                                 Integer.toString(i * xLabelInterval) + "0";
+                    canvas.drawText(xLabels[i], xStartPoint + (xLabelInterval * i *
+                            (width + beamInterval)), bottom, mXLabelPaint);
+                }
+
+                // draw the last label for the last beam
+                if (exceedBufferPeriodRange) {
+                    lastXLabel = Integer.toString(oldRange - 1) + "+";
+                } else {
+                    if ((((range - 2) * interval) + 1) == oldRange - 1) {
+                        lastXLabel = Integer.toString(oldRange - 1);
+                    } else {
+                        lastXLabel = Integer.toString(range - 2) + "1-" +
+                                Integer.toString(oldRange - 1);
+                    }
+                }
+
+                canvas.drawText(lastXLabel, right - getTextWidth(lastXLabel, mXLabelPaint) - 1,
+                        bottom, mXLabelPaint);
+            }
+
+            // draw the histogram
+            float currentLeft = yMargin + extraYMargin + mLineWidth;
+            float currentTop;
+            float currentRight;
+            int currentBottom = bottom - mXLabelSize - mLineWidth;
+            for (int i = 0; i < range; i++) {
+                currentRight = currentLeft + width;
+                // calculate the height of the beam. Skip drawing if mDisplayData[i] = 0
+                if (mDisplayData[i] != 0) {
+                    float units = (float) (((Math.log10((double) mDisplayData[i])) /
+                            Math.log10(mYAxisBase)) + 1.0);
+                    currentTop = currentBottom - (height * units);
+                    canvas.drawRect(currentLeft, currentTop, currentRight,
+                            currentBottom, mHistPaint);
+                }
+
+                currentLeft = currentRight + beamInterval;
+            }
+
+        }
+    }
+
+
+    /** Get the width of "text" when drawn with "paint". */
+    public int getTextWidth(String text, Paint paint) {
+        int width;
+        Rect bounds = new Rect();
+        paint.getTextBounds(text, 0, text.length(), bounds);
+        width = bounds.left + bounds.width();
+        return width;
+    }
+
+    /** Copy buffer period data into "mData" */
+    public void setBufferPeriodArray(int[] data) {
+        if (data == null) {
+            return;
+        }
+
+        if (mData == null || data.length != mData.length) {
+            mData = new int[data.length];
+        }
+
+        System.arraycopy(data, 0, mData, 0, data.length);
+    }
+
+
+    public void setMaxBufferPeriod(int maxBufferPeriod) {
+        mMaxBufferPeriod = maxBufferPeriod;
+    }
+
+
+    private static void log(String msg) {
+        Log.v(TAG, msg);
+    }
+
+}
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/LoadThread.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/LoadThread.java
new file mode 100644
index 0000000..9c98c2e
--- /dev/null
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/LoadThread.java
@@ -0,0 +1,56 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.drrickorang.loopback;
+
+import android.util.Log;
+
+
+/**
+ * This thread is used to add load to CPU, in order to test performance of audio under load.
+ */
+
+public class LoadThread extends Thread {
+    private static final String TAG = "LoadThread";
+
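+    // volatile so the busy-wait loop in run() observes requestStop() from another thread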
+    private volatile boolean mIsRunning;
+
+    public LoadThread(String threadName) {
+        super(threadName);
+    }
+
+    public void run() {
+        log("Entering load thread");
+        long count = 0;
+        mIsRunning = true;
+        while(mIsRunning) {
+            count++;
+        }
+
+        log("exiting CPU load thread with count = " + count);
+    }
+
+
+    public void requestStop() {
+        mIsRunning = false;
+    }
+
+
+    private static void log(String msg) {
+        Log.v(TAG, msg);
+    }
+
+}
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/LoopbackActivity.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/LoopbackActivity.java
new file mode 100644
index 0000000..d3acd03
--- /dev/null
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/LoopbackActivity.java
@@ -0,0 +1,2402 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.drrickorang.loopback;
+
+import android.Manifest;
+import android.app.Activity;
+import android.app.DialogFragment;
+import android.content.ComponentName;
+import android.content.Context;
+import android.content.Intent;
+import android.content.ServiceConnection;
+import android.content.pm.PackageManager;
+import android.database.Cursor;
+import android.graphics.Bitmap;
+import android.graphics.Canvas;
+import android.media.AudioManager;
+import android.net.Uri;
+import android.os.Build;
+import android.os.Bundle;
+import android.os.Handler;
+import android.os.IBinder;
+import android.os.Message;
+import android.os.ParcelFileDescriptor;
+import android.provider.MediaStore;
+import android.support.v4.app.ActivityCompat;
+import android.support.v4.content.ContextCompat;
+import android.text.format.DateFormat;
+import android.util.Log;
+import android.view.Gravity;
+import android.view.Menu;
+import android.view.MenuInflater;
+import android.view.MenuItem;
+import android.view.View;
+import android.view.ViewGroup;
+import android.widget.Button;
+import android.widget.LinearLayout;
+import android.widget.PopupWindow;
+import android.widget.SeekBar;
+import android.widget.TextView;
+import android.widget.Toast;
+
+import java.io.File;
+import java.io.FileDescriptor;
+import java.io.FileOutputStream;
+import java.util.Arrays;
+import java.util.Locale;
+
+
+/**
+ * This is the main activity of the Loopback app. Two tests (latency test and buffer test) can be
+ * initiated here. Note: the buffer test and glitch detection are the same test; it simply
+ * produces two sets of results.
+ */
+
+public class LoopbackActivity extends Activity
+        implements SaveFilesDialogFragment.NoticeDialogListener {
+    private static final String TAG = "LoopbackActivity";
+
+    private static final int SAVE_TO_WAVE_REQUEST = 42;
+    private static final int SAVE_TO_PNG_REQUEST = 43;
+    private static final int SAVE_TO_TXT_REQUEST = 44;
+    private static final int SAVE_RECORDER_BUFFER_PERIOD_TO_TXT_REQUEST = 45;
+    private static final int SAVE_PLAYER_BUFFER_PERIOD_TO_TXT_REQUEST = 46;
+    private static final int SAVE_RECORDER_BUFFER_PERIOD_TO_PNG_REQUEST = 47;
+    private static final int SAVE_PLAYER_BUFFER_PERIOD_TO_PNG_REQUEST = 48;
+    private static final int SAVE_RECORDER_BUFFER_PERIOD_TIMES_TO_TXT_REQUEST = 49;
+    private static final int SAVE_PLAYER_BUFFER_PERIOD_TIMES_TO_TXT_REQUEST = 50;
+    private static final int SAVE_GLITCH_OCCURRENCES_TO_TEXT_REQUEST = 51;
+    private static final int SAVE_GLITCH_AND_CALLBACK_HEATMAP_REQUEST = 52;
+
+    private static final int SETTINGS_ACTIVITY_REQUEST = 54;
+
+    private static final int THREAD_SLEEP_DURATION_MS = 200;
+    private static final int PERMISSIONS_REQUEST_RECORD_AUDIO_LATENCY = 201;
+    private static final int PERMISSIONS_REQUEST_RECORD_AUDIO_BUFFER = 202;
+    private static final int PERMISSIONS_REQUEST_WRITE_EXTERNAL_STORAGE_RESULTS = 203;
+    private static final int PERMISSIONS_REQUEST_WRITE_EXTERNAL_STORAGE_SCRIPT = 204;
+    private static final int LATENCY_TEST_STARTED = 300;
+    private static final int LATENCY_TEST_ENDED = 301;
+    private static final int BUFFER_TEST_STARTED = 302;
+    private static final int BUFFER_TEST_ENDED = 303;
+    private static final int CALIBRATION_STARTED = 304;
+    private static final int CALIBRATION_ENDED = 305;
+
+    // 0-100 controls the compression rate; currently ignored because the PNG format is used
+    private static final int EXPORTED_IMAGE_QUALITY = 100;
+
+    private static final int HISTOGRAM_EXPORT_WIDTH = 2000;
+    private static final int HISTOGRAM_EXPORT_HEIGHT = 2000;
+    private static final int HEATMAP_DRAW_WIDTH = 2560;
+    private static final int HEATMAP_DRAW_HEIGHT = 1440;
+    private static final int HEATMAP_EXPORT_DIVISOR = 2;
+
+    LoopbackAudioThread  mAudioThread = null;
+    NativeAudioThread    mNativeAudioThread = null;
+    private Thread       mCalibrationThread;
+    private WavePlotView mWavePlotView;
+    private String       mTestStartTimeString = "IncorrectTime";  // The time the test begins
+    private static final String FILE_SAVE_PATH = "file://mnt/sdcard/";
+
+    private SeekBar  mBarMasterLevel; // drag the volume
+    private TextView mTextInfo;
+    private TextView mTextViewCurrentLevel;
+    private TextView mTextViewResultSummary;
+
+    private int          mTestType;
+    private double []    mWaveData;    // this is where we store the data for the wave plot
+    private Correlation  mCorrelation = new Correlation();
+    private BufferPeriod mRecorderBufferPeriod = new BufferPeriod();
+    private BufferPeriod mPlayerBufferPeriod = new BufferPeriod();
+
+    // for native buffer period
+    private int[]  mNativeRecorderBufferPeriodArray;
+    private int    mNativeRecorderMaxBufferPeriod;
+    private double mNativeRecorderStdDevBufferPeriod;
+    private int[]  mNativePlayerBufferPeriodArray;
+    private int    mNativePlayerMaxBufferPeriod;
+    private double mNativePlayerStdDevBufferPeriod;
+    private BufferCallbackTimes mRecorderCallbackTimes;
+    private BufferCallbackTimes mPlayerCallbackTimes;
+
+    private static final String INTENT_SAMPLING_FREQUENCY = "SF";
+    private static final String INTENT_CHANNEL_INDEX = "CI";
+    private static final String INTENT_FILENAME = "FileName";
+    private static final String INTENT_RECORDER_BUFFER = "RecorderBuffer";
+    private static final String INTENT_PLAYER_BUFFER = "PlayerBuffer";
+    private static final String INTENT_AUDIO_THREAD = "AudioThread";
+    private static final String INTENT_MIC_SOURCE = "MicSource";
+    private static final String INTENT_PERFORMANCE_MODE = "PerformanceMode";
+    private static final String INTENT_AUDIO_LEVEL = "AudioLevel";
+    private static final String INTENT_IGNORE_FIRST_FRAMES = "IgnoreFirstFrames";
+    private static final String INTENT_TEST_TYPE = "TestType";
+    private static final String INTENT_BUFFER_TEST_DURATION = "BufferTestDuration";
+    private static final String INTENT_NUMBER_LOAD_THREADS = "NumLoadThreads";
+    private static final String INTENT_ENABLE_SYSTRACE = "CaptureSysTrace";
+    private static final String INTENT_ENABLE_WAVCAPTURE = "CaptureWavs";
+    private static final String INTENT_NUM_CAPTURES = "NumCaptures";
+    private static final String INTENT_WAV_DURATION = "WavDuration";
+
+    // for running the test using adb command
+    private boolean mIntentRunning = false; // if it is running triggered by intent with parameters
+    private String  mIntentFileName;
+
+    // Note: these values should only be assigned in restartAudioSystem()
+    private int   mAudioThreadType = Constant.UNKNOWN;
+    private int   mMicSource;
+    private int   mPerformanceMode;
+    private int   mSamplingRate;
+    private int   mChannelIndex;
+    private int   mSoundLevel;
+    private int   mPlayerBufferSizeInBytes;
+    private int   mRecorderBufferSizeInBytes;
+    private int   mIgnoreFirstFrames; // TODO: this only applies to native mode
+    private CaptureHolder mCaptureHolder;
+
+    // for buffer test
+    private int[]   mGlitchesData;
+    private boolean mGlitchingIntervalTooLong;
+    private int     mFFTSamplingSize;
+    private int     mFFTOverlapSamples;
+    private long    mBufferTestStartTime;
+    private int     mBufferTestElapsedSeconds;
+    private int     mBufferTestDurationInSeconds;
+    private int     mBufferTestWavePlotDurationInSeconds;
+
+    // threads that load CPUs
+    private LoadThread[]     mLoadThreads;
+
+    // for getting the Service
+    boolean mBound = false;
+    private AudioTestService mAudioTestService;
+    private final ServiceConnection mServiceConnection = new ServiceConnection() {
+        public void onServiceConnected(ComponentName className, IBinder service) {
+            mAudioTestService = ((AudioTestService.AudioTestBinder) service).getService();
+            mBound = true;
+        }
+
+        public void onServiceDisconnected(ComponentName className) {
+            mAudioTestService = null;
+            mBound = false;
+        }
+    };
+
+    private Handler mMessageHandler = new Handler() {
+        public void handleMessage(Message msg) {
+            super.handleMessage(msg);
+            switch (msg.what) {
+            case LoopbackAudioThread.LOOPBACK_AUDIO_THREAD_MESSAGE_LATENCY_REC_STARTED:
+                log("got message java latency test started!!");
+                showToast("Java Latency Test Started");
+                resetResults();
+                refreshState();
+                refreshPlots();
+                break;
+            case LoopbackAudioThread.LOOPBACK_AUDIO_THREAD_MESSAGE_LATENCY_REC_ERROR:
+                log("got message java latency test rec can't start!!");
+                showToast("Java Latency Test Recording Error. Please try again");
+                refreshState();
+                stopAudioTestThreads();
+                mIntentRunning = false;
+                refreshSoundLevelBar();
+                break;
+            case LoopbackAudioThread.LOOPBACK_AUDIO_THREAD_MESSAGE_LATENCY_REC_STOP:
+            case LoopbackAudioThread.LOOPBACK_AUDIO_THREAD_MESSAGE_LATENCY_REC_COMPLETE:
+                if (mAudioThread != null) {
+                    mWaveData = mAudioThread.getWaveData();
+                    mRecorderCallbackTimes = mRecorderBufferPeriod.getCallbackTimes();
+                    mPlayerCallbackTimes = mPlayerBufferPeriod.getCallbackTimes();
+                    mCorrelation.computeCorrelation(mWaveData, mSamplingRate);
+                    log("got message java latency rec complete!!");
+                    refreshPlots();
+                    refreshState();
+
+                    switch (msg.what) {
+                    case LoopbackAudioThread.LOOPBACK_AUDIO_THREAD_MESSAGE_LATENCY_REC_STOP:
+                        showToast("Java Latency Test Stopped");
+                        break;
+                    case LoopbackAudioThread.LOOPBACK_AUDIO_THREAD_MESSAGE_LATENCY_REC_COMPLETE:
+                        showToast("Java Latency Test Completed");
+                        break;
+                    }
+
+                    stopAudioTestThreads();
+                    if (mIntentRunning && mIntentFileName != null && mIntentFileName.length() > 0) {
+                        saveAllTo(mIntentFileName);
+                    }
+                    mIntentRunning = false;
+                }
+                refreshSoundLevelBar();
+                break;
+            case LoopbackAudioThread.LOOPBACK_AUDIO_THREAD_MESSAGE_BUFFER_REC_STARTED:
+                log("got message java buffer test rec started!!");
+                showToast("Java Buffer Test Started");
+                resetResults();
+                refreshState();
+                refreshPlots();
+                mBufferTestStartTime = System.currentTimeMillis();
+                break;
+            case LoopbackAudioThread.LOOPBACK_AUDIO_THREAD_MESSAGE_BUFFER_REC_ERROR:
+                log("got message java buffer test rec can't start!!");
+                showToast("Java Buffer Test Recording Error. Please try again");
+                refreshState();
+                stopAudioTestThreads();
+                mIntentRunning = false;
+                refreshSoundLevelBar();
+                break;
+            case LoopbackAudioThread.LOOPBACK_AUDIO_THREAD_MESSAGE_BUFFER_REC_STOP:
+            case LoopbackAudioThread.LOOPBACK_AUDIO_THREAD_MESSAGE_BUFFER_REC_COMPLETE:
+                if (mAudioThread != null) {
+                    mWaveData = mAudioThread.getWaveData();
+                    mGlitchesData = mAudioThread.getAllGlitches();
+                    mGlitchingIntervalTooLong = mAudioThread.getGlitchingIntervalTooLong();
+                    mFFTSamplingSize = mAudioThread.getFFTSamplingSize();
+                    mFFTOverlapSamples = mAudioThread.getFFTOverlapSamples();
+                    mRecorderCallbackTimes = mRecorderBufferPeriod.getCallbackTimes();
+                    mPlayerCallbackTimes = mPlayerBufferPeriod.getCallbackTimes();
+                    refreshPlots();  // only plot the last few seconds
+                    refreshState();
+                    //rounded up number of seconds elapsed
+                    mBufferTestElapsedSeconds =
+                            (int) ((System.currentTimeMillis() - mBufferTestStartTime +
+                            Constant.MILLIS_PER_SECOND - 1) / Constant.MILLIS_PER_SECOND);
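+                    // adding MILLIS_PER_SECOND - 1 before dividing makes the integer division
+                    // round up to whole seconds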
+                    switch (msg.what) {
+                    case LoopbackAudioThread.LOOPBACK_AUDIO_THREAD_MESSAGE_BUFFER_REC_STOP:
+                        showToast("Java Buffer Test Stopped");
+                        break;
+                    case LoopbackAudioThread.LOOPBACK_AUDIO_THREAD_MESSAGE_BUFFER_REC_COMPLETE:
+                        showToast("Java Buffer Test Completed");
+                        break;
+                    }
+                    if (getApp().isCaptureEnabled()) {
+                        mCaptureHolder.stopLoopbackListenerScript();
+                    }
+                    stopAudioTestThreads();
+                    if (mIntentRunning && mIntentFileName != null && mIntentFileName.length() > 0) {
+                        saveAllTo(mIntentFileName);
+                    }
+                    mIntentRunning = false;
+                }
+                refreshSoundLevelBar();
+                break;
+            case NativeAudioThread.LOOPBACK_NATIVE_AUDIO_THREAD_MESSAGE_LATENCY_REC_STARTED:
+                log("got message native latency test rec started!!");
+                showToast("Native Latency Test Started");
+                resetResults();
+                refreshState();
+                refreshPlots();
+                break;
+            case NativeAudioThread.LOOPBACK_NATIVE_AUDIO_THREAD_MESSAGE_BUFFER_REC_STARTED:
+                log("got message native buffer test rec started!!");
+                showToast("Native Buffer Test Started");
+                resetResults();
+                refreshState();
+                refreshPlots();
+                mBufferTestStartTime = System.currentTimeMillis();
+                break;
+            case NativeAudioThread.LOOPBACK_NATIVE_AUDIO_THREAD_MESSAGE_LATENCY_REC_ERROR:
+                log("got message native latency test rec can't start!!");
+                showToast("Native Latency Test Recording Error. Please try again");
+                refreshState();
+                mIntentRunning = false;
+                refreshSoundLevelBar();
+                break;
+            case NativeAudioThread.LOOPBACK_NATIVE_AUDIO_THREAD_MESSAGE_BUFFER_REC_ERROR:
+                log("got message native buffer test rec can't start!!");
+                showToast("Native Buffer Test Recording Error. Please try again");
+                refreshState();
+                mIntentRunning = false;
+                refreshSoundLevelBar();
+                break;
+            case NativeAudioThread.LOOPBACK_NATIVE_AUDIO_THREAD_MESSAGE_BUFFER_REC_STOP:
+            case NativeAudioThread.LOOPBACK_NATIVE_AUDIO_THREAD_MESSAGE_LATENCY_REC_STOP:
+            case NativeAudioThread.LOOPBACK_NATIVE_AUDIO_THREAD_MESSAGE_BUFFER_REC_COMPLETE:
+            case NativeAudioThread.LOOPBACK_NATIVE_AUDIO_THREAD_MESSAGE_LATENCY_REC_COMPLETE:
+            case NativeAudioThread.LOOPBACK_NATIVE_AUDIO_THREAD_MESSAGE_BUFFER_REC_COMPLETE_ERRORS:
+            case NativeAudioThread.LOOPBACK_NATIVE_AUDIO_THREAD_MESSAGE_LATENCY_REC_COMPLETE_ERRORS:
+                if (mNativeAudioThread != null) {
+                    mGlitchesData = mNativeAudioThread.getNativeAllGlitches();
+                    mGlitchingIntervalTooLong = mNativeAudioThread.getGlitchingIntervalTooLong();
+                    mFFTSamplingSize = mNativeAudioThread.getNativeFFTSamplingSize();
+                    mFFTOverlapSamples = mNativeAudioThread.getNativeFFTOverlapSamples();
+                    mWaveData = mNativeAudioThread.getWaveData();
+                    mNativeRecorderBufferPeriodArray = mNativeAudioThread.getRecorderBufferPeriod();
+                    mNativeRecorderMaxBufferPeriod =
+                            mNativeAudioThread.getRecorderMaxBufferPeriod();
+                    mNativeRecorderStdDevBufferPeriod =
+                            mNativeAudioThread.getRecorderStdDevBufferPeriod();
+                    mNativePlayerBufferPeriodArray = mNativeAudioThread.getPlayerBufferPeriod();
+                    mNativePlayerMaxBufferPeriod = mNativeAudioThread.getPlayerMaxBufferPeriod();
+                    mNativePlayerStdDevBufferPeriod =
+                            mNativeAudioThread.getPlayerStdDevBufferPeriod();
+                    mRecorderCallbackTimes = mNativeAudioThread.getRecorderCallbackTimes();
+                    mPlayerCallbackTimes = mNativeAudioThread.getPlayerCallbackTimes();
+
+                    if (msg.what != NativeAudioThread.
+                            LOOPBACK_NATIVE_AUDIO_THREAD_MESSAGE_BUFFER_REC_COMPLETE) {
+                        mCorrelation.computeCorrelation(mWaveData, mSamplingRate);
+                    }
+
+                    log("got message native buffer test rec complete!!");
+                    refreshPlots();
+                    refreshState();
+                    //rounded up number of seconds elapsed
+                    mBufferTestElapsedSeconds =
+                            (int) ((System.currentTimeMillis() - mBufferTestStartTime +
+                                    Constant.MILLIS_PER_SECOND - 1) / Constant.MILLIS_PER_SECOND);
+                    switch (msg.what) {
+                    case NativeAudioThread.
+                            LOOPBACK_NATIVE_AUDIO_THREAD_MESSAGE_BUFFER_REC_COMPLETE_ERRORS:
+                    case NativeAudioThread.
+                            LOOPBACK_NATIVE_AUDIO_THREAD_MESSAGE_LATENCY_REC_COMPLETE_ERRORS:
+                        showToast("Native Test Completed with Fatal Errors");
+                        break;
+                    case NativeAudioThread.LOOPBACK_NATIVE_AUDIO_THREAD_MESSAGE_BUFFER_REC_STOP:
+                    case NativeAudioThread.
+                            LOOPBACK_NATIVE_AUDIO_THREAD_MESSAGE_LATENCY_REC_STOP:
+                        showToast("Native Test Stopped");
+                        break;
+                    default:
+                        showToast("Native Test Completed");
+                        break;
+                    }
+
+
+                    stopAudioTestThreads();
+                    if (mIntentRunning && mIntentFileName != null && mIntentFileName.length() > 0) {
+                        saveAllTo(mIntentFileName);
+                    }
+                    mIntentRunning = false;
+
+                    if (getApp().isCaptureEnabled()) {
+                        mCaptureHolder.stopLoopbackListenerScript();
+                    }
+                }  // mNativeAudioThread != null
+                refreshSoundLevelBar();
+                break;
+            default:
+                log("Got message:" + msg.what);
+                break;
+            }
+
+            // Control UI elements visibility specific to latency or buffer/glitch test
+            switch (msg.what) {
+                // Latency test started
+                case LoopbackAudioThread.LOOPBACK_AUDIO_THREAD_MESSAGE_LATENCY_REC_STARTED:
+                case NativeAudioThread.LOOPBACK_NATIVE_AUDIO_THREAD_MESSAGE_LATENCY_REC_STARTED:
+                    setTransportButtonsState(LATENCY_TEST_STARTED);
+                    break;
+
+                // Latency test ended
+                case LoopbackAudioThread.LOOPBACK_AUDIO_THREAD_MESSAGE_LATENCY_REC_COMPLETE:
+                case LoopbackAudioThread.LOOPBACK_AUDIO_THREAD_MESSAGE_LATENCY_REC_ERROR:
+                case LoopbackAudioThread.LOOPBACK_AUDIO_THREAD_MESSAGE_LATENCY_REC_STOP:
+                case NativeAudioThread.LOOPBACK_NATIVE_AUDIO_THREAD_MESSAGE_LATENCY_REC_ERROR:
+                case NativeAudioThread.LOOPBACK_NATIVE_AUDIO_THREAD_MESSAGE_LATENCY_REC_COMPLETE:
+                case NativeAudioThread.LOOPBACK_NATIVE_AUDIO_THREAD_MESSAGE_LATENCY_REC_STOP:
+                case NativeAudioThread.
+                        LOOPBACK_NATIVE_AUDIO_THREAD_MESSAGE_LATENCY_REC_COMPLETE_ERRORS:
+                    setTransportButtonsState(LATENCY_TEST_ENDED);
+                    break;
+
+                // Buffer test started
+                case LoopbackAudioThread.LOOPBACK_AUDIO_THREAD_MESSAGE_BUFFER_REC_STARTED:
+                case NativeAudioThread.LOOPBACK_NATIVE_AUDIO_THREAD_MESSAGE_BUFFER_REC_STARTED:
+                    setTransportButtonsState(BUFFER_TEST_STARTED);
+                    break;
+
+                // Buffer test ended
+                case LoopbackAudioThread.LOOPBACK_AUDIO_THREAD_MESSAGE_BUFFER_REC_COMPLETE:
+                case LoopbackAudioThread.LOOPBACK_AUDIO_THREAD_MESSAGE_BUFFER_REC_ERROR:
+                case LoopbackAudioThread.LOOPBACK_AUDIO_THREAD_MESSAGE_BUFFER_REC_STOP:
+                case NativeAudioThread.LOOPBACK_NATIVE_AUDIO_THREAD_MESSAGE_BUFFER_REC_ERROR:
+                case NativeAudioThread.LOOPBACK_NATIVE_AUDIO_THREAD_MESSAGE_BUFFER_REC_COMPLETE:
+                case NativeAudioThread.LOOPBACK_NATIVE_AUDIO_THREAD_MESSAGE_BUFFER_REC_STOP:
+                case NativeAudioThread.
+                        LOOPBACK_NATIVE_AUDIO_THREAD_MESSAGE_BUFFER_REC_COMPLETE_ERRORS:
+                    setTransportButtonsState(BUFFER_TEST_ENDED);
+                    break;
+
+                // Sound Calibration started
+                case CALIBRATION_STARTED:
+                    setTransportButtonsState(CALIBRATION_STARTED);
+                    break;
+
+                // Sound Calibration ended
+                case CALIBRATION_ENDED:
+                    setTransportButtonsState(CALIBRATION_ENDED);
+                    break;
+            }
+        }
+    };
+
+
+    @Override
+    public void onCreate(Bundle savedInstanceState) {
+        super.onCreate(savedInstanceState);
+
+        // Set the layout for this activity. You can find it in res/layout/main_activity.xml.
+        View view = getLayoutInflater().inflate(R.layout.main_activity, null);
+        setContentView(view);
+
+        // TODO: Write script to file at more appropriate time, from settings activity or intent
+        // TODO: Respond to failure with more than just a toast
+        if (hasWriteFilePermission()){
+            boolean successfulWrite = AtraceScriptsWriter.writeScriptsToFile(this);
+            if(!successfulWrite) {
+                showToast("Unable to write loopback_listener script to device");
+            }
+        } else {
+            requestWriteFilePermission(PERMISSIONS_REQUEST_WRITE_EXTERNAL_STORAGE_SCRIPT);
+        }
+
+
+        mTextInfo = (TextView) findViewById(R.id.textInfo);
+        mBarMasterLevel = (SeekBar) findViewById(R.id.BarMasterLevel);
+
+        AudioManager am = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
+        int maxVolume = am.getStreamMaxVolume(AudioManager.STREAM_MUSIC);
+        mBarMasterLevel.setMax(maxVolume);
+
+        mBarMasterLevel.setOnSeekBarChangeListener(new SeekBar.OnSeekBarChangeListener() {
+            @Override
+            public void onStopTrackingTouch(SeekBar seekBar) {
+            }
+
+            @Override
+            public void onStartTrackingTouch(SeekBar seekBar) {
+            }
+
+            @Override
+            public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
+                AudioManager am = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
+                am.setStreamVolume(AudioManager.STREAM_MUSIC,
+                        progress, 0);
+                refreshSoundLevelBar();
+                log("Changed stream volume to: " + progress);
+            }
+        });
+        mWavePlotView = (WavePlotView) findViewById(R.id.viewWavePlot);
+
+        mTextViewCurrentLevel = (TextView) findViewById(R.id.textViewCurrentLevel);
+        mTextViewCurrentLevel.setTextSize(15);
+
+        mTextViewResultSummary = (TextView) findViewById(R.id.resultSummary);
+        refreshSoundLevelBar();
+
+        if(savedInstanceState != null) {
+            restoreInstanceState(savedInstanceState);
+        }
+
+        if (!hasRecordAudioPermission()) {
+            requestRecordAudioPermission(PERMISSIONS_REQUEST_RECORD_AUDIO_LATENCY);
+        }
+
+        applyIntent(getIntent());
+    }
+
+    @Override
+    protected void onStart() {
+        super.onStart();
+        Intent audioTestIntent = new Intent(this, AudioTestService.class);
+        startService(audioTestIntent);
+        boolean bound = bindService(audioTestIntent, mServiceConnection, Context.BIND_AUTO_CREATE);
+        if (bound) {
+            log("Successfully bound to service!");
+        }
+        else {
+            log("Failed to bind service!");
+        }
+    }
+
+
+    @Override
+    protected void onStop() {
+        super.onStop();
+        log("Activity on stop!");
+        // Unbind from the service
+        if (mBound) {
+            unbindService(mServiceConnection);
+            mBound = false;
+        }
+    }
+
+    @Override
+    public void onNewIntent(Intent intent) {
+        log("On New Intent called!");
+        applyIntent(intent);
+    }
+
+
+    /**
+     * This method will be called whenever the test starts running (either by operating on the
+     * device or by adb command). In the case where the test is started through adb command,
+     * adb parameters will be read into intermediate variables.
+     */
+    private void applyIntent(Intent intent) {
+        Bundle b = intent.getExtras();
+        if (b != null && !mIntentRunning) {
+            // adb shell am start -n org.drrickorang.loopback/.LoopbackActivity
+            // --ei SF 48000 --es FileName test1 --ei RecorderBuffer 512 --ei PlayerBuffer 512
+            // --ei AudioThread 1 --ei MicSource 3 --ei AudioLevel 12
+            // --ei TestType 223 --ei BufferTestDuration 60 --ei NumLoadThreads 4
+            // --ei CI -1 --ez CaptureSysTrace true --ez CaptureWavs false --ei NumCaptures 5
+            // --ei WavDuration 15
+
+            // Note: for native mode, player and recorder buffer sizes are the same, and can only be
+            // set through player buffer size
+
+            boolean hasRecordAudioPermission = hasRecordAudioPermission();
+            boolean hasWriteFilePermission = hasWriteFilePermission();
+            if (!hasRecordAudioPermission || !hasWriteFilePermission) {
+                if (!hasRecordAudioPermission) {
+                    log("Missing Permission: RECORD_AUDIO");
+                }
+
+                if (!hasWriteFilePermission) {
+                    log("Missing Permission: WRITE_EXTERNAL_STORAGE");
+                }
+
+                return;
+            }
+
+            if (b.containsKey(INTENT_BUFFER_TEST_DURATION)) {
+                getApp().setBufferTestDuration(b.getInt(INTENT_BUFFER_TEST_DURATION));
+                mIntentRunning = true;
+            }
+
+            if (b.containsKey(INTENT_SAMPLING_FREQUENCY)) {
+                getApp().setSamplingRate(b.getInt(INTENT_SAMPLING_FREQUENCY));
+                mIntentRunning = true;
+            }
+
+            if (b.containsKey(INTENT_CHANNEL_INDEX)) {
+                getApp().setChannelIndex(b.getInt(INTENT_CHANNEL_INDEX));
+                mChannelIndex = b.getInt(INTENT_CHANNEL_INDEX);
+                mIntentRunning = true;
+            }
+
+            if (b.containsKey(INTENT_FILENAME)) {
+                mIntentFileName = b.getString(INTENT_FILENAME);
+                mIntentRunning = true;
+            }
+
+            if (b.containsKey(INTENT_RECORDER_BUFFER)) {
+                getApp().setRecorderBufferSizeInBytes(
+                        b.getInt(INTENT_RECORDER_BUFFER) * Constant.BYTES_PER_FRAME);
+                mIntentRunning = true;
+            }
+
+            if (b.containsKey(INTENT_PLAYER_BUFFER)) {
+                getApp().setPlayerBufferSizeInBytes(
+                        b.getInt(INTENT_PLAYER_BUFFER) * Constant.BYTES_PER_FRAME);
+                mIntentRunning = true;
+            }
+
+            if (b.containsKey(INTENT_AUDIO_THREAD)) {
+                getApp().setAudioThreadType(b.getInt(INTENT_AUDIO_THREAD));
+                mIntentRunning = true;
+            }
+
+            if (b.containsKey(INTENT_MIC_SOURCE)) {
+                getApp().setMicSource(b.getInt(INTENT_MIC_SOURCE));
+                mIntentRunning = true;
+            }
+
+            if (b.containsKey(INTENT_PERFORMANCE_MODE)) {
+                getApp().setPerformanceMode(b.getInt(INTENT_PERFORMANCE_MODE));
+                mIntentRunning = true;
+            }
+
+            if (b.containsKey(INTENT_IGNORE_FIRST_FRAMES)) {
+                getApp().setIgnoreFirstFrames(b.getInt(INTENT_IGNORE_FIRST_FRAMES));
+                mIntentRunning = true;
+            }
+
+            if (b.containsKey(INTENT_AUDIO_LEVEL)) {
+                int audioLevel = b.getInt(INTENT_AUDIO_LEVEL);
+                if (audioLevel >= 0) {
+                    AudioManager am = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
+                    am.setStreamVolume(AudioManager.STREAM_MUSIC, audioLevel, 0);
+                    getApp().setSoundLevelCalibrationEnabled(false);
+                } else { // AudioLevel of -1 means automatically calibrate
+                    getApp().setSoundLevelCalibrationEnabled(true);
+                }
+                mIntentRunning = true;
+            }
+
+            if (b.containsKey(INTENT_NUMBER_LOAD_THREADS)) {
+                getApp().setNumberOfLoadThreads(b.getInt(INTENT_NUMBER_LOAD_THREADS));
+                mIntentRunning = true;
+            }
+
+            if (b.containsKey(INTENT_ENABLE_SYSTRACE)) {
+                getApp().setCaptureSysTraceEnabled(b.getBoolean(INTENT_ENABLE_SYSTRACE));
+                mIntentRunning = true;
+            }
+
+            if (b.containsKey(INTENT_ENABLE_WAVCAPTURE)) {
+                getApp().setCaptureWavsEnabled(b.getBoolean(INTENT_ENABLE_WAVCAPTURE));
+                mIntentRunning = true;
+            }
+
+            if (b.containsKey(INTENT_NUM_CAPTURES)) {
+                getApp().setNumberOfCaptures(b.getInt(INTENT_NUM_CAPTURES));
+                mIntentRunning = true;
+            }
+
+            if (b.containsKey(INTENT_WAV_DURATION)) {
+                getApp().setBufferTestWavePlotDuration(b.getInt(INTENT_WAV_DURATION));
+                mIntentRunning = true;
+            }
+
+            if (mIntentRunning || b.containsKey(INTENT_TEST_TYPE)) {
+                // run tests with provided or default parameters
+                refreshState();
+
+                // if no test is specified then Latency Test will be run
+                int testType = b.getInt(INTENT_TEST_TYPE,
+                        Constant.LOOPBACK_PLUG_AUDIO_THREAD_TEST_TYPE_LATENCY);
+                switch (testType) {
+                    case Constant.LOOPBACK_PLUG_AUDIO_THREAD_TEST_TYPE_BUFFER_PERIOD:
+                        startBufferTest();
+                        break;
+                    case Constant.LOOPBACK_PLUG_AUDIO_THREAD_TEST_TYPE_CALIBRATION:
+                        doCalibration();
+                        break;
+                    case Constant.LOOPBACK_PLUG_AUDIO_THREAD_TEST_TYPE_LATENCY:
+                    default:
+                        startLatencyTest();
+                        break;
+                }
+            }
+
+        } else {
+            if (mIntentRunning && b != null) {
+                log("Test already in progress");
+                showToast("Test already in progress");
+            }
+        }
+    }
+
+
+    /** Stop all currently running threads that are related to audio test. */
+    private void stopAudioTestThreads() {
+        log("stopping audio threads");
+        if (mAudioThread != null) {
+            try {
+                mAudioThread.finish();
+                mAudioThread.join(Constant.JOIN_WAIT_TIME_MS);
+            } catch (InterruptedException e) {
+                e.printStackTrace();
+            }
+            mAudioThread = null;
+        }
+
+        if (mNativeAudioThread != null) {
+            try {
+                mNativeAudioThread.finish();
+                mNativeAudioThread.join(Constant.JOIN_WAIT_TIME_MS);
+            } catch (InterruptedException e) {
+                e.printStackTrace();
+            }
+            mNativeAudioThread = null;
+        }
+
+        stopLoadThreads();
+        System.gc();
+    }
+
+
+    @Override
+    public void onDestroy() {
+        stopAudioTestThreads();
+        super.onDestroy();
+        stopService(new Intent(this, AudioTestService.class));
+    }
+
+
+    @Override
+    protected void onResume() {
+        super.onResume();
+        log("on resume called");
+    }
+
+
+    @Override
+    protected void onPause() {
+        super.onPause();
+    }
+
+    @Override
+    public boolean onCreateOptionsMenu(Menu menu) {
+        MenuInflater inflater = getMenuInflater();
+        inflater.inflate(R.menu.tool_bar_menu, menu);
+        return true;
+    }
+
+    @Override
+    public boolean onOptionsItemSelected(MenuItem item) {
+        // Respond to user selecting action bar buttons
+        switch (item.getItemId()) {
+            case R.id.action_help:
+                if (!isBusy()) {
+                    // Launch about Activity
+                    Intent aboutIntent = new Intent(this, AboutActivity.class);
+                    startActivity(aboutIntent);
+                } else {
+                    showToast("Test in progress... please wait");
+                }
+
+                return true;
+
+            case R.id.action_settings:
+                if (!isBusy()) {
+                    // Launch settings activity
+                    Intent mySettingsIntent = new Intent(this, SettingsActivity.class);
+                    startActivityForResult(mySettingsIntent, SETTINGS_ACTIVITY_REQUEST);
+                } else {
+                    showToast("Test in progress... please wait");
+                }
+                return true;
+        }
+
+        return super.onOptionsItemSelected(item);
+    }
+
+
+    /** Check if the app is busy (running test). */
+    public boolean isBusy() {
+        boolean busy = false;
+
+        if (mAudioThread != null && mAudioThread.mIsRunning) {
+            busy = true;
+        }
+
+        if (mNativeAudioThread != null && mNativeAudioThread.mIsRunning) {
+            busy = true;
+        }
+
+        return busy;
+    }
+
+
+    /** Create a new audio thread according to the settings. */
+    private void restartAudioSystem() {
+        log("restart audio system...");
+
+        int sessionId = 0; /* FIXME runtime test for am.generateAudioSessionId() in API 21 */
+
+        mAudioThreadType = getApp().getAudioThreadType();
+        mSamplingRate = getApp().getSamplingRate();
+        mChannelIndex = getApp().getChannelIndex();
+        mPlayerBufferSizeInBytes = getApp().getPlayerBufferSizeInBytes();
+        mRecorderBufferSizeInBytes = getApp().getRecorderBufferSizeInBytes();
+        mTestStartTimeString = (String) DateFormat.format("MMddkkmmss",
+                System.currentTimeMillis());
+        mMicSource = getApp().getMicSource();
+        mPerformanceMode = getApp().getPerformanceMode();
+        mIgnoreFirstFrames = getApp().getIgnoreFirstFrames();
+        AudioManager am = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
+        mSoundLevel = am.getStreamVolume(AudioManager.STREAM_MUSIC);
+        mBufferTestDurationInSeconds = getApp().getBufferTestDuration();
+        mBufferTestWavePlotDurationInSeconds = getApp().getBufferTestWavePlotDuration();
+
+        mCaptureHolder = new CaptureHolder(getApp().getNumStateCaptures(),
+                getFileNamePrefix(), getApp().isCaptureWavSnippetsEnabled(),
+                getApp().isCaptureSysTraceEnabled(), getApp().isCaptureBugreportEnabled(),
+                this, mSamplingRate);
+
+        log(" current sampling rate: " + mSamplingRate);
+        stopAudioTestThreads();
+
+        // select java or native audio thread
+        int micSourceMapped;
+        switch (mAudioThreadType) {
+        case Constant.AUDIO_THREAD_TYPE_JAVA:
+            micSourceMapped = getApp().mapMicSource(Constant.AUDIO_THREAD_TYPE_JAVA, mMicSource);
+
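+            // Expected callback period in ms = buffer size in frames / sampling rate * 1000.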
+            int expectedRecorderBufferPeriod = Math.round(
+                    (float) (mRecorderBufferSizeInBytes * Constant.MILLIS_PER_SECOND)
+                            / (Constant.BYTES_PER_FRAME * mSamplingRate));
+            mRecorderBufferPeriod.prepareMemberObjects(
+                    Constant.MAX_RECORDED_LATE_CALLBACKS_PER_SECOND * mBufferTestDurationInSeconds,
+                    expectedRecorderBufferPeriod, mCaptureHolder);
+
+            int expectedPlayerBufferPeriod = Math.round(
+                    (float) (mPlayerBufferSizeInBytes * Constant.MILLIS_PER_SECOND)
+                            / (Constant.BYTES_PER_FRAME * mSamplingRate));
+            mPlayerBufferPeriod.prepareMemberObjects(
+                    Constant.MAX_RECORDED_LATE_CALLBACKS_PER_SECOND * mBufferTestDurationInSeconds,
+                    expectedPlayerBufferPeriod, mCaptureHolder);
+
+            mAudioThread = new LoopbackAudioThread(mSamplingRate, mPlayerBufferSizeInBytes,
+                          mRecorderBufferSizeInBytes, micSourceMapped, /* no performance mode */ mRecorderBufferPeriod,
+                          mPlayerBufferPeriod, mTestType, mBufferTestDurationInSeconds,
+                          mBufferTestWavePlotDurationInSeconds, getApplicationContext(),
+                          mChannelIndex, mCaptureHolder);
+            mAudioThread.setMessageHandler(mMessageHandler);
+            mAudioThread.mSessionId = sessionId;
+            mAudioThread.start();
+            break;
+        case Constant.AUDIO_THREAD_TYPE_NATIVE:
+            micSourceMapped = getApp().mapMicSource(Constant.AUDIO_THREAD_TYPE_NATIVE, mMicSource);
+            int performanceModeMapped = getApp().mapPerformanceMode(mPerformanceMode);
+            // Note: mRecorderBufferSizeInBytes will not actually be used, since recorder buffer
+            // size = player buffer size in native mode
+            mNativeAudioThread = new NativeAudioThread(mSamplingRate, mPlayerBufferSizeInBytes,
+                                mRecorderBufferSizeInBytes, micSourceMapped, performanceModeMapped, mTestType,
+                                mBufferTestDurationInSeconds, mBufferTestWavePlotDurationInSeconds,
+                                mIgnoreFirstFrames, mCaptureHolder);
+            mNativeAudioThread.setMessageHandler(mMessageHandler);
+            mNativeAudioThread.mSessionId = sessionId;
+            mNativeAudioThread.start();
+            break;
+        }
+
+        startLoadThreads();
+
+        mMessageHandler.post(new Runnable() {
+            @Override
+            public void run() {
+                refreshState();
+            }
+        });
+    }
+
+
+    /** Start all LoadThread. */
+    private void startLoadThreads() {
+
+        if (getApp().getNumberOfLoadThreads() > 0) {
+
+            mLoadThreads = new LoadThread[getApp().getNumberOfLoadThreads()];
+
+            for (int i = 0; i < mLoadThreads.length; i++) {
+                mLoadThreads[i] = new LoadThread("Loopback_LoadThread_" + i);
+                mLoadThreads[i].start();
+            }
+        }
+    }
+
+
+    /** Stop all LoadThread. */
+    private void stopLoadThreads() {
+        log("stopping load threads");
+        if (mLoadThreads != null) {
+            for (int i = 0; i < mLoadThreads.length; i++) {
+                if (mLoadThreads[i] != null) {
+                    try {
+                        mLoadThreads[i].requestStop();
+                        mLoadThreads[i].join(Constant.JOIN_WAIT_TIME_MS);
+                    } catch (InterruptedException e) {
+                        e.printStackTrace();
+                    }
+                    mLoadThreads[i] = null;
+                }
+            }
+        }
+    }
+
+
+    private void resetBufferPeriodRecord(BufferPeriod recorderBufferPeriod,
+                                         BufferPeriod playerBufferPeriod) {
+        recorderBufferPeriod.resetRecord();
+        playerBufferPeriod.resetRecord();
+    }
+
+
+    private void setTransportButtonsState(int state) {
+        Button latencyStart = (Button) findViewById(R.id.buttonStartLatencyTest);
+        Button bufferStart = (Button) findViewById(R.id.buttonStartBufferTest);
+        Button calibrationStart = (Button) findViewById(R.id.buttonCalibrateSoundLevel);
+
+        switch (state) {
+            case LATENCY_TEST_STARTED:
+                findViewById(R.id.zoomAndSaveControlPanel).setVisibility(View.INVISIBLE);
+                mTextViewResultSummary.setText("");
+                findViewById(R.id.glitchReportPanel).setVisibility(View.INVISIBLE);
+                latencyStart.setCompoundDrawablesWithIntrinsicBounds(
+                        R.drawable.ic_stop, 0, 0, 0);
+                bufferStart.setEnabled(false);
+                calibrationStart.setEnabled(false);
+                break;
+
+            case LATENCY_TEST_ENDED:
+                findViewById(R.id.zoomAndSaveControlPanel).setVisibility(View.VISIBLE);
+                latencyStart.setCompoundDrawablesWithIntrinsicBounds(
+                        R.drawable.ic_play_arrow, 0, 0, 0);
+                bufferStart.setEnabled(true);
+                calibrationStart.setEnabled(true);
+                break;
+
+            case BUFFER_TEST_STARTED:
+                findViewById(R.id.zoomAndSaveControlPanel).setVisibility(View.INVISIBLE);
+                mTextViewResultSummary.setText("");
+                findViewById(R.id.glitchReportPanel).setVisibility(View.INVISIBLE);
+                bufferStart.setCompoundDrawablesWithIntrinsicBounds(
+                        R.drawable.ic_stop, 0, 0, 0);
+                latencyStart.setEnabled(false);
+                calibrationStart.setEnabled(false);
+                break;
+
+            case BUFFER_TEST_ENDED:
+                findViewById(R.id.zoomAndSaveControlPanel).setVisibility(View.VISIBLE);
+                findViewById(R.id.resultSummary).setVisibility(View.VISIBLE);
+                findViewById(R.id.glitchReportPanel).setVisibility(View.VISIBLE);
+                bufferStart.setCompoundDrawablesWithIntrinsicBounds(
+                        R.drawable.ic_play_arrow, 0, 0, 0);
+                latencyStart.setEnabled(true);
+                calibrationStart.setEnabled(true);
+                break;
+
+            case CALIBRATION_STARTED:
+                findViewById(R.id.zoomAndSaveControlPanel).setVisibility(View.INVISIBLE);
+                findViewById(R.id.resultSummary).setVisibility(View.INVISIBLE);
+                findViewById(R.id.glitchReportPanel).setVisibility(View.INVISIBLE);
+                bufferStart.setCompoundDrawablesWithIntrinsicBounds(
+                        R.drawable.ic_stop, 0, 0, 0);
+                latencyStart.setEnabled(false);
+                bufferStart.setEnabled(false);
+                calibrationStart.setEnabled(false);
+                break;
+
+            case CALIBRATION_ENDED:
+                findViewById(R.id.zoomAndSaveControlPanel).setVisibility(View.VISIBLE);
+                findViewById(R.id.resultSummary).setVisibility(View.VISIBLE);
+                findViewById(R.id.glitchReportPanel).setVisibility(View.VISIBLE);
+                bufferStart.setCompoundDrawablesWithIntrinsicBounds(
+                        R.drawable.ic_play_arrow, 0, 0, 0);
+                latencyStart.setEnabled(true);
+                bufferStart.setEnabled(true);
+                calibrationStart.setEnabled(true);
+                break;
+        }
+    }
+
+    private void doCalibrationIfEnabled(final Runnable onComplete) {
+        if (getApp().isSoundLevelCalibrationEnabled()) {
+            doCalibration(onComplete);
+        } else {
+            if (onComplete != null) {
+                onComplete.run();
+            }
+        }
+    }
+
+    private void doCalibration() {
+        doCalibration(null);
+    }
+
+    private void doCalibration(final Runnable onComplete) {
+        if (isBusy()) {
+            showToast("Test in progress... please wait");
+            return;
+        }
+
+        if (!hasRecordAudioPermission()) {
+            requestRecordAudioPermission(PERMISSIONS_REQUEST_RECORD_AUDIO_LATENCY);
+            // Returning, otherwise we don't know if user accepted or rejected.
+            return;
+        }
+
+        showToast("Calibrating sound level...");
+        final SoundLevelCalibration calibration =
+                new SoundLevelCalibration(getApp().getSamplingRate(),
+                        getApp().getPlayerBufferSizeInBytes(),
+                        getApp().getRecorderBufferSizeInBytes(),
+                        getApp().getMicSource(), getApp().getPerformanceMode(), this);
+
+        calibration.setChangeListener(new SoundLevelCalibration.SoundLevelChangeListener() {
+            @Override
+            void onChange(int newLevel) {
+                refreshSoundLevelBar();
+            }
+        });
+
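+        // Run the calibration off the UI thread; the onComplete callback (if any) also runs
+        // on this background thread.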
+        mCalibrationThread = new Thread(new Runnable() {
+            @Override
+            public void run() {
+                calibration.calibrate();
+                showToast("Calibration complete");
+                if (onComplete != null) {
+                    onComplete.run();
+                }
+            }
+        });
+
+        mCalibrationThread.start();
+    }
+
+    /** Start the latency test. */
+    public void onButtonLatencyTest(View view) throws InterruptedException {
+        if (isBusy()) {
+            stopTests();
+            return;
+        }
+
+        // Ensure we have RECORD_AUDIO permissions
+        // On Android M (API 23) we must request dangerous permissions each time we use them
+        if (hasRecordAudioPermission()) {
+            startLatencyTest();
+        } else {
+            requestRecordAudioPermission(PERMISSIONS_REQUEST_RECORD_AUDIO_LATENCY);
+        }
+    }
+
+    private void startLatencyTest() {
+        if (isBusy()) {
+            showToast("Test in progress... please wait");
+            return;
+        }
+
+        doCalibrationIfEnabled(latencyTestRunnable);
+    }
+
+    private Runnable latencyTestRunnable = new Runnable() {
+        @Override
+        public void run() {
+            if (isBusy()) {
+                showToast("Test in progress... please wait");
+                return;
+            }
+
+            mBarMasterLevel.post(new Runnable() {
+                @Override
+                public void run() {
+                    mBarMasterLevel.setEnabled(false);
+                }
+            });
+            resetBufferPeriodRecord(mRecorderBufferPeriod, mPlayerBufferPeriod);
+
+            mTestType = Constant.LOOPBACK_PLUG_AUDIO_THREAD_TEST_TYPE_LATENCY;
+            restartAudioSystem();
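+            // Brief pause before running the test, presumably to give the freshly
+            // (re)started audio thread time to initialize.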
+            try {
+                Thread.sleep(THREAD_SLEEP_DURATION_MS);
+            } catch (InterruptedException e) {
+                e.printStackTrace();
+            }
+
+            switch (mAudioThreadType) {
+                case Constant.AUDIO_THREAD_TYPE_JAVA:
+                    if (mAudioThread != null) {
+                        mAudioThread.runTest();
+                    }
+                    break;
+                case Constant.AUDIO_THREAD_TYPE_NATIVE:
+                    if (mNativeAudioThread != null) {
+                        mNativeAudioThread.runTest();
+                    }
+                    break;
+            }
+        }
+    };
+
+
+    /** Start the Buffer (Glitch Detection) Test. */
+    public void onButtonBufferTest(View view) throws InterruptedException {
+        if (isBusy()) {
+            stopTests();
+            return;
+        }
+
+        if (hasRecordAudioPermission()) {
+            startBufferTest();
+        } else {
+            requestRecordAudioPermission(PERMISSIONS_REQUEST_RECORD_AUDIO_BUFFER);
+        }
+    }
+
+
+    private void startBufferTest() {
+
+        if (!isBusy()) {
+            mBarMasterLevel.setEnabled(false);
+            resetBufferPeriodRecord(mRecorderBufferPeriod, mPlayerBufferPeriod);
+            mTestType = Constant.LOOPBACK_PLUG_AUDIO_THREAD_TEST_TYPE_BUFFER_PERIOD;
+            restartAudioSystem();   // in this function an audio thread is created
+            try {
+                Thread.sleep(THREAD_SLEEP_DURATION_MS);
+            } catch (InterruptedException e) {
+                e.printStackTrace();
+            }
+
+            switch (mAudioThreadType) {
+            case Constant.AUDIO_THREAD_TYPE_JAVA:
+                if (mAudioThread != null) {
+                    mAudioThread.runBufferTest();
+                }
+                break;
+            case Constant.AUDIO_THREAD_TYPE_NATIVE:
+                if (mNativeAudioThread != null) {
+                    mNativeAudioThread.runBufferTest();
+                }
+                break;
+            }
+        } else {
+            int duration = 0;
+            switch (mAudioThreadType) {
+            case Constant.AUDIO_THREAD_TYPE_JAVA:
+                duration = mAudioThread.getDurationInSeconds();
+                break;
+            case Constant.AUDIO_THREAD_TYPE_NATIVE:
+                duration = mNativeAudioThread.getDurationInSeconds();
+                break;
+            }
+            showToast("Long-run Test in progress, in total should take " +
+                    Integer.toString(duration) + "s, please wait");
+        }
+    }
+
+
+    /** Stop the ongoing test. */
+    public void stopTests() throws InterruptedException {
+        if (mAudioThread != null) {
+            mAudioThread.requestStopTest();
+        }
+
+        if (mNativeAudioThread != null) {
+            mNativeAudioThread.requestStopTest();
+        }
+    }
+
+    public void onButtonCalibrateSoundLevel(final View view) {
+        Message m = Message.obtain();
+        m.what = CALIBRATION_STARTED;
+        mMessageHandler.sendMessage(m);
+        Runnable onComplete = new Runnable() {
+            @Override
+            public void run() {
+                Message m = Message.obtain();
+                m.what = CALIBRATION_ENDED;
+                mMessageHandler.sendMessage(m);
+            }
+        };
+        doCalibration(onComplete);
+    }
+
+    /**
+     * Show a dialog asking whether to save the files through the file-name choosing dialog
+     * or not.
+     */
+    public void onButtonSave(View view) {
+        if (!isBusy()) {
+            DialogFragment newFragment = new SaveFilesDialogFragment();
+            newFragment.show(getFragmentManager(), "saveFiles");
+        } else {
+            showToast("Test in progress... please wait");
+        }
+    }
+
+    /**
+     * Save the test output files: a .png screenshot of the main activity, a .wav file of the
+     * plot displayed on the main activity, a .txt file with the test results, plus .txt and
+     * .png files for the recorder and player buffer period data and, when available, the
+     * glitch and heat map data.
+     */
+    private void SaveFilesWithDialog() {
+
+        String fileName = "loopback_" + mTestStartTimeString;
+
+        //Launch filename choosing activities if available, otherwise save without prompting
+        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) {
+            launchFileNameChoosingActivity("text/plain", fileName, ".txt", SAVE_TO_TXT_REQUEST);
+            launchFileNameChoosingActivity("image/png", fileName, ".png", SAVE_TO_PNG_REQUEST);
+            launchFileNameChoosingActivity("audio/wav", fileName, ".wav", SAVE_TO_WAVE_REQUEST);
+            launchFileNameChoosingActivity("text/plain", fileName, "_recorderBufferPeriod.txt",
+                    SAVE_RECORDER_BUFFER_PERIOD_TO_TXT_REQUEST);
+            launchFileNameChoosingActivity("text/plain", fileName, "_recorderBufferPeriodTimes.txt",
+                    SAVE_RECORDER_BUFFER_PERIOD_TIMES_TO_TXT_REQUEST);
+            launchFileNameChoosingActivity("image/png", fileName, "_recorderBufferPeriod.png",
+                    SAVE_RECORDER_BUFFER_PERIOD_TO_PNG_REQUEST);
+            launchFileNameChoosingActivity("text/plain", fileName, "_playerBufferPeriod.txt",
+                    SAVE_PLAYER_BUFFER_PERIOD_TO_TXT_REQUEST);
+            launchFileNameChoosingActivity("text/plain", fileName, "_playerBufferPeriodTimes.txt",
+                    SAVE_PLAYER_BUFFER_PERIOD_TIMES_TO_TXT_REQUEST);
+            launchFileNameChoosingActivity("image/png", fileName, "_playerBufferPeriod.png",
+                    SAVE_PLAYER_BUFFER_PERIOD_TO_PNG_REQUEST);
+
+            if (mGlitchesData != null) {
+                launchFileNameChoosingActivity("text/plain", fileName, "_glitchMillis.txt",
+                        SAVE_GLITCH_OCCURRENCES_TO_TEXT_REQUEST);
+                launchFileNameChoosingActivity("image/png", fileName, "_heatMap.png",
+                        SAVE_GLITCH_AND_CALLBACK_HEATMAP_REQUEST);
+            }
+        } else {
+            saveAllTo(fileName);
+        }
+    }
+
+    /**
+     * Launches an activity for choosing the filename of the file to be saved
+     */
+    public void launchFileNameChoosingActivity(String type, String fileName, String suffix,
+                                               int RequestCode) {
+        Intent FilenameIntent = new Intent(Intent.ACTION_CREATE_DOCUMENT);
+        FilenameIntent.addCategory(Intent.CATEGORY_OPENABLE);
+        FilenameIntent.setType(type);
+        FilenameIntent.putExtra(Intent.EXTRA_TITLE, fileName + suffix);
+        startActivityForResult(FilenameIntent, RequestCode);
+    }
+
+    private String getFileNamePrefix() {
+        if (mIntentFileName != null && !mIntentFileName.isEmpty()) {
+            return mIntentFileName;
+        } else {
+            return "loopback_" + mTestStartTimeString;
+        }
+    }
+
+    /** Save all the files described in the documentation of SaveFilesWithDialog(). */
+    public void saveAllTo(String fileName) {
+
+        if (!hasWriteFilePermission()) {
+            requestWriteFilePermission(PERMISSIONS_REQUEST_WRITE_EXTERNAL_STORAGE_RESULTS);
+            return;
+        }
+
+        showToast("Saving files to: " + fileName + ".(wav,png,txt)");
+
+        //save to a given uri... local file?
+        saveToWaveFile(Uri.parse(FILE_SAVE_PATH + fileName + ".wav"));
+
+        saveScreenShot(Uri.parse(FILE_SAVE_PATH + fileName + ".png"));
+
+        saveTextToFile(Uri.parse(FILE_SAVE_PATH + fileName + ".txt"), getReport().toString());
+
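+        // Buffer period data comes either from the Java-side BufferPeriod objects or from
+        // the arrays reported back by the native thread, depending on the audio thread type.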
+        int[] bufferPeriodArray = null;
+        int maxBufferPeriod = Constant.UNKNOWN;
+        switch (mAudioThreadType) {
+        case Constant.AUDIO_THREAD_TYPE_JAVA:
+            bufferPeriodArray = mRecorderBufferPeriod.getBufferPeriodArray();
+            maxBufferPeriod = mRecorderBufferPeriod.getMaxBufferPeriod();
+            break;
+        case Constant.AUDIO_THREAD_TYPE_NATIVE:
+            bufferPeriodArray = mNativeRecorderBufferPeriodArray;
+            maxBufferPeriod = mNativeRecorderMaxBufferPeriod;
+            break;
+        }
+        saveBufferPeriod(Uri.parse(FILE_SAVE_PATH + fileName + "_recorderBufferPeriod.txt"),
+                bufferPeriodArray, maxBufferPeriod);
+        saveHistogram(Uri.parse(FILE_SAVE_PATH + fileName + "_recorderBufferPeriod.png"),
+                bufferPeriodArray, maxBufferPeriod);
+        saveTextToFile(Uri.parse(FILE_SAVE_PATH + fileName + "_recorderBufferPeriodTimes.txt"),
+                mRecorderCallbackTimes.toString());
+
+        bufferPeriodArray = null;
+        maxBufferPeriod = Constant.UNKNOWN;
+        switch (mAudioThreadType) {
+        case Constant.AUDIO_THREAD_TYPE_JAVA:
+            bufferPeriodArray = mPlayerBufferPeriod.getBufferPeriodArray();
+            maxBufferPeriod = mPlayerBufferPeriod.getMaxBufferPeriod();
+            break;
+        case Constant.AUDIO_THREAD_TYPE_NATIVE:
+            bufferPeriodArray = mNativePlayerBufferPeriodArray;
+            maxBufferPeriod = mNativePlayerMaxBufferPeriod;
+            break;
+        }
+        saveBufferPeriod(Uri.parse(FILE_SAVE_PATH + fileName + "_playerBufferPeriod.txt"),
+                bufferPeriodArray, maxBufferPeriod);
+        saveHistogram(Uri.parse(FILE_SAVE_PATH + fileName + "_playerBufferPeriod.png"),
+                bufferPeriodArray, maxBufferPeriod);
+        saveTextToFile(Uri.parse(FILE_SAVE_PATH + fileName + "_playerBufferPeriodTimes.txt"),
+                mPlayerCallbackTimes.toString());
+
+        if (mGlitchesData != null) {
+            saveGlitchOccurrences(Uri.parse(FILE_SAVE_PATH + fileName + "_glitchMillis.txt"),
+                    mGlitchesData);
+            saveHeatMap(Uri.parse(FILE_SAVE_PATH + fileName + "_heatMap.png"),
+                    mRecorderCallbackTimes, mPlayerCallbackTimes,
+                    GlitchesStringBuilder.getGlitchMilliseconds(mFFTSamplingSize,
+                            mFFTOverlapSamples, mGlitchesData, mSamplingRate),
+                    mGlitchingIntervalTooLong, mBufferTestElapsedSeconds, fileName);
+        }
+
+    }
+
+
+    @Override
+    public void onActivityResult(int requestCode, int resultCode, Intent resultData) {
+        log("ActivityResult request: " + requestCode + "  result:" + resultCode);
+
+        if (resultCode == Activity.RESULT_OK) {
+            switch (requestCode) {
+            case SAVE_TO_WAVE_REQUEST:
+                log("got SAVE TO WAV intent back!");
+                if (resultData != null) {
+                    saveToWaveFile(resultData.getData());
+                }
+                break;
+            case SAVE_TO_PNG_REQUEST:
+                log("got SAVE TO PNG intent back!");
+                if (resultData != null) {
+                    saveScreenShot(resultData.getData());
+                }
+                break;
+            case SAVE_TO_TXT_REQUEST:
+                if (resultData != null) {
+                    saveTextToFile(resultData.getData(), getReport().toString());
+                }
+                break;
+            case SAVE_RECORDER_BUFFER_PERIOD_TO_TXT_REQUEST:
+                if (resultData != null) {
+                    int[] bufferPeriodArray = null;
+                    int maxBufferPeriod = Constant.UNKNOWN;
+                    switch (mAudioThreadType) {
+                    case Constant.AUDIO_THREAD_TYPE_JAVA:
+                        bufferPeriodArray = mRecorderBufferPeriod.getBufferPeriodArray();
+                        maxBufferPeriod = mRecorderBufferPeriod.getMaxBufferPeriod();
+                        break;
+                    case Constant.AUDIO_THREAD_TYPE_NATIVE:
+                        bufferPeriodArray = mNativeRecorderBufferPeriodArray;
+                        maxBufferPeriod = mNativeRecorderMaxBufferPeriod;
+                        break;
+                    }
+                    saveBufferPeriod(resultData.getData(), bufferPeriodArray, maxBufferPeriod);
+                }
+                break;
+            case SAVE_PLAYER_BUFFER_PERIOD_TO_TXT_REQUEST:
+                if (resultData != null) {
+                    int[] bufferPeriodArray = null;
+                    int maxBufferPeriod = Constant.UNKNOWN;
+                    switch (mAudioThreadType) {
+                    case Constant.AUDIO_THREAD_TYPE_JAVA:
+                        bufferPeriodArray = mPlayerBufferPeriod.getBufferPeriodArray();
+                        maxBufferPeriod = mPlayerBufferPeriod.getMaxBufferPeriod();
+                        break;
+                    case Constant.AUDIO_THREAD_TYPE_NATIVE:
+                        bufferPeriodArray = mNativePlayerBufferPeriodArray;
+                        maxBufferPeriod = mNativePlayerMaxBufferPeriod;
+                        break;
+                    }
+                    saveBufferPeriod(resultData.getData(), bufferPeriodArray, maxBufferPeriod);
+                }
+                break;
+            case SAVE_RECORDER_BUFFER_PERIOD_TO_PNG_REQUEST:
+                if (resultData != null) {
+                    int[] bufferPeriodArray = null;
+                    int maxBufferPeriod = Constant.UNKNOWN;
+                    switch (mAudioThreadType) {
+                        case Constant.AUDIO_THREAD_TYPE_JAVA:
+                            bufferPeriodArray = mRecorderBufferPeriod.getBufferPeriodArray();
+                            maxBufferPeriod = mRecorderBufferPeriod.getMaxBufferPeriod();
+                            break;
+                        case Constant.AUDIO_THREAD_TYPE_NATIVE:
+                            bufferPeriodArray = mNativeRecorderBufferPeriodArray;
+                            maxBufferPeriod = mNativeRecorderMaxBufferPeriod;
+                            break;
+                    }
+                    saveHistogram(resultData.getData(), bufferPeriodArray, maxBufferPeriod);
+                }
+                break;
+            case SAVE_PLAYER_BUFFER_PERIOD_TO_PNG_REQUEST:
+                if (resultData != null) {
+                    int[] bufferPeriodArray = null;
+                    int maxBufferPeriod = Constant.UNKNOWN;
+                    switch (mAudioThreadType) {
+                        case Constant.AUDIO_THREAD_TYPE_JAVA:
+                            bufferPeriodArray = mPlayerBufferPeriod.getBufferPeriodArray();
+                            maxBufferPeriod = mPlayerBufferPeriod.getMaxBufferPeriod();
+                            break;
+                        case Constant.AUDIO_THREAD_TYPE_NATIVE:
+                            bufferPeriodArray = mNativePlayerBufferPeriodArray;
+                            maxBufferPeriod = mNativePlayerMaxBufferPeriod;
+                            break;
+                    }
+                    saveHistogram(resultData.getData(), bufferPeriodArray, maxBufferPeriod);
+                }
+                break;
+            case SAVE_PLAYER_BUFFER_PERIOD_TIMES_TO_TXT_REQUEST:
+                if (resultData != null) {
+                    saveTextToFile(resultData.getData(),
+                            mPlayerCallbackTimes.toString());
+                }
+                break;
+            case SAVE_RECORDER_BUFFER_PERIOD_TIMES_TO_TXT_REQUEST:
+                if (resultData != null) {
+                    saveTextToFile(resultData.getData(),
+                            mRecorderCallbackTimes.toString());
+                }
+                break;
+            case SAVE_GLITCH_OCCURRENCES_TO_TEXT_REQUEST:
+                if (resultData != null) {
+                    saveGlitchOccurrences(resultData.getData(), mGlitchesData);
+                }
+                break;
+            case SAVE_GLITCH_AND_CALLBACK_HEATMAP_REQUEST:
+                if (resultData != null && mGlitchesData != null && mRecorderCallbackTimes != null
+                        && mPlayerCallbackTimes != null) {
+                    saveHeatMap(resultData.getData(), mRecorderCallbackTimes, mPlayerCallbackTimes,
+                            GlitchesStringBuilder.getGlitchMilliseconds(mFFTSamplingSize,
+                                    mFFTOverlapSamples, mGlitchesData, mSamplingRate),
+                            mGlitchingIntervalTooLong, mBufferTestElapsedSeconds,
+                            resultData.getData().toString());
+                }
+                break;
+            case SETTINGS_ACTIVITY_REQUEST:
+                log("return from new settings!");
+
+                break;
+            }
+        }
+    }
+
+
+    /**
+     * Refresh the sound level bar on the main activity to reflect the current sound level
+     * of the system.
+     */
+    private void refreshSoundLevelBar() {
+        mBarMasterLevel.setEnabled(true);
+        AudioManager am = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
+        int currentVolume = am.getStreamVolume(AudioManager.STREAM_MUSIC);
+        mBarMasterLevel.setProgress(currentVolume);
+
+        mTextViewCurrentLevel.setText(String.format("Current Sound Level: %d/%d", currentVolume,
+                mBarMasterLevel.getMax()));
+    }
+
+
+    /** Reset all results gathered from previous round of test (if any). */
+    private void resetResults() {
+        mCorrelation.invalidate();
+        mNativeRecorderBufferPeriodArray = null;
+        mNativePlayerBufferPeriodArray = null;
+        mPlayerCallbackTimes = null;
+        mRecorderCallbackTimes = null;
+        mGlitchesData = null;
+        mWaveData = null;
+    }
+
+
+    /** Get the file path from uri. Doesn't work for all devices. */
+    private String getPath(Uri uri) {
+        String[] projection = {MediaStore.Images.Media.DATA};
+        Cursor cursor1 = getContentResolver().query(uri, projection, null, null, null);
+        if (cursor1 == null) {
+            return uri.getPath();
+        }
+
+        int ColumnIndex = cursor1.getColumnIndexOrThrow(MediaStore.Images.Media.DATA);
+        cursor1.moveToFirst();
+        String path = cursor1.getString(ColumnIndex);
+        cursor1.close();
+        return path;
+    }
+
+
+    /** Zoom out the plot to its full size. */
+    public void onButtonZoomOutFull(View view) {
+        double fullZoomOut = mWavePlotView.getMaxZoomOut();
+        mWavePlotView.setZoom(fullZoomOut);
+        mWavePlotView.refreshGraph();
+    }
+
+
+    /** Zoom out the plot. */
+    public void onButtonZoomOut(View view) {
+        double zoom = mWavePlotView.getZoom();
+        zoom = 2.0 * zoom;
+        mWavePlotView.setZoom(zoom);
+        mWavePlotView.refreshGraph();
+    }
+
+
+    /** Zoom in the plot. */
+    public void onButtonZoomIn(View view) {
+        double zoom = mWavePlotView.getZoom();
+        zoom = zoom / 2.0;
+        mWavePlotView.setZoom(zoom);
+        mWavePlotView.refreshGraph();
+    }
+
+
+    /** Go to RecorderBufferPeriodActivity */
+    public void onButtonRecorderBufferPeriod(View view) {
+        if (!isBusy()) {
+            Intent RecorderBufferPeriodIntent = new Intent(this,
+                                                RecorderBufferPeriodActivity.class);
+            int recorderBufferSizeInFrames = mRecorderBufferSizeInBytes / Constant.BYTES_PER_FRAME;
+            log("recorderBufferSizeInFrames:" + recorderBufferSizeInFrames);
+
+            switch (mAudioThreadType) {
+            case Constant.AUDIO_THREAD_TYPE_JAVA:
+                RecorderBufferPeriodIntent.putExtra("recorderBufferPeriodArray",
+                        mRecorderBufferPeriod.getBufferPeriodArray());
+                RecorderBufferPeriodIntent.putExtra("recorderBufferPeriodMax",
+                        mRecorderBufferPeriod.getMaxBufferPeriod());
+                break;
+            case Constant.AUDIO_THREAD_TYPE_NATIVE:
+                RecorderBufferPeriodIntent.putExtra("recorderBufferPeriodArray",
+                        mNativeRecorderBufferPeriodArray);
+                RecorderBufferPeriodIntent.putExtra("recorderBufferPeriodMax",
+                        mNativeRecorderMaxBufferPeriod);
+                break;
+            }
+
+            RecorderBufferPeriodIntent.putExtra("recorderBufferSize", recorderBufferSizeInFrames);
+            RecorderBufferPeriodIntent.putExtra("samplingRate", mSamplingRate);
+            startActivity(RecorderBufferPeriodIntent);
+        } else
+            showToast("Test in progress... please wait");
+    }
+
+
+    /** Go to PlayerBufferPeriodActivity */
+    public void onButtonPlayerBufferPeriod(View view) {
+        if (!isBusy()) {
+            Intent PlayerBufferPeriodIntent = new Intent(this, PlayerBufferPeriodActivity.class);
+            int playerBufferSizeInFrames = mPlayerBufferSizeInBytes / Constant.BYTES_PER_FRAME;
+
+            switch (mAudioThreadType) {
+            case Constant.AUDIO_THREAD_TYPE_JAVA:
+                PlayerBufferPeriodIntent.putExtra("playerBufferPeriodArray",
+                        mPlayerBufferPeriod.getBufferPeriodArray());
+                PlayerBufferPeriodIntent.putExtra("playerBufferPeriodMax",
+                        mPlayerBufferPeriod.getMaxBufferPeriod());
+                break;
+            case Constant.AUDIO_THREAD_TYPE_NATIVE:
+                PlayerBufferPeriodIntent.putExtra("playerBufferPeriodArray",
+                        mNativePlayerBufferPeriodArray);
+                PlayerBufferPeriodIntent.putExtra("playerBufferPeriodMax",
+                        mNativePlayerMaxBufferPeriod);
+                break;
+            }
+
+            PlayerBufferPeriodIntent.putExtra("playerBufferSize", playerBufferSizeInFrames);
+            PlayerBufferPeriodIntent.putExtra("samplingRate", mSamplingRate);
+            startActivity(PlayerBufferPeriodIntent);
+        } else
+            showToast("Test in progress... please wait");
+    }
+
+
+    /** Display pop up window of recorded glitches */
+    public void onButtonGlitches(View view) {
+        if (!isBusy()) {
+            if (mGlitchesData != null) {
+                // Create a PopUpWindow with scrollable TextView
+                View puLayout = this.getLayoutInflater().inflate(R.layout.report_window, null);
+                PopupWindow popUp = new PopupWindow(puLayout, ViewGroup.LayoutParams.MATCH_PARENT,
+                        ViewGroup.LayoutParams.MATCH_PARENT, true);
+
+                // Generate report of glitch intervals and set pop up window text
+                TextView GlitchText =
+                        (TextView) popUp.getContentView().findViewById(R.id.ReportInfo);
+                GlitchText.setText(GlitchesStringBuilder.getGlitchString(mFFTSamplingSize,
+                        mFFTOverlapSamples, mGlitchesData, mSamplingRate,
+                        mGlitchingIntervalTooLong, estimateNumberOfGlitches(mGlitchesData)));
+
+                // display pop up window, dismissible with back button
+                popUp.showAtLocation(findViewById(R.id.linearLayoutMain), Gravity.TOP, 0, 0);
+            } else {
+                showToast("Please run the buffer test to get data");
+            }
+
+        } else {
+            showToast("Test in progress... please wait");
+        }
+    }
+
+    /** Display pop up window of recorded metrics and system information */
+    public void onButtonReport(View view) {
+        if (!isBusy()) {
+            if ((mTestType == Constant.LOOPBACK_PLUG_AUDIO_THREAD_TEST_TYPE_BUFFER_PERIOD
+                    && mGlitchesData != null)
+                    || (mTestType == Constant.LOOPBACK_PLUG_AUDIO_THREAD_TEST_TYPE_LATENCY
+                    && mCorrelation.isValid())) {
+                // Create a PopUpWindow with scrollable TextView
+                View puLayout = this.getLayoutInflater().inflate(R.layout.report_window, null);
+                PopupWindow popUp = new PopupWindow(puLayout, ViewGroup.LayoutParams.MATCH_PARENT,
+                        ViewGroup.LayoutParams.MATCH_PARENT, true);
+
+                // Generate report of glitch intervals and set pop up window text
+                TextView reportText =
+                        (TextView) popUp.getContentView().findViewById(R.id.ReportInfo);
+                reportText.setText(getReport().toString());
+
+                // display pop up window, dismissible with back button
+                popUp.showAtLocation(findViewById(R.id.linearLayoutMain), Gravity.TOP, 0, 0);
+            } else {
+                showToast("Please run the tests to get data");
+            }
+
+        } else {
+            showToast("Test in progress... please wait");
+        }
+    }
+
+    /** Display pop up window of recorded metrics and system information */
+    public void onButtonHeatMap(View view) {
+        if (!isBusy()) {
+            if (mTestType == Constant.LOOPBACK_PLUG_AUDIO_THREAD_TEST_TYPE_BUFFER_PERIOD
+                    && mGlitchesData != null && mRecorderCallbackTimes != null
+                    && mPlayerCallbackTimes != null) {
+
+                // Create a PopUpWindow with heatMap custom view
+                View puLayout = this.getLayoutInflater().inflate(R.layout.heatmap_window, null);
+                PopupWindow popUp = new PopupWindow(puLayout, ViewGroup.LayoutParams.MATCH_PARENT,
+                        ViewGroup.LayoutParams.MATCH_PARENT, true);
+
+                ((LinearLayout) popUp.getContentView()).addView(
+                        new GlitchAndCallbackHeatMapView(this, mRecorderCallbackTimes,
+                                mPlayerCallbackTimes,
+                                GlitchesStringBuilder.getGlitchMilliseconds(mFFTSamplingSize,
+                                        mFFTOverlapSamples, mGlitchesData, mSamplingRate),
+                                mGlitchingIntervalTooLong, mBufferTestElapsedSeconds,
+                                getResources().getString(R.string.heatTitle)));
+
+                popUp.showAtLocation(findViewById(R.id.linearLayoutMain), Gravity.TOP, 0, 0);
+
+            } else {
+                showToast("Please run the tests to get data");
+            }
+
+        } else {
+            showToast("Test in progress... please wait");
+        }
+    }
+
+    /** Redraw the plot according to mWaveData */
+    void refreshPlots() {
+        mWavePlotView.setData(mWaveData, mSamplingRate);
+        mWavePlotView.redraw();
+    }
+
+    /** Refresh the text on the main activity that shows the app states and audio settings. */
+    void refreshState() {
+        log("refreshState!");
+        refreshSoundLevelBar();
+
+        // get info
+        int playerFrames = mPlayerBufferSizeInBytes / Constant.BYTES_PER_FRAME;
+        int recorderFrames = mRecorderBufferSizeInBytes / Constant.BYTES_PER_FRAME;
+        StringBuilder s = new StringBuilder(200);
+
+        s.append("Settings from most recent run (at ");
+        s.append(mTestStartTimeString);
+        s.append("):\n");
+
+        s.append("SR: ").append(mSamplingRate).append(" Hz");
+        s.append(" ChannelIndex: ").append(mChannelIndex < 0 ? "MONO" : mChannelIndex);
+        switch (mAudioThreadType) {
+        case Constant.AUDIO_THREAD_TYPE_JAVA:
+            s.append(" Play Frames: " ).append(playerFrames);
+            s.append(" Record Frames: ").append(recorderFrames);
+            s.append(" Audio: JAVA");
+            break;
+        case Constant.AUDIO_THREAD_TYPE_NATIVE:
+            s.append(" Frames: ").append(playerFrames);
+            s.append(" Audio: NATIVE");
+            break;
+        }
+
+        // mic source
+        String micSourceName = getApp().getMicSourceString(mMicSource);
+        if (micSourceName != null) {
+            s.append(" Mic: ").append(micSourceName);
+        }
+
+        // performance mode
+        String performanceModeName = getApp().getPerformanceModeString(mPerformanceMode);
+        if (performanceModeName != null) {
+            s.append(" Performance Mode: ").append(performanceModeName);
+        }
+
+        // sound level at start of test
+        s.append(" Sound Level: ").append(mSoundLevel).append("/").append(mBarMasterLevel.getMax());
+
+        // load threads
+        s.append(" Simulated Load Threads: ").append(getApp().getNumberOfLoadThreads());
+
+        // Show short summary of results, round trip latency or number of glitches
+        if (mTestType == Constant.LOOPBACK_PLUG_AUDIO_THREAD_TEST_TYPE_LATENCY) {
+            if (mIgnoreFirstFrames > 0) {
+                s.append(" First Frames Ignored: ").append(mIgnoreFirstFrames);
+            }
+            if (mCorrelation.isValid()) {
+                mTextViewResultSummary.setText(String.format("Latency: %.2f ms Confidence: %.2f" +
+                                " Average = %.4f RMS = %.4f",
+                        mCorrelation.mEstimatedLatencyMs, mCorrelation.mEstimatedLatencyConfidence,
+                        mCorrelation.mAverage, mCorrelation.mRms));
+            }
+        } else if (mTestType == Constant.LOOPBACK_PLUG_AUDIO_THREAD_TEST_TYPE_BUFFER_PERIOD &&
+                mGlitchesData != null) {
+            // show buffer test duration
+            s.append("\nBuffer Test Duration: ").append(mBufferTestDurationInSeconds).append(" s");
+
+            // show buffer test wave plot duration
+            s.append("   Buffer Test Wave Plot Duration: last ");
+            s.append(mBufferTestWavePlotDurationInSeconds);
+            s.append(" s");
+
+            mTextViewResultSummary.setText(getResources().getString(R.string.numGlitches) + " " +
+                    estimateNumberOfGlitches(mGlitchesData));
+        } else {
+            mTextViewResultSummary.setText("");
+        }
+
+        String info = getApp().getSystemInfo();
+        s.append(" ").append(info);
+
+        mTextInfo.setText(s.toString());
+    }
+
+
+    private static void log(String msg) {
+        Log.v(TAG, msg);
+    }
+
+
+    public void showToast(final String msg) {
+        // Make sure UI manipulations are only done on the UI thread
+        LoopbackActivity.this.runOnUiThread(new Runnable() {
+            public void run() {
+                Toast toast = Toast.makeText(getApplicationContext(), msg, Toast.LENGTH_LONG);
+                toast.setGravity(Gravity.CENTER_VERTICAL | Gravity.CENTER_HORIZONTAL, 10, 10);
+                toast.show();
+            }
+        });
+    }
+
+
+    /** Get the application that runs this activity. Wrapper for getApplication(). */
+    private LoopbackApplication getApp() {
+        return (LoopbackApplication) this.getApplication();
+    }
+
+
+    /** Save a .wav file of the wave plot on the main activity. */
+    void saveToWaveFile(Uri uri) {
+        if (mWaveData != null && mWaveData.length > 0) {
+            AudioFileOutput audioFileOutput = new AudioFileOutput(getApplicationContext(), uri,
+                                                                  mSamplingRate);
+            boolean status = audioFileOutput.writeData(mWaveData);
+            if (status) {
+                String wavFileAbsolutePath = getPath(uri);
+                // for some devices getPath fails
+                if (wavFileAbsolutePath != null) {
+                    File file = new File(wavFileAbsolutePath);
+                    wavFileAbsolutePath = file.getAbsolutePath();
+                } else {
+                    wavFileAbsolutePath = "";
+                }
+                showToast("Finished exporting wave File " + wavFileAbsolutePath);
+            } else {
+                showToast("Something failed saving wave file");
+            }
+
+        }
+    }
+
+
+    /** Save a screenshot of the main activity. */
+    void saveScreenShot(Uri uri) {
+        ParcelFileDescriptor parcelFileDescriptor = null;
+        FileOutputStream outputStream;
+        try {
+            parcelFileDescriptor = getApplicationContext().getContentResolver().
+                                   openFileDescriptor(uri, "w");
+
+            FileDescriptor fileDescriptor = parcelFileDescriptor.getFileDescriptor();
+            outputStream = new FileOutputStream(fileDescriptor);
+
+            log("Done creating output stream");
+
+            LinearLayout LL = (LinearLayout) findViewById(R.id.linearLayoutMain);
+
+            View v = LL.getRootView();
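+            // Capture the root view into a bitmap through the drawing cache
+            // (note: the drawing-cache APIs are deprecated on newer Android releases).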
+            v.setDrawingCacheEnabled(true);
+            Bitmap b = v.getDrawingCache();
+
+            //save
+            b.compress(Bitmap.CompressFormat.PNG, 100, outputStream);
+            parcelFileDescriptor.close();
+            v.setDrawingCacheEnabled(false);
+        } catch (Exception e) {
+            log("Failed to open png file " + e);
+        } finally {
+            try {
+                if (parcelFileDescriptor != null) {
+                    parcelFileDescriptor.close();
+                }
+            } catch (Exception e) {
+                e.printStackTrace();
+                log("Error closing ParcelFile Descriptor");
+            }
+        }
+    }
+
+    private void saveHistogram(Uri uri, int[] bufferPeriodArray, int maxBufferPeriod) {
+        // Create a histogram view for the given buffer period data
+        HistogramView recordHisto = new HistogramView(this, null);
+        recordHisto.setBufferPeriodArray(bufferPeriodArray);
+        recordHisto.setMaxBufferPeriod(maxBufferPeriod);
+
+        // Draw histogram on bitmap canvas
+        Bitmap histoBmp = Bitmap.createBitmap(HISTOGRAM_EXPORT_WIDTH,
+                HISTOGRAM_EXPORT_HEIGHT, Bitmap.Config.ARGB_8888); // creates a MUTABLE bitmap
+        recordHisto.fillCanvas(new Canvas(histoBmp), histoBmp.getWidth(), histoBmp.getHeight());
+
+        saveImage(uri, histoBmp);
+    }
+
+    private void saveHeatMap(Uri uri, BufferCallbackTimes recorderCallbackTimes,
+                             BufferCallbackTimes playerCallbackTimes, int[] glitchMilliseconds,
+                             boolean glitchesExceeded, int duration, String title) {
+        Bitmap heatBmp = Bitmap.createBitmap(HEATMAP_DRAW_WIDTH, HEATMAP_DRAW_HEIGHT,
+                Bitmap.Config.ARGB_8888);
+        GlitchAndCallbackHeatMapView.fillCanvas(new Canvas(heatBmp), recorderCallbackTimes,
+                playerCallbackTimes, glitchMilliseconds, glitchesExceeded, duration,
+                title);
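+        // The heat map is drawn at full resolution and then scaled down by
+        // HEATMAP_EXPORT_DIVISOR, presumably to keep the exported PNG size manageable.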
+        saveImage(uri, Bitmap.createScaledBitmap(heatBmp,
+                HEATMAP_DRAW_WIDTH / HEATMAP_EXPORT_DIVISOR,
+                HEATMAP_DRAW_HEIGHT / HEATMAP_EXPORT_DIVISOR, false));
+    }
+
+    /** Save an image to file. */
+    private void saveImage(Uri uri, Bitmap bmp) {
+        ParcelFileDescriptor parcelFileDescriptor = null;
+        FileOutputStream outputStream;
+        try {
+            parcelFileDescriptor = getApplicationContext().getContentResolver().
+                    openFileDescriptor(uri, "w");
+
+            FileDescriptor fileDescriptor = parcelFileDescriptor.getFileDescriptor();
+            outputStream = new FileOutputStream(fileDescriptor);
+
+            log("Done creating output stream");
+
+            // Save compressed bitmap to file
+            bmp.compress(Bitmap.CompressFormat.PNG, EXPORTED_IMAGE_QUALITY, outputStream);
+            parcelFileDescriptor.close();
+        } catch (Exception e) {
+            log("Failed to open png file " + e);
+        } finally {
+            try {
+                if (parcelFileDescriptor != null) {
+                    parcelFileDescriptor.close();
+                }
+            } catch (Exception e) {
+                e.printStackTrace();
+                log("Error closing ParcelFile Descriptor");
+            }
+        }
+    }
+
+
+    /**
+     * Save a .txt file of the given buffer period's data.
+     * The first column is the buffer period in ms, the second column is the number of
+     * callbacks observed with that period.
+     */
+    void saveBufferPeriod(Uri uri, int[] bufferPeriodArray, int maxBufferPeriod) {
+        ParcelFileDescriptor parcelFileDescriptor = null;
+        FileOutputStream outputStream;
+        if (bufferPeriodArray != null) {
+            try {
+                parcelFileDescriptor = getApplicationContext().getContentResolver().
+                        openFileDescriptor(uri, "w");
+
+                FileDescriptor fileDescriptor = parcelFileDescriptor.getFileDescriptor();
+                outputStream = new FileOutputStream(fileDescriptor);
+                log("Done creating output stream for saving buffer period");
+
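+                // Only buckets up to the maximum observed period contain data; trim the rest.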
+                int usefulDataRange = Math.min(maxBufferPeriod + 1, bufferPeriodArray.length);
+                int[] usefulBufferData = Arrays.copyOfRange(bufferPeriodArray, 0, usefulDataRange);
+
+                String endline = "\n";
+                String delimiter = ",";
+                StringBuilder sb = new StringBuilder();
+                for (int i = 0; i < usefulBufferData.length; i++) {
+                    sb.append(i + delimiter + usefulBufferData[i] + endline);
+                }
+
+                outputStream.write(sb.toString().getBytes());
+
+            } catch (Exception e) {
+                log("Failed to open text file " + e);
+            } finally {
+                try {
+                    if (parcelFileDescriptor != null) {
+                        parcelFileDescriptor.close();
+                    }
+                } catch (Exception e) {
+                    e.printStackTrace();
+                    log("Error closing ParcelFileDescriptor");
+                }
+            }
+        }
+
+    }
+
+    /** Save a .txt file of various test results. */
+    void saveTextToFile(Uri uri, String outputText) {
+        ParcelFileDescriptor parcelFileDescriptor = null;
+        FileOutputStream outputStream;
+        try {
+            parcelFileDescriptor = getApplicationContext().getContentResolver().
+                                   openFileDescriptor(uri, "w");
+
+            FileDescriptor fileDescriptor = parcelFileDescriptor.getFileDescriptor();
+            outputStream = new FileOutputStream(fileDescriptor);
+            log("Done creating output stream");
+
+            outputStream.write(outputText.getBytes());
+            parcelFileDescriptor.close();
+        } catch (Exception e) {
+            log("Failed to open text file " + e);
+        } finally {
+            try {
+                if (parcelFileDescriptor != null) {
+                    parcelFileDescriptor.close();
+                }
+            } catch (Exception e) {
+                e.printStackTrace();
+                log("Error closing ParcelFileDescriptor");
+            }
+        }
+    }
+
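+    /** Assemble the key = value report text describing the test configuration and results. */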
+    private StringBuilder getReport() {
+        String endline = "\n";
+        final int stringLength = 300;
+        StringBuilder sb = new StringBuilder(stringLength);
+        sb.append("DateTime = " + mTestStartTimeString + endline);
+        sb.append(INTENT_SAMPLING_FREQUENCY + " = " + mSamplingRate + endline);
+        sb.append(INTENT_CHANNEL_INDEX + " = " + mChannelIndex + endline);
+        sb.append(INTENT_RECORDER_BUFFER + " = " + mRecorderBufferSizeInBytes /
+                Constant.BYTES_PER_FRAME + endline);
+        sb.append(INTENT_PLAYER_BUFFER + " = " + mPlayerBufferSizeInBytes /
+                Constant.BYTES_PER_FRAME + endline);
+        sb.append(INTENT_AUDIO_THREAD + " = " + mAudioThreadType + endline);
+
+        String audioType = "unknown";
+        switch (mAudioThreadType) {
+            case Constant.AUDIO_THREAD_TYPE_JAVA:
+                audioType = "JAVA";
+                break;
+            case Constant.AUDIO_THREAD_TYPE_NATIVE:
+                audioType = "NATIVE";
+                break;
+        }
+        sb.append(INTENT_AUDIO_THREAD + "_String = " + audioType + endline);
+
+        sb.append(INTENT_MIC_SOURCE + " = " + mMicSource + endline);
+        sb.append(INTENT_MIC_SOURCE + "_String = " + getApp().getMicSourceString(mMicSource)
+                + endline);
+        sb.append(INTENT_AUDIO_LEVEL + " = " + mSoundLevel + endline);
+
+        switch (mTestType) {
+            case Constant.LOOPBACK_PLUG_AUDIO_THREAD_TEST_TYPE_LATENCY:
+                sb.append(INTENT_IGNORE_FIRST_FRAMES + " = " + mIgnoreFirstFrames + endline);
+                if (mCorrelation.isValid()) {
+                    sb.append(String.format("LatencyMs = %.2f", mCorrelation.mEstimatedLatencyMs)
+                            + endline);
+                } else {
+                    sb.append("LatencyMs = unknown" + endline);
+                }
+
+                sb.append(String.format("LatencyConfidence = %.2f",
+                        mCorrelation.mEstimatedLatencyConfidence) + endline);
+
+                sb.append(String.format("Average = %.4f", mCorrelation.mAverage) + endline);
+                sb.append(String.format("RMS = %.4f", mCorrelation.mRms) + endline);
+                break;
+            case Constant.LOOPBACK_PLUG_AUDIO_THREAD_TEST_TYPE_BUFFER_PERIOD:
+                sb.append("Buffer Test Duration (s) = " + mBufferTestDurationInSeconds + endline);
+
+                // report recorder results
+                int[] recorderBufferData = null;
+                int recorderBufferDataMax = 0;
+                double recorderBufferDataStdDev = 0.0;
+                switch (mAudioThreadType) {
+                    case Constant.AUDIO_THREAD_TYPE_JAVA:
+                        recorderBufferData = mRecorderBufferPeriod.getBufferPeriodArray();
+                        recorderBufferDataMax = mRecorderBufferPeriod.getMaxBufferPeriod();
+                        recorderBufferDataStdDev = mRecorderBufferPeriod.getStdDevBufferPeriod();
+                        break;
+                    case Constant.AUDIO_THREAD_TYPE_NATIVE:
+                        recorderBufferData = mNativeRecorderBufferPeriodArray;
+                        recorderBufferDataMax = mNativeRecorderMaxBufferPeriod;
+                        recorderBufferDataStdDev = mNativeRecorderStdDevBufferPeriod;
+                        break;
+                }
+                // report expected recorder buffer period
+                if (recorderBufferData != null) {
+                    // this is the range of data that actually has values
+                    int usefulDataRange = Math.min(recorderBufferDataMax + 1,
+                            recorderBufferData.length);
+                    int[] usefulBufferData = Arrays.copyOfRange(recorderBufferData, 0,
+                            usefulDataRange);
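+                    // Compare measured periods against the expected period: fraction on time, a weighted benchmark, and an outlier count.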
+                    PerformanceMeasurement measurement = new PerformanceMeasurement(
+                            mRecorderCallbackTimes.getExpectedBufferPeriod(), usefulBufferData);
+                    float recorderPercentAtExpected =
+                            measurement.percentBufferPeriodsAtExpected();
+                    double benchmark = measurement.computeWeightedBenchmark();
+                    int outliers = measurement.countOutliers();
+                    sb.append("Expected Recorder Buffer Period (ms) = " +
+                            mRecorderCallbackTimes.getExpectedBufferPeriod() + endline);
+                    sb.append("Recorder Buffer Periods At Expected = " +
+                            String.format("%.5f%%", recorderPercentAtExpected * 100) + endline);
+
+                    sb.append("Recorder Buffer Period Std Dev = "
+                            + String.format(Locale.US, "%.5f ms", recorderBufferDataStdDev)
+                            + endline);
+
+                    // output thousandths of a percent not at expected buffer period
+                    sb.append("kth% Late Recorder Buffer Callbacks = "
+                            + String.format("%.5f", (1 - recorderPercentAtExpected) * 100000)
+                            + endline);
+                    sb.append("Recorder Benchmark = " + benchmark + endline);
+                    sb.append("Recorder Number of Outliers = " + outliers + endline);
+                } else {
+                    sb.append("Cannot Find Recorder Buffer Period Data!" + endline);
+                }
+
+                // report player results
+                int[] playerBufferData = null;
+                int playerBufferDataMax = 0;
+                double playerBufferDataStdDev = 0.0;
+                switch (mAudioThreadType) {
+                    case Constant.AUDIO_THREAD_TYPE_JAVA:
+                        playerBufferData = mPlayerBufferPeriod.getBufferPeriodArray();
+                        playerBufferDataMax = mPlayerBufferPeriod.getMaxBufferPeriod();
+                        playerBufferDataStdDev = mPlayerBufferPeriod.getStdDevBufferPeriod();
+                        break;
+                    case Constant.AUDIO_THREAD_TYPE_NATIVE:
+                        playerBufferData = mNativePlayerBufferPeriodArray;
+                        playerBufferDataMax = mNativePlayerMaxBufferPeriod;
+                        playerBufferDataStdDev = mNativePlayerStdDevBufferPeriod;
+                        break;
+                }
+                // report expected player buffer period
+                sb.append("Expected Player Buffer Period (ms) = " +
+                        mPlayerCallbackTimes.getExpectedBufferPeriod() + endline);
+                if (playerBufferData != null) {
+                    // this is the range of data that actually has values
+                    int usefulDataRange = Math.min(playerBufferDataMax + 1,
+                            playerBufferData.length);
+                    int[] usefulBufferData = Arrays.copyOfRange(playerBufferData, 0,
+                            usefulDataRange);
+                    PerformanceMeasurement measurement = new PerformanceMeasurement(
+                            mPlayerCallbackTimes.getExpectedBufferPeriod(), usefulBufferData);
+                    float playerPercentAtExpected = measurement.percentBufferPeriodsAtExpected();
+                    double benchmark = measurement.computeWeightedBenchmark();
+                    int outliers = measurement.countOutliers();
+                    sb.append("Player Buffer Periods At Expected = "
+                            + String.format("%.5f%%", playerPercentAtExpected * 100) + endline);
+
+                    sb.append("Player Buffer Period Std Dev = "
+                            + String.format(Locale.US, "%.5f ms", playerBufferDataStdDev)
+                            + endline);
+
+                    // output thousandths of a percent not at expected buffer period
+                    sb.append("kth% Late Player Buffer Callbacks = "
+                            + String.format("%.5f", (1 - playerPercentAtExpected) * 100000)
+                            + endline);
+                    sb.append("Player Benchmark = " + benchmark + endline);
+                    sb.append("Player Number of Outliers = " + outliers + endline);
+
+                } else {
+                    sb.append("Cannot Find Player Buffer Period Data!" + endline);
+                }
+                // report glitches per hour
+                int numberOfGlitches = estimateNumberOfGlitches(mGlitchesData);
+                float testDurationInHours = mBufferTestElapsedSeconds
+                        / (float) Constant.SECONDS_PER_HOUR;
+
+                // Report glitches per hour if sufficient data is available, i.e. at least half an hour
+                if (testDurationInHours >= .5) {
+                    int glitchesPerHour = (int) Math.ceil(numberOfGlitches/testDurationInHours);
+                    sb.append("Glitches Per Hour = " + glitchesPerHour + endline);
+                }
+                sb.append("Total Number of Glitches = " + numberOfGlitches + endline);
+
+                // report if the total glitching interval is too long
+                sb.append("Total glitching interval too long = " +
+                        mGlitchingIntervalTooLong);
+
+                sb.append("\nLate Player Callbacks = ");
+                sb.append(mPlayerCallbackTimes.getNumLateOrEarlyCallbacks());
+                sb.append("\nLate Player Callbacks Exceeded Capacity = ");
+                sb.append(mPlayerCallbackTimes.isCapacityExceeded());
+                sb.append("\nLate Recorder Callbacks = ");
+                sb.append(mRecorderCallbackTimes.getNumLateOrEarlyCallbacks());
+                sb.append("\nLate Recorder Callbacks Exceeded Capacity = ");
+                sb.append(mRecorderCallbackTimes.isCapacityExceeded());
+                sb.append("\n");
+        }
+
+
+        String info = getApp().getSystemInfo();
+        sb.append("SystemInfo = " + info + endline);
+
+        return sb;
+    }
+
+    /** Save a .txt file of glitch occurrences in ms from the beginning of the test. */
+    private void saveGlitchOccurrences(Uri uri, int[] glitchesData) {
+        ParcelFileDescriptor parcelFileDescriptor = null;
+        FileOutputStream outputStream;
+        try {
+            parcelFileDescriptor = getApplicationContext().getContentResolver().
+                    openFileDescriptor(uri, "w");
+
+            FileDescriptor fileDescriptor = parcelFileDescriptor.getFileDescriptor();
+            outputStream = new FileOutputStream(fileDescriptor);
+
+            log("Done creating output stream");
+
+            outputStream.write(GlitchesStringBuilder.getGlitchStringForFile(mFFTSamplingSize,
+                    mFFTOverlapSamples, glitchesData, mSamplingRate).getBytes());
+        } catch (Exception e) {
+            log("Failed to open text file " + e);
+        } finally {
+            try {
+                if (parcelFileDescriptor != null) {
+                    parcelFileDescriptor.close();
+                }
+            } catch (Exception e) {
+                e.printStackTrace();
+                log("Error closing ParcelFileDescriptor");
+            }
+        }
+    }
+
+    /**
+     * Estimate the number of glitches. This version of estimation will count two consecutive
+     * glitching intervals as one glitch. This is because two time intervals are partly overlapped.
+     * Note: If the total glitching intervals exceed the length of glitchesData, this estimation
+     * becomes incomplete. However, whether or not the total glitching interval is too long will
+     * also be indicated, and in the case it's true, we know something went wrong.
+     */
+    private static int estimateNumberOfGlitches(int[] glitchesData) {
+        final int discard = 10; // don't count glitches occurring at the first few FFT intervals
+        boolean isPreviousGlitch = false; // is there a glitch in previous interval or not
+        int previousFFTInterval = -1;
+        int count = 0;
+        // if there are three consecutive glitches, the first two will be counted as one,
+        // the third will be counted as another one
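+        // e.g. glitches at FFT intervals {12, 13, 20} count as two: 12 and 13 merge, 20 stands alone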
+        for (int i = 0; i < glitchesData.length; i++) {
+            if (glitchesData[i] > discard) {
+                if (glitchesData[i] == previousFFTInterval + 1 && isPreviousGlitch) {
+                    isPreviousGlitch = false;
+                    previousFFTInterval = glitchesData[i];
+                } else {
+                    isPreviousGlitch = true;
+                    previousFFTInterval = glitchesData[i];
+                    count += 1;
+                }
+            }
+
+        }
+
+        return count;
+    }
+
+
+    /**
+     * Estimate the number of glitches. This version of estimation will count the whole consecutive
+     * intervals as one glitch. This version is not currently used.
+     * Note: If the total glitching intervals exceed the length of glitchesData, this estimation
+     * becomes incomplete. However, whether or not the total glitching interval is too long will
+     * also be indicated, and in the case it's true, we know something went wrong.
+     */
+    private static int estimateNumberOfGlitches2(int[] glitchesData) {
+        final int discard = 10; // don't count glitches occurring at the first few FFT intervals
+        int previousFFTInterval = -1;
+        int count = 0;
+        for (int i = 0; i < glitchesData.length; i++) {
+            if (glitchesData[i] > discard) {
+                if (glitchesData[i] != previousFFTInterval + 1) {
+                    count += 1;
+                }
+                previousFFTInterval = glitchesData[i];
+            }
+        }
+        return count;
+    }
+
+    /**
+     * Check whether we have the RECORD_AUDIO permission
+     * @return true if we do
+     */
+    private boolean hasRecordAudioPermission(){
+        boolean hasPermission = (ContextCompat.checkSelfPermission(this,
+                Manifest.permission.RECORD_AUDIO) == PackageManager.PERMISSION_GRANTED);
+
+        log("Has RECORD_AUDIO permission? " + hasPermission);
+        return hasPermission;
+    }
+
+    /**
+     * Requests the RECORD_AUDIO permission from the user
+     */
+    private void requestRecordAudioPermission(int requestCode){
+
+        String requiredPermission = Manifest.permission.RECORD_AUDIO;
+
+        // If the user previously denied this permission then show a message explaining why
+        // this permission is needed
+        if (ActivityCompat.shouldShowRequestPermissionRationale(this,
+                requiredPermission)) {
+
+            showToast("This app needs to record audio through the microphone to test the device's "+
+                    "performance");
+        }
+
+        // request the permission.
+        ActivityCompat.requestPermissions(this, new String[]{requiredPermission}, requestCode);
+    }
+
+    @Override
+    public void onRequestPermissionsResult(int requestCode,
+                                           String permissions[], int[] grantResults) {
+
+        // Save all files or run requested test after being granted permissions
+        if (grantResults.length > 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED) {
+            if (requestCode == PERMISSIONS_REQUEST_WRITE_EXTERNAL_STORAGE_RESULTS ) {
+                saveAllTo(getFileNamePrefix());
+            } else if (requestCode == PERMISSIONS_REQUEST_WRITE_EXTERNAL_STORAGE_SCRIPT ) {
+                AtraceScriptsWriter.writeScriptsToFile(this);
+            } else if (requestCode == PERMISSIONS_REQUEST_RECORD_AUDIO_BUFFER) {
+                startBufferTest();
+            } else if (requestCode == PERMISSIONS_REQUEST_RECORD_AUDIO_LATENCY) {
+                startLatencyTest();
+            }
+        }
+    }
+
+    /**
+     * Check whether we have the WRITE_EXTERNAL_STORAGE permission
+     *
+     * @return true if we do
+     */
+    private boolean hasWriteFilePermission() {
+        boolean hasPermission = (ContextCompat.checkSelfPermission(this,
+                Manifest.permission.WRITE_EXTERNAL_STORAGE) == PackageManager.PERMISSION_GRANTED);
+
+        log("Has WRITE_EXTERNAL_STORAGE? " + hasPermission);
+        return hasPermission;
+    }
+
+    /**
+     * Requests the WRITE_EXTERNAL_STORAGE permission from the user
+     */
+    private void requestWriteFilePermission(int requestCode) {
+
+        String requiredPermission = Manifest.permission.WRITE_EXTERNAL_STORAGE;
+
+        // request the permission.
+        ActivityCompat.requestPermissions(this, new String[]{requiredPermission}, requestCode);
+    }
+
+    /**
+     * Receive results from save files DialogAlert and either save all files directly
+     * or use filename dialog
+     */
+    @Override
+    public void onSaveDialogSelect(DialogFragment dialog, boolean saveWithoutDialog) {
+        if (saveWithoutDialog) {
+            saveAllTo("loopback_" + mTestStartTimeString);
+        } else {
+            SaveFilesWithDialog();
+        }
+    }
+
+    private void restoreInstanceState(Bundle in) {
+        mWaveData = in.getDoubleArray("mWaveData");
+
+        mTestType = in.getInt("mTestType");
+        mMicSource = in.getInt("mMicSource");
+        mAudioThreadType = in.getInt("mAudioThreadType");
+        mSamplingRate = in.getInt("mSamplingRate");
+        mChannelIndex = in.getInt("mChannelIndex");
+        mSoundLevel = in.getInt("mSoundLevel");
+        mPlayerBufferSizeInBytes = in.getInt("mPlayerBufferSizeInBytes");
+        mRecorderBufferSizeInBytes = in.getInt("mRecorderBufferSizeInBytes");
+
+        mTestStartTimeString = in.getString("mTestStartTimeString");
+
+        mGlitchesData = in.getIntArray("mGlitchesData");
+        if(mGlitchesData != null) {
+            mGlitchingIntervalTooLong = in.getBoolean("mGlitchingIntervalTooLong");
+            mFFTSamplingSize = in.getInt("mFFTSamplingSize");
+            mFFTOverlapSamples = in.getInt("mFFTOverlapSamples");
+            mBufferTestStartTime = in.getLong("mBufferTestStartTime");
+            mBufferTestElapsedSeconds = in.getInt("mBufferTestElapsedSeconds");
+            mBufferTestDurationInSeconds = in.getInt("mBufferTestDurationInSeconds");
+            mBufferTestWavePlotDurationInSeconds =
+                    in.getInt("mBufferTestWavePlotDurationInSeconds");
+
+            findViewById(R.id.glitchReportPanel).setVisibility(View.VISIBLE);
+        }
+
+        if(mWaveData != null) {
+            mCorrelation = in.getParcelable("mCorrelation");
+            mPlayerBufferPeriod = in.getParcelable("mPlayerBufferPeriod");
+            mRecorderBufferPeriod = in.getParcelable("mRecorderBufferPeriod");
+            mPlayerCallbackTimes = in.getParcelable("mPlayerCallbackTimes");
+            mRecorderCallbackTimes = in.getParcelable("mRecorderCallbackTimes");
+
+            mNativePlayerBufferPeriodArray = in.getIntArray("mNativePlayerBufferPeriodArray");
+            mNativePlayerMaxBufferPeriod = in.getInt("mNativePlayerMaxBufferPeriod");
+            mNativeRecorderBufferPeriodArray = in.getIntArray("mNativeRecorderBufferPeriodArray");
+            mNativeRecorderMaxBufferPeriod = in.getInt("mNativeRecorderMaxBufferPeriod");
+
+            mWavePlotView.setData(mWaveData, mSamplingRate);
+            refreshState();
+            findViewById(R.id.zoomAndSaveControlPanel).setVisibility(View.VISIBLE);
+            findViewById(R.id.resultSummary).setVisibility(View.VISIBLE);
+        }
+    }
+
+    @Override
+    protected void onSaveInstanceState(Bundle out) {
+        super.onSaveInstanceState(out);
+        // TODO: keep larger pieces of data in a fragment to speed up response to rotation
+        out.putDoubleArray("mWaveData", mWaveData);
+
+        out.putInt("mTestType", mTestType);
+        out.putInt("mMicSource", mMicSource);
+        out.putInt("mAudioThreadType", mAudioThreadType);
+        out.putInt("mSamplingRate", mSamplingRate);
+        out.putInt("mChannelIndex", mChannelIndex);
+        out.putInt("mSoundLevel", mSoundLevel);
+        out.putInt("mPlayerBufferSizeInBytes", mPlayerBufferSizeInBytes);
+        out.putInt("mRecorderBufferSizeInBytes", mRecorderBufferSizeInBytes);
+        out.putString("mTestStartTimeString", mTestStartTimeString);
+
+        out.putParcelable("mCorrelation", mCorrelation);
+        out.putParcelable("mPlayerBufferPeriod", mPlayerBufferPeriod);
+        out.putParcelable("mRecorderBufferPeriod", mRecorderBufferPeriod);
+        out.putParcelable("mPlayerCallbackTimes", mPlayerCallbackTimes);
+        out.putParcelable("mRecorderCallbackTimes", mRecorderCallbackTimes);
+
+        out.putIntArray("mNativePlayerBufferPeriodArray", mNativePlayerBufferPeriodArray);
+        out.putInt("mNativePlayerMaxBufferPeriod", mNativePlayerMaxBufferPeriod);
+        out.putIntArray("mNativeRecorderBufferPeriodArray", mNativeRecorderBufferPeriodArray);
+        out.putInt("mNativeRecorderMaxBufferPeriod", mNativeRecorderMaxBufferPeriod);
+
+        // buffer test values
+        out.putIntArray("mGlitchesData", mGlitchesData);
+        out.putBoolean("mGlitchingIntervalTooLong", mGlitchingIntervalTooLong);
+        out.putInt("mFFTSamplingSize", mFFTSamplingSize);
+        out.putInt("mFFTOverlapSamples", mFFTOverlapSamples);
+        out.putLong("mBufferTestStartTime", mBufferTestStartTime);
+        out.putInt("mBufferTestElapsedSeconds", mBufferTestElapsedSeconds);
+        out.putInt("mBufferTestDurationInSeconds", mBufferTestDurationInSeconds);
+        out.putInt("mBufferTestWavePlotDurationInSeconds", mBufferTestWavePlotDurationInSeconds);
+    }
+}
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/LoopbackApplication.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/LoopbackApplication.java
new file mode 100644
index 0000000..f38ef5f
--- /dev/null
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/LoopbackApplication.java
@@ -0,0 +1,435 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.drrickorang.loopback;
+
+import android.app.Application;
+import android.content.Context;
+import android.content.pm.PackageManager;
+import android.content.res.Configuration;
+import android.media.AudioFormat;
+import android.media.AudioManager;
+import android.media.AudioRecord;
+import android.media.AudioTrack;
+import android.media.MediaRecorder;
+import android.os.Build;
+import android.util.Log;
+
+
+/**
+ * This class maintains global application state; it also keeps and computes the default
+ * values of all the audio settings.
+ */
+
+public class LoopbackApplication extends Application {
+    private static final String TAG = "LoopbackApplication";
+
+    // Initial setting values are defined here; some are modified in computeDefaults()
+    private int mSamplingRate = 48000;
+    private int mChannelIndex = -1;
+    private int mPlayerBufferSizeInBytes = 0; // for both native and java
+    private int mRecorderBuffSizeInBytes = 0; // for both native and java
+    private int mAudioThreadType = Constant.AUDIO_THREAD_TYPE_JAVA; //0:Java, 1:Native (JNI)
+    private int mMicSource = 3; //maps to MediaRecorder.AudioSource.VOICE_RECOGNITION;
+    private int mPerformanceMode = -1; // DEFAULT
+    private int mIgnoreFirstFrames = 0;
+    private int mBufferTestDurationInSeconds = 5;
+    private int mBufferTestWavePlotDurationInSeconds = 7;
+    private int mNumberOfLoadThreads = 4;
+    private boolean mCaptureSysTraceEnabled = false;
+    private boolean mCaptureBugreportEnabled = false;
+    private boolean mCaptureWavSnippetsEnabled = false;
+    private boolean mSoundLevelCalibrationEnabled = false;
+    private int mNumStateCaptures = Constant.DEFAULT_NUM_CAPTURES;
+
+    public void setDefaults() {
+        if (isSafeToUseSles()) {
+            mAudioThreadType = Constant.AUDIO_THREAD_TYPE_NATIVE;
+        } else {
+            mAudioThreadType = Constant.AUDIO_THREAD_TYPE_JAVA;
+        }
+
+        computeDefaults();
+    }
+
+    int getSamplingRate() {
+        return mSamplingRate;
+    }
+
+    void setSamplingRate(int samplingRate) {
+        mSamplingRate = clamp(samplingRate, Constant.SAMPLING_RATE_MIN, Constant.SAMPLING_RATE_MAX);
+    }
+
+    int getChannelIndex() { return mChannelIndex; }
+
+    void setChannelIndex(int channelIndex) { mChannelIndex = channelIndex; }
+
+    int getAudioThreadType() {
+        return mAudioThreadType;
+    }
+
+
+    void setAudioThreadType(int audioThreadType) {
+        if (isSafeToUseSles() && audioThreadType != Constant.AUDIO_THREAD_TYPE_JAVA) {
+            // native audio is safe to use and the Java thread was not selected
+            mAudioThreadType = Constant.AUDIO_THREAD_TYPE_NATIVE;
+        } else {
+            mAudioThreadType = Constant.AUDIO_THREAD_TYPE_JAVA;
+        }
+    }
+
+
+    int getMicSource() {
+        return mMicSource;
+    }
+
+
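+    /**
+     * Map the mic source selection to the platform constant: a MediaRecorder.AudioSource
+     * value on the Java path, or an OpenSL ES recording preset on the native path.
+     */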
+    int mapMicSource(int threadType, int source) {
+        int mappedSource = 0;
+
+        //experiment with remote submix
+        if (threadType == Constant.AUDIO_THREAD_TYPE_JAVA) {
+            switch (source) {
+            default:
+            case 0: //DEFAULT
+                mappedSource = MediaRecorder.AudioSource.DEFAULT;
+                break;
+            case 1: //MIC
+                mappedSource = MediaRecorder.AudioSource.MIC;
+                break;
+            case 2: //CAMCORDER
+                mappedSource = MediaRecorder.AudioSource.CAMCORDER;
+                break;
+            case 3: //VOICE_RECOGNITION
+                mappedSource = MediaRecorder.AudioSource.VOICE_RECOGNITION;
+                break;
+            case 4: //VOICE_COMMUNICATION
+                mappedSource = MediaRecorder.AudioSource.VOICE_COMMUNICATION;
+                break;
+            case 5: //REMOTE_SUBMIX (JAVA ONLY)
+                mappedSource = MediaRecorder.AudioSource.REMOTE_SUBMIX;
+                break;
+            case 6: //UNPROCESSED
+                if (Build.VERSION.SDK_INT > Build.VERSION_CODES.M) {
+                    mappedSource = 9 /*MediaRecorder.AudioSource.UNPROCESSED*/;
+                } else {
+                    mappedSource = MediaRecorder.AudioSource.DEFAULT;
+                }
+                break;
+            }
+        } else if (threadType == Constant.AUDIO_THREAD_TYPE_NATIVE) {
+            // FIXME taken from OpenSLES_AndroidConfiguration.h
+            switch (source) {
+            default:
+            case 0: //DEFAULT
+                mappedSource = 0x00; //SL_ANDROID_RECORDING_PRESET_NONE
+                break;
+            case 1: //MIC
+                mappedSource = 0x01; //SL_ANDROID_RECORDING_PRESET_GENERIC
+                break;
+            case 2: //CAMCORDER
+                mappedSource = 0x02; //SL_ANDROID_RECORDING_PRESET_CAMCORDER
+                break;
+            case 3: //VOICE_RECOGNITION
+                mappedSource = 0x03; //SL_ANDROID_RECORDING_PRESET_VOICE_RECOGNITION
+                break;
+            case 4: //VOICE_COMMUNICATION
+                mappedSource = 0x04; //SL_ANDROID_RECORDING_PRESET_VOICE_COMMUNICATION
+                break;
+            case 5: //REMOTE_SUBMIX (JAVA ONLY)
+                mappedSource = 0x00; //SL_ANDROID_RECORDING_PRESET_NONE;
+                break;
+            case 6: //UNPROCESSED
+                // FIXME should use >=
+                if (Build.VERSION.SDK_INT > Build.VERSION_CODES.M) {
+                    mappedSource = 0x05; //SL_ANDROID_RECORDING_PRESET_UNPROCESSED;
+                } else {
+                    mappedSource = 0x00; //SL_ANDROID_RECORDING_PRESET_NONE
+                }
+                break;
+            }
+        }
+
+        return mappedSource;
+    }
+
+
+    String getMicSourceString(int source) {
+        String name = null;
+        String[] myArray = getResources().getStringArray(R.array.mic_source_array);
+
+        if (myArray != null && source >= 0 && source < myArray.length) {
+            name = myArray[source];
+        }
+        return name;
+    }
+
+    void setMicSource(int micSource) { mMicSource = micSource; }
+
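+    /**
+     * Map the performance mode selection to the corresponding OpenSL ES performance mode;
+     * on releases that predate performance modes the value is coerced to -1 (default).
+     */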
+    int mapPerformanceMode(int performanceMode) {
+        int mappedPerformanceMode = -1;
+
+        // FIXME taken from OpenSLES_AndroidConfiguration.h
+        switch (performanceMode) {
+        case 0: // NONE
+        case 1: // LATENCY
+        case 2: // LATENCY_EFFECTS
+        case 3: // POWER_SAVING
+            // FIXME should use >=
+            if (Build.VERSION.SDK_INT > Build.VERSION_CODES.M) {
+                mappedPerformanceMode = performanceMode;
+                break;
+            }
+            // fall through
+        case -1:
+        default:
+            mappedPerformanceMode = -1;
+            break;
+        }
+
+        return mappedPerformanceMode;
+    }
+
+
+    int getPerformanceMode() {
+        return mPerformanceMode;
+    }
+
+    String getPerformanceModeString(int performanceMode) {
+        String name = null;
+        String[] myArray = getResources().getStringArray(R.array.performance_mode_array);
+
+        if (myArray != null && performanceMode >= -1 && performanceMode < myArray.length - 1) {
+            name = myArray[performanceMode + 1];
+        }
+        return name;
+    }
+
+
+    void setPerformanceMode(int performanceMode) { mPerformanceMode = performanceMode; }
+
+    int getIgnoreFirstFrames() {
+        return mIgnoreFirstFrames;
+    }
+
+    void setIgnoreFirstFrames(int ignoreFirstFrames) {
+        mIgnoreFirstFrames = ignoreFirstFrames;
+    }
+
+    int getPlayerBufferSizeInBytes() {
+        return mPlayerBufferSizeInBytes;
+    }
+
+
+    void setPlayerBufferSizeInBytes(int playerBufferSizeInBytes) {
+        mPlayerBufferSizeInBytes = clamp(playerBufferSizeInBytes, Constant.PLAYER_BUFFER_FRAMES_MIN,
+                Constant.PLAYER_BUFFER_FRAMES_MAX);
+    }
+
+
+    int getRecorderBufferSizeInBytes() {
+        return mRecorderBuffSizeInBytes;
+    }
+
+
+    void setRecorderBufferSizeInBytes(int recorderBufferSizeInBytes) {
+        mRecorderBuffSizeInBytes = clamp(recorderBufferSizeInBytes,
+                Constant.RECORDER_BUFFER_FRAMES_MIN, Constant.RECORDER_BUFFER_FRAMES_MAX);
+    }
+
+
+    int getBufferTestDuration() {
+        return mBufferTestDurationInSeconds;
+    }
+
+
+    void setBufferTestDuration(int bufferTestDurationInSeconds) {
+        mBufferTestDurationInSeconds = clamp(bufferTestDurationInSeconds,
+                Constant.BUFFER_TEST_DURATION_SECONDS_MIN,
+                Constant.BUFFER_TEST_DURATION_SECONDS_MAX);
+    }
+
+
+    int getBufferTestWavePlotDuration() {
+        return mBufferTestWavePlotDurationInSeconds;
+    }
+
+
+    void setBufferTestWavePlotDuration(int bufferTestWavePlotDurationInSeconds) {
+        mBufferTestWavePlotDurationInSeconds = clamp(bufferTestWavePlotDurationInSeconds,
+                Constant.BUFFER_TEST_WAVE_PLOT_DURATION_SECONDS_MIN,
+                Constant.BUFFER_TEST_WAVE_PLOT_DURATION_SECONDS_MAX);
+    }
+
+    int getNumberOfLoadThreads() {
+        return mNumberOfLoadThreads;
+    }
+
+    void setNumberOfLoadThreads(int numberOfLoadThreads) {
+        mNumberOfLoadThreads = clamp(numberOfLoadThreads, Constant.MIN_NUM_LOAD_THREADS,
+                Constant.MAX_NUM_LOAD_THREADS);
+    }
+
+    public void setNumberOfCaptures (int num){
+        mNumStateCaptures = clamp(num, Constant.MIN_NUM_CAPTURES, Constant.MAX_NUM_CAPTURES);
+    }
+
+    public void setCaptureSysTraceEnabled (boolean enabled){
+        mCaptureSysTraceEnabled = enabled;
+    }
+
+    public void setCaptureBugreportEnabled(boolean enabled) {
+        mCaptureBugreportEnabled = enabled;
+    }
+
+    public void setCaptureWavsEnabled (boolean enabled){
+        mCaptureWavSnippetsEnabled = enabled;
+    }
+
+    public void setSoundLevelCalibrationEnabled(boolean enabled) {
+        mSoundLevelCalibrationEnabled = enabled;
+    }
+
+    public boolean isCaptureEnabled() {
+        return isCaptureSysTraceEnabled() || isCaptureBugreportEnabled();
+    }
+
+    public boolean isCaptureSysTraceEnabled() {
+        return mCaptureSysTraceEnabled;
+    }
+
+    public boolean isCaptureBugreportEnabled() {
+        return mCaptureBugreportEnabled;
+    }
+
+    public boolean isSoundLevelCalibrationEnabled() {
+        return mSoundLevelCalibrationEnabled;
+    }
+
+    public int getNumStateCaptures() {
+        return mNumStateCaptures;
+    }
+
+    public boolean isCaptureWavSnippetsEnabled() {
+        return mCaptureWavSnippetsEnabled;
+    }
+
+    /**
+     * Returns value if it lies within the inclusive range [min, max]; otherwise
+     * returns min when value is below the range, or max when it is above the range.
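+     * For example, clamp(7, 0, 5) returns 5 and clamp(-3, 0, 5) returns 0.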
+     */
+    private int clamp(int value, int min, int max) {
+
+        if (max < min) throw new UnsupportedOperationException("min must be <= max");
+
+        if (value < min) return min;
+        else if (value > max) return max;
+        else return value;
+    }
+
+
+    /** Compute Default audio settings. */
+    public void computeDefaults() {
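+        // Use the device's preferred output sample rate; this presumably avoids resampling in the framework.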
+        int samplingRate = AudioTrack.getNativeOutputSampleRate(AudioManager.STREAM_MUSIC);
+        setSamplingRate(samplingRate);
+
+        if (mAudioThreadType == Constant.AUDIO_THREAD_TYPE_NATIVE) {
+
+            int minBufferSizeInFrames;
+            if (isSafeToUseGetProperty()) {
+                AudioManager am = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
+                String value = am.getProperty(AudioManager.PROPERTY_OUTPUT_FRAMES_PER_BUFFER);
+                minBufferSizeInFrames = Integer.parseInt(value);
+            } else {
+                minBufferSizeInFrames = 1024;
+                log("getProperty() not available, using fallback buffer size");
+            }
+            int minBufferSizeInBytes = Constant.BYTES_PER_FRAME * minBufferSizeInFrames;
+
+            setPlayerBufferSizeInBytes(minBufferSizeInBytes);
+            setRecorderBufferSizeInBytes(minBufferSizeInBytes);
+        } else {
+            int minPlayerBufferSizeInBytes = AudioTrack.getMinBufferSize(samplingRate,
+                    AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT);
+            setPlayerBufferSizeInBytes(minPlayerBufferSizeInBytes);
+
+            int minRecorderBufferSizeInBytes =  AudioRecord.getMinBufferSize(samplingRate,
+                    AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
+            setRecorderBufferSizeInBytes(minRecorderBufferSizeInBytes);
+        }
+
+    }
+
+
+    String getSystemInfo() {
+        String info = null;
+        try {
+            int versionCode = getApplicationContext().getPackageManager().getPackageInfo(
+                              getApplicationContext().getPackageName(), 0).versionCode;
+            String versionName = getApplicationContext().getPackageManager().getPackageInfo(
+                                 getApplicationContext().getPackageName(), 0).versionName;
+            info = "App ver. " + versionCode + "." + versionName + " | " + Build.MODEL + " | " +
+                    Build.FINGERPRINT;
+        } catch (PackageManager.NameNotFoundException e) {
+            e.printStackTrace();
+        }
+
+        return info;
+    }
+
+
+    /** Check if it's safe to use Open SLES. */
+    boolean isSafeToUseSles() {
+        return  Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD;
+    }
+
+
+    /** Check if it's safe to use getProperty(). */
+    boolean isSafeToUseGetProperty() {
+        return  Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1;
+    }
+
+
+    @Override
+    public void onConfigurationChanged(Configuration newConfig) {
+        super.onConfigurationChanged(newConfig);
+    }
+
+
+    @Override
+    public void onCreate() {
+        super.onCreate();
+
+        setDefaults();
+    }
+
+
+    @Override
+    public void onLowMemory() {
+        super.onLowMemory();
+    }
+
+
+    @Override
+    public void onTerminate() {
+        super.onTerminate();
+    }
+
+
+    private static void log(String msg) {
+        Log.v(TAG, msg);
+    }
+}
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/LoopbackAudioThread.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/LoopbackAudioThread.java
new file mode 100644
index 0000000..b4c3b3a
--- /dev/null
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/LoopbackAudioThread.java
@@ -0,0 +1,400 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.drrickorang.loopback;
+
+import android.content.Context;
+import android.media.AudioDeviceInfo;
+import android.media.AudioFormat;
+import android.media.AudioManager;
+import android.media.AudioTrack;
+import android.media.MediaRecorder;
+import android.os.Build;
+import android.util.Log;
+import android.os.Handler;
+import android.os.Message;
+
+/**
+ * An AudioTrack-based playback thread that drives the Java loopback tests
+ * together with a companion recorder thread.
+ */
+
+public class LoopbackAudioThread extends Thread {
+    private static final String TAG = "LoopbackAudioThread";
+
+    private static final int THREAD_SLEEP_DURATION_MS = 1;
+
+    // for latency test
+    static final int LOOPBACK_AUDIO_THREAD_MESSAGE_LATENCY_REC_STARTED = 991;
+    static final int LOOPBACK_AUDIO_THREAD_MESSAGE_LATENCY_REC_ERROR = 992;
+    static final int LOOPBACK_AUDIO_THREAD_MESSAGE_LATENCY_REC_COMPLETE = 993;
+    static final int LOOPBACK_AUDIO_THREAD_MESSAGE_LATENCY_REC_STOP = 994;
+
+    // for buffer test
+    static final int LOOPBACK_AUDIO_THREAD_MESSAGE_BUFFER_REC_STARTED = 996;
+    static final int LOOPBACK_AUDIO_THREAD_MESSAGE_BUFFER_REC_ERROR = 997;
+    static final int LOOPBACK_AUDIO_THREAD_MESSAGE_BUFFER_REC_COMPLETE = 998;
+    static final int LOOPBACK_AUDIO_THREAD_MESSAGE_BUFFER_REC_STOP = 999;
+
+    public boolean           mIsRunning = false;
+    public AudioTrack        mAudioTrack;
+    public int               mSessionId;
+    private Thread           mRecorderThread;
+    private RecorderRunnable mRecorderRunnable;
+
+    private final int mSamplingRate;
+    private final int mChannelIndex;
+    private final int mChannelConfigIn = AudioFormat.CHANNEL_IN_MONO;
+    private final int mAudioFormat = AudioFormat.ENCODING_PCM_16BIT;
+    private int       mMinPlayerBufferSizeInBytes = 0;
+    private int       mMinRecorderBuffSizeInBytes = 0;
+    private int       mMinPlayerBufferSizeSamples = 0;
+    private final int mMicSource;
+    private final int mChannelConfigOut = AudioFormat.CHANNEL_OUT_MONO;
+    private boolean   mIsPlaying = false;
+    private boolean   mIsRequestStop = false;
+    private Handler   mMessageHandler;
+    // This is the pipe that connects the player and the recorder in latency test.
+    private PipeShort mLatencyTestPipe = new PipeShort(Constant.MAX_SHORTS);
+
+    // for buffer test
+    private BufferPeriod   mRecorderBufferPeriod; // used to collect recorder's buffer period
+    private BufferPeriod   mPlayerBufferPeriod; // used to collect player's buffer period
+    private int            mTestType; // latency test or buffer test
+    private int            mBufferTestDurationInSeconds; // Duration of actual buffer test
+    private Context        mContext;
+    private int            mBufferTestWavePlotDurationInSeconds;
+    private final CaptureHolder mCaptureHolder;
+    private boolean        mIsAdjustingSoundLevel = true; // only used in buffer test
+
+
+    public LoopbackAudioThread(int samplingRate, int playerBufferInBytes, int recorderBufferInBytes,
+                               int micSource, BufferPeriod recorderBufferPeriod,
+                               BufferPeriod playerBufferPeriod, int testType,
+                               int bufferTestDurationInSeconds,
+                               int bufferTestWavePlotDurationInSeconds, Context context,
+                               int channelIndex, CaptureHolder captureHolder) {
+        mSamplingRate = samplingRate;
+        mMinPlayerBufferSizeInBytes = playerBufferInBytes;
+        mMinRecorderBuffSizeInBytes = recorderBufferInBytes;
+        mMicSource = micSource;
+        mRecorderBufferPeriod = recorderBufferPeriod;
+        mPlayerBufferPeriod = playerBufferPeriod;
+        mTestType = testType;
+        mBufferTestDurationInSeconds = bufferTestDurationInSeconds;
+        mBufferTestWavePlotDurationInSeconds = bufferTestWavePlotDurationInSeconds;
+        mContext = context;
+        mChannelIndex = channelIndex;
+        mCaptureHolder = captureHolder;
+
+        setName("Loopback_LoopbackAudio");
+    }
+
+
+    public void run() {
+        setPriority(Thread.MAX_PRIORITY);
+
+        if (mMinPlayerBufferSizeInBytes <= 0) {
+            mMinPlayerBufferSizeInBytes = AudioTrack.getMinBufferSize(mSamplingRate,
+                                        mChannelConfigOut, mAudioFormat);
+
+            log("Player: computed min buff size = " + mMinPlayerBufferSizeInBytes + " bytes");
+        } else {
+            log("Player: using min buff size = " + mMinPlayerBufferSizeInBytes + " bytes");
+        }
+
+        mMinPlayerBufferSizeSamples = mMinPlayerBufferSizeInBytes / Constant.BYTES_PER_FRAME;
+        short[] audioShortArrayOut = new short[mMinPlayerBufferSizeSamples];
+
+        // we may want to adjust this to a different multiple of mMinPlayerBufferSizeSamples
+        int audioTrackWriteDataSize = mMinPlayerBufferSizeSamples;
+
+        // used for buffer test only
+        final double frequency1 = Constant.PRIME_FREQUENCY_1;
+        final double frequency2 = Constant.PRIME_FREQUENCY_2; // not actually used
+        short[] bufferTestTone = new short[audioTrackWriteDataSize]; // used by AudioTrack.write()
+        ToneGeneration toneGeneration = new SineWaveTone(mSamplingRate, frequency1);
+
+        mRecorderRunnable = new RecorderRunnable(mLatencyTestPipe, mSamplingRate, mChannelConfigIn,
+                mAudioFormat, mMinRecorderBuffSizeInBytes, MediaRecorder.AudioSource.MIC, this,
+                mRecorderBufferPeriod, mTestType, frequency1, frequency2,
+                mBufferTestWavePlotDurationInSeconds, mContext, mChannelIndex, mCaptureHolder);
+        mRecorderRunnable.setBufferTestDurationInSeconds(mBufferTestDurationInSeconds);
+        mRecorderThread = new Thread(mRecorderRunnable);
+        mRecorderThread.setName("Loopback_RecorderRunnable");
+
+        // both player and recorder run at max priority
+        mRecorderThread.setPriority(Thread.MAX_PRIORITY);
+        mRecorderThread.start();
+
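+        // On M and later, use AudioTrack.Builder so a specific channel index mask can be requested; older releases fall back to the stream-type constructor.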
+        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
+            mAudioTrack = new AudioTrack.Builder()
+                    .setAudioFormat((mChannelIndex < 0 ?
+                            new AudioFormat.Builder().setChannelMask(AudioFormat.CHANNEL_OUT_MONO) :
+                            new AudioFormat.Builder().setChannelIndexMask(1 << mChannelIndex))
+                            .setSampleRate(mSamplingRate)
+                            .setEncoding(mAudioFormat)
+                            .build())
+                    .setBufferSizeInBytes(mMinPlayerBufferSizeInBytes)
+                    .setTransferMode(AudioTrack.MODE_STREAM)
+                    .build();
+        } else {
+            mAudioTrack = new AudioTrack(AudioManager.STREAM_MUSIC,
+                    mSamplingRate,
+                    mChannelConfigOut,
+                    mAudioFormat,
+                    mMinPlayerBufferSizeInBytes,
+                    AudioTrack.MODE_STREAM /* FIXME runtime test for API level 9,
+                    mSessionId */);
+        }
+
+        if (mRecorderRunnable != null && mAudioTrack.getState() == AudioTrack.STATE_INITIALIZED) {
+            mIsPlaying = false;
+            mIsRunning = true;
+
+            while (mIsRunning && mRecorderThread.isAlive()) {
+                if (mIsPlaying) {
+                    switch (mTestType) {
+                    case Constant.LOOPBACK_PLUG_AUDIO_THREAD_TEST_TYPE_LATENCY:
+                        // read from the pipe and play it out
+                        int samplesAvailable = mLatencyTestPipe.availableToRead();
+                        if (samplesAvailable > 0) {
+                            int samplesOfInterest = Math.min(samplesAvailable,
+                                    mMinPlayerBufferSizeSamples);
+
+                            int samplesRead = mLatencyTestPipe.read(audioShortArrayOut, 0,
+                                                                    samplesOfInterest);
+                            mAudioTrack.write(audioShortArrayOut, 0, samplesRead);
+                            mPlayerBufferPeriod.collectBufferPeriod();
+                        }
+                        break;
+                    case Constant.LOOPBACK_PLUG_AUDIO_THREAD_TEST_TYPE_BUFFER_PERIOD:
+                        // don't collect buffer period when we are still adjusting the sound level
+                        if (mIsAdjustingSoundLevel) {
+                            toneGeneration.generateTone(bufferTestTone, bufferTestTone.length);
+                            mAudioTrack.write(bufferTestTone, 0, audioTrackWriteDataSize);
+                        } else {
+                            mPlayerBufferPeriod.collectBufferPeriod();
+                            toneGeneration.generateTone(bufferTestTone, bufferTestTone.length);
+                            mAudioTrack.write(bufferTestTone, 0, audioTrackWriteDataSize);
+                        }
+                        break;
+                    }
+                } else {
+                    // wait for a bit to allow AudioTrack to start playing
+                    if (mIsRunning) {
+                        try {
+                            sleep(THREAD_SLEEP_DURATION_MS);
+                        } catch (InterruptedException e) {
+                            e.printStackTrace();
+                        }
+                    }
+                }
+            }
+            endTest();
+
+        } else {
+            log("Loopback Audio Thread couldn't run!");
+            mAudioTrack.release();
+            mAudioTrack = null;
+            if (mMessageHandler != null) {
+                Message msg = Message.obtain();
+                switch (mTestType) {
+                case Constant.LOOPBACK_PLUG_AUDIO_THREAD_TEST_TYPE_LATENCY:
+                    msg.what = LOOPBACK_AUDIO_THREAD_MESSAGE_LATENCY_REC_ERROR;
+                    break;
+                case Constant.LOOPBACK_PLUG_AUDIO_THREAD_TEST_TYPE_BUFFER_PERIOD:
+                    msg.what = LOOPBACK_AUDIO_THREAD_MESSAGE_BUFFER_REC_ERROR;
+                    break;
+                }
+
+                mMessageHandler.sendMessage(msg);
+            }
+
+        }
+    }
+
+
+    public void setMessageHandler(Handler messageHandler) {
+        mMessageHandler = messageHandler;
+    }
+
+
+    public void setIsAdjustingSoundLevel(boolean isAdjustingSoundLevel) {
+        mIsAdjustingSoundLevel = isAdjustingSoundLevel;
+    }
+
+
+    public void runTest() {
+        if (mIsRunning) {
+            // start test
+            if (mAudioTrack.getPlayState() == AudioTrack.PLAYSTATE_PLAYING) {
+                log("...run test, but still playing...");
+                endTest();
+            } else {
+                // start playing
+                mIsPlaying = true;
+                mAudioTrack.play();
+                boolean status = mRecorderRunnable.startRecording();
+
+                log("Started capture test");
+                if (mMessageHandler != null) {
+                    Message msg = Message.obtain();
+                    msg.what = LOOPBACK_AUDIO_THREAD_MESSAGE_LATENCY_REC_STARTED;
+                    if (!status) {
+                        msg.what = LOOPBACK_AUDIO_THREAD_MESSAGE_LATENCY_REC_ERROR;
+                    }
+
+                    mMessageHandler.sendMessage(msg);
+                }
+            }
+        }
+    }
+
+
+    public void runBufferTest() {
+        if (mIsRunning) {
+            // start test
+            if (mAudioTrack.getPlayState() == AudioTrack.PLAYSTATE_PLAYING) {
+                log("...run test, but still playing...");
+                endTest();
+            } else {
+                // start playing
+                mIsPlaying = true;
+                mAudioTrack.play();
+                boolean status = mRecorderRunnable.startBufferRecording();
+                log("Started capture test");
+                if (mMessageHandler != null) {
+                    Message msg = Message.obtain();
+                    msg.what = LOOPBACK_AUDIO_THREAD_MESSAGE_BUFFER_REC_STARTED;
+
+                    if (!status) {
+                        msg.what = LOOPBACK_AUDIO_THREAD_MESSAGE_BUFFER_REC_ERROR;
+                    }
+
+                    mMessageHandler.sendMessage(msg);
+                }
+            }
+        }
+    }
+
+
+    /** Clean some things up before sending out a message to LoopbackActivity. */
+    public void endTest() {
+        switch (mTestType) {
+        case Constant.LOOPBACK_PLUG_AUDIO_THREAD_TEST_TYPE_LATENCY:
+            log("--Ending latency test--");
+            break;
+        case Constant.LOOPBACK_PLUG_AUDIO_THREAD_TEST_TYPE_BUFFER_PERIOD:
+            log("--Ending buffer test--");
+            break;
+        }
+
+        mIsPlaying = false;
+        mAudioTrack.pause();
+        mLatencyTestPipe.flush();
+        mAudioTrack.flush();
+
+        if (mMessageHandler != null) {
+            Message msg = Message.obtain();
+            if (mIsRequestStop) {
+                switch (mTestType) {
+                case Constant.LOOPBACK_PLUG_AUDIO_THREAD_TEST_TYPE_LATENCY:
+                    msg.what = LOOPBACK_AUDIO_THREAD_MESSAGE_LATENCY_REC_STOP;
+                    break;
+                case Constant.LOOPBACK_PLUG_AUDIO_THREAD_TEST_TYPE_BUFFER_PERIOD:
+                    msg.what = LOOPBACK_AUDIO_THREAD_MESSAGE_BUFFER_REC_STOP;
+                    break;
+                }
+            } else {
+                switch (mTestType) {
+                case Constant.LOOPBACK_PLUG_AUDIO_THREAD_TEST_TYPE_LATENCY:
+                    msg.what = LOOPBACK_AUDIO_THREAD_MESSAGE_LATENCY_REC_COMPLETE;
+                    break;
+                case Constant.LOOPBACK_PLUG_AUDIO_THREAD_TEST_TYPE_BUFFER_PERIOD:
+                    msg.what = LOOPBACK_AUDIO_THREAD_MESSAGE_BUFFER_REC_COMPLETE;
+                    break;
+                }
+            }
+
+            mMessageHandler.sendMessage(msg);
+        }
+    }
+
+
+    /**
+     * This is called only when the user requests to stop the test through
+     * pressing a button in the LoopbackActivity.
+     */
+    public void requestStopTest() throws InterruptedException {
+        mIsRequestStop = true;
+        mRecorderRunnable.requestStop();
+    }
+
+
+    /** Release mAudioTrack and mRecorderThread. */
+    public void finish() throws InterruptedException {
+        mIsRunning = false;
+
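+        // Work on a local copy; this likely guards against the field being cleared concurrently before release().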
+        final AudioTrack at = mAudioTrack;
+        if (at != null) {
+            at.release();
+            mAudioTrack = null;
+        }
+
+        Thread zeThread = mRecorderThread;
+        mRecorderThread = null;
+        if (zeThread != null) {
+            zeThread.interrupt();
+            zeThread.join(Constant.JOIN_WAIT_TIME_MS);
+        }
+    }
+
+
+    private static void log(String msg) {
+        Log.v(TAG, msg);
+    }
+
+
+    public double[] getWaveData() {
+        return mRecorderRunnable.getWaveData();
+    }
+
+
+    public int[] getAllGlitches() {
+        return mRecorderRunnable.getAllGlitches();
+    }
+
+
+    public boolean getGlitchingIntervalTooLong() {
+        return mRecorderRunnable.getGlitchingIntervalTooLong();
+    }
+
+
+    public int getFFTSamplingSize() {
+        return mRecorderRunnable.getFFTSamplingSize();
+    }
+
+
+    public int getFFTOverlapSamples() {
+        return mRecorderRunnable.getFFTOverlapSamples();
+    }
+
+
+    int getDurationInSeconds() {
+        return mBufferTestDurationInSeconds;
+    }
+
+}
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/NativeAudioThread.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/NativeAudioThread.java
new file mode 100644
index 0000000..95d5899
--- /dev/null
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/NativeAudioThread.java
@@ -0,0 +1,517 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.drrickorang.loopback;
+
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+
+import android.util.Log;
+import android.os.Handler;
+import android.os.Message;
+
+
+/**
+ * A thread that drives the native (OpenSL ES) loopback tests through the JNI loopback library.
+ */
+
+public class NativeAudioThread extends Thread {
+    private static final String TAG = "NativeAudioThread";
+
+    // for latency test
+    static final int LOOPBACK_NATIVE_AUDIO_THREAD_MESSAGE_LATENCY_REC_STARTED = 891;
+    static final int LOOPBACK_NATIVE_AUDIO_THREAD_MESSAGE_LATENCY_REC_ERROR = 892;
+    static final int LOOPBACK_NATIVE_AUDIO_THREAD_MESSAGE_LATENCY_REC_COMPLETE = 893;
+    static final int LOOPBACK_NATIVE_AUDIO_THREAD_MESSAGE_LATENCY_REC_COMPLETE_ERRORS = 894;
+    static final int LOOPBACK_NATIVE_AUDIO_THREAD_MESSAGE_LATENCY_REC_STOP = 895;
+
+    // for buffer test
+    static final int LOOPBACK_NATIVE_AUDIO_THREAD_MESSAGE_BUFFER_REC_STARTED = 896;
+    static final int LOOPBACK_NATIVE_AUDIO_THREAD_MESSAGE_BUFFER_REC_ERROR = 897;
+    static final int LOOPBACK_NATIVE_AUDIO_THREAD_MESSAGE_BUFFER_REC_COMPLETE = 898;
+    static final int LOOPBACK_NATIVE_AUDIO_THREAD_MESSAGE_BUFFER_REC_COMPLETE_ERRORS = 899;
+    static final int LOOPBACK_NATIVE_AUDIO_THREAD_MESSAGE_BUFFER_REC_STOP = 900;
+
+    public boolean  mIsRunning = false;
+    public int      mSessionId;
+    public double[] mSamples; // store samples that will be shown on WavePlotView
+    int             mSamplesIndex;
+
+    private int mTestType;
+    private int mSamplingRate;
+    private int mMinPlayerBufferSizeInBytes = 0;
+    private int mMinRecorderBuffSizeInBytes = 0; // currently not used
+    private int mMicSource;
+    private int mPerformanceMode = -1;
+    private int mIgnoreFirstFrames;
+
+    private boolean mIsRequestStop = false;
+    private Handler mMessageHandler;
+    private boolean isDestroying = false;
+    private boolean hasDestroyingErrors = false;
+
+    // for buffer test
+    private int[]   mRecorderBufferPeriod;
+    private int     mRecorderMaxBufferPeriod;
+    private double  mRecorderStdDevBufferPeriod;
+    private int[]   mPlayerBufferPeriod;
+    private int     mPlayerMaxBufferPeriod;
+    private double  mPlayerStdDevBufferPeriod;
+    private BufferCallbackTimes mPlayerCallbackTimes;
+    private BufferCallbackTimes mRecorderCallbackTimes;
+    private int     mBufferTestWavePlotDurationInSeconds;
+    private double  mFrequency1 = Constant.PRIME_FREQUENCY_1;
+    private double  mFrequency2 = Constant.PRIME_FREQUENCY_2; // not actually used
+    private int     mBufferTestDurationInSeconds;
+    private int     mFFTSamplingSize;
+    private int     mFFTOverlapSamples;
+    private int[]   mAllGlitches;
+    private boolean mGlitchingIntervalTooLong;
+    private final CaptureHolder mCaptureHolder;
+
+    private PipeByteBuffer        mPipeByteBuffer;
+    private GlitchDetectionThread mGlitchDetectionThread;
+
+
+    public NativeAudioThread(int samplingRate, int playerBufferInBytes, int recorderBufferInBytes,
+                             int micSource, int performanceMode, int testType, int bufferTestDurationInSeconds,
+                             int bufferTestWavePlotDurationInSeconds, int ignoreFirstFrames,
+                             CaptureHolder captureHolder) {
+        mSamplingRate = samplingRate;
+        mMinPlayerBufferSizeInBytes = playerBufferInBytes;
+        mMinRecorderBuffSizeInBytes = recorderBufferInBytes;
+        mMicSource = micSource;
+        mPerformanceMode = performanceMode;
+        mTestType = testType;
+        mBufferTestDurationInSeconds = bufferTestDurationInSeconds;
+        mBufferTestWavePlotDurationInSeconds = bufferTestWavePlotDurationInSeconds;
+        mIgnoreFirstFrames = ignoreFirstFrames;
+        mCaptureHolder = captureHolder;
+        setName("Loopback_NativeAudio");
+    }
+
+    public NativeAudioThread(NativeAudioThread old) {
+        mSamplingRate = old.mSamplingRate;
+        mMinPlayerBufferSizeInBytes = old.mMinPlayerBufferSizeInBytes;
+        mMinRecorderBuffSizeInBytes = old.mMinRecorderBuffSizeInBytes;
+        mMicSource = old.mMicSource;
+        mPerformanceMode = old.mPerformanceMode;
+        mTestType = old.mTestType;
+        mBufferTestDurationInSeconds = old.mBufferTestDurationInSeconds;
+        mBufferTestWavePlotDurationInSeconds = old.mBufferTestWavePlotDurationInSeconds;
+        mIgnoreFirstFrames = old.mIgnoreFirstFrames;
+        mCaptureHolder = old.mCaptureHolder;
+        setName("Loopback_NativeAudio");
+    }
+
+    //JNI load
+    static {
+        try {
+            System.loadLibrary("loopback");
+        } catch (UnsatisfiedLinkError e) {
+            log("Error loading loopback JNI library");
+            e.printStackTrace();
+        }
+        /* TODO: gracefully fail/notify if the library can't be loaded */
+    }
+
+
+    //jni calls
+    public native long  slesInit(int samplingRate, int frameCount, int micSource,
+                                 int performanceMode,
+                                 int testType, double frequency1, ByteBuffer byteBuffer,
+                                 short[] sincTone, int maxRecordedLateCallbacks,
+                                 int ignoreFirstFrames);
+    public native int   slesProcessNext(long sles_data, double[] samples, long offset);
+    public native int   slesDestroy(long sles_data);
+
+    // to get buffer period data
+    public native int[]  slesGetRecorderBufferPeriod(long sles_data);
+    public native int    slesGetRecorderMaxBufferPeriod(long sles_data);
+    public native double slesGetRecorderVarianceBufferPeriod(long sles_data);
+    public native int[]  slesGetPlayerBufferPeriod(long sles_data);
+    public native int    slesGetPlayerMaxBufferPeriod(long sles_data);
+    public native double slesGetPlayerVarianceBufferPeriod(long sles_data);
+    public native BufferCallbackTimes slesGetPlayerCallbackTimeStamps(long sles_data);
+    public native BufferCallbackTimes slesGetRecorderCallbackTimeStamps(long sles_data);
+
+    public native int slesGetCaptureRank(long sles_data);
+
+
+    public void run() {
+        setPriority(Thread.MAX_PRIORITY);
+        mIsRunning = true;
+
+        // erase output buffer
+        mSamples = null;
+
+        //start playing
+        log(" Started capture test");
+        if (mMessageHandler != null) {
+            Message msg = Message.obtain();
+            switch (mTestType) {
+            case Constant.LOOPBACK_PLUG_AUDIO_THREAD_TEST_TYPE_LATENCY:
+                msg.what = LOOPBACK_NATIVE_AUDIO_THREAD_MESSAGE_LATENCY_REC_STARTED;
+                break;
+            case Constant.LOOPBACK_PLUG_AUDIO_THREAD_TEST_TYPE_BUFFER_PERIOD:
+                msg.what = LOOPBACK_NATIVE_AUDIO_THREAD_MESSAGE_BUFFER_REC_STARTED;
+                break;
+            }
+            mMessageHandler.sendMessage(msg);
+        }
+
+        // generate the sinc tone used for the loopback latency test
+        short loopbackTone[] = new short[mMinPlayerBufferSizeInBytes / Constant.BYTES_PER_FRAME];
+        if (mTestType == Constant.LOOPBACK_PLUG_AUDIO_THREAD_TEST_TYPE_LATENCY) {
+            ToneGeneration sincToneGen = new RampedSineTone(mSamplingRate,
+                    Constant.LOOPBACK_FREQUENCY);
+            sincToneGen.generateTone(loopbackTone, loopbackTone.length);
+        }
+
+        log(String.format("about to init, sampling rate: %d, buffer:%d", mSamplingRate,
+                mMinPlayerBufferSizeInBytes / Constant.BYTES_PER_FRAME));
+
+        // mPipeByteBuffer is only used in buffer test
+        mPipeByteBuffer = new PipeByteBuffer(Constant.MAX_SHORTS);
+        long startTimeMs = System.currentTimeMillis();
+        long sles_data = slesInit(mSamplingRate,
+                mMinPlayerBufferSizeInBytes / Constant.BYTES_PER_FRAME, mMicSource,
+                mPerformanceMode, mTestType,
+                mFrequency1, mPipeByteBuffer.getByteBuffer(), loopbackTone,
+                mBufferTestDurationInSeconds * Constant.MAX_RECORDED_LATE_CALLBACKS_PER_SECOND,
+                mIgnoreFirstFrames);
+        log(String.format("sles_data = 0x%X", sles_data));
+
+        if (sles_data == 0) {
+            //notify error!!
+            log(" ERROR at JNI initialization");
+            if (mMessageHandler != null) {
+                Message msg = Message.obtain();
+                switch (mTestType) {
+                case Constant.LOOPBACK_PLUG_AUDIO_THREAD_TEST_TYPE_LATENCY:
+                    msg.what = LOOPBACK_NATIVE_AUDIO_THREAD_MESSAGE_LATENCY_REC_ERROR;
+                    break;
+                case Constant.LOOPBACK_PLUG_AUDIO_THREAD_TEST_TYPE_BUFFER_PERIOD:
+                    msg.what = LOOPBACK_NATIVE_AUDIO_THREAD_MESSAGE_BUFFER_REC_ERROR;
+                    break;
+                }
+                mMessageHandler.sendMessage(msg);
+            }
+        } else {
+            // wait a little bit
+            try {
+                final int setUpTime = 10;
+                sleep(setUpTime); //just to let it start properly
+            } catch (InterruptedException e) {
+                e.printStackTrace();
+            }
+
+
+            int totalSamplesRead = 0;
+            switch (mTestType) {
+            case Constant.LOOPBACK_PLUG_AUDIO_THREAD_TEST_TYPE_LATENCY:
+                final int latencyTestDurationInSeconds = 2;
+                int nNewSize = (int) (1.1 * mSamplingRate * latencyTestDurationInSeconds);
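+                // the 1.1 factor gives ~10% of headroom over the nominal sample count for the test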
+                mSamples = new double[nNewSize];
+                mSamplesIndex = 0; //reset index
+                Arrays.fill(mSamples, 0);
+
+                //TODO use a ByteBuffer to retrieve recorded data instead
+                long offset = 0;
+                // retrieve native recorder's recorded data
+                for (int ii = 0; ii < latencyTestDurationInSeconds; ii++) {
+                    log(String.format("block %d...", ii));
+                    int samplesRead = slesProcessNext(sles_data, mSamples, offset);
+                    totalSamplesRead += samplesRead;
+                    offset += samplesRead;
+                    log(" [" + ii + "] jni samples read:" + samplesRead +
+                        "  currentOffset:" + offset);
+                }
+
+                log(String.format(" samplesRead: %d, sampleOffset:%d", totalSamplesRead, offset));
+                log("about to destroy...");
+                break;
+            case Constant.LOOPBACK_PLUG_AUDIO_THREAD_TEST_TYPE_BUFFER_PERIOD:
+                setUpGlitchDetectionThread();
+                long testDurationMs = mBufferTestDurationInSeconds * Constant.MILLIS_PER_SECOND;
+                long elapsedTimeMs = System.currentTimeMillis() - startTimeMs;
+                while (elapsedTimeMs < testDurationMs) {
+                    if (mIsRequestStop) {
+                        break;
+                    } else {
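+                        // a positive rank from the native side indicates a late callback to capture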
+                        int rank = slesGetCaptureRank(sles_data);
+                        if (rank > 0) {
+                            //log("Late callback detected");
+                            mCaptureHolder.captureState(rank);
+                        }
+                        try {
+                            final int pollIntervalMs = 100;
+                            sleep(pollIntervalMs); // polling interval while waiting for the test to finish
+                        } catch (InterruptedException e) {
+                            e.printStackTrace();
+                        }
+                        elapsedTimeMs = System.currentTimeMillis() - startTimeMs;
+                    }
+
+                }
+                break;
+
+
+            }
+
+            // collect buffer period data
+            mRecorderBufferPeriod = slesGetRecorderBufferPeriod(sles_data);
+            mRecorderMaxBufferPeriod = slesGetRecorderMaxBufferPeriod(sles_data);
+            mRecorderStdDevBufferPeriod = Math.sqrt(slesGetRecorderVarianceBufferPeriod(sles_data));
+            mPlayerBufferPeriod = slesGetPlayerBufferPeriod(sles_data);
+            mPlayerMaxBufferPeriod = slesGetPlayerMaxBufferPeriod(sles_data);
+            mPlayerStdDevBufferPeriod = Math.sqrt(slesGetPlayerVarianceBufferPeriod(sles_data));
+
+            mPlayerCallbackTimes = slesGetPlayerCallbackTimeStamps(sles_data);
+            mRecorderCallbackTimes = slesGetRecorderCallbackTimeStamps(sles_data);
+
+            // get glitches data only for buffer test
+            if (mTestType == Constant.LOOPBACK_PLUG_AUDIO_THREAD_TEST_TYPE_BUFFER_PERIOD) {
+                mAllGlitches = mGlitchDetectionThread.getGlitches();
+                mSamples = mGlitchDetectionThread.getWaveData();
+                mGlitchingIntervalTooLong = mGlitchDetectionThread.getGlitchingIntervalTooLong();
+                endDetecting();
+            }
+
+            if (mTestType == Constant.LOOPBACK_PLUG_AUDIO_THREAD_TEST_TYPE_LATENCY) {
+                mCaptureHolder.captureState(0);
+            }
+
+            runDestroy(sles_data);
+
+            final int maxTry = 20;
+            int tryCount = 0;
+            while (isDestroying) {
+                try {
+                    sleep(40);
+                } catch (InterruptedException e) {
+                    e.printStackTrace();
+                }
+
+                tryCount++;
+                log("destroy try: " + tryCount);
+
+                if (tryCount >= maxTry) {
+                    hasDestroyingErrors = true;
+                    log("WARNING: waited for max time to properly destroy JNI.");
+                    break;
+                }
+            }
+            log(String.format("after destroying. TotalSamplesRead = %d", totalSamplesRead));
+
+            // for the buffer test, samples are not read back into mSamples here
+            if (mTestType == Constant.LOOPBACK_PLUG_AUDIO_THREAD_TEST_TYPE_LATENCY
+                && totalSamplesRead == 0) {
+                //hasDestroyingErrors = true;
+                log("Warning: Latency test reads no sample from native recorder!");
+            }
+
+            endTest();
+        }
+    }
+
+
+    public void requestStopTest() {
+        mIsRequestStop = true;
+    }
+
+
+    /** Set up parameters needed for GlitchDetectionThread, then create and run this thread. */
+    private void setUpGlitchDetectionThread() {
+        final int targetFFTMs = 20; // we want each FFT to cover 20ms of samples
+        mFFTSamplingSize = targetFFTMs * mSamplingRate / Constant.MILLIS_PER_SECOND;
+        // round to the nearest power of 2
+        mFFTSamplingSize = (int) Math.pow(2, Math.round(Math.log(mFFTSamplingSize) / Math.log(2)));
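+        // e.g. at 48000 Hz: 20 ms corresponds to 960 samples, which rounds to 1024 (2^10)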
+
+        if (mFFTSamplingSize < 2) {
+            mFFTSamplingSize = 2; // mFFTSamplingSize should be at least 2
+        }
+        mFFTOverlapSamples = mFFTSamplingSize / 2; // mFFTOverlapSamples is half of mFFTSamplingSize
+
+        mGlitchDetectionThread = new GlitchDetectionThread(mFrequency1, mFrequency2, mSamplingRate,
+            mFFTSamplingSize, mFFTOverlapSamples, mBufferTestDurationInSeconds,
+            mBufferTestWavePlotDurationInSeconds, mPipeByteBuffer, mCaptureHolder);
+        mGlitchDetectionThread.start();
+    }
+
+
+    public void endDetecting() {
+        mPipeByteBuffer.flush();
+        mPipeByteBuffer = null;
+        mGlitchDetectionThread.requestStop();
+        GlitchDetectionThread tempThread = mGlitchDetectionThread;
+        mGlitchDetectionThread = null;
+        try {
+            tempThread.join(Constant.JOIN_WAIT_TIME_MS);
+        } catch (InterruptedException e) {
+            e.printStackTrace();
+        }
+    }
+
+
+    public void setMessageHandler(Handler messageHandler) {
+        mMessageHandler = messageHandler;
+    }
+
+
+    private void runDestroy(final long sles_data) {
+        isDestroying = true;
+
+        //start thread
+        final long local_sles_data = sles_data;
+        Thread thread = new Thread(new Runnable() {
+            public void run() {
+                isDestroying = true;
+                log("**Start runnable destroy");
+
+                int status = slesDestroy(local_sles_data);
+                log(String.format("**End runnable destroy sles delete status: %d", status));
+                isDestroying = false;
+            }
+        });
+
+        thread.start();
+        log("end of runDestroy()");
+    }
+
+
+    /** Does no real work; provided only for consistency with LoopbackAudioThread. */
+    public void runTest() {
+
+    }
+
+
+    /** Does no real work; provided only for consistency with LoopbackAudioThread. */
+    public void runBufferTest() {
+
+    }
+
+
+    public void endTest() {
+       log("--Ending capture test--");
+       if (mMessageHandler != null) {
+           Message msg = Message.obtain();
+           if (hasDestroyingErrors) {
+               switch (mTestType) {
+                   case Constant.LOOPBACK_PLUG_AUDIO_THREAD_TEST_TYPE_LATENCY:
+                       msg.what = LOOPBACK_NATIVE_AUDIO_THREAD_MESSAGE_LATENCY_REC_COMPLETE_ERRORS;
+                       break;
+                   case Constant.LOOPBACK_PLUG_AUDIO_THREAD_TEST_TYPE_BUFFER_PERIOD:
+                       msg.what = LOOPBACK_NATIVE_AUDIO_THREAD_MESSAGE_BUFFER_REC_COMPLETE_ERRORS;
+                       break;
+               }
+           } else if (mIsRequestStop) {
+               switch (mTestType) {
+                   case Constant.LOOPBACK_PLUG_AUDIO_THREAD_TEST_TYPE_LATENCY:
+                       msg.what = LOOPBACK_NATIVE_AUDIO_THREAD_MESSAGE_LATENCY_REC_STOP;
+                       break;
+                   case Constant.LOOPBACK_PLUG_AUDIO_THREAD_TEST_TYPE_BUFFER_PERIOD:
+                       msg.what = LOOPBACK_NATIVE_AUDIO_THREAD_MESSAGE_BUFFER_REC_STOP;
+                       break;
+               }
+           } else {
+               switch (mTestType) {
+               case Constant.LOOPBACK_PLUG_AUDIO_THREAD_TEST_TYPE_LATENCY:
+                   msg.what = LOOPBACK_NATIVE_AUDIO_THREAD_MESSAGE_LATENCY_REC_COMPLETE;
+                   break;
+               case Constant.LOOPBACK_PLUG_AUDIO_THREAD_TEST_TYPE_BUFFER_PERIOD:
+                   msg.what = LOOPBACK_NATIVE_AUDIO_THREAD_MESSAGE_BUFFER_REC_COMPLETE;
+                   break;
+               }
+           }
+
+           mMessageHandler.sendMessage(msg);
+       }
+    }
+
+
+    public void finish() {
+        mIsRunning = false;
+    }
+
+
+    private static void log(String msg) {
+        Log.v(TAG, msg);
+    }
+
+
+    double[] getWaveData() {
+        return mSamples;
+    }
+
+
+    public int[] getRecorderBufferPeriod() {
+        return mRecorderBufferPeriod;
+    }
+
+    public int getRecorderMaxBufferPeriod() {
+        return mRecorderMaxBufferPeriod;
+    }
+
+    public double getRecorderStdDevBufferPeriod() {
+        return mRecorderStdDevBufferPeriod;
+    }
+
+    public int[] getPlayerBufferPeriod() {
+        return mPlayerBufferPeriod;
+    }
+
+    public int getPlayerMaxBufferPeriod() {
+        return mPlayerMaxBufferPeriod;
+    }
+
+    public double getPlayerStdDevBufferPeriod() {
+        return mPlayerStdDevBufferPeriod;
+    }
+
+    public int[] getNativeAllGlitches() {
+        return mAllGlitches;
+    }
+
+
+    public boolean getGlitchingIntervalTooLong() {
+        return mGlitchingIntervalTooLong;
+    }
+
+
+    public int getNativeFFTSamplingSize() {
+        return mFFTSamplingSize;
+    }
+
+
+    public int getNativeFFTOverlapSamples() {
+        return mFFTOverlapSamples;
+    }
+
+
+    public int getDurationInSeconds() {
+        return mBufferTestDurationInSeconds;
+    }
+
+    public BufferCallbackTimes getPlayerCallbackTimes() {
+        return mPlayerCallbackTimes;
+    }
+
+    public BufferCallbackTimes getRecorderCallbackTimes() {
+        return mRecorderCallbackTimes;
+    }
+}
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/PerformanceMeasurement.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/PerformanceMeasurement.java
new file mode 100644
index 0000000..35c5e18
--- /dev/null
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/PerformanceMeasurement.java
@@ -0,0 +1,271 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.drrickorang.loopback;
+
+import android.util.Log;
+
+
+/**
+ * This class is used to automatically measure the audio performance according to the
+ * recorder/player buffer period.
+ */
+
+public class PerformanceMeasurement {
+    public static final String TAG = "PerformanceMeasurement";
+
+    // this is used to enlarge the benchmark, so that it can be displayed with better accuracy on
+    // the dashboard
+    private static final int mMultiplicationFactor = 10000;
+
+    private int   mExpectedBufferPeriodMs;
+    private int[] mBufferData;
+    private int   mTotalOccurrence;
+
+    // used to determine buffer sizes mismatch
+    private static final double mPercentOccurrenceThreshold = 0.95;
+    // used to count the number of outliers
+    private static final int    mOutliersThreshold = 3;
+
+
+    /**
+     * Note: if mBufferSize * Constant.MILLIS_PER_SECOND / mSamplingRate is a whole number,
+     * the measurement will be more accurate, but this is not necessary.
+     */
+    public PerformanceMeasurement(int expectedBufferPeriod, int[] bufferData) {
+        mBufferData = bufferData;
+
+        mTotalOccurrence = 0;
+        for (int i = 0; i < mBufferData.length; i++) {
+            mTotalOccurrence += mBufferData[i];
+        }
+
+        mExpectedBufferPeriodMs = expectedBufferPeriod;
+    }
+
+
+    /**
+     * Measure the performance according to the collected buffer period.
+     * First, determine if there is a buffer sizes mismatch. If there is, then the performance
+     * measurement should be disregarded since it won't be accurate. If there isn't a mismatch,
+     * then a benchmark and a count on outliers can be produced as a measurement of performance.
+     * The benchmark should be as small as possible, as should the number of outliers.
+     * Note: This is a wrapper method that calls different methods and prints their results. It is
+     * also possible to call the individual methods to obtain specific results.
+     * Note: Should try to compare the number of outliers with the number of glitches and see if
+     * they match.
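+     * A minimal usage sketch (variable names here are illustrative):
+     *     PerformanceMeasurement measurement =
+     *             new PerformanceMeasurement(expectedBufferPeriodMs, bufferPeriodHistogram);
+     *     measurement.measurePerformance();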
+     */
+    public void measurePerformance() {
+        // calculate standard deviation and mean of mBufferData
+        double mean = computeMean(mBufferData);
+        double standardDeviation = computeStandardDeviation(mBufferData, mean);
+        log("mean before discarding 99% data: " + mean);
+        log("standard deviation before discarding 99% data: " + standardDeviation);
+        log("stdev/mean before discarding 99% data: " + (standardDeviation / mean));
+
+        // calculate standard deviation and mean of dataAfterDiscard
+        int[] dataAfterDiscard = computeDataAfterDiscard(mBufferData);
+        double meanAfterDiscard = computeMean(dataAfterDiscard);
+        double standardDeviationAfterDiscard = computeStandardDeviation(dataAfterDiscard,
+                                                                        meanAfterDiscard);
+        log("mean after discarding 99% data: " + meanAfterDiscard);
+        log("standard deviation after discarding 99% data: " + standardDeviationAfterDiscard);
+        log("stdev/mean after discarding 99% data: " + (standardDeviationAfterDiscard /
+                                                        meanAfterDiscard));
+        log("percent difference between two means: " + (Math.abs(meanAfterDiscard - mean) / mean));
+
+        // determine if there's a buffer sizes mismatch
+        boolean isBufferSizesMismatch =
+                percentBufferPeriodsAtExpected() > mPercentOccurrenceThreshold;
+
+        // compute benchmark and count the number of outliers
+        double benchmark = computeWeightedBenchmark();
+        int outliers = countOutliers();
+
+        log("total occurrence: " + mTotalOccurrence);
+        log("buffer size mismatch: " + isBufferSizesMismatch);
+        log("benchmark: " + benchmark);
+        log("number of outliers: " + outliers);
+        log("expected buffer period: " + mExpectedBufferPeriodMs + " ms");
+        int maxPeriod = (mBufferData.length - 1);
+        log("max buffer period: " + maxPeriod + " ms");
+    }
+
+
+    /**
+     * Determine the fraction of buffer period callbacks that occurred at the expected time.
+     * Returns a value between 0 and 1.
+     */
+    public float percentBufferPeriodsAtExpected() {
+        int occurrenceNearExpectedBufferPeriod = 0;
+        // how many buckets around mExpectedBufferPeriodMs are added to the count
+        int acceptableOffset = 2;
+        int start = Math.max(0, mExpectedBufferPeriodMs - acceptableOffset);
+        int end = Math.min(mBufferData.length - 1, mExpectedBufferPeriodMs + acceptableOffset);
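+        // e.g. with an expected period of 4 ms, occurrences in buckets 2..6 ms count as expected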
+        // include the next bucket too because the period is rounded up
+        for (int i = start; i <= end; i++) {
+            occurrenceNearExpectedBufferPeriod += mBufferData[i];
+        }
+        return ((float) occurrenceNearExpectedBufferPeriod) / mTotalOccurrence;
+    }
+
+
+    /**
+     * Compute a benchmark using the following formula:
+     * (1/totalOccurrence) * sum_i(|i - expectedBufferPeriod|^2 * occurrence_i / expectedBufferPeriod),
+     * for i < expectedBufferPeriod * mOutliersThreshold.
+     * Also, the benchmark is additionally multiplied by mMultiplicationFactor. This is not in the
+     * original formula, and it is used only because the original benchmark will be too small to
+     * be displayed accurately on the dashboard.
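+     * Illustrative example (hypothetical numbers, not from a real run): with an expected buffer
+     * period of 4 ms, 10 occurrences in bucket i = 8 contribute |8 - 4|^2 * 10 / 4 = 40 to the
+     * weighted count before the division by totalOccurrence and the final multiplication.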
+     */
+    public double computeWeightedBenchmark() {
+        double weightedCount = 0;
+        double weight;
+        double benchmark;
+
+        // don't count mExpectedBufferPeriodMs + 1 towards the benchmark, because this bin may be
+        // large due to rounding (all results are rounded up when collecting buffer periods)
+        int threshold = Math.min(mBufferData.length, mExpectedBufferPeriodMs * mOutliersThreshold);
+        for (int i = 0; i < threshold; i++) {
+            if (mBufferData[i] != 0 && (i != mExpectedBufferPeriodMs + 1)) {
+                weight = Math.abs(i - mExpectedBufferPeriodMs);
+                weight *= weight;   // squared
+                weightedCount += weight * mBufferData[i];
+            }
+        }
+        weightedCount /= mExpectedBufferPeriodMs;
+
+        benchmark = (weightedCount / mTotalOccurrence) * mMultiplicationFactor;
+        return benchmark;
+    }
+
+
+    /**
+     * All occurrences at or after (mExpectedBufferPeriodMs * mOutliersThreshold) ms are
+     * considered outliers.
+     */
+    public int countOutliers() {
+        int outliersThresholdInMs = mExpectedBufferPeriodMs * mOutliersThreshold;
+        int outliersCount = 0;
+        for (int i = outliersThresholdInMs; i < mBufferData.length; i++) {
+            outliersCount += mBufferData[i];
+        }
+        return outliersCount;
+    }
+
+
+    /**
+     * Output an array that keeps only roughly 0.5% of the data at each extreme and discards the
+     * ~99% in the middle. In this array, data[i] = x means there are x occurrences of value i.
+     */
+    private int[] computeDataAfterDiscard(int[] data) {
+        // calculate the total amount of data
+        int totalCount = 0;
+        int length = data.length;
+        for (int i = 0; i < length; i++) {
+            totalCount += data[i];
+        }
+
+        // we only want to keep a certain percent of data at the bottom and top
+        final double percent = 0.005;
+        int bar = (int) (totalCount * percent);
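+        // e.g. with totalCount = 10000 occurrences, bar = 50 are kept at each extreme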
+        if (bar == 0) { // at least keep the lowest and highest data
+            bar = 1;
+        }
+        int count = 0;
+        int[] dataAfterDiscard = new int[length];
+
+        // for bottom data
+        for (int i = 0; i < length; i++) {
+            if (count > bar) {
+                break;
+            } else if (count + data[i] > bar) {
+                dataAfterDiscard[i] += bar - count;
+                break;
+            } else {
+                dataAfterDiscard[i] += data[i];
+                count += data[i];
+            }
+        }
+
+        // for top data
+        count = 0;
+        for (int i = length - 1; i >= 0; i--) {
+            if (count > bar) {
+                break;
+            } else if (count + data[i] > bar) {
+                dataAfterDiscard[i] += bar - count;
+                break;
+            } else {
+                dataAfterDiscard[i] += data[i];
+                count += data[i];
+            }
+        }
+
+        return dataAfterDiscard;
+    }
+
+
+    /**
+     * Calculate the mean of int array "data". In this array, data[i] = x means there are
+     * x occurrences of value i.
+     */
+    private double computeMean(int[] data) {
+        int count = 0;
+        int sum = 0;
+        for (int i = 0; i < data.length; i++) {
+            count += data[i];
+            sum += data[i] * i;
+        }
+
+        double mean;
+        if (count != 0) {
+            mean = (double) sum / count;
+        } else {
+            mean = 0;
+            log("zero count!");
+        }
+
+        return mean;
+    }
+
+
+    /**
+     * Calculate the standard deviation of int array "data". In this array, data[i] = x means
+     * there are x occurrences of value i.
+     */
+    private double computeStandardDeviation(int[] data, double mean) {
+        double sumDeviation = 0;
+        int count = 0;
+        double standardDeviation;
+
+        for (int i = 0; i < data.length; i++) {
+            if (data[i] != 0) {
+                count += data[i];
+                sumDeviation += (i - mean) * (i - mean) * data[i];
+            }
+        }
+
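+        // sample (Bessel-corrected) standard deviation: divide by (count - 1) rather than count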
+        standardDeviation = Math.sqrt(sumDeviation / (count - 1));
+        return standardDeviation;
+    }
+
+
+    private static void log(String msg) {
+        Log.v(TAG, msg);
+    }
+
+}
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/Pipe.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/Pipe.java
new file mode 100644
index 0000000..8eb1214
--- /dev/null
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/Pipe.java
@@ -0,0 +1,52 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.drrickorang.loopback;
+
+
+/**
+ * This class is a pipe that allows one writer and one reader.
+ */
+
+public abstract class Pipe {
+    public static final int OVERRUN = -2;   // when there's an overrun, return this value
+
+    protected int       mSamplesOverrun;
+    protected int       mOverruns;
+    protected final int mMaxValues;   // always in power of two
+
+
+    /** maxSamples must be >= 2. */
+    public Pipe(int maxSamples) {
+        mMaxValues = Utilities.roundup(maxSamples); // round up to the nearest power of 2
+    }
+
+
+    /**
+     * Read at most "count" samples into array "buffer", starting at index "offset".
+     * If fewer than "count" samples are available, read as many as possible and return the
+     * number of samples read (non-blocking). offset + count must be <= buffer.length.
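+     * A minimal non-blocking read loop would look like (names here are illustrative):
+     *     short[] buf = new short[1024];
+     *     int n = pipe.read(buf, 0, buf.length);
+     *     if (n == Pipe.OVERRUN) { ... } // the writer got too far ahead and data was dropped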
+     */
+    public abstract int read(short[] buffer, int offset, int count);
+
+
+    /** Return the number of samples available to read. */
+    public abstract int availableToRead();
+
+
+    /** Clear the pipe. */
+    public abstract void flush();
+}
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/PipeByteBuffer.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/PipeByteBuffer.java
new file mode 100644
index 0000000..7c95aaf
--- /dev/null
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/PipeByteBuffer.java
@@ -0,0 +1,174 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.drrickorang.loopback;
+
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+
+import android.util.Log;
+
+
+/**
+ * Non-blocking pipe where the writer, knowing the address of "mByteBuffer", writes to this
+ * ByteBuffer directly, while the reader reads from the pipe using read(), which converts the
+ * data in the ByteBuffer into shorts.
+ * Data in the pipe are stored in the ByteBuffer "mByteBuffer".
+ * The write side of a pipe permits overruns; flow control is the caller's responsibility.
+ */
+
+public class PipeByteBuffer extends Pipe {
+    private static final String TAG = "PipeByteBuffer";
+
+    private final ByteBuffer mByteBuffer;
+    private int              mFront = 0; // reader's current position
+
+
+    /**
+     * The ByteBuffer in this class consists of two sections. The first section is the actual pipe
+     * to store data. This section must have a size in power of 2, and this is enforced by the
+     * constructor through rounding maxSamples up to the nearest power of 2. The second section
+     * is used to store metadata. Currently the only metadata is an integer that stores the rear,
+     * where rear is the writer's current position. The metadata is at the end of ByteBuffer, and is
+     * outside of the actual pipe.
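+     * A sketch of the layout (sizes here are illustrative):
+     *   [ sample 0 | sample 1 | ... | sample mMaxValues-1 | rear (one int) ]
+     *   |<---------- pipe data, power-of-2 length -------->|<- metadata ->|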
+     * IMPORTANT: The code is designed (in native code) such that metadata won't be overwritten when
+     * the writer writes to the pipe. If changes to the code are required, please make sure the
+     * metadata won't be overwritten.
+     * IMPORTANT: Since a signed integer is used to store rear and mFront, their values should not
+     * exceed 2^31 - 1, or else overflow happens and the positions of rear and mFront become
+     * incorrect.
+     */
+    public PipeByteBuffer(int maxSamples) {
+        super(maxSamples);
+        int extraInt = 1; // used to store rear
+        int extraShort = extraInt * Constant.SHORTS_PER_INT;
+        int numberOfShorts = mMaxValues + extraShort;
+        mByteBuffer = ByteBuffer.allocateDirect(numberOfShorts * Constant.BYTES_PER_SHORT);
+        mByteBuffer.order(ByteOrder.LITTLE_ENDIAN);
+    }
+
+
+    /**
+     * Convert data in mByteBuffer into shorts and put them into "buffer".
+     * Note: rear and mFront are kept in terms of number of shorts instead of number of bytes.
+     */
+    @Override
+    public int read(short[] buffer, int offset, int requiredSamples) {
+        // first, update the current rear
+        int rear;
+        synchronized (mByteBuffer) {
+            rear = mByteBuffer.getInt(mMaxValues * Constant.BYTES_PER_SHORT);
+        }
+        //log("initial offset: " + offset + "\n initial requiredSamples: " + requiredSamples);
+
+        // After this point, rear may be updated further by the writer, but we don't care: if
+        // there is enough data at the moment of checking, we read it; otherwise we just wait
+        // until the next call to read().
+        int avail = availableToRead(rear, mFront);
+        if (avail <= 0) {   //return -2 for overrun
+            return avail;
+        }
+
+        // if not enough samples, just read partial samples
+        if (requiredSamples > avail) {
+            requiredSamples = avail;
+        }
+
+        // mask the upper bits to get the correct position in the pipe
+        int front = mFront & (mMaxValues - 1);
+        int read = mMaxValues - front;   // total samples from currentIndex until the end of array
+        if (read > requiredSamples) {
+            read = requiredSamples;
+        }
+
+        int byteBufferFront = front * Constant.BYTES_PER_SHORT; // start reading from here
+        byteBufferToArray(buffer, offset, read, byteBufferFront);
+
+        // if the first copy reached the end of the pipe, wrap around and copy the rest from the start
+        if (front + read == mMaxValues) {
+            int samplesLeft = requiredSamples - read;
+            if (samplesLeft > 0) {
+                byteBufferFront = 0;
+                byteBufferToArray(buffer, offset + read, samplesLeft, byteBufferFront);
+                read += samplesLeft;
+            }
+        }
+
+        mFront += read;
+        return read;
+    }
+
+
+    /**
+     * Copy mByteBuffer's data (starting from "byteBufferFront") into the short array "buffer".
+     * "start" is the starting index in "buffer" and "length" is the number of samples to copy.
+     */
+    private void byteBufferToArray(short[] buffer, int start, int length, int byteBufferFront) {
+        for (int i = start; i < (start + length); i++) {
+            buffer[i] = mByteBuffer.getShort(byteBufferFront);
+            byteBufferFront += Constant.BYTES_PER_SHORT;
+        }
+    }
+
+
+    /** Private helper that actually calculates the number of samples available to read. */
+    private int availableToRead(int rear, int front) {
+        int avail = rear - front;
+        if (avail > mMaxValues) {
+            // skip ahead by roughly 1/32 of the pipe's capacity to avoid another overrun immediately
+            int oldFront = mFront;
+            mFront = rear - mMaxValues + (mMaxValues >> 5);
+            mSamplesOverrun += mFront - oldFront;
+            ++mOverruns;
+            return OVERRUN;
+        }
+
+        return avail;
+    }
+
+
+    @Override
+    public int availableToRead() {
+        int rear;
+        int avail;
+        synchronized (mByteBuffer) {
+            rear = mByteBuffer.getInt(mMaxValues * Constant.BYTES_PER_SHORT);
+        }
+
+        avail = availableToRead(rear, mFront);
+        return avail;
+    }
+
+
+    public ByteBuffer getByteBuffer() {
+        return mByteBuffer;
+    }
+
+
+    @Override
+    public void flush() {
+        //set rear and front to zero
+        mFront = 0;
+        synchronized (mByteBuffer) {
+            mByteBuffer.putInt(mMaxValues * Constant.BYTES_PER_SHORT, 0);
+        }
+    }
+
+
+    private static void log(String msg) {
+        Log.v(TAG, msg);
+    }
+
+}
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/PipeShort.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/PipeShort.java
new file mode 100644
index 0000000..829ef49
--- /dev/null
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/PipeShort.java
@@ -0,0 +1,139 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.drrickorang.loopback;
+
+
+/**
+ * Non-blocking pipe where the writer writes to the pipe using write() and the reader reads from
+ * the pipe using read(). Data in the pipe are stored in the short array "mBuffer".
+ * The write side of a pipe permits overruns; flow control is the caller's responsibility.
+ */
+
+public class PipeShort extends Pipe {
+    private int          mFront; // reader's current position
+    private int          mRear; // writer's current position
+    private final short  mBuffer[]; // stores the data in the pipe
+    private volatile int mVolatileRear; // used to keep rear synchronized
+
+
+    /**
+     * IMPORTANT: Since a signed integer is used to store mRear and mFront, their values should not
+     * exceed 2^31 - 1, or else overflow happens and the positions of mRear and mFront become
+     * incorrect.
+     */
+    public PipeShort(int maxSamples) {
+        super(maxSamples);
+        mBuffer = new short[mMaxValues];
+    }
+
+
+    /**
+     * offset must be >= 0.
+     * count is the maximum number of shorts to copy, and must be >= 0.
+     * offset + count must be <= buffer.length.
+     * Return actual number of shorts copied, which will be >= 0.
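+     * Because mMaxValues is a power of two, "mRear & (mMaxValues - 1)" gives the wrapped write
+     * position; e.g. with mMaxValues = 8 and mRear = 10, writing starts at index 2.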
+     */
+    public int write(short[] buffer, int offset, int count) {
+        // mask the upper bits to get the correct position in the pipe
+        int rear = mRear & (mMaxValues - 1);
+        int written = mMaxValues - rear;
+        if (written > count) {
+            written = count;
+        }
+
+        System.arraycopy(buffer, offset, mBuffer, rear, written);
+        if (rear + written == mMaxValues) {
+            if ((count -= written) > rear) {
+                count = rear;
+            }
+            if (count > 0) {
+                System.arraycopy(buffer, offset + written, mBuffer, 0, count);
+                written += count;
+            }
+        }
+
+        mRear += written;
+        mVolatileRear = mRear;
+        return written;
+    }
+
+
+    @Override
+    public int read(short[] buffer, int offset, int count) {
+        int avail = availableToRead();
+        if (avail <= 0) {
+            return avail;
+        }
+
+        // An overrun can occur from here on and be silently ignored,
+        // but it will be caught at next read()
+        if (count > avail) {
+            count = avail;
+        }
+
+        // mask the upper bits to get the correct position in the pipe
+        int front = mFront & (mMaxValues - 1);
+        int read = mMaxValues - front;
+
+        if (read > count) {
+            read = count;
+        }
+
+        // In particular, an overrun during the System.arraycopy will result in reading corrupt data
+        System.arraycopy(mBuffer, front, buffer, offset, read);
+        // We could re-read the rear pointer here to detect the corruption, but why bother?
+        if (front + read == mMaxValues) {
+            if ((count -= read) > front) {
+                count = front;
+            }
+
+            if (count > 0) {
+                System.arraycopy(mBuffer, 0, buffer, offset + read, count);
+                read += count;
+            }
+        }
+
+        mFront += read;
+        return read;
+    }
+
+
+
+    @Override
+    public int availableToRead() {
+        int rear = mVolatileRear;
+        int avail = rear - mFront;
+        if (avail > mMaxValues) {
+            // Discard 1/16 of the most recent data in pipe to avoid another overrun immediately
+            int oldFront = mFront;
+            mFront = rear - mMaxValues + (mMaxValues >> 4);
+            mSamplesOverrun += mFront - oldFront;
+            ++mOverruns;
+            return OVERRUN;
+        }
+
+        return avail;
+    }
+
+
+    @Override
+    public void flush() {
+        mRear = mFront;
+        mVolatileRear = mFront;
+    }
+
+}
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/PlayerBufferPeriodActivity.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/PlayerBufferPeriodActivity.java
new file mode 100644
index 0000000..6132a14
--- /dev/null
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/PlayerBufferPeriodActivity.java
@@ -0,0 +1,48 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.drrickorang.loopback;
+
+import java.util.Arrays;
+
+import android.app.Activity;
+import android.os.Bundle;
+import android.view.View;
+
+
+/**
+ * This activity will display a histogram that shows the player's buffer period.
+ */
+
+public class PlayerBufferPeriodActivity extends Activity {
+
+
+    public void onCreate(Bundle savedInstanceState) {
+        super.onCreate(savedInstanceState);
+
+        View view = getLayoutInflater().inflate(R.layout.player_buffer_period_activity, null);
+        setContentView(view);
+        HistogramView histogramView = (HistogramView) findViewById(R.id.viewWriteHistogram);
+        Bundle bundle = getIntent().getExtras();
+
+        // setup the histogram
+        int[] bufferData = bundle.getIntArray("playerBufferPeriodArray");
+        int bufferDataMax = bundle.getInt("playerBufferPeriodMax");
+        histogramView.setBufferPeriodArray(bufferData);
+        histogramView.setMaxBufferPeriod(bufferDataMax);
+    }
+
+}
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/RampedSineTone.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/RampedSineTone.java
new file mode 100644
index 0000000..dc0227f
--- /dev/null
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/RampedSineTone.java
@@ -0,0 +1,70 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.drrickorang.loopback;
+
+/**
+ * Creates a tone that can be injected (and then looped back) in the Latency test.
+ * The generated tone is a sine wave whose amplitude linearly increases and then decreases.
+ */
+public class RampedSineTone extends SineWaveTone {
+
+    public RampedSineTone(int samplingRate, double frequency) {
+        super(samplingRate, frequency);
+        mAmplitude = Constant.LOOPBACK_AMPLITUDE;
+    }
+
+    /**
+     * Modifies SineWaveTone by creating a ramp up in amplitude followed by an immediate ramp down.
+     */
+    @Override
+    public void generateTone(short[] tone, int size) {
+        super.generateTone(tone, size);
+
+        for (int i = 0; i < size; i++) {
+            double factor;    // applied to the amplitude of the sine wave
+
+            // for the first half of the samples the amplitude is increasing, hence i < size / 2
+            if (i < size / 2) {
+                factor = (i / (float) size) * 2;
+            } else {
+                factor = ((size - i) / (float) size) * 2;
+            }
+            tone[i] *= factor;
+        }
+    }
+
+    /**
+     * Modifies SineWaveTone by creating a ramp up in amplitude followed by an immediate ramp down.
+     */
+    @Override
+    public void generateTone(double[] tone, int size) {
+        super.generateTone(tone, size);
+
+        for (int i = 0; i < size; i++) {
+            double factor;    // applied to the amplitude of the sine wave
+
+            // for the first half of the samples the amplitude is increasing, hence i < size / 2
+            if (i < size / 2) {
+                factor = Constant.LOOPBACK_AMPLITUDE * i / size;
+            } else {
+                factor = Constant.LOOPBACK_AMPLITUDE * (size - i) / size;
+            }
+            tone[i] *= factor;
+        }
+    }
+
+}
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/RecorderBufferPeriodActivity.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/RecorderBufferPeriodActivity.java
new file mode 100644
index 0000000..f34dd4f
--- /dev/null
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/RecorderBufferPeriodActivity.java
@@ -0,0 +1,56 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.drrickorang.loopback;
+
+import android.app.Activity;
+import android.os.Bundle;
+import android.util.Log;
+import android.view.View;
+
+import java.util.Arrays;
+
+
+/**
+ * This activity will display a histogram that shows the recorder's buffer period.
+ */
+
+public class RecorderBufferPeriodActivity extends Activity {
+    private static final String TAG = "RecorderBufferPeriodActivity";
+
+
+    public void onCreate(Bundle savedInstanceState) {
+        super.onCreate(savedInstanceState);
+
+        View view = getLayoutInflater().inflate(R.layout.recorder_buffer_period_activity, null);
+        setContentView(view);
+        HistogramView histogramView = (HistogramView) findViewById(R.id.viewReadHistogram);
+        Bundle bundle = getIntent().getExtras();
+
+        // setup the histogram
+        int[] bufferData = bundle.getIntArray("recorderBufferPeriodArray");
+        int bufferDataMax = bundle.getInt("recorderBufferPeriodMax");
+        histogramView.setBufferPeriodArray(bufferData);
+        histogramView.setMaxBufferPeriod(bufferDataMax);
+
+    }
+
+
+    private static void log(String msg) {
+        Log.v(TAG, msg);
+    }
+
+}
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/RecorderRunnable.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/RecorderRunnable.java
new file mode 100644
index 0000000..8c3c7a1
--- /dev/null
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/RecorderRunnable.java
@@ -0,0 +1,557 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.drrickorang.loopback;
+
+import android.content.Context;
+import android.media.AudioFormat;
+import android.media.AudioManager;
+import android.media.AudioRecord;
+import android.os.Build;
+import android.util.Log;
+
+/**
+ * This thread records incoming sound samples (uses AudioRecord).
+ */
+
+public class RecorderRunnable implements Runnable {
+    private static final String TAG = "RecorderRunnable";
+
+    private AudioRecord         mRecorder;
+    private boolean             mIsRunning;
+    private boolean             mIsRecording = false;
+    private static final Object sRecordingLock = new Object();
+
+    private final LoopbackAudioThread mAudioThread;
+    // This is the pipe that connects the player and the recorder in latency test.
+    private final PipeShort           mLatencyTestPipeShort;
+    // This is the pipe that is used in buffer test to send data to GlitchDetectionThread
+    private PipeShort                 mBufferTestPipeShort;
+
+    private boolean   mIsRequestStop = false;
+    private final int mTestType;    // latency test or buffer test
+    private final int mSelectedRecordSource;
+    private final int mSamplingRate;
+
+    private int       mChannelConfig = AudioFormat.CHANNEL_IN_MONO;
+    private int       mAudioFormat = AudioFormat.ENCODING_PCM_16BIT;
+    private int       mMinRecorderBuffSizeInBytes = 0;
+    private int       mMinRecorderBuffSizeInSamples = 0;
+
+    private short[] mAudioShortArray;   // this array stores values from mAudioTone in read()
+    private short[] mBufferTestShortArray;
+    private short[] mAudioTone;
+
+    // for glitch detection (buffer test)
+    private BufferPeriod          mRecorderBufferPeriodInRecorder;
+    private final int             mBufferTestWavePlotDurationInSeconds;
+    private final int             mChannelIndex;
+    private final double          mFrequency1;
+    private final double          mFrequency2; // not actually used
+    private int[]                 mAllGlitches; // value = 1 means there's a glitch in that interval
+    private boolean               mGlitchingIntervalTooLong;
+    private int                   mFFTSamplingSize; // the amount of samples used per FFT.
+    private int                   mFFTOverlapSamples; // overlap half the samples
+    private long                  mStartTimeMs;
+    private int                   mBufferTestDurationInSeconds;
+    private long                  mBufferTestDurationMs;
+    private final CaptureHolder   mCaptureHolder;
+    private final Context         mContext;
+    private AudioManager          mAudioManager;
+    private GlitchDetectionThread mGlitchDetectionThread;
+
+    // for adjusting sound level in buffer test
+    private double[] mSoundLevelSamples;
+    private int      mSoundLevelSamplesIndex = 0;
+    private boolean  mIsAdjustingSoundLevel = true; // is true if still adjusting sound level
+    private double   mSoundBotLimit = 0.6;    // we want to keep the sound level high
+    private double   mSoundTopLimit = 0.8;    // but we also don't want to be close to saturation
+    private int      mAdjustSoundLevelCount = 0;
+    private int      mMaxVolume;   // max possible volume of the device
+
+    private double[]  mSamples; // samples shown on WavePlotView
+    private int       mSamplesIndex;
+
+    RecorderRunnable(PipeShort latencyPipe, int samplingRate, int channelConfig, int audioFormat,
+                     int recorderBufferInBytes, int micSource, LoopbackAudioThread audioThread,
+                     BufferPeriod recorderBufferPeriod, int testType, double frequency1,
+                     double frequency2, int bufferTestWavePlotDurationInSeconds,
+                     Context context, int channelIndex, CaptureHolder captureHolder) {
+        mLatencyTestPipeShort = latencyPipe;
+        mSamplingRate = samplingRate;
+        mChannelConfig = channelConfig;
+        mAudioFormat = audioFormat;
+        mMinRecorderBuffSizeInBytes = recorderBufferInBytes;
+        mSelectedRecordSource = micSource;
+        mAudioThread = audioThread;
+        mRecorderBufferPeriodInRecorder = recorderBufferPeriod;
+        mTestType = testType;
+        mFrequency1 = frequency1;
+        mFrequency2 = frequency2;
+        mBufferTestWavePlotDurationInSeconds = bufferTestWavePlotDurationInSeconds;
+        mContext = context;
+        mChannelIndex = channelIndex;
+        mCaptureHolder = captureHolder;
+    }
+
+
+    /** Initialize the recording device for latency test. */
+    public boolean initRecord() {
+        log("Init Record");
+        if (mMinRecorderBuffSizeInBytes <= 0) {
+            mMinRecorderBuffSizeInBytes = AudioRecord.getMinBufferSize(mSamplingRate,
+                                          mChannelConfig, mAudioFormat);
+            log("RecorderRunnable: computing min buff size = " + mMinRecorderBuffSizeInBytes
+                + " bytes");
+        } else {
+            log("RecorderRunnable: using min buff size = " + mMinRecorderBuffSizeInBytes +
+                " bytes");
+        }
+
+        if (mMinRecorderBuffSizeInBytes <= 0) {
+            return false;
+        }
+
+        mMinRecorderBuffSizeInSamples = mMinRecorderBuffSizeInBytes / Constant.BYTES_PER_FRAME;
+        mAudioShortArray = new short[mMinRecorderBuffSizeInSamples];
+
+        try {
+            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
+                mRecorder = new AudioRecord.Builder()
+                        .setAudioFormat((mChannelIndex < 0 ?
+                                new AudioFormat.Builder()
+                                        .setChannelMask(AudioFormat.CHANNEL_IN_MONO) :
+                                new AudioFormat
+                                        .Builder().setChannelIndexMask(1 << mChannelIndex))
+                                .setSampleRate(mSamplingRate)
+                                .setEncoding(mAudioFormat)
+                                .build())
+                        .setAudioSource(mSelectedRecordSource)
+                        .setBufferSizeInBytes(2 * mMinRecorderBuffSizeInBytes)
+                        .build();
+            } else {
+                mRecorder = new AudioRecord(mSelectedRecordSource, mSamplingRate,
+                        mChannelConfig, mAudioFormat, 2 * mMinRecorderBuffSizeInBytes);
+            }
+        } catch (IllegalArgumentException | UnsupportedOperationException e) {
+            e.printStackTrace();
+            return false;
+        } finally {
+            if (mRecorder == null){
+                return false;
+            } else if (mRecorder.getState() != AudioRecord.STATE_INITIALIZED) {
+                mRecorder.release();
+                mRecorder = null;
+                return false;
+            }
+        }
+
+    // generate the ramped sine tone used in the latency (loopback) test
+        ToneGeneration sincTone = new RampedSineTone(mSamplingRate, Constant.LOOPBACK_FREQUENCY);
+        mAudioTone = new short[Constant.LOOPBACK_SAMPLE_FRAMES];
+        sincTone.generateTone(mAudioTone, Constant.LOOPBACK_SAMPLE_FRAMES);
+
+        return true;
+    }
+
+
+    /** Initialize the recording device for buffer test. */
+    boolean initBufferRecord() {
+        log("Init Record");
+        if (mMinRecorderBuffSizeInBytes <= 0) {
+
+            mMinRecorderBuffSizeInBytes = AudioRecord.getMinBufferSize(mSamplingRate,
+                                          mChannelConfig, mAudioFormat);
+            log("RecorderRunnable: computing min buff size = " + mMinRecorderBuffSizeInBytes
+                + " bytes");
+        } else {
+            log("RecorderRunnable: using min buff size = " + mMinRecorderBuffSizeInBytes +
+                " bytes");
+        }
+
+        if (mMinRecorderBuffSizeInBytes <= 0) {
+            return false;
+        }
+
+        mMinRecorderBuffSizeInSamples = mMinRecorderBuffSizeInBytes / Constant.BYTES_PER_FRAME;
+        mBufferTestShortArray = new short[mMinRecorderBuffSizeInSamples];
+
+        final int cycles = 100;
+        int soundLevelSamples =  (mSamplingRate / (int) mFrequency1) * cycles;
+        mSoundLevelSamples = new double[soundLevelSamples];
+        mAudioManager = (AudioManager) mContext.getSystemService(Context.AUDIO_SERVICE);
+        mMaxVolume = mAudioManager.getStreamMaxVolume(AudioManager.STREAM_MUSIC);
+
+        try {
+            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
+                mRecorder = new AudioRecord.Builder()
+                        .setAudioFormat((mChannelIndex < 0 ?
+                                new AudioFormat.Builder()
+                                        .setChannelMask(AudioFormat.CHANNEL_IN_MONO) :
+                                new AudioFormat
+                                        .Builder().setChannelIndexMask(1 << mChannelIndex))
+                                .setSampleRate(mSamplingRate)
+                                .setEncoding(mAudioFormat)
+                                .build())
+                        .setAudioSource(mSelectedRecordSource)
+                        .setBufferSizeInBytes(2 * mMinRecorderBuffSizeInBytes)
+                        .build();
+            } else {
+                mRecorder = new AudioRecord(mSelectedRecordSource, mSamplingRate,
+                        mChannelConfig, mAudioFormat, 2 * mMinRecorderBuffSizeInBytes);
+            }
+        } catch (IllegalArgumentException | UnsupportedOperationException e) {
+            e.printStackTrace();
+            return false;
+        } finally {
+            if (mRecorder == null){
+                return false;
+            } else if (mRecorder.getState() != AudioRecord.STATE_INITIALIZED) {
+                mRecorder.release();
+                mRecorder = null;
+                return false;
+            }
+        }
+
+        final int targetFFTMs = 20; // we want each FFT to cover 20ms of samples
+        mFFTSamplingSize = targetFFTMs * mSamplingRate / Constant.MILLIS_PER_SECOND;
+        // round to the nearest power of 2
+        mFFTSamplingSize = (int) Math.pow(2, Math.round(Math.log(mFFTSamplingSize) / Math.log(2)));
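+        // e.g. at 48000 Hz a 20 ms window is 960 samples, which rounds to 1024 (2^10)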
+
+        if (mFFTSamplingSize < 2) {
+            mFFTSamplingSize = 2; // mFFTSamplingSize should be at least 2
+        }
+        mFFTOverlapSamples = mFFTSamplingSize / 2; // mFFTOverlapSamples is half of mFFTSamplingSize
+
+        return true;
+    }
+
+
+    boolean startRecording() {
+        synchronized (sRecordingLock) {
+            mIsRecording = true;
+        }
+
+        final int samplesDurationInSecond = 2;
+        int nNewSize = mSamplingRate * samplesDurationInSecond; // 2 seconds!
+        mSamples = new double[nNewSize];
+
+        boolean status = initRecord();
+        if (status) {
+            log("Ready to go.");
+            startRecordingForReal();
+        } else {
+            log("Recorder initialization error.");
+            synchronized (sRecordingLock) {
+                mIsRecording = false;
+            }
+        }
+
+        return status;
+    }
+
+
+    boolean startBufferRecording() {
+        synchronized (sRecordingLock) {
+            mIsRecording = true;
+        }
+
+        boolean status = initBufferRecord();
+        if (status) {
+            log("Ready to go.");
+            startBufferRecordingForReal();
+        } else {
+            log("Recorder initialization error.");
+            synchronized (sRecordingLock) {
+                mIsRecording = false;
+            }
+        }
+
+        return status;
+    }
+
+
+    void startRecordingForReal() {
+        mLatencyTestPipeShort.flush();
+        mRecorder.startRecording();
+    }
+
+
+    void startBufferRecordingForReal() {
+        mBufferTestPipeShort = new PipeShort(Constant.MAX_SHORTS);
+        mGlitchDetectionThread = new GlitchDetectionThread(mFrequency1, mFrequency2, mSamplingRate,
+                mFFTSamplingSize, mFFTOverlapSamples, mBufferTestDurationInSeconds,
+                mBufferTestWavePlotDurationInSeconds, mBufferTestPipeShort, mCaptureHolder);
+        mGlitchDetectionThread.start();
+        mRecorder.startRecording();
+    }
+
+
+    void stopRecording() {
+        log("stop recording A");
+        synchronized (sRecordingLock) {
+            log("stop recording B");
+            mIsRecording = false;
+        }
+        stopRecordingForReal();
+    }
+
+
+    void stopRecordingForReal() {
+        log("stop recording for real");
+        if (mRecorder != null) {
+            mRecorder.stop();
+        }
+
+        if (mRecorder != null) {
+            mRecorder.release();
+            mRecorder = null;
+        }
+    }
+
+
+    public void run() {
+        // tracks the total time elapsed since the start of the test; only used in the buffer test
+        long elapsedTimeMs;
+        mIsRunning = true;
+        while (mIsRunning) {
+            boolean isRecording;
+
+            synchronized (sRecordingLock) {
+                isRecording = mIsRecording;
+            }
+
+            if (isRecording && mRecorder != null) {
+                int nSamplesRead;
+                switch (mTestType) {
+                case Constant.LOOPBACK_PLUG_AUDIO_THREAD_TEST_TYPE_LATENCY:
+                    nSamplesRead = mRecorder.read(mAudioShortArray, 0,
+                                   mMinRecorderBuffSizeInSamples);
+
+                    if (nSamplesRead > 0) {
+                        mRecorderBufferPeriodInRecorder.collectBufferPeriod();
+                        { // inject the tone that will be looped-back
+                            int currentIndex = mSamplesIndex - 100; //offset
+                            for (int i = 0; i < nSamplesRead; i++) {
+                                if (currentIndex >= 0 && currentIndex < mAudioTone.length) {
+                                    mAudioShortArray[i] = mAudioTone[currentIndex];
+                                }
+                                currentIndex++;
+                            }
+                        }
+
+                        mLatencyTestPipeShort.write(mAudioShortArray, 0, nSamplesRead);
+                        if (isStillRoomToRecord()) { //record to vector
+                            for (int i = 0; i < nSamplesRead; i++) {
+                                double value = mAudioShortArray[i];
+                                value = value / Short.MAX_VALUE;
+                                if (mSamplesIndex < mSamples.length) {
+                                    mSamples[mSamplesIndex++] = value;
+                                }
+
+                            }
+                        } else {
+                            mIsRunning = false;
+                        }
+                    }
+                    break;
+                case Constant.LOOPBACK_PLUG_AUDIO_THREAD_TEST_TYPE_BUFFER_PERIOD:
+                    if (mIsRequestStop) {
+                        endBufferTest();
+                    } else {
+                        // before we start the test, first adjust sound level
+                        if (mIsAdjustingSoundLevel) {
+                            nSamplesRead = mRecorder.read(mBufferTestShortArray, 0,
+                                    mMinRecorderBuffSizeInSamples);
+                            if (nSamplesRead > 0) {
+                                for (int i = 0; i < nSamplesRead; i++) {
+                                    double value = mBufferTestShortArray[i];
+                                    if (mSoundLevelSamplesIndex < mSoundLevelSamples.length) {
+                                        mSoundLevelSamples[mSoundLevelSamplesIndex++] = value;
+                                    } else {
+                                        // adjust the sound level to appropriate level
+                                        mIsAdjustingSoundLevel = AdjustSoundLevel();
+                                        mAdjustSoundLevelCount++;
+                                        mSoundLevelSamplesIndex = 0;
+                                        if (!mIsAdjustingSoundLevel) {
+                                            // end of sound level adjustment, notify AudioTrack
+                                            mAudioThread.setIsAdjustingSoundLevel(false);
+                                            mStartTimeMs = System.currentTimeMillis();
+                                            break;
+                                        }
+                                    }
+                                }
+                            }
+                        } else {
+                            // the end of the test is controlled here: once the specified test
+                            // duration has elapsed, end the test
+                            elapsedTimeMs = System.currentTimeMillis() - mStartTimeMs;
+                            if (elapsedTimeMs >= mBufferTestDurationMs) {
+                                endBufferTest();
+                            } else {
+                                nSamplesRead = mRecorder.read(mBufferTestShortArray, 0,
+                                        mMinRecorderBuffSizeInSamples);
+                                if (nSamplesRead > 0) {
+                                    mRecorderBufferPeriodInRecorder.collectBufferPeriod();
+                                    mBufferTestPipeShort.write(mBufferTestShortArray, 0,
+                                            nSamplesRead);
+                                }
+                            }
+                        }
+                    }
+                    break;
+                }
+            }
+        } // end of while (mIsRunning)
+        stopRecording(); // release the recorder once the loop exits
+    }
+
+
+    /** Handles a request to stop the test; the test is stopped even if it has not finished. */
+    public void requestStop() {
+        switch (mTestType) {
+        case Constant.LOOPBACK_PLUG_AUDIO_THREAD_TEST_TYPE_BUFFER_PERIOD:
+            mIsRequestStop = true;
+            break;
+        case Constant.LOOPBACK_PLUG_AUDIO_THREAD_TEST_TYPE_LATENCY:
+            mIsRunning = false;
+            break;
+        }
+    }
+
+
+    /** Collect data, then clean things up. */
+    private void endBufferTest() {
+        mIsRunning = false;
+        mAllGlitches = mGlitchDetectionThread.getGlitches();
+        mGlitchingIntervalTooLong = mGlitchDetectionThread.getGlitchingIntervalTooLong();
+        mSamples = mGlitchDetectionThread.getWaveData();
+        endDetecting();
+    }
+
+
+    /** Clean everything up. */
+    public void endDetecting() {
+        mBufferTestPipeShort.flush();
+        mBufferTestPipeShort = null;
+        mGlitchDetectionThread.requestStop();
+        GlitchDetectionThread tempThread = mGlitchDetectionThread;
+        mGlitchDetectionThread = null;
+        try {
+            tempThread.join(Constant.JOIN_WAIT_TIME_MS);
+        } catch (InterruptedException e) {
+            e.printStackTrace();
+        }
+    }
+
+
+    /**
+     * Adjust the stream volume so that the buffer test runs at a sound level that is high but
+     * not close to saturation. Returns true if the volume was changed and adjustment should
+     * continue, or false once the level is within the target range or cannot be raised further.
+     */
+    private boolean AdjustSoundLevel() {
+        // if after adjusting 20 times, we still cannot get into the volume we want, increase the
+        // limit range, so it's easier to get into the volume we want.
+        if (mAdjustSoundLevelCount != 0 && mAdjustSoundLevelCount % 20 == 0) {
+            mSoundTopLimit += 0.1;
+            mSoundBotLimit -= 0.1;
+        }
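+        // e.g. after 20 unsuccessful passes the target window widens from [0.6, 0.8] of full
+        // scale to [0.5, 0.9], and it keeps widening every further 20 passes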
+
+        double topThreshold = Short.MAX_VALUE * mSoundTopLimit;
+        double botThreshold = Short.MAX_VALUE * mSoundBotLimit;
+        double currentMax = mSoundLevelSamples[0];
+        int currentVolume = mAudioManager.getStreamVolume(AudioManager.STREAM_MUSIC);
+
+        // since the signal is a sine wave, only the maximum positive value needs to be checked
+        for (int i = 1; i < mSoundLevelSamples.length; i++) {
+            if (mSoundLevelSamples[i] > topThreshold) { // sample exceeds the top limit: turn down
+                // adjust sound level down
+                currentVolume--;
+                mAudioManager.setStreamVolume(AudioManager.STREAM_MUSIC, currentVolume, 0);
+                return true;
+            }
+
+            if (mSoundLevelSamples[i] > currentMax) {
+                currentMax = mSoundLevelSamples[i];
+            }
+        }
+
+        if (currentMax < botThreshold) {
+            // adjust sound level up
+            if (currentVolume < mMaxVolume) {
+                currentVolume++;
+                mAudioManager.setStreamVolume(AudioManager.STREAM_MUSIC,
+                        currentVolume, 0);
+                return true;
+            } else {
+                return false;
+            }
+        }
+
+        return false;
+    }
+
+
+    /** Check if there's any room left in mSamples. */
+    public boolean isStillRoomToRecord() {
+        boolean result = false;
+        if (mSamples != null) {
+            if (mSamplesIndex < mSamples.length) {
+                result = true;
+            }
+        }
+
+        return result;
+    }
+
+
+    public void setBufferTestDurationInSeconds(int bufferTestDurationInSeconds) {
+        mBufferTestDurationInSeconds = bufferTestDurationInSeconds;
+        mBufferTestDurationMs = Constant.MILLIS_PER_SECOND * mBufferTestDurationInSeconds;
+    }
+
+
+    public int[] getAllGlitches() {
+        return mAllGlitches;
+    }
+
+
+    public boolean getGlitchingIntervalTooLong() {
+        return mGlitchingIntervalTooLong;
+    }
+
+
+    public double[] getWaveData() {
+        return mSamples;
+    }
+
+
+    public int getFFTSamplingSize() {
+        return mFFTSamplingSize;
+    }
+
+
+    public int getFFTOverlapSamples() {
+        return mFFTOverlapSamples;
+    }
+
+
+    private static void log(String msg) {
+        Log.v(TAG, msg);
+    }
+
+}
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/SaveFilesDialogFragment.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/SaveFilesDialogFragment.java
new file mode 100644
index 0000000..4707b81
--- /dev/null
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/SaveFilesDialogFragment.java
@@ -0,0 +1,77 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.drrickorang.loopback;
+
+import android.app.Activity;
+import android.app.AlertDialog;
+import android.app.Dialog;
+import android.app.DialogFragment;
+import android.content.DialogInterface;
+import android.os.Bundle;
+
+/**
+ * Displays a dialog that lets the user either save all files to file://mnt/sdcard/ or choose
+ * the filenames individually.
+ */
+public class SaveFilesDialogFragment extends DialogFragment {
+
+    /* The activity that creates an instance of this dialog fragment must
+     * implement this interface in order to receive event callbacks. */
+    public interface NoticeDialogListener {
+        public void onSaveDialogSelect(DialogFragment dialog, boolean saveWithoutDialog);
+    }
+
+    // Use this instance of the interface to deliver action events
+    NoticeDialogListener mListener;
+
+    // Override the Fragment.onAttach() method to instantiate the NoticeDialogListener
+    @Override
+    public void onAttach(Activity activity) {
+        super.onAttach(activity);
+        // Verify that the host activity implements the callback interface
+        try {
+            // Instantiate the NoticeDialogListener so we can send events to the host
+            mListener = (NoticeDialogListener) activity;
+        } catch (ClassCastException e) {
+            // The activity doesn't implement the interface, throw exception
+            throw new ClassCastException(activity.toString()
+                    + " must implement NoticeDialogListener");
+        }
+    }
+
+    @Override
+    public Dialog onCreateDialog(Bundle savedInstanceState) {
+        AlertDialog.Builder builder = new AlertDialog.Builder(getActivity());
+
+        builder.setMessage(R.string.SaveFileDialogLabel)
+                .setPositiveButton(R.string.SaveFileDialogOK,
+                        new DialogInterface.OnClickListener() {
+                    public void onClick(DialogInterface dialog, int id) {
+                        dialog.dismiss();
+                        mListener.onSaveDialogSelect(SaveFilesDialogFragment.this, true);
+                    }
+                })
+                .setNegativeButton(R.string.SaveFileDialogChooseFilenames,
+                        new DialogInterface.OnClickListener() {
+                    public void onClick(DialogInterface dialog, int id) {
+                        dialog.dismiss();
+                        mListener.onSaveDialogSelect(SaveFilesDialogFragment.this, false);
+                    }
+                });
+        // Create the AlertDialog object and return it
+        return builder.create();
+    }
+}
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/SettingsActivity.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/SettingsActivity.java
new file mode 100644
index 0000000..1167a25
--- /dev/null
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/SettingsActivity.java
@@ -0,0 +1,458 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.drrickorang.loopback;
+
+import android.app.Activity;
+import android.content.Intent;
+import android.os.Bundle;
+import android.util.Log;
+import android.view.Gravity;
+import android.view.View;
+import android.view.ViewGroup;
+import android.widget.AdapterView;
+import android.widget.AdapterView.OnItemSelectedListener;
+import android.widget.ArrayAdapter;
+import android.widget.CompoundButton;
+import android.widget.PopupWindow;
+import android.widget.Spinner;
+import android.widget.TextView;
+import android.widget.ToggleButton;
+
+
+/**
+ * This activity displays all settings that can be adjusted by the user.
+ */
+
+public class SettingsActivity extends Activity implements OnItemSelectedListener,
+        ToggleButton.OnCheckedChangeListener {
+
+    private static final String TAG = "SettingsActivity";
+
+    private Spinner      mSpinnerMicSource;
+    private Spinner      mSpinnerPerformanceMode;
+    private Spinner      mSpinnerSamplingRate;
+    private Spinner      mSpinnerAudioThreadType;
+    private TextView     mTextSettingsInfo;
+    private Spinner      mSpinnerChannelIndex;
+    private SettingsPicker mPlayerBufferUI;
+    private SettingsPicker mRecorderBufferUI;
+    private SettingsPicker mBufferTestDurationUI;
+    private SettingsPicker mWavePlotDurationUI;
+    private SettingsPicker mLoadThreadUI;
+    private SettingsPicker mNumCapturesUI;
+    private SettingsPicker mIgnoreFirstFramesUI;
+    private ToggleButton   mSystraceToggleButton;
+    private ToggleButton   mBugreportToggleButton;
+    private ToggleButton   mWavCaptureToggleButton;
+    private ToggleButton   mSoundLevelCalibrationToggleButton;
+
+    ArrayAdapter<CharSequence> mAdapterSamplingRate;
+
+
+    @Override
+    public void onCreate(Bundle savedInstanceState) {
+        super.onCreate(savedInstanceState);
+        // Set the layout for this activity; it is defined in res/layout/settings_activity.xml.
+        View view = getLayoutInflater().inflate(R.layout.settings_activity, null);
+        setContentView(view);
+        mTextSettingsInfo = (TextView) findViewById(R.id.textSettingsInfo);
+
+        int micSource = getApp().getMicSource();
+        mSpinnerMicSource = (Spinner) findViewById(R.id.spinnerMicSource);
+        ArrayAdapter<CharSequence> adapterMicSource = ArrayAdapter.createFromResource(this,
+                R.array.mic_source_array, android.R.layout.simple_spinner_item);
+        // Specify the layout to use when the list of choices appears
+        adapterMicSource.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
+        // Apply the adapter to the spinner
+        mSpinnerMicSource.setAdapter(adapterMicSource);
+        //set current value
+        mSpinnerMicSource.setSelection(micSource, false);
+        mSpinnerMicSource.setOnItemSelectedListener(this);
+
+        int performanceMode = getApp().getPerformanceMode();
+        mSpinnerPerformanceMode = (Spinner) findViewById(R.id.spinnerPerformanceMode);
+        ArrayAdapter<CharSequence> adapterPerformanceMode = ArrayAdapter.createFromResource(this,
+                R.array.performance_mode_array, android.R.layout.simple_spinner_item);
+        // Specify the layout to use when the list of choices appears
+        adapterPerformanceMode.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
+        // Apply the adapter to the spinner
+        mSpinnerPerformanceMode.setAdapter(adapterPerformanceMode);
+        //set current value
+        mSpinnerPerformanceMode.setSelection(performanceMode + 1, false);
+        mSpinnerPerformanceMode.setOnItemSelectedListener(this);
+
+        int samplingRate = getApp().getSamplingRate();
+        //init spinner, etc
+        mSpinnerSamplingRate = (Spinner) findViewById(R.id.spinnerSamplingRate);
+        mAdapterSamplingRate = ArrayAdapter.createFromResource(this,
+                R.array.samplingRate_array, android.R.layout.simple_spinner_item);
+        // Specify the layout to use when the list of choices appears
+        mAdapterSamplingRate.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
+        // Apply the adapter to the spinner
+        mSpinnerSamplingRate.setAdapter(mAdapterSamplingRate);
+        //set current value
+        String currentValue = String.valueOf(samplingRate);
+        int nPosition = mAdapterSamplingRate.getPosition(currentValue);
+        mSpinnerSamplingRate.setSelection(nPosition, false);
+        mSpinnerSamplingRate.setOnItemSelectedListener(this);
+
+        //spinner native
+        int audioThreadType = getApp().getAudioThreadType();
+        mSpinnerAudioThreadType = (Spinner) findViewById(R.id.spinnerAudioThreadType);
+        ArrayAdapter<CharSequence> adapter2 = ArrayAdapter.createFromResource(this,
+                R.array.audioThreadType_array, android.R.layout.simple_spinner_item);
+        // Specify the layout to use when the list of choices appears
+        adapter2.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
+        // Apply the adapter to the spinner
+        mSpinnerAudioThreadType.setAdapter(adapter2);
+        //set current value
+        mSpinnerAudioThreadType.setSelection(audioThreadType, false);
+        if (!getApp().isSafeToUseSles())
+            mSpinnerAudioThreadType.setEnabled(false);
+        mSpinnerAudioThreadType.setOnItemSelectedListener(this);
+
+        mSpinnerChannelIndex = (Spinner) findViewById(R.id.spinnerChannelIndex);
+        ArrayAdapter<CharSequence> adapter3 = ArrayAdapter.createFromResource(this,
+                R.array.channelIndex_array, android.R.layout.simple_spinner_item);
+        // Specify the layout to use when the list of choices appears
+        adapter3.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
+        // Apply the adapter to the spinner
+        mSpinnerChannelIndex.setAdapter(adapter3);
+        mSpinnerChannelIndex.setOnItemSelectedListener(this);
+
+        // Settings Picker for Buffer Test Duration
+        mBufferTestDurationUI = (SettingsPicker) findViewById(R.id.bufferTestDurationSetting);
+        mBufferTestDurationUI.setMinMaxDefault(Constant.BUFFER_TEST_DURATION_SECONDS_MIN,
+                Constant.BUFFER_TEST_DURATION_SECONDS_MAX, getApp().getBufferTestDuration());
+        mBufferTestDurationUI.setTitle(getResources().getString(R.string.labelBufferTestDuration,
+                Constant.BUFFER_TEST_DURATION_SECONDS_MAX));
+        mBufferTestDurationUI.setSettingsChangeListener(new SettingsPicker.SettingChangeListener() {
+            @Override
+            public void settingChanged(int seconds) {
+                log("buffer test new duration: " + seconds);
+                getApp().setBufferTestDuration(seconds);
+                setSettingsHaveChanged();
+            }
+        });
+
+        // Settings Picker for Wave Plot Duration
+        mWavePlotDurationUI = (SettingsPicker) findViewById(R.id.wavePlotDurationSetting);
+        mWavePlotDurationUI.setMinMaxDefault(Constant.BUFFER_TEST_WAVE_PLOT_DURATION_SECONDS_MIN,
+                Constant.BUFFER_TEST_WAVE_PLOT_DURATION_SECONDS_MAX,
+                getApp().getBufferTestWavePlotDuration());
+        mWavePlotDurationUI.setTitle(getResources().getString(
+                R.string.labelBufferTestWavePlotDuration,
+                Constant.BUFFER_TEST_WAVE_PLOT_DURATION_SECONDS_MAX));
+        mWavePlotDurationUI.setSettingsChangeListener(new SettingsPicker.SettingChangeListener() {
+            @Override
+            public void settingChanged(int value) {
+                log("buffer test's wave plot new duration:" + value);
+                getApp().setBufferTestWavePlotDuration(value);
+                setSettingsHaveChanged();
+            }
+        });
+
+        // Settings Picker for Player Buffer Period
+        mPlayerBufferUI = (SettingsPicker) findViewById(R.id.playerBufferSetting);
+        mPlayerBufferUI.setMinMaxDefault(Constant.PLAYER_BUFFER_FRAMES_MIN,
+                Constant.PLAYER_BUFFER_FRAMES_MAX,
+                getApp().getPlayerBufferSizeInBytes() / Constant.BYTES_PER_FRAME);
+        mPlayerBufferUI.setTitle(getResources().getString(
+                R.string.labelPlayerBuffer, Constant.PLAYER_BUFFER_FRAMES_MAX));
+        mPlayerBufferUI.setSettingsChangeListener(new SettingsPicker.SettingChangeListener() {
+            @Override
+            public void settingChanged(int value) {
+                log("player buffer new size " + value);
+                getApp().setPlayerBufferSizeInBytes(value * Constant.BYTES_PER_FRAME);
+                int audioThreadType = mSpinnerAudioThreadType.getSelectedItemPosition();
+                // in native mode, recorder buffer size = player buffer size
+                if (audioThreadType == Constant.AUDIO_THREAD_TYPE_NATIVE) {
+                    getApp().setRecorderBufferSizeInBytes(value * Constant.BYTES_PER_FRAME);
+                    mRecorderBufferUI.setValue(value);
+                }
+                setSettingsHaveChanged();
+            }
+        });
+
+        // Settings Picker for Recorder Buffer Period
+        mRecorderBufferUI = (SettingsPicker) findViewById(R.id.recorderBufferSetting);
+        mRecorderBufferUI.setMinMaxDefault(Constant.RECORDER_BUFFER_FRAMES_MIN,
+                Constant.RECORDER_BUFFER_FRAMES_MAX,
+                getApp().getRecorderBufferSizeInBytes() / Constant.BYTES_PER_FRAME);
+        mRecorderBufferUI.setTitle(getResources().getString(R.string.labelRecorderBuffer,
+                Constant.RECORDER_BUFFER_FRAMES_MAX));
+        mRecorderBufferUI.setSettingsChangeListener(new SettingsPicker.SettingChangeListener() {
+            @Override
+            public void settingChanged(int value) {
+                log("recorder buffer new size:" + value);
+                getApp().setRecorderBufferSizeInBytes(value * Constant.BYTES_PER_FRAME);
+                setSettingsHaveChanged();
+            }
+        });
+
+        // Settings Picker for Number of Load Threads
+        mLoadThreadUI = (SettingsPicker) findViewById(R.id.numLoadThreadsSetting);
+        mLoadThreadUI.setMinMaxDefault(Constant.MIN_NUM_LOAD_THREADS, Constant.MAX_NUM_LOAD_THREADS,
+                getApp().getNumberOfLoadThreads());
+        mLoadThreadUI.setTitle(getResources().getString(R.string.loadThreadsLabel));
+        mLoadThreadUI.setSettingsChangeListener(new SettingsPicker.SettingChangeListener() {
+            @Override
+            public void settingChanged(int value) {
+                log("new num load threads:" + value);
+                getApp().setNumberOfLoadThreads(value);
+                setSettingsHaveChanged();
+            }
+        });
+
+        // Settings Picker for Number of Captures
+        mNumCapturesUI = (SettingsPicker) findViewById(R.id.numCapturesSettingPicker);
+        mNumCapturesUI.setMinMaxDefault(Constant.MIN_NUM_CAPTURES, Constant.MAX_NUM_CAPTURES,
+                getApp().getNumStateCaptures());
+        mNumCapturesUI.setTitle(getResources().getString(R.string.numCapturesSetting));
+        mNumCapturesUI.setSettingsChangeListener(new SettingsPicker.SettingChangeListener() {
+            @Override
+            public void settingChanged(int value) {
+                log("new num captures:" + value);
+                getApp().setNumberOfCaptures(value);
+                setSettingsHaveChanged();
+            }
+        });
+
+        mWavCaptureToggleButton = (ToggleButton) findViewById(R.id.wavSnippetsEnabledToggle);
+        mWavCaptureToggleButton.setChecked(getApp().isCaptureWavSnippetsEnabled());
+        mWavCaptureToggleButton.setOnCheckedChangeListener(this);
+
+        mBugreportToggleButton = (ToggleButton) findViewById(R.id.BugreportEnabledToggle);
+        mBugreportToggleButton.setChecked(getApp().isCaptureBugreportEnabled());
+        mBugreportToggleButton.setOnCheckedChangeListener(this);
+
+        mSystraceToggleButton = (ToggleButton) findViewById(R.id.SystraceEnabledToggle);
+        mSystraceToggleButton.setChecked(getApp().isCaptureSysTraceEnabled());
+        mSystraceToggleButton.setOnCheckedChangeListener(this);
+
+        mSoundLevelCalibrationToggleButton = (ToggleButton)
+                findViewById(R.id.soundLevelCalibrationEnabledToggle);
+        mSoundLevelCalibrationToggleButton.setChecked(getApp().isSoundLevelCalibrationEnabled());
+        mSoundLevelCalibrationToggleButton.setOnCheckedChangeListener(this);
+
+        // Settings Picker for number of frames to ignore at the beginning
+        mIgnoreFirstFramesUI = (SettingsPicker) findViewById(R.id.ignoreFirstFramesSettingPicker);
+        mIgnoreFirstFramesUI.setMinMaxDefault(Constant.MIN_IGNORE_FIRST_FRAMES,
+                Constant.MAX_IGNORE_FIRST_FRAMES, getApp().getIgnoreFirstFrames());
+        mIgnoreFirstFramesUI.setTitle(getResources().getString(R.string.labelIgnoreFirstFrames,
+                Constant.MAX_IGNORE_FIRST_FRAMES));
+        mIgnoreFirstFramesUI.setSettingsChangeListener(new SettingsPicker.SettingChangeListener() {
+            @Override
+            public void settingChanged(int frames) {
+                log("new number of first frames to ignore: " + frames);
+                getApp().setIgnoreFirstFrames(frames);
+                setSettingsHaveChanged();
+            }
+        });
+
+        refresh();
+    }
+
+
+    public void onDestroy() {
+        super.onDestroy();
+    }
+
+
+    @Override
+    public void onBackPressed() {
+        log("on back pressed");
+        setSettingsHaveChanged();
+        finish();
+    }
+
+
+    private void refresh() {
+        mBufferTestDurationUI.setValue(getApp().getBufferTestDuration());
+        mWavePlotDurationUI.setValue(getApp().getBufferTestWavePlotDuration());
+
+        mPlayerBufferUI.setValue(getApp().getPlayerBufferSizeInBytes() / Constant.BYTES_PER_FRAME);
+        mRecorderBufferUI.setValue(
+                getApp().getRecorderBufferSizeInBytes() / Constant.BYTES_PER_FRAME);
+
+        mRecorderBufferUI.setEnabled(
+                getApp().getAudioThreadType() == Constant.AUDIO_THREAD_TYPE_JAVA);
+
+        int samplingRate = getApp().getSamplingRate();
+        String currentValue = String.valueOf(samplingRate);
+        int nPosition = mAdapterSamplingRate.getPosition(currentValue);
+        mSpinnerSamplingRate.setSelection(nPosition);
+
+
+        if (getApp().getAudioThreadType() == Constant.AUDIO_THREAD_TYPE_JAVA) {
+            mSpinnerChannelIndex.setSelection(getApp().getChannelIndex() + 1, false);
+            mSpinnerChannelIndex.setEnabled(true);
+        } else {
+            mSpinnerChannelIndex.setSelection(0, false);
+            mSpinnerChannelIndex.setEnabled(false);
+        }
+
+        mNumCapturesUI.setEnabled(getApp().isCaptureEnabled() ||
+                getApp().isCaptureWavSnippetsEnabled());
+
+        String info = getApp().getSystemInfo();
+        mTextSettingsInfo.setText("SETTINGS - " + info);
+    }
+
+
+    public void onItemSelected(AdapterView<?> parent, View view, int pos, long id) {
+        // An item was selected. You can retrieve the selected item using
+        // parent.getItemAtPosition(pos)
+        log("item selected!");
+
+        switch (parent.getId()) {
+        case R.id.spinnerSamplingRate:
+            String stringValue = mSpinnerSamplingRate.getSelectedItem().toString();
+            int samplingRate = Integer.parseInt(stringValue);
+            getApp().setSamplingRate(samplingRate);
+            setSettingsHaveChanged();
+            log("Sampling Rate: " + stringValue);
+            refresh();
+            break;
+        case R.id.spinnerAudioThreadType:
+            int audioThreadType = mSpinnerAudioThreadType.getSelectedItemPosition();
+            getApp().setAudioThreadType(audioThreadType);
+            getApp().computeDefaults();
+            setSettingsHaveChanged();
+            log("AudioThreadType:" + audioThreadType);
+            refresh();
+            break;
+        case R.id.spinnerChannelIndex:
+            int channelIndex = mSpinnerChannelIndex.getSelectedItemPosition() - 1;
+            getApp().setChannelIndex(channelIndex);
+            setSettingsHaveChanged();
+            log("channelIndex:" + channelIndex);
+            refresh();
+            break;
+        case R.id.spinnerMicSource:
+            int micSource = mSpinnerMicSource.getSelectedItemPosition();
+            getApp().setMicSource(micSource);
+            setSettingsHaveChanged();
+            log("mic Source:" + micSource);
+            refresh();
+            break;
+        case R.id.spinnerPerformanceMode:
+            int performanceMode = mSpinnerPerformanceMode.getSelectedItemPosition() - 1;
+            getApp().setPerformanceMode(performanceMode);
+            setSettingsHaveChanged();
+            log("performanceMode:" + performanceMode);
+            refresh();
+            break;
+        }
+    }
+
+    @Override
+    public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
+        if (buttonView.getId() == mWavCaptureToggleButton.getId()){
+            getApp().setCaptureWavsEnabled(isChecked);
+        } else if (buttonView.getId() == mSystraceToggleButton.getId()) {
+            getApp().setCaptureSysTraceEnabled(isChecked);
+        } else if (buttonView.getId() == mBugreportToggleButton.getId()) {
+            getApp().setCaptureBugreportEnabled(isChecked);
+        } else if (buttonView.getId() == mSoundLevelCalibrationToggleButton.getId()) {
+            getApp().setSoundLevelCalibrationEnabled(isChecked);
+        }
+        mNumCapturesUI.setEnabled(getApp().isCaptureEnabled() ||
+                getApp().isCaptureWavSnippetsEnabled());
+    }
+
+    private void setSettingsHaveChanged() {
+        Intent intent = new Intent();
+        setResult(RESULT_OK, intent);
+    }
+
+
+    public void onNothingSelected(AdapterView<?> parent) {
+        // Another interface callback
+    }
+
+    public void onButtonHelp(View view) {
+        // Create a PopUpWindow with scrollable TextView
+        View puLayout = this.getLayoutInflater().inflate(R.layout.report_window, null);
+        PopupWindow popUp = new PopupWindow(puLayout, ViewGroup.LayoutParams.MATCH_PARENT,
+                ViewGroup.LayoutParams.MATCH_PARENT, true);
+
+        TextView helpText =
+                (TextView) popUp.getContentView().findViewById(R.id.ReportInfo);
+        if (view.getId() == R.id.buttonSystraceHelp || view.getId() == R.id.buttonBugreportHelp) {
+            helpText.setText(getResources().getString(R.string.systraceHelp));
+        } else if (view.getId() == R.id.buttonCalibrateSoundLevelHelp) {
+            helpText.setText(getResources().getString(R.string.calibrateSoundLevelHelp));
+        }
+
+        // display pop up window, dismissible with back button
+        popUp.showAtLocation(findViewById(R.id.settingsMainLayout), Gravity.TOP, 0, 0);
+    }
+
+    /** Called when the user clicks the button. */
+    public void onButtonClick(View view) {
+        getApp().computeDefaults();
+        refresh();
+    }
+
+// Below is work in progress by Ricardo
+//    public void onButtonRecordDefault(View view) {
+//        int samplingRate = getApp().getSamplingRate();
+//
+//        int minRecorderBufferSizeInBytes =  AudioRecord.getMinBufferSize(samplingRate,
+//                AudioFormat.CHANNEL_IN_MONO,
+//                AudioFormat.ENCODING_PCM_16BIT);
+//        getApp().setRecorderBufferSizeInBytes(minRecorderBufferSizeInBytes);
+//
+//        refresh();
+//    }
+
+//    private void computeDefaults() {
+//
+////        if (getApp().getAudioThreadType() == LoopbackApplication.AUDIO_THREAD_TYPE_JAVA) {
+////            mNumberPickerRecorderBuffer.setEnabled(true);
+////        else
+////            mNumberPickerRecorderBuffer.setEnabled(false);
+//
+//        int samplingRate = AudioTrack.getNativeOutputSampleRate(AudioManager.STREAM_MUSIC);
+//        getApp().setSamplingRate(samplingRate);
+//        int minPlayerBufferSizeInBytes = AudioTrack.getMinBufferSize(samplingRate,
+//                AudioFormat.CHANNEL_OUT_MONO,
+//                AudioFormat.ENCODING_PCM_16BIT);
+//        getApp().setPlayerBufferSizeInBytes(minPlayerBufferSizeInBytes);
+//
+//        int minRecorderBufferSizeInBytes =  AudioRecord.getMinBufferSize(samplingRate,
+//                AudioFormat.CHANNEL_IN_MONO,
+//                AudioFormat.ENCODING_PCM_16BIT);
+//        getApp().setRecorderBufferSizeInBytes(minRecorderBufferSizeInBytes);
+//        getApp().setRecorderBufferSizeInBytes(minRecorderBufferSizeInBytes);
+//
+//        log("computed defaults");
+//
+//    }
+
+    private LoopbackApplication getApp() {
+        return (LoopbackApplication) this.getApplication();
+    }
+
+
+    private static void log(String msg) {
+        Log.v(TAG, msg);
+    }
+
+
+}
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/SettingsPicker.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/SettingsPicker.java
new file mode 100644
index 0000000..f8a9e4a
--- /dev/null
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/SettingsPicker.java
@@ -0,0 +1,133 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.drrickorang.loopback;
+
+import android.content.Context;
+import android.util.AttributeSet;
+import android.widget.EditText;
+import android.widget.LinearLayout;
+import android.widget.SeekBar;
+import android.widget.TextView;
+
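+/**
+ * A compound setting widget that pairs a SeekBar with an editable text field. The two views are
+ * kept in sync, and a single SettingChangeListener is notified when the user commits a new value.
+ */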
+public class SettingsPicker extends LinearLayout implements SeekBar.OnSeekBarChangeListener,
+        CatchEventsEditText.EditTextEventListener {
+
+    protected TextView mTitleTextView;
+    protected CatchEventsEditText mValueEditText;
+    protected SeekBar mValueSeekBar;
+    protected SettingChangeListener mSettingsChangeListener;
+
+    protected int mMinimumValue;
+    protected int mMaximumValue;
+
+    public interface SettingChangeListener {
+        public void settingChanged(int value);
+    }
+
+    public SettingsPicker(Context context, AttributeSet attrs) {
+        super(context, attrs);
+
+        inflate(context, R.layout.settings_picker, this);
+
+        mTitleTextView = (TextView) findViewById(R.id.settings_title);
+        mValueEditText = (CatchEventsEditText) findViewById(R.id.settings_valueText);
+        mValueSeekBar = (SeekBar) findViewById(R.id.settings_seekbar);
+
+        mValueEditText.setEditTextEvenListener(this);
+        mValueSeekBar.setOnSeekBarChangeListener(this);
+    }
+
+    public void setMinMaxDefault(int min, int max, int def) {
+        mMinimumValue = min;
+        mMaximumValue = max;
+        mValueSeekBar.setMax(max - min);
+        setValue(def);
+    }
+
+    public void setTitle(String title) {
+        mTitleTextView.setText(title);
+    }
+
+    public void setValue(int value) {
+        mValueSeekBar.setProgress(value - mMinimumValue);
+        mValueEditText.setText(Integer.toString(value));
+    }
+
+    public void setSettingsChangeListener(SettingChangeListener settingsChangeListener) {
+        mSettingsChangeListener = settingsChangeListener;
+    }
+
+    protected void textChanged(int value) {
+        mValueSeekBar.setProgress(value - mMinimumValue);
+        if (mSettingsChangeListener != null) {
+            mSettingsChangeListener.settingChanged(value);
+        }
+    }
+
+    protected void sliderChanged(int value, boolean userInteractionFinished) {
+        mValueEditText.setText(Integer.toString(value));
+        if (userInteractionFinished && mSettingsChangeListener != null) {
+            mSettingsChangeListener.settingChanged(value);
+        }
+    }
+
+    @Override
+    public void textEdited(EditText v) {
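+        // Clamp typed values to [mMinimumValue, mMaximumValue] (unparsable input falls back to
+        // the minimum) before mirroring them to the SeekBar and notifying the listener.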
+        if (!v.getText().toString().isEmpty()) {
+            int value;
+            try {
+                value = Integer.parseInt(v.getText().toString());
+            } catch (NumberFormatException e) {
+                value = mMinimumValue;
+                v.setText(Integer.toString(value));
+            }
+            if (value < mMinimumValue) {
+                value = mMinimumValue;
+                v.setText(Integer.toString(value));
+            } else if (value > mMaximumValue) {
+                value = mMaximumValue;
+                v.setText(Integer.toString(value));
+            }
+            textChanged(value);
+        } else {
+            sliderChanged(mMinimumValue + mValueSeekBar.getProgress(), false);
+        }
+    }
+
+    @Override
+    public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
+        if (fromUser) {
+            sliderChanged(mMinimumValue + progress, false);
+        }
+    }
+
+    @Override
+    public void onStartTrackingTouch(SeekBar seekBar) {
+
+    }
+
+    @Override
+    public void onStopTrackingTouch(SeekBar seekBar) {
+        sliderChanged(mMinimumValue + seekBar.getProgress(), true);
+    }
+
+    @Override
+    public void setEnabled(boolean enabled) {
+        mValueEditText.setEnabled(enabled);
+        mValueSeekBar.setEnabled(enabled);
+    }
+}
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/SineWaveTone.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/SineWaveTone.java
new file mode 100644
index 0000000..186d847
--- /dev/null
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/SineWaveTone.java
@@ -0,0 +1,87 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.drrickorang.loopback;
+
+
+/**
+ * This class generates a sine wave with a given frequency and sampling rate.
+ * It keeps the current phase in the member variable "mPhase", so repeated calls continue
+ * to generate successive sections of the same sine wave.
+ */
+
+public class SineWaveTone extends ToneGeneration {
+    private int          mCount; // counts the total samples produced.
+    private double       mPhase; // current phase
+    private final double mPhaseIncrement; // phase incrementation associated with mFrequency
+
+
+    public SineWaveTone(int samplingRate, double frequency) {
+        super(samplingRate);
+        mCount = 0;
+        mPhaseIncrement = Constant.TWO_PI * (frequency / mSamplingRate); // should < 2pi
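+        // e.g. a 1 kHz tone at a 48 kHz sampling rate advances the phase by
+        // 2 * pi * 1000 / 48000, roughly 0.131 radians per sample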
+        mAmplitude = Constant.SINE_WAVE_AMPLITUDE;
+    }
+
+
+    @Override
+    public void generateTone(short[] tone, int size) {
+        for (int i = 0; i < size; i++) {
+            short value1 = (short) (mAmplitude * Math.sin(mPhase) * Short.MAX_VALUE);
+            tone[i] = value1;
+
+            mPhase += mPhaseIncrement;
+            // if glitch insertion is enabled, skip one extra phase increment once per second,
+            // producing a deliberate discontinuity in the tone
+            if (mIsGlitchEnabled && (mCount % mSamplingRate == 0)) {
+                mPhase += mPhaseIncrement;
+            }
+
+            mCount++;
+
+            if (mPhase >= Constant.TWO_PI) {
+                mPhase -= Constant.TWO_PI;
+            }
+        }
+    }
+
+
+    @Override
+    public void generateTone(double[] tone, int size) {
+        for (int i = 0; i < size; i++) {
+            double value1 = mAmplitude * Math.sin(mPhase);
+            tone[i] = value1;
+
+            mPhase += mPhaseIncrement;
+            // if glitch insertion is enabled, skip one extra phase increment once per second,
+            // producing a deliberate discontinuity in the tone
+            if (mIsGlitchEnabled && (mCount % mSamplingRate == 0)) {
+                mPhase += mPhaseIncrement;
+            }
+
+            mCount++;
+
+            if (mPhase >= Constant.TWO_PI) {
+                mPhase -= Constant.TWO_PI;
+            }
+        }
+    }
+
+
+    @Override
+    public void resetPhases() {
+        mPhase = 0;
+    }
+
+}
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/SoundLevelCalibration.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/SoundLevelCalibration.java
new file mode 100644
index 0000000..ed70a09
--- /dev/null
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/SoundLevelCalibration.java
@@ -0,0 +1,128 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.drrickorang.loopback;
+
+import android.content.Context;
+import android.media.AudioManager;
+import android.os.Handler;
+import android.os.Looper;
+import android.util.Log;
+
+class SoundLevelCalibration {
+    private static final int SECONDS_PER_LEVEL = 1;
+    private static final int MAX_STEPS = 15; // The maximum number of levels that should be tried
+    private static final double CRITICAL_RATIO = 0.41; // Ratio of input over output amplitude at
+                                                      // which the feedback loop neither decays nor
+                                                      // grows (determined experimentally)
+    private static final String TAG = "SoundLevelCalibration";
+
+    private NativeAudioThread mNativeAudioThread = null;
+    private AudioManager mAudioManager;
+
+    private SoundLevelChangeListener mChangeListener;
+
+    abstract static class SoundLevelChangeListener {
+        // used to run the callback on the UI thread
+        private Handler handler = new Handler(Looper.getMainLooper());
+
+        abstract void onChange(int newLevel);
+
+        private void go(final int newLevel) {
+            handler.post(new Runnable() {
+                @Override
+                public void run() {
+                    onChange(newLevel);
+                }
+            });
+        }
+    }
+
+    SoundLevelCalibration(int samplingRate, int playerBufferSizeInBytes,
+                          int recorderBufferSizeInBytes, int micSource, int performanceMode,
+                          Context context) {
+
+        // TODO: Allow capturing wave data without doing glitch detection.
+        CaptureHolder captureHolder = new CaptureHolder(0, "", false, false, false, context,
+                samplingRate);
+        // TODO: Run for less than 1 second.
+        mNativeAudioThread = new NativeAudioThread(samplingRate, playerBufferSizeInBytes,
+                recorderBufferSizeInBytes, micSource, performanceMode,
+                Constant.LOOPBACK_PLUG_AUDIO_THREAD_TEST_TYPE_BUFFER_PERIOD, SECONDS_PER_LEVEL,
+                SECONDS_PER_LEVEL, 0, captureHolder);
+        mAudioManager = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
+    }
+
+    // TODO: Allow stopping in the middle of calibration
+    int calibrate() {
+        final int maxLevel = mAudioManager.getStreamMaxVolume(AudioManager.STREAM_MUSIC);
+        int levelBottom = 0;
+        int levelTop = maxLevel + 1;
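+        // Binary search over volume levels: levelBottom always marks a level at which the
+        // loopback signal decays, while levelTop marks one at which it does not (or is out of
+        // range), so the loop converges on the loudest level that still decays.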
+        while (levelTop - levelBottom > 1) {
+            int level = (levelBottom + levelTop) / 2;
+            Log.d(TAG, "setting level to " + level);
+            setVolume(level);
+
+            double amplitude = runAudioThread(mNativeAudioThread);
+            mNativeAudioThread = new NativeAudioThread(mNativeAudioThread); // generate fresh thread
+            Log.d(TAG, "calibrate: at sound level " + level + " volume was " + amplitude);
+
+            if (amplitude < Constant.SINE_WAVE_AMPLITUDE * CRITICAL_RATIO) {
+                levelBottom = level;
+            } else {
+                levelTop = level;
+            }
+        }
+        // At this point, levelBottom has the highest proper value, if there is one (0 otherwise)
+        Log.d(TAG, "Final level: " + levelBottom);
+        setVolume(levelBottom);
+        return levelBottom;
+    }
+
+    private double runAudioThread(NativeAudioThread thread) {
+        // runs the native audio thread and returns the average amplitude
+        thread.start();
+        try {
+            thread.join();
+        } catch (InterruptedException e) {
+            e.printStackTrace();
+        }
+        double[] data = thread.getWaveData();
+        return averageAmplitude(data);
+    }
+
+    // TODO: Only gives accurate results for an undistorted sine wave. Check for distortion.
+    private static double averageAmplitude(double[] data) {
+        if (data == null || data.length == 0) {
+            return 0; // no data is present
+        }
+        double sumSquare = 0;
+        for (double x : data) {
+            sumSquare += x * x;
+        }
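+        // the RMS of a sine wave of amplitude A is A / sqrt(2), so the amplitude is recovered
+        // as sqrt(2 * meanSquare) = sqrt(2 * sumSquare / N)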
+        return Math.sqrt(2.0 * sumSquare / data.length); // amplitude of the sine wave
+    }
+
+    private void setVolume(int level) {
+        mAudioManager.setStreamVolume(AudioManager.STREAM_MUSIC, level, 0);
+        if (mChangeListener != null) {
+            mChangeListener.go(level);
+        }
+    }
+
+    void setChangeListener(SoundLevelChangeListener changeListener) {
+        mChangeListener = changeListener;
+    }
+}
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/ToneGeneration.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/ToneGeneration.java
new file mode 100644
index 0000000..176c7c1
--- /dev/null
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/ToneGeneration.java
@@ -0,0 +1,63 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.drrickorang.loopback;
+
+
+/**
+ * This class is used to generate different kinds of tones.
+ */
+
+public abstract class ToneGeneration {
+    protected int     mSamplingRate;
+    protected double  mAmplitude;  // this value should be from 0 to 1.0
+    protected boolean mIsGlitchEnabled = false; // indicates whether glitches are being inserted
+
+
+    public ToneGeneration(int samplingRate) {
+        mSamplingRate = samplingRate;
+    }
+
+
+    /** Store samples into "tone". Sample values range from -32768 to 32767. */
+    public abstract void generateTone(short[] tone, int size);
+
+
+    /**
+     * Store samples into "tone". Sample values range from -1.0 to 1.0.
+     * This method is not meant to create tones that are passed to AudioTrack.write(),
+     * since that method does not accept double arrays.
+     */
+    public abstract void generateTone(double[] tone, int size);
+
+
+    /** Reset all the phases to zero. */
+    public abstract void resetPhases();
+
+
+    /**
+     * Set the value of mIsGlitchEnabled. When it is true, glitches are inserted into
+     * the generated tone.
+     */
+    public void setGlitchEnabled(boolean isGlitchEnabled) {
+        mIsGlitchEnabled = isGlitchEnabled;
+    }
+
+    public void setAmplitude(double amplitude) {
+        mAmplitude = amplitude;
+    }
+
+}
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/TwoSineWavesTone.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/TwoSineWavesTone.java
new file mode 100644
index 0000000..27083cf
--- /dev/null
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/TwoSineWavesTone.java
@@ -0,0 +1,109 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.drrickorang.loopback;
+
+
+/**
+ * This class generates a mix of two sine waves with frequency1, frequency2, and samplingRate.
+ * It keeps two member variables, "mPhase1" and "mPhase2", so that as it is called repeatedly
+ * it continues to generate the next section of the waveform.
+ */
+
+public class TwoSineWavesTone extends ToneGeneration {
+    private int          mCount; // counts the total samples produced.
+    private double       mPhase1; // current phase associated with mFrequency1
+    private double       mPhase2; // current phase associated with mFrequency2
+    private final double mPhaseIncrement1; // phase increment associated with mFrequency1
+    private final double mPhaseIncrement2; // phase increment associated with mFrequency2
+
+
+    /**
+     * Currently, this class is never used, but it can be used in the future to create a different
+     * kind of wave when running the test.
+     */
+    public TwoSineWavesTone(int samplingRate, double frequency1, double frequency2) {
+        super(samplingRate);
+        mCount = 0;
+        mPhaseIncrement1 = Constant.TWO_PI * (frequency1 / mSamplingRate); // should be < 2 * pi
+        mPhaseIncrement2 = Constant.TWO_PI * (frequency2 / mSamplingRate); // should be < 2 * pi
+        mAmplitude = Constant.TWO_SINE_WAVES_AMPLITUDE;
+    }
+
+
+    @Override
+    public void generateTone(short[] tone, int size) {
+        for (int i = 0; i < size; i++) {
+            short value1 = (short) (mAmplitude * Math.sin(mPhase1) * Short.MAX_VALUE);
+            short value2 = (short) (mAmplitude * Math.sin(mPhase2) * Short.MAX_VALUE);
+            tone[i] = (short) (value1 + value2);
+
+            mPhase1 += mPhaseIncrement1;
+            mPhase2 += mPhaseIncrement2;
+
+            // insert a glitch once per second if mIsGlitchEnabled is true
+            if (mIsGlitchEnabled && (mCount % mSamplingRate == 0)) {
+                mPhase1 += mPhaseIncrement1;
+                mPhase2 += mPhaseIncrement2;
+            }
+
+            mCount++;
+
+            if (mPhase1 > Constant.TWO_PI) {
+                mPhase1 -= Constant.TWO_PI;
+            }
+            if (mPhase2 > Constant.TWO_PI) {
+                mPhase2 -= Constant.TWO_PI;
+            }
+
+        }
+    }
+
+
+    @Override
+    public void generateTone(double[] tone, int size) {
+        for (int i = 0; i < size; i++) {
+            double value1 = mAmplitude * Math.sin(mPhase1);
+            double value2 = mAmplitude * Math.sin(mPhase2);
+            tone[i] = value1 + value2;
+
+            mPhase1 += mPhaseIncrement1;
+            mPhase2 += mPhaseIncrement2;
+            // insert a glitch once per second if mIsGlitchEnabled is true
+            if (mIsGlitchEnabled && (mCount % mSamplingRate == 0)) {
+                mPhase1 += mPhaseIncrement1;
+                mPhase2 += mPhaseIncrement2;
+            }
+
+            mCount++;
+
+            if (mPhase1 > Constant.TWO_PI) {
+                mPhase1 -= Constant.TWO_PI;
+            }
+            if (mPhase2 > Constant.TWO_PI) {
+                mPhase2 -= Constant.TWO_PI;
+            }
+
+        }
+    }
+
+
+    @Override
+    public void resetPhases() {
+        mPhase1 = 0;
+        mPhase2 = 0;
+    }
+}
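Because the phase state lives in member variables, consecutive calls to generateTone() produce one continuous waveform; the caller simply keeps handing it buffers. A hypothetical usage sketch (the buffer size and frequencies are illustrative, and the short[] overload is the one intended for playback):

TwoSineWavesTone tone = new TwoSineWavesTone(48000, 440.0, 554.0);
short[] buffer = new short[960]; // 20 ms at 48 kHz
for (int block = 0; block < 50; block++) {
    tone.generateTone(buffer, buffer.length); // fills the next contiguous 20 ms of the mix
    // hand 'buffer' to the audio output, e.g. AudioTrack.write(buffer, 0, buffer.length)
}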
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/Utilities.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/Utilities.java
new file mode 100644
index 0000000..15928bf
--- /dev/null
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/Utilities.java
@@ -0,0 +1,58 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.drrickorang.loopback;
+
+
+/**
+ * This class contains functions that can be reused in different classes.
+ */
+
+public class Utilities {
+
+
+    /** Multiply the input array by a Hanning window. */
+    public static void hanningWindow(double[] samples) {
+        int length = samples.length;
+        final double alpha = 0.5;
+        final double beta = 0.5;
+        double coefficient;
+        for (int i = 0; i < length; i++) {
+            coefficient = (Constant.TWO_PI * i) / (length - 1);
+            samples[i] *= alpha - beta * Math.cos(coefficient);
+        }
+
+    }
+
+
+    /** Round up to the nearest power of 2. */
+    public static int roundup(int size) {
+        // Integer.numberOfLeadingZeros() returns 32 for zero input
+        if (size == 0) {
+            size = 1;
+        }
+
+        int lz = Integer.numberOfLeadingZeros(size);
+        int rounded = 0x80000000 >>> lz;
+        // 0x80000001 and higher are actually rounded _down_ to prevent overflow
+        if (size > rounded && lz > 0) {
+            rounded <<= 1;
+        }
+        return rounded;
+    }
+
+}
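A hypothetical sketch of how these two helpers are typically combined before a spectral-analysis step (the FFT itself is out of scope here; the sample values are placeholders):

import java.util.Arrays;

class SpectrumPrep {
    public static void main(String[] args) {
        double[] samples = new double[3000];               // pretend these came from a recording
        Arrays.fill(samples, 0.5);
        Utilities.hanningWindow(samples);                  // taper the edges to reduce spectral leakage
        int fftSize = Utilities.roundup(samples.length);   // 3000 rounds up to 4096
        double[] padded = Arrays.copyOf(samples, fftSize); // zero-pad to a power-of-two length
        System.out.println("FFT size: " + fftSize);
    }
}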
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/WaveDataRingBuffer.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/WaveDataRingBuffer.java
new file mode 100644
index 0000000..ee47238
--- /dev/null
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/WaveDataRingBuffer.java
@@ -0,0 +1,173 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.drrickorang.loopback;
+
+import java.util.Arrays;
+
+/**
+ * Maintains two ring buffers for recording wave data.
+ * At any one time one buffer is available for writing to file while the other records incoming data.
+ */
+public class WaveDataRingBuffer {
+
+    public interface ReadableWaveDeck {
+        boolean writeToFile(AudioFileOutput audioFile);
+    }
+
+    private WaveDeck mLoadedDeck;
+    private WaveDeck mShelvedDeck;
+
+    public WaveDataRingBuffer(int size) {
+        if (size < Constant.SAMPLING_RATE_MIN * Constant.BUFFER_TEST_DURATION_SECONDS_MIN) {
+            size = Constant.SAMPLING_RATE_MIN * Constant.BUFFER_TEST_DURATION_SECONDS_MIN;
+        } else if (size > Constant.SAMPLING_RATE_MAX * Constant.BUFFER_TEST_DURATION_SECONDS_MAX) {
+            size = Constant.SAMPLING_RATE_MAX * Constant.BUFFER_TEST_DURATION_SECONDS_MAX;
+        }
+        mLoadedDeck = new WaveDeck(size);
+        mShelvedDeck = new WaveDeck(size);
+    }
+
+    public synchronized void writeWaveData(double[] data, int srcPos, int length) {
+        mLoadedDeck.writeWaveData(data, srcPos, length);
+    }
+
+    public synchronized double[] getWaveRecord() {
+        return mLoadedDeck.getWaveRecord();
+    }
+
+    private void swapDecks() {
+        WaveDeck temp = mShelvedDeck;
+        mShelvedDeck = mLoadedDeck;
+        mLoadedDeck = temp;
+    }
+
+    /**
+     * Swaps the decks and returns the deck that was just recording, as a ReadableWaveDeck that can
+     * be written to file; the other deck is reset and reloaded for recording.
+     * Returns null if the shelved deck is still being read.
+     **/
+    public synchronized ReadableWaveDeck getWaveDeck() {
+        if (!mShelvedDeck.isBeingRead()) {
+            swapDecks();
+            mShelvedDeck.readyForRead();
+            mLoadedDeck.reset();
+            return mShelvedDeck;
+        } else {
+            return null;
+        }
+    }
+
+    /**
+     * Maintains a recording of wave data of last n seconds
+     */
+    public class WaveDeck implements ReadableWaveDeck {
+
+        private double[] mWaveRecord;
+        private volatile int mIndex = 0; // between 0 and mWaveRecord.length - 1
+        private boolean mArrayFull = false; // true after mIndex has wrapped
+        private boolean mIsBeingRead = false;
+
+        public WaveDeck(int size) {
+            mWaveRecord = new double[size];
+        }
+
+        /**
+         * Write 'length' doubles from 'data', starting at 'srcPos', into the ring buffer.
+         */
+        public void writeWaveData(double[] data, int srcPos, int length) {
+            if (length > data.length - srcPos) {
+                // requested to write more data than is available in the source array;
+                // ignore the bad request and leave the ring buffer unaffected
+                return;
+            }
+
+            if (length >= mWaveRecord.length) {
+                // requested write would fill or exceed ring buffer capacity
+                // fill ring buffer with last segment of requested write
+                System.arraycopy(data, srcPos + (length - mWaveRecord.length), mWaveRecord, 0,
+                        mWaveRecord.length);
+                mIndex = 0;
+            } else if (mWaveRecord.length - mIndex > length) {
+                // write requested data from current offset
+                System.arraycopy(data, srcPos, mWaveRecord, mIndex, length);
+                mIndex += length;
+            } else {
+                // write to available buffer then wrap and overwrite previous records
+                if (!mArrayFull) {
+                    mArrayFull = true;
+                }
+
+                int availBuff = mWaveRecord.length - mIndex;
+
+                System.arraycopy(data, srcPos, mWaveRecord, mIndex, availBuff);
+                System.arraycopy(data, srcPos + availBuff, mWaveRecord, 0, length - availBuff);
+
+                mIndex = length - availBuff;
+
+            }
+
+        }
+
+        /**
+         * Returns a private copy of recorded wave data
+         *
+         * @return double array of wave recording, rearranged with oldest sample at first index
+         */
+        public double[] getWaveRecord() {
+            double[] outputBuffer = new double[mWaveRecord.length];
+
+            if (!mArrayFull) {
+                // return the partially filled record with trailing zeros
+                System.arraycopy(mWaveRecord, 0, outputBuffer, 0, mIndex);
+                Arrays.fill(outputBuffer, mIndex, outputBuffer.length, 0);
+            } else {
+                //copy buffer to contiguous sample and return unwrapped array
+                System.arraycopy(mWaveRecord, mIndex, outputBuffer, 0, mWaveRecord.length - mIndex);
+                System.arraycopy(mWaveRecord, 0, outputBuffer, mWaveRecord.length - mIndex, mIndex);
+            }
+
+            return outputBuffer;
+        }
+
+        /** Make buffer available for new recording **/
+        public void reset() {
+            mIndex = 0;
+            mArrayFull = false;
+        }
+
+        public boolean isBeingRead() {
+            return mIsBeingRead;
+        }
+
+        private void readyForRead() {
+            mIsBeingRead = true;
+        }
+
+        @Override
+        public boolean writeToFile(AudioFileOutput audioFile) {
+            boolean successfulWrite;
+            if (mArrayFull) {
+                successfulWrite = audioFile.writeRingBufferData(mWaveRecord, mIndex, mIndex);
+            } else {
+                // Write only filled part of array to file
+                successfulWrite = audioFile.writeRingBufferData(mWaveRecord, 0, mIndex);
+            }
+
+            mIsBeingRead = false;
+            return successfulWrite;
+        }
+    }
+}
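The intended handoff is that the recording thread keeps appending with writeWaveData() while, on demand, another thread swaps the decks with getWaveDeck() and writes the shelved deck to disk; a null return means the previous dump has not finished. A hypothetical single-threaded sketch of that flow (the sizes are illustrative, and the AudioFileOutput instance is assumed to be available from elsewhere in the app):

WaveDataRingBuffer ring = new WaveDataRingBuffer(48000 * 10); // roughly the last 10 s at 48 kHz

// recording side: append each incoming block as it arrives
double[] block = new double[960];
for (int i = 0; i < 500; i++) {
    ring.writeWaveData(block, 0, block.length);
}

// capture side: swap decks and write the shelved deck out
WaveDataRingBuffer.ReadableWaveDeck deck = ring.getWaveDeck();
if (deck != null) {
    // deck.writeToFile(audioFileOutput); // AudioFileOutput from this change, instance assumed
}
// a null deck would mean the previously shelved deck is still being read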
diff --git a/LoopbackApp/app/src/main/java/org/drrickorang/loopback/WavePlotView.java b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/WavePlotView.java
new file mode 100644
index 0000000..71b31c5
--- /dev/null
+++ b/LoopbackApp/app/src/main/java/org/drrickorang/loopback/WavePlotView.java
@@ -0,0 +1,632 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.drrickorang.loopback;
+
+import java.util.Arrays;
+
+import android.content.Context;
+import android.graphics.Canvas;
+import android.graphics.Paint;
+import android.graphics.Path;
+import android.graphics.Paint.Style;
+import android.os.Vibrator;
+import android.util.AttributeSet;
+import android.util.Log;
+import android.view.GestureDetector;
+import android.view.MotionEvent;
+import android.view.ScaleGestureDetector;
+import android.view.View;
+import android.view.animation.LinearInterpolator;
+import android.widget.Scroller;
+
+
+/**
+ * This view is the wave plot shown on the main activity.
+ */
+
+public class WavePlotView extends View  {
+    private static final String TAG = "WavePlotView";
+
+    private double [] mBigDataArray;
+    private double [] mValuesArray;  //top points to plot
+    private double [] mValuesArray2; //bottom
+
+    private double [] mInsetArray;
+    private double [] mInsetArray2;
+    private int       mInsetSize = 20;
+
+    private double mZoomFactorX = 1.0; //1:1  1 sample / point .  Note: Point != pixel.
+    private int    mCurrentOffset = 0;
+    private int    mArraySize = 100; //default size
+    private int    mSamplingRate;
+
+    private GestureDetector        mDetector;
+    private ScaleGestureDetector   mSGDetector;
+    private MyScaleGestureListener mSGDListener;
+    private Scroller mScroller;
+
+    private int mWidth;
+    private int mHeight;
+    private boolean mHasDimensions;
+
+    private Paint mMyPaint;
+    private Paint mPaintZoomBox;
+    private Paint mPaintInsetBackground;
+    private Paint mPaintInsetBorder;
+    private Paint mPaintInset;
+    private Paint mPaintGrid;
+    private Paint mPaintGridText;
+
+    // Default values used when we don't have a valid waveform to display.
+    // This saves us having to add multiple special cases to handle null waveforms.
+    private int mDefaultSampleRate = 48000; // chosen because it is common in real world devices
+    private double[] mDefaultDataVector = new double[mDefaultSampleRate]; // 1 second of fake audio
+
+    public WavePlotView(Context context, AttributeSet attrs) {
+        super(context, attrs);
+        mSGDListener = new MyScaleGestureListener();
+        mDetector = new GestureDetector(context, new MyGestureListener());
+        mSGDetector = new ScaleGestureDetector(context, mSGDListener);
+        mScroller = new Scroller(context, new LinearInterpolator(), true);
+        initPaints();
+
+        // Initialize the value array to 1s silence
+        mSamplingRate = mDefaultSampleRate;
+        mBigDataArray = new double[mSamplingRate];
+        Arrays.fill(mDefaultDataVector, 0);
+    }
+
+
+    /** Initiate all the Paint objects. */
+    private void initPaints() {
+        final int COLOR_WAVE = 0xFF1E4A99;
+        final int COLOR_ZOOM_BOX = 0X50E0E619;
+        final int COLOR_INSET_BACKGROUND = 0xFFFFFFFF;
+        final int COLOR_INSET_BORDER = 0xFF002260;
+        final int COLOR_INSET_WAVE = 0xFF910000;
+        final int COLOR_GRID = 0x7F002260;
+        final int COLOR_GRID_TEXT = 0xFF002260;
+
+        mMyPaint = new Paint();
+        mMyPaint.setColor(COLOR_WAVE);
+        mMyPaint.setAntiAlias(true);
+        mMyPaint.setStyle(Style.FILL_AND_STROKE);
+        mMyPaint.setStrokeWidth(1);
+
+        mPaintZoomBox = new Paint();
+        mPaintZoomBox.setColor(COLOR_ZOOM_BOX);
+        mPaintZoomBox.setAntiAlias(true);
+        mPaintZoomBox.setStyle(Style.FILL);
+
+        mPaintInsetBackground = new Paint();
+        mPaintInsetBackground.setColor(COLOR_INSET_BACKGROUND);
+        mPaintInsetBackground.setAntiAlias(true);
+        mPaintInsetBackground.setStyle(Style.FILL);
+
+        mPaintInsetBorder = new Paint();
+        mPaintInsetBorder.setColor(COLOR_INSET_BORDER);
+        mPaintInsetBorder.setAntiAlias(true);
+        mPaintInsetBorder.setStyle(Style.STROKE);
+        mPaintInsetBorder.setStrokeWidth(1);
+
+        mPaintInset = new Paint();
+        mPaintInset.setColor(COLOR_INSET_WAVE);
+        mPaintInset.setAntiAlias(true);
+        mPaintInset.setStyle(Style.FILL_AND_STROKE);
+        mPaintInset.setStrokeWidth(1);
+
+        final int textSize = 25;
+        mPaintGrid = new Paint(Paint.ANTI_ALIAS_FLAG);
+        mPaintGrid.setColor(COLOR_GRID); //gray
+        mPaintGrid.setTextSize(textSize);
+
+        mPaintGridText = new Paint(Paint.ANTI_ALIAS_FLAG);
+        mPaintGridText.setColor(COLOR_GRID_TEXT);
+        mPaintGridText.setTextSize(textSize);
+    }
+
+    public double getZoom() {
+        return mZoomFactorX;
+    }
+
+
+    /** Return the maximum zoom-out value (usually > 1.0). */
+    public double getMaxZoomOut() {
+        double maxZoom = 1.0;
+
+        if (mBigDataArray != null) {
+            int n = mBigDataArray.length;
+            maxZoom = ((double) n) / mArraySize;
+        }
+
+        return maxZoom;
+    }
+
+
+    public double getMinZoomOut() {
+        double minZoom = 1.0;
+        return minZoom;
+    }
+
+
+    public int getOffset() {
+        return mCurrentOffset;
+    }
+
+
+    public void setZoom(double zoom) {
+        double newZoom = zoom;
+        double maxZoom = getMaxZoomOut();
+        double minZoom = getMinZoomOut();
+
+        // clamp the requested zoom to the valid range:
+        if (newZoom < minZoom)
+            newZoom = minZoom;
+
+        if (newZoom > maxZoom)
+            newZoom = maxZoom;
+
+        mZoomFactorX = newZoom;
+        //fix offset if this is the case
+        setOffset(0, true); //just touch offset in case it needs to be fixed.
+    }
+
+
+    public void setOffset(int sampleOffset, boolean relative) {
+        int newOffset = sampleOffset;
+
+        if (relative) {
+            newOffset = mCurrentOffset + sampleOffset;
+        }
+
+        if (mBigDataArray != null) {
+            int n = mBigDataArray.length;
+            //update offset if last sample is more than expected
+            int lastSample = newOffset + (int)getWindowSamples();
+            if (lastSample >= n) {
+                int delta = lastSample - n;
+                newOffset -= delta;
+            }
+
+            if (newOffset < 0)
+                newOffset = 0;
+
+            if (newOffset >= n)
+                newOffset = n - 1;
+
+            mCurrentOffset = newOffset;
+        }
+    }
+
+
+    public double getWindowSamples() {
+        //samples in current window
+        double samples = 0;
+        if (mBigDataArray != null) {
+            double zoomFactor = getZoom();
+            samples = mArraySize * zoomFactor;
+        }
+
+        return samples;
+    }
+
+
+    public void refreshGraph() {
+        computeViewArray(mZoomFactorX, mCurrentOffset);
+    }
+
+
+    @Override
+    protected void onSizeChanged(int w, int h, int oldw, int oldh) {
+        mWidth = w;
+        mHeight = h;
+        log("New w: " + mWidth + " h: " + mHeight);
+        mHasDimensions = true;
+        initView();
+        refreshView();
+    }
+
+
+    private void initView() {
+        //re init graphical elements
+        mArraySize = mWidth;
+        mInsetSize = mWidth / 5;
+        mValuesArray = new double[mArraySize];
+        mValuesArray2 = new double[mArraySize];
+        Arrays.fill(mValuesArray, 0);
+        Arrays.fill(mValuesArray2, 0);
+
+        //inset
+        mInsetArray = new double[mInsetSize];
+        mInsetArray2 = new double[mInsetSize];
+        Arrays.fill(mInsetArray, (double) 0);
+        Arrays.fill(mInsetArray2, (double) 0);
+    }
+
+
+    @Override
+    protected void onDraw(Canvas canvas) {
+        super.onDraw(canvas);
+        boolean showGrid = true;
+        boolean showInset = true;
+
+        int i;
+        int w = getWidth();
+        int h = getHeight();
+
+        double valueMax = 1.0;
+        double valueMin = -1.0;
+        double valueRange = valueMax - valueMin;
+
+        //print gridline time in ms/seconds, etc.
+        if (showGrid) {
+            //current number of samples in display
+            double samples = getWindowSamples();
+            if (samples > 0.0 && mSamplingRate > 0) {
+                double windowMs = (1000.0 * samples) / mSamplingRate;
+
+                //decide the best units: ms, 10ms, 100ms, 1 sec, 2 sec
+                double msPerDivision = windowMs / 10;
+                log(" windowMS: " + windowMs + " msPerdivision: " + msPerDivision);
+
+                int divisionInMS = 1;
+                //find the best level for markings:
+                if (msPerDivision <= 5) {
+                    divisionInMS = 1;
+                } else if (msPerDivision < 15) {
+                    divisionInMS = 10;
+                } else if (msPerDivision < 30) {
+                    divisionInMS = 20;
+                } else if (msPerDivision < 60) {
+                    divisionInMS = 40;
+                } else if (msPerDivision < 150) {
+                    divisionInMS = 100;
+                } else if (msPerDivision < 400) {
+                    divisionInMS = 200;
+                } else if (msPerDivision < 750) {
+                    divisionInMS = 500;
+                } else {
+                    divisionInMS = 1000;
+                }
+                log(" chosen Division in MS: " + divisionInMS);
+
+                //current offset in samples
+                int currentOffsetSamples = getOffset();
+                double currentOffsetMs = (1000.0 * currentOffsetSamples) / mSamplingRate;
+                int gridCount = (int) ((currentOffsetMs + divisionInMS) / divisionInMS);
+                double startGridCountFrac = ((currentOffsetMs) % divisionInMS);
+                log(" gridCount:" + gridCount + " fraction: " + startGridCountFrac +
+                    "  firstDivision: " + gridCount * divisionInMS);
+
+                double currentGridMs = divisionInMS - startGridCountFrac; //in mS
+                while (currentGridMs <= windowMs) {
+                    float newX = (float) (w * currentGridMs / windowMs);
+                    canvas.drawLine(newX, 0, newX, h, mPaintGrid);
+
+                    double currentGridValueMS = gridCount * divisionInMS;
+                    String label = String.format("%.0f ms", (float) currentGridValueMS);
+
+                    //path
+                    Path myPath = new Path();
+                    myPath.moveTo(newX, h);
+                    myPath.lineTo(newX, h / 2);
+
+                    canvas.drawTextOnPath(label, myPath, 10, -3, mPaintGridText);
+
+                    //advance
+                    currentGridMs += divisionInMS;
+                    gridCount++;
+                }
+
+                //horizontal line
+                canvas.drawLine(0, h / 2, w, h / 2, mPaintGrid);
+            }
+        }
+
+        float deltaX = (float) w / mArraySize;
+
+        //top
+        Path myPath = new Path();
+        myPath.moveTo(0, h / 2); //start
+
+        if (mBigDataArray != null) {
+            if (getZoom() >= 2) {
+                for (i = 0; i < mArraySize; ++i) {
+                    float top = (float) ((valueMax - mValuesArray[i]) / valueRange) * h;
+                    float bottom = (float) ((valueMax - mValuesArray2[i]) / valueRange) * h + 1;
+                    float left = i * deltaX;
+                    canvas.drawRect(left, top, left + deltaX, bottom, mMyPaint);
+                }
+            } else {
+                for (i = 0; i < (mArraySize - 1); ++i) {
+                    float first = (float) ((valueMax - mValuesArray[i]) / valueRange) * h;
+                    float second = (float) ((valueMax - mValuesArray[i + 1]) / valueRange) * h;
+                    float left = i * deltaX;
+                    canvas.drawLine(left, first, left + deltaX, second, mMyPaint);
+                }
+            }
+
+
+            if (showInset) {
+                float iW = (float) (w * 0.2);
+                float iH = (float) (h * 0.2);
+                float iX = (float) (w * 0.7);
+                float iY = (float) (h * 0.1);
+                //x, y of inset
+                canvas.drawRect(iX, iY, iX + iW, iY + iH, mPaintInsetBackground);
+                canvas.drawRect(iX - 1, iY - 1, iX + iW + 2, iY + iH + 2, mPaintInsetBorder);
+                //paintInset
+                float iDeltaX = (float) iW / mInsetSize;
+
+                for (i = 0; i < mInsetSize; ++i) {
+                    float top = iY + (float) ((valueMax - mInsetArray[i]) / valueRange) * iH;
+                    float bottom = iY +
+                            (float) ((valueMax - mInsetArray2[i]) / valueRange) * iH + 1;
+                    float left = iX + i * iDeltaX;
+                    canvas.drawRect(left, top, left + deltaX, bottom, mPaintInset);
+                }
+
+                if (mBigDataArray != null) {
+                    //paint current region of zoom
+                    int offsetSamples = getOffset();
+                    double windowSamples = getWindowSamples();
+                    int samples = mBigDataArray.length;
+
+                    if (samples > 0) {
+                        float x1 = (float) (iW * offsetSamples / samples);
+                        float x2 = (float) (iW * (offsetSamples + windowSamples) / samples);
+
+                        canvas.drawRect(iX + x1, iY, iX + x2, iY + iH, mPaintZoomBox);
+                    }
+                }
+            }
+        }
+        if (mScroller.computeScrollOffset()) {
+            setOffset(mScroller.getCurrX(), false);
+            refreshGraph();
+        }
+    }
+
+
+    void resetArray() {
+        Arrays.fill(mValuesArray, 0);
+        Arrays.fill(mValuesArray2, 0);
+    }
+
+    void refreshView() {
+        double maxZoom = getMaxZoomOut();
+        setZoom(maxZoom);
+        setOffset(0, false);
+        computeInset();
+        refreshGraph();
+    }
+
+    void computeInset() {
+        if (mBigDataArray != null) {
+            int sampleCount = mBigDataArray.length;
+            double pointsPerSample = (double) mInsetSize / sampleCount;
+
+            Arrays.fill(mInsetArray, 0);
+            Arrays.fill(mInsetArray2, 0);
+
+            double currentIndex = 0; //points.
+            double max = -1.0;
+            double min = 1.0;
+            double maxAbs = 0.0;
+            int index = 0;
+
+            for (int i = 0; i < sampleCount; i++) {
+                double value = mBigDataArray[i];
+                if (value > max) {
+                    max = value;
+                }
+
+                if (value < min) {
+                    min = value;
+                }
+
+                int prevIndexInt = (int) currentIndex;
+                currentIndex += pointsPerSample;
+                if ((int) currentIndex > prevIndexInt) { //it switched, time to decide
+                    mInsetArray[index] = max;
+                    mInsetArray2[index] = min;
+
+                    if (Math.abs(max) > maxAbs) maxAbs = Math.abs(max);
+                    if (Math.abs(min) > maxAbs) maxAbs = Math.abs(min);
+
+                    max = -1.0;
+                    min = 1.0;
+                    index++;
+                }
+
+                if (index >= mInsetSize)
+                    break;
+            }
+
+            //now, normalize
+            if (maxAbs > 0) {
+                for (int i = 0; i < mInsetSize; i++) {
+                    mInsetArray[i] /= maxAbs;
+                    mInsetArray2[i] /= maxAbs;
+
+                }
+            }
+
+        }
+    }
+
+
+    void computeViewArray(double zoomFactorX, int sampleOffset) {
+        //zoom factor: how many samples per point. 1.0 = 1.0 samples per point
+        // sample offset in samples.
+        if (zoomFactorX < 1.0)
+            zoomFactorX = 1.0;
+
+        if (mBigDataArray != null) {
+            int sampleCount = mBigDataArray.length;
+            double samplesPerPoint = zoomFactorX;
+            double pointsPerSample = 1.0 / samplesPerPoint;
+
+            resetArray();
+
+            double currentIndex = 0; //points.
+            double max = -1.0;
+            double min = 1.0;
+            int index = 0;
+
+            for (int i = sampleOffset; i < sampleCount; i++) {
+
+                double value = mBigDataArray[i];
+                if (value > max) {
+                    max = value;
+                }
+
+                if (value < min) {
+                    min = value;
+                }
+
+                int prevIndexInt = (int) currentIndex;
+                currentIndex += pointsPerSample;
+                if ((int) currentIndex > prevIndexInt) { //it switched, time to decide
+                    mValuesArray[index] = max;
+                    mValuesArray2[index] = min;
+
+                    max = -1.0;
+                    min = 1.0;
+                    index++;
+                }
+
+                if (index >= mArraySize)
+                    break;
+            }
+        } //big data array not null
+
+        redraw();
+    }
+
+
+    void setData(double[] dataVector, int sampleRate) {
+        if (sampleRate < 1)
+            throw new IllegalArgumentException("sampleRate must be a positive integer");
+
+        mSamplingRate = sampleRate;
+        mBigDataArray = (dataVector != null ? dataVector : mDefaultDataVector);
+
+        if (mHasDimensions) { // only refresh the view if it has been initialized already
+            refreshView();
+        }
+    }
+
+    void redraw() {
+        invalidate();
+    }
+
+    @Override
+    public boolean onTouchEvent(MotionEvent event) {
+        mDetector.onTouchEvent(event);
+        mSGDetector.onTouchEvent(event);
+        //return super.onTouchEvent(event);
+        return true;
+    }
+
+    class MyGestureListener extends GestureDetector.SimpleOnGestureListener {
+        private static final String DEBUG_TAG = "MyGestureListener";
+        private boolean mInDrag = false;
+
+        @Override
+        public boolean onDown(MotionEvent event) {
+            Log.d(DEBUG_TAG, "onDown: " + event.toString() + " " + TAG);
+            if(!mScroller.isFinished()) {
+                mScroller.forceFinished(true);
+                refreshGraph();
+            }
+            return true;
+        }
+
+
+        @Override
+        public boolean onFling(MotionEvent event1, MotionEvent event2,
+                               float velocityX, float velocityY) {
+            Log.d(DEBUG_TAG, "onFling: VelocityX: " + velocityX + "  velocityY:  " + velocityY);
+
+            mScroller.fling(mCurrentOffset, 0,
+                    (int) (-velocityX * getZoom()),
+                    0, 0, mBigDataArray.length, 0, 0);
+            refreshGraph();
+            return true;
+        }
+
+
+        @Override
+        public boolean onScroll(MotionEvent e1, MotionEvent e2, float distanceX, float distanceY) {
+            setOffset((int) (distanceX * getZoom()), true);
+            refreshGraph();
+            return super.onScroll(e1, e2, distanceX, distanceY);
+        }
+
+        @Override
+        public boolean onDoubleTap(MotionEvent event) {
+            Log.d(DEBUG_TAG, "onDoubleTap: " + event.toString());
+
+            int tappedSample = (int) (event.getX() * getZoom());
+            setZoom(getZoom() / 2);
+            setOffset(tappedSample / 2, true);
+
+            refreshGraph();
+            return true;
+        }
+
+        @Override
+        public void onLongPress(MotionEvent e) {
+            Vibrator vibe = (Vibrator) getContext().getSystemService(Context.VIBRATOR_SERVICE);
+            if (vibe.hasVibrator()) {
+                vibe.vibrate(20);
+            }
+            setZoom(getMaxZoomOut());
+            setOffset(0, false);
+            refreshGraph();
+        }
+    }
+
+    private class MyScaleGestureListener extends ScaleGestureDetector.SimpleOnScaleGestureListener {
+        private static final String DEBUG_TAG = "MyScaleGestureListener";
+        int focusSample = 0;
+
+
+        @Override
+        public boolean onScaleBegin(ScaleGestureDetector detector) {
+            focusSample = (int) (detector.getFocusX() * getZoom()) + mCurrentOffset;
+            return super.onScaleBegin(detector);
+        }
+
+        @Override
+        public boolean onScale(ScaleGestureDetector detector) {
+            setZoom(getZoom() / detector.getScaleFactor());
+
+            int newFocusSample = (int) (detector.getFocusX() * getZoom()) + mCurrentOffset;
+            int sampleDelta = (int) (focusSample - newFocusSample);
+            setOffset(sampleDelta, true);
+            refreshGraph();
+            return true;
+        }
+    }
+
+    private static void log(String msg) {
+        Log.v(TAG, msg);
+    }
+
+}
diff --git a/LoopbackApp/app/src/main/jni/Android.mk b/LoopbackApp/app/src/main/jni/Android.mk
new file mode 100644
index 0000000..ef0c829
--- /dev/null
+++ b/LoopbackApp/app/src/main/jni/Android.mk
@@ -0,0 +1,27 @@
+LOCAL_PATH := $(call my-dir)
+
+include $(CLEAR_VARS)
+
+LOCAL_MODULE      := libloopback
+LOCAL_MODULE_TAGS := optional
+LOCAL_SRC_FILES   := \
+	sles.cpp \
+	jni_sles.c \
+	audio_utils/atomic.c \
+	audio_utils/fifo.c \
+	audio_utils/roundup.c
+LOCAL_C_INCLUDES := \
+        frameworks/wilhelm/include
+
+LOCAL_SHARED_LIBRARIES := \
+	libOpenSLES \
+	liblog \
+    libandroid
+
+LOCAL_LDLIBS += -lOpenSLES -llog -landroid
+#LOCAL_PRELINK_MODULE := false
+
+#LOCAL_LDFLAGS += -Wl,--hash-style=sysv
+#LOCAL_CFLAGS := -DSTDC_HEADERS
+
+include $(BUILD_SHARED_LIBRARY)
diff --git a/LoopbackApp/app/src/main/jni/audio_utils/atomic.c b/LoopbackApp/app/src/main/jni/audio_utils/atomic.c
new file mode 100644
index 0000000..b76b1f4
--- /dev/null
+++ b/LoopbackApp/app/src/main/jni/audio_utils/atomic.c
@@ -0,0 +1,40 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "atomic.h"
+
+#include <stdatomic.h>
+#include <stdbool.h>
+
+int32_t android_atomic_acquire_load(volatile const int32_t* addr) {
+    volatile atomic_int_least32_t* a = (volatile atomic_int_least32_t*) addr;
+    return atomic_load_explicit(a, memory_order_acquire);
+}
+
+void android_atomic_release_store(int32_t value, volatile int32_t* addr) {
+    volatile atomic_int_least32_t* a = (volatile atomic_int_least32_t*) addr;
+    atomic_store_explicit(a, value, memory_order_release);
+}
+
+int32_t android_atomic_exchange(int32_t value, volatile const int32_t* addr) {
+    volatile atomic_int_least32_t* a = (volatile atomic_int_least32_t*) addr;
+    return atomic_exchange(a, value);
+}
+
+bool android_atomic_compare_exchange(int32_t* expect, int32_t desire, volatile const int32_t* addr) {
+    volatile atomic_int_least32_t* a = (volatile atomic_int_least32_t*) addr;
+    return atomic_compare_exchange_weak(a, expect, desire);
+}
diff --git a/LoopbackApp/app/src/main/jni/audio_utils/atomic.h b/LoopbackApp/app/src/main/jni/audio_utils/atomic.h
new file mode 100644
index 0000000..164ad17
--- /dev/null
+++ b/LoopbackApp/app/src/main/jni/audio_utils/atomic.h
@@ -0,0 +1,38 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_AUDIO_ATOMIC_H
+#define ANDROID_AUDIO_ATOMIC_H
+
+#include <stdlib.h>
+#include <stdbool.h>
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+int32_t android_atomic_acquire_load(volatile const int32_t* addr);
+void android_atomic_release_store(int32_t value, volatile int32_t* addr);
+
+// FIXME: use standard atomic library instead of these functions
+int32_t android_atomic_exchange(int32_t value, volatile const int32_t* addr);
+bool android_atomic_compare_exchange(int32_t* expect, int32_t desire, volatile const int32_t* addr);
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif // ANDROID_AUDIO_ATOMIC_H
diff --git a/LoopbackApp/app/src/main/jni/audio_utils/fifo.c b/LoopbackApp/app/src/main/jni/audio_utils/fifo.c
new file mode 100644
index 0000000..e00fc28
--- /dev/null
+++ b/LoopbackApp/app/src/main/jni/audio_utils/fifo.c
@@ -0,0 +1,154 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "audio_utils_fifo"
+
+#include <stdlib.h>
+#include <string.h>
+#include "fifo.h"
+#include "roundup.h"
+#include "atomic.h"
+//#include <cutils/log.h>
+#define ALOG_ASSERT(exp)
+
+
+void audio_utils_fifo_init(struct audio_utils_fifo *fifo, size_t frameCount, size_t frameSize,
+        void *buffer) {
+    // We would need a 64-bit roundup to support larger frameCount.
+    ALOG_ASSERT(fifo != NULL && frameCount > 0 && frameSize > 0 && buffer != NULL);
+    fifo->mFrameCount = frameCount;
+    fifo->mFrameCountP2 = roundup(frameCount);
+    fifo->mFudgeFactor = fifo->mFrameCountP2 - fifo->mFrameCount;
+    fifo->mFrameSize = frameSize;
+    fifo->mBuffer = buffer;
+    fifo->mFront = 0;
+    fifo->mRear = 0;
+}
+
+
+void audio_utils_fifo_deinit(struct audio_utils_fifo *fifo __unused)
+{
+}
+
+
+// Return a new index as the sum of an old index (either mFront or mRear) and a specified increment.
+static inline int32_t audio_utils_fifo_sum(struct audio_utils_fifo *fifo, int32_t index,
+        uint32_t increment) {
+    if (fifo->mFudgeFactor) {
+        uint32_t mask = fifo->mFrameCountP2 - 1;
+        ALOG_ASSERT((index & mask) < fifo->mFrameCount);
+        ALOG_ASSERT(/*0 <= increment &&*/ increment <= fifo->mFrameCountP2);
+        if ((index & mask) + increment >= fifo->mFrameCount) {
+            increment += fifo->mFudgeFactor;
+        }
+
+        index += increment;
+        ALOG_ASSERT((index & mask) < fifo->mFrameCount);
+        return index;
+    } else {
+        return index + increment;
+    }
+}
+
+
+// Return the difference between two indices: rear - front, where 0 <= difference <= mFrameCount.
+static inline size_t audio_utils_fifo_diff(struct audio_utils_fifo *fifo, int32_t rear,
+        int32_t front) {
+    int32_t diff = rear - front;
+
+    if (fifo->mFudgeFactor) {
+        uint32_t mask = ~(fifo->mFrameCountP2 - 1);
+        int32_t genDiff = (rear & mask) - (front & mask);
+
+        if (genDiff != 0) {
+            ALOG_ASSERT(genDiff == (int32_t) fifo->mFrameCountP2);
+            diff -= fifo->mFudgeFactor;
+        }
+    }
+
+    // FIFO should not be overfull
+    ALOG_ASSERT(0 <= diff && diff <= (int32_t) fifo->mFrameCount);
+    return (size_t) diff;
+}
+
+
+ssize_t audio_utils_fifo_write(struct audio_utils_fifo *fifo, const void *buffer, size_t count) {
+    int32_t front = android_atomic_acquire_load(&fifo->mFront);
+    int32_t rear = fifo->mRear;
+    size_t availToWrite = fifo->mFrameCount - audio_utils_fifo_diff(fifo, rear, front);
+
+    if (availToWrite > count) {
+        availToWrite = count;
+    }
+
+    rear &= fifo->mFrameCountP2 - 1;
+    size_t part1 = fifo->mFrameCount - rear;
+    if (part1 > availToWrite) {
+        part1 = availToWrite;
+    }
+
+    if (part1 > 0) {
+        memcpy((char *) fifo->mBuffer + (rear * fifo->mFrameSize), buffer,
+                part1 * fifo->mFrameSize);
+        size_t part2 = availToWrite - part1;
+
+        if (part2 > 0) {
+            memcpy(fifo->mBuffer, (char *) buffer + (part1 * fifo->mFrameSize),
+                    part2 * fifo->mFrameSize);
+        }
+
+        android_atomic_release_store(audio_utils_fifo_sum(fifo, fifo->mRear, availToWrite),
+                &fifo->mRear);
+    }
+    return availToWrite;
+}
+
+
+ssize_t audio_utils_fifo_read(struct audio_utils_fifo *fifo, void *buffer, size_t count) {
+    int32_t rear = android_atomic_acquire_load(&fifo->mRear);
+    int32_t front = fifo->mFront;
+    size_t availToRead = audio_utils_fifo_diff(fifo, rear, front);
+    if (availToRead > count) {
+        availToRead = count;
+    }
+
+    front &= fifo->mFrameCountP2 - 1;
+    size_t part1 = fifo->mFrameCount - front;
+    if (part1 > availToRead) {
+        part1 = availToRead;
+    }
+
+    if (part1 > 0) {
+        memcpy(buffer, (char *) fifo->mBuffer + (front * fifo->mFrameSize),
+               part1 * fifo->mFrameSize);
+        size_t part2 = availToRead - part1;
+        if (part2 > 0) {
+            memcpy((char *) buffer + (part1 * fifo->mFrameSize), fifo->mBuffer,
+                   part2 * fifo->mFrameSize);
+        }
+        android_atomic_release_store(audio_utils_fifo_sum(fifo, fifo->mFront, availToRead),
+                                     &fifo->mFront);
+    }
+    return availToRead;
+}
+
+size_t audio_utils_fifo_availToRead(struct audio_utils_fifo *fifo) {
+    int32_t rear = android_atomic_acquire_load(&fifo->mRear);
+    int32_t front = fifo->mFront;
+    size_t availToRead = audio_utils_fifo_diff(fifo, rear, front);
+    return availToRead;
+}
diff --git a/LoopbackApp/app/src/main/jni/audio_utils/fifo.h b/LoopbackApp/app/src/main/jni/audio_utils/fifo.h
new file mode 100644
index 0000000..37a9df8
--- /dev/null
+++ b/LoopbackApp/app/src/main/jni/audio_utils/fifo.h
@@ -0,0 +1,88 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_AUDIO_FIFO_H
+#define ANDROID_AUDIO_FIFO_H
+
+#include <stdlib.h>
+
+// FIXME use atomic_int_least32_t and new atomic operations instead of legacy Android ones
+// #include <stdatomic.h>
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+// Single writer, single reader non-blocking FIFO.
+// Writer and reader must be in same process.
+
+// No user-serviceable parts within.
+struct audio_utils_fifo {
+    // These fields are const after initialization
+    size_t     mFrameCount;   // max number of significant frames to be stored in the FIFO > 0
+    size_t     mFrameCountP2; // roundup(mFrameCount)
+    size_t     mFudgeFactor;  // mFrameCountP2 - mFrameCount, the number of "wasted" frames after
+                              // the end of mBuffer.  Only the indices are wasted, not any memory.
+    size_t     mFrameSize;    // size of each frame in bytes
+    void      *mBuffer;       // pointer to caller-allocated buffer of size mFrameCount frames
+
+    volatile int32_t mFront; // frame index of first frame slot available to read, or read index
+    volatile int32_t mRear;  // frame index of next frame slot available to write, or write index
+};
+
+// Initialize a FIFO object.
+// Input parameters:
+//  fifo        Pointer to the FIFO object.
+//  frameCount  Max number of significant frames to be stored in the FIFO > 0.
+//              If writes and reads always use the same count, and that count is a divisor of
+//              frameCount, then the writes and reads will never do a partial transfer.
+//  frameSize   Size of each frame in bytes.
+//  buffer      Pointer to a caller-allocated buffer of frameCount frames.
+void audio_utils_fifo_init(struct audio_utils_fifo *fifo, size_t frameCount, size_t frameSize,
+        void *buffer);
+
+// De-initialize a FIFO object.
+// Input parameters:
+//  fifo        Pointer to the FIFO object.
+void audio_utils_fifo_deinit(struct audio_utils_fifo *fifo);
+
+// Write to FIFO.
+// Input parameters:
+//  fifo        Pointer to the FIFO object.
+//  buffer      Pointer to source buffer containing 'count' frames of data.
+// Returns actual number of frames written <= count.
+// The actual transfer count may be zero if the FIFO is full,
+// or partial if the FIFO was almost full.
+// A negative return value indicates an error.  Currently there are no errors defined.
+ssize_t audio_utils_fifo_write(struct audio_utils_fifo *fifo, const void *buffer, size_t count);
+
+// Read from FIFO.
+// Input parameters:
+//  fifo        Pointer to the FIFO object.
+//  buffer      Pointer to destination buffer to be filled with up to 'count' frames of data.
+// Returns actual number of frames read <= count.
+// The actual transfer count may be zero if the FIFO is empty,
+// or partial if the FIFO was almost empty.
+// A negative return value indicates an error.  Currently there are no errors defined.
+ssize_t audio_utils_fifo_read(struct audio_utils_fifo *fifo, void *buffer, size_t count);
+
+size_t audio_utils_fifo_availToRead(struct audio_utils_fifo *fifo);
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif  // !ANDROID_AUDIO_FIFO_H
diff --git a/LoopbackApp/app/src/main/jni/audio_utils/roundup.c b/LoopbackApp/app/src/main/jni/audio_utils/roundup.c
new file mode 100644
index 0000000..6c8e504
--- /dev/null
+++ b/LoopbackApp/app/src/main/jni/audio_utils/roundup.c
@@ -0,0 +1,32 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "roundup.h"
+
+unsigned roundup(unsigned v) {
+    // __builtin_clz is undefined for zero input
+    if (v == 0) {
+        v = 1;
+    }
+
+    int lz = __builtin_clz((int) v);
+    unsigned rounded = ((unsigned) 0x80000000) >> lz;
+    // 0x80000001 and higher are actually rounded _down_ to prevent overflow
+    if (v > rounded && lz > 0) {
+        rounded <<= 1;
+    }
+    return rounded;
+}
diff --git a/LoopbackApp/app/src/main/jni/audio_utils/roundup.h b/LoopbackApp/app/src/main/jni/audio_utils/roundup.h
new file mode 100644
index 0000000..ad34289
--- /dev/null
+++ b/LoopbackApp/app/src/main/jni/audio_utils/roundup.h
@@ -0,0 +1,31 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_AUDIO_ROUNDUP_H
+#define ANDROID_AUDIO_ROUNDUP_H
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+// Round up to the next highest power of 2
+unsigned roundup(unsigned v);
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif  // ANDROID_AUDIO_ROUNDUP_H
diff --git a/LoopbackApp/app/src/main/jni/jni_sles.c b/LoopbackApp/app/src/main/jni/jni_sles.c
new file mode 100644
index 0000000..0417252
--- /dev/null
+++ b/LoopbackApp/app/src/main/jni/jni_sles.c
@@ -0,0 +1,178 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <android/log.h>
+#include "sles.h"
+#include "jni_sles.h"
+#include <stdio.h>
+
+
+JNIEXPORT jlong JNICALL Java_org_drrickorang_loopback_NativeAudioThread_slesInit
+  (JNIEnv *env, jobject obj __unused, jint samplingRate, jint frameCount, jint micSource,
+   jint performanceMode,
+   jint testType, jdouble frequency1, jobject byteBuffer, jshortArray loopbackTone,
+   jint maxRecordedLateCallbacks, jint ignoreFirstFrames) {
+
+    sles_data * pSles = NULL;
+
+    char* byteBufferPtr = (*env)->GetDirectBufferAddress(env, byteBuffer);
+    int byteBufferLength = (*env)->GetDirectBufferCapacity(env, byteBuffer);
+
+    short* loopbackToneArray = (*env)->GetShortArrayElements(env, loopbackTone, 0);
+
+    if (slesInit(&pSles, samplingRate, frameCount, micSource,
+                 performanceMode,
+                 testType, frequency1, byteBufferPtr, byteBufferLength,
+                 loopbackToneArray, maxRecordedLateCallbacks, ignoreFirstFrames) != SLES_FAIL) {
+        return (long) pSles;
+    }
+
+    // FIXME This should be stored as a (long) field in the object,
+    // so that incorrect Java code could not synthesize a bad sles pointer.
+    return 0;
+}
+
+
+JNIEXPORT jint JNICALL Java_org_drrickorang_loopback_NativeAudioThread_slesProcessNext
+(JNIEnv *env, jobject obj __unused, jlong sles, jdoubleArray samplesArray, jlong offset) {
+    sles_data * pSles = (sles_data*) (size_t) sles;
+
+    long maxSamples = (*env)->GetArrayLength(env, samplesArray);
+    double *pSamples = (*env)->GetDoubleArrayElements(env, samplesArray, 0);
+
+    long availableSamples = maxSamples-offset;
+    double *pCurrentSample = pSamples+offset;
+
+    SLES_PRINTF("jni slesProcessNext pSles:%p, currentSample %p, availableSamples %ld ",
+                pSles, pCurrentSample, availableSamples);
+
+    int samplesRead = slesProcessNext(pSles, pCurrentSample, availableSamples);
+
+    // copy any samples written by the native side back into the Java array and release the copy
+    (*env)->ReleaseDoubleArrayElements(env, samplesArray, pSamples, 0);
+    return samplesRead;
+}
+
+
+JNIEXPORT jint JNICALL Java_org_drrickorang_loopback_NativeAudioThread_slesDestroy
+  (JNIEnv *env __unused, jobject obj __unused, jlong sles) {
+    sles_data * pSles = (sles_data*) (size_t) sles;
+    int status = slesDestroy(&pSles);
+    return status;
+}
+
+
+JNIEXPORT jintArray JNICALL
+        Java_org_drrickorang_loopback_NativeAudioThread_slesGetRecorderBufferPeriod
+  (JNIEnv *env, jobject obj __unused, jlong sles) {
+    sles_data * pSles = (sles_data*) (size_t) sles;
+    int* recorderBufferPeriod = slesGetRecorderBufferPeriod(pSles);
+
+    // get the length = RANGE
+    jintArray result = (*env)->NewIntArray(env, RANGE);
+    (*env)->SetIntArrayRegion(env, result, 0, RANGE, recorderBufferPeriod);
+
+    return result;
+}
+
+
+JNIEXPORT jint JNICALL
+        Java_org_drrickorang_loopback_NativeAudioThread_slesGetRecorderMaxBufferPeriod
+  (JNIEnv *env __unused, jobject obj __unused, jlong sles) {
+    sles_data * pSles = (sles_data*) (size_t) sles;
+    int recorderMaxBufferPeriod = slesGetRecorderMaxBufferPeriod(pSles);
+
+    return recorderMaxBufferPeriod;
+}
+
+
+JNIEXPORT jdouble JNICALL
+        Java_org_drrickorang_loopback_NativeAudioThread_slesGetRecorderVarianceBufferPeriod
+        (JNIEnv *env __unused, jobject obj __unused, jlong sles) {
+    sles_data *pSles = (sles_data *) (size_t) sles;
+    int64_t result = slesGetRecorderVarianceBufferPeriod(pSles);
+    // variance has units ns^2 so we have to square the conversion factor
+    double scaled = (double) result / ((double) NANOS_PER_MILLI * (double) NANOS_PER_MILLI);
+    return scaled;
+}
+
+
+JNIEXPORT jintArray
+JNICALL Java_org_drrickorang_loopback_NativeAudioThread_slesGetPlayerBufferPeriod
+  (JNIEnv *env, jobject obj __unused, jlong sles) {
+    sles_data * pSles = (sles_data*) (size_t) sles;
+    int* playerBufferPeriod = slesGetPlayerBufferPeriod(pSles);
+
+    jintArray result = (*env)->NewIntArray(env, RANGE);
+    (*env)->SetIntArrayRegion(env, result, 0, RANGE, playerBufferPeriod);
+
+    return result;
+}
+
+
+JNIEXPORT jint JNICALL
+        Java_org_drrickorang_loopback_NativeAudioThread_slesGetPlayerMaxBufferPeriod
+  (JNIEnv *env __unused, jobject obj __unused, jlong sles) {
+    sles_data * pSles = (sles_data*) (size_t) sles;
+    int playerMaxBufferPeriod = slesGetPlayerMaxBufferPeriod(pSles);
+
+    return playerMaxBufferPeriod;
+}
+
+
+JNIEXPORT jdouble JNICALL
+Java_org_drrickorang_loopback_NativeAudioThread_slesGetPlayerVarianceBufferPeriod
+        (JNIEnv *env __unused, jobject obj __unused, jlong sles) {
+    sles_data *pSles = (sles_data *) (size_t) sles;
+    int64_t result = slesGetPlayerVarianceBufferPeriod(pSles);
+    // variance has units ns^2 so we have to square the conversion factor
+    double scaled = (double) result / ((double) NANOS_PER_MILLI * (double) NANOS_PER_MILLI);
+    return scaled;
+}
+
+
+jobject getCallbackTimes(JNIEnv *env, callbackTimeStamps *callbacks, short expectedBufferPeriod){
+    jintArray timeStamps = (*env)->NewIntArray(env, callbacks->index);
+    (*env)->SetIntArrayRegion(env, timeStamps, 0, callbacks->index, callbacks->timeStampsMs);
+
+    jshortArray callbackLengths = (*env)->NewShortArray(env, callbacks->index);
+    (*env)->SetShortArrayRegion(env, callbackLengths, 0, callbacks->index,
+                                callbacks->callbackDurations);
+
+    jclass cls = (*env)->FindClass(env, "org/drrickorang/loopback/BufferCallbackTimes");
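+    // "([I[SZS)V" maps to a BufferCallbackTimes constructor taking
+    // (int[], short[], boolean, short)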
+    jmethodID methodID = (*env)->GetMethodID(env, cls, "<init>", "([I[SZS)V");
+    jobject callbackTimes = (*env)->NewObject(env, cls, methodID, timeStamps, callbackLengths,
+                                              callbacks->exceededCapacity, expectedBufferPeriod);
+    return callbackTimes;
+}
+
+JNIEXPORT jobject
+JNICALL Java_org_drrickorang_loopback_NativeAudioThread_slesGetPlayerCallbackTimeStamps
+        (JNIEnv *env, jobject obj __unused, jlong sles) {
+    sles_data * pSles = (sles_data*) (size_t) sles;
+    return getCallbackTimes(env, &(pSles->playerTimeStamps), pSles->expectedBufferPeriod);
+}
+
+JNIEXPORT jobject
+JNICALL Java_org_drrickorang_loopback_NativeAudioThread_slesGetRecorderCallbackTimeStamps
+        (JNIEnv *env, jobject obj __unused, jlong sles) {
+    sles_data * pSles = (sles_data*) (size_t) sles;
+    return getCallbackTimes(env, &(pSles->recorderTimeStamps), pSles->expectedBufferPeriod);
+}
+
+JNIEXPORT jint
+JNICALL Java_org_drrickorang_loopback_NativeAudioThread_slesGetCaptureRank
+        (JNIEnv *env __unused, jobject obj __unused, jlong sles) {
+    sles_data * pSles = (sles_data*) (size_t) sles;
+    return slesGetCaptureRank(pSles);
+}
diff --git a/LoopbackApp/app/src/main/jni/jni_sles.h b/LoopbackApp/app/src/main/jni/jni_sles.h
new file mode 100644
index 0000000..f25bd52
--- /dev/null
+++ b/LoopbackApp/app/src/main/jni/jni_sles.h
@@ -0,0 +1,69 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <jni.h>
+
+#ifndef _Included_org_drrickorang_loopback_jni
+#define _Included_org_drrickorang_loopback_jni
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+////////////////////////
+//// SLES JNI entry points
+JNIEXPORT jlong JNICALL Java_org_drrickorang_loopback_NativeAudioThread_slesInit
+  (JNIEnv *, jobject, jint, jint, jint, jint, jint, jdouble, jobject byteBuffer,
+   jshortArray loopbackTone, jint maxRecordedLateCallbacks, jint ignoreFirstFrames);
+
+JNIEXPORT jint JNICALL Java_org_drrickorang_loopback_NativeAudioThread_slesProcessNext
+  (JNIEnv *, jobject, jlong, jdoubleArray, jlong);
+
+JNIEXPORT jint JNICALL Java_org_drrickorang_loopback_NativeAudioThread_slesDestroy
+  (JNIEnv *, jobject, jlong);
+
+JNIEXPORT jintArray JNICALL
+        Java_org_drrickorang_loopback_NativeAudioThread_slesGetRecorderBufferPeriod
+  (JNIEnv *, jobject, jlong);
+
+JNIEXPORT jint JNICALL
+        Java_org_drrickorang_loopback_NativeAudioThread_slesGetRecorderMaxBufferPeriod
+  (JNIEnv *, jobject, jlong);
+
+JNIEXPORT jdouble JNICALL
+        Java_org_drrickorang_loopback_NativeAudioThread_slesGetRecorderVarianceBufferPeriod
+  (JNIEnv *, jobject, jlong);
+
+JNIEXPORT jintArray JNICALL
+        Java_org_drrickorang_loopback_NativeAudioThread_slesGetPlayerBufferPeriod
+  (JNIEnv *, jobject, jlong);
+
+JNIEXPORT jint JNICALL
+        Java_org_drrickorang_loopback_NativeAudioThread_slesGetPlayerMaxBufferPeriod
+  (JNIEnv *, jobject, jlong);
+
+JNIEXPORT jdouble JNICALL
+        Java_org_drrickorang_loopback_NativeAudioThread_slesGetPlayerVarianceBufferPeriod
+  (JNIEnv *, jobject, jlong);
+
+JNIEXPORT jint JNICALL
+        Java_org_drrickorang_loopback_NativeAudioThread_slesGetCaptureRank
+  (JNIEnv *, jobject, jlong);
+
+#ifdef __cplusplus
+}
+#endif
+#endif //_Included_org_drrickorang_loopback_jni
diff --git a/LoopbackApp/app/src/main/jni/sles.cpp b/LoopbackApp/app/src/main/jni/sles.cpp
new file mode 100644
index 0000000..159269b
--- /dev/null
+++ b/LoopbackApp/app/src/main/jni/sles.cpp
@@ -0,0 +1,1051 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+// FIXME taken from OpenSLES_AndroidConfiguration.h
+#define SL_ANDROID_KEY_PERFORMANCE_MODE  ((const SLchar*) "androidPerformanceMode")
+
+////////////////////////////////////////////
+/// Actual sles functions.
+
+
+// Test program to record from the default audio input and play back to the default audio output.
+// It will generate feedback (Larsen effect) if played through the on-device speaker,
+// or act as a delay if played through a headset.
+
+#define _USE_MATH_DEFINES
+#include <cmath>
+#include "sles.h"
+#include "audio_utils/atomic.h"
+#include <stdio.h>
+#include <assert.h>
+#include <unistd.h>
+#include <string.h>
+
+int slesInit(sles_data ** ppSles, int samplingRate, int frameCount, int micSource,
+             int performanceMode,
+             int testType, double frequency1, char* byteBufferPtr, int byteBufferLength,
+             short* loopbackTone, int maxRecordedLateCallbacks, int ignoreFirstFrames) {
+    int status = SLES_FAIL;
+    if (ppSles != NULL) {
+        sles_data * pSles = (sles_data*) malloc(sizeof(sles_data));
+
+        memset(pSles, 0, sizeof(sles_data));
+
+        SLES_PRINTF("pSles malloc %zu bytes at %p", sizeof(sles_data), pSles);
+        //__android_log_print(ANDROID_LOG_INFO, "sles_jni",
+        //"malloc %d bytes at %p", sizeof(sles_data), pSles);//Or ANDROID_LOG_INFO, ...
+        *ppSles = pSles;
+        if (pSles != NULL)
+        {
+            SLES_PRINTF("creating server. Sampling rate =%d, frame count = %d",
+                        samplingRate, frameCount);
+            status = slesCreateServer(pSles, samplingRate, frameCount, micSource,
+                                      performanceMode, testType,
+                                      frequency1, byteBufferPtr, byteBufferLength, loopbackTone,
+                                      maxRecordedLateCallbacks, ignoreFirstFrames);
+            SLES_PRINTF("slesCreateServer =%d", status);
+        }
+    }
+
+    return status;
+}
+int slesDestroy(sles_data ** ppSles) {
+    int status = SLES_FAIL;
+    if (ppSles != NULL) {
+        slesDestroyServer(*ppSles);
+
+        if (*ppSles != NULL)
+        {
+            SLES_PRINTF("free memory at %p",*ppSles);
+            free(*ppSles);
+            *ppSles = 0;
+        }
+        status = SLES_SUCCESS;
+    }
+    return status;
+}
+
+#define ASSERT_EQ(x, y) do { if ((x) == (y)) ; else { fprintf(stderr, "0x%x != 0x%x\n", \
+    (unsigned) (x), (unsigned) (y)); assert((x) == (y)); } } while (0)
+
+// Called after the audio recorder fills a buffer with data; we can then read from that buffer.
+static void recorderCallback(SLAndroidSimpleBufferQueueItf caller __unused, void *context) {
+    sles_data *pSles = (sles_data*) context;
+    if (pSles != NULL) {
+        collectBufferPeriod(&pSles->recorderBufferStats, NULL /*fdpStats*/, &pSles->recorderTimeStamps,
+                            pSles->expectedBufferPeriod);
+
+        //__android_log_print(ANDROID_LOG_INFO, "sles_jni", "in recorderCallback");
+        SLresult result;
+
+        //ee  SLES_PRINTF("<R");
+
+        // We should only be called when a recording buffer is done
+        assert(pSles->rxFront <= pSles->rxBufCount);
+        assert(pSles->rxRear <= pSles->rxBufCount);
+        assert(pSles->rxFront != pSles->rxRear);
+        char *buffer = pSles->rxBuffers[pSles->rxFront]; //pSles->rxBuffers stores the data recorded
+
+
+        // Remove buffer from record queue
+        if (++pSles->rxFront > pSles->rxBufCount) {
+            pSles->rxFront = 0;
+        }
+
+        if (pSles->testType == TEST_TYPE_LATENCY) {
+            // Throw out first frames
+            if (pSles->ignoreFirstFrames) {
+                int framesToErase = pSles->ignoreFirstFrames;
+                if (framesToErase > (int) pSles->bufSizeInFrames) {
+                    framesToErase = pSles->bufSizeInFrames;
+                }
+                pSles->ignoreFirstFrames -= framesToErase;
+                memset(buffer, 0, framesToErase * pSles->channels * sizeof(short));
+            }
+
+            ssize_t actual = audio_utils_fifo_write(&(pSles->fifo), buffer,
+                    (size_t) pSles->bufSizeInFrames);
+
+            if (actual != (ssize_t) pSles->bufSizeInFrames) {
+                write(1, "?", 1);
+            }
+
+            // This is called by a realtime (SCHED_FIFO) thread,
+            // and it is unsafe to do I/O as it could block for unbounded time.
+            // Flash filesystem is especially notorious for blocking.
+            if (pSles->fifo2Buffer != NULL) {
+                actual = audio_utils_fifo_write(&(pSles->fifo2), buffer,
+                        (size_t) pSles->bufSizeInFrames);
+                if (actual != (ssize_t) pSles->bufSizeInFrames) {
+                    write(1, "?", 1);
+                }
+            }
+        } else if (pSles->testType == TEST_TYPE_BUFFER_PERIOD) {
+            if (pSles->fifo2Buffer != NULL) {
+                ssize_t actual = byteBuffer_write(pSles, buffer, (size_t) pSles->bufSizeInFrames);
+
+                //FIXME should log errors using other methods instead of printing to terminal
+                if (actual != (ssize_t) pSles->bufSizeInFrames) {
+                    write(1, "?", 1);
+                }
+            }
+        }
+
+
+        // Enqueue this same buffer for the recorder to fill again.
+        result = (*(pSles->recorderBufferQueue))->Enqueue(pSles->recorderBufferQueue, buffer,
+                                                          pSles->bufSizeInBytes);
+        //__android_log_print(ANDROID_LOG_INFO, "recorderCallback", "recorder buffer size: %i",
+        //                    pSles->bufSizeInBytes);
+        ASSERT_EQ(SL_RESULT_SUCCESS, result);
+
+
+        // Update our model of the record queue
+        SLuint32 rxRearNext = pSles->rxRear + 1;
+        if (rxRearNext > pSles->rxBufCount) {
+            rxRearNext = 0;
+        }
+        assert(rxRearNext != pSles->rxFront);
+        pSles->rxBuffers[pSles->rxRear] = buffer;
+        pSles->rxRear = rxRearNext;
+
+
+
+      //ee  SLES_PRINTF("r>");
+
+    } //pSles not null
+}
+
+
+// Write "count" amount of short from buffer to pSles->byteBufferPtr. This byteBuffer will read by
+// java code.
+ssize_t byteBuffer_write(sles_data *pSles, char *buffer, size_t count) {
+    // byteBufferLength is in bytes; the last 4 bytes hold the write counter ("rear")
+    int32_t rear; // rear should not exceed 2^31 - 1, or else overflow will happen
+    memcpy(&rear, (char *) (pSles->byteBufferPtr + pSles->byteBufferLength - 4), sizeof(rear));
+
+    size_t frameSize = pSles->channels * sizeof(short); // bytes per frame
+    int32_t maxLengthInShort = (pSles->byteBufferLength - 4) / frameSize;
+    // mask the upper bits to get the correct position in the pipe
+    int32_t tempRear = rear & (maxLengthInShort - 1);
+    size_t part1 = maxLengthInShort - tempRear;
+
+    if (part1 > count) {
+        part1 = count;
+    }
+
+    if (part1 > 0) {
+        memcpy(pSles->byteBufferPtr + (tempRear * frameSize), buffer,
+               part1 * frameSize);
+
+        size_t part2 = count - part1;
+        if (part2 > 0) {
+            memcpy(pSles->byteBufferPtr, (buffer + (part1 * frameSize)),
+                   part2 * frameSize);
+        }
+
+        //TODO do we need something similar to the below function call?
+        //android_atomic_release_store(audio_utils_fifo_sum(fifo, fifo->mRear, availToWrite),
+        //        &fifo->mRear);
+    }
+
+    // advance the shared write counter ("rear") stored in the last 4 bytes
+    int32_t* rear2 = (int32_t *) (pSles->byteBufferPtr + pSles->byteBufferLength - 4);
+    *rear2 += count;
+    return count;
+}
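+
+// Illustrative sketch only (not part of this app): one way the consumer side could drain the
+// byte buffer written above. It mirrors the writer's assumptions -- the usable area holds a
+// power-of-two number of frames and the final 4 bytes carry the running write counter ("rear").
+// byteBuffer_readSketch and readPos are hypothetical names.
+/*
+static size_t byteBuffer_readSketch(sles_data *pSles, char *dst, size_t count, int32_t *readPos) {
+    size_t frameSize = pSles->channels * sizeof(short);
+    int32_t maxLengthInShort = (pSles->byteBufferLength - 4) / frameSize;
+    int32_t rear;
+    memcpy(&rear, pSles->byteBufferPtr + pSles->byteBufferLength - 4, sizeof(rear));
+    size_t avail = (size_t) (rear - *readPos);           // frames written but not yet consumed
+    if (count > avail) {
+        count = avail;
+    }
+    int32_t front = *readPos & (maxLengthInShort - 1);   // wrap to a valid buffer position
+    size_t part1 = maxLengthInShort - front;
+    if (part1 > count) {
+        part1 = count;
+    }
+    memcpy(dst, pSles->byteBufferPtr + front * frameSize, part1 * frameSize);
+    if (count > part1) {
+        memcpy(dst + part1 * frameSize, pSles->byteBufferPtr, (count - part1) * frameSize);
+    }
+    *readPos += count;
+    return count;
+}
+*/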
+
+// Calculate nanosecond difference between two timespec structs from clock_gettime(CLOCK_MONOTONIC)
+// tv_sec [0, max time_t] , tv_nsec [0, 999999999]
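+// Example: previous {10 s, 999999999 ns} and current {11 s, 1 ns} differ by 2 ns.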
+int64_t diffInNano(struct timespec previousTime, struct timespec currentTime) {
+    return (int64_t) (currentTime.tv_sec - previousTime.tv_sec) * (int64_t) NANOS_PER_SECOND +
+            currentTime.tv_nsec - previousTime.tv_nsec;
+}
+
+// Called after audio player empties a buffer of data
+static void playerCallback(SLBufferQueueItf caller __unused, void *context) {
+    sles_data *pSles = (sles_data*) context;
+    if (pSles != NULL) {
+        collectBufferPeriod(&pSles->playerBufferStats, &pSles->recorderBufferStats /*fdpStats*/,
+                            &pSles->playerTimeStamps, pSles->expectedBufferPeriod);
+        SLresult result;
+
+        //ee  SLES_PRINTF("<P");
+
+        // Get the buffer that just finished playing
+        assert(pSles->txFront <= pSles->txBufCount);
+        assert(pSles->txRear <= pSles->txBufCount);
+        assert(pSles->txFront != pSles->txRear);
+        char *buffer = pSles->txBuffers[pSles->txFront];
+        if (++pSles->txFront > pSles->txBufCount) {
+            pSles->txFront = 0;
+        }
+
+        if (pSles->testType == TEST_TYPE_LATENCY) {
+            // Jitter buffer should have strictly less than 2 buffers worth of data in it.
+            // This is to prevent the test itself from adding too much latency.
+            size_t discardedInputFrames = 0;
+            for (;;) {
+                size_t availToRead = audio_utils_fifo_availToRead(&pSles->fifo);
+                if (availToRead < pSles->bufSizeInFrames * 2) {
+                    break;
+                }
+                ssize_t actual = audio_utils_fifo_read(&pSles->fifo, buffer, pSles->bufSizeInFrames);
+                if (actual > 0) {
+                    discardedInputFrames += actual;
+                }
+                if (actual != (ssize_t) pSles->bufSizeInFrames) {
+                    break;
+                }
+            }
+            if (discardedInputFrames > 0) {
+                if (pSles->totalDiscardedInputFrames > 0) {
+                    __android_log_print(ANDROID_LOG_WARN, "sles_jni",
+                        "Discarded an additional %zu input frames after a total of %zu input frames"
+                        " had previously been discarded",
+                        discardedInputFrames, pSles->totalDiscardedInputFrames);
+                }
+                pSles->totalDiscardedInputFrames += discardedInputFrames;
+            }
+
+            ssize_t actual = audio_utils_fifo_read(&(pSles->fifo), buffer, pSles->bufSizeInFrames);
+            if (actual != (ssize_t) pSles->bufSizeInFrames) {
+                write(1, "/", 1);
+                // on underrun from pipe, substitute silence
+                memset(buffer, 0, pSles->bufSizeInFrames * pSles->channels * sizeof(short));
+            }
+
+            if (pSles->injectImpulse == -1) {   // time to inject the loopback test tone
+
+                /*// Experimentally, a single frame impulse was insufficient to trigger feedback.
+                // Also a Nyquist frequency signal was also insufficient, probably because
+                // the response of output and/or input path was not adequate at high frequencies.
+                // This short burst of a few cycles of square wave at Nyquist/4 found to work well.
+                for (unsigned i = 0; i < pSles->bufSizeInFrames / 8; i += 8) {
+                    for (int j = 0; j < 8; j++) {
+                        for (unsigned k = 0; k < pSles->channels; k++) {
+                            ((short *) buffer)[(i + j) * pSles->channels + k] =
+                                                                            j < 4 ? 0x7FFF : 0x8000;
+                        }
+                    }
+                }*/
+
+                // inject the Java-generated tone
+                for (unsigned i = 0; i < pSles->bufSizeInFrames; ++i) {
+                    for (unsigned k = 0; k < pSles->channels; ++k) {
+                        ((short *) buffer)[i * pSles->channels + k] = pSles->loopbackTone[i];
+                    }
+                }
+
+                pSles->injectImpulse = 0;
+                pSles->totalDiscardedInputFrames = 0;
+            }
+        } else if (pSles->testType == TEST_TYPE_BUFFER_PERIOD) {
+            double twoPi = M_PI * 2;
+            int maxShort = 32767;
+            float amplitude = 0.8;
+            short value;
+            double phaseIncrement = pSles->frequency1 / pSles->sampleRate;
+            bool isGlitchEnabled = false;
+            for (unsigned i = 0; i < pSles->bufSizeInFrames; i++) {
+                value = (short) (sin(pSles->bufferTestPhase1) * maxShort * amplitude);
+                for (unsigned k = 0; k < pSles->channels; ++k) {
+                    ((short *) buffer)[i* pSles->channels + k] = value;
+                }
+
+                pSles->bufferTestPhase1 += twoPi * phaseIncrement;
+                // when isGlitchEnabled is true, insert one glitch per second of generated signal
+                if (isGlitchEnabled && (pSles->count % pSles->sampleRate == 0)) {
+                    pSles->bufferTestPhase1 += twoPi * phaseIncrement;
+                }
+
+                pSles->count++;
+
+                while (pSles->bufferTestPhase1 > twoPi) {
+                    pSles->bufferTestPhase1 -= twoPi;
+                }
+            }
+        }
+
+        // Enqueue the filled buffer for playing
+        result = (*(pSles->playerBufferQueue))->Enqueue(pSles->playerBufferQueue, buffer,
+                                                        pSles->bufSizeInBytes);
+        ASSERT_EQ(SL_RESULT_SUCCESS, result);
+
+        // Update our model of the player queue
+        assert(pSles->txFront <= pSles->txBufCount);
+        assert(pSles->txRear <= pSles->txBufCount);
+        SLuint32 txRearNext = pSles->txRear + 1;
+        if (txRearNext > pSles->txBufCount) {
+            txRearNext = 0;
+        }
+        assert(txRearNext != pSles->txFront);
+        pSles->txBuffers[pSles->txRear] = buffer;
+        pSles->txRear = txRearNext;
+
+    } //pSles not null
+}
+
+// Used to set initial values for the bufferStats struct before values can be recorded.
+void initBufferStats(bufferStats *stats) {
+    stats->buffer_period = new int[RANGE](); // initialized to zeros
+    stats->previous_time = {0,0};
+    stats->current_time = {0,0};
+
+    stats->buffer_count = 0;
+    stats->max_buffer_period = 0;
+
+    stats->measurement_count = 0;
+    stats->SDM = 0;
+    stats->var = 0;
+}
+
+// Called at the beginning of recorderCallback() and playerCallback() to collect the interval
+// between successive callbacks.
+// fdpStats is either NULL or a pointer to the buffer statistics for the full-duplex partner.
+void collectBufferPeriod(bufferStats *stats, bufferStats *fdpStats, callbackTimeStamps *timeStamps,
+                         short expectedBufferPeriod) {
+    clock_gettime(CLOCK_MONOTONIC, &(stats->current_time));
+
+    if (timeStamps->startTime.tv_sec == 0 && timeStamps->startTime.tv_nsec == 0) {
+        timeStamps->startTime = stats->current_time;
+    }
+
+    (stats->buffer_count)++;
+
+    if (stats->previous_time.tv_sec != 0 && stats->buffer_count > BUFFER_PERIOD_DISCARD &&
+         (fdpStats == NULL || fdpStats->buffer_count > BUFFER_PERIOD_DISCARD_FULL_DUPLEX_PARTNER)) {
+
+        int64_t callbackDuration = diffInNano(stats->previous_time, stats->current_time);
+
+        bool outlier = updateBufferStats(stats, callbackDuration, expectedBufferPeriod);
+
+        // record timestamps of callbacks whose period deviates from the expected buffer period
+        if (outlier) {
+            int64_t timeStamp = diffInNano(timeStamps->startTime, stats->current_time);
+            recordTimeStamp(timeStamps, callbackDuration, timeStamp);
+        }
+    }
+
+    stats->previous_time = stats->current_time;
+}
+
+// Records an outlier given the duration in nanoseconds and the number of nanoseconds
+// between it and the start of the test.
+void recordTimeStamp(callbackTimeStamps *timeStamps,
+                     int64_t callbackDuration, int64_t timeStamp) {
+    if (timeStamps->exceededCapacity) {
+        return;
+    }
+
+    // only marked as exceeded when a late callback arrives after the arrays are already full
+    if (timeStamps->index == timeStamps->capacity){
+        timeStamps->exceededCapacity = true;
+    } else {
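+        // convert to ms, rounding up (same ceiling division as updateBufferStats)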
+        timeStamps->callbackDurations[timeStamps->index] =
+                (short) ((callbackDuration + NANOS_PER_MILLI - 1) / NANOS_PER_MILLI);
+        timeStamps->timeStampsMs[timeStamps->index] =
+                (int) ((timeStamp + NANOS_PER_MILLI - 1) / NANOS_PER_MILLI);
+        timeStamps->index++;
+    }
+}
+
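+// captureRank keeps the largest lateness (ms beyond the expected period) seen since the Java side
+// last polled slesGetCaptureRank(), which clears it with android_atomic_exchange(0, ...).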
+void atomicSetIfGreater(volatile int32_t *addr, int32_t val) {
+    // TODO: rewrite this to avoid the need for unbounded spinning
+    int32_t old;
+    do {
+        old = *addr;
+        if (val < old) return;
+    } while(!android_atomic_compare_exchange(&old, val, addr));
+}
+
+// Updates the stats being collected about buffer periods. Returns true if this is an outlier.
+bool updateBufferStats(bufferStats *stats, int64_t diff_in_nano, int expectedBufferPeriod) {
+    stats->measurement_count++;
+
+    // round up to nearest millisecond
+    int diff_in_milli = (int) ((diff_in_nano + NANOS_PER_MILLI - 1) / NANOS_PER_MILLI);
+
+    if (diff_in_milli > stats->max_buffer_period) {
+        stats->max_buffer_period = diff_in_milli;
+    }
+
+    // from 0 ms to 1000 ms, plus a sum of all instances > 1000ms
+    if (diff_in_milli >= (RANGE - 1)) {
+        (stats->buffer_period)[RANGE-1]++;
+    } else if (diff_in_milli >= 0) {
+        (stats->buffer_period)[diff_in_milli]++;
+    } else { // for diff_in_milli < 0
+        __android_log_print(ANDROID_LOG_INFO, "sles_player", "Having negative BufferPeriod.");
+    }
+
+    int64_t delta = diff_in_nano - (int64_t) expectedBufferPeriod * NANOS_PER_MILLI;
+    stats->SDM += delta * delta;
+    if (stats->measurement_count > 1) {
+        stats->var = stats->SDM / stats->measurement_count;
+    }
+
+    // check if the lateness is so bad that a systrace should be captured
+    // TODO: replace static threshold of lateness with a dynamic determination
+    if (diff_in_milli > expectedBufferPeriod + LATE_CALLBACK_CAPTURE_THRESHOLD) {
+        // TODO: log in a non-blocking way
+        //__android_log_print(ANDROID_LOG_INFO, "buffer_stats", "Callback late by %d ms",
+        //                    diff_in_milli - expectedBufferPeriod);
+        atomicSetIfGreater(&(stats->captureRank), diff_in_milli - expectedBufferPeriod);
+    }
+    return diff_in_milli > expectedBufferPeriod + LATE_CALLBACK_OUTLIER_THRESHOLD;
+}
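+
+// Worked example (illustrative numbers, not measurements): with expectedBufferPeriod = 4 ms,
+// a 6 ms callback counts as an outlier (6 > 4 + LATE_CALLBACK_OUTLIER_THRESHOLD) and is
+// timestamped, but only a callback later than 4 + LATE_CALLBACK_CAPTURE_THRESHOLD = 8 ms
+// raises captureRank to request a systrace/bug report.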
+
+int slesCreateServer(sles_data *pSles, int samplingRate, int frameCount, int micSource,
+                     int performanceMode,
+                     int testType, double frequency1, char *byteBufferPtr, int byteBufferLength,
+                     short *loopbackTone, int maxRecordedLateCallbacks, int ignoreFirstFrames) {
+    int status = SLES_FAIL;
+
+    if (pSles != NULL) {
+
+        //        adb shell slesTest_feedback -r1 -t1 -s48000 -f240 -i300 -e3 -o/sdcard/log.wav
+        //            r1 and t1 are the receive and transmit buffer counts, typically 1
+        //            s is the sample rate, typically 48000 or 44100
+        //            f is the frame count per buffer, typically 240 or 256
+        //            i is the number of milliseconds before impulse.  You may need to adjust this.
+        //            e is number of seconds to record
+        //            o is output .wav file name
+
+
+        //        // default values
+        //        SLuint32 rxBufCount = 1;     // -r#
+        //        SLuint32 txBufCount = 1;     // -t#
+        //        SLuint32 bufSizeInFrames = 240;  // -f#
+        //        SLuint32 channels = 1;       // -c#
+        //        SLuint32 sampleRate = 48000; // -s#
+        //        SLuint32 exitAfterSeconds = 3; // -e#
+        //        SLuint32 freeBufCount = 0;   // calculated
+        //        SLuint32 bufSizeInBytes = 0; // calculated
+        //        int injectImpulse = 300; // -i#i
+        //
+        //        // Storage area for the buffer queues
+        //        char **rxBuffers;
+        //        char **txBuffers;
+        //        char **freeBuffers;
+        //
+        //        // Buffer indices
+        //        SLuint32 rxFront;    // oldest recording
+        //        SLuint32 rxRear;     // next to be recorded
+        //        SLuint32 txFront;    // oldest playing
+        //        SLuint32 txRear;     // next to be played
+        //        SLuint32 freeFront;  // oldest free
+        //        SLuint32 freeRear;   // next to be freed
+        //
+        //        audio_utils_fifo fifo; //(*)
+        //        SLAndroidSimpleBufferQueueItf recorderBufferQueue;
+        //        SLBufferQueueItf playerBufferQueue;
+
+        // default values
+        pSles->rxBufCount = 1;     // -r#
+        pSles->txBufCount = 1;     // -t#
+        pSles->bufSizeInFrames = frameCount;//240;  // -f#
+        pSles->channels = 1;       // -c#
+        pSles->sampleRate = samplingRate;//48000; // -s#
+        pSles->exitAfterSeconds = 3; // -e#
+        pSles->freeBufCount = 0;   // calculated
+        pSles->bufSizeInBytes = 0; // calculated
+        pSles->injectImpulse = 300; // -i#i
+        pSles->totalDiscardedInputFrames = 0;
+        pSles->ignoreFirstFrames = ignoreFirstFrames;
+
+        // Storage area for the buffer queues
+        //        char **rxBuffers;
+        //        char **txBuffers;
+        //        char **freeBuffers;
+
+        // Buffer indices: rxFront/txFront/freeFront are the oldest entries and rxRear/txRear/
+        // freeRear the next to be filled; these, the fifos, and the buffer-queue interfaces were
+        // already zero-initialized by the memset in slesInit().
+
+        pSles->fifo2Buffer = NULL;  // this fifo is for sending data to java code (to plot it)
+
+
+
+        // compute total free buffers as -r plus -t
+        pSles->freeBufCount = pSles->rxBufCount + pSles->txBufCount;
+        // compute buffer size
+        pSles->bufSizeInBytes = pSles->channels * pSles->bufSizeInFrames * sizeof(short);
+
+        // Initialize free buffers
+        pSles->freeBuffers = (char **) calloc(pSles->freeBufCount + 1, sizeof(char *));
+        SLES_PRINTF("  calloc freeBuffers %zu bytes at %p",pSles->freeBufCount + 1,
+                    pSles->freeBuffers);
+        unsigned j;
+        for (j = 0; j < pSles->freeBufCount; ++j) {
+            pSles->freeBuffers[j] = (char *) malloc(pSles->bufSizeInBytes);
+            SLES_PRINTF(" buff%d malloc %zu bytes at %p",j, pSles->bufSizeInBytes,
+                        pSles->freeBuffers[j]);
+        }
+        pSles->freeFront = 0;
+        pSles->freeRear = pSles->freeBufCount;
+        pSles->freeBuffers[j] = NULL;
+
+        // Initialize record queue
+        pSles->rxBuffers = (char **) calloc(pSles->rxBufCount + 1, sizeof(char *));
+        SLES_PRINTF("  calloc rxBuffers %zu bytes at %p",pSles->rxBufCount + 1, pSles->rxBuffers);
+        pSles->rxFront = 0;
+        pSles->rxRear = 0;
+
+        // Initialize play queue
+        pSles->txBuffers = (char **) calloc(pSles->txBufCount + 1, sizeof(char *));
+        SLES_PRINTF("  calloc txBuffers %zu bytes at %p",pSles->txBufCount + 1, pSles->txBuffers);
+        pSles->txFront = 0;
+        pSles->txRear = 0;
+
+        size_t frameSize = pSles->channels * sizeof(short);
+#define FIFO_FRAMES 1024
+        pSles->fifoBuffer = new short[FIFO_FRAMES * pSles->channels];
+        audio_utils_fifo_init(&(pSles->fifo), FIFO_FRAMES, frameSize, pSles->fifoBuffer);
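+        // 1024 frames is roughly 21 ms at 48000 Hz -- the jitter headroom between the recorder
+        // and player callbacks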
+
+        //        SNDFILE *sndfile;
+        //        if (outFileName != NULL) {
+        // create .wav writer
+        //            SF_INFO info;
+        //            info.frames = 0;
+        //            info.samplerate = sampleRate;
+        //            info.channels = channels;
+        //            info.format = SF_FORMAT_WAV | SF_FORMAT_PCM_16;
+        //            sndfile = sf_open(outFileName, SFM_WRITE, &info);
+        //            if (sndfile != NULL) {
+#define FIFO2_FRAMES 65536
+        pSles->fifo2Buffer = new short[FIFO2_FRAMES * pSles->channels];
+        audio_utils_fifo_init(&(pSles->fifo2), FIFO2_FRAMES, frameSize, pSles->fifo2Buffer);
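+        // 65536 frames is roughly 1.4 s at 48000 Hz of headroom for handing samples to the Java UI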
+        //            } else {
+        //                fprintf(stderr, "sf_open failed\n");
+        //            }
+        //        } else {
+        //            sndfile = NULL;
+        //        }
+
+        initBufferStats(&pSles->recorderBufferStats);
+        initBufferStats(&pSles->playerBufferStats);
+
+        // init other variables needed for buffer test
+        pSles->testType = testType;
+        pSles->frequency1 = frequency1;
+        pSles->bufferTestPhase1 = 0;
+        pSles->count = 0;
+        pSles->byteBufferPtr = byteBufferPtr;
+        pSles->byteBufferLength = byteBufferLength;
+
+        //init loopback tone
+        pSles->loopbackTone = loopbackTone;
+
+        pSles->recorderTimeStamps = {
+            new int[maxRecordedLateCallbacks],      //int* timeStampsMs
+            new short[maxRecordedLateCallbacks],    //short* callbackDurations
+            0,                                      //short index
+            {0,0},                                  //struct timespec startTime;
+            maxRecordedLateCallbacks,               //int capacity
+            false                                   //bool exceededCapacity
+        };
+
+        pSles->playerTimeStamps = {
+            new int[maxRecordedLateCallbacks],      //int* timeStampsMs
+            new short[maxRecordedLateCallbacks],    //short* callbackDurations;
+            0,                                      //short index
+            {0,0},                                  //struct timespec startTime;
+            maxRecordedLateCallbacks,               //int capacity
+            false                                   //bool exceededCapacity
+        };
+
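+        // e.g. 240 frames at 48000 Hz -> round(240 * 1000 / 48000.0) = 5 ms per callback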
+        pSles->expectedBufferPeriod = (short) (
+                round(pSles->bufSizeInFrames * MILLIS_PER_SECOND / (float) pSles->sampleRate));
+
+        SLresult result;
+
+        // create engine
+        result = slCreateEngine(&(pSles->engineObject), 0, NULL, 0, NULL, NULL);
+        ASSERT_EQ(SL_RESULT_SUCCESS, result);
+        result = (*(pSles->engineObject))->Realize(pSles->engineObject, SL_BOOLEAN_FALSE);
+        ASSERT_EQ(SL_RESULT_SUCCESS, result);
+        SLEngineItf engineEngine;
+        result = (*(pSles->engineObject))->GetInterface(pSles->engineObject, SL_IID_ENGINE,
+                &engineEngine);
+        ASSERT_EQ(SL_RESULT_SUCCESS, result);
+
+        // create output mix
+        result = (*engineEngine)->CreateOutputMix(engineEngine, &(pSles->outputmixObject), 0, NULL,
+                NULL);
+        ASSERT_EQ(SL_RESULT_SUCCESS, result);
+        result = (*(pSles->outputmixObject))->Realize(pSles->outputmixObject, SL_BOOLEAN_FALSE);
+        ASSERT_EQ(SL_RESULT_SUCCESS, result);
+
+        // create an audio player with buffer queue source and output mix sink
+        SLDataSource audiosrc;
+        SLDataSink audiosnk;
+        SLDataFormat_PCM pcm;
+        SLDataLocator_OutputMix locator_outputmix;
+        SLDataLocator_BufferQueue locator_bufferqueue_tx;
+        locator_bufferqueue_tx.locatorType = SL_DATALOCATOR_BUFFERQUEUE;
+        locator_bufferqueue_tx.numBuffers = pSles->txBufCount;
+        locator_outputmix.locatorType = SL_DATALOCATOR_OUTPUTMIX;
+        locator_outputmix.outputMix = pSles->outputmixObject;
+        pcm.formatType = SL_DATAFORMAT_PCM;
+        pcm.numChannels = pSles->channels;
+        pcm.samplesPerSec = pSles->sampleRate * 1000;
+        pcm.bitsPerSample = SL_PCMSAMPLEFORMAT_FIXED_16;
+        pcm.containerSize = 16;
+        pcm.channelMask = pSles->channels == 1 ? SL_SPEAKER_FRONT_CENTER :
+                (SL_SPEAKER_FRONT_LEFT | SL_SPEAKER_FRONT_RIGHT);
+        pcm.endianness = SL_BYTEORDER_LITTLEENDIAN;
+        audiosrc.pLocator = &locator_bufferqueue_tx;
+        audiosrc.pFormat = &pcm;
+        audiosnk.pLocator = &locator_outputmix;
+        audiosnk.pFormat = NULL;
+        pSles->playerObject = NULL;
+        pSles->recorderObject = NULL;
+        SLInterfaceID ids_tx[2] = {SL_IID_BUFFERQUEUE, SL_IID_ANDROIDCONFIGURATION};
+        SLboolean flags_tx[2] = {SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE};
+        result = (*engineEngine)->CreateAudioPlayer(engineEngine, &(pSles->playerObject),
+                &audiosrc, &audiosnk, 2, ids_tx, flags_tx);
+        if (SL_RESULT_CONTENT_UNSUPPORTED == result) {
+            fprintf(stderr, "Could not create audio player (result %x), check sample rate\n",
+                    result);
+            SLES_PRINTF("ERROR: Could not create audio player (result %x), check sample rate\n",
+                                                     result);
+            goto cleanup;
+        }
+        ASSERT_EQ(SL_RESULT_SUCCESS, result);
+
+        {
+           /* Get the Android configuration interface which is explicit */
+            SLAndroidConfigurationItf configItf;
+            result = (*(pSles->playerObject))->GetInterface(pSles->playerObject,
+                                                 SL_IID_ANDROIDCONFIGURATION, (void*)&configItf);
+            ASSERT_EQ(SL_RESULT_SUCCESS, result);
+
+            /* Use the configuration interface to configure the player before it's realized */
+            if (performanceMode != -1) {
+                SLuint32 performanceMode32 = performanceMode;
+                result = (*configItf)->SetConfiguration(configItf, SL_ANDROID_KEY_PERFORMANCE_MODE,
+                        &performanceMode32, sizeof(SLuint32));
+                ASSERT_EQ(SL_RESULT_SUCCESS, result);
+            }
+
+        }
+
+        result = (*(pSles->playerObject))->Realize(pSles->playerObject, SL_BOOLEAN_FALSE);
+        ASSERT_EQ(SL_RESULT_SUCCESS, result);
+        SLPlayItf playerPlay;
+        result = (*(pSles->playerObject))->GetInterface(pSles->playerObject, SL_IID_PLAY,
+                &playerPlay);
+        ASSERT_EQ(SL_RESULT_SUCCESS, result);
+        result = (*(pSles->playerObject))->GetInterface(pSles->playerObject, SL_IID_BUFFERQUEUE,
+                &(pSles->playerBufferQueue));
+        ASSERT_EQ(SL_RESULT_SUCCESS, result);
+        result = (*(pSles->playerBufferQueue))->RegisterCallback(pSles->playerBufferQueue,
+                playerCallback, pSles); //playerCallback is the name of callback function
+        ASSERT_EQ(SL_RESULT_SUCCESS, result);
+
+        // Enqueue some zero buffers for the player
+        for (j = 0; j < pSles->txBufCount; ++j) {
+
+            // allocate a free buffer
+            assert(pSles->freeFront != pSles->freeRear);
+            char *buffer = pSles->freeBuffers[pSles->freeFront];
+            if (++pSles->freeFront > pSles->freeBufCount) {
+                pSles->freeFront = 0;
+            }
+
+            // put on play queue
+            SLuint32 txRearNext = pSles->txRear + 1;
+            if (txRearNext > pSles->txBufCount) {
+                txRearNext = 0;
+            }
+            assert(txRearNext != pSles->txFront);
+            pSles->txBuffers[pSles->txRear] = buffer;
+            pSles->txRear = txRearNext;
+            result = (*(pSles->playerBufferQueue))->Enqueue(pSles->playerBufferQueue,
+                    buffer, pSles->bufSizeInBytes);
+            ASSERT_EQ(SL_RESULT_SUCCESS, result);
+        }
+
+        result = (*playerPlay)->SetPlayState(playerPlay, SL_PLAYSTATE_PLAYING);
+        ASSERT_EQ(SL_RESULT_SUCCESS, result);
+
+        // Create an audio recorder with microphone device source and buffer queue sink.
+        // The buffer queue as sink is an Android-specific extension.
+        SLDataLocator_IODevice locator_iodevice;
+        SLDataLocator_AndroidSimpleBufferQueue locator_bufferqueue_rx;
+
+        locator_iodevice.locatorType = SL_DATALOCATOR_IODEVICE;
+        locator_iodevice.deviceType = SL_IODEVICE_AUDIOINPUT;
+        locator_iodevice.deviceID = SL_DEFAULTDEVICEID_AUDIOINPUT;
+        locator_iodevice.device = NULL;
+
+        audiosrc.pLocator = &locator_iodevice;
+        audiosrc.pFormat = NULL;
+
+        locator_bufferqueue_rx.locatorType = SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE;
+        locator_bufferqueue_rx.numBuffers = pSles->rxBufCount;
+
+        audiosnk.pLocator = &locator_bufferqueue_rx;
+        audiosnk.pFormat = &pcm;
+
+        {   // scoped so the earlier "goto cleanup" does not jump over these initialized arrays
+            SLInterfaceID ids_rx[2] = {SL_IID_ANDROIDSIMPLEBUFFERQUEUE,
+                                       SL_IID_ANDROIDCONFIGURATION};
+            SLboolean flags_rx[2] = {SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE};
+            result = (*engineEngine)->CreateAudioRecorder(engineEngine, &(pSles->recorderObject),
+                    &audiosrc, &audiosnk, 2, ids_rx, flags_rx);
+            if (SL_RESULT_SUCCESS != result) {
+                fprintf(stderr, "Could not create audio recorder (result %x), "
+                        "check sample rate and channel count\n", result);
+                status = SLES_FAIL;
+
+                SLES_PRINTF("ERROR: Could not create audio recorder (result %x), "
+                             "check sample rate and channel count\n", result);
+                goto cleanup;
+            }
+        }
+        ASSERT_EQ(SL_RESULT_SUCCESS, result);
+
+        {
+           /* Get the Android configuration interface which is explicit */
+            SLAndroidConfigurationItf configItf;
+            result = (*(pSles->recorderObject))->GetInterface(pSles->recorderObject,
+                                                 SL_IID_ANDROIDCONFIGURATION, (void*)&configItf);
+            ASSERT_EQ(SL_RESULT_SUCCESS, result);
+
+            SLuint32 presetValue = micSource;
+            // e.g. SL_ANDROID_RECORDING_PRESET_CAMCORDER or SL_ANDROID_RECORDING_PRESET_NONE
+
+            /* Use the configuration interface to configure the recorder before it's realized */
+            if (presetValue != SL_ANDROID_RECORDING_PRESET_NONE) {
+                result = (*configItf)->SetConfiguration(configItf, SL_ANDROID_KEY_RECORDING_PRESET,
+                        &presetValue, sizeof(SLuint32));
+                ASSERT_EQ(SL_RESULT_SUCCESS, result);
+            }
+            if (performanceMode != -1) {
+                SLuint32 performanceMode32 = performanceMode;
+                result = (*configItf)->SetConfiguration(configItf, SL_ANDROID_KEY_PERFORMANCE_MODE,
+                        &performanceMode32, sizeof(SLuint32));
+                ASSERT_EQ(SL_RESULT_SUCCESS, result);
+            }
+
+        }
+
+        result = (*(pSles->recorderObject))->Realize(pSles->recorderObject, SL_BOOLEAN_FALSE);
+        ASSERT_EQ(SL_RESULT_SUCCESS, result);
+
+        SLRecordItf recorderRecord;
+        result = (*(pSles->recorderObject))->GetInterface(pSles->recorderObject, SL_IID_RECORD,
+                &recorderRecord);
+        ASSERT_EQ(SL_RESULT_SUCCESS, result);
+
+        result = (*(pSles->recorderObject))->GetInterface(pSles->recorderObject,
+                SL_IID_ANDROIDSIMPLEBUFFERQUEUE, &(pSles->recorderBufferQueue));
+        ASSERT_EQ(SL_RESULT_SUCCESS, result);
+
+        result = (*(pSles->recorderBufferQueue))->RegisterCallback(pSles->recorderBufferQueue,
+                recorderCallback, pSles);
+        ASSERT_EQ(SL_RESULT_SUCCESS, result);
+
+        // Enqueue some empty buffers for the recorder
+        for (j = 0; j < pSles->rxBufCount; ++j) {
+
+            // allocate a free buffer
+            assert(pSles->freeFront != pSles->freeRear);
+            char *buffer = pSles->freeBuffers[pSles->freeFront];
+            if (++pSles->freeFront > pSles->freeBufCount) {
+                pSles->freeFront = 0;
+            }
+
+            // put on record queue
+            SLuint32 rxRearNext = pSles->rxRear + 1;
+            if (rxRearNext > pSles->rxBufCount) {
+                rxRearNext = 0;
+            }
+            assert(rxRearNext != pSles->rxFront);
+            pSles->rxBuffers[pSles->rxRear] = buffer;
+            pSles->rxRear = rxRearNext;
+            result = (*(pSles->recorderBufferQueue))->Enqueue(pSles->recorderBufferQueue,
+                    buffer, pSles->bufSizeInBytes);
+            ASSERT_EQ(SL_RESULT_SUCCESS, result);
+        }
+
+        // Kick off the recorder
+        result = (*recorderRecord)->SetRecordState(recorderRecord, SL_RECORDSTATE_RECORDING);
+        ASSERT_EQ(SL_RESULT_SUCCESS, result);
+
+
+
+        // Tear down the objects and exit
+        status = SLES_SUCCESS;
+        cleanup:
+
+        SLES_PRINTF("Finished initialization with status: %d", status);
+
+    }
+    return status;
+}
+
+// Read data from fifo2Buffer and store into pSamples.
+int slesProcessNext(sles_data *pSles, double *pSamples, long maxSamples) {
+    //int status = SLES_FAIL;
+
+    SLES_PRINTF("slesProcessNext: pSles = %p, currentSample: %p,  maxSamples = %ld",
+                pSles, pSamples, maxSamples);
+
+    int samplesRead = 0;
+
+    int currentSample = 0;
+    double *pCurrentSample = pSamples;
+    int maxValue = 32768;
+
+    if (pSles != NULL) {
+
+        SLresult result;
+        for (int i = 0; i < 10; i++) {
+            usleep(100000);         // sleep for 0.1s
+            if (pSles->fifo2Buffer != NULL) {
+                for (;;) {
+                    short buffer[pSles->bufSizeInFrames * pSles->channels];
+                    ssize_t actual = audio_utils_fifo_read(&(pSles->fifo2), buffer,
+                            pSles->bufSizeInFrames);
+                    if (actual <= 0)
+                        break;
+                    {
+                        for (int jj = 0; jj < actual && currentSample < maxSamples; jj++) {
+                            *(pCurrentSample++) = ((double) buffer[jj]) / maxValue;
+                            currentSample++;
+                        }
+                    }
+                    samplesRead += actual;
+                }
+            }
+            if (pSles->injectImpulse > 0) {
+                if (pSles->injectImpulse <= 100) {
+                    pSles->injectImpulse = -1;
+                    write(1, "I", 1);
+                } else {
+                    if ((pSles->injectImpulse % 1000) < 100) {
+                        write(1, "i", 1);
+                    }
+                    pSles->injectImpulse -= 100;
+                }
+            } else if (i == 9) {
+                write(1, ".", 1);
+            }
+        }
+        SLBufferQueueState playerBQState;
+        result = (*(pSles->playerBufferQueue))->GetState(pSles->playerBufferQueue,
+                  &playerBQState);
+        ASSERT_EQ(SL_RESULT_SUCCESS, result);
+        SLAndroidSimpleBufferQueueState recorderBQState;
+        result = (*(pSles->recorderBufferQueue))->GetState(pSles->recorderBufferQueue,
+                  &recorderBQState);
+        ASSERT_EQ(SL_RESULT_SUCCESS, result);
+
+        SLES_PRINTF("End of slesProcessNext: pSles = %p, samplesRead = %d, maxSamples = %ld",
+                    pSles, samplesRead, maxSamples);
+    }
+    return samplesRead;
+}
+
+
+int slesDestroyServer(sles_data *pSles) {
+    int status = SLES_FAIL;
+
+     SLES_PRINTF("Start slesDestroyServer: pSles = %p", pSles);
+
+    if (pSles != NULL) {
+        if (NULL != pSles->playerObject) {
+            SLES_PRINTF("stopping player...");
+            SLPlayItf playerPlay;
+            SLresult result = (*(pSles->playerObject))->GetInterface(pSles->playerObject,
+                                                        SL_IID_PLAY, &playerPlay);
+
+            ASSERT_EQ(SL_RESULT_SUCCESS, result);
+
+            // stop the player; the recorder, if present, is stopped below
+            result = (*playerPlay)->SetPlayState(playerPlay, SL_PLAYSTATE_STOPPED);
+            ASSERT_EQ(SL_RESULT_SUCCESS, result);
+        }
+
+        if (NULL != pSles->recorderObject) {
+            SLES_PRINTF("stopping recorder...");
+            SLRecordItf recorderRecord;
+            SLresult result = (*(pSles->recorderObject))->GetInterface(pSles->recorderObject,
+                                                          SL_IID_RECORD, &recorderRecord);
+            ASSERT_EQ(SL_RESULT_SUCCESS, result);
+
+            result = (*recorderRecord)->SetRecordState(recorderRecord, SL_RECORDSTATE_STOPPED);
+            ASSERT_EQ(SL_RESULT_SUCCESS, result);
+        }
+
+        usleep(1000);
+
+        audio_utils_fifo_deinit(&(pSles->fifo));
+        delete[] pSles->fifoBuffer;
+
+        SLES_PRINTF("slesDestroyServer 2");
+
+        //        if (sndfile != NULL) {
+        audio_utils_fifo_deinit(&(pSles->fifo2));
+        delete[] pSles->fifo2Buffer;
+
+        SLES_PRINTF("slesDestroyServer 3");
+
+        //            sf_close(sndfile);
+        //        }
+        if (NULL != pSles->playerObject) {
+            (*(pSles->playerObject))->Destroy(pSles->playerObject);
+        }
+
+        SLES_PRINTF("slesDestroyServer 4");
+
+        if (NULL != pSles->recorderObject) {
+            (*(pSles->recorderObject))->Destroy(pSles->recorderObject);
+        }
+
+        SLES_PRINTF("slesDestroyServer 5");
+
+        (*(pSles->outputmixObject))->Destroy(pSles->outputmixObject);
+        SLES_PRINTF("slesDestroyServer 6");
+        (*(pSles->engineObject))->Destroy(pSles->engineObject);
+        SLES_PRINTF("slesDestroyServer 7");
+
+        //free buffers
+        if (NULL != pSles->freeBuffers) {
+            for (unsigned j = 0; j < pSles->freeBufCount; ++j) {
+                if (NULL != pSles->freeBuffers[j]) {
+                    SLES_PRINTF(" free buff%d at %p",j, pSles->freeBuffers[j]);
+                    free (pSles->freeBuffers[j]);
+                }
+            }
+            SLES_PRINTF("  free freeBuffers at %p", pSles->freeBuffers);
+            free(pSles->freeBuffers);
+        } else {
+            SLES_PRINTF("  freeBuffers NULL, no need to free");
+        }
+
+
+        if (NULL != pSles->rxBuffers) {
+            SLES_PRINTF("  free rxBuffers at %p", pSles->rxBuffers);
+            free(pSles->rxBuffers);
+        } else {
+            SLES_PRINTF("  rxBuffers NULL, no need to free");
+        }
+
+        if (NULL != pSles->txBuffers) {
+            SLES_PRINTF("  free txBuffers at %p", pSles->txBuffers);
+            free(pSles->txBuffers);
+        } else {
+            SLES_PRINTF("  txBuffers NULL, no need to free");
+        }
+
+
+        status = SLES_SUCCESS;
+    }
+    SLES_PRINTF("End slesDestroyServer: status = %d", status);
+    return status;
+}
+
+
+int* slesGetRecorderBufferPeriod(sles_data *pSles) {
+    return pSles->recorderBufferStats.buffer_period;
+}
+
+int slesGetRecorderMaxBufferPeriod(sles_data *pSles) {
+    return pSles->recorderBufferStats.max_buffer_period;
+}
+
+int64_t slesGetRecorderVarianceBufferPeriod(sles_data *pSles) {
+    return pSles->recorderBufferStats.var;
+}
+
+int* slesGetPlayerBufferPeriod(sles_data *pSles) {
+    return pSles->playerBufferStats.buffer_period;
+}
+
+int slesGetPlayerMaxBufferPeriod(sles_data *pSles) {
+    return pSles->playerBufferStats.max_buffer_period;
+}
+
+int64_t slesGetPlayerVarianceBufferPeriod(sles_data *pSles) {
+    return pSles->playerBufferStats.var;
+}
+
+int slesGetCaptureRank(sles_data *pSles) {
+    // clear the capture flags since they're being handled now
+    int recorderRank = android_atomic_exchange(0, &pSles->recorderBufferStats.captureRank);
+    int playerRank = android_atomic_exchange(0, &pSles->playerBufferStats.captureRank);
+
+    if (recorderRank > playerRank) {
+        return recorderRank;
+    } else {
+        return playerRank;
+    }
+}
diff --git a/LoopbackApp/app/src/main/jni/sles.h b/LoopbackApp/app/src/main/jni/sles.h
new file mode 100644
index 0000000..c176656
--- /dev/null
+++ b/LoopbackApp/app/src/main/jni/sles.h
@@ -0,0 +1,174 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <SLES/OpenSLES.h>
+#include <SLES/OpenSLES_Android.h>
+#include <pthread.h>
+#include <android/log.h>
+#include <jni.h>
+#include <stdbool.h>
+
+#ifndef _Included_org_drrickorang_loopback_sles
+#define _Included_org_drrickorang_loopback_sles
+
+//struct audio_utils_fifo;
+#define SLES_PRINTF(...)  __android_log_print(ANDROID_LOG_INFO, "sles_jni", __VA_ARGS__);
+
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+#include <audio_utils/fifo.h>
+
+typedef struct {
+    int* timeStampsMs;          // Array of milliseconds since first callback
+    short* callbackDurations;   // Array of milliseconds between callback and previous callback
+    short index;                // Current write position
+    struct timespec startTime;  // Time of first callback {seconds,nanoseconds}
+    int capacity;               // Total number of callback times/lengths that can be recorded
+    bool exceededCapacity;      // Set only if late callbacks come after array is full
+} callbackTimeStamps;
+
+typedef struct {
+    int* buffer_period;
+    struct timespec previous_time;
+    struct timespec current_time;
+    int buffer_count;
+    int max_buffer_period;
+
+    volatile int32_t captureRank;   // Set > 0 when the callback requests a systrace/bug report
+
+    int measurement_count; // number of measurements which were actually recorded
+    int64_t SDM; // sum of squares of deviations from the expected mean
+    int64_t var; // variance in nanoseconds^2
+} bufferStats;
+
+//TODO fix this
+typedef struct {
+    SLuint32 rxBufCount;     // -r#
+    SLuint32 txBufCount;     // -t#
+    SLuint32 bufSizeInFrames;  // -f#
+    SLuint32 channels;       // -c#
+    SLuint32 sampleRate; // -s#
+    SLuint32 exitAfterSeconds; // -e#
+    SLuint32 freeBufCount;   // calculated
+    SLuint32 bufSizeInBytes; // calculated
+    int injectImpulse; // -i#i
+    size_t totalDiscardedInputFrames;   // total number of input frames discarded
+    int ignoreFirstFrames;
+
+    // Storage area for the buffer queues
+    char **rxBuffers;
+    char **txBuffers;
+    char **freeBuffers;
+
+    // Buffer indices
+    SLuint32 rxFront;    // oldest recording
+    SLuint32 rxRear;     // next to be recorded
+    SLuint32 txFront;    // oldest playing
+    SLuint32 txRear;     // next to be played
+    SLuint32 freeFront;  // oldest free
+    SLuint32 freeRear;   // next to be freed
+
+    struct audio_utils_fifo fifo;   // jitter buffer between recorder and player callbacks,
+                                    // to mitigate unpredictable phase difference between these,
+                                    // or even concurrent callbacks on two CPU cores
+    struct audio_utils_fifo fifo2;  // For sending data to java code (to plot it)
+    short *fifo2Buffer;
+    short *fifoBuffer;
+    SLAndroidSimpleBufferQueueItf recorderBufferQueue;
+    SLBufferQueueItf playerBufferQueue;
+
+    //other things that belong here
+    SLObjectItf playerObject;
+    SLObjectItf recorderObject;
+    SLObjectItf outputmixObject;
+    SLObjectItf engineObject;
+
+    bufferStats recorderBufferStats;
+    bufferStats playerBufferStats;
+
+    int testType;
+    double frequency1;
+    double bufferTestPhase1;
+    int count;
+    char* byteBufferPtr;
+    int byteBufferLength;
+
+    short* loopbackTone;
+
+    callbackTimeStamps recorderTimeStamps;
+    callbackTimeStamps playerTimeStamps;
+    short expectedBufferPeriod;
+} sles_data;
+
+#define NANOS_PER_SECOND 1000000000
+#define NANOS_PER_MILLI 1000000
+#define MILLIS_PER_SECOND 1000
+
+// how late in ms a callback must be to trigger a systrace/bugreport
+#define LATE_CALLBACK_CAPTURE_THRESHOLD 4
+#define LATE_CALLBACK_OUTLIER_THRESHOLD 1   // ms late before a callback counts as an outlier
+#define BUFFER_PERIOD_DISCARD 10            // initial callbacks ignored while streams settle
+#define BUFFER_PERIOD_DISCARD_FULL_DUPLEX_PARTNER 2  // same, for the full-duplex partner stream
+
+enum {
+    SLES_SUCCESS = 0,
+    SLES_FAIL = 1,
+    RANGE = 1002,             // histogram length: bins for 0..1000 ms plus one overflow bin
+    TEST_TYPE_LATENCY = 222,
+    TEST_TYPE_BUFFER_PERIOD = 223
+} SLES_STATUS_ENUM;
+
+int slesInit(sles_data ** ppSles, int samplingRate, int frameCount, int micSource,
+             int performanceMode,
+             int testType, double frequency1, char* byteBufferPtr, int byteBufferLength,
+             short* loopbackTone, int maxRecordedLateCallbacks, int ignoreFirstFrames);
+
+//note the double pointer to properly free the memory of the structure
+int slesDestroy(sles_data ** ppSles);
+
+
+///full
+int slesFull(sles_data *pSles);
+
+int slesCreateServer(sles_data *pSles, int samplingRate, int frameCount, int micSource,
+                     int performanceMode,
+                     int testType, double frequency1, char* byteBufferPtr, int byteBufferLength,
+                     short* loopbackTone, int maxRecordedLateCallbacks, int ignoreFirstFrames);
+int slesProcessNext(sles_data *pSles, double *pSamples, long maxSamples);
+int slesDestroyServer(sles_data *pSles);
+int* slesGetRecorderBufferPeriod(sles_data *pSles);
+int slesGetRecorderMaxBufferPeriod(sles_data *pSles);
+int64_t slesGetRecorderVarianceBufferPeriod(sles_data *pSles);
+int* slesGetPlayerBufferPeriod(sles_data *pSles);
+int slesGetPlayerMaxBufferPeriod(sles_data *pSles);
+int64_t slesGetPlayerVarianceBufferPeriod(sles_data *pSles);
+int slesGetCaptureRank(sles_data *pSles);
+
+void initBufferStats(bufferStats *stats);
+void collectBufferPeriod(bufferStats *stats, bufferStats *fdpStats, callbackTimeStamps *timeStamps,
+                         short expectedBufferPeriod);
+bool updateBufferStats(bufferStats *stats, int64_t diff_in_nano, int expectedBufferPeriod);
+void recordTimeStamp(callbackTimeStamps *timeStamps,
+                     int64_t callbackDuration, int64_t timeStamp);
+
+ssize_t byteBuffer_write(sles_data *pSles, char *buffer, size_t count);
+
+#ifdef __cplusplus
+}
+#endif
+#endif //_Included_org_drrickorang_loopback_sles
diff --git a/LoopbackApp/app/src/main/res/drawable-xxxhdpi/ic_assessment.png b/LoopbackApp/app/src/main/res/drawable-xxxhdpi/ic_assessment.png
new file mode 100644
index 0000000..47e6b52
--- /dev/null
+++ b/LoopbackApp/app/src/main/res/drawable-xxxhdpi/ic_assessment.png
Binary files differ
diff --git a/LoopbackApp/app/src/main/res/drawable-xxxhdpi/ic_description.png b/LoopbackApp/app/src/main/res/drawable-xxxhdpi/ic_description.png
new file mode 100644
index 0000000..687d5f8
--- /dev/null
+++ b/LoopbackApp/app/src/main/res/drawable-xxxhdpi/ic_description.png
Binary files differ
diff --git a/LoopbackApp/app/src/main/res/drawable-xxxhdpi/ic_help.png b/LoopbackApp/app/src/main/res/drawable-xxxhdpi/ic_help.png
new file mode 100644
index 0000000..c5b4f68
--- /dev/null
+++ b/LoopbackApp/app/src/main/res/drawable-xxxhdpi/ic_help.png
Binary files differ
diff --git a/LoopbackApp/app/src/main/res/drawable-xxxhdpi/ic_help_outline.png b/LoopbackApp/app/src/main/res/drawable-xxxhdpi/ic_help_outline.png
new file mode 100644
index 0000000..dc5cdee
--- /dev/null
+++ b/LoopbackApp/app/src/main/res/drawable-xxxhdpi/ic_help_outline.png
Binary files differ
diff --git a/LoopbackApp/app/src/main/res/drawable-xxxhdpi/ic_launcher.png b/LoopbackApp/app/src/main/res/drawable-xxxhdpi/ic_launcher.png
new file mode 100644
index 0000000..df5851e
--- /dev/null
+++ b/LoopbackApp/app/src/main/res/drawable-xxxhdpi/ic_launcher.png
Binary files differ
diff --git a/LoopbackApp/app/src/main/res/drawable-xxxhdpi/ic_play_arrow.png b/LoopbackApp/app/src/main/res/drawable-xxxhdpi/ic_play_arrow.png
new file mode 100644
index 0000000..6f7a047
--- /dev/null
+++ b/LoopbackApp/app/src/main/res/drawable-xxxhdpi/ic_play_arrow.png
Binary files differ
diff --git a/LoopbackApp/app/src/main/res/drawable-xxxhdpi/ic_report.png b/LoopbackApp/app/src/main/res/drawable-xxxhdpi/ic_report.png
new file mode 100644
index 0000000..4c44889
--- /dev/null
+++ b/LoopbackApp/app/src/main/res/drawable-xxxhdpi/ic_report.png
Binary files differ
diff --git a/LoopbackApp/app/src/main/res/drawable-xxxhdpi/ic_save.png b/LoopbackApp/app/src/main/res/drawable-xxxhdpi/ic_save.png
new file mode 100644
index 0000000..fbc4acd
--- /dev/null
+++ b/LoopbackApp/app/src/main/res/drawable-xxxhdpi/ic_save.png
Binary files differ
diff --git a/LoopbackApp/app/src/main/res/drawable-xxxhdpi/ic_settings.png b/LoopbackApp/app/src/main/res/drawable-xxxhdpi/ic_settings.png
new file mode 100644
index 0000000..8276847
--- /dev/null
+++ b/LoopbackApp/app/src/main/res/drawable-xxxhdpi/ic_settings.png
Binary files differ
diff --git a/LoopbackApp/app/src/main/res/drawable-xxxhdpi/ic_stop.png b/LoopbackApp/app/src/main/res/drawable-xxxhdpi/ic_stop.png
new file mode 100644
index 0000000..0255c0e
--- /dev/null
+++ b/LoopbackApp/app/src/main/res/drawable-xxxhdpi/ic_stop.png
Binary files differ
diff --git a/LoopbackApp/app/src/main/res/drawable-xxxhdpi/ic_zoom_in.png b/LoopbackApp/app/src/main/res/drawable-xxxhdpi/ic_zoom_in.png
new file mode 100644
index 0000000..cc66362
--- /dev/null
+++ b/LoopbackApp/app/src/main/res/drawable-xxxhdpi/ic_zoom_in.png
Binary files differ
diff --git a/LoopbackApp/app/src/main/res/drawable-xxxhdpi/ic_zoom_out.png b/LoopbackApp/app/src/main/res/drawable-xxxhdpi/ic_zoom_out.png
new file mode 100644
index 0000000..6b72870
--- /dev/null
+++ b/LoopbackApp/app/src/main/res/drawable-xxxhdpi/ic_zoom_out.png
Binary files differ
diff --git a/LoopbackApp/app/src/main/res/drawable-xxxhdpi/ic_zoom_out_full.png b/LoopbackApp/app/src/main/res/drawable-xxxhdpi/ic_zoom_out_full.png
new file mode 100644
index 0000000..79152d2
--- /dev/null
+++ b/LoopbackApp/app/src/main/res/drawable-xxxhdpi/ic_zoom_out_full.png
Binary files differ
diff --git a/LoopbackApp/app/src/main/res/drawable-xxxhdpi/offtogglebutton.9.png b/LoopbackApp/app/src/main/res/drawable-xxxhdpi/offtogglebutton.9.png
new file mode 100644
index 0000000..205e009
--- /dev/null
+++ b/LoopbackApp/app/src/main/res/drawable-xxxhdpi/offtogglebutton.9.png
Binary files differ
diff --git a/LoopbackApp/app/src/main/res/drawable-xxxhdpi/ontogglebutton.9.png b/LoopbackApp/app/src/main/res/drawable-xxxhdpi/ontogglebutton.9.png
new file mode 100644
index 0000000..3f7f8df
--- /dev/null
+++ b/LoopbackApp/app/src/main/res/drawable-xxxhdpi/ontogglebutton.9.png
Binary files differ
diff --git a/LoopbackApp/app/src/main/res/drawable-xxxhdpi/togglebutton_state_drawable.xml b/LoopbackApp/app/src/main/res/drawable-xxxhdpi/togglebutton_state_drawable.xml
new file mode 100644
index 0000000..37b7374
--- /dev/null
+++ b/LoopbackApp/app/src/main/res/drawable-xxxhdpi/togglebutton_state_drawable.xml
@@ -0,0 +1,9 @@
+<?xml version="1.0" encoding="utf-8"?>
+<selector xmlns:android="http://schemas.android.com/apk/res/android">
+    <item
+        android:drawable="@drawable/ontogglebutton"
+        android:state_checked="true" />
+    <item
+        android:drawable="@drawable/offtogglebutton"
+        android:state_checked="false" />
+</selector>
\ No newline at end of file
diff --git a/LoopbackApp/app/src/main/res/layout/about_activity.xml b/LoopbackApp/app/src/main/res/layout/about_activity.xml
new file mode 100644
index 0000000..1c17cf7
--- /dev/null
+++ b/LoopbackApp/app/src/main/res/layout/about_activity.xml
@@ -0,0 +1,31 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2015 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+
+<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
+    android:orientation="vertical"
+    android:layout_width="match_parent"
+    android:layout_height="match_parent"
+    android:background="#FFFFFF">
+
+    <TextView
+        android:id="@+id/AboutInfo"
+        android:layout_width="match_parent"
+        android:layout_height="match_parent"
+        android:text="@string/AboutInfo"
+        android:textSize="15sp"
+        android:autoLink="all" />
+
+</LinearLayout>
\ No newline at end of file
diff --git a/LoopbackApp/app/src/main/res/layout/heatmap_window.xml b/LoopbackApp/app/src/main/res/layout/heatmap_window.xml
new file mode 100644
index 0000000..d1190a6
--- /dev/null
+++ b/LoopbackApp/app/src/main/res/layout/heatmap_window.xml
@@ -0,0 +1,32 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2016 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+
+<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
+    android:orientation="vertical"
+    android:layout_width="match_parent"
+    android:layout_height="match_parent"
+    android:background="#FFFFFF">
+
+<!--    <ScrollView
+        android:id="@+id/heatMapScroll"
+        android:layout_width="match_parent"
+        android:layout_height="wrap_content"
+        android:scrollbars="vertical"
+        android:fadeScrollbars="false"
+        android:fillViewport="true">
+    </ScrollView>-->
+
+</LinearLayout>
\ No newline at end of file
diff --git a/LoopbackApp/app/src/main/res/layout/main_activity.xml b/LoopbackApp/app/src/main/res/layout/main_activity.xml
new file mode 100644
index 0000000..072afde
--- /dev/null
+++ b/LoopbackApp/app/src/main/res/layout/main_activity.xml
@@ -0,0 +1,283 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2014 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+
+<LinearLayout
+    xmlns:android="http://schemas.android.com/apk/res/android"
+    xmlns:tools="http://schemas.android.com/tools"
+    android:id="@+id/linearLayoutMain"
+    android:layout_width="match_parent"
+    android:layout_height="match_parent"
+    android:orientation="vertical"
+    android:background="#FFFFFF">
+    <HorizontalScrollView
+        android:id="@+id/ScrollView1"
+        android:layout_width="wrap_content"
+        android:layout_height="wrap_content">
+        <LinearLayout
+            xmlns:android="http://schemas.android.com/apk/res/android"
+            xmlns:tools="http://schemas.android.com/tools"
+            android:layout_width="match_parent"
+            android:layout_height="match_parent"
+            android:orientation="horizontal">
+
+            <Button
+                xmlns:android="http://schemas.android.com/apk/res/android"
+                android:id="@+id/buttonStartLatencyTest"
+                android:layout_width="wrap_content"
+                android:layout_height="wrap_content"
+                android:text="@string/buttonTest_enabled"
+                android:drawableLeft="@drawable/ic_play_arrow"
+                android:drawableStart="@drawable/ic_play_arrow"
+                style="@style/TextAppearance.AppCompat.Button"
+                android:onClick="onButtonLatencyTest"/>
+
+            <Button
+                xmlns:android="http://schemas.android.com/apk/res/android"
+                android:id="@+id/buttonStartBufferTest"
+                android:layout_width="wrap_content"
+                android:layout_height="wrap_content"
+                android:text="@string/buttonBufferTest"
+                android:drawableLeft="@drawable/ic_play_arrow"
+                android:drawableStart="@drawable/ic_play_arrow"
+                style="@style/TextAppearance.AppCompat.Button"
+                android:onClick="onButtonBufferTest"/>
+
+        </LinearLayout>
+    </HorizontalScrollView>
+
+    <Button
+        xmlns:android="http://schemas.android.com/apk/res/android"
+        android:id="@+id/buttonCalibrateSoundLevel"
+        android:layout_width="wrap_content"
+        android:layout_height="wrap_content"
+        android:text="@string/buttonCalibrateSoundLevel"
+        android:drawableLeft="@drawable/ic_play_arrow"
+        android:drawableStart="@drawable/ic_play_arrow"
+        style="@style/TextAppearance.AppCompat.Button"
+        android:onClick="onButtonCalibrateSoundLevel" />
+
+    <TextView
+        android:id="@+id/textInfo"
+        android:layout_width="wrap_content"
+        android:layout_height="wrap_content"
+        android:textColor="#000000"
+        android:text="@string/labelInfo"/>
+
+    <LinearLayout
+        android:orientation="horizontal"
+        android:layout_width="fill_parent"
+        android:layout_height="wrap_content">
+
+        <TextView
+            android:layout_width="wrap_content"
+            android:layout_height="wrap_content"
+            android:text="Current Level"
+            android:id="@+id/textViewCurrentLevel"/>
+
+        <SeekBar
+            android:id="@+id/BarMasterLevel"
+            android:indeterminate="false"
+            android:max="100"
+            android:progress="0"
+            android:layout_width="fill_parent"
+            android:layout_height="wrap_content"
+            style="?android:attr/progressBarStyleHorizontal" />
+    </LinearLayout>
+
+    <HorizontalScrollView
+        android:id="@+id/glitchReportPanel"
+        android:layout_width="wrap_content"
+        android:layout_height="wrap_content"
+        android:visibility="invisible">
+        <LinearLayout
+            xmlns:android="http://schemas.android.com/apk/res/android"
+            xmlns:tools="http://schemas.android.com/tools"
+            android:layout_width="match_parent"
+            android:layout_height="wrap_content"
+            android:orientation="vertical">
+
+            <LinearLayout
+                xmlns:android="http://schemas.android.com/apk/res/android"
+                xmlns:tools="http://schemas.android.com/tools"
+                android:layout_width="match_parent"
+                android:layout_height="wrap_content"
+                android:orientation="horizontal">
+                <Button
+                    xmlns:android="http://schemas.android.com/apk/res/android"
+                    android:id="@+id/buttonRecorderBufferPeriod"
+                    android:layout_width="wrap_content"
+                    android:layout_height="wrap_content"
+                    android:drawableLeft="@drawable/ic_assessment"
+                    style="@style/TextAppearance.AppCompat.Button"
+                    android:text="@string/buttonRecorderBufferPeriod"
+                    android:onClick="onButtonRecorderBufferPeriod"/>
+
+                <Button
+                    xmlns:android="http://schemas.android.com/apk/res/android"
+                    android:id="@+id/buttonPlayerBufferPeriod"
+                    android:layout_width="wrap_content"
+                    android:layout_height="wrap_content"
+                    android:drawableLeft="@drawable/ic_assessment"
+                    style="@style/TextAppearance.AppCompat.Button"
+                    android:text="@string/buttonPlayerBufferPeriod"
+                    android:onClick="onButtonPlayerBufferPeriod"/>
+            </LinearLayout>
+            <LinearLayout
+                xmlns:android="http://schemas.android.com/apk/res/android"
+                xmlns:tools="http://schemas.android.com/tools"
+                android:layout_width="match_parent"
+                android:layout_height="wrap_content"
+                android:orientation="horizontal">
+                <Button
+                    xmlns:android="http://schemas.android.com/apk/res/android"
+                    android:id="@+id/buttonGlitches"
+                    android:layout_width="wrap_content"
+                    android:layout_height="wrap_content"
+                    android:drawableLeft="@drawable/ic_description"
+                    style="@style/TextAppearance.AppCompat.Button"
+                    android:text="@string/buttonGlitches"
+                    android:onClick="onButtonGlitches"/>
+                <Button
+                    android:id="@+id/buttonHeatMap"
+                    android:layout_width="wrap_content"
+                    android:layout_height="wrap_content"
+                    android:drawableLeft="@drawable/ic_assessment"
+                    style="@style/TextAppearance.AppCompat.Button"
+                    android:text="@string/compareAll"
+                    android:onClick="onButtonHeatMap"/>
+            </LinearLayout>
+        </LinearLayout>
+    </HorizontalScrollView>
+
+    <LinearLayout
+        android:orientation="horizontal"
+        android:layout_width="match_parent"
+        android:layout_height="wrap_content">
+
+        <TextView
+            android:layout_width="250dp"
+            android:layout_height="wrap_content"
+            android:text=""
+            android:id="@+id/resultSummary"
+            android:textStyle="bold"/>
+    </LinearLayout>
+
+    <LinearLayout
+        android:layout_marginTop="0mm"
+        android:layout_width="fill_parent"
+        android:layout_height="wrap_content"
+        android:layout_gravity="center_vertical"
+        android:gravity="center_horizontal"
+        android:orientation="horizontal"
+        android:layout_weight="1">
+
+        <org.drrickorang.loopback.WavePlotView
+            android:id="@+id/viewWavePlot"
+            android:layout_width="wrap_content"
+            android:layout_height="wrap_content"
+            android:layout_weight="1"/>
+    </LinearLayout>
+
+    <RelativeLayout
+        android:id="@+id/zoomAndSaveControlPanel"
+        android:layout_width="match_parent"
+        android:layout_height="wrap_content"
+        android:layout_gravity="left"
+        android:padding="10dp"
+        android:visibility="invisible"
+        android:orientation="horizontal">
+
+        <LinearLayout
+            android:layout_width="wrap_content"
+            android:layout_height="wrap_content"
+            android:layout_alignParentLeft="true">
+
+            <ImageButton
+                xmlns:android="http://schemas.android.com/apk/res/android"
+                android:id="@+id/buttonZoomOutFull"
+                android:layout_width="40dp"
+                android:layout_height="40dp"
+                android:paddingEnd="5dp"
+                android:paddingRight="5dp"
+                android:paddingLeft="5dp"
+                android:text="@string/buttonZoomOutFull"
+                android:src="@drawable/ic_zoom_out_full"
+                android:background="@null"
+                android:onClick="onButtonZoomOutFull"/>
+
+            <ImageButton
+                xmlns:android="http://schemas.android.com/apk/res/android"
+                android:id="@+id/buttonZoomOut"
+                android:layout_width="40dp"
+                android:layout_height="40dp"
+                android:paddingEnd="5dp"
+                android:paddingRight="5dp"
+                android:paddingLeft="5dp"
+                android:text="@string/buttonZoomOut"
+                android:src="@drawable/ic_zoom_out"
+                android:background="@null"
+                android:onClick="onButtonZoomOut"/>
+
+            <ImageButton
+                xmlns:android="http://schemas.android.com/apk/res/android"
+                android:id="@+id/buttonZoomIn"
+                android:layout_width="40dp"
+                android:layout_height="40dp"
+                android:paddingEnd="5dp"
+                android:paddingRight="5dp"
+                android:paddingLeft="5dp"
+                android:text="@string/buttonZoomIn"
+                android:src="@drawable/ic_zoom_in"
+                android:background="@null"
+                android:onClick="onButtonZoomIn"/>
+
+        </LinearLayout>
+        <LinearLayout
+            android:layout_width="wrap_content"
+            android:layout_height="wrap_content"
+            android:layout_alignParentRight="true">
+
+            <ImageButton
+                xmlns:android="http://schemas.android.com/apk/res/android"
+                android:id="@+id/buttonReport"
+                android:layout_width="40dp"
+                android:layout_height="40dp"
+                android:paddingEnd="5dp"
+                android:paddingRight="5dp"
+                android:paddingLeft="5dp"
+                android:src="@drawable/ic_report"
+                android:text="@string/buttonSave"
+                android:background="@null"
+                android:onClick="onButtonReport"/>
+
+            <ImageButton
+                xmlns:android="http://schemas.android.com/apk/res/android"
+                android:id="@+id/buttonSave"
+                android:layout_width="40dp"
+                android:layout_height="40dp"
+                android:paddingEnd="5dp"
+                android:paddingRight="5dp"
+                android:paddingLeft="5dp"
+                android:src="@drawable/ic_save"
+                android:text="@string/buttonSave"
+                android:background="@null"
+                android:onClick="onButtonSave"/>
+
+        </LinearLayout>
+
+    </RelativeLayout>
+
+</LinearLayout>
diff --git a/LoopbackApp/app/src/main/res/layout/player_buffer_period_activity.xml b/LoopbackApp/app/src/main/res/layout/player_buffer_period_activity.xml
new file mode 100644
index 0000000..285c48e
--- /dev/null
+++ b/LoopbackApp/app/src/main/res/layout/player_buffer_period_activity.xml
@@ -0,0 +1,46 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2015 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+
+<LinearLayout
+    xmlns:android="http://schemas.android.com/apk/res/android"
+    xmlns:custom="http://schemas.android.com/apk/res-auto"
+    android:orientation="vertical"
+    android:layout_width="match_parent"
+    android:layout_height="match_parent">
+
+    <TextView
+        android:id="@+id/writeHistogramInfo"
+        android:layout_width="wrap_content"
+        android:layout_height="wrap_content"
+        android:layout_gravity="center"
+        android:text="@string/WriteHistTitle"/>
+
+    <LinearLayout
+        android:layout_marginTop="0mm"
+        android:layout_width="fill_parent"
+        android:layout_height="wrap_content"
+        android:layout_gravity="center_vertical"
+        android:gravity="center_horizontal"
+        android:orientation="horizontal"
+        android:layout_weight="1">
+        <org.drrickorang.loopback.HistogramView
+            android:id="@+id/viewWriteHistogram"
+            android:layout_width="fill_parent"
+            android:layout_height="fill_parent"
+            android:layout_weight="1" />
+    </LinearLayout>
+
+</LinearLayout>
\ No newline at end of file
diff --git a/LoopbackApp/app/src/main/res/layout/recorder_buffer_period_activity.xml b/LoopbackApp/app/src/main/res/layout/recorder_buffer_period_activity.xml
new file mode 100644
index 0000000..49b8113
--- /dev/null
+++ b/LoopbackApp/app/src/main/res/layout/recorder_buffer_period_activity.xml
@@ -0,0 +1,47 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2015 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+
+<LinearLayout
+    xmlns:android="http://schemas.android.com/apk/res/android"
+    xmlns:custom="http://schemas.android.com/apk/res-auto"
+    android:orientation="vertical" android:layout_width="match_parent"
+    android:layout_height="match_parent">
+
+    <TextView
+        android:id="@+id/readHistogramInfo"
+        android:layout_width="wrap_content"
+        android:layout_height="wrap_content"
+        android:layout_gravity="center"
+        android:text="@string/ReadHistTitle"/>
+
+    <LinearLayout
+        android:layout_marginTop="0mm"
+        android:layout_width="fill_parent"
+        android:layout_height="wrap_content"
+        android:layout_gravity="center_vertical"
+        android:gravity="center_horizontal"
+        android:orientation="horizontal"
+        android:layout_weight="1">
+        <org.drrickorang.loopback.HistogramView
+            android:id="@+id/viewReadHistogram"
+            android:layout_width="fill_parent"
+            android:layout_height="fill_parent"
+            android:layout_weight="1" />
+    </LinearLayout>
+
+</LinearLayout>
+
+
diff --git a/LoopbackApp/app/src/main/res/layout/report_window.xml b/LoopbackApp/app/src/main/res/layout/report_window.xml
new file mode 100644
index 0000000..6039374
--- /dev/null
+++ b/LoopbackApp/app/src/main/res/layout/report_window.xml
@@ -0,0 +1,38 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2015 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+
+<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
+    android:orientation="vertical"
+    android:layout_width="match_parent"
+    android:layout_height="match_parent"
+    android:background="#FFFFFF">
+
+    <ScrollView
+        android:id="@+id/ReportScroll"
+        android:layout_width="match_parent"
+        android:layout_height="wrap_content"
+        android:scrollbars="vertical"
+        android:fadeScrollbars="false"
+        android:fillViewport="true">
+        <TextView
+            android:id="@+id/ReportInfo"
+            android:padding="10dp"
+            android:layout_width="match_parent"
+            android:layout_height="match_parent"
+            android:textSize="15sp" />
+    </ScrollView>
+
+</LinearLayout>
\ No newline at end of file
diff --git a/LoopbackApp/app/src/main/res/layout/settings_activity.xml b/LoopbackApp/app/src/main/res/layout/settings_activity.xml
new file mode 100644
index 0000000..8e9986a
--- /dev/null
+++ b/LoopbackApp/app/src/main/res/layout/settings_activity.xml
@@ -0,0 +1,381 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2015 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+
+<LinearLayout
+    xmlns:android="http://schemas.android.com/apk/res/android"
+    xmlns:tools="http://schemas.android.com/tools"
+    android:id="@+id/settingsMainLayout"
+    android:layout_width="match_parent"
+    android:layout_height="match_parent"
+    android:orientation="vertical"
+    android:background="#FFFFFF">
+
+    <ScrollView
+        android:layout_width="fill_parent"
+        android:layout_height="wrap_content">
+        <LinearLayout
+            android:layout_width="wrap_content"
+            android:layout_height="wrap_content"
+            android:orientation="vertical"
+            android:paddingBottom="150dp">
+
+            <TextView
+                android:id="@+id/textSettingsInfo"
+                android:layout_width="wrap_content"
+                android:layout_height="wrap_content"
+                android:text="@string/labelSettings"
+                android:elegantTextHeight="false" />
+
+            <View
+                android:layout_width="fill_parent"
+                android:layout_height="1dp"
+                android:background="@android:color/darker_gray"/>
+
+            <TextView
+                android:id="@+id/textMicSource"
+                android:layout_width="wrap_content"
+                android:layout_height="wrap_content"
+                android:text="@string/labelMicSource"/>
+            <Spinner
+                android:id="@+id/spinnerMicSource"
+                android:layout_width="fill_parent"
+                android:layout_height="wrap_content"/>
+
+            <View
+                android:layout_width="fill_parent"
+                android:layout_height="1dp"
+                android:background="@android:color/darker_gray"/>
+
+            <TextView
+                android:id="@+id/textPerformanceMode"
+                android:layout_width="wrap_content"
+                android:layout_height="wrap_content"
+                android:text="@string/labelPerformanceMode"/>
+            <Spinner
+                android:id="@+id/spinnerPerformanceMode"
+                android:layout_width="fill_parent"
+                android:layout_height="wrap_content"/>
+
+            <View
+                android:layout_width="fill_parent"
+                android:layout_height="1dp"
+                android:background="@android:color/darker_gray"/>
+
+            <TextView
+                android:id="@+id/textAudioThreadType"
+                android:layout_width="wrap_content"
+                android:layout_height="wrap_content"
+                android:text="@string/labelAudioThreadType"/>
+            <Spinner
+                android:id="@+id/spinnerAudioThreadType"
+                android:layout_width="fill_parent"
+                android:layout_height="wrap_content"/>
+
+            <View
+                android:layout_width="fill_parent"
+                android:layout_height="1dp"
+                android:background="@android:color/darker_gray"/>
+
+            <TextView
+                android:id="@+id/textChannelIndex"
+                android:layout_width="wrap_content"
+                android:layout_height="wrap_content"
+                android:text="@string/labelChannelIndex"/>
+            <Spinner
+                android:id="@+id/spinnerChannelIndex"
+                android:layout_width="fill_parent"
+                android:layout_height="wrap_content"/>
+
+            <View
+                android:layout_width="fill_parent"
+                android:layout_height="1dp"
+                android:background="@android:color/darker_gray"/>
+
+            <Button
+                xmlns:android="http://schemas.android.com/apk/res/android"
+                android:id="@+id/buttonDefaultSettings"
+                android:layout_width="wrap_content"
+                android:layout_height="wrap_content"
+                android:text="@string/buttonDefaultSettings"
+                android:onClick="onButtonClick"/>
+
+            <TextView
+                android:id="@+id/textSamplingRate"
+                android:layout_width="wrap_content"
+                android:layout_height="wrap_content"
+                android:text="@string/labelSamplingRate"/>
+
+            <Spinner
+                android:id="@+id/spinnerSamplingRate"
+                android:layout_width="fill_parent"
+                android:layout_height="wrap_content"/>
+
+            <View
+                android:layout_width="fill_parent"
+                android:layout_height="1dp"
+                android:background="@android:color/darker_gray"/>
+
+            <org.drrickorang.loopback.SettingsPicker
+                android:id="@+id/playerBufferSetting"
+                android:layout_width="match_parent"
+                android:layout_height="wrap_content"/>
+
+            <View
+                android:layout_width="fill_parent"
+                android:layout_height="1dp"
+                android:background="@android:color/darker_gray"/>
+
+
+            <org.drrickorang.loopback.SettingsPicker
+                android:id="@+id/recorderBufferSetting"
+                android:layout_width="match_parent"
+                android:layout_height="wrap_content"/>
+
+            <View
+                android:layout_width="fill_parent"
+                android:layout_height="1dp"
+                android:background="@android:color/darker_gray"/>
+
+
+            <org.drrickorang.loopback.SettingsPicker
+                android:id="@+id/bufferTestDurationSetting"
+                android:layout_width="match_parent"
+                android:layout_height="wrap_content"/>
+
+            <View
+                android:layout_width="fill_parent"
+                android:layout_height="1dp"
+                android:background="@android:color/darker_gray"/>
+
+
+            <org.drrickorang.loopback.SettingsPicker
+                android:id="@+id/wavePlotDurationSetting"
+                android:layout_width="match_parent"
+                android:layout_height="wrap_content"/>
+
+            <View
+                android:layout_width="fill_parent"
+                android:layout_height="1dp"
+                android:background="@android:color/darker_gray" />
+
+
+            <org.drrickorang.loopback.SettingsPicker
+                android:id="@+id/numLoadThreadsSetting"
+                android:layout_width="match_parent"
+                android:layout_height="wrap_content" />
+
+            <View
+                android:layout_width="fill_parent"
+                android:layout_height="1dp"
+                android:background="@android:color/darker_gray"/>
+
+            <LinearLayout
+                android:orientation="horizontal"
+                android:layout_width="match_parent"
+                android:layout_height="80dp"
+                android:padding="15dp">
+
+                <RelativeLayout
+                    android:layout_width="0dip"
+                    android:layout_height="match_parent"
+                    android:layout_weight="3">
+                    <ToggleButton
+                        android:id="@+id/SystraceEnabledToggle"
+                        android:layout_width="match_parent"
+                        android:layout_height="match_parent"
+                        android:layout_marginRight="15dp"
+                        android:background="@drawable/togglebutton_state_drawable"
+                        android:textOn="Enabled"
+                        android:textOff="Disabled"/>
+                </RelativeLayout>
+                <RelativeLayout
+                    android:layout_width="0dip"
+                    android:layout_height="match_parent"
+                    android:layout_weight="6">
+                    <TextView
+                        android:layout_width="wrap_content"
+                        android:layout_height="wrap_content"
+                        android:text="@string/enableSystrace"/>
+                </RelativeLayout>
+                <RelativeLayout
+                    android:layout_width="0dip"
+                    android:layout_height="match_parent"
+                    android:layout_weight="1">
+                    <ImageView
+                        android:layout_width="match_parent"
+                        android:layout_height="match_parent"
+                        android:onClick="onButtonHelp"
+                        android:id="@+id/buttonSystraceHelp"
+                        android:src="@drawable/ic_help_outline"/>
+                </RelativeLayout>
+            </LinearLayout>
+
+            <View
+                android:layout_width="fill_parent"
+                android:layout_height="1dp"
+                android:background="@android:color/darker_gray"/>
+
+            <LinearLayout
+                android:orientation="horizontal"
+                android:layout_width="match_parent"
+                android:layout_height="80dp"
+                android:padding="15dp">
+
+                <RelativeLayout
+                    android:layout_width="0dp"
+                    android:layout_height="match_parent"
+                    android:layout_weight="3">
+                    <ToggleButton
+                        android:id="@+id/BugreportEnabledToggle"
+                        android:layout_width="match_parent"
+                        android:layout_height="match_parent"
+                        android:layout_marginRight="15dp"
+                        android:background="@drawable/togglebutton_state_drawable"
+                        android:textOn="Enabled"
+                        android:textOff="Disabled"/>
+                </RelativeLayout>
+                <RelativeLayout
+                    android:layout_width="0dp"
+                    android:layout_height="match_parent"
+                    android:layout_weight="6">
+                    <TextView
+                        android:layout_width="wrap_content"
+                        android:layout_height="wrap_content"
+                        android:text="@string/enableBugreport"/>
+                </RelativeLayout>
+                <RelativeLayout
+                    android:layout_width="0dip"
+                    android:layout_height="match_parent"
+                    android:layout_weight="1">
+                    <ImageView
+                        android:layout_width="match_parent"
+                        android:layout_height="match_parent"
+                        android:onClick="onButtonHelp"
+                        android:id="@+id/buttonBugreportHelp"
+                        android:src="@drawable/ic_help_outline"/>
+                </RelativeLayout>
+            </LinearLayout>
+
+            <View
+                android:layout_width="fill_parent"
+                android:layout_height="1dp"
+                android:background="@android:color/darker_gray"/>
+
+            <LinearLayout
+                android:orientation="horizontal"
+                android:layout_width="match_parent"
+                android:layout_height="80dp"
+                android:padding="15dp">
+
+                <RelativeLayout
+                    android:layout_width="0dp"
+                    android:layout_height="match_parent"
+                    android:layout_weight="3">
+                    <ToggleButton
+                        android:id="@+id/wavSnippetsEnabledToggle"
+                        android:layout_width="match_parent"
+                        android:layout_height="match_parent"
+                        android:layout_marginRight="15dp"
+                        android:background="@drawable/togglebutton_state_drawable"
+                        android:textOn="Enabled"
+                        android:textOff="Disabled"/>
+                </RelativeLayout>
+                <RelativeLayout
+                    android:layout_width="0dp"
+                    android:layout_height="match_parent"
+                    android:layout_weight="6">
+                    <TextView
+                        android:layout_width="wrap_content"
+                        android:layout_height="wrap_content"
+                        android:text="@string/enableWavSnippets"/>
+                </RelativeLayout>
+                <RelativeLayout
+                    android:layout_width="0dip"
+                    android:layout_height="match_parent"
+                    android:layout_weight="1">
+                <!-- This empty layout is to match spacing of SystraceEnabled layout -->
+                </RelativeLayout>
+            </LinearLayout>
+
+            <View
+                android:layout_width="fill_parent"
+                android:layout_height="1dp"
+                android:background="@android:color/darker_gray"/>
+
+            <org.drrickorang.loopback.SettingsPicker
+                android:id="@+id/numCapturesSettingPicker"
+                android:layout_width="match_parent"
+                android:layout_height="wrap_content"/>
+
+            <View
+                android:layout_width="fill_parent"
+                android:layout_height="1dp"
+                android:background="@android:color/darker_gray"/>
+
+            <org.drrickorang.loopback.SettingsPicker
+                android:id="@+id/ignoreFirstFramesSettingPicker"
+                android:layout_width="match_parent"
+                android:layout_height="wrap_content"/>
+
+            <View
+                android:layout_width="fill_parent"
+                android:layout_height="1dp"
+                android:background="@android:color/darker_gray"/>
+
+            <LinearLayout
+                android:orientation="horizontal"
+                android:layout_width="match_parent"
+                android:layout_height="80dp"
+                android:padding="15dp">
+
+                <RelativeLayout
+                    android:layout_width="0dp"
+                    android:layout_height="match_parent"
+                    android:layout_weight="3">
+                    <ToggleButton
+                        android:id="@+id/soundLevelCalibrationEnabledToggle"
+                        android:layout_width="match_parent"
+                        android:layout_height="match_parent"
+                        android:layout_marginRight="15dp"
+                        android:background="@drawable/togglebutton_state_drawable"
+                        android:textOn="Enabled"
+                        android:textOff="Disabled"/>
+                </RelativeLayout>
+                <RelativeLayout
+                    android:layout_width="0dp"
+                    android:layout_height="match_parent"
+                    android:layout_weight="6">
+                    <TextView
+                        android:layout_width="wrap_content"
+                        android:layout_height="wrap_content"
+                        android:text="@string/enableSoundLevelCalibration"/>
+                </RelativeLayout>
+                <RelativeLayout
+                    android:layout_width="0dip"
+                    android:layout_height="match_parent"
+                    android:layout_weight="1">
+                    <ImageView
+                        android:layout_width="match_parent"
+                        android:layout_height="match_parent"
+                        android:onClick="onButtonHelp"
+                        android:id="@+id/buttonCalibrateSoundLevelHelp"
+                        android:src="@drawable/ic_help_outline"/>
+                </RelativeLayout>
+            </LinearLayout>
+
+        </LinearLayout>
+    </ScrollView>
+</LinearLayout>
diff --git a/LoopbackApp/app/src/main/res/layout/settings_picker.xml b/LoopbackApp/app/src/main/res/layout/settings_picker.xml
new file mode 100644
index 0000000..e533228
--- /dev/null
+++ b/LoopbackApp/app/src/main/res/layout/settings_picker.xml
@@ -0,0 +1,63 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2016 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+
+<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
+    android:orientation="vertical"
+    android:layout_width="match_parent"
+    android:layout_height="wrap_content">
+
+    <TextView
+        android:id="@+id/settings_title"
+        android:layout_width="wrap_content"
+        android:layout_height="wrap_content"
+        android:paddingBottom="20dp"/>
+
+    <LinearLayout
+        android:layout_width="match_parent"
+        android:layout_height="wrap_content"
+        android:paddingBottom="10dp"
+        android:orientation="horizontal">
+
+        <RelativeLayout
+            android:layout_width="0dp"
+            android:layout_height="match_parent"
+            android:layout_weight="3">
+
+            <org.drrickorang.loopback.CatchEventsEditText
+                android:id="@+id/settings_valueText"
+                android:inputType="number"
+                android:layout_width="match_parent"
+                android:layout_height="wrap_content"
+                android:textSize="20sp"
+                android:imeOptions="actionDone"
+                android:selectAllOnFocus="true"
+                android:gravity="center" />
+        </RelativeLayout>
+
+        <RelativeLayout
+            android:layout_width="0dp"
+            android:layout_height="match_parent"
+            android:layout_weight="7">
+
+            <SeekBar
+                android:id="@+id/settings_seekbar"
+                android:layout_width="match_parent"
+                android:layout_height="wrap_content" />
+        </RelativeLayout>
+
+    </LinearLayout>
+
+</LinearLayout>
\ No newline at end of file
diff --git a/LoopbackApp/app/src/main/res/menu/tool_bar_menu.xml b/LoopbackApp/app/src/main/res/menu/tool_bar_menu.xml
new file mode 100644
index 0000000..0d98cd9
--- /dev/null
+++ b/LoopbackApp/app/src/main/res/menu/tool_bar_menu.xml
@@ -0,0 +1,13 @@
+<?xml version="1.0" encoding="utf-8"?>
+<menu xmlns:android="http://schemas.android.com/apk/res/android">
+    <item
+        android:id="@+id/action_help"
+        android:icon="@drawable/ic_help"
+        android:title="@string/buttonAbout"
+        android:showAsAction="always"/>
+
+    <item android:id="@+id/action_settings"
+        android:icon="@drawable/ic_settings"
+        android:title="@string/buttonSettings"
+        android:showAsAction="always"/>
+</menu>
diff --git a/LoopbackApp/app/src/main/res/raw/loopback_listener b/LoopbackApp/app/src/main/res/raw/loopback_listener
new file mode 100644
index 0000000..a29b0c9
--- /dev/null
+++ b/LoopbackApp/app/src/main/res/raw/loopback_listener
@@ -0,0 +1,70 @@
+#!/system/bin/sh
+
+####
+# Continuously checks for the presence of the signal file at 1-second intervals.
+# Reads either output filenames or the termination signal from that file when it exists.
+# Writes the output of atrace and bugreport to the supplied filenames.
+####
+
+SYSTRACE_SUFFIX=".trace";
+BUGREPORT_SUFFIX="_bugreport.txt.gz";
+TERMINATE_SIGNAL="QUIT";
+SIGNAL_FILE="/sdcard/Loopback/loopback_signal"
+TRACE_CATEGORIES="sched audio $@"
+BUFFER_KB="8000"
+
+function exitListener {
+    # Stop atrace, remove the signal file, and exit
+
+    echo "LOOPBACK LISTENER: stopping trace before exiting"
+    rm $SIGNAL_FILE
+    atrace --async_stop -z > /dev/null
+    echo "LOOPBACK LISTENER: exiting"
+    exit 1
+}
+
+# Begin an asynchronous systrace writing into a circular buffer of size BUFFER_KB
+echo "LOOPBACK LISTENER: starting trace"
+atrace --async_start -z -c -b $BUFFER_KB $TRACE_CATEGORIES
+echo " "
+
+# Remove signal file erroneously left behind from previous tests
+if [ -e "$SIGNAL_FILE" ]; then rm $SIGNAL_FILE; fi
+
+while true
+do
+    #echo "LOOPBACK LISTENER: checking for file $SIGNAL_FILE"
+    if [ -e "$SIGNAL_FILE" ] && [ -s "$SIGNAL_FILE" ]
+    then
+        contents=$(cat $SIGNAL_FILE)
+
+        # Ensure that, if more than one listener is running, only one will consume the signal
+        > $SIGNAL_FILE
+
+        if [ "$contents" == $TERMINATE_SIGNAL ]
+        then
+            exitListener
+        else
+            for filename in $contents
+            do
+                case $filename in
+                *$SYSTRACE_SUFFIX)
+                    echo "LOOPBACK LISTENER: dumping systrace to file $filename"
+                    atrace --async_dump -z -c -b $BUFFER_KB $TRACE_CATEGORIES > $filename
+                    ;;
+
+                *$BUGREPORT_SUFFIX)
+                    echo "LOOPBACK LISTENER: dumping bugreport to file $filename"
+                    bugreport | gzip > $filename
+                    ;;
+
+                esac
+            done
+
+            echo "LOOPBACK LISTENER: Finished capture"
+
+            rm $SIGNAL_FILE
+        fi
+    fi
+    sleep 1
+done
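Editor's note (not part of the patch): a hedged sketch of driving the listener above from a host machine over adb. The signal-file path and the QUIT token come from the script itself; the capture filenames are illustrative assumptions.

    # Start the listener, adding the optional "load" and "am" trace categories
    # (the in-app help text below uses the same invocation form).
    adb shell "sh /sdcard/Loopback/loopback_listener load am" &

    # Ask the running listener to dump a systrace and a gzipped bugreport.
    adb shell "echo /sdcard/Loopback/capture_1.trace /sdcard/Loopback/capture_1_bugreport.txt.gz > /sdcard/Loopback/loopback_signal"

    # Tell the listener to stop the trace and exit.
    adb shell "echo QUIT > /sdcard/Loopback/loopback_signal"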
diff --git a/LoopbackApp/app/src/main/res/values/strings.xml b/LoopbackApp/app/src/main/res/values/strings.xml
new file mode 100644
index 0000000..bb9d69f
--- /dev/null
+++ b/LoopbackApp/app/src/main/res/values/strings.xml
@@ -0,0 +1,193 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2014 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+
+<resources>
+
+    <string name="app_name">Loopback App</string>
+
+    <string name="buttonPlay_play">Refresh Screen</string>
+    <string name="buttonPlay_pause">Pause</string>
+    <string name="buttonTest_enabled">Round-Trip\nLatency Test</string>
+    <string name="buttonTest_disabled">FX Disabled Loopback 2</string>
+    <string name="buttonSave">Save Results</string>
+    <string name="buttonZoomOutFull">Unzoom</string>
+    <string name="buttonZoomOut">Zoom Out</string>
+    <string name="buttonZoomIn"> Zoom In</string>
+    <string name="buttonAbout">About</string>
+    <string name="buttonRecorderBufferPeriod">Recorder</string>
+    <string name="buttonPlayerBufferPeriod">Player</string>
+    <string name="ReadHistTitle">Frequency vs. Recorder Buffer Period (ms) Plot</string>
+    <string name="WriteHistTitle">Frequency vs. Player Buffer Period (ms) Plot</string>
+    <string name="buttonBufferTest">Buffer Period\n&amp; Glitch Test</string>
+    <string name="buttonCalibrateSoundLevel">Calibrate Sound Level Now</string>
+    <string name="buttonGlitches">Glitches</string>
+    <string name="numGlitches">Total Number of Glitches:</string>
+
+    <!-- disabled -->
+    <string name="buttonZoomInFull">In Full</string>
+
+    <string name="buttonSettings">Settings</string>
+
+
+    <string name="labelMicSource">Microphone Source</string>
+    <string-array name="mic_source_array">
+        <item>DEFAULT</item>
+        <item>MIC</item>
+        <item>CAMCORDER</item>
+        <item>VOICE_RECOGNITION</item>
+        <item>VOICE_COMMUNICATION</item>
+        <item>REMOTE_SUBMIX (Java only)</item>
+        <item>UNPROCESSED (N or later)</item>
+    </string-array>
+
+    <string name="labelPerformanceMode">Performance Mode</string>
+    <string-array name="performance_mode_array">
+        <item>DEFAULT</item>
+        <item>NONE</item>
+        <item>LATENCY</item>
+        <item>LATENCY_EFFECTS</item>
+        <item>POWER_SAVING</item>
+    </string-array>
+
+    <string name="labelInfo">Test settings will appear here after the first test is run</string>
+    <string name="labelSettings">SETTINGS</string>
+    <string name="labelAbout">About</string>
+    <string name="labelSamplingRate">Sampling Rate</string>
+    <string name="AboutInfo">Round-trip audio latency testing app\n
+        using the Dr. Rick O\'Rang audio loopback dongle.\n
+        Authors: Ricardo Garcia (rago), Tzu-Yin Tai, and Brandon Swanson\n
+        Open source project on:\n
+        https://github.com/gkasten/drrickorang\n
+        References:\n
+        https://source.android.com/devices/audio/latency.html\n
+        https://goo.gl/dxcw0d\n\n\n
+        adb parameters:  all parameters are optional. If not specified, defaults will be used.\n
+        -ei SF \t\t\t\t\t\t\t\t\t\t\t\t ####\t\t sampling frequency \n
+        -es Filename \t\t\t\t\t\t\t ssss\t\t\t output filename \n
+        -ei MicSource \t\t\t\t\t\t\t ####\t\t microphone source\n
+        \t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t 0: DEFAULT\n
+        \t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t 1: MIC\n
+        \t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t 2: CAMCORDER\n
+        \t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t 3: VOICE_RECOGNITION\n
+        \t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t 4: VOICE_COMMUNICATION\n
+        -ei AudioThread \t\t\t\t\t ####\t\t Audio Thread Type\n
+        \t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t 0: Java\n
+        \t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t 1: Native (JNI)\n
+        -ei AudioLevel \t\t\t\t\t\t ####\t\t Audio Level [0:15]\n
+        -ei RecorderBuffer \t\t\t\t ####\t\t Recorder Buffer Frames\n
+        -ei PlayerBuffer \t\t\t\t\t\t ####\t\t Player Buffer Frames\n
+        -ei TestType \t\t\t\t\t\t\t\t ####\t\t Audio Test Type\n
+        \t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t 222: Latency Test\n
+        \t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t 223: Buffer Test\n
+        -ei BufferTestDuration \t ####\t\t Buffer Test Duration \n
+        -ei NumLoadThreads \t ####\t\t Number of Simulated Load Threads (0 - 20) \n
+        -ei CI \t ####\t\t Channel Index for USB Audio (0 - 8) \n
+        -ez CaptureSysTrace \t ####\t\t enable/disable systrace and bugreport capturing \n
+        -ez CaptureWavs \t ####\t\t enable/disable .wav file snippets capturing \n
+        -ei NumCaptures \t ####\t\t Number of systrace/bugreport and/or wav snippets to capture \n
+        -ei WaveDuration \t ####\t\t Duration in seconds of captured wave files
+        \n\n\n
+
+        Example: adb shell am start -n org.drrickorang.loopback/.LoopbackActivity \n
+        --ei SF 48000 --es FileName output --ei MicSource 3 --ei AudioThread 1 --ei AudioLevel 12
+        --ei TestType 223 --ei BufferTestDuration 5 --ez CaptureWavs true --ei NumCaptures 5
+    </string>
+
+    <!-- spinnerSamplingRate Options -->
+    <string-array name="samplingRate_array">
+        <item>8000</item>
+        <item>11025</item>
+        <item>22050</item>
+        <item>44100</item>
+        <item>48000</item>
+    </string-array>
+
+    <string name="labelAudioThreadType">Audio Thread Type</string>
+
+    <!-- spinnerAudioThreadType Options -->
+    <string-array name="audioThreadType_array">
+        <item>Java</item>
+        <item>native (JNI)</item>
+    </string-array>
+
+    <string name="labelChannelIndex">Channel Index</string>
+
+    <!-- spinnerChannelIndex Options -->
+    <string-array name="channelIndex_array">
+        <item>Mono</item>
+        <item>0</item>
+        <item>1</item>
+        <item>2</item>
+        <item>3</item>
+        <item>4</item>
+        <item>5</item>
+        <item>6</item>
+        <item>7</item>
+    </string-array>
+
+    <string name="labelPlayerBuffer">Player Buffer (Frames) (Max: %1$d)</string>
+    <string name="labelRecorderBuffer">Recorder Buffer (Frames) (Max: %1$d)</string>
+    <string name="buttonDefaultSettings">Compute Default Settings</string>
+    <string name="buttonRecordDefault">System Default Recorder Buffer</string>
+    <string name="labelBufferTestDuration">Buffer Test Duration (Seconds) (Max: %1$d)</string>
+    <string name="labelBufferTestWavePlotDuration">Buffer Test Wave Plot Duration (Seconds)
+                                                   (Max: %1$d)</string>
+    <string name="loadThreadsLabel">Number of Simulated Load Threads</string>
+    <string name="enableSystrace">Systrace Captures During Test</string>
+    <string name="enableBugreport">BugReport Captures During Test</string>
+    <string name="enableWavSnippets">Wav Snippet Captures During Test</string>
+    <string name="enableSoundLevelCalibration">
+        Calibrate sound level before latency test (experimental)</string>
+    <string name="numCapturesSetting">Number of Systrace/BugReport and or Wav Snippets to Capture
+    </string>
+    <string name="labelIgnoreFirstFrames">
+        Frames to ignore at the start of the latency test (Max: %1$d)</string>
+
+    <string name="SaveFileDialogLabel">Save Files To:</string>
+    <string name="SaveFileDialogOK">//mnt/sdcard/</string>
+    <string name="SaveFileDialogChooseFilenames">Choose Filenames \n and Location</string>
+    <string name="heatTitle">Glitches and Callbacks over Time</string>
+    <string name="compareAll">Compare All</string>
+
+    <string name="systraceHelp">
+        To use this feature it is necessary to launch a shell script on the Android device using ADB
+        shell. This script is responsible for launching an asynchronous Systrace and writing its
+        buffer to a file when signaled by the Loopback App.
+        \n\n
+        At the completion of the audio glitch/buffer test, the Loopback App signals the script to
+        exit, so the script must be started before each test.
+        \n\n
+        The Loopback App places this script on the Android device when launched at
+        \n
+        /sdcard/Loopback/loopback_listener
+        \n\n
+
+        Trace Categories: The loopback_listener script takes as arguments the categories to be
+        tracked in the Systrace. By default the sched and audio categories are included.  For a list
+        of available categories see\n
+        developer.android.com/tools/help/systrace.html
+        \n\n
+
+        Example invocation:\n
+        adb shell \"sh /sdcard/Loopback/loopback_listener load am\"
+    </string>
+
+    <string name="calibrateSoundLevelHelp">
+        This feature is highly experimental. It will try various volume levels until it picks one
+        that it thinks is optimal. It has only been tested with the loopback plug and may fail
+        completely in open air.
+    </string>
+</resources>
diff --git a/LoopbackApp/build.gradle b/LoopbackApp/build.gradle
new file mode 100644
index 0000000..f1cc249
--- /dev/null
+++ b/LoopbackApp/build.gradle
@@ -0,0 +1,15 @@
+// Top-level build file where you can add configuration options common to all sub-projects/modules.
+buildscript {
+    repositories {
+        jcenter()
+    }
+    dependencies {
+        classpath 'com.android.tools.build:gradle-experimental:0.9.0'
+    }
+}
+
+allprojects {
+    repositories {
+        jcenter()
+    }
+}
diff --git a/LoopbackApp/gradle/wrapper/gradle-wrapper.jar b/LoopbackApp/gradle/wrapper/gradle-wrapper.jar
new file mode 100644
index 0000000..8c0fb64
--- /dev/null
+++ b/LoopbackApp/gradle/wrapper/gradle-wrapper.jar
Binary files differ
diff --git a/LoopbackApp/gradle/wrapper/gradle-wrapper.properties b/LoopbackApp/gradle/wrapper/gradle-wrapper.properties
new file mode 100644
index 0000000..dde259e
--- /dev/null
+++ b/LoopbackApp/gradle/wrapper/gradle-wrapper.properties
@@ -0,0 +1,6 @@
+#Tue Mar 21 12:29:44 PDT 2017
+distributionBase=GRADLE_USER_HOME
+distributionPath=wrapper/dists
+zipStoreBase=GRADLE_USER_HOME
+zipStorePath=wrapper/dists
+distributionUrl=https\://services.gradle.org/distributions/gradle-3.3-all.zip
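The wrapper properties above pin the build to the Gradle 3.3 distribution and cache it under GRADLE_USER_HOME. A quick check, assuming the wrapper scripts added below are present and executable, that the pinned distribution is the one actually used:

    # The first invocation downloads gradle-3.3-all.zip into
    # GRADLE_USER_HOME/wrapper/dists, then reports the resolved Gradle version.
    ./gradlew --version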
diff --git a/LoopbackApp/gradlew b/LoopbackApp/gradlew
new file mode 100755
index 0000000..91a7e26
--- /dev/null
+++ b/LoopbackApp/gradlew
@@ -0,0 +1,164 @@
+#!/usr/bin/env bash
+
+##############################################################################
+##
+##  Gradle start up script for UN*X
+##
+##############################################################################
+
+# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
+DEFAULT_JVM_OPTS=""
+
+APP_NAME="Gradle"
+APP_BASE_NAME=`basename "$0"`
+
+# Use the maximum available, or set MAX_FD != -1 to use that value.
+MAX_FD="maximum"
+
+warn ( ) {
+    echo "$*"
+}
+
+die ( ) {
+    echo
+    echo "$*"
+    echo
+    exit 1
+}
+
+# OS specific support (must be 'true' or 'false').
+cygwin=false
+msys=false
+darwin=false
+case "`uname`" in
+  CYGWIN* )
+    cygwin=true
+    ;;
+  Darwin* )
+    darwin=true
+    ;;
+  MINGW* )
+    msys=true
+    ;;
+esac
+
+# For Cygwin, ensure paths are in UNIX format before anything is touched.
+if $cygwin ; then
+    [ -n "$JAVA_HOME" ] && JAVA_HOME=`cygpath --unix "$JAVA_HOME"`
+fi
+
+# Attempt to set APP_HOME
+# Resolve links: $0 may be a link
+PRG="$0"
+# Need this for relative symlinks.
+while [ -h "$PRG" ] ; do
+    ls=`ls -ld "$PRG"`
+    link=`expr "$ls" : '.*-> \(.*\)$'`
+    if expr "$link" : '/.*' > /dev/null; then
+        PRG="$link"
+    else
+        PRG=`dirname "$PRG"`"/$link"
+    fi
+done
+SAVED="`pwd`"
+cd "`dirname \"$PRG\"`/" >&-
+APP_HOME="`pwd -P`"
+cd "$SAVED" >&-
+
+CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
+
+# Determine the Java command to use to start the JVM.
+if [ -n "$JAVA_HOME" ] ; then
+    if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
+        # IBM's JDK on AIX uses strange locations for the executables
+        JAVACMD="$JAVA_HOME/jre/sh/java"
+    else
+        JAVACMD="$JAVA_HOME/bin/java"
+    fi
+    if [ ! -x "$JAVACMD" ] ; then
+        die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
+
+Please set the JAVA_HOME variable in your environment to match the
+location of your Java installation."
+    fi
+else
+    JAVACMD="java"
+    which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
+
+Please set the JAVA_HOME variable in your environment to match the
+location of your Java installation."
+fi
+
+# Increase the maximum file descriptors if we can.
+if [ "$cygwin" = "false" -a "$darwin" = "false" ] ; then
+    MAX_FD_LIMIT=`ulimit -H -n`
+    if [ $? -eq 0 ] ; then
+        if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
+            MAX_FD="$MAX_FD_LIMIT"
+        fi
+        ulimit -n $MAX_FD
+        if [ $? -ne 0 ] ; then
+            warn "Could not set maximum file descriptor limit: $MAX_FD"
+        fi
+    else
+        warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
+    fi
+fi
+
+# For Darwin, add options to specify how the application appears in the dock
+if $darwin; then
+    GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
+fi
+
+# For Cygwin, switch paths to Windows format before running java
+if $cygwin ; then
+    APP_HOME=`cygpath --path --mixed "$APP_HOME"`
+    CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
+
+    # We build the pattern for arguments to be converted via cygpath
+    ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
+    SEP=""
+    for dir in $ROOTDIRSRAW ; do
+        ROOTDIRS="$ROOTDIRS$SEP$dir"
+        SEP="|"
+    done
+    OURCYGPATTERN="(^($ROOTDIRS))"
+    # Add a user-defined pattern to the cygpath arguments
+    if [ "$GRADLE_CYGPATTERN" != "" ] ; then
+        OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
+    fi
+    # Now convert the arguments - kludge to limit ourselves to /bin/sh
+    i=0
+    for arg in "$@" ; do
+        CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
+        CHECK2=`echo "$arg"|egrep -c "^-"`                                 ### Determine if an option
+
+        if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then                    ### Added a condition
+            eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
+        else
+            eval `echo args$i`="\"$arg\""
+        fi
+        i=$((i+1))
+    done
+    case $i in
+        (0) set -- ;;
+        (1) set -- "$args0" ;;
+        (2) set -- "$args0" "$args1" ;;
+        (3) set -- "$args0" "$args1" "$args2" ;;
+        (4) set -- "$args0" "$args1" "$args2" "$args3" ;;
+        (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
+        (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
+        (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
+        (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
+        (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
+    esac
+fi
+
+# Split up the DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS values into an array, following the shell quoting and substitution rules
+function splitJvmOpts() {
+    JVM_OPTS=("$@")
+}
+eval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS
+JVM_OPTS[${#JVM_OPTS[*]}]="-Dorg.gradle.appname=$APP_BASE_NAME"
+
+exec "$JAVACMD" "${JVM_OPTS[@]}" -classpath "$CLASSPATH" org.gradle.wrapper.GradleWrapperMain "$@"
diff --git a/LoopbackApp/gradlew.bat b/LoopbackApp/gradlew.bat
new file mode 100644
index 0000000..aec9973
--- /dev/null
+++ b/LoopbackApp/gradlew.bat
@@ -0,0 +1,90 @@
+@if "%DEBUG%" == "" @echo off

+@rem ##########################################################################

+@rem

+@rem  Gradle startup script for Windows

+@rem

+@rem ##########################################################################

+

+@rem Set local scope for the variables with windows NT shell

+if "%OS%"=="Windows_NT" setlocal

+

+@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.

+set DEFAULT_JVM_OPTS=

+

+set DIRNAME=%~dp0

+if "%DIRNAME%" == "" set DIRNAME=.

+set APP_BASE_NAME=%~n0

+set APP_HOME=%DIRNAME%

+

+@rem Find java.exe

+if defined JAVA_HOME goto findJavaFromJavaHome

+

+set JAVA_EXE=java.exe

+%JAVA_EXE% -version >NUL 2>&1

+if "%ERRORLEVEL%" == "0" goto init

+

+echo.

+echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.

+echo.

+echo Please set the JAVA_HOME variable in your environment to match the

+echo location of your Java installation.

+

+goto fail

+

+:findJavaFromJavaHome

+set JAVA_HOME=%JAVA_HOME:"=%

+set JAVA_EXE=%JAVA_HOME%/bin/java.exe

+

+if exist "%JAVA_EXE%" goto init

+

+echo.

+echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%

+echo.

+echo Please set the JAVA_HOME variable in your environment to match the

+echo location of your Java installation.

+

+goto fail

+

+:init

+@rem Get command-line arguments, handling Windowz variants

+

+if not "%OS%" == "Windows_NT" goto win9xME_args

+if "%@eval[2+2]" == "4" goto 4NT_args

+

+:win9xME_args

+@rem Slurp the command line arguments.

+set CMD_LINE_ARGS=

+set _SKIP=2

+

+:win9xME_args_slurp

+if "x%~1" == "x" goto execute

+

+set CMD_LINE_ARGS=%*

+goto execute

+

+:4NT_args

+@rem Get arguments from the 4NT Shell from JP Software

+set CMD_LINE_ARGS=%$

+

+:execute

+@rem Setup the command line

+

+set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar

+

+@rem Execute Gradle

+"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%

+

+:end

+@rem End local scope for the variables with windows NT shell

+if "%ERRORLEVEL%"=="0" goto mainEnd

+

+:fail

+rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of

+rem the _cmd.exe /c_ return code!

+if  not "" == "%GRADLE_EXIT_CONSOLE%" exit 1

+exit /b 1

+

+:mainEnd

+if "%OS%"=="Windows_NT" endlocal

+

+:omega

diff --git a/LoopbackApp/proguard.cfg b/LoopbackApp/proguard.cfg
new file mode 100644
index 0000000..1c439e0
--- /dev/null
+++ b/LoopbackApp/proguard.cfg
@@ -0,0 +1,5 @@
+-keep class org.drrickorang.loopback.NativeAudioThread {
+}
+-keep class org.drrickorang.loopback.BufferCallbackTimes {
+    public <init> (int[], short[], boolean, short);
+}
\ No newline at end of file
diff --git a/LoopbackApp/settings.gradle b/LoopbackApp/settings.gradle
new file mode 100644
index 0000000..e7b4def
--- /dev/null
+++ b/LoopbackApp/settings.gradle
@@ -0,0 +1 @@
+include ':app'
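settings.gradle wires in the single ':app' module, so the whole project builds through the wrapper added above. A typical invocation from the LoopbackApp directory, assuming the standard assembleDebug/installDebug tasks provided by the Android plugin apply to this module:

    # Build the debug APK for the :app module, then install it on a connected device.
    ./gradlew :app:assembleDebug
    ./gradlew :app:installDebug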