Merge "Fix dodgy states of keyguard transport controls" into klp-dev
diff --git a/core/java/android/app/WallpaperManager.java b/core/java/android/app/WallpaperManager.java
index 16c9da6..ced72f8 100644
--- a/core/java/android/app/WallpaperManager.java
+++ b/core/java/android/app/WallpaperManager.java
@@ -280,6 +280,7 @@
             synchronized (this) {
                 mWallpaper = null;
                 mDefaultWallpaper = null;
+                mHandler.removeMessages(MSG_CLEAR_WALLPAPER);
             }
         }
 
diff --git a/core/java/android/content/ContentProvider.java b/core/java/android/content/ContentProvider.java
index 461dc1d..a9d0559 100644
--- a/core/java/android/content/ContentProvider.java
+++ b/core/java/android/content/ContentProvider.java
@@ -1078,6 +1078,7 @@
      * @see #openAssetFile(Uri, String)
      * @see #openFileHelper(Uri, String)
      * @see #getType(android.net.Uri)
+     * @see ParcelFileDescriptor#parseMode(String)
      */
     public ParcelFileDescriptor openFile(Uri uri, String mode)
             throws FileNotFoundException {
@@ -1147,6 +1148,7 @@
      * @see #openAssetFile(Uri, String)
      * @see #openFileHelper(Uri, String)
      * @see #getType(android.net.Uri)
+     * @see ParcelFileDescriptor#parseMode(String)
      */
     public ParcelFileDescriptor openFile(Uri uri, String mode, CancellationSignal signal)
             throws FileNotFoundException {
diff --git a/core/java/android/content/ContentResolver.java b/core/java/android/content/ContentResolver.java
index 04b4027..4e6cc92 100644
--- a/core/java/android/content/ContentResolver.java
+++ b/core/java/android/content/ContentResolver.java
@@ -1627,9 +1627,9 @@
     }
 
     /**
-     * Take a persistable Uri permission grant that has been offered. Once
+     * Take a persistable URI permission grant that has been offered. Once
      * taken, the permission grant will be remembered across device reboots.
-     * Only Uri permissions granted with
+     * Only URI permissions granted with
      * {@link Intent#FLAG_GRANT_PERSISTABLE_URI_PERMISSION} can be persisted. If
      * the grant has already been persisted, taking it again will touch
      * {@link UriPermission#getPersistedTime()}.
@@ -1644,7 +1644,7 @@
     }
 
     /**
-     * Relinquish a persisted Uri permission grant. The Uri must have been
+     * Relinquish a persisted URI permission grant. The URI must have been
      * previously made persistent with
      * {@link #takePersistableUriPermission(Uri, int)}. Any non-persistent
      * grants to the calling package will remain intact.
@@ -1659,7 +1659,7 @@
     }
 
     /**
-     * Return list of all Uri permission grants that have been persisted by the
+     * Return list of all URI permission grants that have been persisted by the
      * calling app. That is, the returned permissions have been granted
      * <em>to</em> the calling app. Only persistable grants taken with
      * {@link #takePersistableUriPermission(Uri, int)} are returned.
@@ -1677,7 +1677,7 @@
     }
 
     /**
-     * Return list of all persisted Uri permission grants that are hosted by the
+     * Return list of all persisted URI permission grants that are hosted by the
      * calling app. That is, the returned permissions have been granted
      * <em>from</em> the calling app. Only grants taken with
      * {@link #takePersistableUriPermission(Uri, int)} are returned.
diff --git a/core/java/android/content/Context.java b/core/java/android/content/Context.java
index fe079bc..2e4e209 100644
--- a/core/java/android/content/Context.java
+++ b/core/java/android/content/Context.java
@@ -30,8 +30,10 @@
 import android.media.MediaScannerConnection.OnScanCompletedListener;
 import android.net.Uri;
 import android.os.Bundle;
+import android.os.Environment;
 import android.os.Handler;
 import android.os.Looper;
+import android.os.StatFs;
 import android.os.UserHandle;
 import android.os.UserManager;
 import android.util.AttributeSet;
@@ -594,7 +596,7 @@
     public abstract File getFilesDir();
 
     /**
-     * Returns the absolute path to the directory on the external filesystem
+     * Returns the absolute path to the directory on the primary external filesystem
      * (that is somewhere on {@link android.os.Environment#getExternalStorageDirectory()
      * Environment.getExternalStorageDirectory()}) where the application can
      * place persistent files it owns.  These files are internal to the
@@ -608,10 +610,18 @@
      * <li>External files are not always available: they will disappear if the
      * user mounts the external storage on a computer or removes it.  See the
      * APIs on {@link android.os.Environment} for information in the storage state.
-     * <li>There is no security enforced with these files.  All applications
-     * can read and write files placed here.
+     * <li>There is no security enforced with these files.  For example, any application
+     * holding {@link android.Manifest.permission#WRITE_EXTERNAL_STORAGE} can write to
+     * these files.
      * </ul>
      *
+     * <p>Starting in {@link android.os.Build.VERSION_CODES#KITKAT}, no permissions
+     * are required to read or write to the returned path; it's always
+     * accessible to the calling app.  This only applies to paths generated for
+     * the package name of the calling application.  To access paths belonging
+     * to other packages, {@link android.Manifest.permission#WRITE_EXTERNAL_STORAGE}
+     * and/or {@link android.Manifest.permission#READ_EXTERNAL_STORAGE} are required.
+     *
      * <p>On devices with multiple users (as described by {@link UserManager}),
      * each user has their own isolated external storage. Applications only
      * have access to the external storage for the user they're running as.</p>
@@ -644,11 +654,6 @@
      *
      * {@sample development/samples/ApiDemos/src/com/example/android/apis/content/ExternalStorage.java
      * private_picture}
-     * <p>
-     * Starting in {@link android.os.Build.VERSION_CODES#KITKAT}, no
-     * permissions are required for the owning application to read or write to
-     * this path. Otherwise, {@link android.Manifest.permission#WRITE_EXTERNAL_STORAGE}
-     * or {@link android.Manifest.permission#READ_EXTERNAL_STORAGE} are required.
      *
      * @param type The type of files directory to return.  May be null for
      * the root of the files directory or one of
@@ -677,36 +682,61 @@
      * it owns. These files are internal to the application, and not typically
      * visible to the user as media.
      * <p>
+     * This is like {@link #getFilesDir()} in that these files will be deleted when
+     * the application is uninstalled, however there are some important differences:
+     * <ul>
+     * <li>External files are not always available: they will disappear if the
+     * user mounts the external storage on a computer or removes it.
+     * <li>There is no security enforced with these files.
+     * </ul>
+     * <p>
      * External storage devices returned here are considered a permanent part of
      * the device, including both emulated external storage and physical media
-     * slots. This does not include transient devices, such as USB flash drives.
+     * slots, such as SD cards in a battery compartment. The returned paths do
+     * not include transient devices, such as USB flash drives.
      * <p>
-     * Starting in {@link android.os.Build.VERSION_CODES#KITKAT}, no
-     * permissions are required for the owning application to read or write to
-     * these paths.
+     * An application may store data on any or all of the returned devices.  For
+     * example, an app may choose to store large files on the device with the
+     * most available space, as measured by {@link StatFs}.
      * <p>
-     * The returned paths include any path that would be returned by
-     * {@link #getExternalFilesDir(String)}.
+     * No permissions are required to read or write to the returned paths; they
+     * are always accessible to the calling app.  Write access outside of these
+     * paths on secondary external storage devices is not available.
+     * <p>
+     * The first path returned is the same as {@link #getExternalFilesDir(String)}.
+     * Returned paths may be {@code null} if a storage device is unavailable.
      *
      * @see #getExternalFilesDir(String)
+     * @see Environment#getStorageState(File)
      */
     public abstract File[] getExternalFilesDirs(String type);
 
     /**
-     * Return the directory where this application's OBB files (if there are
-     * any) can be found. Note if the application does not have any OBB files,
-     * this directory may not exist.
+     * Return the primary external storage directory where this application's OBB
+     * files (if there are any) can be found. Note if the application does not have
+     * any OBB files, this directory may not exist.
+     * <p>
+     * This is like {@link #getFilesDir()} in that these files will be deleted when
+     * the application is uninstalled, however there are some important differences:
+     * <ul>
+     * <li>External files are not always available: they will disappear if the
+     * user mounts the external storage on a computer or removes it.
+     * <li>There is no security enforced with these files.  For example, any application
+     * holding {@link android.Manifest.permission#WRITE_EXTERNAL_STORAGE} can write to
+     * these files.
+     * </ul>
+     * <p>
+     * Starting in {@link android.os.Build.VERSION_CODES#KITKAT}, no permissions
+     * are required to read or write to the returned path; it's always
+     * accessible to the calling app.  This only applies to paths generated for
+     * the package name of the calling application.  To access paths belonging
+     * to other packages, {@link android.Manifest.permission#WRITE_EXTERNAL_STORAGE}
+     * and/or {@link android.Manifest.permission#READ_EXTERNAL_STORAGE} are required.
      * <p>
      * On devices with multiple users (as described by {@link UserManager}),
      * multiple users may share the same OBB storage location. Applications
      * should ensure that multiple instances running under different users don't
      * interfere with each other.
-     * <p>
-     * Starting in {@link android.os.Build.VERSION_CODES#KITKAT}, no
-     * permissions are required for the owning application to read or write to
-     * this path. Otherwise,
-     * {@link android.Manifest.permission#WRITE_EXTERNAL_STORAGE} or
-     * {@link android.Manifest.permission#READ_EXTERNAL_STORAGE} are required.
      */
     public abstract File getObbDir();
 
@@ -716,18 +746,32 @@
      * any) can be found. Note if the application does not have any OBB files,
      * these directories may not exist.
      * <p>
+     * This is like {@link #getFilesDir()} in that these files will be deleted when
+     * the application is uninstalled, however there are some important differences:
+     * <ul>
+     * <li>External files are not always available: they will disappear if the
+     * user mounts the external storage on a computer or removes it.
+     * <li>There is no security enforced with these files.
+     * </ul>
+     * <p>
      * External storage devices returned here are considered a permanent part of
      * the device, including both emulated external storage and physical media
-     * slots. This does not include transient devices, such as USB flash drives.
+     * slots, such as SD cards in a battery compartment. The returned paths do
+     * not include transient devices, such as USB flash drives.
      * <p>
-     * Starting in {@link android.os.Build.VERSION_CODES#KITKAT}, no
-     * permissions are required for the owning application to read or write to
-     * this path.
+     * An application may store data on any or all of the returned devices.  For
+     * example, an app may choose to store large files on the device with the
+     * most available space, as measured by {@link StatFs}.
      * <p>
-     * The returned paths include any path that would be returned by
-     * {@link #getObbDir()}
+     * No permissions are required to read or write to the returned paths; they
+     * are always accessible to the calling app.  Write access outside of these
+     * paths on secondary external storage devices is not available.
+     * <p>
+     * The first path returned is the same as {@link #getObbDir()}.
+     * Returned paths may be {@code null} if a storage device is unavailable.
      *
      * @see #getObbDir()
+     * @see Environment#getStorageState(File)
      */
     public abstract File[] getObbDirs();
 
@@ -751,7 +795,7 @@
     public abstract File getCacheDir();
 
     /**
-     * Returns the absolute path to the directory on the external filesystem
+     * Returns the absolute path to the directory on the primary external filesystem
      * (that is somewhere on {@link android.os.Environment#getExternalStorageDirectory()
      * Environment.getExternalStorageDirectory()} where the application can
      * place cache files it owns. These files are internal to the application, and
@@ -773,19 +817,21 @@
      * <li>External files are not always available: they will disappear if the
      * user mounts the external storage on a computer or removes it.  See the
      * APIs on {@link android.os.Environment} for information in the storage state.
-     * <li>There is no security enforced with these files.  All applications
-     * can read and write files placed here.
+     * <li>There is no security enforced with these files.  For example, any application
+     * holding {@link android.Manifest.permission#WRITE_EXTERNAL_STORAGE} can write to
+     * these files.
      * </ul>
      *
+     * <p>Starting in {@link android.os.Build.VERSION_CODES#KITKAT}, no permissions
+     * are required to read or write to the returned path; it's always
+     * accessible to the calling app.  This only applies to paths generated for
+     * the package name of the calling application.  To access paths belonging
+     * to other packages, {@link android.Manifest.permission#WRITE_EXTERNAL_STORAGE}
+     * and/or {@link android.Manifest.permission#READ_EXTERNAL_STORAGE} are required.
+     *
      * <p>On devices with multiple users (as described by {@link UserManager}),
      * each user has their own isolated external storage. Applications only
      * have access to the external storage for the user they're running as.</p>
-     * <p>
-     * Starting in {@link android.os.Build.VERSION_CODES#KITKAT}, no
-     * permissions are required for the owning application to read or write to
-     * this path. Otherwise,
-     * {@link android.Manifest.permission#WRITE_EXTERNAL_STORAGE} or
-     * {@link android.Manifest.permission#READ_EXTERNAL_STORAGE} are required.
      *
      * @return The path of the directory holding application cache files
      * on external storage.  Returns null if external storage is not currently
@@ -802,18 +848,32 @@
      * owns. These files are internal to the application, and not typically
      * visible to the user as media.
      * <p>
+     * This is like {@link #getCacheDir()} in that these files will be deleted when
+     * the application is uninstalled, however there are some important differences:
+     * <ul>
+     * <li>External files are not always available: they will disappear if the
+     * user mounts the external storage on a computer or removes it.
+     * <li>There is no security enforced with these files.
+     * </ul>
+     * <p>
      * External storage devices returned here are considered a permanent part of
      * the device, including both emulated external storage and physical media
-     * slots. This does not include transient devices, such as USB flash drives.
+     * slots, such as SD cards in a battery compartment. The returned paths do
+     * not include transient devices, such as USB flash drives.
      * <p>
-     * Starting in {@link android.os.Build.VERSION_CODES#KITKAT}, no
-     * permissions are required for the owning application to read or write to
-     * these paths.
+     * An application may store data on any or all of the returned devices.  For
+     * example, an app may choose to store large files on the device with the
+     * most available space, as measured by {@link StatFs}.
      * <p>
-     * The returned paths include any path that would be returned by
-     * {@link #getExternalCacheDir()}.
+     * No permissions are required to read or write to the returned paths; they
+     * are always accessible to the calling app.  Write access outside of these
+     * paths on secondary external storage devices is not available.
+     * <p>
+     * The first path returned is the same as {@link #getExternalCacheDir()}.
+     * Returned paths may be {@code null} if a storage device is unavailable.
      *
      * @see #getExternalCacheDir()
+     * @see Environment#getStorageState(File)
      */
     public abstract File[] getExternalCacheDirs();
 
diff --git a/core/java/android/content/Intent.java b/core/java/android/content/Intent.java
index 047f175..a289649b6 100644
--- a/core/java/android/content/Intent.java
+++ b/core/java/android/content/Intent.java
@@ -36,6 +36,8 @@
 import android.os.Parcelable;
 import android.os.StrictMode;
 import android.provider.DocumentsContract;
+import android.provider.DocumentsProvider;
+import android.provider.OpenableColumns;
 import android.util.AttributeSet;
 import android.util.Log;
 
@@ -45,7 +47,6 @@
 import java.io.Serializable;
 import java.net.URISyntaxException;
 import java.util.ArrayList;
-import java.util.Iterator;
 import java.util.List;
 import java.util.Locale;
 import java.util.Set;
@@ -2621,49 +2622,76 @@
     public static final String ACTION_GLOBAL_BUTTON = "android.intent.action.GLOBAL_BUTTON";
 
     /**
-     * Activity Action: Allow the user to select and open one or more existing
-     * documents. Both read and write access to the documents will be granted
-     * until explicitly revoked by the user.
+     * Activity Action: Allow the user to select and return one or more existing
+     * documents. When invoked, the system will display the various
+     * {@link DocumentsProvider} instances installed on the device, letting the
+     * user interactively navigate through them. These documents include local
+     * media, such as photos and video, and documents provided by installed
+     * cloud storage providers.
      * <p>
-     * Callers can restrict selection to a specific kind of data, such as
-     * photos, by setting one or more MIME types in {@link #EXTRA_MIME_TYPES}.
+     * Each document is represented as a {@code content://} URI backed by a
+     * {@link DocumentsProvider}, which can be opened as a stream with
+     * {@link ContentResolver#openFileDescriptor(Uri, String)}, or queried for
+     * {@link android.provider.DocumentsContract.Document} metadata.
+     * <p>
+     * All selected documents are returned to the calling application with
+     * persistable read and write permission grants. If you want to maintain
+     * access to the documents across device reboots, you need to explicitly
+     * take the persistable permissions using
+     * {@link ContentResolver#takePersistableUriPermission(Uri, int)}.
+     * <p>
+     * Callers can restrict document selection to a specific kind of data, such
+     * as photos, by setting one or more MIME types in
+     * {@link #EXTRA_MIME_TYPES}.
      * <p>
      * If the caller can handle multiple returned items (the user performing
-     * multiple selection), then it can specify {@link #EXTRA_ALLOW_MULTIPLE} to
-     * indicate this.
+     * multiple selection), then you can specify {@link #EXTRA_ALLOW_MULTIPLE}
+     * to indicate this.
      * <p>
      * Callers must include {@link #CATEGORY_OPENABLE} in the Intent so that
      * returned URIs can be opened with
      * {@link ContentResolver#openFileDescriptor(Uri, String)}.
      * <p>
-     * Output: The URI of the item that was picked. This must be a content: URI
-     * so that any receiver can access it. If multiple documents were selected,
-     * they are returned in {@link #getClipData()}.
+     * Output: The URI of the item that was picked. This must be a
+     * {@code content://} URI so that any receiver can access it. If multiple
+     * documents were selected, they are returned in {@link #getClipData()}.
      *
      * @see DocumentsContract
-     * @see DocumentsContract#getOpenDocuments(Context)
+     * @see #ACTION_CREATE_DOCUMENT
+     * @see #FLAG_GRANT_PERSISTABLE_URI_PERMISSION
      */
     @SdkConstant(SdkConstantType.ACTIVITY_INTENT_ACTION)
     public static final String ACTION_OPEN_DOCUMENT = "android.intent.action.OPEN_DOCUMENT";
 
     /**
-     * Activity Action: Allow the user to create a new document. Both read and
-     * write access to the document will be granted until explicitly revoked by
-     * the user.
+     * Activity Action: Allow the user to create a new document. When invoked,
+     * the system will display the various {@link DocumentsProvider} instances
+     * installed on the device, letting the user navigate through them. The
+     * returned document may be a newly created document with no content, or it
+     * may be an existing document with the requested MIME type.
      * <p>
-     * Callers can provide a hint document name by setting {@link #EXTRA_TITLE},
-     * but the user may change this value before creating the file. Callers can
-     * optionally hint at the MIME type being created by setting
-     * {@link #setType(String)}.
+     * Each document is represented as a {@code content://} URI backed by a
+     * {@link DocumentsProvider}, which can be opened as a stream with
+     * {@link ContentResolver#openFileDescriptor(Uri, String)}, or queried for
+     * {@link android.provider.DocumentsContract.Document} metadata.
+     * <p>
+     * Callers must indicate the concrete MIME type of the document being
+     * created by setting {@link #setType(String)}. This MIME type cannot be
+     * changed after the document is created.
+     * <p>
+     * Callers can provide an initial display name through {@link #EXTRA_TITLE},
+     * but the user may change this value before creating the file.
      * <p>
      * Callers must include {@link #CATEGORY_OPENABLE} in the Intent so that
      * returned URIs can be opened with
      * {@link ContentResolver#openFileDescriptor(Uri, String)}.
      * <p>
-     * Output: The URI of the item that was created. This must be a content: URI
-     * so that any receiver can access it.
+     * Output: The URI of the item that was created. This must be a
+     * {@code content://} URI so that any receiver can access it.
      *
      * @see DocumentsContract
+     * @see #ACTION_OPEN_DOCUMENT
+     * @see #FLAG_GRANT_PERSISTABLE_URI_PERMISSION
      */
     @SdkConstant(SdkConstantType.ACTIVITY_INTENT_ACTION)
     public static final String ACTION_CREATE_DOCUMENT = "android.intent.action.CREATE_DOCUMENT";
@@ -2787,11 +2815,16 @@
      * experience).
      */
     public static final String CATEGORY_SAMPLE_CODE = "android.intent.category.SAMPLE_CODE";
+
     /**
-     * Used to indicate that a GET_CONTENT intent only wants URIs that can be opened with
-     * ContentResolver.openInputStream. Openable URIs must support the columns in
-     * {@link android.provider.OpenableColumns}
-     * when queried, though it is allowable for those columns to be blank.
+     * Used to indicate that an intent only wants URIs that can be opened with
+     * {@link ContentResolver#openFileDescriptor(Uri, String)}. Openable URIs
+     * must support at least the columns defined in {@link OpenableColumns} when
+     * queried.
+     *
+     * @see #ACTION_GET_CONTENT
+     * @see #ACTION_OPEN_DOCUMENT
+     * @see #ACTION_CREATE_DOCUMENT
      */
     @SdkConstant(SdkConstantType.INTENT_CATEGORY)
     public static final String CATEGORY_OPENABLE = "android.intent.category.OPENABLE";
@@ -3218,27 +3251,32 @@
             "android.intent.extra.client_intent";
 
     /**
-     * Used to indicate that a {@link #ACTION_GET_CONTENT} intent should only return
-     * data that is on the local device.  This is a boolean extra; the default
-     * is false.  If true, an implementation of ACTION_GET_CONTENT should only allow
-     * the user to select media that is already on the device, not requiring it
-     * be downloaded from a remote service when opened.  Another way to look
-     * at it is that such content should generally have a "_data" column to the
-     * path of the content on local external storage.
+     * Extra used to indicate that an intent should only return data that is on
+     * the local device. This is a boolean extra; the default is false. If true,
+     * an implementation should only allow the user to select data that is
+     * already on the device, not requiring it be downloaded from a remote
+     * service when opened.
+     *
+     * @see #ACTION_GET_CONTENT
+     * @see #ACTION_OPEN_DOCUMENT
+     * @see #ACTION_CREATE_DOCUMENT
      */
     public static final String EXTRA_LOCAL_ONLY =
-        "android.intent.extra.LOCAL_ONLY";
+            "android.intent.extra.LOCAL_ONLY";
 
     /**
-     * Used to indicate that a {@link #ACTION_GET_CONTENT} intent can allow the
-     * user to select and return multiple items.  This is a boolean extra; the default
-     * is false.  If true, an implementation of ACTION_GET_CONTENT is allowed to
-     * present the user with a UI where they can pick multiple items that are all
-     * returned to the caller.  When this happens, they should be returned as
-     * the {@link #getClipData()} part of the result Intent.
+     * Extra used to indicate that an intent can allow the user to select and
+     * return multiple items. This is a boolean extra; the default is false. If
+     * true, an implementation is allowed to present the user with a UI where
+     * they can pick multiple items that are all returned to the caller. When
+     * this happens, they should be returned as the {@link #getClipData()} part
+     * of the result Intent.
+     *
+     * @see #ACTION_GET_CONTENT
+     * @see #ACTION_OPEN_DOCUMENT
      */
     public static final String EXTRA_ALLOW_MULTIPLE =
-        "android.intent.extra.ALLOW_MULTIPLE";
+            "android.intent.extra.ALLOW_MULTIPLE";
 
     /**
      * The userHandle carried with broadcast intents related to addition, removal and switching of
@@ -3272,9 +3310,13 @@
             "android.intent.extra.restrictions_intent";
 
     /**
-     * Extra used to communicate set of acceptable MIME types for
-     * {@link #ACTION_GET_CONTENT} or {@link #ACTION_OPEN_DOCUMENT}. The type of the
-     * extra is <code>ArrayList&lt;String&gt;</code>.
+     * Extra used to communicate a set of acceptable MIME types. The type of the
+     * extra is {@code String[]}. Values may be a combination of concrete MIME
+     * types (such as "image/png") and/or partial MIME types (such as
+     * "audio/*").
+     *
+     * @see #ACTION_GET_CONTENT
+     * @see #ACTION_OPEN_DOCUMENT
      */
     public static final String EXTRA_MIME_TYPES = "android.intent.extra.MIME_TYPES";
 
@@ -3295,7 +3337,7 @@
 
     /**
      * If set, the recipient of this Intent will be granted permission to
-     * perform read operations on the Uri in the Intent's data and any URIs
+     * perform read operations on the URI in the Intent's data and any URIs
      * specified in its ClipData.  When applying to an Intent's ClipData,
      * all URIs as well as recursive traversals through data or other ClipData
      * in Intent items will be granted; only the grant flags of the top-level
@@ -3304,7 +3346,7 @@
     public static final int FLAG_GRANT_READ_URI_PERMISSION = 0x00000001;
     /**
      * If set, the recipient of this Intent will be granted permission to
-     * perform write operations on the Uri in the Intent's data and any URIs
+     * perform write operations on the URI in the Intent's data and any URIs
      * specified in its ClipData.  When applying to an Intent's ClipData,
      * all URIs as well as recursive traversals through data or other ClipData
      * in Intent items will be granted; only the grant flags of the top-level
@@ -3339,7 +3381,7 @@
 
     /**
      * When combined with {@link #FLAG_GRANT_READ_URI_PERMISSION} and/or
-     * {@link #FLAG_GRANT_WRITE_URI_PERMISSION}, the Uri permission grant can be
+     * {@link #FLAG_GRANT_WRITE_URI_PERMISSION}, the URI permission grant can be
      * persisted across device reboots until explicitly revoked with
      * {@link Context#revokeUriPermission(Uri, int)}. This flag only offers the
      * grant for possible persisting; the receiving application must call
@@ -3349,6 +3391,7 @@
      * @see ContentResolver#takePersistableUriPermission(Uri, int)
      * @see ContentResolver#releasePersistableUriPermission(Uri, int)
      * @see ContentResolver#getPersistedUriPermissions()
+     * @see ContentResolver#getOutgoingPersistedUriPermissions()
      */
     public static final int FLAG_GRANT_PERSISTABLE_URI_PERMISSION = 0x00000040;
 
diff --git a/core/java/android/os/ParcelFileDescriptor.java b/core/java/android/os/ParcelFileDescriptor.java
index 55c083b..1456387 100644
--- a/core/java/android/os/ParcelFileDescriptor.java
+++ b/core/java/android/os/ParcelFileDescriptor.java
@@ -192,6 +192,7 @@
      * @return a new ParcelFileDescriptor pointing to the given file.
      * @throws FileNotFoundException if the given file does not exist or can not
      *             be opened with the requested mode.
+     * @see #parseMode(String)
      */
     public static ParcelFileDescriptor open(File file, int mode) throws FileNotFoundException {
         final FileDescriptor fd = openInternal(file, mode);
@@ -216,6 +217,7 @@
      * @return a new ParcelFileDescriptor pointing to the given file.
      * @throws FileNotFoundException if the given file does not exist or can not
      *             be opened with the requested mode.
+     * @see #parseMode(String)
      */
     public static ParcelFileDescriptor open(
             File file, int mode, Handler handler, OnCloseListener listener) throws IOException {
diff --git a/core/java/android/provider/DocumentsContract.java b/core/java/android/provider/DocumentsContract.java
index 9d35847..7f8dca2 100644
--- a/core/java/android/provider/DocumentsContract.java
+++ b/core/java/android/provider/DocumentsContract.java
@@ -73,7 +73,8 @@
     }
 
     /**
-     * Intent action used to identify {@link DocumentsProvider} instances.
+     * Intent action used to identify {@link DocumentsProvider} instances. This
+     * is used in the {@code <intent-filter>} of a {@code <provider>}.
      */
     public static final String PROVIDER_INTERFACE = "android.content.action.DOCUMENTS_PROVIDER";
 
@@ -100,12 +101,14 @@
     private static final int THUMBNAIL_BUFFER_SIZE = (int) (128 * KB_IN_BYTES);
 
     /**
-     * Constants related to a document, including {@link Cursor} columns names
+     * Constants related to a document, including {@link Cursor} column names
      * and flags.
      * <p>
-     * A document can be either an openable file (with a specific MIME type), or
-     * a directory containing additional documents (with the
-     * {@link #MIME_TYPE_DIR} MIME type).
+     * A document can be either an openable stream (with a specific MIME type),
+     * or a directory containing additional documents (with the
+     * {@link #MIME_TYPE_DIR} MIME type). A directory represents the top of a
+     * subtree containing zero or more documents, which can recursively contain
+     * even more documents and directories.
      * <p>
      * All columns are <em>read-only</em> to client applications.
      */
@@ -122,7 +125,7 @@
          * single document may be included as a child of multiple directories.
          * <p>
          * A provider must always return durable IDs, since they will be used to
-         * issue long-term Uri permission grants when an application interacts
+         * issue long-term URI permission grants when an application interacts
          * with {@link Intent#ACTION_OPEN_DOCUMENT} and
          * {@link Intent#ACTION_CREATE_DOCUMENT}.
          * <p>
@@ -290,8 +293,11 @@
     }
 
     /**
-     * Constants related to a root of documents, including {@link Cursor}
-     * columns names and flags.
+     * Constants related to a root of documents, including {@link Cursor} column
+     * names and flags. A root is the start of a tree of documents, such as a
+     * physical storage device, or an account. Each root starts at the directory
+     * referenced by {@link Root#COLUMN_DOCUMENT_ID}, which can recursively
+     * contain both documents and directories.
      * <p>
      * All columns are <em>read-only</em> to client applications.
      */
@@ -329,7 +335,8 @@
 
         /**
          * Title for a root, which will be shown to a user. This column is
-         * required.
+         * required. For a single storage service surfacing multiple accounts as
+         * different roots, this title should be the name of the service.
          * <p>
          * Type: STRING
          */
@@ -337,7 +344,9 @@
 
         /**
          * Summary for this root, which may be shown to a user. This column is
-         * optional, and may be {@code null}.
+         * optional, and may be {@code null}. For a single storage service
+         * surfacing multiple accounts as different roots, this summary should
+         * be the name of the account.
          * <p>
          * Type: STRING
          */
@@ -393,11 +402,12 @@
         public static final int FLAG_LOCAL_ONLY = 1 << 1;
 
         /**
-         * Flag indicating that this root can report recently modified
-         * documents.
+         * Flag indicating that this root can be queried to provide recently
+         * modified documents.
          *
          * @see #COLUMN_FLAGS
          * @see DocumentsContract#buildRecentDocumentsUri(String, String)
+         * @see DocumentsProvider#queryRecentDocuments(String, String[])
          */
         public static final int FLAG_SUPPORTS_RECENTS = 1 << 2;
 
@@ -405,6 +415,8 @@
          * Flag indicating that this root supports search.
          *
          * @see #COLUMN_FLAGS
+         * @see DocumentsContract#buildSearchDocumentsUri(String, String,
+         *      String)
          * @see DocumentsProvider#querySearchDocuments(String, String,
          *      String[])
          */
@@ -481,7 +493,7 @@
     private static final String PARAM_MANAGE = "manage";
 
     /**
-     * Build Uri representing the roots of a document provider. When queried, a
+     * Build URI representing the roots of a document provider. When queried, a
      * provider will return one or more rows with columns defined by
      * {@link Root}.
      *
@@ -493,7 +505,7 @@
     }
 
     /**
-     * Build Uri representing the given {@link Root#COLUMN_ROOT_ID} in a
+     * Build URI representing the given {@link Root#COLUMN_ROOT_ID} in a
      * document provider.
      *
      * @see #getRootId(Uri)
@@ -504,7 +516,7 @@
     }
 
     /**
-     * Build Uri representing the recently modified documents of a specific root
+     * Build URI representing the recently modified documents of a specific root
      * in a document provider. When queried, a provider will return zero or more
      * rows with columns defined by {@link Document}.
      *
@@ -518,7 +530,7 @@
     }
 
     /**
-     * Build Uri representing the given {@link Document#COLUMN_DOCUMENT_ID} in a
+     * Build URI representing the given {@link Document#COLUMN_DOCUMENT_ID} in a
      * document provider. When queried, a provider will return a single row with
      * columns defined by {@link Document}.
      *
@@ -531,7 +543,7 @@
     }
 
     /**
-     * Build Uri representing the children of the given directory in a document
+     * Build URI representing the children of the given directory in a document
      * provider. When queried, a provider will return zero or more rows with
      * columns defined by {@link Document}.
      *
@@ -548,7 +560,7 @@
     }
 
     /**
-     * Build Uri representing a search for matching documents under a specific
+     * Build URI representing a search for matching documents under a specific
      * root in a document provider. When queried, a provider will return zero or
      * more rows with columns defined by {@link Document}.
      *
@@ -564,7 +576,7 @@
     }
 
     /**
-     * Test if the given Uri represents a {@link Document} backed by a
+     * Test if the given URI represents a {@link Document} backed by a
      * {@link DocumentsProvider}.
      */
     public static boolean isDocumentUri(Context context, Uri uri) {
@@ -588,7 +600,7 @@
     }
 
     /**
-     * Extract the {@link Root#COLUMN_ROOT_ID} from the given Uri.
+     * Extract the {@link Root#COLUMN_ROOT_ID} from the given URI.
      */
     public static String getRootId(Uri rootUri) {
         final List<String> paths = rootUri.getPathSegments();
@@ -602,7 +614,7 @@
     }
 
     /**
-     * Extract the {@link Document#COLUMN_DOCUMENT_ID} from the given Uri.
+     * Extract the {@link Document#COLUMN_DOCUMENT_ID} from the given URI.
      */
     public static String getDocumentId(Uri documentUri) {
         final List<String> paths = documentUri.getPathSegments();
@@ -616,7 +628,7 @@
     }
 
     /**
-     * Extract the search query from a Uri built by
+     * Extract the search query from a URI built by
      * {@link #buildSearchDocumentsUri(String, String, String)}.
      */
     public static String getSearchDocumentsQuery(Uri searchDocumentsUri) {
@@ -634,7 +646,7 @@
     }
 
     /**
-     * Return thumbnail representing the document at the given Uri. Callers are
+     * Return thumbnail representing the document at the given URI. Callers are
      * responsible for their own in-memory caching.
      *
      * @param documentUri document to return thumbnail for, which must have
@@ -642,7 +654,7 @@
      * @param size optimal thumbnail size desired. A provider may return a
      *            thumbnail of a different size, but never more than double the
      *            requested size.
-     * @param signal signal used to indicate that caller is no longer interested
+     * @param signal signal used to indicate if caller is no longer interested
      *            in the thumbnail.
      * @return decoded thumbnail, or {@code null} if problem was encountered.
      * @see DocumentsProvider#openDocumentThumbnail(String, Point,
diff --git a/core/java/android/provider/DocumentsProvider.java b/core/java/android/provider/DocumentsProvider.java
index 337b735..c9efb53 100644
--- a/core/java/android/provider/DocumentsProvider.java
+++ b/core/java/android/provider/DocumentsProvider.java
@@ -24,6 +24,7 @@
 import static android.provider.DocumentsContract.getSearchDocumentsQuery;
 
 import android.content.ContentProvider;
+import android.content.ContentResolver;
 import android.content.ContentValues;
 import android.content.Context;
 import android.content.Intent;
@@ -39,6 +40,7 @@
 import android.os.ParcelFileDescriptor;
 import android.os.ParcelFileDescriptor.OnCloseListener;
 import android.provider.DocumentsContract.Document;
+import android.provider.DocumentsContract.Root;
 import android.util.Log;
 
 import libcore.io.IoUtils;
@@ -46,25 +48,70 @@
 import java.io.FileNotFoundException;
 
 /**
- * Base class for a document provider. A document provider should extend this
- * class and implement the abstract methods.
+ * Base class for a document provider. A document provider offers read and write
+ * access to durable files, such as files stored on a local disk, or files in a
+ * cloud storage service. To create a document provider, extend this class,
+ * implement the abstract methods, and add it to your manifest like this:
+ *
+ * <pre class="prettyprint">&lt;manifest&gt;
+ *    ...
+ *    &lt;application&gt;
+ *        ...
+ *        &lt;provider
+ *            android:name="com.example.MyCloudProvider"
+ *            android:authorities="com.example.mycloudprovider"
+ *            android:exported="true"
+ *            android:grantUriPermissions="true"
+ *            android:permission="android.permission.MANAGE_DOCUMENTS"&gt;
+ *            &lt;intent-filter&gt;
+ *                &lt;action android:name="android.content.action.DOCUMENTS_PROVIDER" /&gt;
+ *            &lt;/intent-filter&gt;
+ *        &lt;/provider&gt;
+ *        ...
+ *    &lt;/application&gt;
+ *&lt;/manifest&gt;</pre>
  * <p>
- * Each document provider expresses one or more "roots" which each serve as the
- * top-level of a tree. For example, a root could represent an account, or a
- * physical storage device. Under each root, documents are referenced by
- * {@link Document#COLUMN_DOCUMENT_ID}, which must not change once returned.
+ * When defining your provider, you must protect it with
+ * {@link android.Manifest.permission#MANAGE_DOCUMENTS}, which is a permission
+ * only the system can obtain. Applications cannot use a documents provider
+ * directly; they must go through {@link Intent#ACTION_OPEN_DOCUMENT} or
+ * {@link Intent#ACTION_CREATE_DOCUMENT} which requires a user to actively
+ * navigate and select documents. When a user selects documents through that
+ * UI, the system issues narrow URI permission grants to the requesting
+ * application.
+ * </p>
+ * <h3>Documents</h3>
  * <p>
- * Documents can be either an openable file (with a specific MIME type), or a
+ * A document can be either an openable stream (with a specific MIME type), or a
  * directory containing additional documents (with the
- * {@link Document#MIME_TYPE_DIR} MIME type). Each document can have different
- * capabilities, as described by {@link Document#COLUMN_FLAGS}. The same
- * {@link Document#COLUMN_DOCUMENT_ID} can be included in multiple directories.
+ * {@link Document#MIME_TYPE_DIR} MIME type). Each directory represents the top
+ * of a subtree containing zero or more documents, which can recursively contain
+ * even more documents and directories.
+ * </p>
  * <p>
- * Document providers must be protected with the
- * {@link android.Manifest.permission#MANAGE_DOCUMENTS} permission, which can
- * only be requested by the system. The system-provided UI then issues narrow
- * Uri permission grants for individual documents when the user explicitly picks
- * documents.
+ * Each document can have different capabilities, as described by
+ * {@link Document#COLUMN_FLAGS}. For example, if a document can be represented
+ * as a thumbnail, a provider can set {@link Document#FLAG_SUPPORTS_THUMBNAIL}
+ * and implement
+ * {@link #openDocumentThumbnail(String, Point, CancellationSignal)} to return
+ * that thumbnail.
+ * </p>
+ * <p>
+ * Each document under a provider is uniquely referenced by its
+ * {@link Document#COLUMN_DOCUMENT_ID}, which must not change once returned. A
+ * single document can be included in multiple directories when responding to
+ * {@link #queryChildDocuments(String, String[], String)}. For example, a
+ * provider might surface a single photo in multiple locations: once in a
+ * directory of locations, and again in a directory of dates.
+ * </p>
+ * <h3>Roots</h3>
+ * <p>
+ * All documents are surfaced through one or more "roots." Each root represents
+ * the top of a document tree that a user can navigate. For example, a root
+ * could represent an account or a physical storage device. Similar to
+ * documents, each root can have capabilities expressed through
+ * {@link Root#COLUMN_FLAGS}.
+ * </p>
  *
  * @see Intent#ACTION_OPEN_DOCUMENT
  * @see Intent#ACTION_CREATE_DOCUMENT
@@ -114,25 +161,30 @@
     }
 
     /**
-     * Create a new document and return its {@link Document#COLUMN_DOCUMENT_ID}.
-     * A provider must allocate a new {@link Document#COLUMN_DOCUMENT_ID} to
-     * represent the document, which must not change once returned.
+     * Create a new document and return its newly generated
+     * {@link Document#COLUMN_DOCUMENT_ID}. A provider must allocate a new
+     * {@link Document#COLUMN_DOCUMENT_ID} to represent the document, which must
+     * not change once returned.
      *
-     * @param documentId the parent directory to create the new document under.
-     * @param mimeType the MIME type associated with the new document.
-     * @param displayName the display name of the new document.
+     * @param parentDocumentId the parent directory to create the new document
+     *            under.
+     * @param mimeType the concrete MIME type associated with the new document.
+     *            If the MIME type is not supported, the provider must throw.
+     * @param displayName the display name of the new document. The provider may
+     *            alter this name to meet any internal constraints, such as
+     *            conflicting names.
      */
     @SuppressWarnings("unused")
-    public String createDocument(String documentId, String mimeType, String displayName)
+    public String createDocument(String parentDocumentId, String mimeType, String displayName)
             throws FileNotFoundException {
         throw new UnsupportedOperationException("Create not supported");
     }
 
     /**
-     * Delete the given document. Upon returning, any Uri permission grants for
-     * the given document will be revoked. If additional documents were deleted
-     * as a side effect of this call, such as documents inside a directory, the
-     * implementor is responsible for revoking those permissions.
+     * Delete the requested document. Upon returning, any URI permission grants
+     * for the requested document will be revoked. If additional documents were
+     * deleted as a side effect of this call, such as documents inside a
+     * directory, the implementor is responsible for revoking those permissions.
      *
      * @param documentId the document to delete.
      */
@@ -141,8 +193,35 @@
         throw new UnsupportedOperationException("Delete not supported");
     }
 
+    /**
+     * Return all roots currently provided. A provider must define at least one
+     * root to display to users, and it should avoid making network requests to
+     * keep this request fast.
+     * <p>
+     * Each root is defined by the metadata columns described in {@link Root},
+     * including {@link Root#COLUMN_DOCUMENT_ID} which points to a directory
+     * representing a tree of documents to display under that root.
+     * <p>
+     * If this set of roots changes, you must call {@link ContentResolver#notifyChange(Uri,
+     * android.database.ContentObserver)} to notify the system.
+     *
+     * @param projection list of {@link Root} columns to put into the cursor. If
+     *            {@code null} all supported columns should be included.
+     */
     public abstract Cursor queryRoots(String[] projection) throws FileNotFoundException;
 
+    /**
+     * Return recently modified documents under the requested root. This will
+     * only be called for roots that advertise
+     * {@link Root#FLAG_SUPPORTS_RECENTS}. The returned documents should be
+     * sorted by {@link Document#COLUMN_LAST_MODIFIED} in descending order, and
+     * limited to only return the 64 most recently modified documents.
+     *
+     * @param projection list of {@link Document} columns to put into the
+     *            cursor. If {@code null} all supported columns should be
+     *            included.
+     * @see DocumentsContract#EXTRA_LOADING
+     */
     @SuppressWarnings("unused")
     public Cursor queryRecentDocuments(String rootId, String[] projection)
             throws FileNotFoundException {
@@ -150,18 +229,43 @@
     }
 
     /**
-     * Return metadata for the given document. A provider should avoid making
-     * network requests to keep this request fast.
+     * Return metadata for the single requested document. A provider should
+     * avoid making network requests to keep this request fast.
      *
      * @param documentId the document to return.
+     * @param projection list of {@link Document} columns to put into the
+     *            cursor. If {@code null} all supported columns should be
+     *            included.
      */
     public abstract Cursor queryDocument(String documentId, String[] projection)
             throws FileNotFoundException;
 
     /**
-     * Return the children of the given document which is a directory.
+     * Return the children documents contained in the requested directory. This
+     * must only return immediate descendants, as additional queries will be
+     * issued to recursively explore the tree.
+     * <p>
+     * If your provider is cloud-based, and you have some data cached or pinned
+     * locally, you may return the local data immediately, setting
+     * {@link DocumentsContract#EXTRA_LOADING} on the Cursor to indicate that
+     * your provider is still fetching additional data. Then, when the network
+     * data is available, you can call {@link ContentResolver#notifyChange(Uri,
+     * android.database.ContentObserver)} to trigger a requery and return the
+     * complete contents.
      *
      * @param parentDocumentId the directory to return children for.
+     * @param projection list of {@link Document} columns to put into the
+     *            cursor. If {@code null} all supported columns should be
+     *            included.
+     * @param sortOrder how to order the rows, formatted as an SQL
+     *            {@code ORDER BY} clause (excluding the ORDER BY itself).
+     *            Passing {@code null} will use the default sort order, which
+     *            may be unordered. This ordering is a hint that can be used to
+     *            prioritize how data is fetched from the network, but UI may
+     *            always enforce a specific ordering.
+     * @see DocumentsContract#EXTRA_LOADING
+     * @see DocumentsContract#EXTRA_INFO
+     * @see DocumentsContract#EXTRA_ERROR
      */
     public abstract Cursor queryChildDocuments(
             String parentDocumentId, String[] projection, String sortOrder)
@@ -176,9 +280,24 @@
     }
 
     /**
-     * Return documents that that match the given query.
+     * Return documents that match the given query under the requested
+     * root. The returned documents should be sorted by relevance in descending
+     * order. How documents are matched against the query string is an
+     * implementation detail left to each provider, but it's suggested that at
+     * least {@link Document#COLUMN_DISPLAY_NAME} be matched in a
+     * case-insensitive fashion.
+     * <p>
+     * Only documents may be returned; directories are not supported in search
+     * results.
      *
      * @param rootId the root to search under.
+     * @param query string to match documents against.
+     * @param projection list of {@link Document} columns to put into the
+     *            cursor. If {@code null} all supported columns should be
+     *            included.
+     * @see DocumentsContract#EXTRA_LOADING
+     * @see DocumentsContract#EXTRA_INFO
+     * @see DocumentsContract#EXTRA_ERROR
      */
     @SuppressWarnings("unused")
     public Cursor querySearchDocuments(String rootId, String query, String[] projection)
@@ -187,8 +306,10 @@
     }
 
     /**
-     * Return MIME type for the given document. Must match the value of
-     * {@link Document#COLUMN_MIME_TYPE} for this document.
+     * Return concrete MIME type of the requested document. Must match the value
+     * of {@link Document#COLUMN_MIME_TYPE} for this document. The default
+     * implementation queries {@link #queryDocument(String, String[])}, so
+     * providers may choose to override this as an optimization.
      */
     public String getDocumentType(String documentId) throws FileNotFoundException {
         final Cursor cursor = queryDocument(documentId, null);
@@ -204,18 +325,21 @@
     }
 
     /**
-     * Open and return the requested document. A provider should return a
-     * reliable {@link ParcelFileDescriptor} to detect when the remote caller
-     * has finished reading or writing the document. A provider may return a
-     * pipe or socket pair if the mode is exclusively
-     * {@link ParcelFileDescriptor#MODE_READ_ONLY} or
+     * Open and return the requested document.
+     * <p>
+     * A provider should return a reliable {@link ParcelFileDescriptor} to
+     * detect when the remote caller has finished reading or writing the
+     * document. A provider may return a pipe or socket pair if the mode is
+     * exclusively {@link ParcelFileDescriptor#MODE_READ_ONLY} or
      * {@link ParcelFileDescriptor#MODE_WRITE_ONLY}, but complex modes like
      * {@link ParcelFileDescriptor#MODE_READ_WRITE} require a normal file on
-     * disk. If a provider blocks while downloading content, it should
-     * periodically check {@link CancellationSignal#isCanceled()} to abort
-     * abandoned open requests.
+     * disk.
+     * <p>
+     * If a provider blocks while downloading content, it should periodically
+     * check {@link CancellationSignal#isCanceled()} to abort abandoned open
+     * requests.
      *
-     * @param docId the document to return.
+     * @param documentId the document to return.
      * @param mode the mode to open with, such as 'r', 'w', or 'rw'.
      * @param signal used by the caller to signal if the request should be
      *            cancelled.
@@ -223,20 +347,24 @@
      *      OnCloseListener)
      * @see ParcelFileDescriptor#createReliablePipe()
      * @see ParcelFileDescriptor#createReliableSocketPair()
+     * @see ParcelFileDescriptor#parseMode(String)
      */
     public abstract ParcelFileDescriptor openDocument(
-            String docId, String mode, CancellationSignal signal) throws FileNotFoundException;
+            String documentId, String mode, CancellationSignal signal) throws FileNotFoundException;
 
     /**
-     * Open and return a thumbnail of the requested document. A provider should
-     * return a thumbnail closely matching the hinted size, attempting to serve
-     * from a local cache if possible. A provider should never return images
-     * more than double the hinted size. If a provider performs expensive
-     * operations to download or generate a thumbnail, it should periodically
-     * check {@link CancellationSignal#isCanceled()} to abort abandoned
-     * thumbnail requests.
+     * Open and return a thumbnail of the requested document.
+     * <p>
+     * A provider should return a thumbnail closely matching the hinted size,
+     * attempting to serve from a local cache if possible. A provider should
+     * never return images more than double the hinted size.
+     * <p>
+     * If a provider performs expensive operations to download or generate a
+     * thumbnail, it should periodically check
+     * {@link CancellationSignal#isCanceled()} to abort abandoned thumbnail
+     * requests.
      *
-     * @param docId the document to return.
+     * @param documentId the document to return.
      * @param sizeHint hint of the optimal thumbnail dimensions.
      * @param signal used by the caller to signal if the request should be
      *            cancelled.
@@ -244,7 +372,8 @@
      */
     @SuppressWarnings("unused")
     public AssetFileDescriptor openDocumentThumbnail(
-            String docId, Point sizeHint, CancellationSignal signal) throws FileNotFoundException {
+            String documentId, Point sizeHint, CancellationSignal signal)
+            throws FileNotFoundException {
         throw new UnsupportedOperationException("Thumbnails not supported");
     }
 
@@ -362,7 +491,7 @@
         final String documentId = extras.getString(Document.COLUMN_DOCUMENT_ID);
         final Uri documentUri = DocumentsContract.buildDocumentUri(mAuthority, documentId);
 
-        // Require that caller can manage given document
+        // Require that caller can manage requested document
         final boolean callerHasManage =
                 context.checkCallingOrSelfPermission(android.Manifest.permission.MANAGE_DOCUMENTS)
                 == PackageManager.PERMISSION_GRANTED;
@@ -408,7 +537,7 @@
     }
 
     /**
-     * Implementation is provided by the parent class.
+     * Implementation is provided by the parent class. Cannot be overridden.
      *
      * @see #openDocument(String, String, CancellationSignal)
      */
@@ -418,7 +547,7 @@
     }
 
     /**
-     * Implementation is provided by the parent class.
+     * Implementation is provided by the parent class. Cannot be overridden.
      *
      * @see #openDocument(String, String, CancellationSignal)
      */
@@ -429,7 +558,7 @@
     }
 
     /**
-     * Implementation is provided by the parent class.
+     * Implementation is provided by the parent class. Cannot be overridden.
      *
      * @see #openDocumentThumbnail(String, Point, CancellationSignal)
      */
@@ -445,7 +574,7 @@
     }
 
     /**
-     * Implementation is provided by the parent class.
+     * Implementation is provided by the parent class. Cannot be overridden.
      *
      * @see #openDocumentThumbnail(String, Point, CancellationSignal)
      */
diff --git a/core/res/res/values-af/strings.xml b/core/res/res/values-af/strings.xml
index f384e24..1a9acd3 100644
--- a/core/res/res/values-af/strings.xml
+++ b/core/res/res/values-af/strings.xml
@@ -1649,5 +1649,5 @@
     <item quantity="other" msgid="4730868920742952817">"Probeer weer oor <xliff:g id="COUNT">%d</xliff:g> sekondes"</item>
   </plurals>
     <string name="restr_pin_try_later" msgid="973144472490532377">"Probeer later weer"</string>
-    <string name="immersive_mode_confirmation" msgid="8554991488096662508">"Sleep van bo af na onder om volskerm te verlaat"</string>
+    <string name="immersive_mode_confirmation" msgid="7227416894979047467">"Sleep van bo af na onder om volskerm te verlaat."</string>
 </resources>
diff --git a/core/res/res/values-am/strings.xml b/core/res/res/values-am/strings.xml
index d913063..e64b6f5 100644
--- a/core/res/res/values-am/strings.xml
+++ b/core/res/res/values-am/strings.xml
@@ -1649,5 +1649,5 @@
     <item quantity="other" msgid="4730868920742952817">"በ<xliff:g id="COUNT">%d</xliff:g> ሰከንዶች ውስጥ እንደገና ይሞክሩ"</item>
   </plurals>
     <string name="restr_pin_try_later" msgid="973144472490532377">"ቆይተው እንደገና ይሞክሩ"</string>
-    <string name="immersive_mode_confirmation" msgid="8554991488096662508">"ከሙሉ ገጽ ማያ ለመውጣት ከላይ ወደታች ጣትዎን ያንቀሳቅሱ"</string>
+    <string name="immersive_mode_confirmation" msgid="7227416894979047467">"ከሙሉ ገጽ ማያ ለመውጣት ጣትዎን ከላይ ወደታች ያንሸራትቱ።"</string>
 </resources>
diff --git a/core/res/res/values-ar/strings.xml b/core/res/res/values-ar/strings.xml
index 6ef77d8..2d91e28 100644
--- a/core/res/res/values-ar/strings.xml
+++ b/core/res/res/values-ar/strings.xml
@@ -1649,5 +1649,5 @@
     <item quantity="other" msgid="4730868920742952817">"أعد المحاولة خلال <xliff:g id="COUNT">%d</xliff:g> ثانية"</item>
   </plurals>
     <string name="restr_pin_try_later" msgid="973144472490532377">"أعد المحاولة لاحقًا"</string>
-    <string name="immersive_mode_confirmation" msgid="8554991488096662508">"التمرير من أعلى لأسفل للخروج من وضع ملء الشاشة"</string>
+    <string name="immersive_mode_confirmation" msgid="7227416894979047467">"مرر بسرعة من أعلى لأسفل للخروج من وضع ملء الشاشة."</string>
 </resources>
diff --git a/core/res/res/values-be/strings.xml b/core/res/res/values-be/strings.xml
index e7e52e4..ccc2986 100644
--- a/core/res/res/values-be/strings.xml
+++ b/core/res/res/values-be/strings.xml
@@ -1795,6 +1795,7 @@
     <!-- no translation found for restr_pin_countdown:other (4730868920742952817) -->
     <!-- no translation found for restr_pin_try_later (973144472490532377) -->
     <skip />
+    <!-- no translation found for immersive_mode_confirmation (7227416894979047467) -->
     <!-- no translation found for immersive_mode_confirmation (8554991488096662508) -->
     <skip />
 </resources>
diff --git a/core/res/res/values-bg/strings.xml b/core/res/res/values-bg/strings.xml
index e9f0d78..99ba922 100644
--- a/core/res/res/values-bg/strings.xml
+++ b/core/res/res/values-bg/strings.xml
@@ -1649,5 +1649,5 @@
     <item quantity="other" msgid="4730868920742952817">"Опитайте отново след <xliff:g id="COUNT">%d</xliff:g> секунди"</item>
   </plurals>
     <string name="restr_pin_try_later" msgid="973144472490532377">"Опитайте отново по-късно"</string>
-    <string name="immersive_mode_confirmation" msgid="8554991488096662508">"За изх. от цял екр. прeк. пръст отгоре надолу"</string>
+    <string name="immersive_mode_confirmation" msgid="7227416894979047467">"За изход от цял екран прекарайте пръст отгоре надолу."</string>
 </resources>
diff --git a/core/res/res/values-ca/strings.xml b/core/res/res/values-ca/strings.xml
index 3bf7b2f..8b46e84 100644
--- a/core/res/res/values-ca/strings.xml
+++ b/core/res/res/values-ca/strings.xml
@@ -440,7 +440,7 @@
     <string name="permlab_readContacts" msgid="8348481131899886131">"lectura dels contactes"</string>
     <string name="permdesc_readContacts" product="tablet" msgid="5294866856941149639">"Permet que l\'aplicació llegeixi dades sobre els contactes que tinguis emmagatzemats a la tauleta, inclosa la freqüència amb què has trucat, has enviat correus electrònics o t\'has comunicat d\'altres maneres amb persones concretes. Aquest permís permet que les aplicacions desin les dades dels teus contactes, i és possible que les aplicacions malicioses comparteixin dades dels contactes sense el teu coneixement."</string>
     <string name="permdesc_readContacts" product="default" msgid="8440654152457300662">"Permet que l\'aplicació llegeixi dades sobre els contactes que tinguis emmagatzemats al telèfon, inclosa la freqüència amb què has trucat, has enviat correus electrònics o t\'has comunicat d\'altres maneres amb persones concretes. Aquest permís permet que les aplicacions desin les dades dels teus contactes, i és possible que les aplicacions malicioses comparteixin dades dels contactes sense el teu coneixement."</string>
-    <string name="permlab_writeContacts" msgid="5107492086416793544">"modificació dels contactes"</string>
+    <string name="permlab_writeContacts" msgid="5107492086416793544">"modificar els teus contactes"</string>
     <string name="permdesc_writeContacts" product="tablet" msgid="897243932521953602">"Permet que l\'aplicació modifiqui les dades sobre contactes emmagatzemades a la tauleta, inclosa la freqüència amb què has trucat, has enviat correus electrònics o t\'has comunicat d\'altres maneres amb contactes concrets. Aquest permís permet que les aplicacions suprimeixin dades de contactes."</string>
     <string name="permdesc_writeContacts" product="default" msgid="589869224625163558">"Permet que l\'aplicació modifiqui les dades sobre contactes emmagatzemades al telèfon, inclosa la freqüència amb què has trucat, has enviat correus electrònics o t\'has comunicat d\'altres maneres amb contactes concrets. Aquest permís permet que les aplicacions suprimeixin dades de contactes."</string>
     <string name="permlab_readCallLog" msgid="3478133184624102739">"lectura del registre de trucades"</string>
@@ -1649,5 +1649,5 @@
     <item quantity="other" msgid="4730868920742952817">"Torna-ho a provar d\'aquí a <xliff:g id="COUNT">%d</xliff:g> segons"</item>
   </plurals>
     <string name="restr_pin_try_later" msgid="973144472490532377">"Torna-ho a provar més tard"</string>
-    <string name="immersive_mode_confirmation" msgid="8554991488096662508">"Fes llis. dit avall per sortir de pant. comp."</string>
+    <string name="immersive_mode_confirmation" msgid="7227416894979047467">"Fes lliscar el dit cap avall per sortir de la pantalla completa."</string>
 </resources>
diff --git a/core/res/res/values-cs/strings.xml b/core/res/res/values-cs/strings.xml
index b52fd91..73ca02b 100644
--- a/core/res/res/values-cs/strings.xml
+++ b/core/res/res/values-cs/strings.xml
@@ -1649,5 +1649,5 @@
     <item quantity="other" msgid="4730868920742952817">"Zkuste to znovu za <xliff:g id="COUNT">%d</xliff:g> s"</item>
   </plurals>
     <string name="restr_pin_try_later" msgid="973144472490532377">"Zkuste to znovu později"</string>
-    <string name="immersive_mode_confirmation" msgid="8554991488096662508">"Režim celé obrazovky ukončíte přejetím dolů"</string>
+    <string name="immersive_mode_confirmation" msgid="7227416894979047467">"Režim celé obrazovky ukončíte přejetím dolů."</string>
 </resources>
diff --git a/core/res/res/values-da/strings.xml b/core/res/res/values-da/strings.xml
index 72d31e2..0cf5bc5 100644
--- a/core/res/res/values-da/strings.xml
+++ b/core/res/res/values-da/strings.xml
@@ -1649,5 +1649,5 @@
     <item quantity="other" msgid="4730868920742952817">"Prøv igen om <xliff:g id="COUNT">%d</xliff:g> sekunder"</item>
   </plurals>
     <string name="restr_pin_try_later" msgid="973144472490532377">"Prøv igen senere"</string>
-    <string name="immersive_mode_confirmation" msgid="8554991488096662508">"Stryg ned fra toppen for at stoppe fuld skærm"</string>
+    <string name="immersive_mode_confirmation" msgid="7227416894979047467">"Stryg ned fra toppen for at afslutte fuld skærm"</string>
 </resources>
diff --git a/core/res/res/values-de/strings.xml b/core/res/res/values-de/strings.xml
index 7503e83..f799be4 100644
--- a/core/res/res/values-de/strings.xml
+++ b/core/res/res/values-de/strings.xml
@@ -1649,5 +1649,5 @@
     <item quantity="other" msgid="4730868920742952817">"In <xliff:g id="COUNT">%d</xliff:g> Sek. wiederholen"</item>
   </plurals>
     <string name="restr_pin_try_later" msgid="973144472490532377">"Später erneut versuchen"</string>
-    <string name="immersive_mode_confirmation" msgid="8554991488096662508">"Zum Schließen des Vollbilds von oben nach unten wischen"</string>
+    <string name="immersive_mode_confirmation" msgid="7227416894979047467">"Zum Schließen des Vollbilds von oben nach unten wischen"</string>
 </resources>
diff --git a/core/res/res/values-el/strings.xml b/core/res/res/values-el/strings.xml
index 40efe0a..605d845 100644
--- a/core/res/res/values-el/strings.xml
+++ b/core/res/res/values-el/strings.xml
@@ -1649,5 +1649,5 @@
     <item quantity="other" msgid="4730868920742952817">"Επανάληψη σε <xliff:g id="COUNT">%d</xliff:g> δευτ."</item>
   </plurals>
     <string name="restr_pin_try_later" msgid="973144472490532377">"Δοκιμάστε ξανά αργότερα"</string>
-    <string name="immersive_mode_confirmation" msgid="8554991488096662508">"Σάρωση προς τα κάτω για έξοδο από πλήρη οθόνη"</string>
+    <string name="immersive_mode_confirmation" msgid="7227416894979047467">"Σάρωση προς τα κάτω για έξοδο από πλήρη οθόνη"</string>
 </resources>
diff --git a/core/res/res/values-en-rGB/strings.xml b/core/res/res/values-en-rGB/strings.xml
index ecafaaf..ca8509a 100644
--- a/core/res/res/values-en-rGB/strings.xml
+++ b/core/res/res/values-en-rGB/strings.xml
@@ -1649,5 +1649,5 @@
     <item quantity="other" msgid="4730868920742952817">"Try again in <xliff:g id="COUNT">%d</xliff:g> seconds"</item>
   </plurals>
     <string name="restr_pin_try_later" msgid="973144472490532377">"Try again later"</string>
-    <string name="immersive_mode_confirmation" msgid="8554991488096662508">"Swipe down from the top to exit full screen"</string>
+    <string name="immersive_mode_confirmation" msgid="7227416894979047467">"Swipe down from the top to exit full screen."</string>
 </resources>
diff --git a/core/res/res/values-en-rIN/strings.xml b/core/res/res/values-en-rIN/strings.xml
index ecafaaf..ca8509a 100644
--- a/core/res/res/values-en-rIN/strings.xml
+++ b/core/res/res/values-en-rIN/strings.xml
@@ -1649,5 +1649,5 @@
     <item quantity="other" msgid="4730868920742952817">"Try again in <xliff:g id="COUNT">%d</xliff:g> seconds"</item>
   </plurals>
     <string name="restr_pin_try_later" msgid="973144472490532377">"Try again later"</string>
-    <string name="immersive_mode_confirmation" msgid="8554991488096662508">"Swipe down from the top to exit full screen"</string>
+    <string name="immersive_mode_confirmation" msgid="7227416894979047467">"Swipe down from the top to exit full screen."</string>
 </resources>
diff --git a/core/res/res/values-es-rUS/strings.xml b/core/res/res/values-es-rUS/strings.xml
index e9d23ab..eb731a3 100644
--- a/core/res/res/values-es-rUS/strings.xml
+++ b/core/res/res/values-es-rUS/strings.xml
@@ -1649,5 +1649,5 @@
     <item quantity="other" msgid="4730868920742952817">"Intentar en <xliff:g id="COUNT">%d</xliff:g> s"</item>
   </plurals>
     <string name="restr_pin_try_later" msgid="973144472490532377">"Vuelve a intentar más tarde."</string>
-    <string name="immersive_mode_confirmation" msgid="8554991488096662508">"Salir de pantalla completa: deslizar abajo"</string>
+    <string name="immersive_mode_confirmation" msgid="7227416894979047467">"Salir de pantalla completa: deslizar abajo"</string>
 </resources>
diff --git a/core/res/res/values-es/strings.xml b/core/res/res/values-es/strings.xml
index bb3ae94..a89f35a 100644
--- a/core/res/res/values-es/strings.xml
+++ b/core/res/res/values-es/strings.xml
@@ -1649,5 +1649,5 @@
     <item quantity="other" msgid="4730868920742952817">"Inténtalo en <xliff:g id="COUNT">%d</xliff:g> s"</item>
   </plurals>
     <string name="restr_pin_try_later" msgid="973144472490532377">"Volver a intentar más tarde"</string>
-    <string name="immersive_mode_confirmation" msgid="8554991488096662508">"Desliza hacia abajo para salir de la pantalla completa"</string>
+    <string name="immersive_mode_confirmation" msgid="7227416894979047467">"Desliza el dedo hacia abajo para salir de la pantalla completa"</string>
 </resources>
diff --git a/core/res/res/values-et-rEE/strings.xml b/core/res/res/values-et-rEE/strings.xml
index ccb12d1..742d3da 100644
--- a/core/res/res/values-et-rEE/strings.xml
+++ b/core/res/res/values-et-rEE/strings.xml
@@ -1649,5 +1649,5 @@
     <item quantity="other" msgid="4730868920742952817">"Proovige uuesti <xliff:g id="COUNT">%d</xliff:g> sekundi pärast"</item>
   </plurals>
     <string name="restr_pin_try_later" msgid="973144472490532377">"Proovige hiljem uuesti"</string>
-    <string name="immersive_mode_confirmation" msgid="8554991488096662508">"Täisekraani sulgemiseks pühkige ülevalt alla"</string>
+    <string name="immersive_mode_confirmation" msgid="7227416894979047467">"Täisekraani sulgemiseks pühkige ülevalt alla"</string>
 </resources>
diff --git a/core/res/res/values-fa/strings.xml b/core/res/res/values-fa/strings.xml
index f57ef46..92d687f 100644
--- a/core/res/res/values-fa/strings.xml
+++ b/core/res/res/values-fa/strings.xml
@@ -1649,5 +1649,5 @@
     <item quantity="other" msgid="4730868920742952817">"امتحان پس از <xliff:g id="COUNT">%d</xliff:g> ثانیه"</item>
   </plurals>
     <string name="restr_pin_try_later" msgid="973144472490532377">"بعداً دوباره امتحان کنید"</string>
-    <string name="immersive_mode_confirmation" msgid="8554991488096662508">"برای خروج از تمام صفحه از بالا به پایین بکشید"</string>
+    <string name="immersive_mode_confirmation" msgid="7227416894979047467">"برای خروج از تمام صفحه از بالا به پایین بکشید"</string>
 </resources>
diff --git a/core/res/res/values-fi/strings.xml b/core/res/res/values-fi/strings.xml
index 40d4bdf..3130cba 100644
--- a/core/res/res/values-fi/strings.xml
+++ b/core/res/res/values-fi/strings.xml
@@ -1649,5 +1649,5 @@
     <item quantity="other" msgid="4730868920742952817">"Yritä uud. <xliff:g id="COUNT">%d</xliff:g> s kul."</item>
   </plurals>
     <string name="restr_pin_try_later" msgid="973144472490532377">"Yritä myöhemmin uudelleen"</string>
-    <string name="immersive_mode_confirmation" msgid="8554991488096662508">"Poistu koko näytön tilasta liu\'uttamalla alas"</string>
+    <string name="immersive_mode_confirmation" msgid="7227416894979047467">"Poistu koko näytön tilasta liu\'uttamalla alas."</string>
 </resources>
diff --git a/core/res/res/values-fr-rCA/strings.xml b/core/res/res/values-fr-rCA/strings.xml
index 8a325ed..385ed4b 100644
--- a/core/res/res/values-fr-rCA/strings.xml
+++ b/core/res/res/values-fr-rCA/strings.xml
@@ -1649,5 +1649,5 @@
     <item quantity="other" msgid="4730868920742952817">"Réessayer dans <xliff:g id="COUNT">%d</xliff:g> s"</item>
   </plurals>
     <string name="restr_pin_try_later" msgid="973144472490532377">"Réessayez plus tard"</string>
-    <string name="immersive_mode_confirmation" msgid="8554991488096662508">"Balayez vers le bas pour quitter plein écran"</string>
+    <string name="immersive_mode_confirmation" msgid="7227416894979047467">"Balayez vers le bas pour quitter plein écran"</string>
 </resources>
diff --git a/core/res/res/values-fr/strings.xml b/core/res/res/values-fr/strings.xml
index 4046d3d..9a44a31 100644
--- a/core/res/res/values-fr/strings.xml
+++ b/core/res/res/values-fr/strings.xml
@@ -1649,5 +1649,5 @@
     <item quantity="other" msgid="4730868920742952817">"Réessayer dans <xliff:g id="COUNT">%d</xliff:g> s"</item>
   </plurals>
     <string name="restr_pin_try_later" msgid="973144472490532377">"Veuillez réessayer ultérieurement."</string>
-    <string name="immersive_mode_confirmation" msgid="8554991488096662508">"Balayer vers le bas pour quitter le plein écran"</string>
+    <string name="immersive_mode_confirmation" msgid="7227416894979047467">"Balayer vers le bas pour quitter le plein écran"</string>
 </resources>
diff --git a/core/res/res/values-hi/strings.xml b/core/res/res/values-hi/strings.xml
index af2f2e3..73575bd 100644
--- a/core/res/res/values-hi/strings.xml
+++ b/core/res/res/values-hi/strings.xml
@@ -1649,5 +1649,5 @@
     <item quantity="other" msgid="4730868920742952817">"<xliff:g id="COUNT">%d</xliff:g> सेकंड में पुन: प्रयास करें"</item>
   </plurals>
     <string name="restr_pin_try_later" msgid="973144472490532377">"बाद में पुनः प्रयास करें"</string>
-    <string name="immersive_mode_confirmation" msgid="8554991488096662508">"पूर्ण स्क्रीन से बाहर आने हेतु ऊपर से नीचे स्वाइप करें"</string>
+    <string name="immersive_mode_confirmation" msgid="7227416894979047467">"पूर्ण स्क्रीन से बाहर आने के लिए ऊपर से नीचे स्वाइप करें."</string>
 </resources>
diff --git a/core/res/res/values-hr/strings.xml b/core/res/res/values-hr/strings.xml
index 198416b..37dc141 100644
--- a/core/res/res/values-hr/strings.xml
+++ b/core/res/res/values-hr/strings.xml
@@ -1649,5 +1649,5 @@
     <item quantity="other" msgid="4730868920742952817">"Ponovite za <xliff:g id="COUNT">%d</xliff:g> s"</item>
   </plurals>
     <string name="restr_pin_try_later" msgid="973144472490532377">"Pokušajte ponovo kasnije"</string>
-    <string name="immersive_mode_confirmation" msgid="8554991488096662508">"Prijeđite prstom s vrha prema dolje za izlaz"</string>
+    <string name="immersive_mode_confirmation" msgid="7227416894979047467">"Prijeđite prstom s vrha prema dolje za izlaz iz cijelog zaslona."</string>
 </resources>
diff --git a/core/res/res/values-hu/strings.xml b/core/res/res/values-hu/strings.xml
index 89f8878..4ae5938 100644
--- a/core/res/res/values-hu/strings.xml
+++ b/core/res/res/values-hu/strings.xml
@@ -1649,5 +1649,5 @@
     <item quantity="other" msgid="4730868920742952817">"Próbálja újra <xliff:g id="COUNT">%d</xliff:g> másodperc múlva"</item>
   </plurals>
     <string name="restr_pin_try_later" msgid="973144472490532377">"Próbálkozzon később"</string>
-    <string name="immersive_mode_confirmation" msgid="8554991488096662508">"A kilépéshez húzza ujját a tetejétől lefelé"</string>
+    <string name="immersive_mode_confirmation" msgid="7227416894979047467">"A teljes képernyős nézetből való kilépéshez húzza ujját a tetejétől lefelé."</string>
 </resources>
diff --git a/core/res/res/values-hy-rAM/strings.xml b/core/res/res/values-hy-rAM/strings.xml
index 3d7d011..96ff31e 100644
--- a/core/res/res/values-hy-rAM/strings.xml
+++ b/core/res/res/values-hy-rAM/strings.xml
@@ -1142,7 +1142,7 @@
     <string name="smv_application" msgid="3307209192155442829">"<xliff:g id="APPLICATION">%1$s</xliff:g> ծրագիրը (գործընթաց <xliff:g id="PROCESS">%2$s</xliff:g>) խախտել է իր ինքնահարկադրված Խիստ ռեժիմ  քաղաքականությունը:"</string>
     <string name="smv_process" msgid="5120397012047462446">"<xliff:g id="PROCESS">%1$s</xliff:g> գործընթացը խախտել է իր ինքնահարկադրված Խիստ ռեժիմ քաղաքականությունը:"</string>
     <string name="android_upgrading_title" msgid="1584192285441405746">"Android-ը նորացվում է..."</string>
-    <string name="android_upgrading_apk" msgid="7904042682111526169">"Հավելվածը օպտիմալացվում է <xliff:g id="NUMBER_0">%1$d</xliff:g>-ից <xliff:g id="NUMBER_1">%2$d</xliff:g>-ի:"</string>
+    <string name="android_upgrading_apk" msgid="7904042682111526169">"Օպտիմալացվում է հավելված <xliff:g id="NUMBER_0">%1$d</xliff:g>-ը <xliff:g id="NUMBER_1">%2$d</xliff:g>-ից:"</string>
     <string name="android_upgrading_starting_apps" msgid="451464516346926713">"Հավելվածները մեկնարկում են:"</string>
     <string name="android_upgrading_complete" msgid="1405954754112999229">"Բեռնումն ավարտվում է:"</string>
     <string name="heavy_weight_notification" msgid="9087063985776626166">"<xliff:g id="APP">%1$s</xliff:g>-ն աշխատում է"</string>
@@ -1649,5 +1649,5 @@
     <item quantity="other" msgid="4730868920742952817">"Կրկին փորձեք <xliff:g id="COUNT">%d</xliff:g> վայրկյանից"</item>
   </plurals>
     <string name="restr_pin_try_later" msgid="973144472490532377">"Կրկին փորձեք մի փոքր ուշ"</string>
-    <string name="immersive_mode_confirmation" msgid="8554991488096662508">"Սահահարվածեք վերից վար՝ ամբողջական էկրանից դուրս գալու համար"</string>
+    <string name="immersive_mode_confirmation" msgid="7227416894979047467">"Սահահարվածեք վերից վար՝ ամբողջական էկրանից դուրս գալու համար:"</string>
 </resources>
diff --git a/core/res/res/values-in/strings.xml b/core/res/res/values-in/strings.xml
index 9a1c641..417c18e 100644
--- a/core/res/res/values-in/strings.xml
+++ b/core/res/res/values-in/strings.xml
@@ -1649,5 +1649,5 @@
     <item quantity="other" msgid="4730868920742952817">"Coba <xliff:g id="COUNT">%d</xliff:g> detik lagi"</item>
   </plurals>
     <string name="restr_pin_try_later" msgid="973144472490532377">"Coba lagi nanti"</string>
-    <string name="immersive_mode_confirmation" msgid="8554991488096662508">"Gesek ke bawah untuk keluar dari layar penuh"</string>
+    <string name="immersive_mode_confirmation" msgid="7227416894979047467">"Gesek ke bawah untuk keluar dari layar penuh"</string>
 </resources>
diff --git a/core/res/res/values-it/strings.xml b/core/res/res/values-it/strings.xml
index b05046b..d2dbfc3 100644
--- a/core/res/res/values-it/strings.xml
+++ b/core/res/res/values-it/strings.xml
@@ -1649,5 +1649,5 @@
     <item quantity="other" msgid="4730868920742952817">"Riprova tra <xliff:g id="COUNT">%d</xliff:g> s."</item>
   </plurals>
     <string name="restr_pin_try_later" msgid="973144472490532377">"Riprova più tardi"</string>
-    <string name="immersive_mode_confirmation" msgid="8554991488096662508">"Uscita schermo intero: scorri in basso da alto"</string>
+    <string name="immersive_mode_confirmation" msgid="7227416894979047467">"Scorri dall\'alto verso il basso per uscire dalla modalità schermo intero."</string>
 </resources>
diff --git a/core/res/res/values-iw/strings.xml b/core/res/res/values-iw/strings.xml
index bef3386..51f3889 100644
--- a/core/res/res/values-iw/strings.xml
+++ b/core/res/res/values-iw/strings.xml
@@ -1649,5 +1649,5 @@
     <item quantity="other" msgid="4730868920742952817">"נסה שוב בעוד <xliff:g id="COUNT">%d</xliff:g> שניות"</item>
   </plurals>
     <string name="restr_pin_try_later" msgid="973144472490532377">"נסה שוב מאוחר יותר"</string>
-    <string name="immersive_mode_confirmation" msgid="8554991488096662508">"החלק מטה מהחלק העליון כדי לצאת ממסך מלא"</string>
+    <string name="immersive_mode_confirmation" msgid="7227416894979047467">"החלק מטה מהחלק העליון כדי לצאת ממסך מלא."</string>
 </resources>
diff --git a/core/res/res/values-ja/strings.xml b/core/res/res/values-ja/strings.xml
index 634ca2c..246a82e 100644
--- a/core/res/res/values-ja/strings.xml
+++ b/core/res/res/values-ja/strings.xml
@@ -1649,5 +1649,5 @@
     <item quantity="other" msgid="4730868920742952817">"<xliff:g id="COUNT">%d</xliff:g>秒後に再試行"</item>
   </plurals>
     <string name="restr_pin_try_later" msgid="973144472490532377">"しばらくしてから再試行"</string>
-    <string name="immersive_mode_confirmation" msgid="8554991488096662508">"全画面表示を終了するには、上から下にスワイプ"</string>
+    <string name="immersive_mode_confirmation" msgid="7227416894979047467">"全画面表示を終了するには、上から下にスワイプ"</string>
 </resources>
diff --git a/core/res/res/values-ka-rGE/strings.xml b/core/res/res/values-ka-rGE/strings.xml
index b3e7c7d..a8025d8 100644
--- a/core/res/res/values-ka-rGE/strings.xml
+++ b/core/res/res/values-ka-rGE/strings.xml
@@ -1649,5 +1649,5 @@
     <item quantity="other" msgid="4730868920742952817">"კიდევ ერთხელ სცადეთ <xliff:g id="COUNT">%d</xliff:g> წამში"</item>
   </plurals>
     <string name="restr_pin_try_later" msgid="973144472490532377">"სცადეთ მოგვიანებით"</string>
-    <string name="immersive_mode_confirmation" msgid="8554991488096662508">"ჩამოასრიალეთ ზევიდან სრული ეკრანის დასახურად"</string>
+    <string name="immersive_mode_confirmation" msgid="7227416894979047467">"ჩამოასრიალეთ ზევიდან სრული ეკრანის დასახურად"</string>
 </resources>
diff --git a/core/res/res/values-km-rKH/strings.xml b/core/res/res/values-km-rKH/strings.xml
index 0a98a08..6dace3c 100644
--- a/core/res/res/values-km-rKH/strings.xml
+++ b/core/res/res/values-km-rKH/strings.xml
@@ -1649,5 +1649,5 @@
     <item quantity="other" msgid="4730868920742952817">"សូម​ព្យាយាម​ម្ដង​ទៀត​ក្នុង​រយៈពេល <xliff:g id="COUNT">%d</xliff:g> វិនាទី"</item>
   </plurals>
     <string name="restr_pin_try_later" msgid="973144472490532377">"សូម​ព្យាយាម​ម្ដងទៀត​នៅ​ពេល​ក្រោយ។"</string>
-    <string name="immersive_mode_confirmation" msgid="8554991488096662508">"អូស​​​ចុះក្រោម ដើម្បី​ចេញ​ពី​ការ​បង្ហាញ​ពេញ​អេក្រង់"</string>
+    <string name="immersive_mode_confirmation" msgid="7227416894979047467">"អូស​​​ចុះក្រោម ដើម្បី​ចេញ​ពី​ការ​បង្ហាញ​ពេញ​អេក្រង់"</string>
 </resources>
diff --git a/core/res/res/values-ko/strings.xml b/core/res/res/values-ko/strings.xml
index 0900446..1a0b2d9 100644
--- a/core/res/res/values-ko/strings.xml
+++ b/core/res/res/values-ko/strings.xml
@@ -1649,5 +1649,5 @@
     <item quantity="other" msgid="4730868920742952817">"<xliff:g id="COUNT">%d</xliff:g>초 후에 다시 시도하세요."</item>
   </plurals>
     <string name="restr_pin_try_later" msgid="973144472490532377">"나중에 다시 시도"</string>
-    <string name="immersive_mode_confirmation" msgid="8554991488096662508">"전체화면을 종료하려면 위에서 아래로 스와이프"</string>
+    <string name="immersive_mode_confirmation" msgid="7227416894979047467">"전체화면을 종료하려면 위에서 아래로 스와이프"</string>
 </resources>
diff --git a/core/res/res/values-lo-rLA/strings.xml b/core/res/res/values-lo-rLA/strings.xml
index 46e2fa9..003b3f9 100644
--- a/core/res/res/values-lo-rLA/strings.xml
+++ b/core/res/res/values-lo-rLA/strings.xml
@@ -1649,5 +1649,5 @@
     <item quantity="other" msgid="4730868920742952817">"ລອງໃໝ່ໃນອີກ <xliff:g id="COUNT">%d</xliff:g> ວິນາທີ"</item>
   </plurals>
     <string name="restr_pin_try_later" msgid="973144472490532377">"ລອງໃໝ່ອີກຄັ້ງໃນພາຍຫລັງ."</string>
-    <string name="immersive_mode_confirmation" msgid="8554991488096662508">"ປັດລົງຈາກເທິງສຸດເພື່ອອກຈາກໂໝດເຕັມໜ້າຈໍ"</string>
+    <string name="immersive_mode_confirmation" msgid="7227416894979047467">"ປັດລົງມາຈາກທາງເທິງເພື່ອອອກຈາກໂໝດເຕັມໜ້າຈໍ."</string>
 </resources>
diff --git a/core/res/res/values-lt/strings.xml b/core/res/res/values-lt/strings.xml
index 5211311..9fa41c9 100644
--- a/core/res/res/values-lt/strings.xml
+++ b/core/res/res/values-lt/strings.xml
@@ -1649,5 +1649,5 @@
     <item quantity="other" msgid="4730868920742952817">"Band. dar po <xliff:g id="COUNT">%d</xliff:g> s"</item>
   </plurals>
     <string name="restr_pin_try_later" msgid="973144472490532377">"Vėliau bandykite dar kartą"</string>
-    <string name="immersive_mode_confirmation" msgid="8554991488096662508">"Perbr. žemyn, kad išeit. iš viso ekr. rež."</string>
+    <string name="immersive_mode_confirmation" msgid="7227416894979047467">"Perbraukite nuo viršaus žemyn, kad išeitumėte iš viso ekrano režimo"</string>
 </resources>
diff --git a/core/res/res/values-lv/strings.xml b/core/res/res/values-lv/strings.xml
index 0fb18dd..036d5d6 100644
--- a/core/res/res/values-lv/strings.xml
+++ b/core/res/res/values-lv/strings.xml
@@ -1649,5 +1649,5 @@
     <item quantity="other" msgid="4730868920742952817">"Mēģ. vēl pēc <xliff:g id="COUNT">%d</xliff:g> s"</item>
   </plurals>
     <string name="restr_pin_try_later" msgid="973144472490532377">"Vēlāk mēģiniet vēlreiz."</string>
-    <string name="immersive_mode_confirmation" msgid="8554991488096662508">"Lai izietu no pilnekr., velc. no augšas lejup."</string>
+    <string name="immersive_mode_confirmation" msgid="7227416894979047467">"Lai izietu no pilnekrāna režīma, velciet no augšas uz leju."</string>
 </resources>
diff --git a/core/res/res/values-mn-rMN/strings.xml b/core/res/res/values-mn-rMN/strings.xml
index af46c33..fe3d78b 100644
--- a/core/res/res/values-mn-rMN/strings.xml
+++ b/core/res/res/values-mn-rMN/strings.xml
@@ -1649,5 +1649,5 @@
     <item quantity="other" msgid="4730868920742952817">"<xliff:g id="COUNT">%d</xliff:g> секундын дараа дахин оролдоно уу"</item>
   </plurals>
     <string name="restr_pin_try_later" msgid="973144472490532377">"Дараа дахин оролдоно уу"</string>
-    <string name="immersive_mode_confirmation" msgid="8554991488096662508">"Бүтэн дэлгэцээс гарахын тулд дээрээс нь эхлэн доош шудрана уу"</string>
+    <string name="immersive_mode_confirmation" msgid="7227416894979047467">"Бүтэн дэлгэцээс гарахын тулд дээрээс нь эхлэн доош шудрана уу."</string>
 </resources>
diff --git a/core/res/res/values-ms-rMY/strings.xml b/core/res/res/values-ms-rMY/strings.xml
index 7a46b0c..88d76e7 100644
--- a/core/res/res/values-ms-rMY/strings.xml
+++ b/core/res/res/values-ms-rMY/strings.xml
@@ -1649,5 +1649,5 @@
     <item quantity="other" msgid="4730868920742952817">"Cuba <xliff:g id="COUNT">%d</xliff:g> saat lagi"</item>
   </plurals>
     <string name="restr_pin_try_later" msgid="973144472490532377">"Cuba sebentar lagi"</string>
-    <string name="immersive_mode_confirmation" msgid="8554991488096662508">"Leret ke bawah untuk keluar dari skrin penuh"</string>
+    <string name="immersive_mode_confirmation" msgid="7227416894979047467">"Leret ke bawah untuk keluar dari skrin penuh"</string>
 </resources>
diff --git a/core/res/res/values-nb/strings.xml b/core/res/res/values-nb/strings.xml
index 0df98b1..3be8817 100644
--- a/core/res/res/values-nb/strings.xml
+++ b/core/res/res/values-nb/strings.xml
@@ -1649,5 +1649,5 @@
     <item quantity="other" msgid="4730868920742952817">"Prøv på nytt om <xliff:g id="COUNT">%d</xliff:g> sekunder"</item>
   </plurals>
     <string name="restr_pin_try_later" msgid="973144472490532377">"Prøv på nytt senere"</string>
-    <string name="immersive_mode_confirmation" msgid="8554991488096662508">"Sveip ned for å avslutte fullskjermvisning"</string>
+    <string name="immersive_mode_confirmation" msgid="7227416894979047467">"Sveip ned fra toppen av skjermen for å gå ut av fullskjermvisningen."</string>
 </resources>
diff --git a/core/res/res/values-nl/strings.xml b/core/res/res/values-nl/strings.xml
index 24d6c66..30afea7 100644
--- a/core/res/res/values-nl/strings.xml
+++ b/core/res/res/values-nl/strings.xml
@@ -1649,5 +1649,5 @@
     <item quantity="other" msgid="4730868920742952817">"Probeer het over <xliff:g id="COUNT">%d</xliff:g> seconden opnieuw"</item>
   </plurals>
     <string name="restr_pin_try_later" msgid="973144472490532377">"Probeer het later opnieuw"</string>
-    <string name="immersive_mode_confirmation" msgid="8554991488096662508">"Van boven omlaag vegen: voll. scherm sluiten"</string>
+    <string name="immersive_mode_confirmation" msgid="7227416894979047467">"Veeg omlaag vanaf de bovenkant om het volledige scherm te sluiten."</string>
 </resources>
diff --git a/core/res/res/values-pl/strings.xml b/core/res/res/values-pl/strings.xml
index edbbe12..0e1bee9 100644
--- a/core/res/res/values-pl/strings.xml
+++ b/core/res/res/values-pl/strings.xml
@@ -1649,5 +1649,5 @@
     <item quantity="other" msgid="4730868920742952817">"Spróbuj za <xliff:g id="COUNT">%d</xliff:g> s"</item>
   </plurals>
     <string name="restr_pin_try_later" msgid="973144472490532377">"Spróbuj ponownie później"</string>
-    <string name="immersive_mode_confirmation" msgid="8554991488096662508">"Przesuń z góry w dół, by zamknąć pełny ekran"</string>
+    <string name="immersive_mode_confirmation" msgid="7227416894979047467">"Przesuń z góry w dół, by zamknąć pełny ekran."</string>
 </resources>
diff --git a/core/res/res/values-pt-rPT/strings.xml b/core/res/res/values-pt-rPT/strings.xml
index bc9dddf..f3c4020 100644
--- a/core/res/res/values-pt-rPT/strings.xml
+++ b/core/res/res/values-pt-rPT/strings.xml
@@ -1649,5 +1649,5 @@
     <item quantity="other" msgid="4730868920742952817">"Tente em: <xliff:g id="COUNT">%d</xliff:g> seg"</item>
   </plurals>
     <string name="restr_pin_try_later" msgid="973144472490532377">"Tente novamente mais tarde"</string>
-    <string name="immersive_mode_confirmation" msgid="8554991488096662508">"Deslize para baixo para sair do ecrã inteiro"</string>
+    <string name="immersive_mode_confirmation" msgid="7227416894979047467">"Deslize rapidamente para baixo para sair do ecrã inteiro."</string>
 </resources>
diff --git a/core/res/res/values-pt/strings.xml b/core/res/res/values-pt/strings.xml
index f81af65..15a6130 100644
--- a/core/res/res/values-pt/strings.xml
+++ b/core/res/res/values-pt/strings.xml
@@ -1649,5 +1649,5 @@
     <item quantity="other" msgid="4730868920742952817">"Tente novamente em <xliff:g id="COUNT">%d</xliff:g> segundos"</item>
   </plurals>
     <string name="restr_pin_try_later" msgid="973144472490532377">"Tente novamente mais tarde"</string>
-    <string name="immersive_mode_confirmation" msgid="8554991488096662508">"Deslize para baixo para sair da tela inteira"</string>
+    <string name="immersive_mode_confirmation" msgid="7227416894979047467">"Deslize para baixo para sair da tela inteira"</string>
 </resources>
diff --git a/core/res/res/values-rm/strings.xml b/core/res/res/values-rm/strings.xml
index 6607f02..9154621 100644
--- a/core/res/res/values-rm/strings.xml
+++ b/core/res/res/values-rm/strings.xml
@@ -2732,6 +2732,7 @@
     <!-- no translation found for restr_pin_countdown:other (4730868920742952817) -->
     <!-- no translation found for restr_pin_try_later (973144472490532377) -->
     <skip />
+    <!-- no translation found for immersive_mode_confirmation (7227416894979047467) -->
     <!-- no translation found for immersive_mode_confirmation (8554991488096662508) -->
     <skip />
 </resources>
diff --git a/core/res/res/values-ro/strings.xml b/core/res/res/values-ro/strings.xml
index 7d8c5f2..dc5a211 100644
--- a/core/res/res/values-ro/strings.xml
+++ b/core/res/res/values-ro/strings.xml
@@ -1649,5 +1649,5 @@
     <item quantity="other" msgid="4730868920742952817">"Reîncercați în <xliff:g id="COUNT">%d</xliff:g> sec."</item>
   </plurals>
     <string name="restr_pin_try_later" msgid="973144472490532377">"Reîncercați mai târziu"</string>
-    <string name="immersive_mode_confirmation" msgid="8554991488096662508">"Glisați în jos pt. a ieși din ecran complet"</string>
+    <string name="immersive_mode_confirmation" msgid="7227416894979047467">"Glisați în jos pentru a ieși din ecran complet."</string>
 </resources>
diff --git a/core/res/res/values-ru/strings.xml b/core/res/res/values-ru/strings.xml
index 3da5950..5348084 100644
--- a/core/res/res/values-ru/strings.xml
+++ b/core/res/res/values-ru/strings.xml
@@ -1649,5 +1649,5 @@
     <item quantity="other" msgid="4730868920742952817">"Повтор через <xliff:g id="COUNT">%d</xliff:g> сек."</item>
   </plurals>
     <string name="restr_pin_try_later" msgid="973144472490532377">"Повторите попытку позже."</string>
-    <string name="immersive_mode_confirmation" msgid="8554991488096662508">"Чтобы вернуться в обычный режим, проведите пальцем вниз"</string>
+    <string name="immersive_mode_confirmation" msgid="7227416894979047467">"Чтобы вернуться в обычный режим, проведите пальцем вниз"</string>
 </resources>
diff --git a/core/res/res/values-sk/strings.xml b/core/res/res/values-sk/strings.xml
index df752bd..cd865aa 100644
--- a/core/res/res/values-sk/strings.xml
+++ b/core/res/res/values-sk/strings.xml
@@ -1649,5 +1649,5 @@
     <item quantity="other" msgid="4730868920742952817">"Skúste to zas o <xliff:g id="COUNT">%d</xliff:g> s"</item>
   </plurals>
     <string name="restr_pin_try_later" msgid="973144472490532377">"Skúste to znova neskôr"</string>
-    <string name="immersive_mode_confirmation" msgid="8554991488096662508">"Režim celej obraz. ukončíte posunutím nadol"</string>
+    <string name="immersive_mode_confirmation" msgid="7227416894979047467">"Režim celej obraz. ukončíte posunutím nadol"</string>
 </resources>
diff --git a/core/res/res/values-sl/strings.xml b/core/res/res/values-sl/strings.xml
index 29c010d..43da811 100644
--- a/core/res/res/values-sl/strings.xml
+++ b/core/res/res/values-sl/strings.xml
@@ -1649,5 +1649,5 @@
     <item quantity="other" msgid="4730868920742952817">"Poskusite znova čez <xliff:g id="COUNT">%d</xliff:g> s"</item>
   </plurals>
     <string name="restr_pin_try_later" msgid="973144472490532377">"Poskusite znova pozneje"</string>
-    <string name="immersive_mode_confirmation" msgid="8554991488096662508">"Povlecite z vrha, da zaprete celozas. način"</string>
+    <string name="immersive_mode_confirmation" msgid="7227416894979047467">"Povlecite z vrha, da zaprete celozaslonski način."</string>
 </resources>
diff --git a/core/res/res/values-sr/strings.xml b/core/res/res/values-sr/strings.xml
index e7c69bf..14b22cc 100644
--- a/core/res/res/values-sr/strings.xml
+++ b/core/res/res/values-sr/strings.xml
@@ -1649,5 +1649,5 @@
     <item quantity="other" msgid="4730868920742952817">"Покушајте опет за <xliff:g id="COUNT">%d</xliff:g> сек"</item>
   </plurals>
     <string name="restr_pin_try_later" msgid="973144472490532377">"Покушајте поново касније"</string>
-    <string name="immersive_mode_confirmation" msgid="8554991488096662508">"Превуците надоле од врха за излаз из целог екрана"</string>
+    <string name="immersive_mode_confirmation" msgid="7227416894979047467">"Превуците прстом одозго надоле да бисте изашли из целог екрана."</string>
 </resources>
diff --git a/core/res/res/values-sv/strings.xml b/core/res/res/values-sv/strings.xml
index 4e21412..aa20a64 100644
--- a/core/res/res/values-sv/strings.xml
+++ b/core/res/res/values-sv/strings.xml
@@ -830,7 +830,7 @@
     <string name="lockscreen_pattern_wrong" msgid="4317955014948108794">"Försök igen"</string>
     <string name="lockscreen_password_wrong" msgid="5737815393253165301">"Försök igen"</string>
     <string name="faceunlock_multiple_failures" msgid="754137583022792429">"Du har försökt låsa upp med Ansiktslås för många gånger"</string>
-    <string name="lockscreen_plugged_in" msgid="8057762828355572315">"Laddar (<xliff:g id="PERCENT">%%</xliff:g> <xliff:g id="NUMBER">%d</xliff:g>)"</string>
+    <string name="lockscreen_plugged_in" msgid="8057762828355572315">"Laddar (<xliff:g id="NUMBER">%d</xliff:g><xliff:g id="PERCENT">%%</xliff:g>)"</string>
     <string name="lockscreen_charged" msgid="321635745684060624">"Batteriet har laddats"</string>
     <string name="lockscreen_battery_short" msgid="4477264849386850266">"<xliff:g id="NUMBER">%d</xliff:g> <xliff:g id="PERCENT">%%</xliff:g>"</string>
     <string name="lockscreen_low_battery" msgid="1482873981919249740">"Anslut din laddare."</string>
@@ -1649,5 +1649,5 @@
     <item quantity="other" msgid="4730868920742952817">"Försök igen om <xliff:g id="COUNT">%d</xliff:g> sekunder"</item>
   </plurals>
     <string name="restr_pin_try_later" msgid="973144472490532377">"Försök igen senare"</string>
-    <string name="immersive_mode_confirmation" msgid="8554991488096662508">"Dra nedåt om du vill avbryta fullskärmsläget"</string>
+    <string name="immersive_mode_confirmation" msgid="7227416894979047467">"Dra nedåt om du vill avbryta fullskärmsläget."</string>
 </resources>
diff --git a/core/res/res/values-sw/strings.xml b/core/res/res/values-sw/strings.xml
index 2d65199..b066758 100644
--- a/core/res/res/values-sw/strings.xml
+++ b/core/res/res/values-sw/strings.xml
@@ -1649,5 +1649,5 @@
     <item quantity="other" msgid="4730868920742952817">"Jaribu tena baada ya sekunde <xliff:g id="COUNT">%d</xliff:g>"</item>
   </plurals>
     <string name="restr_pin_try_later" msgid="973144472490532377">"Jaribu tena baadaye"</string>
-    <string name="immersive_mode_confirmation" msgid="8554991488096662508">"Telezesha kidole kwa kasi chini kuanzia juu ili uondoke kwenye skrini kamili"</string>
+    <string name="immersive_mode_confirmation" msgid="7227416894979047467">"Telezesha kidole kwa kasi chini kuanzia juu ili uondoke kwenye skrini kamili"</string>
 </resources>
diff --git a/core/res/res/values-th/strings.xml b/core/res/res/values-th/strings.xml
index d6135d5..58f5268 100644
--- a/core/res/res/values-th/strings.xml
+++ b/core/res/res/values-th/strings.xml
@@ -1649,5 +1649,5 @@
     <item quantity="other" msgid="4730868920742952817">"ลองอีกใน <xliff:g id="COUNT">%d</xliff:g> วินาที"</item>
   </plurals>
     <string name="restr_pin_try_later" msgid="973144472490532377">"ลองอีกครั้งในภายหลัง"</string>
-    <string name="immersive_mode_confirmation" msgid="8554991488096662508">"กวาดนิ้วจากบนลงล่างเพื่อออกจากโหมดเต็มหน้าจอ"</string>
+    <string name="immersive_mode_confirmation" msgid="7227416894979047467">"กวาดนิ้วจากบนลงล่างเพื่อออกจากโหมดเต็มหน้าจอ"</string>
 </resources>
diff --git a/core/res/res/values-tl/strings.xml b/core/res/res/values-tl/strings.xml
index e17c7cc..bc396ed 100644
--- a/core/res/res/values-tl/strings.xml
+++ b/core/res/res/values-tl/strings.xml
@@ -1649,5 +1649,5 @@
     <item quantity="other" msgid="4730868920742952817">"Subukan muli sa <xliff:g id="COUNT">%d</xliff:g> seg"</item>
   </plurals>
     <string name="restr_pin_try_later" msgid="973144472490532377">"Subukang muli sa ibang pagkakataon"</string>
-    <string name="immersive_mode_confirmation" msgid="8554991488096662508">"Mag-swipe pababa upang lumabas sa full screen"</string>
+    <string name="immersive_mode_confirmation" msgid="7227416894979047467">"Mag-swipe pababa upang lumabas sa full screen"</string>
 </resources>
diff --git a/core/res/res/values-tr/strings.xml b/core/res/res/values-tr/strings.xml
index 31ae8d8..99a137b 100644
--- a/core/res/res/values-tr/strings.xml
+++ b/core/res/res/values-tr/strings.xml
@@ -1649,5 +1649,5 @@
     <item quantity="other" msgid="4730868920742952817">"<xliff:g id="COUNT">%d</xliff:g> saniye içinde tekrar deneyin"</item>
   </plurals>
     <string name="restr_pin_try_later" msgid="973144472490532377">"Daha sonra tekrar deneyin"</string>
-    <string name="immersive_mode_confirmation" msgid="8554991488096662508">"Tam ekrandan çıkmak için aşağıya hızlıca kaydırın"</string>
+    <string name="immersive_mode_confirmation" msgid="7227416894979047467">"Tam ekrandan çıkmak için aşağıya hızlıca kaydırın"</string>
 </resources>
diff --git a/core/res/res/values-uk/strings.xml b/core/res/res/values-uk/strings.xml
index 69f2e9e..61f45ec 100644
--- a/core/res/res/values-uk/strings.xml
+++ b/core/res/res/values-uk/strings.xml
@@ -1649,5 +1649,5 @@
     <item quantity="other" msgid="4730868920742952817">"Повтор за <xliff:g id="COUNT">%d</xliff:g> с"</item>
   </plurals>
     <string name="restr_pin_try_later" msgid="973144472490532377">"Спробуйте пізніше"</string>
-    <string name="immersive_mode_confirmation" msgid="8554991488096662508">"Проведіть пальцем зверху вниз, щоб зменшити"</string>
+    <string name="immersive_mode_confirmation" msgid="7227416894979047467">"Проведіть пальцем зверху вниз, щоб вийти з повноекранного режиму."</string>
 </resources>
diff --git a/core/res/res/values-vi/strings.xml b/core/res/res/values-vi/strings.xml
index 2d7ae36..82752b7 100644
--- a/core/res/res/values-vi/strings.xml
+++ b/core/res/res/values-vi/strings.xml
@@ -1649,5 +1649,5 @@
     <item quantity="other" msgid="4730868920742952817">"Hãy thử lại sau <xliff:g id="COUNT">%d</xliff:g> giây"</item>
   </plurals>
     <string name="restr_pin_try_later" msgid="973144472490532377">"Hãy thử lại sau"</string>
-    <string name="immersive_mode_confirmation" msgid="8554991488096662508">"Vuốt từ trên xuống để thoát toàn màn hình"</string>
+    <string name="immersive_mode_confirmation" msgid="7227416894979047467">"Vuốt từ trên xuống để thoát toàn màn hình"</string>
 </resources>
diff --git a/core/res/res/values-zh-rCN/strings.xml b/core/res/res/values-zh-rCN/strings.xml
index 533db45..dbfd039 100644
--- a/core/res/res/values-zh-rCN/strings.xml
+++ b/core/res/res/values-zh-rCN/strings.xml
@@ -1649,5 +1649,5 @@
     <item quantity="other" msgid="4730868920742952817">"<xliff:g id="COUNT">%d</xliff:g>秒后重试"</item>
   </plurals>
     <string name="restr_pin_try_later" msgid="973144472490532377">"稍后重试"</string>
-    <string name="immersive_mode_confirmation" msgid="8554991488096662508">"从顶部向下滑动即可退出全屏模式"</string>
+    <string name="immersive_mode_confirmation" msgid="7227416894979047467">"从顶部向下滑动即可退出全屏模式"</string>
 </resources>
diff --git a/core/res/res/values-zh-rHK/strings.xml b/core/res/res/values-zh-rHK/strings.xml
index 85a2af6..2d2e77e 100644
--- a/core/res/res/values-zh-rHK/strings.xml
+++ b/core/res/res/values-zh-rHK/strings.xml
@@ -1649,5 +1649,5 @@
     <item quantity="other" msgid="4730868920742952817">"<xliff:g id="COUNT">%d</xliff:g> 秒後再試一次"</item>
   </plurals>
     <string name="restr_pin_try_later" msgid="973144472490532377">"稍後再試"</string>
-    <string name="immersive_mode_confirmation" msgid="8554991488096662508">"從頂端往下快速滑動即可退出全螢幕"</string>
+    <string name="immersive_mode_confirmation" msgid="7227416894979047467">"從頂端往下滑動即可結束全螢幕。"</string>
 </resources>
diff --git a/core/res/res/values-zh-rTW/strings.xml b/core/res/res/values-zh-rTW/strings.xml
index 377a008..3ec8d4c 100644
--- a/core/res/res/values-zh-rTW/strings.xml
+++ b/core/res/res/values-zh-rTW/strings.xml
@@ -255,7 +255,7 @@
     <string name="permlab_receiveSms" msgid="8673471768947895082">"接收簡訊 (SMS)"</string>
     <string name="permdesc_receiveSms" msgid="6424387754228766939">"允許應用程式接收和處理簡訊。這項設定可讓應用程式監控傳送至您裝置的訊息,或在您閱讀訊息前擅自刪除訊息。"</string>
     <string name="permlab_receiveMms" msgid="1821317344668257098">"接收簡訊 (MMS)"</string>
-    <string name="permdesc_receiveMms" msgid="533019437263212260">"允許應用程式接收和處理 MMS 訊息。這項設定可讓應用程式監控傳送至您裝置的訊息,或在您閱讀訊息前擅自刪除訊息。"</string>
+    <string name="permdesc_receiveMms" msgid="533019437263212260">"允許應用程式接收和處理多媒體訊息。這項設定可讓應用程式監控傳送至您裝置的訊息,或在您閱讀訊息前擅自刪除訊息。"</string>
     <string name="permlab_receiveEmergencyBroadcast" msgid="1803477660846288089">"接收緊急廣播"</string>
     <string name="permdesc_receiveEmergencyBroadcast" msgid="848524070262431974">"允許應用程式接收及處理緊急廣播訊息,只有系統應用程式可以具備這項權限。"</string>
     <string name="permlab_readCellBroadcasts" msgid="1598328843619646166">"讀取區域廣播訊息"</string>
@@ -759,7 +759,7 @@
     <string name="phoneTypeWorkMobile" msgid="1311426989184065709">"公司行動電話"</string>
     <string name="phoneTypeWorkPager" msgid="649938731231157056">"公司呼叫器"</string>
     <string name="phoneTypeAssistant" msgid="5596772636128562884">"助理"</string>
-    <string name="phoneTypeMms" msgid="7254492275502768992">"MMS"</string>
+    <string name="phoneTypeMms" msgid="7254492275502768992">"多媒體簡訊"</string>
     <string name="eventTypeCustom" msgid="7837586198458073404">"自訂"</string>
     <string name="eventTypeBirthday" msgid="2813379844211390740">"生日"</string>
     <string name="eventTypeAnniversary" msgid="3876779744518284000">"週年紀念日"</string>
@@ -1649,5 +1649,5 @@
     <item quantity="other" msgid="4730868920742952817">"<xliff:g id="COUNT">%d</xliff:g> 秒後再試一次"</item>
   </plurals>
     <string name="restr_pin_try_later" msgid="973144472490532377">"稍後再試"</string>
-    <string name="immersive_mode_confirmation" msgid="8554991488096662508">"從頂端往下滑動即可結束全螢幕"</string>
+    <string name="immersive_mode_confirmation" msgid="7227416894979047467">"從頂端往下滑動即可結束全螢幕。"</string>
 </resources>
diff --git a/core/res/res/values-zu/strings.xml b/core/res/res/values-zu/strings.xml
index 6996f31..57d59c3 100644
--- a/core/res/res/values-zu/strings.xml
+++ b/core/res/res/values-zu/strings.xml
@@ -1649,5 +1649,5 @@
     <item quantity="other" msgid="4730868920742952817">"Zama futhi kumasekhondi angu-<xliff:g id="COUNT">%d</xliff:g>"</item>
   </plurals>
     <string name="restr_pin_try_later" msgid="973144472490532377">"Zama futhi emva kwesikhathi"</string>
-    <string name="immersive_mode_confirmation" msgid="8554991488096662508">"Swayipha ngezansi kusuka ngaphezulu ukuze uphume kusikrini esigcwele"</string>
+    <string name="immersive_mode_confirmation" msgid="7227416894979047467">"Swayiphela phansi kusukela phezulu ukuze uphume kusikrini esigcwele."</string>
 </resources>
diff --git a/packages/Keyguard/src/com/android/keyguard/PagedView.java b/packages/Keyguard/src/com/android/keyguard/PagedView.java
index 814ac982..9d237dc 100644
--- a/packages/Keyguard/src/com/android/keyguard/PagedView.java
+++ b/packages/Keyguard/src/com/android/keyguard/PagedView.java
@@ -1556,6 +1556,9 @@
             if (mTouchState == TOUCH_STATE_SCROLLING) {
                 final int activePointerId = mActivePointerId;
                 final int pointerIndex = ev.findPointerIndex(activePointerId);
+
+                if (pointerIndex == -1) return true;
+
                 final float x = ev.getX(pointerIndex);
                 final VelocityTracker velocityTracker = mVelocityTracker;
                 velocityTracker.computeCurrentVelocity(1000, mMaximumVelocity);
diff --git a/packages/PrintSpooler/res/values-be/arrays.xml b/packages/PrintSpooler/res/values-be/arrays.xml
new file mode 100644
index 0000000..d40278c
--- /dev/null
+++ b/packages/PrintSpooler/res/values-be/arrays.xml
@@ -0,0 +1,33 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2013 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<resources>
+
+    <string-array name="pdf_printer_media_sizes" translatable="false">
+        <item>NA_LETTER</item>
+        <item>NA_GOVT_LETTER</item>
+        <item>NA_LEGAL</item>
+        <item>NA_JUNIOR_LEGAL</item>
+        <item>NA_LEDGER</item>
+        <item>NA_TABLOID</item>
+        <item>NA_INDEX_3X5</item>
+        <item>NA_INDEX_4X6</item>
+        <item>NA_INDEX_5X8</item>
+        <item>NA_MONARCH</item>
+        <item>NA_QUARTO</item>
+        <item>NA_FOOLSCAP</item>
+    </string-array>
+
+</resources>
diff --git a/packages/PrintSpooler/res/values-es-rUS/arrays.xml b/packages/PrintSpooler/res/values-es-rUS/arrays.xml
new file mode 100644
index 0000000..d40278c
--- /dev/null
+++ b/packages/PrintSpooler/res/values-es-rUS/arrays.xml
@@ -0,0 +1,33 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2013 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<resources>
+
+    <string-array name="pdf_printer_media_sizes" translatable="false">
+        <item>NA_LETTER</item>
+        <item>NA_GOVT_LETTER</item>
+        <item>NA_LEGAL</item>
+        <item>NA_JUNIOR_LEGAL</item>
+        <item>NA_LEDGER</item>
+        <item>NA_TABLOID</item>
+        <item>NA_INDEX_3X5</item>
+        <item>NA_INDEX_4X6</item>
+        <item>NA_INDEX_5X8</item>
+        <item>NA_MONARCH</item>
+        <item>NA_QUARTO</item>
+        <item>NA_FOOLSCAP</item>
+    </string-array>
+
+</resources>
diff --git a/packages/PrintSpooler/res/values-ja/arrays.xml b/packages/PrintSpooler/res/values-ja/arrays.xml
new file mode 100644
index 0000000..57088c8
--- /dev/null
+++ b/packages/PrintSpooler/res/values-ja/arrays.xml
@@ -0,0 +1,42 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2013 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<resources>
+
+    <string-array name="pdf_printer_media_sizes" translatable="false">
+        <item>JIS_B10</item>
+        <item>JIS_B9</item>
+        <item>JIS_B8</item>
+        <item>JIS_B7</item>
+        <item>JIS_B6</item>
+        <item>JIS_B5</item>
+        <item>JIS_B4</item>
+        <item>JIS_B3</item>
+        <item>JIS_B2</item>
+        <item>JIS_B1</item>
+        <item>JIS_B0</item>
+        <item>JIS_EXEC</item>
+        <item>JPN_CHOU4</item>
+        <item>JPN_CHOU3</item>
+        <item>JPN_CHOU2</item>
+        <item>JPN_HAGAKI</item>
+        <item>JPN_OUFUKU</item>
+        <item>JPN_KAHU</item>
+        <item>JPN_KAKU2</item>
+        <item>JPN_YOU4</item>
+
+    </string-array>
+
+</resources>
diff --git a/packages/PrintSpooler/res/values-zh-rCN/arrays.xml b/packages/PrintSpooler/res/values-zh-rCN/arrays.xml
new file mode 100644
index 0000000..4fc75db
--- /dev/null
+++ b/packages/PrintSpooler/res/values-zh-rCN/arrays.xml
@@ -0,0 +1,37 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2013 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<resources>
+
+    <string-array name="pdf_printer_media_sizes" translatable="false">
+        <item>ROC_8K</item>
+        <item>ROC_16K</item>
+        <item>PRC_1</item>
+        <item>PRC_2</item>
+        <item>PRC_3</item>
+        <item>PRC_4</item>
+        <item>PRC_5</item>
+        <item>PRC_6</item>
+        <item>PRC_7</item>
+        <item>PRC_8</item>
+        <item>PRC_9</item>
+        <item>PRC_10</item>
+        <item>PRC_16K</item>
+        <item>OM_PA_KAI</item>
+        <item>OM_DAI_PA_KAI</item>
+        <item>OM_JUURO_KU_KAI</item>
+    </string-array>
+
+</resources>
diff --git a/packages/SystemUI/src/com/android/systemui/ImageWallpaper.java b/packages/SystemUI/src/com/android/systemui/ImageWallpaper.java
index 6fa863d..4b0c2cb 100644
--- a/packages/SystemUI/src/com/android/systemui/ImageWallpaper.java
+++ b/packages/SystemUI/src/com/android/systemui/ImageWallpaper.java
@@ -326,6 +326,7 @@
                             ((mBackground == null) ? 0 : mBackground.getHeight()) + ", " +
                             dw + ", " + dh);
                 }
+                mWallpaperManager.forgetLoadedWallpaper();
                 updateWallpaperLocked();
                 if (mBackground == null) {
                     if (DEBUG) {
diff --git a/services/java/com/android/server/ConnectivityService.java b/services/java/com/android/server/ConnectivityService.java
index 70418e8..b5f0697 100644
--- a/services/java/com/android/server/ConnectivityService.java
+++ b/services/java/com/android/server/ConnectivityService.java
@@ -4247,27 +4247,35 @@
                         log("isMobileOk: linkHasIpv4=" + linkHasIpv4
                                 + " linkHasIpv6=" + linkHasIpv6);
 
-                        // Loop through at most 3 valid addresses or all of the address or until
-                        // we run out of time
-                        int loops = Math.min(3, addresses.length);
-                        for(int validAddr=0, addrTried=0;
-                                    (validAddr < loops) && (addrTried < addresses.length)
-                                      && (SystemClock.elapsedRealtime() < endTime);
-                                addrTried ++) {
+                        final ArrayList<InetAddress> validAddresses =
+                                new ArrayList<InetAddress>(addresses.length);
 
-                            // Choose the address at random but make sure its type is supported
-                            // TODO: This doesn't work 100% of the time, because we may end up
-                            // trying the same invalid address more than once and ignoring one
-                            // of the valid addresses.
-                            InetAddress hostAddr = addresses[rand.nextInt(addresses.length)];
-                            if (((hostAddr instanceof Inet4Address) && linkHasIpv4)
-                                    || ((hostAddr instanceof Inet6Address) && linkHasIpv6)) {
-                                // Valid address, so use it
-                                validAddr += 1;
-                            } else {
-                                // Invalid address so try next address
-                                continue;
+                        for (InetAddress addr : addresses) {
+                            if (((addr instanceof Inet4Address) && linkHasIpv4) ||
+                                    ((addr instanceof Inet6Address) && linkHasIpv6)) {
+                                validAddresses.add(addr);
                             }
+                        }
+
+                        if (validAddresses.size() == 0) {
+                            return CMP_RESULT_CODE_NO_CONNECTION;
+                        }
+
+                        int addrTried = 0;
+                        while (true) {
+                            // Loop through at most 3 valid addresses or until
+                            // we run out of time
+                            if (addrTried++ >= 3) {
+                                log("too many loops tried - giving up");
+                                break;
+                            }
+                            if (SystemClock.elapsedRealtime() >= endTime) {
+                                log("spend too much time - giving up");
+                                break;
+                            }
+
+                            InetAddress hostAddr = validAddresses.get(rand.nextInt(
+                                    validAddresses.size()));
 
                             // Make a route to host so we check the specific interface.
                             if (mCs.requestRouteToHostAddress(ConnectivityManager.TYPE_MOBILE_HIPRI,
@@ -4283,8 +4291,10 @@
                             }
 
                             // Rewrite the url to have numeric address to use the specific route.
+                            // Add a pointless random query param to fool proxies into not caching.
                             URL newUrl = new URL(orgUri.getScheme(),
-                                    hostAddr.getHostAddress(), orgUri.getPath());
+                                    hostAddr.getHostAddress(),
+                                    orgUri.getPath() + "?q=" + rand.nextInt(Integer.MAX_VALUE));
                             log("isMobileOk: newUrl=" + newUrl);
 
                             HttpURLConnection urlConn = null;
@@ -4321,6 +4331,9 @@
                                     // occasions where a server returned 200 even though
                                     // the device didn't have a "warm" sim.
                                     log("isMobileOk: not expected responseCode=" + responseCode);
+                                    // TODO - it would be nice in the single-address case to do
+                                    // another DNS resolve here, but flushing the cache is a bit
+                                    // heavy-handed.
                                     result = CMP_RESULT_CODE_REDIRECTED;
                                 }
                             } catch (Exception e) {
diff --git a/services/java/com/android/server/am/ActivityManagerService.java b/services/java/com/android/server/am/ActivityManagerService.java
index f1c2025..085f9af 100644
--- a/services/java/com/android/server/am/ActivityManagerService.java
+++ b/services/java/com/android/server/am/ActivityManagerService.java
@@ -226,7 +226,7 @@
     static final boolean DEBUG_RESULTS = localLOGV || false;
     static final boolean DEBUG_SERVICE = localLOGV || false;
     static final boolean DEBUG_SERVICE_EXECUTING = localLOGV || false;
-    static final boolean DEBUG_STACK = localLOGV || true;
+    static final boolean DEBUG_STACK = localLOGV || false;
     static final boolean DEBUG_SWITCH = localLOGV || false;
     static final boolean DEBUG_TASKS = localLOGV || false;
     static final boolean DEBUG_THUMBNAILS = localLOGV || false;
@@ -236,7 +236,7 @@
     static final boolean DEBUG_VISBILITY = localLOGV || false;
     static final boolean DEBUG_PSS = localLOGV || false;
     static final boolean DEBUG_LOCKSCREEN = localLOGV || false;
-    static final boolean VALIDATE_TOKENS = true;
+    static final boolean VALIDATE_TOKENS = false;
     static final boolean SHOW_ACTIVITY_START_TIME = true;
 
     // Control over CPU and battery monitoring.
diff --git a/services/java/com/android/server/am/ActivityStack.java b/services/java/com/android/server/am/ActivityStack.java
index 77f874f..26d9bcf 100644
--- a/services/java/com/android/server/am/ActivityStack.java
+++ b/services/java/com/android/server/am/ActivityStack.java
@@ -551,9 +551,6 @@
      * Move the activities around in the stack to bring a user to the foreground.
      */
     final void switchUserLocked(int userId) {
-        if (VALIDATE_TOKENS) {
-            validateAppTokensLocked();
-        }
         if (mCurrentUser == userId) {
             return;
         }
@@ -564,11 +561,16 @@
         for (int i = 0; i < index; ++i) {
             TaskRecord task = mTaskHistory.get(i);
             if (task.userId == userId) {
+                if (DEBUG_TASKS) Slog.d(TAG, "switchUserLocked: stack=" + getStackId() +
+                        " moving " + task + " to top");
                 mTaskHistory.remove(i);
                 mTaskHistory.add(task);
                 --index;
             }
         }
+        if (VALIDATE_TOKENS) {
+            validateAppTokensLocked();
+        }
     }
 
     void minimalResumeActivityLocked(ActivityRecord r) {
@@ -986,7 +988,7 @@
      */
     final boolean ensureActivitiesVisibleLocked(ActivityRecord top, ActivityRecord starting,
             String onlyThisProcess, int configChanges, boolean forceHomeShown) {
-        if (true || DEBUG_VISBILITY) Slog.v(
+        if (DEBUG_VISBILITY) Slog.v(
                 TAG, "ensureActivitiesVisible behind " + top
                 + " configChanges=0x" + Integer.toHexString(configChanges));
 
@@ -1042,7 +1044,7 @@
                                 r.startFreezingScreenLocked(r.app, configChanges);
                             }
                             if (!r.visible) {
-                                if (true || DEBUG_VISBILITY) Slog.v(
+                                if (DEBUG_VISBILITY) Slog.v(
                                         TAG, "Starting and making visible: " + r);
                                 mWindowManager.setAppVisibility(r.appToken, true);
                             }
@@ -1064,7 +1066,7 @@
                         if (r.state != ActivityState.RESUMED && r != starting) {
                             // If this activity is paused, tell it
                             // to now show its window.
-                            if (true || DEBUG_VISBILITY) Slog.v(
+                            if (DEBUG_VISBILITY) Slog.v(
                                     TAG, "Making visible and scheduling visibility: " + r);
                             try {
                                 if (mTranslucentActivityWaiting != null) {
@@ -1118,7 +1120,7 @@
                     // Now for any activities that aren't visible to the user, make
                     // sure they no longer are keeping the screen frozen.
                     if (r.visible) {
-                        if (true || DEBUG_VISBILITY) Slog.v(TAG, "Making invisible: " + r);
+                        if (DEBUG_VISBILITY) Slog.v(TAG, "Making invisible: " + r);
                         r.visible = false;
                         try {
                             mWindowManager.setAppVisibility(r.appToken, false);
@@ -2951,6 +2953,7 @@
         for (int taskNdx = top; taskNdx >= 0; --taskNdx) {
             final TaskRecord task = mTaskHistory.get(taskNdx);
             if (task.isHomeTask()) {
+                if (DEBUG_TASKS || DEBUG_STACK) Slog.d(TAG, "moveHomeTaskToTop: moving " + task);
                 mTaskHistory.remove(taskNdx);
                 mTaskHistory.add(top, task);
                 mWindowManager.moveTaskToTop(task.taskId);
diff --git a/services/java/com/android/server/am/ActivityStackSupervisor.java b/services/java/com/android/server/am/ActivityStackSupervisor.java
index 040f0ac..2895552 100644
--- a/services/java/com/android/server/am/ActivityStackSupervisor.java
+++ b/services/java/com/android/server/am/ActivityStackSupervisor.java
@@ -68,9 +68,8 @@
 import android.os.UserHandle;
 import android.util.EventLog;
 import android.util.Slog;
-import android.util.SparseArray;
+import android.util.SparseIntArray;
 
-import android.util.SparseBooleanArray;
 import com.android.internal.app.HeavyWeightSwitcherActivity;
 import com.android.internal.os.TransferPipe;
 import com.android.server.am.ActivityManagerService.PendingActivityLaunch;
@@ -89,7 +88,7 @@
     static final boolean DEBUG_ADD_REMOVE = DEBUG || false;
     static final boolean DEBUG_APP = DEBUG || false;
     static final boolean DEBUG_SAVED_STATE = DEBUG || false;
-    static final boolean DEBUG_STATES = DEBUG || true;
+    static final boolean DEBUG_STATES = DEBUG || false;
     static final boolean DEBUG_IDLE = DEBUG || false;
 
     public static final int HOME_STACK_ID = 0;
@@ -204,8 +203,8 @@
      */
     final PowerManager.WakeLock mGoingToSleep;
 
-    /** State of the stacks when user switched, indexed by userId. */
-    SparseBooleanArray mUserHomeInFront = new SparseBooleanArray(2);
+    /** Stack id of the front stack when user switched, indexed by userId. */
+    SparseIntArray mUserStackInFront = new SparseIntArray(2);
 
     public ActivityStackSupervisor(ActivityManagerService service, Context context,
             Looper looper) {
@@ -876,7 +875,7 @@
             throws RemoteException {
 
         r.startFreezingScreenLocked(app, 0);
-        if (true) Slog.d(TAG, "realStartActivity: setting app visibility true");
+        if (false) Slog.d(TAG, "realStartActivity: setting app visibility true");
         mWindowManager.setAppVisibility(r.appToken, true);
 
         // schedule launch ticks to collect information about slow apps.
@@ -1927,7 +1926,7 @@
     }
 
     void removeUserLocked(int userId) {
-        mUserHomeInFront.delete(userId);
+        mUserStackInFront.delete(userId);
     }
 
     /**
@@ -2248,8 +2247,8 @@
     }
 
     boolean switchUserLocked(int userId, UserStartedState uss) {
-        mUserHomeInFront.put(mCurrentUser, isFrontStack(mHomeStack));
-        final boolean homeInFront = mUserHomeInFront.get(userId, true);
+        mUserStackInFront.put(mCurrentUser, getFocusedStack().getStackId());
+        final int restoreStackId = mUserStackInFront.get(userId, HOME_STACK_ID);
         mCurrentUser = userId;
 
         mStartingUsers.add(uss);
@@ -2257,7 +2256,13 @@
             mStacks.get(stackNdx).switchUserLocked(userId);
         }
 
+        ActivityStack stack = getStack(restoreStackId);
+        if (stack == null) {
+            stack = mHomeStack;
+        }
+        final boolean homeInFront = stack.isHomeStack();
         moveHomeStack(homeInFront);
+        mWindowManager.moveTaskToTop(stack.topTask().taskId);
         return homeInFront;
     }
 
@@ -2270,7 +2275,7 @@
         final boolean nowVisible = allResumedActivitiesVisible();
         for (int i=0; i<N; i++) {
             ActivityRecord s = mStoppingActivities.get(i);
-            if (true || localLOGV) Slog.v(TAG, "Stopping " + s + ": nowVisible="
+            if (localLOGV) Slog.v(TAG, "Stopping " + s + ": nowVisible="
                     + nowVisible + " waitingVisible=" + s.waitingVisible
                     + " finishing=" + s.finishing);
             if (s.waitingVisible && nowVisible) {
@@ -2351,7 +2356,7 @@
         pw.print(prefix); pw.print("mStackState="); pw.println(stackStateToString(mStackState));
         pw.print(prefix); pw.println("mSleepTimeout: " + mSleepTimeout);
         pw.print(prefix); pw.println("mCurTaskId: " + mCurTaskId);
-        pw.print(prefix); pw.println("mUserHomeInFront: " + mUserHomeInFront);
+        pw.print(prefix); pw.println("mUserStackInFront: " + mUserStackInFront);
     }
 
     ArrayList<ActivityRecord> getDumpActivitiesLocked(String name) {
diff --git a/services/java/com/android/server/wm/WindowManagerService.java b/services/java/com/android/server/wm/WindowManagerService.java
index 00a653b..cfb10a0 100644
--- a/services/java/com/android/server/wm/WindowManagerService.java
+++ b/services/java/com/android/server/wm/WindowManagerService.java
@@ -9735,7 +9735,7 @@
                 newFocus = computeFocusedWindowLocked();
             }
 
-            if (true || DEBUG_FOCUS_LIGHT || localLOGV) Slog.v(TAG, "Changing focus from " +
+            if (DEBUG_FOCUS_LIGHT || localLOGV) Slog.v(TAG, "Changing focus from " +
                     mCurrentFocus + " to " + newFocus + " Callers=" + Debug.getCallers(4));
             final WindowState oldFocus = mCurrentFocus;
             mCurrentFocus = newFocus;
diff --git a/tests/Camera2Tests/SmartCamera/Android.mk b/tests/Camera2Tests/SmartCamera/Android.mk
new file mode 100644
index 0000000..3fa8f54a
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/Android.mk
@@ -0,0 +1,14 @@
+# Copyright 2013 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+include $(call all-subdir-makefiles)
diff --git a/tests/Camera2Tests/SmartCamera/README.txt b/tests/Camera2Tests/SmartCamera/README.txt
new file mode 100644
index 0000000..1fff3ab8
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/README.txt
@@ -0,0 +1,60 @@
+Copyright 2013 The Android Open Source Project
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+
+Smart Camera / Auto Snapshot (formerly named SimpleCamera) ReadMe
+
+Created by: Benjamin W Hendricks
+
+How to build the application:
+From the root of the tree, running "make SmartCamera" will build the apk for the generic target.
+Otherwise, to build the application for a specific device, lunch to that device
+and then run mm while in the SimpleCamera directory.
+Then take the given Install path (out/target/.../SmartCamera.apk)
+and run adb install out/target/.../SmartCamera.apk. The application should
+then appear in the launcher of your device.
+You might also need to run adb sync after building to sync the
+libsmartcamera_jni library
+Summarized:
+    make SmartCamera
+    adb remount
+    adb sync
+    adb install -r $ANDROID_PRODUCT_OUT/data/app/SmartCamera.apk
+
+How to run the application:
+On a Nexus 7, open up the application from the launcher, and the camera preview
+should appear. From there, you can go to the gallery with the gallery button or
+press start to start capturing images. You can also change the number of images
+to be captured by changing the number on the spinner (between 1-10).
+
+What does it do:
+The application tries to take good pictures for you automatically when in the
+start mode. On stop, the application will capture whatever images are in the
+bottom preview and save them to the Gallery. It does this by looking at the
+following image features:
+    - Sharpness
+    - Brightness
+    - Motion of the device
+    - Colorfulness
+    - Contrast
+    - Exposure (over/under)
+
+By comparing each of these features frame by frame, a score is calculated to
+determine whether an image is better or worse than the previous few frames,
+and from that score the application distinguishes the great images from the bad ones.
+
+What libraries does it use:
+- Mobile Filter Framework (MFF)
+- Camera2 API
+- RenderScript
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/.classpath b/tests/Camera2Tests/SmartCamera/SimpleCamera/.classpath
new file mode 100644
index 0000000..3f9691c
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/.classpath
@@ -0,0 +1,8 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<classpath>
+	<classpathentry kind="con" path="com.android.ide.eclipse.adt.ANDROID_FRAMEWORK"/>
+	<classpathentry kind="con" path="com.android.ide.eclipse.adt.LIBRARIES"/>
+	<classpathentry kind="src" path="src"/>
+	<classpathentry kind="src" path="gen"/>
+	<classpathentry kind="output" path="bin/classes"/>
+</classpath>
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/.project b/tests/Camera2Tests/SmartCamera/SimpleCamera/.project
new file mode 100644
index 0000000..2517e2d
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/.project
@@ -0,0 +1,33 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<projectDescription>
+	<name>CameraShoot</name>
+	<comment></comment>
+	<projects>
+	</projects>
+	<buildSpec>
+		<buildCommand>
+			<name>com.android.ide.eclipse.adt.ResourceManagerBuilder</name>
+			<arguments>
+			</arguments>
+		</buildCommand>
+		<buildCommand>
+			<name>com.android.ide.eclipse.adt.PreCompilerBuilder</name>
+			<arguments>
+			</arguments>
+		</buildCommand>
+		<buildCommand>
+			<name>org.eclipse.jdt.core.javabuilder</name>
+			<arguments>
+			</arguments>
+		</buildCommand>
+		<buildCommand>
+			<name>com.android.ide.eclipse.adt.ApkBuilder</name>
+			<arguments>
+			</arguments>
+		</buildCommand>
+	</buildSpec>
+	<natures>
+		<nature>com.android.ide.eclipse.adt.AndroidNature</nature>
+		<nature>org.eclipse.jdt.core.javanature</nature>
+	</natures>
+</projectDescription>
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/Android.mk b/tests/Camera2Tests/SmartCamera/SimpleCamera/Android.mk
new file mode 100644
index 0000000..801c81c
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/Android.mk
@@ -0,0 +1,42 @@
+# Copyright (C) 2013 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+ifneq ($(TARGET_BUILD_JAVA_SUPPORT_LEVEL),)
+
+LOCAL_PATH := $(call my-dir)
+
+include $(CLEAR_VARS)
+
+LOCAL_MODULE_TAGS := tests
+
+LOCAL_PROGUARD_ENABLED := disabled
+
+# comment it out for now since we need use some hidden APIs
+# LOCAL_SDK_VERSION := current
+
+LOCAL_STATIC_JAVA_LIBRARIES := android-ex-camera2
+
+LOCAL_SRC_FILES := \
+    $(call all-java-files-under, src) \
+    $(call all-renderscript-files-under, src)
+
+LOCAL_PACKAGE_NAME := SmartCamera
+LOCAL_JNI_SHARED_LIBRARIES := libsmartcamera_jni
+
+include $(BUILD_PACKAGE)
+
+# Include packages in subdirectories
+include $(call all-makefiles-under,$(LOCAL_PATH))
+
+endif
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/AndroidManifest.xml b/tests/Camera2Tests/SmartCamera/SimpleCamera/AndroidManifest.xml
new file mode 100644
index 0000000..0681868
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/AndroidManifest.xml
@@ -0,0 +1,38 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright 2013 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+      android:versionCode="1"
+      android:versionName="1.0"
+      package="androidx.media.filterfw.samples.simplecamera">
+    <uses-sdk android:minSdkVersion="18" android:targetSdkVersion="19"/>
+    <uses-permission android:name="android.permission.CAMERA" />
+    <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/>
+    <application android:label="Smart Camera"
+                  android:debuggable="true">
+    <uses-library android:name="com.google.android.media.effects"
+                  android:required="false" />
+
+        <activity android:name=".SmartCamera"
+                  android:label="Smart Camera"
+                  android:screenOrientation="portrait">
+            <intent-filter>
+                <action android:name="android.intent.action.MAIN" />
+                <category android:name="android.intent.category.LAUNCHER" />
+            </intent-filter>
+        </activity>
+
+    </application>
+</manifest>
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/assets/0002_000390.jpg b/tests/Camera2Tests/SmartCamera/SimpleCamera/assets/0002_000390.jpg
new file mode 100644
index 0000000..9b4bce4
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/assets/0002_000390.jpg
Binary files differ
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/assets/frsdk_expression_modules/BCLeyesclosed_100.emd b/tests/Camera2Tests/SmartCamera/SimpleCamera/assets/frsdk_expression_modules/BCLeyesclosed_100.emd
new file mode 100644
index 0000000..8c3d811
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/assets/frsdk_expression_modules/BCLeyesclosed_100.emd
Binary files differ
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/assets/frsdk_expression_modules/BCLjoy_100.emd b/tests/Camera2Tests/SmartCamera/SimpleCamera/assets/frsdk_expression_modules/BCLjoy_100.emd
new file mode 100644
index 0000000..4ae3fbd
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/assets/frsdk_expression_modules/BCLjoy_100.emd
Binary files differ
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/ic_launcher-web.png b/tests/Camera2Tests/SmartCamera/SimpleCamera/ic_launcher-web.png
new file mode 100644
index 0000000..f142216
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/ic_launcher-web.png
Binary files differ
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/jni/Android.mk b/tests/Camera2Tests/SmartCamera/SimpleCamera/jni/Android.mk
new file mode 100644
index 0000000..616a11b
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/jni/Android.mk
@@ -0,0 +1,49 @@
+# Copyright (C) 2013 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+FILTERFW_NATIVE_PATH := $(call my-dir)
+
+
+#
+# Build module libfilterframework
+#
+LOCAL_PATH := $(FILTERFW_NATIVE_PATH)
+include $(CLEAR_VARS)
+
+LOCAL_MODULE_TAGS := tests
+
+LOCAL_SDK_VERSION := 14
+
+LOCAL_MODULE := libsmartcamera_jni
+
+LOCAL_SRC_FILES := contrast.cpp \
+                brightness.cpp \
+                exposure.cpp \
+                colorspace.cpp \
+                histogram.cpp \
+                frametovalues.cpp \
+                pixelutils.cpp \
+                sobeloperator.cpp \
+                stats_scorer.cpp
+
+LOCAL_STATIC_LIBRARIES += \
+    libcutils
+
+LOCAL_C_INCLUDES += \
+    system/core/include \
+
+LOCAL_NDK_STL_VARIANT := stlport_static
+
+include $(BUILD_SHARED_LIBRARY)
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/jni/Application.mk b/tests/Camera2Tests/SmartCamera/SimpleCamera/jni/Application.mk
new file mode 100644
index 0000000..2b93b3c
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/jni/Application.mk
@@ -0,0 +1,16 @@
+# Copyright (C) 2013 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+APP_STL := stlport_static
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/jni/brightness.cpp b/tests/Camera2Tests/SmartCamera/SimpleCamera/jni/brightness.cpp
new file mode 100644
index 0000000..998fd4c
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/jni/brightness.cpp
@@ -0,0 +1,52 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Native function to extract brightness from image (handed down as ByteBuffer).
+
+#include "brightness.h"
+
+#include <math.h>
+#include <string.h>
+#include <jni.h>
+#include <unistd.h>
+#include <android/log.h>
+
+jfloat
+Java_androidx_media_filterfw_samples_simplecamera_AvgBrightnessFilter_brightnessOperator(
+    JNIEnv* env, jclass clazz, jint width, jint height, jobject imageBuffer) {
+
+    if (imageBuffer == 0) {
+        return 0.0f;
+    }
+    float pixelTotals[] = { 0.0f, 0.0f, 0.0f };
+    const int numPixels = width * height;
+    unsigned char* srcPtr = static_cast<unsigned char*>(env->GetDirectBufferAddress(imageBuffer));
+    for (int i = 0; i < numPixels; i++) {
+        pixelTotals[0] += *(srcPtr + 4 * i);
+        pixelTotals[1] += *(srcPtr + 4 * i + 1);
+        pixelTotals[2] += *(srcPtr + 4 * i + 2);
+    }
+    float avgPixels[] = { 0.0f, 0.0f, 0.0f };
+
+    avgPixels[0] = pixelTotals[0] / numPixels;
+    avgPixels[1] = pixelTotals[1] / numPixels;
+    avgPixels[2] = pixelTotals[2] / numPixels;
+    float returnValue = sqrt(0.241f * avgPixels[0] * avgPixels[0] +
+                            0.691f * avgPixels[1] * avgPixels[1] +
+                            0.068f * avgPixels[2] * avgPixels[2]);
+
+    return returnValue / 255;
+}
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/jni/brightness.h b/tests/Camera2Tests/SmartCamera/SimpleCamera/jni/brightness.h
new file mode 100644
index 0000000..c09e3b5
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/jni/brightness.h
@@ -0,0 +1,36 @@
+/* Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Native function to extract brightness from image (handed down as ByteBuffer).
+
+#ifndef ANDROID_FILTERFW_JNI_BRIGHTNESS_H
+#define ANDROID_FILTERFW_JNI_BRIGHTNESS_H
+
+#include <jni.h>
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+    JNIEXPORT jfloat JNICALL
+    Java_androidx_media_filterfw_samples_simplecamera_AvgBrightnessFilter_brightnessOperator(
+        JNIEnv* env, jclass clazz, jint width, jint height, jobject imageBuffer);
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif // ANDROID_FILTERFW_JNI_BRIGHTNESS_H
+
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/jni/colorspace.cpp b/tests/Camera2Tests/SmartCamera/SimpleCamera/jni/colorspace.cpp
new file mode 100644
index 0000000..63e2ebf
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/jni/colorspace.cpp
@@ -0,0 +1,177 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "colorspace.h"
+
+#include <jni.h>
+#include <stdint.h>
+
+typedef uint8_t uint8;
+typedef uint32_t uint32;
+typedef int32_t int32;
+
+// RGBA helper struct allows access as int and individual channels
+// WARNING: int value depends on endianness and should not be used to analyze individual channels.
+union Rgba {
+  uint32 color;
+  uint8 channel[4];
+};
+
+// Channel index constants
+static const uint8 kRed = 0;
+static const uint8 kGreen = 1;
+static const uint8 kBlue = 2;
+static const uint8 kAlpha = 3;
+
+// Clamp to range 0-255
+static inline uint32 clamp(int32 x) {
+  return x > 255 ? 255 : (x < 0 ? 0 : x);
+}
+
+// Convert YUV to RGBA
+// This uses the ITU-R BT.601 coefficients.
+static inline Rgba convertYuvToRgba(int32 y, int32 u, int32 v) {
+  Rgba color;
+  color.channel[kRed] = clamp(y + static_cast<int>(1.402 * v));
+  color.channel[kGreen] = clamp(y - static_cast<int>(0.344 * u + 0.714 * v));
+  color.channel[kBlue] = clamp(y + static_cast<int>(1.772 * u));
+  color.channel[kAlpha] = 0xFF;
+  return color;
+}
+
+// Colorspace conversion functions /////////////////////////////////////////////////////////////////
+void JNI_COLORSPACE_METHOD(nativeYuv420pToRgba8888)(
+    JNIEnv* env, jclass clazz, jobject input, jobject output, jint width, jint height) {
+  uint8* const pInput = static_cast<uint8*>(env->GetDirectBufferAddress(input));
+  Rgba* const pOutput = static_cast<Rgba*>(env->GetDirectBufferAddress(output));
+
+  const int size = width * height;
+
+  uint8* pInY = pInput;
+  uint8* pInU = pInput + size;
+  uint8* pInV = pInput + size + size / 4;
+  Rgba* pOutColor = pOutput;
+
+  const int u_offset = size;
+  const int v_offset = u_offset + size / 4;
+
+  for (int y = 0; y < height; y += 2) {
+    for (int x = 0; x < width; x += 2) {
+      int u, v, y1, y2, y3, y4;
+
+      y1 = pInY[0];
+      y2 = pInY[1];
+      y3 = pInY[width];
+      y4 = pInY[width + 1];
+
+      u = *pInU - 128;
+      v = *pInV - 128;
+
+      pOutColor[0] = convertYuvToRgba(y1, u, v);
+      pOutColor[1] = convertYuvToRgba(y2, u, v);
+      pOutColor[width] = convertYuvToRgba(y3, u, v);
+      pOutColor[width + 1] = convertYuvToRgba(y4, u, v);
+
+      pInY += 2;
+      pInU++;
+      pInV++;
+      pOutColor += 2;
+    }
+    pInY += width;
+    pOutColor += width;
+  }
+}
+
+void JNI_COLORSPACE_METHOD(nativeArgb8888ToRgba8888)(
+    JNIEnv* env, jclass clazz, jobject input, jobject output, jint width, jint height) {
+  Rgba* pInput = static_cast<Rgba*>(env->GetDirectBufferAddress(input));
+  Rgba* pOutput = static_cast<Rgba*>(env->GetDirectBufferAddress(output));
+
+  for (int i = 0; i < width * height; ++i) {
+    Rgba color_in = *pInput++;
+    Rgba& color_out = *pOutput++;
+    color_out.channel[kRed] = color_in.channel[kGreen];
+    color_out.channel[kGreen] = color_in.channel[kBlue];
+    color_out.channel[kBlue] = color_in.channel[kAlpha];
+    color_out.channel[kAlpha] = color_in.channel[kRed];
+  }
+}
+
+void JNI_COLORSPACE_METHOD(nativeRgba8888ToHsva8888)(
+    JNIEnv* env, jclass clazz, jobject input, jobject output, jint width, jint height) {
+  Rgba* pInput = static_cast<Rgba*>(env->GetDirectBufferAddress(input));
+  Rgba* pOutput = static_cast<Rgba*>(env->GetDirectBufferAddress(output));
+
+  int r, g, b, a, h, s, v, c_max, c_min;
+  float delta;
+  for (int i = 0; i < width * height; ++i) {
+    Rgba color_in = *pInput++;
+    Rgba& color_out = *pOutput++;
+    r = color_in.channel[kRed];
+    g = color_in.channel[kGreen];
+    b = color_in.channel[kBlue];
+    a = color_in.channel[kAlpha];
+
+    if (r > g) {
+      c_min = (g > b) ? b : g;
+      c_max = (r > b) ? r : b;
+    } else {
+      c_min = (r > b) ? b : r;
+      c_max = (g > b) ? g : b;
+    }
+    delta = c_max -c_min;
+
+    float scaler = 255 * 60 / 360.0f;
+    if (c_max == r) {
+      h = (g > b) ? static_cast<int>(scaler * (g - b) / delta) :
+          static_cast<int>(scaler * ((g - b) / delta + 6));
+    } else if (c_max == g) {
+      h = static_cast<int>(scaler * ((b - r) / delta + 2));
+    } else {  // Cmax == b
+      h = static_cast<int>(scaler * ((r - g) / delta + 4));
+    }
+    s = (delta == 0.0f) ? 0 : static_cast<unsigned char>(delta / c_max * 255);
+    v = c_max;
+
+    color_out.channel[kRed] = h;
+    color_out.channel[kGreen] = s;
+    color_out.channel[kBlue] = v;
+    color_out.channel[kAlpha] = a;
+  }
+}
+
+void JNI_COLORSPACE_METHOD(nativeRgba8888ToYcbcra8888)(
+    JNIEnv* env, jclass clazz, jobject input, jobject output, jint width, jint height) {
+  Rgba* pInput = static_cast<Rgba*>(env->GetDirectBufferAddress(input));
+  Rgba* pOutput = static_cast<Rgba*>(env->GetDirectBufferAddress(output));
+
+  int r, g, b;
+  for (int i = 0; i < width * height; ++i) {
+    Rgba color_in = *pInput++;
+    Rgba& color_out = *pOutput++;
+    r = color_in.channel[kRed];
+    g = color_in.channel[kGreen];
+    b = color_in.channel[kBlue];
+
+    color_out.channel[kRed] =
+        static_cast<unsigned char>((65.738 * r + 129.057 * g + 25.064 * b) / 256 + 16);
+    color_out.channel[kGreen] =
+        static_cast<unsigned char>((-37.945 * r - 74.494 * g + 112.439 * b) / 256 + 128);
+    color_out.channel[kBlue] =
+        static_cast<unsigned char>((112.439 * r - 94.154 * g - 18.285 * b) / 256 + 128);
+    color_out.channel[kAlpha] = color_in.channel[kAlpha];
+  }
+}
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/jni/colorspace.h b/tests/Camera2Tests/SmartCamera/SimpleCamera/jni/colorspace.h
new file mode 100644
index 0000000..c332749
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/jni/colorspace.h
@@ -0,0 +1,50 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_FILTERFW_JNI_COLORSPACE_H
+#define ANDROID_FILTERFW_JNI_COLORSPACE_H
+
+#include <jni.h>
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#define JNI_COLORSPACE_METHOD(METHOD_NAME) \
+  Java_androidx_media_filterfw_ColorSpace_ ## METHOD_NAME
+
+JNIEXPORT void JNICALL
+JNI_COLORSPACE_METHOD(nativeYuv420pToRgba8888)(
+    JNIEnv* env, jclass clazz, jobject input, jobject output, jint width, jint height);
+
+JNIEXPORT void JNICALL
+JNI_COLORSPACE_METHOD(nativeArgb8888ToRgba8888)(
+    JNIEnv* env, jclass clazz, jobject input, jobject output, jint width, jint height);
+
+JNIEXPORT void JNICALL
+JNI_COLORSPACE_METHOD(nativeRgba8888ToHsva8888)(
+    JNIEnv* env, jclass clazz, jobject input, jobject output, jint width, jint height);
+
+JNIEXPORT void JNICALL
+JNI_COLORSPACE_METHOD(nativeRgba8888ToYcbcra8888)(
+    JNIEnv* env, jclass clazz, jobject input, jobject output, jint width, jint height);
+
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif // ANDROID_FILTERFW_JNI_COLORSPACE_H
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/jni/contrast.cpp b/tests/Camera2Tests/SmartCamera/SimpleCamera/jni/contrast.cpp
new file mode 100644
index 0000000..222f738
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/jni/contrast.cpp
@@ -0,0 +1,51 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Native function to extract contrast ratio from image (handed down as ByteBuffer).
+
+#include "contrast.h"
+
+#include <math.h>
+#include <string.h>
+#include <jni.h>
+#include <unistd.h>
+#include <android/log.h>
+
+jfloat
+Java_androidx_media_filterfw_samples_simplecamera_ContrastRatioFilter_contrastOperator(
+    JNIEnv* env, jclass clazz, jint width, jint height, jobject imageBuffer) {
+
+    if (imageBuffer == 0) {
+      return 0.0f;
+    }
+    float total = 0;
+    const int numPixels = width * height;
+    unsigned char* srcPtr = static_cast<unsigned char*>(env->GetDirectBufferAddress(imageBuffer));
+    float* lumArray = new float[numPixels];
+    for (int i = 0; i < numPixels; i++) {
+        lumArray[i] = (0.2126f * *(srcPtr + 4 * i) + 0.7152f *
+            *(srcPtr + 4 * i + 1) + 0.0722f * *(srcPtr + 4 * i + 2)) / 255;
+        total += lumArray[i];
+    }
+    const float avg = total / numPixels;
+    float sum = 0;
+
+    for (int i = 0; i < numPixels; i++) {
+        sum += (lumArray[i] - avg) * (lumArray[i] - avg);
+    }
+    delete[] lumArray;
+    return ((float) sqrt(sum / numPixels));
+}
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/jni/contrast.h b/tests/Camera2Tests/SmartCamera/SimpleCamera/jni/contrast.h
new file mode 100644
index 0000000..ddcd3d4
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/jni/contrast.h
@@ -0,0 +1,36 @@
+/* Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Native function to extract contrast from image (handed down as ByteBuffer).
+
+#ifndef ANDROID_FILTERFW_JNI_CONTRAST_H
+#define ANDROID_FILTERFW_JNI_CONTRAST_H
+
+#include <jni.h>
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+    JNIEXPORT jfloat JNICALL
+    Java_androidx_media_filterfw_samples_simplecamera_ContrastRatioFilter_contrastOperator(
+        JNIEnv* env, jclass clazz, jint width, jint height, jobject imageBuffer);
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif // ANDROID_FILTERFW_JNI_CONTRAST_H
+
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/jni/exposure.cpp b/tests/Camera2Tests/SmartCamera/SimpleCamera/jni/exposure.cpp
new file mode 100644
index 0000000..b2853f7
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/jni/exposure.cpp
@@ -0,0 +1,70 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Native function to extract exposure from image (handed down as ByteBuffer).
+
+#include "exposure.h"
+
+#include <math.h>
+#include <string.h>
+#include <jni.h>
+#include <unistd.h>
+#include <android/log.h>
+
+
+jfloat
+Java_androidx_media_filterfw_samples_simplecamera_ExposureFilter_overExposureOperator(
+    JNIEnv* env, jclass clazz, jint width, jint height, jobject imageBuffer) {
+    if (imageBuffer == 0) {
+        return 0.0f;
+    }
+    const int numPixels = width * height;
+    unsigned char* srcPtr = static_cast<unsigned char*>(env->GetDirectBufferAddress(imageBuffer));
+    int output = 0;
+    float tempLuminance = 0.0f;
+
+    for (int i = 0; i < numPixels; i++) {
+        tempLuminance = (0.2126f * *(srcPtr + 4 * i) +
+                        0.7152f * *(srcPtr + 4 * i + 1) +
+                        0.0722f * *(srcPtr + 4 * i + 2));
+        if (tempLuminance + 5 >= 255) {
+            output++;
+        }
+    }
+    return (static_cast<float>(output)) / numPixels;
+}
+
+jfloat
+Java_androidx_media_filterfw_samples_simplecamera_ExposureFilter_underExposureOperator(
+    JNIEnv* env, jclass clazz, jint width, jint height, jobject imageBuffer) {
+    if (imageBuffer == 0) {
+        return 0.0f;
+    }
+    const int numPixels = width * height;
+    unsigned char* srcPtr = static_cast<unsigned char*>(env->GetDirectBufferAddress(imageBuffer));
+    int output = 0;
+    float tempLuminance = 0.0f;
+
+    for (int i = 0; i < numPixels; i++) {
+        tempLuminance = (0.2126f * *(srcPtr + 4 * i) +
+                        0.7152f * *(srcPtr + 4 * i + 1) +
+                        0.0722f * *(srcPtr + 4 * i + 2));
+        if (tempLuminance - 5 <= 0) {
+            output++;
+        }
+    }
+    return (static_cast<float>(output)) / numPixels;
+}
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/jni/exposure.h b/tests/Camera2Tests/SmartCamera/SimpleCamera/jni/exposure.h
new file mode 100644
index 0000000..bc6e3b1
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/jni/exposure.h
@@ -0,0 +1,39 @@
+/* Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Native function to extract exposure from image (handed down as ByteBuffer).
+
+#ifndef ANDROID_FILTERFW_JNI_EXPOSURE_H
+#define ANDROID_FILTERFW_JNI_EXPOSURE_H
+
+#include <jni.h>
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+    JNIEXPORT jfloat JNICALL
+    Java_androidx_media_filterfw_samples_simplecamera_ExposureFilter_underExposureOperator(
+        JNIEnv* env, jclass clazz, jint width, jint height, jobject imageBuffer);
+
+    JNIEXPORT jfloat JNICALL
+    Java_androidx_media_filterfw_samples_simplecamera_ExposureFilter_overExposureOperator(
+        JNIEnv* env, jclass clazz, jint width, jint height, jobject imageBuffer);
+#ifdef __cplusplus
+}
+#endif
+
+#endif // ANDROID_FILTERFW_JNI_EXPOSURE_H
+
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/jni/frametovalues.cpp b/tests/Camera2Tests/SmartCamera/SimpleCamera/jni/frametovalues.cpp
new file mode 100644
index 0000000..2e3a0ec
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/jni/frametovalues.cpp
@@ -0,0 +1,76 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Native functions to pack an RGBA frame into a one channel grayscale buffer
+
+#include "frametovalues.h"
+
+#include <string.h>
+#include <jni.h>
+#include <unistd.h>
+#include <android/log.h>
+
+#include "imgprocutil.h"
+
+jboolean Java_androidx_media_filterpacks_image_ToGrayValuesFilter_toGrayValues(
+    JNIEnv* env, jclass clazz, jobject imageBuffer, jobject grayBuffer )
+{
+    unsigned char* pixelPtr = static_cast<unsigned char*>(env->GetDirectBufferAddress(imageBuffer));
+    unsigned char* grayPtr = static_cast<unsigned char*>(env->GetDirectBufferAddress(grayBuffer));
+
+    if (pixelPtr == 0 || grayPtr == 0) {
+      return JNI_FALSE;
+    }
+
+    int numPixels  = env->GetDirectBufferCapacity(imageBuffer) / 4;
+
+    // TODO: the current implementation is focused on the correctness not performance.
+    // If performance becomes an issue, it is better to increment pixelPtr directly.
+    int disp = 0;
+    for(int idx = 0; idx < numPixels; idx++, disp+=4) {
+      int R = *(pixelPtr + disp);
+      int G = *(pixelPtr + disp + 1);
+      int B = *(pixelPtr + disp + 2);
+      int gray = getIntensityFast(R, G, B);
+      *(grayPtr+idx) = static_cast<unsigned char>(gray);
+    }
+
+    return JNI_TRUE;
+}
+
+jboolean Java_androidx_media_filterpacks_image_ToRgbValuesFilter_toRgbValues(
+    JNIEnv* env, jclass clazz, jobject imageBuffer, jobject rgbBuffer )
+{
+    unsigned char* pixelPtr = static_cast<unsigned char*>(env->GetDirectBufferAddress(imageBuffer));
+    unsigned char* rgbPtr = static_cast<unsigned char*>(env->GetDirectBufferAddress(rgbBuffer));
+
+    if (pixelPtr == 0 || rgbPtr == 0) {
+      return JNI_FALSE;
+    }
+
+    int numPixels  = env->GetDirectBufferCapacity(imageBuffer) / 4;
+
+    // TODO: this code could be revised to improve the performance as the TODO above.
+    int pixelDisp = 0;
+    int rgbDisp = 0;
+    for(int idx = 0; idx < numPixels; idx++, pixelDisp += 4, rgbDisp += 3) {
+      for (int c = 0; c < 3; ++c) {
+        *(rgbPtr + rgbDisp + c) = *(pixelPtr + pixelDisp + c);
+      }
+    }
+    return JNI_TRUE;
+}
+
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/jni/frametovalues.h b/tests/Camera2Tests/SmartCamera/SimpleCamera/jni/frametovalues.h
new file mode 100644
index 0000000..4abb848
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/jni/frametovalues.h
@@ -0,0 +1,42 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Native functions to pack an RGBA frame into either a one channel grayscale buffer
+// or a three channel RGB buffer.
+
+#ifndef ANDROID_FILTERFW_JNI_TOGRAYVALUES_H
+#define ANDROID_FILTERFW_JNI_TOGRAYVALUES_H
+
+#include <jni.h>
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+JNIEXPORT jboolean JNICALL
+Java_androidx_media_filterpacks_image_ToGrayValuesFilter_toGrayValues(
+    JNIEnv* env, jclass clazz, jobject imageBuffer, jobject grayBuffer );
+
+JNIEXPORT jboolean JNICALL
+Java_androidx_media_filterpacks_image_ToRgbValuesFilter_toRgbValues(
+    JNIEnv* env, jclass clazz, jobject imageBuffer, jobject rgbBuffer );
+
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif // ANDROID_FILTERFW_JNI_TOGRAYVALUES_H
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/jni/histogram.cpp b/tests/Camera2Tests/SmartCamera/SimpleCamera/jni/histogram.cpp
new file mode 100644
index 0000000..ba060d4
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/jni/histogram.cpp
@@ -0,0 +1,128 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Native function to extract histogram from image (handed down as ByteBuffer).
+
+#include "histogram.h"
+
+#include <string.h>
+#include <jni.h>
+#include <unistd.h>
+#include <android/log.h>
+
+#include "imgprocutil.h"
+
+inline void addPixelToHistogram(unsigned char*& pImg, int* pHist, int numBins) {
+    int R = *(pImg++);
+    int G = *(pImg++);
+    int B = *(pImg++);
+    ++pImg;
+    int i = getIntensityFast(R, G, B);
+    int bin = clamp(0, static_cast<int>(static_cast<float>(i * numBins) / 255.0f), numBins - 1);
+    ++pHist[bin];
+}
+
+void Java_androidx_media_filterpacks_histogram_GrayHistogramFilter_extractHistogram(
+    JNIEnv* env, jclass clazz, jobject imageBuffer, jobject maskBuffer, jobject histogramBuffer )
+{
+    unsigned char* pImg = static_cast<unsigned char*>(env->GetDirectBufferAddress(imageBuffer));
+    int* pHist = static_cast<int*>(env->GetDirectBufferAddress(histogramBuffer));
+    int numPixels  = env->GetDirectBufferCapacity(imageBuffer) / 4;  // 4 bytes per pixel
+    int numBins    = env->GetDirectBufferCapacity(histogramBuffer);
+
+    unsigned char* pMask = NULL;
+    if(maskBuffer != NULL) {
+        pMask = static_cast<unsigned char*>(env->GetDirectBufferAddress(maskBuffer));
+    }
+
+    for(int i = 0; i < numBins; ++i) pHist[i] = 0;
+
+    if(pMask == NULL) {
+        for( ; numPixels > 0; --numPixels) {
+            addPixelToHistogram(pImg, pHist, numBins);
+        }
+    } else {
+        for( ; numPixels > 0; --numPixels) {
+            if(*pMask == 0){
+                pMask += 4;
+                pImg  += 4;  // Note that otherwise addPixelToHistogram advances pImg by 4
+                continue;
+            }
+            pMask += 4;
+            addPixelToHistogram(pImg, pHist, numBins);
+        }
+    }
+}
+
+void Java_androidx_media_filterpacks_histogram_ChromaHistogramFilter_extractChromaHistogram(
+    JNIEnv* env, jclass clazz, jobject imageBuffer, jobject histogramBuffer, jint hBins, jint sBins)
+{
+    unsigned char* pixelIn = static_cast<unsigned char*>(env->GetDirectBufferAddress(imageBuffer));
+    float* histOut = static_cast<float*>(env->GetDirectBufferAddress(histogramBuffer));
+    int numPixels  = env->GetDirectBufferCapacity(imageBuffer) / 4;  // 4 bytes per pixel
+
+    for (int i = 0; i < hBins * sBins; ++i) histOut[i] = 0.0f;
+
+    int h, s, v;
+    float hScaler = hBins / 256.0f;
+    float sScaler = sBins / 256.0f;
+    for( ; numPixels > 0; --numPixels) {
+      h = *(pixelIn++);
+      s = *(pixelIn++);
+      v = *(pixelIn++);
+      pixelIn++;
+
+      int index = static_cast<int>(s * sScaler) * hBins + static_cast<int>(h * hScaler);
+      histOut[index] += 1.0f;
+    }
+}
+
+void Java_androidx_media_filterpacks_histogram_NewChromaHistogramFilter_extractChromaHistogram(
+    JNIEnv* env, jclass clazz, jobject imageBuffer, jobject histogramBuffer,
+    jint hueBins, jint saturationBins, jint valueBins,
+    jint saturationThreshold, jint valueThreshold) {
+    unsigned char* pixelIn = static_cast<unsigned char*>(env->GetDirectBufferAddress(imageBuffer));
+    float* histOut = static_cast<float*>(env->GetDirectBufferAddress(histogramBuffer));
+    int numPixels  = env->GetDirectBufferCapacity(imageBuffer) / 4;  // 4 bytes per pixel
+
+    // TODO: add check on the size of histOut
+    for (int i = 0; i < (hueBins * saturationBins + valueBins); ++i) {
+      histOut[i] = 0.0f;
+    }
+
+    for( ; numPixels > 0; --numPixels) {
+      int h = *(pixelIn++);
+      int s = *(pixelIn++);
+      int v = *(pixelIn++);
+
+      pixelIn++;
+      // If a pixel is either too dark (value less than valueThreshold) or colorless
+      // (saturation less than saturationThreshold), it is put in a 1-D value histogram instead.
+
+      int index;
+      if (s > saturationThreshold && v > valueThreshold) {
+        int sIndex = s * saturationBins / 256;
+
+        // Shifting hue index by 0.5 such that peaks of red, yellow, green, cyan, blue, pink
+        // will be at the center of some bins.
+        int hIndex = ((h * hueBins + 128) / 256) % hueBins;
+        index = sIndex * hueBins + hIndex;
+      } else {
+        index =  hueBins * saturationBins + (v * valueBins / 256);
+      }
+      histOut[index] += 1.0f;
+    }
+}
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/jni/histogram.h b/tests/Camera2Tests/SmartCamera/SimpleCamera/jni/histogram.h
new file mode 100644
index 0000000..b5e88aa
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/jni/histogram.h
@@ -0,0 +1,46 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Native function to extract histogram from image (handed down as ByteBuffer).
+
+#ifndef ANDROID_FILTERFW_JNI_HISTOGRAM_H
+#define ANDROID_FILTERFW_JNI_HISTOGRAM_H
+
+#include <jni.h>
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+JNIEXPORT void JNICALL
+Java_androidx_media_filterpacks_histogram_GrayHistogramFilter_extractHistogram(
+    JNIEnv* env, jclass clazz, jobject imageBuffer, jobject maskBuffer, jobject histogramBuffer );
+
+JNIEXPORT void JNICALL
+Java_androidx_media_filterpacks_histogram_ChromaHistogramFilter_extractChromaHistogram(
+    JNIEnv* env, jclass clazz, jobject imageBuffer, jobject histogramBuffer, jint hBins, jint sBins);
+
+JNIEXPORT void JNICALL
+Java_androidx_media_filterpacks_histogram_NewChromaHistogramFilter_extractChromaHistogram(
+    JNIEnv* env, jclass clazz, jobject imageBuffer, jobject histogramBuffer,
+    jint hueBins, jint saturationBins, jint valueBins,
+    jint saturationThreshold, jint valueThreshold);
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif // ANDROID_FILTERFW_JNI_HISTOGRAM_H
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/jni/imgprocutil.h b/tests/Camera2Tests/SmartCamera/SimpleCamera/jni/imgprocutil.h
new file mode 100644
index 0000000..aef67a5
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/jni/imgprocutil.h
@@ -0,0 +1,32 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Some native low-level image processing functions.
+
+
+#ifndef ANDROID_FILTERFW_JNI_IMGPROCUTIL_H
+#define ANDROID_FILTERFW_JNI_IMGPROCUTIL_H
+
+inline int getIntensityFast(int R, int G, int B) {
+    return (R + R + R + B + G + G + G + G) >> 3;  // see http://stackoverflow.com/a/596241
+}
+
+inline int clamp(int min, int val, int max) {
+    return val < min ? min : (val > max ? max : val);
+        // Note that for performance reasons, this function does *not* check if min < max!
+}
+
+#endif // ANDROID_FILTERFW_JNI_IMGPROCUTIL_H
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/jni/pixelutils.cpp b/tests/Camera2Tests/SmartCamera/SimpleCamera/jni/pixelutils.cpp
new file mode 100644
index 0000000..596c7c0
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/jni/pixelutils.cpp
@@ -0,0 +1,38 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "pixelutils.h"
+
+#include <stdint.h>
+
+typedef uint32_t uint32;
+
+void JNI_PIXELUTILS_METHOD(nativeCopyPixels)(
+    JNIEnv* env, jclass clazz, jobject input, jobject output, jint width, jint height, jint offset,
+    jint pixStride, jint rowStride) {
+  uint32* pInPix = static_cast<uint32*>(env->GetDirectBufferAddress(input));
+  uint32* pOutput = static_cast<uint32*>(env->GetDirectBufferAddress(output));
+  uint32* pOutRow = pOutput + offset;
+  for (int y = 0; y < height; ++y) {
+    uint32* pOutPix = pOutRow;
+    for (int x = 0; x < width; ++x) {
+      *pOutPix = *(pInPix++);
+      pOutPix += pixStride;
+    }
+    pOutRow += rowStride;
+  }
+}
+
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/jni/pixelutils.h b/tests/Camera2Tests/SmartCamera/SimpleCamera/jni/pixelutils.h
new file mode 100644
index 0000000..be69009
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/jni/pixelutils.h
@@ -0,0 +1,39 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_FILTERFW_JNI_PIXELUTILS_H
+#define ANDROID_FILTERFW_JNI_PIXELUTILS_H
+
+#include <jni.h>
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#define JNI_PIXELUTILS_METHOD(METHOD_NAME) \
+  Java_androidx_media_filterfw_PixelUtils_ ## METHOD_NAME
+
+JNIEXPORT void JNICALL
+JNI_PIXELUTILS_METHOD(nativeCopyPixels)(
+    JNIEnv* env, jclass clazz, jobject input, jobject output, jint width, jint height, jint offset,
+    jint pixStride, jint rowStride);
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif // ANDROID_FILTERFW_JNI_PIXELUTILS_H
+
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/jni/sobeloperator.cpp b/tests/Camera2Tests/SmartCamera/SimpleCamera/jni/sobeloperator.cpp
new file mode 100644
index 0000000..dc5c305
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/jni/sobeloperator.cpp
@@ -0,0 +1,116 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Native function to apply the Sobel operator to an image (handed down as ByteBuffer).
+
+#include "sobeloperator.h"
+
+#include <math.h>
+#include <string.h>
+#include <jni.h>
+#include <unistd.h>
+#include <android/log.h>
+
+#include "imgprocutil.h"
+
+/*
+ * Compute per-channel Sobel gradients of an RGBA image.
+ * Parameters:
+ *  dataPtr: pointer to the interleaved RGBA input image (4 bytes per pixel).
+ *  width: image width in pixels.
+ *  height: image height in pixels.
+ *  gxPtr: output buffer for the horizontal gradient, 3 signed values
+ *         (R, G, B) per pixel.
+ *  gyPtr: output buffer for the vertical gradient, 3 signed values
+ *         (R, G, B) per pixel.
+ * Border pixels are handled by clamping reads to the image edge.
+ */
+
+void computeGradient(unsigned char* dataPtr, int width, int height, short* gxPtr, short* gyPtr) {
+  for (int i = 0; i < height; i++) {
+    for (int j = 0; j < width; j++) {
+      const int left = (j > 0)? -4 : 0;
+      const int right = (j < width - 1) ? 4 : 0;
+      const int curr = (i * width + j) * 4;
+      const int above = (i > 0) ? curr - 4 * width : curr;
+      const int below = (i < height - 1) ? curr + 4 * width : curr;
+      const int offset = (i * width + j) * 3;
+      for (int c = 0; c < 3; c++) {
+        *(gxPtr + offset + c) =
+            (*(dataPtr + curr + c + right) - *(dataPtr + curr + c + left)) * 2 +
+            *(dataPtr + above + c + right) - *(dataPtr + above + c + left) +
+            *(dataPtr + below + c + right) - *(dataPtr + below + c + left);
+        *(gyPtr + offset + c) =
+            (*(dataPtr + c + below) - *(dataPtr + c + above)) * 2 +
+            *(dataPtr + left + c + below) - *(dataPtr + left + c + above) +
+            *(dataPtr + right + c + below) - *(dataPtr + right + c + above);
+      }
+    }
+  }
+}
+
+jboolean Java_androidx_media_filterpacks_image_SobelFilter_sobelOperator(
+    JNIEnv* env, jclass clazz, jint width, jint height, jobject imageBuffer,
+    jobject magBuffer, jobject dirBuffer) {
+
+  if (imageBuffer == 0) {
+    return JNI_FALSE;
+  }
+  unsigned char* srcPtr = static_cast<unsigned char*>(env->GetDirectBufferAddress(imageBuffer));
+  unsigned char* magPtr = (magBuffer == 0) ?
+      0 : static_cast<unsigned char*>(env->GetDirectBufferAddress(magBuffer));
+  unsigned char* dirPtr = (dirBuffer == 0) ?
+      0 : static_cast<unsigned char*>(env->GetDirectBufferAddress(dirBuffer));
+
+  int numPixels = width * height;
+  // TODO: avoid creating and deleting these buffers within this native function.
+  short* gxPtr = new short[3 * numPixels];
+  short* gyPtr = new short[3 * numPixels];
+  computeGradient(srcPtr, width, height, gxPtr, gyPtr);
+
+  unsigned char* mag = magPtr;
+  unsigned char* dir = dirPtr;
+  for (int i = 0; i < numPixels; ++i) {
+    for (int c = 0; c < 3; c++) {
+      int gx = static_cast<int>(*(gxPtr + 3 * i + c) / 8 + 127.5);
+      int gy = static_cast<int>(*(gyPtr + 3 * i + c) / 8 + 127.5);
+
+      // emulate arithmetic in GPU.
+      gx = 2 * gx - 255;
+      gy = 2 * gy - 255;
+      if (magPtr != 0) {
+        double value = sqrt(gx * gx + gy * gy);
+        *(magPtr + 4 * i + c) = static_cast<unsigned char>(value);
+      }
+      if (dirPtr != 0) {
+        *(dirPtr + 4 * i + c) = static_cast<unsigned char>(
+            (atan(static_cast<double>(gy)/static_cast<double>(gx)) + 3.14) / 6.28);
+      }
+    }
+    // Setting the alpha channel to 1.0 (255).
+    if (magPtr != 0) {
+      *(magPtr + 4 * i + 3) = 255;
+    }
+    if (dirPtr != 0) {
+      *(dirPtr + 4 * i + 3) = 255;
+    }
+  }
+
+  delete[] gxPtr;
+  delete[] gyPtr;
+
+  return JNI_TRUE;
+}
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/jni/sobeloperator.h b/tests/Camera2Tests/SmartCamera/SimpleCamera/jni/sobeloperator.h
new file mode 100644
index 0000000..c7639d2
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/jni/sobeloperator.h
@@ -0,0 +1,37 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Native function to apply the Sobel operator to an image (handed down as ByteBuffer).
+
+#ifndef ANDROID_FILTERFW_JNI_SOBELOPERATOR_H
+#define ANDROID_FILTERFW_JNI_SOBELOPERATOR_H
+
+#include <jni.h>
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+JNIEXPORT jboolean JNICALL
+Java_androidx_media_filterpacks_image_SobelFilter_sobelOperator(
+    JNIEnv* env, jclass clazz, jint width, jint height,
+    jobject imageBuffer, jobject magBuffer, jobject dirBuffer);
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif // ANDROID_FILTERFW_JNI_SOBELOPERATOR_H
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/jni/stats_scorer.cpp b/tests/Camera2Tests/SmartCamera/SimpleCamera/jni/stats_scorer.cpp
new file mode 100644
index 0000000..f282675
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/jni/stats_scorer.cpp
@@ -0,0 +1,70 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Stats (mean and stdev) scoring in the native.
+
+#include "stats_scorer.h"
+
+#include <jni.h>
+#include <math.h>
+
+void Java_androidx_media_filterpacks_numeric_StatsFilter_score(
+    JNIEnv* env, jobject thiz, jobject imageBuffer, jfloatArray statsArray)
+{
+    unsigned char* pImg = static_cast<unsigned char*>(env->GetDirectBufferAddress(imageBuffer));
+    int numPixels  = env->GetDirectBufferCapacity(imageBuffer);  // 1 byte per pixel
+    float sum = 0.0;
+    float sumSquares = 0.0;
+
+    for (int i = 0; i < numPixels; ++i) {
+        float val = static_cast<float>(pImg[i]);
+        sum += val;
+        sumSquares += val * val;
+    }
+    jfloat result[2];
+    result[0] = sum / numPixels;  // mean
+    result[1] = sqrt((sumSquares - numPixels * result[0] * result[0]) / (numPixels - 1));  // stdev.
+    env->SetFloatArrayRegion(statsArray, 0, 2, result);
+}
+
+void Java_androidx_media_filterpacks_numeric_StatsFilter_regionscore(
+    JNIEnv* env, jobject thiz, jobject imageBuffer, jint width, jint height,
+    jfloat left, jfloat top, jfloat right, jfloat bottom, jfloatArray statsArray)
+{
+    unsigned char* pImg = static_cast<unsigned char*>(env->GetDirectBufferAddress(imageBuffer));
+    int xStart = static_cast<int>(width * left);
+    int xEnd = static_cast<int>(width * right);
+    int yStart = static_cast<int>(height * top);
+    int yEnd = static_cast<int>(height * bottom);
+    int numPixels  = (xEnd - xStart) * (yEnd - yStart);
+    float sum = 0.0;
+    float sumSquares = 0.0;
+
+    for (int y = yStart; y < yEnd; y++) {
+      int disp = width * y;
+      for (int x = xStart; x < xEnd; ++x) {
+        float val = static_cast<float>(*(pImg + disp + x));
+        sum += val;
+        sumSquares += val * val;
+      }
+    }
+    jfloat result[2];
+    result[0] = sum / numPixels;  // mean
+    result[1] = (numPixels == 1) ?
+        0 : sqrt((sumSquares - numPixels * result[0] * result[0]) / (numPixels - 1));  // stdev.
+    env->SetFloatArrayRegion(statsArray, 0, 2, result);
+}
+
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/jni/stats_scorer.h b/tests/Camera2Tests/SmartCamera/SimpleCamera/jni/stats_scorer.h
new file mode 100644
index 0000000..a951ec9
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/jni/stats_scorer.h
@@ -0,0 +1,44 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Stats (mean and stdev) scoring in the native.
+
+#ifndef ANDROID_FILTERFW_JNI_STATS_SCORER_H
+#define ANDROID_FILTERFW_JNI_STATS_SCORER_H
+
+#include <jni.h>
+
+#define JNI_FES_FUNCTION(name) Java_androidx_media_filterpacks_numeric_StatsFilter_ ## name
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+JNIEXPORT void JNICALL
+JNI_FES_FUNCTION(score)(
+    JNIEnv* env, jobject thiz, jobject imageBuffer, jfloatArray statsArray);
+
+JNIEXPORT void JNICALL
+JNI_FES_FUNCTION(regionscore)(
+   JNIEnv* env, jobject thiz, jobject imageBuffer, jint width, jint height,
+   jfloat lefp, jfloat top, jfloat right, jfloat bottom, jfloatArray statsArray);
+
+#ifdef __cplusplus
+}
+#endif
+
+
+#endif  // ANDROID_FILTERFW_JNI_STATS_SCORER_H
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/proguard-project.txt b/tests/Camera2Tests/SmartCamera/SimpleCamera/proguard-project.txt
new file mode 100644
index 0000000..f2fe155
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/proguard-project.txt
@@ -0,0 +1,20 @@
+# To enable ProGuard in your project, edit project.properties
+# to define the proguard.config property as described in that file.
+#
+# Add project specific ProGuard rules here.
+# By default, the flags in this file are appended to flags specified
+# in ${sdk.dir}/tools/proguard/proguard-android.txt
+# You can edit the include path and order by changing the ProGuard
+# include property in project.properties.
+#
+# For more details, see
+#   http://developer.android.com/guide/developing/tools/proguard.html
+
+# Add any project specific keep options here:
+
+# If your project uses WebView with JS, uncomment the following
+# and specify the fully qualified class name to the JavaScript interface
+# class:
+#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
+#   public *;
+#}
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/project.properties b/tests/Camera2Tests/SmartCamera/SimpleCamera/project.properties
new file mode 100644
index 0000000..10149cb
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/project.properties
@@ -0,0 +1,15 @@
+# This file is automatically generated by Android Tools.
+# Do not modify this file -- YOUR CHANGES WILL BE ERASED!
+#
+# This file must be checked in Version Control Systems.
+#
+# To customize properties used by the Ant build system edit
+# "ant.properties", and override values to adapt the script to your
+# project structure.
+#
+# To enable ProGuard to shrink and obfuscate your code, uncomment this (available properties: sdk.dir, user.home):
+#proguard.config=${sdk.dir}/tools/proguard/proguard-android.txt:proguard-project.txt
+
+# Project target.
+target=android-16
+android.library.reference.1=../../filterfw
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/res/drawable-hdpi/black_screen.jpg b/tests/Camera2Tests/SmartCamera/SimpleCamera/res/drawable-hdpi/black_screen.jpg
new file mode 100644
index 0000000..702d9fa
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/res/drawable-hdpi/black_screen.jpg
Binary files differ
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/res/drawable-hdpi/ic_launcher.png b/tests/Camera2Tests/SmartCamera/SimpleCamera/res/drawable-hdpi/ic_launcher.png
new file mode 100644
index 0000000..1c7b44a
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/res/drawable-hdpi/ic_launcher.png
Binary files differ
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/res/drawable-hdpi/ic_menu_gallery.png b/tests/Camera2Tests/SmartCamera/SimpleCamera/res/drawable-hdpi/ic_menu_gallery.png
new file mode 100644
index 0000000..f61bbd8
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/res/drawable-hdpi/ic_menu_gallery.png
Binary files differ
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/res/drawable-hdpi/ic_menu_quill.png b/tests/Camera2Tests/SmartCamera/SimpleCamera/res/drawable-hdpi/ic_menu_quill.png
new file mode 100644
index 0000000..7ea01b7
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/res/drawable-hdpi/ic_menu_quill.png
Binary files differ
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/res/drawable-hdpi/ic_menu_save.png b/tests/Camera2Tests/SmartCamera/SimpleCamera/res/drawable-hdpi/ic_menu_save.png
new file mode 100644
index 0000000..62d0b9a
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/res/drawable-hdpi/ic_menu_save.png
Binary files differ
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/res/drawable-ldpi/ic_launcher.png b/tests/Camera2Tests/SmartCamera/SimpleCamera/res/drawable-ldpi/ic_launcher.png
new file mode 100644
index 0000000..b42e903
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/res/drawable-ldpi/ic_launcher.png
Binary files differ
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/res/drawable-mdpi/ic_launcher.png b/tests/Camera2Tests/SmartCamera/SimpleCamera/res/drawable-mdpi/ic_launcher.png
new file mode 100644
index 0000000..d4b4d6b
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/res/drawable-mdpi/ic_launcher.png
Binary files differ
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/res/drawable-nodpi/android_figure.png b/tests/Camera2Tests/SmartCamera/SimpleCamera/res/drawable-nodpi/android_figure.png
new file mode 100644
index 0000000..71c6d76
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/res/drawable-nodpi/android_figure.png
Binary files differ
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/res/drawable-nodpi/oldframe.png b/tests/Camera2Tests/SmartCamera/SimpleCamera/res/drawable-nodpi/oldframe.png
new file mode 100644
index 0000000..8b7ae63
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/res/drawable-nodpi/oldframe.png
Binary files differ
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/res/drawable-nodpi/polaroid.png b/tests/Camera2Tests/SmartCamera/SimpleCamera/res/drawable-nodpi/polaroid.png
new file mode 100644
index 0000000..5504c57
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/res/drawable-nodpi/polaroid.png
Binary files differ
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/res/drawable-xhdpi/ic_launcher.png b/tests/Camera2Tests/SmartCamera/SimpleCamera/res/drawable-xhdpi/ic_launcher.png
new file mode 100644
index 0000000..3bb5454
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/res/drawable-xhdpi/ic_launcher.png
Binary files differ
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/res/layout/imageview.xml b/tests/Camera2Tests/SmartCamera/SimpleCamera/res/layout/imageview.xml
new file mode 100644
index 0000000..4e20c3f
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/res/layout/imageview.xml
@@ -0,0 +1,22 @@
+<?xml version="1.0" encoding="utf-8"?>
+
+<!-- Copyright 2013 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<ImageView xmlns:android="http://schemas.android.com/apk/res/android"
+    android:layout_width="180px"
+    android:layout_height="240px"
+    android:src="@drawable/black_screen"
+    android:adjustViewBounds="true"
+/>
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/res/layout/simplecamera.xml b/tests/Camera2Tests/SmartCamera/SimpleCamera/res/layout/simplecamera.xml
new file mode 100644
index 0000000..8d8ff51
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/res/layout/simplecamera.xml
@@ -0,0 +1,97 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2013 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+
+<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
+    android:orientation="vertical"
+    android:layout_width="fill_parent"
+    android:layout_height="fill_parent">
+
+    <RelativeLayout android:id="@+id/surfaceViewLayout"
+        android:layout_width="wrap_content"
+        android:layout_height="1240px"
+        android:layout_alignParentTop="true" >
+        <SurfaceView android:id="@+id/cameraView"
+        android:layout_width="fill_parent"
+        android:layout_height="wrap_content"
+        android:layout_weight="1.0"
+        />
+        <Button android:id="@+id/startButton"
+        android:layout_width="wrap_content"
+        android:layout_height="wrap_content"
+        android:text="@string/startButton"
+        android:layout_alignParentBottom="true"
+        android:layout_alignParentLeft="true"
+        />
+        <Button android:id="@+id/galleryOpenButton"
+        android:layout_width="wrap_content"
+        android:layout_height="wrap_content"
+        android:text="@string/galleryOpenButton"
+        android:layout_alignParentBottom="true"
+        android:layout_alignParentRight="true"
+        />
+        <Spinner android:id="@+id/spinner"
+        android:layout_width="wrap_content"
+        android:layout_height="wrap_content"
+        android:entries="@array/number_array"
+        android:layout_alignParentTop="true"
+        android:layout_alignParentRight="true"
+        />
+        <TextView android:id="@+id/imagesSavedTextView"
+        android:layout_height="wrap_content"
+        android:layout_width="wrap_content"
+        android:padding="16dip"
+        android:text="@string/imagesSavedTextView"
+        android:layout_centerHorizontal="true"
+        android:layout_alignParentBottom="true"
+        android:textColor="#FF0000"
+        android:textSize="20sp"
+        />
+    </RelativeLayout>
+    <HorizontalScrollView xmlns:android="http://schemas.android.com/apk/res/android"
+        android:id="@+id/scrollView"
+        android:layout_width="fill_parent"
+        android:layout_height="wrap_content" >
+    <LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
+      android:id="@+id/scrollViewLinearLayout"
+      android:orientation="horizontal"
+      android:layout_width="fill_parent"
+      android:layout_height="320px">
+    </LinearLayout>
+    </HorizontalScrollView>
+    <LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
+      android:orientation="horizontal"
+      android:layout_width="fill_parent"
+      android:layout_height="wrap_content">
+        <TextView android:id="@+id/goodOrBadTextView"
+        android:layout_width="wrap_content"
+        android:layout_height="fill_parent"
+        android:padding="16dip"
+        android:text="@string/goodOrBadTextView"
+        />
+        <TextView android:id="@+id/fpsTextView"
+        android:layout_height="fill_parent"
+        android:layout_width="wrap_content"
+        android:padding="16dip"
+        android:text="@string/fpsTextView"
+        />
+        <TextView android:id="@+id/scoreTextView"
+        android:layout_height="fill_parent"
+        android:layout_width="wrap_content"
+        android:padding="16dip"
+        android:text="@string/scoreTextView"
+        />
+    </LinearLayout>
+</LinearLayout>
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/res/raw/camera_graph.xml b/tests/Camera2Tests/SmartCamera/SimpleCamera/res/raw/camera_graph.xml
new file mode 100644
index 0000000..6661fd7
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/res/raw/camera_graph.xml
@@ -0,0 +1,182 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright 2013 The Android Open Source Project
+
+    Licensed under the Apache License, Version 2.0 (the "License");
+    you may not use this file except in compliance with the License.
+    You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing, software
+    distributed under the License is distributed on an "AS IS" BASIS,
+    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+    See the License for the specific language governing permissions and
+    limitations under the License.
+-->
+
+<graph>
+    <!-- Packages -->
+    <import package="androidx.media.filterpacks.base"/>
+    <import package="androidx.media.filterpacks.image"/>
+    <import package="androidx.media.filterpacks.video"/>
+    <import package="androidx.media.filterpacks.text" />
+    <import package="androidx.media.filterpacks.numeric" />
+    <import package="androidx.media.filterpacks.face" />
+    <import package="androidx.media.filterpacks.transform" />
+    <import package="androidx.media.filterpacks.performance" />
+    <import package="androidx.media.filterfw.samples.simplecamera" />
+    <import package="androidx.media.filterpacks.histogram" />
+    <import package="androidx.media.filterpacks.colorspace" />
+    <import package="androidx.media.filterpacks.sensors" />
+
+    <!-- Filters -->
+    <filter class="ResizeFilter" name="resize" >
+        <input name="outputWidth" intValue="480" />
+        <input name="outputHeight" intValue="640" />
+    </filter>
+
+    <filter class="Camera2Source" name="camera"/>
+
+    <filter class="BranchFilter" name="mainBranch" />
+    <filter class="BranchFilter" name="preMainBranch" />
+    <filter class="BranchFilter" name="featureBranch" />
+
+    <filter class="SurfaceHolderTarget" name="camViewTarget"/>
+
+    <filter class="ScaleFilter" name="scale" >
+        <input name="scale" floatValue="0.50"/>
+    </filter>
+
+    <filter class="SobelFilter" name="sobel" />
+    <filter class="StatsFilter" name="statsFilter" />
+    <filter class="NormFilter" name="normFilter" />
+    <filter class="TextViewTarget" name="goodOrBadTextView" />
+    <filter class="ToGrayValuesFilter" name="sobelConverter" />
+    <filter class="AverageFilter" name="avgFilter" />
+
+    <var name="startCapture" />
+    <filter class="ImageGoodnessFilter" name="goodnessFilter" >
+        <input name="capturing" varValue="startCapture" />
+    </filter>
+
+    <filter class="ToStringFilter" name="scoreToString" />
+    <filter class="TextViewTarget" name="scoreTextView" />
+
+    <filter class="ExposureFilter" name="exposure" />
+
+    <filter class="TextViewTarget" name="fpsTextView" />
+    <filter class="ToStringFilter" name="throughputToString" />
+
+
+    <filter class="ContrastRatioFilter" name="contrast" />
+
+    <filter class="ScaleFilter" name="secondaryScale" >
+        <input name="scale" floatValue="0.50"/>
+    </filter>
+
+    <filter class="ThroughputFilter" name="throughput" />
+
+    <filter class="NewChromaHistogramFilter" name="histogram" />
+    <filter class="ColorfulnessFilter" name="colorfulness" />
+
+    <filter class="MotionSensorWTime" name="motion" />
+
+    <filter class="AvgBrightnessFilter" name="brightness" />
+
+    <filter class="RotateFilter" name="rotate" />
+
+    <filter class="BrightnessFilter" name="snapBrightness" />
+    <filter class="WaveTriggerFilter" name="snapEffect" />
+    <!-- Connections -->
+    <connect sourceFilter="camera" sourcePort="video"
+        targetFilter="rotate" targetPort="image" />
+
+    <connect sourceFilter="camera" sourcePort="orientation"
+        targetFilter="rotate" targetPort="rotateAngle" />
+
+    <connect sourceFilter="rotate" sourcePort="image"
+        targetFilter="resize" targetPort="image" />
+    <connect sourceFilter="resize" sourcePort="image"
+        targetFilter="preMainBranch" targetPort="input" />
+    <connect sourceFilter="preMainBranch" sourcePort="toMainBranch"
+        targetFilter="scale" targetPort="image" />
+    <connect sourceFilter="scale" sourcePort="image"
+         targetFilter="mainBranch" targetPort="input" />
+
+    <connect sourceFilter="preMainBranch" sourcePort="toGoodnessFilter"
+        targetFilter="goodnessFilter" targetPort="image" />
+    <connect sourceFilter="mainBranch" sourcePort="toFeatureBranch"
+        targetFilter="secondaryScale" targetPort="image" />
+    <connect sourceFilter="secondaryScale" sourcePort="image"
+        targetFilter="featureBranch" targetPort="input" />
+
+    <connect sourceFilter="featureBranch" sourcePort="toSobel"
+         targetFilter="sobel" targetPort="image" />
+
+    <connect sourceFilter="sobel" sourcePort="magnitude"
+         targetFilter="sobelConverter" targetPort="image" />
+
+    <connect sourceFilter="sobelConverter" sourcePort="image"
+         targetFilter="statsFilter" targetPort="buffer" />
+
+    <connect sourceFilter="statsFilter" sourcePort="mean"
+         targetFilter="normFilter" targetPort="x" />
+
+    <connect sourceFilter="statsFilter" sourcePort="stdev"
+         targetFilter="normFilter" targetPort="y" />
+
+    <connect sourceFilter="normFilter" sourcePort="norm"
+         targetFilter="avgFilter" targetPort="sharpness" />
+
+    <connect sourceFilter="avgFilter" sourcePort="avg"
+         targetFilter="goodnessFilter" targetPort="sharpness" />
+
+    <connect sourceFilter="goodnessFilter" sourcePort="goodOrBadPic"
+         targetFilter="goodOrBadTextView" targetPort="text" />
+
+    <connect sourceFilter="featureBranch" sourcePort="toExposure"
+        targetFilter="exposure" targetPort="image" />
+    <connect sourceFilter="exposure" sourcePort="underExposureRating"
+        targetFilter="goodnessFilter" targetPort="underExposure" />
+    <connect sourceFilter="exposure" sourcePort="overExposureRating"
+        targetFilter="goodnessFilter" targetPort="overExposure" />
+
+    <connect sourceFilter="goodnessFilter" sourcePort="score"
+        targetFilter="scoreToString" targetPort="object" />
+    <connect sourceFilter="scoreToString" sourcePort="string"
+        targetFilter="scoreTextView" targetPort="text" />
+
+    <connect sourceFilter="mainBranch" sourcePort="camView"
+        targetFilter="throughput" targetPort="frame" />
+    <connect sourceFilter="throughput" sourcePort="frame"
+        targetFilter="snapBrightness" targetPort="image" />
+    <connect sourceFilter="snapEffect" sourcePort="value"
+        targetFilter="snapBrightness" targetPort="brightness" />
+    <connect sourceFilter="snapBrightness" sourcePort="image"
+        targetFilter="camViewTarget" targetPort="image" />
+    <connect sourceFilter="throughput" sourcePort="throughput"
+        targetFilter="throughputToString" targetPort="object" />
+    <connect sourceFilter="throughputToString" sourcePort="string"
+        targetFilter="fpsTextView" targetPort="text" />
+
+    <connect sourceFilter="featureBranch" sourcePort="contrastRatio"
+        targetFilter="contrast" targetPort="image" />
+    <connect sourceFilter="contrast" sourcePort="contrastRatingToGoodness"
+        targetFilter="goodnessFilter" targetPort="contrastRating" />
+
+    <connect sourceFilter="mainBranch" sourcePort="colorfulness"
+        targetFilter="histogram" targetPort="image" />
+    <connect sourceFilter="histogram" sourcePort="histogram"
+        targetFilter="colorfulness" targetPort="histogram" />
+    <connect sourceFilter="colorfulness" sourcePort="score"
+        targetFilter="goodnessFilter" targetPort="colorfulness" />
+
+    <connect sourceFilter="motion" sourcePort="values"
+        targetFilter="goodnessFilter" targetPort="motionValues" />
+
+    <connect sourceFilter="featureBranch" sourcePort="brightness"
+        targetFilter="brightness" targetPort="image" />
+    <connect sourceFilter="brightness" sourcePort="brightnessRating"
+        targetFilter="goodnessFilter" targetPort="brightness" />
+</graph>
+
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/res/values-v11/styles.xml b/tests/Camera2Tests/SmartCamera/SimpleCamera/res/values-v11/styles.xml
new file mode 100644
index 0000000..d408cbc
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/res/values-v11/styles.xml
@@ -0,0 +1,5 @@
+<resources>
+
+    <style name="AppTheme" parent="android:Theme.Holo.Light" />
+
+</resources>
\ No newline at end of file
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/res/values-v14/styles.xml b/tests/Camera2Tests/SmartCamera/SimpleCamera/res/values-v14/styles.xml
new file mode 100644
index 0000000..1c089a7
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/res/values-v14/styles.xml
@@ -0,0 +1,5 @@
+<resources>
+
+    <style name="AppTheme" parent="android:Theme.Holo.Light.DarkActionBar" />
+
+</resources>
\ No newline at end of file
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/res/values/strings.xml b/tests/Camera2Tests/SmartCamera/SimpleCamera/res/values/strings.xml
new file mode 100644
index 0000000..5e6b8ab
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/res/values/strings.xml
@@ -0,0 +1,38 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright 2013 The Android Open Source Project
+
+    Licensed under the Apache License, Version 2.0 (the "License");
+    you may not use this file except in compliance with the License.
+    You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing, software
+    distributed under the License is distributed on an "AS IS" BASIS,
+    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+    See the License for the specific language governing permissions and
+    limitations under the License.
+-->
+
+<resources>
+     <string name="goodOrBadTextView"> Good/Bad Picture </string>
+     <string name="fpsTextView"> FPS </string>
+     <string name="scoreTextView"> Score</string>
+     <string name="gallery"> Go To Gallery </string>
+     <string name="camera"> Go To Camera </string>
+     <string name="startButton" > Start </string>
+     <string name="imagesSavedTextView" > Images Saved </string>
+     <string name="galleryOpenButton" > Gallery </string>
+     <string-array name="number_array">
+        <item> 1 </item>
+        <item> 2 </item>
+        <item> 3 </item>
+        <item> 4 </item>
+        <item> 5 </item>
+        <item> 6 </item>
+        <item> 7 </item>
+        <item> 8 </item>
+        <item> 9 </item>
+        <item> 10 </item>
+     </string-array>
+</resources>
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/res/values/styles.xml b/tests/Camera2Tests/SmartCamera/SimpleCamera/res/values/styles.xml
new file mode 100644
index 0000000..bd5027f
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/res/values/styles.xml
@@ -0,0 +1,5 @@
+<resources>
+
+    <style name="AppTheme" parent="android:Theme.Light" />
+
+</resources>
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/BackingStore.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/BackingStore.java
new file mode 100644
index 0000000..216e743
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/BackingStore.java
@@ -0,0 +1,929 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
+ * in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License
+ * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
+ * or implied. See the License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package androidx.media.filterfw;
+
+import android.annotation.TargetApi;
+import android.graphics.Bitmap;
+import android.os.Build;
+import android.renderscript.Allocation;
+import android.renderscript.Element;
+import android.renderscript.RenderScript;
+import android.renderscript.Type;
+import android.util.Log;
+
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.util.Arrays;
+import java.util.Vector;
+
final class BackingStore {

    /** Access mode None: Frame data will not be accessed at all. */
    static final int ACCESS_NONE = 0x00;
    /** Access mode Bytes: Frame data will be accessed as a ByteBuffer. */
    static final int ACCESS_BYTES = 0x01;
    /** Access mode Texture: Frame data will be accessed as a TextureSource. */
    static final int ACCESS_TEXTURE = 0x02;
    /** Access mode RenderTarget: Frame data will be accessed as a RenderTarget. */
    static final int ACCESS_RENDERTARGET = 0x04;
    /** Access mode Object: Frame data will be accessed as a generic Object. */
    static final int ACCESS_OBJECT = 0x08;
    /** Access mode Bitmap: Frame data will be accessed as a Bitmap. */
    static final int ACCESS_BITMAP = 0x10;
    /** Access mode Allocation: Frame data will be accessed as a RenderScript Allocation. */
    static final int ACCESS_ALLOCATION = 0x20;

    // Identifiers returned by Backing.getType() to tag the concrete backing implementation.
    private static final int BACKING_BYTEBUFFER = 1;
    private static final int BACKING_TEXTURE = 2;
    private static final int BACKING_OBJECT = 3;
    private static final int BACKING_BITMAP = 4;
    private static final int BACKING_ALLOCATION = 5;

    // Frame type of the data held in this store; fixed at construction time.
    private final FrameType mType;
    // Dimensions of the stored data; may be null (treated as a single element, see getElementCount()).
    private int[] mDimensions;
    // Timestamp associated with the frame data; defaults to "not set".
    private long mTimestamp = Frame.TIMESTAMP_NOT_SET;

    private final FrameManager mFrameManager;

    // All backings currently attached to this store; data is synced between them lazily.
    private Vector<Backing> mBackings = new Vector<Backing>();

    // Lock state: one writer (mWriteLocked) or a count of concurrent readers (mReadLocks).
    private boolean mWriteLocked = false;
    private int mReadLocks = 0;

    // Manual reference count; the backings are released when this drops to 0 (see release()).
    private int mRefCount = 1;

    /** The most up-to-date data backing */
    private Backing mCurrentBacking = null;

    /** The currently locked backing */
    private Backing mLockedBacking = null;

    // Public Methods //////////////////////////////////////////////////////////////////////////////
+    public BackingStore(FrameType type, int[] dimensions, FrameManager frameManager) {
+        mType = type;
+        mDimensions = dimensions != null ? Arrays.copyOf(dimensions, dimensions.length) : null;
+        mFrameManager = frameManager;
+    }
+
    /** Returns the frame type this store was created with. */
    public FrameType getFrameType() {
        return mType;
    }

    /**
     * Locks a backing for the given mode and returns its data in the requested access format.
     * Must be balanced by a call to unlock().
     */
    public Object lockData(int mode, int accessFormat) {
        return lockBacking(mode, accessFormat).lock(accessFormat);
    }
+
+    public Backing lockBacking(int mode, int access) {
+        Backing backing = fetchBacking(mode, access);
+        if (backing == null) {
+            throw new RuntimeException("Could not fetch frame data!");
+        }
+        lock(backing, mode);
+        return backing;
+    }
+
    /**
     * Releases the most recently acquired lock (write lock first, then read locks).
     * Returns false and does nothing if the store is not currently locked.
     */
    public boolean unlock() {
        if (mWriteLocked) {
            mWriteLocked = false;
        } else if (mReadLocks > 0) {
            --mReadLocks;
        } else {
            return false;
        }
        // NOTE(review): mLockedBacking only remembers the last backing passed to lock(); with
        // multiple concurrent read locks, a second unlock() would hit a null mLockedBacking.
        // Confirm callers never hold more than one read lock at a time.
        mLockedBacking.unlock();
        mLockedBacking = null;
        return true;
    }
+
+    public BackingStore retain() {
+        if (mRefCount >= 10) {
+            Log.w("BackingStore", "High ref-count of " + mRefCount + " on " + this + "!");
+        }
+        if (mRefCount <= 0) {
+            throw new RuntimeException("RETAINING RELEASED");
+        }
+        ++mRefCount;
+        return this;
+    }
+
+    public BackingStore release() {
+        if (mRefCount <= 0) {
+            throw new RuntimeException("DOUBLE-RELEASE");
+        }
+        --mRefCount;
+        if (mRefCount == 0) {
+            releaseBackings();
+            return null;
+        }
+        return this;
+    }
+
+    /**
+     * Resizes the backing store. This invalidates all data in the store.
+     */
+    public void resize(int[] newDimensions) {
+        Vector<Backing> resized = new Vector<Backing>();
+        for (Backing backing : mBackings) {
+            if (backing.resize(newDimensions)) {
+                resized.add(backing);
+            } else {
+                releaseBacking(backing);
+            }
+        }
+        mBackings = resized;
+        mDimensions = newDimensions;
+    }
+
    /**
     * Returns the dimensions of the stored data; may be null.
     * NOTE(review): returns the internal array without copying — callers must not mutate it.
     */
    public int[] getDimensions() {
        return mDimensions;
    }
+
+    public int getElementCount() {
+        int result = 1;
+        if (mDimensions != null) {
+            for (int dim : mDimensions) {
+                result *= dim;
+            }
+        }
+        return result;
+    }
+
    /**
     * Imports the data and timestamp of another store by syncing to one of its backings.
     * Stores with no backings contribute only their timestamp.
     */
    public void importStore(BackingStore store) {
        // TODO: Better backing selection?
        if (store.mBackings.size() > 0) {
            importBacking(store.mBackings.firstElement());
        }
        mTimestamp = store.mTimestamp;
    }

    /**
     * @return the timestamp
     */
    public long getTimestamp() {
        return mTimestamp;
    }

    /**
     * @param timestamp the timestamp to set
     */
    public void setTimestamp(long timestamp) {
        mTimestamp = timestamp;
    }
+
+    // Internal Methods ////////////////////////////////////////////////////////////////////////////
+    private Backing fetchBacking(int mode, int access) {
+        Backing backing = getBacking(mode, access);
+        if (backing == null) {
+            backing = attachNewBacking(mode, access);
+        }
+        syncBacking(backing);
+        return backing;
+    }
+
+    private void syncBacking(Backing backing) {
+        if (backing != null && backing.isDirty() && mCurrentBacking != null) {
+            backing.syncTo(mCurrentBacking);
+        }
+    }
+
+    private Backing getBacking(int mode, int access) {
+        // [Non-iterator looping]
+        for (int i = 0; i < mBackings.size(); ++i) {
+            final Backing backing = mBackings.get(i);
+
+            int backingAccess =
+                    (mode == Frame.MODE_WRITE) ? backing.writeAccess() : backing.readAccess();
+            if ((backingAccess & access) == access) {
+                return backing;
+            }
+        }
+        return null;
+    }
+
+    private Backing attachNewBacking(int mode, int access) {
+        Backing backing = createBacking(mode, access);
+        if (mBackings.size() > 0) {
+            backing.markDirty();
+        }
+        mBackings.add(backing);
+        return backing;
+    }
+
    /**
     * Creates a backing that supports the given access in the given mode, preferring a cached
     * backing from the FrameManager when allowed. Newly allocated backings are configured with
     * this store's dimensions and element info before being registered with the manager.
     * Throws if the access type is unknown or required hardware support is missing.
     */
    private Backing createBacking(int mode, int access) {
        // TODO: If the read/write access flags indicate, make/fetch a GraphicBuffer backing.
        Backing backing = null;
        int elemSize = mType.getElementSize();
        // Object backings are never fetched from the cache (see shouldFetchCached()).
        if (shouldFetchCached(access)) {
            backing = mFrameManager.fetchBacking(mode, access, mDimensions, elemSize);
        }
        if (backing == null) {
            // No cached backing available: allocate a fresh one for the requested access type.
            switch (access) {
                case ACCESS_BYTES:
                    backing = new ByteBufferBacking();
                    break;
                case ACCESS_TEXTURE:
                case ACCESS_RENDERTARGET:
                    backing = new TextureBacking();
                    break;
                case ACCESS_OBJECT:
                    backing = new ObjectBacking();
                    break;
                case ACCESS_BITMAP:
                    backing = new BitmapBacking();
                    break;
                case ACCESS_ALLOCATION:
                    if (!AllocationBacking.isSupported()) {
                        throw new RuntimeException(
                                "Attempted to create an AllocationBacking in context that does " +
                                "not support RenderScript!");
                    }
                    backing = new AllocationBacking(mFrameManager.getContext().getRenderScript());
                    break;
            }
            if (backing == null) {
                throw new RuntimeException(
                        "Could not create backing for access type " + access + "!");
            }
            if (backing.requiresGpu() && !mFrameManager.getRunner().isOpenGLSupported()) {
                throw new RuntimeException(
                        "Cannot create backing that requires GPU in a runner that does not " +
                        "support OpenGL!");
            }
            backing.setDimensions(mDimensions);
            backing.setElementSize(elemSize);
            backing.setElementId(mType.getElementId());
            backing.allocate(mType);
            mFrameManager.onBackingCreated(backing);
        }
        return backing;
    }
+
    /**
     * Adopts another store's backing by creating a compatible local backing and copying the
     * data into it. GPU-bound source backings are imported through a byte-buffer copy. The
     * imported backing becomes the authoritative (current) one.
     */
    private void importBacking(Backing backing) {
        // TODO: This actually needs synchronization between the two BackingStore threads for the
        // general case
        int access = backing.requiresGpu() ? ACCESS_BYTES : backing.readAccess();
        Backing newBacking = createBacking(Frame.MODE_READ, access);
        newBacking.syncTo(backing);
        mBackings.add(newBacking);
        mCurrentBacking = newBacking;
    }
+
    /** Returns all backings to the frame manager and clears this store's backing list. */
    private void releaseBackings() {
        // [Non-iterator looping]
        for (int i = 0; i < mBackings.size(); ++i) {
            releaseBacking(mBackings.get(i));
        }
        mBackings.clear();
        mCurrentBacking = null;
    }

    /** Hands a single backing back to the frame manager; reuse/disposal policy lives there. */
    private void releaseBacking(Backing backing) {
        mFrameManager.onBackingAvailable(backing);
    }
+
    /**
     * Locks the given backing for the given mode. Write-locking requires that the frame is
     * neither read- nor write-locked and marks all other backings dirty; read-locking only
     * requires the absence of a write lock and may nest (counted in mReadLocks).
     */
    private void lock(Backing backingToLock, int mode) {
        if (mode == Frame.MODE_WRITE) {
            // Make sure frame is not read-locked
            if (mReadLocks > 0) {
                throw new RuntimeException(
                        "Attempting to write-lock the read-locked frame " + this + "!");
            } else if (mWriteLocked) {
                throw new RuntimeException(
                        "Attempting to write-lock the write-locked frame " + this + "!");
            }
            // Mark all other backings dirty
            // [Non-iterator looping]
            for (int i = 0; i < mBackings.size(); ++i) {
                final Backing backing = mBackings.get(i);
                if (backing != backingToLock) {
                    backing.markDirty();
                }
            }
            mWriteLocked = true;
            // The write target becomes the authoritative copy for future syncs.
            mCurrentBacking = backingToLock;
        } else {
            if (mWriteLocked) {
                throw new RuntimeException("Attempting to read-lock locked frame " + this + "!");
            }
            ++mReadLocks;
        }
        // NOTE(review): overwritten on nested read locks — only the last backing is remembered
        // for unlock(); see the matching note there.
        mLockedBacking = backingToLock;
    }
+
+    private static boolean shouldFetchCached(int access) {
+        return access != ACCESS_OBJECT;
+    }
+
+
    // Backings ////////////////////////////////////////////////////////////////////////////////////
    /**
     * One concrete representation of the frame data (byte buffer, texture, object, bitmap,
     * allocation). A BackingStore attaches one Backing per requested access type and syncs
     * data between them lazily via markDirty()/syncTo().
     */
    static abstract class Backing {
        protected int[] mDimensions = null;
        private int mElementSize;
        private int mElementID;
        // Set when another backing holds newer data than this one (see BackingStore.lock()).
        protected boolean mIsDirty = false;

        // Presumably consulted by the FrameManager's backing cache — confirm at the usage site.
        int cachePriority = 0;

        /** Allocates storage appropriate for the given frame type. */
        public abstract void allocate(FrameType frameType);

        /** Returns the ACCESS_* bits this backing can serve for reading. */
        public abstract int readAccess();

        /** Returns the ACCESS_* bits this backing can serve for writing. */
        public abstract int writeAccess();

        /** Copies the given backing's data into this one; implementations clear mIsDirty. */
        public abstract void syncTo(Backing backing);

        /** Returns the underlying data in the representation requested by accessType. */
        public abstract Object lock(int accessType);

        /** Returns the BACKING_* constant identifying this implementation. */
        public abstract int getType();

        /** Whether the frame manager may cache this backing for reuse. */
        public abstract boolean shouldCache();

        /** Whether this backing needs GPU support (OpenGL) to function. */
        public abstract boolean requiresGpu();

        /** Releases the resources held by this backing. */
        public abstract void destroy();

        /** Returns the size of the backed data in bytes (0 if not applicable). */
        public abstract int getSize();

        public void unlock() {
            // Default implementation does nothing.
        }

        /** Directly replaces the backed data; only object-like backings support this. */
        public void setData(Object data) {
            throw new RuntimeException("Internal error: Setting data on frame backing " + this
                    + ", which does not support setting data directly!");
        }

        public void setDimensions(int[] dimensions) {
            mDimensions = dimensions;
        }

        public void setElementSize(int elemSize) {
            mElementSize = elemSize;
        }

        public void setElementId(int elemId) {
            mElementID = elemId;
        }

        public int[] getDimensions() {
            return mDimensions;
        }

        public int getElementSize() {
            return mElementSize;
        }

        public int getElementId() {
            return mElementID;
        }

        public boolean resize(int[] newDimensions) {
            // Subclasses that can reuse their storage for new dimensions may override this.
            return false;
        }

        public void markDirty() {
            mIsDirty = true;
        }

        public boolean isDirty() {
            return mIsDirty;
        }

        /** Asserts that the type is 2-D RGBA8888 image data; required by image-like backings. */
        protected void assertImageCompatible(FrameType type) {
            if (type.getElementId() != FrameType.ELEMENT_RGBA8888) {
                throw new RuntimeException("Cannot allocate texture with non-RGBA data type!");
            } else if (mDimensions == null || mDimensions.length != 2) {
                throw new RuntimeException("Cannot allocate non 2-dimensional texture!");
            }
        }

    }
+
+    static class ObjectBacking extends Backing {
+
+        private Object mObject = null;
+
+        @Override
+        public void allocate(FrameType frameType) {
+            mObject = null;
+        }
+
+        @Override
+        public int readAccess() {
+            return ACCESS_OBJECT;
+        }
+
+        @Override
+        public int writeAccess() {
+            return ACCESS_OBJECT;
+        }
+
+        @Override
+        public void syncTo(Backing backing) {
+            switch (backing.getType()) {
+                case BACKING_OBJECT:
+                    mObject = backing.lock(ACCESS_OBJECT);
+                    backing.unlock();
+                    break;
+                case BACKING_BITMAP:
+                    mObject = backing.lock(ACCESS_BITMAP);
+                    backing.unlock();
+                    break;
+                default:
+                    mObject = null;
+            }
+            mIsDirty = false;
+        }
+
+        @Override
+        public Object lock(int accessType) {
+            return mObject;
+        }
+
+        @Override
+        public int getType() {
+            return BACKING_OBJECT;
+        }
+
+        @Override
+        public boolean shouldCache() {
+            return false;
+        }
+
+        @Override
+        public boolean requiresGpu() {
+            return false;
+        }
+
+        @Override
+        public void destroy() {
+            mObject = null;
+        }
+
+        @Override
+        public int getSize() {
+            return 0;
+        }
+
+        @Override
+        public void setData(Object data) {
+            mObject = data;
+        }
+
+    }
+
+    static class BitmapBacking extends Backing {
+
+        private Bitmap mBitmap = null;
+
+        @Override
+        public void allocate(FrameType frameType) {
+            assertImageCompatible(frameType);
+        }
+
+        @Override
+        public int readAccess() {
+            return ACCESS_BITMAP;
+        }
+
+        @Override
+        public int writeAccess() {
+            return ACCESS_BITMAP;
+        }
+
+        @Override
+        public void syncTo(Backing backing) {
+            int access = backing.readAccess();
+            if ((access & ACCESS_BITMAP) != 0) {
+                mBitmap = (Bitmap) backing.lock(ACCESS_BITMAP);
+            } else if ((access & ACCESS_BYTES) != 0) {
+                createBitmap();
+                ByteBuffer buffer = (ByteBuffer) backing.lock(ACCESS_BYTES);
+                mBitmap.copyPixelsFromBuffer(buffer);
+                buffer.rewind();
+            } else if ((access & ACCESS_TEXTURE) != 0) {
+                createBitmap();
+                RenderTarget renderTarget = (RenderTarget) backing.lock(ACCESS_RENDERTARGET);
+                mBitmap.copyPixelsFromBuffer(
+                        renderTarget.getPixelData(mDimensions[0], mDimensions[1]));
+            } else if ((access & ACCESS_ALLOCATION) != 0 && AllocationBacking.isSupported()) {
+                createBitmap();
+                syncToAllocationBacking(backing);
+            } else {
+                throw new RuntimeException("Cannot sync bytebuffer backing!");
+            }
+            backing.unlock();
+            mIsDirty = false;
+        }
+
+        @TargetApi(11)
+        private void syncToAllocationBacking(Backing backing) {
+            Allocation allocation = (Allocation) backing.lock(ACCESS_ALLOCATION);
+            allocation.copyTo(mBitmap);
+        }
+
+        @Override
+        public Object lock(int accessType) {
+            return mBitmap;
+        }
+
+        @Override
+        public int getType() {
+            return BACKING_BITMAP;
+        }
+
+        @Override
+        public boolean shouldCache() {
+            return false;
+        }
+
+        @Override
+        public boolean requiresGpu() {
+            return false;
+        }
+
+        @Override
+        public void destroy() {
+            // As we share the bitmap with other backings (such as object backings), we must not
+            // recycle it here.
+            mBitmap = null;
+        }
+
+        @Override
+        public int getSize() {
+            return 4 * mDimensions[0] * mDimensions[1];
+        }
+
+        @Override
+        public void setData(Object data) {
+            // We can assume that data will always be a Bitmap instance.
+            mBitmap = (Bitmap) data;
+        }
+
+        private void createBitmap() {
+            mBitmap = Bitmap.createBitmap(mDimensions[0], mDimensions[1], Bitmap.Config.ARGB_8888);
+        }
+    }
+
+    static class TextureBacking extends Backing {
+
+        private RenderTarget mRenderTarget = null;
+        private TextureSource mTexture = null;
+
+        @Override
+        public void allocate(FrameType frameType) {
+            assertImageCompatible(frameType);
+            mTexture = TextureSource.newTexture();
+        }
+
+        @Override
+        public int readAccess() {
+            return ACCESS_TEXTURE;
+        }
+
+        @Override
+        public int writeAccess() {
+            return ACCESS_RENDERTARGET;
+        }
+
+        @Override
+        public void syncTo(Backing backing) {
+            int access = backing.readAccess();
+            if ((access & ACCESS_BYTES) != 0) {
+                ByteBuffer pixels = (ByteBuffer) backing.lock(ACCESS_BYTES);
+                mTexture.allocateWithPixels(pixels, mDimensions[0], mDimensions[1]);
+            } else if ((access & ACCESS_BITMAP) != 0) {
+                Bitmap bitmap = (Bitmap) backing.lock(ACCESS_BITMAP);
+                mTexture.allocateWithBitmapPixels(bitmap);
+            } else if ((access & ACCESS_TEXTURE) != 0) {
+                TextureSource texture = (TextureSource) backing.lock(ACCESS_TEXTURE);
+                int w = mDimensions[0];
+                int h = mDimensions[1];
+                ImageShader.renderTextureToTarget(texture, getRenderTarget(), w, h);
+            } else if ((access & ACCESS_ALLOCATION) != 0 && AllocationBacking.isSupported()) {
+                syncToAllocationBacking(backing);
+            } else {
+                throw new RuntimeException("Cannot sync bytebuffer backing!");
+            }
+            backing.unlock();
+            mIsDirty = false;
+        }
+
+        @TargetApi(11)
+        private void syncToAllocationBacking(Backing backing) {
+            Allocation allocation = (Allocation) backing.lock(ACCESS_ALLOCATION);
+            ByteBuffer pixels = ByteBuffer.allocateDirect(getSize());
+            allocation.copyTo(pixels.array());
+            mTexture.allocateWithPixels(pixels, mDimensions[0], mDimensions[1]);
+        }
+
+        @Override
+        public Object lock(int accessType) {
+            switch (accessType) {
+                case ACCESS_TEXTURE:
+                    return getTexture();
+
+                case ACCESS_RENDERTARGET:
+                    return getRenderTarget();
+
+                default:
+                    throw new RuntimeException("Illegal access to texture!");
+            }
+        }
+
+        @Override
+        public int getType() {
+            return BACKING_TEXTURE;
+        }
+
+        @Override
+        public boolean shouldCache() {
+            return true;
+        }
+
+        @Override
+        public boolean requiresGpu() {
+            return true;
+        }
+
+        @Override
+        public void destroy() {
+            if (mRenderTarget != null) {
+                mRenderTarget.release();
+            }
+            if (mTexture.isAllocated()) {
+                mTexture.release();
+            }
+        }
+
+        @Override
+        public int getSize() {
+            return 4 * mDimensions[0] * mDimensions[1];
+        }
+
+        private TextureSource getTexture() {
+            if (!mTexture.isAllocated()) {
+                mTexture.allocate(mDimensions[0], mDimensions[1]);
+            }
+            return mTexture;
+        }
+
+        private RenderTarget getRenderTarget() {
+            if (mRenderTarget == null) {
+                int w = mDimensions[0];
+                int h = mDimensions[1];
+                mRenderTarget = RenderTarget.currentTarget().forTexture(getTexture(), w, h);
+            }
+            return mRenderTarget;
+        }
+
+    }
+
+    static class ByteBufferBacking extends Backing {
+
+        ByteBuffer mBuffer = null;
+
+        @Override
+        public void allocate(FrameType frameType) {
+            int size = frameType.getElementSize();
+            for (int dim : mDimensions) {
+                size *= dim;
+            }
+            mBuffer = ByteBuffer.allocateDirect(size);
+        }
+
+        @Override
+        public int readAccess() {
+            return ACCESS_BYTES;
+        }
+
+        @Override
+        public int writeAccess() {
+            return ACCESS_BYTES;
+        }
+
+        @Override
+        public boolean requiresGpu() {
+            return false;
+        }
+
+        @Override
+        public void syncTo(Backing backing) {
+            int access = backing.readAccess();
+            if ((access & ACCESS_TEXTURE) != 0) {
+                RenderTarget target = (RenderTarget) backing.lock(ACCESS_RENDERTARGET);
+                GLToolbox.readTarget(target, mBuffer, mDimensions[0], mDimensions[1]);
+            } else if ((access & ACCESS_BITMAP) != 0) {
+                Bitmap bitmap = (Bitmap) backing.lock(ACCESS_BITMAP);
+                bitmap.copyPixelsToBuffer(mBuffer);
+                mBuffer.rewind();
+            } else if ((access & ACCESS_BYTES) != 0) {
+                ByteBuffer otherBuffer = (ByteBuffer) backing.lock(ACCESS_BYTES);
+                mBuffer.put(otherBuffer);
+                otherBuffer.rewind();
+            } else if ((access & ACCESS_ALLOCATION) != 0 && AllocationBacking.isSupported()) {
+                syncToAllocationBacking(backing);
+            } else {
+                throw new RuntimeException("Cannot sync bytebuffer backing!");
+            }
+            backing.unlock();
+            mBuffer.rewind();
+            mIsDirty = false;
+        }
+
+        @TargetApi(11)
+        private void syncToAllocationBacking(Backing backing) {
+            Allocation allocation = (Allocation) backing.lock(ACCESS_ALLOCATION);
+            if (getElementId() == FrameType.ELEMENT_RGBA8888) {
+                byte[] bytes = mBuffer.array();
+                allocation.copyTo(bytes);
+            } else if (getElementId() == FrameType.ELEMENT_FLOAT32) {
+                float[] floats = new float[getSize() / 4];
+                allocation.copyTo(floats);
+                mBuffer.asFloatBuffer().put(floats);
+            } else {
+                throw new RuntimeException(
+                        "Trying to sync to an allocation with an unsupported element id: "
+                        + getElementId());
+            }
+        }
+
+        @Override
+        public Object lock(int accessType) {
+            return mBuffer.rewind();
+        }
+
+        @Override
+        public void unlock() {
+            mBuffer.rewind();
+        }
+
+        @Override
+        public int getType() {
+            return BACKING_BYTEBUFFER;
+        }
+
+        @Override
+        public boolean shouldCache() {
+            return true;
+        }
+
+        @Override
+        public void destroy() {
+            mBuffer = null;
+        }
+
+        @Override
+        public int getSize() {
+            return mBuffer.remaining();
+        }
+
+    }
+
+    @TargetApi(11)
+    static class AllocationBacking extends Backing {
+
+        private final RenderScript mRenderScript;
+        private Allocation mAllocation = null;
+
+        public AllocationBacking(RenderScript renderScript) {
+            mRenderScript = renderScript;
+        }
+
+        @Override
+        public void allocate(FrameType frameType) {
+            assertCompatible(frameType);
+
+            Element element = null;
+            switch (frameType.getElementId()) {
+                case FrameType.ELEMENT_RGBA8888:
+                    element = Element.RGBA_8888(mRenderScript);
+                    break;
+                case FrameType.ELEMENT_FLOAT32:
+                    element = Element.F32(mRenderScript);
+                    break;
+            }
+            Type.Builder imageTypeBuilder = new Type.Builder(mRenderScript, element);
+            imageTypeBuilder.setX(mDimensions.length >= 1 ? mDimensions[0] : 1);
+            imageTypeBuilder.setY(mDimensions.length == 2 ? mDimensions[1] : 1);
+            Type imageType = imageTypeBuilder.create();
+
+            mAllocation = Allocation.createTyped(mRenderScript, imageType);
+        }
+
+        @Override
+        public int readAccess() {
+            return ACCESS_ALLOCATION;
+        }
+
+        @Override
+        public int writeAccess() {
+            return ACCESS_ALLOCATION;
+        }
+
+        @Override
+        public boolean requiresGpu() {
+            return false;
+        }
+
+        @Override
+        public void syncTo(Backing backing) {
+            int access = backing.readAccess();
+            if ((access & ACCESS_TEXTURE) != 0) {
+                RenderTarget target = (RenderTarget) backing.lock(ACCESS_RENDERTARGET);
+                ByteBuffer pixels = ByteBuffer.allocateDirect(getSize());
+                GLToolbox.readTarget(target, pixels, mDimensions[0], mDimensions[1]);
+                mAllocation.copyFrom(pixels.array());
+            } else if ((access & ACCESS_BITMAP) != 0) {
+                Bitmap bitmap = (Bitmap) backing.lock(ACCESS_BITMAP);
+                mAllocation.copyFrom(bitmap);
+            } else if ((access & ACCESS_BYTES) != 0) {
+                ByteBuffer buffer = (ByteBuffer) backing.lock(ACCESS_BYTES);
+                if (buffer.order() != ByteOrder.nativeOrder()) {
+                    throw new RuntimeException(
+                            "Trying to sync to the ByteBufferBacking with non-native byte order!");
+                }
+                byte[] bytes;
+                if (buffer.hasArray()) {
+                    bytes = buffer.array();
+                } else {
+                    bytes = new byte[getSize()];
+                    buffer.get(bytes);
+                    buffer.rewind();
+                }
+                mAllocation.copyFromUnchecked(bytes);
+            } else {
+                throw new RuntimeException("Cannot sync allocation backing!");
+            }
+            backing.unlock();
+            mIsDirty = false;
+        }
+
+        @Override
+        public Object lock(int accessType) {
+            return mAllocation;
+        }
+
+        @Override
+        public void unlock() {
+        }
+
+        @Override
+        public int getType() {
+            return BACKING_ALLOCATION;
+        }
+
+        @Override
+        public boolean shouldCache() {
+            return true;
+        }
+
+        @Override
+        public void destroy() {
+            if (mAllocation != null) {
+                mAllocation.destroy();
+                mAllocation = null;
+            }
+        }
+
+        @Override
+        public int getSize() {
+            int elementCount = 1;
+            for (int dim : mDimensions) {
+                elementCount *= dim;
+            }
+            return getElementSize() * elementCount;
+        }
+
+        public static boolean isSupported() {
+            return Build.VERSION.SDK_INT >= 11;
+        }
+
+        private void assertCompatible(FrameType type) {
+            // TODO: consider adding support for other data types.
+            if (type.getElementId() != FrameType.ELEMENT_RGBA8888
+                    && type.getElementId() != FrameType.ELEMENT_FLOAT32) {
+                throw new RuntimeException(
+                        "Cannot allocate allocation with a non-RGBA or non-float data type!");
+            }
+            if (mDimensions == null || mDimensions.length > 2) {
+                throw new RuntimeException(
+                        "Cannot create an allocation with more than 2 dimensions!");
+            }
+        }
+
+    }
+
+}
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/BranchFilter.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/BranchFilter.java
new file mode 100644
index 0000000..6e7c014
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/BranchFilter.java
@@ -0,0 +1,88 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package androidx.media.filterpacks.base;
+
+import androidx.media.filterfw.Filter;
+import androidx.media.filterfw.Frame;
+import androidx.media.filterfw.FrameType;
+import androidx.media.filterfw.InputPort;
+import androidx.media.filterfw.MffContext;
+import androidx.media.filterfw.OutputPort;
+import androidx.media.filterfw.Signature;
+
+public final class BranchFilter extends Filter {
+
+    private boolean mSynchronized = true;
+
+    public BranchFilter(MffContext context, String name) {
+        super(context, name);
+    }
+
+    public BranchFilter(MffContext context, String name, boolean synced) {
+        super(context, name);
+        mSynchronized = synced;
+    }
+
+    @Override
+    public Signature getSignature() {
+        return new Signature()
+            .addInputPort("input", Signature.PORT_REQUIRED, FrameType.any())
+            .addInputPort("synchronized", Signature.PORT_OPTIONAL,FrameType.single(boolean.class))
+            .disallowOtherInputs();
+    }
+
+    @Override
+    public void onInputPortOpen(InputPort port) {
+        if (port.getName().equals("input")) {
+            for (OutputPort outputPort : getConnectedOutputPorts()) {
+                port.attachToOutputPort(outputPort);
+            }
+        } else if (port.getName().equals("synchronized")) {
+            port.bindToFieldNamed("mSynchronized");
+            port.setAutoPullEnabled(true);
+        }
+    }
+
+    @Override
+    protected void onOpen() {
+        updateSynchronization();
+    }
+
+    @Override
+    protected void onProcess() {
+        Frame inputFrame = getConnectedInputPort("input").pullFrame();
+        for (OutputPort outputPort : getConnectedOutputPorts()) {
+            if (outputPort.isAvailable()) {
+                outputPort.pushFrame(inputFrame);
+            }
+        }
+    }
+
+    private void updateSynchronization() {
+        if (mSynchronized) {
+            for (OutputPort port : getConnectedOutputPorts()) {
+                port.setWaitsUntilAvailable(true);
+            }
+        } else {
+            for (OutputPort port : getConnectedOutputPorts()) {
+                port.setWaitsUntilAvailable(false);
+            }
+        }
+    }
+
+}
+
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/BrightnessFilter.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/BrightnessFilter.java
new file mode 100644
index 0000000..5a70776
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/BrightnessFilter.java
@@ -0,0 +1,88 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package androidx.media.filterpacks.image;
+
+import androidx.media.filterfw.Filter;
+import androidx.media.filterfw.FrameImage2D;
+import androidx.media.filterfw.FrameType;
+import androidx.media.filterfw.ImageShader;
+import androidx.media.filterfw.InputPort;
+import androidx.media.filterfw.MffContext;
+import androidx.media.filterfw.OutputPort;
+import androidx.media.filterfw.Signature;
+
+public class BrightnessFilter extends Filter {
+
+    private float mBrightness = 1.0f;
+    private ImageShader mShader;
+
+    private static final String mBrightnessShader =
+            "precision mediump float;\n" +
+            "uniform sampler2D tex_sampler_0;\n" +
+            "uniform float brightness;\n" +
+            "varying vec2 v_texcoord;\n" +
+            "void main() {\n" +
+            "  vec4 color = texture2D(tex_sampler_0, v_texcoord);\n" +
+            "  if (brightness < 0.5) {\n" +
+            "    gl_FragColor = color * (2.0 * brightness);\n" +
+            "  } else {\n" +
+            "    vec4 diff = 1.0 - color;\n" +
+            "    gl_FragColor = color + diff * (2.0 * (brightness - 0.5));\n" +
+            "  }\n" +
+            "}\n";
+
+
+    public BrightnessFilter(MffContext context, String name) {
+        super(context, name);
+    }
+
+    @Override
+    public Signature getSignature() {
+        FrameType imageIn = FrameType.image2D(FrameType.ELEMENT_RGBA8888, FrameType.READ_GPU);
+        FrameType imageOut = FrameType.image2D(FrameType.ELEMENT_RGBA8888, FrameType.WRITE_GPU);
+        return new Signature()
+            .addInputPort("image", Signature.PORT_REQUIRED, imageIn)
+            .addInputPort("brightness", Signature.PORT_OPTIONAL, FrameType.single(float.class))
+            .addOutputPort("image", Signature.PORT_REQUIRED, imageOut)
+            .disallowOtherPorts();
+    }
+
+    @Override
+    public void onInputPortOpen(InputPort port) {
+        if (port.getName().equals("brightness")) {
+            port.bindToFieldNamed("mBrightness");
+            port.setAutoPullEnabled(true);
+        }
+    }
+
+    @Override
+    protected void onPrepare() {
+        mShader = new ImageShader(mBrightnessShader);
+    }
+
+    @Override
+    protected void onProcess() {
+        OutputPort outPort = getConnectedOutputPort("image");
+        FrameImage2D inputImage = getConnectedInputPort("image").pullFrame().asFrameImage2D();
+        int[] dim = inputImage.getDimensions();
+        FrameImage2D outputImage = outPort.fetchAvailableFrame(dim).asFrameImage2D();
+        mShader.setUniformValue("brightness", mBrightness);
+        mShader.process(inputImage, outputImage);
+        outPort.pushFrame(outputImage);
+    }
+}
+
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/CameraStreamer.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/CameraStreamer.java
new file mode 100644
index 0000000..d1642fd
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/CameraStreamer.java
@@ -0,0 +1,1906 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package androidx.media.filterfw;
+
+import android.annotation.TargetApi;
+import android.graphics.SurfaceTexture;
+import android.hardware.Camera;
+import android.hardware.Camera.CameraInfo;
+import android.hardware.Camera.PreviewCallback;
+import android.media.CamcorderProfile;
+import android.media.MediaRecorder;
+import android.opengl.GLES20;
+import android.os.Build.VERSION;
+import android.util.Log;
+import android.view.Display;
+import android.view.Surface;
+import android.view.SurfaceView;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+import java.util.Vector;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.concurrent.locks.Condition;
+import java.util.concurrent.locks.ReentrantLock;
+
+import javax.microedition.khronos.egl.EGLContext;
+
+/**
+ * The CameraStreamer streams Frames from a camera to connected clients.
+ *
+ * There is one centralized CameraStreamer object per MffContext, and only one stream can be
+ * active at any time. The CameraStreamer acts as a Camera "server" that streams frames to any
+ * number of connected clients. Typically, these are CameraSource filters that are part of a
+ * graph, but other clients can be written as well.
+ */
+public class CameraStreamer {
+
+    /** Camera Facing: Don't Care: Picks any available camera. */
+    public static final int FACING_DONTCARE = 0;
+    /** Camera Facing: Front: Use the front facing camera. */
+    public static final int FACING_FRONT = 1;
+    /** Camera Facing: Back: Use the rear facing camera. */
+    public static final int FACING_BACK = 2;
+
+    /** How long the streamer should wait to acquire the camera before giving up. */
+    public static long MAX_CAMERA_WAIT_TIME = 5;
+
+    /**
+     * The global camera lock, that is closed when the camera is acquired by any CameraStreamer,
+     * and opened when a streamer is done using the camera.
+     */
+    static ReentrantLock mCameraLock = new ReentrantLock();
+
+    /** The Camera thread that grabs frames from the camera */
+    private CameraRunnable mCameraRunner = null;
+
    /**
     * Base class for the platform-version-specific camera frame back-ends.
     * Tracks the registered frame clients and the per-EGL-context GL resources
     * (shaders and textures) used to hand camera frames to those clients.
     * NOTE(review): clients appear to register from their own GL threads (the
     * per-context maps are keyed by RenderTarget.currentContext()) — confirm
     * the threading model against the subclasses before restructuring.
     */
    private abstract class CamFrameHandler {
        // Size of the frames delivered by the camera.
        protected int mCameraWidth;
        protected int mCameraHeight;
        // Output size after compensating for camera orientation
        // (swapped relative to the camera size for 90/270 degrees).
        protected int mOutWidth;
        protected int mOutHeight;
        // The camera thread runnable this handler is attached to.
        protected CameraRunnable mRunner;

        /** Map of GLSL shaders (one for each target context) */
        protected HashMap<EGLContext, ImageShader> mTargetShaders
            = new HashMap<EGLContext, ImageShader>();

        /** Map of target textures (one for each target context) */
        protected HashMap<EGLContext, TextureSource> mTargetTextures
            = new HashMap<EGLContext, TextureSource>();

        /** Map of set of clients (one for each target context) */
        protected HashMap<EGLContext, Set<FrameClient>> mContextClients
            = new HashMap<EGLContext, Set<FrameClient>>();

        /** List of clients that are consuming camera frames. */
        protected Vector<FrameClient> mClients = new Vector<FrameClient>();

        /** Attaches this handler to the given camera runner. */
        public void initWithRunner(CameraRunnable camRunner) {
            mRunner = camRunner;
        }

        /** Records the size of frames coming from the camera. */
        public void setCameraSize(int width, int height) {
            mCameraWidth = width;
            mCameraHeight = height;
        }

        /**
         * Registers a client under the caller's current EGL context.
         * Must be called from the client's GL thread so the context key is correct.
         */
        public void registerClient(FrameClient client) {
            EGLContext context = RenderTarget.currentContext();
            Set<FrameClient> clientTargets = clientsForContext(context);
            clientTargets.add(client);
            mClients.add(client);
            onRegisterClient(client, context);
        }

        /**
         * Unregisters a client; releases the context's GL resources when it was
         * the last client for the caller's current EGL context.
         */
        public void unregisterClient(FrameClient client) {
            EGLContext context = RenderTarget.currentContext();
            Set<FrameClient> clientTargets = clientsForContext(context);
            clientTargets.remove(client);
            if (clientTargets.isEmpty()) {
                onCleanupContext(context);
            }
            mClients.remove(client);
        }

        // Version-specific hooks: set up the camera's preview output, pull the
        // latest camera frame, copy it into a client frame, and tear down.
        public abstract void setupServerFrame();
        public abstract void updateServerFrame();
        public abstract void grabFrame(FrameImage2D targetFrame);
        public abstract void release();

        /**
         * Updates the output size for the given camera orientation (degrees):
         * width and height are swapped for 90/270-degree rotations.
         */
        public void onUpdateCameraOrientation(int orientation) {
            if (orientation % 180 != 0) {
                mOutWidth = mCameraHeight;
                mOutHeight = mCameraWidth;
            } else {
                mOutWidth = mCameraWidth;
                mOutHeight = mCameraHeight;
            }
        }

        /** Returns (creating on demand) the client set for the given context. */
        protected Set<FrameClient> clientsForContext(EGLContext context) {
            Set<FrameClient> clients = mContextClients.get(context);
            if (clients == null) {
                clients = new HashSet<FrameClient>();
                mContextClients.put(context, clients);
            }
            return clients;
        }

        /** Hook invoked after a client is registered. Default: no-op. */
        protected void onRegisterClient(FrameClient client, EGLContext context) {
        }

        /** Releases the texture and shader cached for the given context. */
        protected void onCleanupContext(EGLContext context) {
            TextureSource texture = mTargetTextures.get(context);
            ImageShader shader = mTargetShaders.get(context);
            if (texture != null) {
                texture.release();
                mTargetTextures.remove(context);
            }
            if (shader != null) {
                mTargetShaders.remove(context);
            }
        }

        /** Returns (creating on demand) the target texture for the given context. */
        protected TextureSource textureForContext(EGLContext context) {
            TextureSource texture = mTargetTextures.get(context);
            if (texture == null) {
                texture = createClientTexture();
                mTargetTextures.put(context, texture);
            }
            return texture;
        }

        /** Returns (creating on demand) the copy shader for the given context. */
        protected ImageShader shaderForContext(EGLContext context) {
            ImageShader shader = mTargetShaders.get(context);
            if (shader == null) {
                shader = createClientShader();
                mTargetShaders.put(context, shader);
            }
            return shader;
        }

        /** Subclasses may return the shader used to copy frames to clients. */
        protected ImageShader createClientShader() {
            return null;
        }

        /** Subclasses may return the texture used to hand frames to clients. */
        protected TextureSource createClientTexture() {
            return null;
        }

        /** Whether front-camera frames are mirrored by this back-end. */
        public boolean isFrontMirrored() {
            return true;
        }
    }
+
+    // Jellybean (and later) back-end
+    @TargetApi(16)
+    private class CamFrameHandlerJB extends CamFrameHandlerICS {
+
+        @Override
+        public void setupServerFrame() {
+            setupPreviewTexture(mRunner.mCamera);
+        }
+
+        @Override
+        public synchronized void updateServerFrame() {
+            updateSurfaceTexture();
+            informClients();
+        }
+
+        @Override
+        public synchronized void grabFrame(FrameImage2D targetFrame) {
+            TextureSource targetTex = TextureSource.newExternalTexture();
+            ImageShader copyShader = shaderForContext(RenderTarget.currentContext());
+            if (targetTex == null || copyShader == null) {
+                throw new RuntimeException("Attempting to grab camera frame from unknown "
+                    + "thread: " + Thread.currentThread() + "!");
+            }
+            mPreviewSurfaceTexture.attachToGLContext(targetTex.getTextureId());
+            updateTransform(copyShader);
+            updateShaderTargetRect(copyShader);
+            targetFrame.resize(new int[] { mOutWidth, mOutHeight });
+            copyShader.process(targetTex,
+                               targetFrame.lockRenderTarget(),
+                               mOutWidth,
+                               mOutHeight);
+            targetFrame.setTimestamp(mPreviewSurfaceTexture.getTimestamp());
+            targetFrame.unlock();
+            mPreviewSurfaceTexture.detachFromGLContext();
+            targetTex.release();
+        }
+
+        @Override
+        protected void updateShaderTargetRect(ImageShader shader) {
+            if ((mRunner.mActualFacing == FACING_FRONT) && mRunner.mFlipFront) {
+                shader.setTargetRect(1f, 1f, -1f, -1f);
+            } else {
+                shader.setTargetRect(0f, 1f, 1f, -1f);
+            }
+        }
+
+        @Override
+        protected void setupPreviewTexture(Camera camera) {
+            super.setupPreviewTexture(camera);
+            mPreviewSurfaceTexture.detachFromGLContext();
+        }
+
+        // Latch the newest camera image into the preview texture: briefly attach
+        // the SurfaceTexture to the server GL context, update, and detach again.
+        protected void updateSurfaceTexture() {
+            mPreviewSurfaceTexture.attachToGLContext(mPreviewTexture.getTextureId());
+            mPreviewSurfaceTexture.updateTexImage();
+            mPreviewSurfaceTexture.detachFromGLContext();
+        }
+
+        // Notify all registered frame clients that a new camera frame is ready.
+        // Iteration is guarded by mClients to avoid concurrent modification.
+        protected void informClients() {
+            synchronized (mClients) {
+                for (FrameClient client : mClients) {
+                    client.onCameraFrameAvailable();
+                }
+            }
+        }
+    }
+
+    // ICS (and later) back-end
+    // Streams camera frames into a server-side external texture and re-distributes
+    // them to one SurfaceTexture per client EGL context. Clients then copy from
+    // their own SurfaceTexture in grabFrame().
+    @TargetApi(15)
+    private class CamFrameHandlerICS extends CamFrameHandler  {
+
+        // NOTE(review): this is a compile-time constant; by convention it should
+        // be named in UPPER_SNAKE_CASE rather than with an 'm' member prefix.
+        protected static final String mCopyShaderSource =
+            "#extension GL_OES_EGL_image_external : require\n" +
+            "precision mediump float;\n" +
+            "uniform samplerExternalOES tex_sampler_0;\n" +
+            "varying vec2 v_texcoord;\n" +
+            "void main() {\n" +
+            "  gl_FragColor = texture2D(tex_sampler_0, v_texcoord);\n" +
+            "}\n";
+
+        /** The camera transform matrix */
+        private float[] mCameraTransform = new float[16];
+
+        /** The texture the camera streams to */
+        protected TextureSource mPreviewTexture = null;
+        protected SurfaceTexture mPreviewSurfaceTexture = null;
+
+        /** Map of target surface textures (one for each target context) */
+        protected HashMap<EGLContext, SurfaceTexture> mTargetSurfaceTextures
+            = new HashMap<EGLContext, SurfaceTexture>();
+
+        /** Map of RenderTargets for client SurfaceTextures */
+        protected HashMap<SurfaceTexture, RenderTarget> mClientRenderTargets
+            = new HashMap<SurfaceTexture, RenderTarget>();
+
+        /** Server side copy shader */
+        protected ImageShader mCopyShader = null;
+
+        @Override
+        public synchronized void setupServerFrame() {
+            setupPreviewTexture(mRunner.mCamera);
+        }
+
+        @Override
+        public synchronized void updateServerFrame() {
+            // Latch the newest camera image, then fan it out to all client STs.
+            mPreviewSurfaceTexture.updateTexImage();
+            distributeFrames();
+        }
+
+        @Override
+        public void onUpdateCameraOrientation(int orientation) {
+            super.onUpdateCameraOrientation(orientation);
+            mRunner.mCamera.setDisplayOrientation(orientation);
+            updateSurfaceTextureSizes();
+        }
+
+        @Override
+        public synchronized void onRegisterClient(FrameClient client, EGLContext context) {
+            final Set<FrameClient> clientTargets = clientsForContext(context);
+
+            // Make sure we have texture, shader, and surfacetexture setup for this context.
+            // The return values are unused here; the calls lazily create and cache
+            // the per-context GL resources.
+            TextureSource clientTex = textureForContext(context);
+            ImageShader copyShader = shaderForContext(context);
+            SurfaceTexture surfTex = surfaceTextureForContext(context);
+
+            // Listen to client-side surface texture updates
+            surfTex.setOnFrameAvailableListener(new SurfaceTexture.OnFrameAvailableListener() {
+                @Override
+                public void onFrameAvailable(SurfaceTexture surfaceTexture) {
+                    for (FrameClient clientTarget : clientTargets) {
+                        clientTarget.onCameraFrameAvailable();
+                    }
+                }
+            });
+        }
+
+        @Override
+        public synchronized void grabFrame(FrameImage2D targetFrame) {
+            // Get the GL objects for the receiver's context
+            EGLContext clientContext = RenderTarget.currentContext();
+            TextureSource clientTex = textureForContext(clientContext);
+            ImageShader copyShader = shaderForContext(clientContext);
+            SurfaceTexture surfTex = surfaceTextureForContext(clientContext);
+            if (clientTex == null || copyShader == null || surfTex == null) {
+                throw new RuntimeException("Attempting to grab camera frame from unknown "
+                    + "thread: " + Thread.currentThread() + "!");
+            }
+
+            // Copy from client ST to client tex
+            surfTex.updateTexImage();
+            targetFrame.resize(new int[] { mOutWidth, mOutHeight });
+            copyShader.process(clientTex,
+                               targetFrame.lockRenderTarget(),
+                               mOutWidth,
+                               mOutHeight);
+
+            // Timestamp comes from the server preview ST, not the client ST.
+            targetFrame.setTimestamp(mPreviewSurfaceTexture.getTimestamp());
+            targetFrame.unlock();
+        }
+
+        @Override
+        public synchronized void release() {
+            // Releases only the server-side resources; per-context client
+            // resources are torn down via onCleanupContext().
+            if (mPreviewTexture != null) {
+                mPreviewTexture.release();
+                mPreviewTexture = null;
+            }
+            if (mPreviewSurfaceTexture != null) {
+                mPreviewSurfaceTexture.release();
+                mPreviewSurfaceTexture = null;
+            }
+        }
+
+        @Override
+        protected ImageShader createClientShader() {
+            return new ImageShader(mCopyShaderSource);
+        }
+
+        @Override
+        protected TextureSource createClientTexture() {
+            return TextureSource.newExternalTexture();
+        }
+
+        // Copy the freshly latched server texture into every client's
+        // SurfaceTexture via its cached RenderTarget.
+        protected void distributeFrames() {
+            updateTransform(getCopyShader());
+            updateShaderTargetRect(getCopyShader());
+
+            for (SurfaceTexture clientTexture : mTargetSurfaceTextures.values()) {
+                RenderTarget clientTarget = renderTargetFor(clientTexture);
+                clientTarget.focus();
+                getCopyShader().process(mPreviewTexture,
+                                        clientTarget,
+                                        mOutWidth,
+                                        mOutHeight);
+                GLToolbox.checkGlError("distribute frames");
+                clientTarget.swapBuffers();
+            }
+        }
+
+        // Lazily create and cache a RenderTarget wrapping the given client ST.
+        protected RenderTarget renderTargetFor(SurfaceTexture surfaceTex) {
+            RenderTarget target = mClientRenderTargets.get(surfaceTex);
+            if (target == null) {
+                target = RenderTarget.currentTarget().forSurfaceTexture(surfaceTex);
+                mClientRenderTargets.put(surfaceTex, target);
+            }
+            return target;
+        }
+
+        // Lazily create the server preview texture/ST pair and bind it to the
+        // camera. Idempotent: safe to call on every start.
+        protected void setupPreviewTexture(Camera camera) {
+            if (mPreviewTexture == null) {
+                mPreviewTexture = TextureSource.newExternalTexture();
+            }
+            if (mPreviewSurfaceTexture == null) {
+                mPreviewSurfaceTexture = new SurfaceTexture(mPreviewTexture.getTextureId());
+                try {
+                    camera.setPreviewTexture(mPreviewSurfaceTexture);
+                } catch (IOException e) {
+                    throw new RuntimeException("Could not bind camera surface texture: " +
+                                               e.getMessage() + "!");
+                }
+                mPreviewSurfaceTexture.setOnFrameAvailableListener(mOnCameraFrameListener);
+            }
+        }
+
+        protected ImageShader getCopyShader() {
+            if (mCopyShader == null) {
+                mCopyShader = new ImageShader(mCopyShaderSource);
+            }
+            return mCopyShader;
+        }
+
+        // Lazily create and cache a SurfaceTexture for the given client context,
+        // backed by that context's client texture. Returns null if the context
+        // has no client texture registered.
+        protected SurfaceTexture surfaceTextureForContext(EGLContext context) {
+            SurfaceTexture surfTex = mTargetSurfaceTextures.get(context);
+            if (surfTex == null) {
+                TextureSource texture = textureForContext(context);
+                if (texture != null) {
+                    surfTex = new SurfaceTexture(texture.getTextureId());
+                    surfTex.setDefaultBufferSize(mOutWidth, mOutHeight);
+                    mTargetSurfaceTextures.put(context, surfTex);
+                }
+            }
+            return surfTex;
+        }
+
+        protected void updateShaderTargetRect(ImageShader shader) {
+            // Mirror horizontally for the front camera when flipping is enabled.
+            if ((mRunner.mActualFacing == FACING_FRONT) && mRunner.mFlipFront) {
+                shader.setTargetRect(1f, 0f, -1f, 1f);
+            } else {
+                shader.setTargetRect(0f, 0f, 1f, 1f);
+            }
+        }
+
+        // Resize all client STs to the current output dimensions (called after
+        // an orientation change may have swapped width/height).
+        protected synchronized void updateSurfaceTextureSizes() {
+            for (SurfaceTexture clientTexture : mTargetSurfaceTextures.values()) {
+                clientTexture.setDefaultBufferSize(mOutWidth, mOutHeight);
+            }
+        }
+
+        // Apply the preview ST's transform matrix to the shader's source coords.
+        protected void updateTransform(ImageShader shader) {
+            mPreviewSurfaceTexture.getTransformMatrix(mCameraTransform);
+            shader.setSourceTransform(mCameraTransform);
+        }
+
+        @Override
+        protected void onCleanupContext(EGLContext context) {
+            super.onCleanupContext(context);
+            SurfaceTexture surfaceTex = mTargetSurfaceTextures.get(context);
+            if (surfaceTex != null) {
+                surfaceTex.release();
+                mTargetSurfaceTextures.remove(context);
+            }
+        }
+
+        // Wakes the camera thread whenever the camera pushes a new frame.
+        protected SurfaceTexture.OnFrameAvailableListener mOnCameraFrameListener =
+                new SurfaceTexture.OnFrameAvailableListener() {
+            @Override
+            public void onFrameAvailable(SurfaceTexture surfaceTexture) {
+                mRunner.signalNewFrame();
+            }
+        };
+    }
+
+    // Gingerbread (and later) back-end
+    // No SurfaceTexture support: receives NV21 preview callbacks into a
+    // double-buffered byte array and converts YUV -> RGBA with a shader when a
+    // client grabs a frame.
+    @TargetApi(9)
+    private final class CamFrameHandlerGB extends CamFrameHandler  {
+
+        private SurfaceView mSurfaceView;
+        // Double buffer for NV21 preview data: one buffer is written by the
+        // camera while the other is read by clients.
+        private byte[] mFrameBufferFront;
+        private byte[] mFrameBufferBack;
+        private boolean mWriteToBack = true;
+        private float[] mTargetCoords = new float[] { 0f, 0f, 1f, 0f, 0f, 1f, 1f, 1f };
+        // Guards mWriteToBack and buffer contents during swap/read.
+        final Object mBufferLock = new Object();
+
+        // Fragment shader: unpacks NV21 (Y plane followed by interleaved VU)
+        // stored as an RGBA texture of width cameraWidth/4, then converts to RGB.
+        private String mNV21ToRGBAFragment =
+            "precision mediump float;\n" +
+            "\n" +
+            "uniform sampler2D tex_sampler_0;\n" +
+            "varying vec2 v_y_texcoord;\n" +
+            "varying vec2 v_vu_texcoord;\n" +
+            "varying vec2 v_pixcoord;\n" +
+            "\n" +
+            "vec3 select(vec4 yyyy, vec4 vuvu, int s) {\n" +
+            "  if (s == 0) {\n" +
+            "    return vec3(yyyy.r, vuvu.g, vuvu.r);\n" +
+            "  } else if (s == 1) {\n" +
+            "    return vec3(yyyy.g, vuvu.g, vuvu.r);\n" +
+            " } else if (s == 2) {\n" +
+            "    return vec3(yyyy.b, vuvu.a, vuvu.b);\n" +
+            "  } else  {\n" +
+            "    return vec3(yyyy.a, vuvu.a, vuvu.b);\n" +
+            "  }\n" +
+            "}\n" +
+            "\n" +
+            "vec3 yuv2rgb(vec3 yuv) {\n" +
+            "  mat4 conversion = mat4(1.0,  0.0,    1.402, -0.701,\n" +
+            "                         1.0, -0.344, -0.714,  0.529,\n" +
+            "                         1.0,  1.772,  0.0,   -0.886,\n" +
+            "                         0, 0, 0, 0);" +
+            "  return (vec4(yuv, 1.0) * conversion).rgb;\n" +
+            "}\n" +
+            "\n" +
+            "void main() {\n" +
+            "  vec4 yyyy = texture2D(tex_sampler_0, v_y_texcoord);\n" +
+            "  vec4 vuvu = texture2D(tex_sampler_0, v_vu_texcoord);\n" +
+            "  int s = int(mod(floor(v_pixcoord.x), 4.0));\n" +
+            "  vec3 yuv = select(yyyy, vuvu, s);\n" +
+            "  vec3 rgb = yuv2rgb(yuv);\n" +
+            "  gl_FragColor = vec4(rgb, 1.0);\n" +
+            "}";
+
+        private String mNV21ToRGBAVertex =
+            "attribute vec4 a_position;\n" +
+            "attribute vec2 a_y_texcoord;\n" +
+            "attribute vec2 a_vu_texcoord;\n" +
+            "attribute vec2 a_pixcoord;\n" +
+            "varying vec2 v_y_texcoord;\n" +
+            "varying vec2 v_vu_texcoord;\n" +
+            "varying vec2 v_pixcoord;\n" +
+            "void main() {\n" +
+            "  gl_Position = a_position;\n" +
+            "  v_y_texcoord = a_y_texcoord;\n" +
+            "  v_vu_texcoord = a_vu_texcoord;\n" +
+            "  v_pixcoord = a_pixcoord;\n" +
+            "}\n";
+
+        // Buffer clients read from (the one NOT currently being written).
+        private byte[] readBuffer() {
+            synchronized (mBufferLock) {
+                return mWriteToBack ? mFrameBufferFront : mFrameBufferBack;
+            }
+        }
+
+        // Buffer the camera writes into next.
+        private byte[] writeBuffer() {
+            synchronized (mBufferLock) {
+                return mWriteToBack ? mFrameBufferBack : mFrameBufferFront;
+            }
+        }
+
+        // NOTE(review): method-level 'synchronized' here is redundant with the
+        // mBufferLock block; all other accesses synchronize on mBufferLock only.
+        private synchronized void swapBuffers() {
+            synchronized (mBufferLock) {
+                mWriteToBack = !mWriteToBack;
+            }
+        }
+
+        // Camera callback: publish the just-filled buffer, hand the other one
+        // back to the camera, and wake the camera thread.
+        private PreviewCallback mPreviewCallback = new PreviewCallback() {
+
+            @Override
+            public void onPreviewFrame(byte[] data, Camera camera) {
+                swapBuffers();
+                camera.addCallbackBuffer(writeBuffer());
+                mRunner.signalNewFrame();
+            }
+
+        };
+
+        @Override
+        public void setupServerFrame() {
+            checkCameraDimensions();
+            Camera camera = mRunner.mCamera;
+            // NV21 size: full-res Y plane plus half-res interleaved VU plane.
+            int bufferSize = mCameraWidth * (mCameraHeight + mCameraHeight/2);
+            mFrameBufferFront = new byte[bufferSize];
+            mFrameBufferBack = new byte[bufferSize];
+            camera.addCallbackBuffer(writeBuffer());
+            camera.setPreviewCallbackWithBuffer(mPreviewCallback);
+            SurfaceView previewDisplay = getPreviewDisplay();
+            if (previewDisplay != null) {
+                try {
+                    camera.setPreviewDisplay(previewDisplay.getHolder());
+                } catch (IOException e) {
+                    throw new RuntimeException("Could not start camera with given preview " +
+                            "display!");
+                }
+            }
+        }
+
+        // The shader packs 4 Y bytes per RGBA texel, so width must divide by 4;
+        // NV21 requires an even height.
+        private void checkCameraDimensions() {
+            if (mCameraWidth % 4 != 0) {
+                throw new RuntimeException("Camera width must be a multiple of 4!");
+            } else if (mCameraHeight % 2 != 0) {
+                throw new RuntimeException("Camera height must be a multiple of 2!");
+            }
+        }
+
+        @Override
+        public void updateServerFrame() {
+            // Server frame has been updated already, simply inform clients here.
+            informClients();
+        }
+
+        @Override
+        public void grabFrame(FrameImage2D targetFrame) {
+            EGLContext clientContext = RenderTarget.currentContext();
+
+            // Copy camera data to the client YUV texture
+            TextureSource clientTex = textureForContext(clientContext);
+            int texWidth = mCameraWidth / 4;
+            int texHeight = mCameraHeight + mCameraHeight / 2;
+            synchronized(mBufferLock) {    // Don't swap buffers while we are reading
+                ByteBuffer pixels = ByteBuffer.wrap(readBuffer());
+                clientTex.allocateWithPixels(pixels, texWidth, texHeight);
+            }
+            // Nearest filtering: texels encode packed YUV bytes, so any
+            // interpolation would corrupt the data.
+            clientTex.setParameter(GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
+            clientTex.setParameter(GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
+
+            // Setup the YUV-2-RGBA shader
+            ImageShader transferShader = shaderForContext(clientContext);
+            transferShader.setTargetCoords(mTargetCoords);
+            updateShaderPixelSize(transferShader);
+
+            // Convert pixels into target frame
+            targetFrame.resize(new int[] { mOutWidth, mOutHeight });
+            transferShader.process(clientTex,
+                    targetFrame.lockRenderTarget(),
+                    mOutWidth,
+                    mOutHeight);
+            targetFrame.unlock();
+        }
+
+        @Override
+        public void onUpdateCameraOrientation(int orientation) {
+            super.onUpdateCameraOrientation(orientation);
+            // Rotation (and front-camera mirroring) is baked into the quad's
+            // target coordinates rather than applied via a transform matrix.
+            if ((mRunner.mActualFacing == FACING_FRONT) && mRunner.mFlipFront) {
+                switch (orientation) {
+                    case 0:
+                        mTargetCoords = new float[] { 1f, 0f, 0f, 0f, 1f, 1f, 0f, 1f };
+                        break;
+                    case 90:
+                        mTargetCoords = new float[] { 0f, 0f, 0f, 1f, 1f, 0f, 1f, 1f };
+                        break;
+                    case 180:
+                        mTargetCoords = new float[] { 0f, 1f, 1f, 1f, 0f, 0f, 1f, 0f };
+                        break;
+                    case 270:
+                        mTargetCoords = new float[] { 1f, 1f, 1f, 0f, 0f, 1f, 0f, 0f };
+                        break;
+                }
+            } else {
+                switch (orientation) {
+                    case 0:
+                        mTargetCoords = new float[] { 0f, 0f, 1f, 0f, 0f, 1f, 1f, 1f };
+                        break;
+                    case 90:
+                        mTargetCoords = new float[] { 1f, 0f, 1f, 1f, 0f, 0f, 0f, 1f };
+                        break;
+                    case 180:
+                        mTargetCoords = new float[] { 1f, 1f, 0f, 1f, 1f, 0f, 0f, 0f };
+                        break;
+                    case 270:
+                        mTargetCoords = new float[] { 0f, 1f, 0f, 0f, 1f, 1f, 1f, 0f };
+                        break;
+                }
+            }
+        }
+
+        @Override
+        public void release() {
+            mFrameBufferBack = null;
+            mFrameBufferFront = null;
+        }
+
+        @Override
+        public boolean isFrontMirrored() {
+            // Mirroring is handled in mTargetCoords, not by the camera output.
+            return false;
+        }
+
+        @Override
+        protected ImageShader createClientShader() {
+            ImageShader shader = new ImageShader(mNV21ToRGBAVertex, mNV21ToRGBAFragment);
+            // TODO: Make this a VBO
+            // Top 2/3 of the packed texture is the Y plane; bottom 1/3 is VU.
+            float[] yCoords = new float[] {
+                    0f, 0f,
+                    1f, 0f,
+                    0f, 2f / 3f,
+                    1f, 2f / 3f };
+            float[] uvCoords = new float[] {
+                    0f, 2f / 3f,
+                    1f, 2f / 3f,
+                    0f, 1f,
+                    1f, 1f };
+            shader.setAttributeValues("a_y_texcoord", yCoords, 2);
+            shader.setAttributeValues("a_vu_texcoord", uvCoords, 2);
+            return shader;
+        }
+
+        @Override
+        protected TextureSource createClientTexture() {
+            TextureSource texture = TextureSource.newTexture();
+            texture.setParameter(GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
+            texture.setParameter(GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
+            return texture;
+        }
+
+        // Feed the shader the camera's pixel dimensions so v_pixcoord can select
+        // the right Y byte within each packed texel.
+        private void updateShaderPixelSize(ImageShader shader) {
+            float[] pixCoords = new float[] {
+                    0f, 0f,
+                    mCameraWidth, 0f,
+                    0f, mCameraHeight,
+                    mCameraWidth, mCameraHeight };
+            shader.setAttributeValues("a_pixcoord", pixCoords, 2);
+        }
+
+        // Pre-ICS cameras require a preview display; fall back to the context's
+        // dummy SurfaceView.
+        private SurfaceView getPreviewDisplay() {
+            if (mSurfaceView == null) {
+                mSurfaceView = mRunner.getContext().getDummySurfaceView();
+            }
+            return mSurfaceView;
+        }
+
+        private void informClients() {
+            synchronized (mClients) {
+                for (FrameClient client : mClients) {
+                    client.onCameraFrameAvailable();
+                }
+            }
+        }
+    }
+
+    // Thread-safe holder for the camera runner's lifecycle state.
+    private static class State {
+        public static final int STATE_RUNNING = 1;
+        public static final int STATE_STOPPED = 2;
+        public static final int STATE_HALTED = 3;
+
+        // Atomic so the state can be read/written from any thread without
+        // additional locking.
+        private AtomicInteger mCurrent = new AtomicInteger(STATE_STOPPED);
+
+        public int current() {
+            return mCurrent.get();
+        }
+
+        public void set(int newState) {
+            mCurrent.set(newState);
+        }
+    }
+
+    // Message posted to the CameraRunnable's event queue.
+    private static class Event {
+        public static final int START = 1;
+        public static final int FRAME = 2;
+        public static final int STOP = 3;
+        public static final int HALT = 4;
+        public static final int RESTART = 5;
+        public static final int UPDATE = 6;
+        public static final int TEARDOWN = 7;
+
+        // One of the constants above.
+        public int code;
+
+        public Event(int code) {
+            this.code = code;
+        }
+    }
+
+    private final class CameraRunnable implements Runnable {
+
+        /** On slower devices the event queue can easily fill up. We bound the queue to this. */
+        private final static int MAX_EVENTS = 32;
+
+        /** The runner's state */
+        private State mState = new State();
+
+        /** The CameraRunner's event queue */
+        private LinkedBlockingQueue<Event> mEventQueue = new LinkedBlockingQueue<Event>(MAX_EVENTS);
+
+        /** The requested FPS */
+        private int mRequestedFramesPerSec = 30;
+
+        /** The actual FPS */
+        private int mActualFramesPerSec = 0;
+
+        /** The requested preview width and height */
+        private int mRequestedPreviewWidth = 640;
+        private int mRequestedPreviewHeight = 480;
+
+        /** The requested picture width and height */
+        private int mRequestedPictureWidth = 640;
+        private int mRequestedPictureHeight = 480;
+
+        /** The actual camera width and height */
+        private int[] mActualDims = null;
+
+        /** The requested facing */
+        private int mRequestedFacing = FACING_DONTCARE;
+
+        /** The actual facing */
+        private int mActualFacing = FACING_DONTCARE;
+
+        /** Whether to horizontally flip the front facing camera */
+        private boolean mFlipFront = true;
+
+        /** The display the camera streamer is bound to. */
+        private Display mDisplay = null;
+
+        /** The camera and screen orientation. */
+        private int mCamOrientation = 0;
+        private int mOrientation = -1;
+
+        /** The camera rotation (used for capture). */
+        private int mCamRotation = 0;
+
+        /** The camera flash mode */
+        private String mFlashMode = Camera.Parameters.FLASH_MODE_OFF;
+
+        /** The camera object */
+        private Camera mCamera = null;
+
+        /** Recorder handle; set externally (not visible in this chunk). */
+        private MediaRecorder mRecorder = null;
+
+        /** The ID of the currently used camera */
+        int mCamId = 0;
+
+        /** The platform-dependent camera frame handler. */
+        private CamFrameHandler mCamFrameHandler = null;
+
+        /** The set of camera listeners. Guarded by itself (synchronized (mCamListeners)). */
+        private Set<CameraListener> mCamListeners = new HashSet<CameraListener>();
+
+        private ReentrantLock mCameraReadyLock = new ReentrantLock(true);
+        // mCameraReady condition is used when waiting for the camera getting ready.
+        private Condition mCameraReady = mCameraReadyLock.newCondition();
+        // external camera lock used to provide the capability of external camera access.
+        private ExternalCameraLock mExternalCameraLock = new ExternalCameraLock();
+
+        private RenderTarget mRenderTarget;
+        private MffContext mContext;
+
+        /**
+         *  This provides the capability of locking and unlocking from different threads.
+         *  The thread will wait until the lock state is idle. Any thread can wake up
+         *  a waiting thread by calling unlock (i.e. signal), provided that unlock
+         *  are called using the same context when lock was called. Using context prevents
+         *  from rogue usage of unlock.
+         */
+        private class ExternalCameraLock {
+            public static final int IDLE = 0;
+            public static final int IN_USE = 1;
+            private int mLockState = IDLE;
+            private Object mLockContext;
+            private final ReentrantLock mLock = new ReentrantLock(true);
+            private final Condition mInUseLockCondition= mLock.newCondition();
+
+            public boolean lock(Object context) {
+                if (context == null) {
+                    throw new RuntimeException("Null context when locking");
+                }
+                mLock.lock();
+                if (mLockState == IN_USE) {
+                    try {
+                        mInUseLockCondition.await();
+                    } catch (InterruptedException e) {
+                        return false;
+                    }
+                }
+                mLockState = IN_USE;
+                mLockContext = context;
+                mLock.unlock();
+                return true;
+            }
+
+            public void unlock(Object context) {
+                mLock.lock();
+                if (mLockState != IN_USE) {
+                    throw new RuntimeException("Not in IN_USE state");
+                }
+                if (context != mLockContext) {
+                    throw new RuntimeException("Lock is not owned by this context");
+                }
+                mLockState = IDLE;
+                mLockContext = null;
+                mInUseLockCondition.signal();
+                mLock.unlock();
+            }
+        }
+
+        // Creates the runner, picks the platform frame handler, and immediately
+        // starts the dedicated camera thread.
+        public CameraRunnable(MffContext context) {
+            mContext = context;
+            createCamFrameHandler();
+            mCamFrameHandler.initWithRunner(this);
+            launchThread();
+        }
+
+        public MffContext getContext() {
+            return mContext;
+        }
+
+        // Event loop of the camera thread: blocks on the event queue and
+        // dispatches each event. Runs forever; exceptions from handlers are
+        // logged (printed) and the loop continues.
+        public void loop() {
+            while (true) {
+                try {
+                    Event event = nextEvent();
+                    if (event == null) continue;
+                    switch (event.code) {
+                        case Event.START:
+                            onStart();
+                            break;
+                        case Event.STOP:
+                            onStop();
+                            break;
+                        case Event.FRAME:
+                            onFrame();
+                            break;
+                        case Event.HALT:
+                            onHalt();
+                            break;
+                        case Event.RESTART:
+                            onRestart();
+                            break;
+                        case Event.UPDATE:
+                            onUpdate();
+                            break;
+                        case Event.TEARDOWN:
+                            onTearDown();
+                            break;
+                    }
+                } catch (Exception e) {
+                    // Keep the camera thread alive; surface the failure on stderr.
+                    e.printStackTrace();
+                }
+            }
+        }
+
+        @Override
+        public void run() {
+            loop();
+        }
+
+        // Posts a FRAME event; non-required, so it is silently dropped when the
+        // bounded queue is full (acceptable: a newer frame will follow).
+        public void signalNewFrame() {
+            pushEvent(Event.FRAME, false);
+        }
+
+        // Posts an event to the camera thread. 'required' events block via put()
+        // until space is available; optional events use offer() and are silently
+        // dropped when the bounded queue is full.
+        public void pushEvent(int eventId, boolean required) {
+            try {
+                if (required) {
+                    mEventQueue.put(new Event(eventId));
+                } else {
+                    mEventQueue.offer(new Event(eventId));
+                }
+            } catch (InterruptedException e) {
+                // Only put() can throw here (the queue IS bounded to MAX_EVENTS,
+                // so put() may block and be interrupted). The event is dropped.
+                // NOTE(review): the interrupt status is swallowed — should call
+                // Thread.currentThread().interrupt() before returning.
+                Log.e("CameraStreamer", "Dropping event " + eventId + "!");
+            }
+        }
+
+        // Starts the dedicated camera thread running this Runnable's loop().
+        public void launchThread() {
+            Thread cameraThread = new Thread(this);
+            cameraThread.start();
+        }
+
+        // Direct camera access; prefer lockCamera()/unlockCamera(), which
+        // coordinate with the camera open/close lifecycle.
+        @Deprecated
+        public Camera getCamera() {
+            synchronized (mState) {
+                return mCamera;
+            }
+        }
+
+        // Acquires exclusive external access to the camera, blocking until a
+        // valid camera handle exists. Must be paired with unlockCamera(context)
+        // using the same context object.
+        public Camera lockCamera(Object context) {
+            mExternalCameraLock.lock(context);
+            /**
+             * since lockCamera can happen right after closeCamera,
+             * the camera handle can be null, wait until valid handle
+             * is acquired.
+             */
+            while (mCamera == null) {
+                // Release the external lock while waiting so the camera thread
+                // can make progress and open the camera.
+                mExternalCameraLock.unlock(context);
+                mCameraReadyLock.lock();
+                // NOTE(review): if the camera becomes ready between the unlock
+                // above and this await(), the signal is missed and this waits
+                // for the next one; the while-loop re-check makes this safe but
+                // potentially slow.
+                try {
+                    mCameraReady.await();
+                } catch (InterruptedException e) {
+                    throw new RuntimeException("Condition interrupted", e);
+                }
+                mCameraReadyLock.unlock();
+                mExternalCameraLock.lock(context);
+            }
+            return mCamera;
+        }
+
+        public void unlockCamera(Object context) {
+            mExternalCameraLock.unlock(context);
+        }
+
+        public int getCurrentCameraId() {
+            synchronized (mState) {
+                return mCamId;
+            }
+        }
+
+        // True while running or halted; only STOPPED counts as not running.
+        public boolean isRunning() {
+            return mState.current() != State.STATE_STOPPED;
+        }
+
+        public void addListener(CameraListener listener) {
+            synchronized (mCamListeners) {
+                mCamListeners.add(listener);
+            }
+        }
+
+        public void removeListener(CameraListener listener) {
+            synchronized (mCamListeners) {
+                mCamListeners.remove(listener);
+            }
+        }
+
+        // Display used to derive the camera/screen orientation.
+        public synchronized void bindToDisplay(Display display) {
+            mDisplay = display;
+        }
+
+        // Requested-parameter setters: each posts a (required) UPDATE event only
+        // when the value actually changes.
+        public synchronized void setDesiredPreviewSize(int width, int height) {
+            if (width != mRequestedPreviewWidth || height != mRequestedPreviewHeight) {
+                mRequestedPreviewWidth = width;
+                mRequestedPreviewHeight = height;
+                onParamsUpdated();
+            }
+        }
+
+        public synchronized void setDesiredPictureSize(int width, int height) {
+            if (width != mRequestedPictureWidth || height != mRequestedPictureHeight) {
+                mRequestedPictureWidth = width;
+                mRequestedPictureHeight = height;
+                onParamsUpdated();
+            }
+        }
+
+        public synchronized void setDesiredFrameRate(int fps) {
+            if (fps != mRequestedFramesPerSec) {
+                mRequestedFramesPerSec = fps;
+                onParamsUpdated();
+            }
+        }
+
+        // Validates and stores the requested facing, triggering an UPDATE when
+        // it changes. Rejects values other than the FACING_* constants.
+        public synchronized void setFacing(int facing) {
+            if (facing != mRequestedFacing) {
+                switch (facing) {
+                    case FACING_DONTCARE:
+                    case FACING_FRONT:
+                    case FACING_BACK:
+                        mRequestedFacing = facing;
+                        break;
+                    default:
+                        throw new IllegalArgumentException("Unknown facing value '" + facing
+                            + "' passed to setFacing!");
+                }
+                onParamsUpdated();
+            }
+        }
+
+        // Note: unlike the other setters, this does NOT post an UPDATE event;
+        // the new value takes effect on the next frame/restart.
+        public synchronized void setFlipFrontCamera(boolean flipFront) {
+            if (mFlipFront != flipFront) {
+                mFlipFront = flipFront;
+            }
+        }
+
+        public synchronized void setFlashMode(String flashMode) {
+            if (!flashMode.equals(mFlashMode)) {
+                mFlashMode = flashMode;
+                onParamsUpdated();
+            }
+        }
+
+        public synchronized int getCameraFacing() {
+            return mActualFacing;
+        }
+
+        public synchronized int getCameraRotation() {
+            return mCamRotation;
+        }
+
+        public synchronized boolean supportsHardwareFaceDetection() {
+            //return mCamFrameHandler.supportsHardwareFaceDetection();
+            // TODO
+            return true;
+        }
+
+        // Actual camera dimensions; 0 until the camera has been opened.
+        public synchronized int getCameraWidth() {
+            return (mActualDims != null) ? mActualDims[0] : 0;
+        }
+
+        public synchronized int getCameraHeight() {
+            return (mActualDims != null) ? mActualDims[1] : 0;
+        }
+
+        public synchronized int getCameraFrameRate() {
+            return mActualFramesPerSec;
+        }
+
+        // NOTE(review): throws NullPointerException if called while mCamera is
+        // null (camera closed/switching) — unlike the other getters it queries
+        // the live camera rather than a cached field.
+        public synchronized String getFlashMode() {
+            return mCamera.getParameters().getFlashMode();
+        }
+
+        public synchronized boolean canStart() {
+            // If we can get a camera id without error we should be able to start.
+            try {
+                getCameraId();
+            } catch (RuntimeException e) {
+                return false;
+            }
+            return true;
+        }
+
+        /**
+         * Copies the latest camera frame into targetFrame.
+         * Must be called from the client thread (see FrameClient docs).
+         *
+         * @return false if the camera is not running, or if mCamera is null mid-switch.
+         */
+        public boolean grabFrame(FrameImage2D targetFrame) {
+            // Make sure we stay in state running while we are grabbing the frame.
+            synchronized (mState) {
+                if (mState.current() != State.STATE_RUNNING) {
+                    return false;
+                }
+                // we may not have the camera ready, this might happen when in the middle
+                // of switching camera.
+                if (mCamera == null) {
+                    return false;
+                }
+                mCamFrameHandler.grabFrame(targetFrame);
+                return true;
+            }
+        }
+
+        // Accessor for the platform-specific frame handler (client registration etc.).
+        public CamFrameHandler getCamFrameHandler() {
+            return mCamFrameHandler;
+        }
+
+        // Queues an UPDATE event so the camera is reopened with the new parameters.
+        private void onParamsUpdated() {
+            pushEvent(Event.UPDATE, true);
+        }
+
+        // Blocks until the next event is available; returns null if the wait is interrupted.
+        private Event nextEvent() {
+            try {
+                return mEventQueue.take();
+            } catch (InterruptedException e) {
+                // Ignore and keep going.
+                // NOTE(review): the "GraphRunner" tag looks copy-pasted — should probably be
+                // "CameraStreamer". The thread's interrupt flag is also swallowed here.
+                Log.w("GraphRunner", "Event queue processing was interrupted.");
+                return null;
+            }
+        }
+
+        // START: transition STOPPED -> RUNNING, focus the GL render target, open the camera.
+        private void onStart() {
+            if (mState.current() == State.STATE_STOPPED) {
+                mState.set(State.STATE_RUNNING);
+                getRenderTarget().focus();
+                openCamera();
+            }
+        }
+
+        // STOP: close the camera if running; always end in STOPPED (overrides HALTED).
+        private void onStop() {
+            if (mState.current() == State.STATE_RUNNING) {
+                closeCamera();
+                RenderTarget.focusNone();
+            }
+            // Set state to stop (halted becomes stopped).
+            mState.set(State.STATE_STOPPED);
+        }
+
+        // HALT: like stop, but resumable via RESTART; no-op unless currently running.
+        private void onHalt() {
+            // Only halt if running. Stopped overrides halt.
+            if (mState.current() == State.STATE_RUNNING) {
+                closeCamera();
+                RenderTarget.focusNone();
+                mState.set(State.STATE_HALTED);
+            }
+        }
+
+        // RESTART: resume a previously halted camera.
+        private void onRestart() {
+            // Only restart if halted
+            if (mState.current() == State.STATE_HALTED) {
+                mState.set(State.STATE_RUNNING);
+                getRenderTarget().focus();
+                openCamera();
+            }
+        }
+
+        // UPDATE: parameters changed while running; reopen the camera via STOP then START.
+        private void onUpdate() {
+            if (mState.current() == State.STATE_RUNNING) {
+                pushEvent(Event.STOP, true);
+                pushEvent(Event.START, true);
+            }
+        }
+        // FRAME: a new preview frame is available; refresh rotation and the server frame.
+        private void onFrame() {
+            if (mState.current() == State.STATE_RUNNING) {
+                updateRotation();
+                mCamFrameHandler.updateServerFrame();
+            }
+        }
+
+        // TEARDOWN: release listener resources; only legal once fully stopped.
+        private void onTearDown() {
+            if (mState.current() == State.STATE_STOPPED) {
+                // Remove all listeners. This will release their resources
+                for (CameraListener listener : mCamListeners) {
+                    removeListener(listener);
+                }
+                mCamListeners.clear();
+            } else {
+                Log.e("CameraStreamer", "Could not tear-down CameraStreamer as camera still "
+                        + "seems to be running!");
+            }
+        }
+
+        // Instantiates the platform-version-specific frame handler (JB >= 16, ICS >= 15,
+        // Gingerbread otherwise). Requires OpenGL support.
+        private void createCamFrameHandler() {
+            // TODO: For now we simply assert that OpenGL is supported. Later on, we should add
+            // a CamFrameHandler that does not depend on OpenGL.
+            getContext().assertOpenGLSupported();
+            if (VERSION.SDK_INT >= 16) {
+                mCamFrameHandler = new CamFrameHandlerJB();
+            } else if (VERSION.SDK_INT >= 15) {
+                mCamFrameHandler = new CamFrameHandlerICS();
+            } else {
+                mCamFrameHandler = new CamFrameHandlerGB();
+            }
+        }
+
+        // Re-reads the bound display's rotation, if a display is bound (see bindToDisplay()).
+        private void updateRotation() {
+            if (mDisplay != null) {
+                updateDisplayRotation(mDisplay.getRotation());
+            }
+        }
+
+        /**
+         * Maps a Surface.ROTATION_ constant to degrees and updates the camera orientation.
+         *
+         * @throws IllegalArgumentException if rotation is not a Surface.ROTATION_ constant.
+         */
+        private synchronized void updateDisplayRotation(int rotation) {
+            switch (rotation) {
+                case Surface.ROTATION_0:
+                    onUpdateOrientation(0);
+                    break;
+                case Surface.ROTATION_90:
+                    onUpdateOrientation(90);
+                    break;
+                case Surface.ROTATION_180:
+                    onUpdateOrientation(180);
+                    break;
+                case Surface.ROTATION_270:
+                    onUpdateOrientation(270);
+                    break;
+                default:
+                    throw new IllegalArgumentException("Unsupported display rotation constant! Use "
+                        + "one of the Surface.ROTATION_ constants!");
+            }
+        }
+
+        // Lazily creates the (1x1) render target used to hold a current GL context.
+        private RenderTarget getRenderTarget() {
+            if (mRenderTarget == null) {
+                mRenderTarget = RenderTarget.newTarget(1, 1);
+            }
+            return mRenderTarget;
+        }
+
+        // Resolves the camera id, caches its orientation/facing, opens the camera and applies
+        // the requested parameters. Guarded by the mState monitor to serialize with grabFrame.
+        private void updateCamera() {
+            synchronized (mState) {
+                mCamId = getCameraId();
+                updateCameraOrientation(mCamId);
+                mCamera = Camera.open(mCamId);
+                initCameraParameters();
+            }
+        }
+
+        // Caches orientation and actual facing for the given camera id.
+        private void updateCameraOrientation(int camId) {
+            CameraInfo cameraInfo = new CameraInfo();
+            Camera.getCameraInfo(camId, cameraInfo);
+            mCamOrientation = cameraInfo.orientation;
+            mOrientation = -1;  // Forces recalculation to match display
+            mActualFacing = (cameraInfo.facing == CameraInfo.CAMERA_FACING_FRONT)
+                ? FACING_FRONT
+                : FACING_BACK;
+        }
+
+        /**
+         * Resolves the id of the camera matching mRequestedFacing.
+         *
+         * @throws RuntimeException if the device has no cameras, or if no camera matches the
+         *         requested facing.
+         */
+        private int getCameraId() {
+            int camCount = Camera.getNumberOfCameras();
+            if (camCount == 0) {
+                throw new RuntimeException("Device does not have any cameras!");
+            } else if (mRequestedFacing == FACING_DONTCARE) {
+                // Simply return first camera if mRequestedFacing is don't care
+                return 0;
+            }
+
+            // Attempt to find requested camera
+            boolean useFrontCam = (mRequestedFacing == FACING_FRONT);
+            CameraInfo cameraInfo = new CameraInfo();
+            for (int i = 0; i < camCount; ++i) {
+                Camera.getCameraInfo(i, cameraInfo);
+                if ((cameraInfo.facing == CameraInfo.CAMERA_FACING_FRONT) == useFrontCam) {
+                    return i;
+                }
+            }
+            throw new RuntimeException("Could not find a camera facing (" + mRequestedFacing
+                    + ")!");
+        }
+
+        // Applies the requested preview/picture size, fps range and flash mode to the open
+        // camera, clamping each to the closest supported value, and records the actuals.
+        private void initCameraParameters() {
+            Camera.Parameters params = mCamera.getParameters();
+
+            // Find closest preview size
+            mActualDims =
+                findClosestPreviewSize(mRequestedPreviewWidth, mRequestedPreviewHeight, params);
+            mCamFrameHandler.setCameraSize(mActualDims[0], mActualDims[1]);
+            params.setPreviewSize(mActualDims[0], mActualDims[1]);
+            // Find closest picture size
+            int[] dims =
+                findClosestPictureSize(mRequestedPictureWidth, mRequestedPictureHeight, params);
+            params.setPictureSize(dims[0], dims[1]);
+
+            // Find closest FPS
+            int closestRange[] = findClosestFpsRange(mRequestedFramesPerSec, params);
+            params.setPreviewFpsRange(closestRange[Camera.Parameters.PREVIEW_FPS_MIN_INDEX],
+                                      closestRange[Camera.Parameters.PREVIEW_FPS_MAX_INDEX]);
+
+            // Set flash mode (if supported)
+            if (params.getFlashMode() != null) {
+                params.setFlashMode(mFlashMode);
+            }
+
+            mCamera.setParameters(params);
+        }
+
+        // Returns the supported preview size closest to (width x height).
+        private int[] findClosestPreviewSize(int width, int height, Camera.Parameters parameters) {
+            List<Camera.Size> previewSizes = parameters.getSupportedPreviewSizes();
+            return findClosestSizeFromList(width, height, previewSizes);
+        }
+
+        // Returns the supported picture size closest to (width x height).
+        private int[] findClosestPictureSize(int width, int height, Camera.Parameters parameters) {
+            List<Camera.Size> pictureSizes = parameters.getSupportedPictureSizes();
+            return findClosestSizeFromList(width, height, pictureSizes);
+        }
+
+        // Picks the largest listed size that fits within (width x height); if none fits,
+        // falls back to the smallest listed size. Returns {width, height}.
+        private int[] findClosestSizeFromList(int width, int height, List<Camera.Size> sizes) {
+            int closestWidth = -1;
+            int closestHeight = -1;
+            int smallestWidth = sizes.get(0).width;
+            int smallestHeight =  sizes.get(0).height;
+            for (Camera.Size size : sizes) {
+                // Best match defined as not being larger in either dimension than
+                // the requested size, but as close as possible. The below isn't a
+                // stable selection (reordering the size list can give different
+                // results), but since this is a fallback nicety, that's acceptable.
+                if ( size.width <= width &&
+                     size.height <= height &&
+                     size.width >= closestWidth &&
+                     size.height >= closestHeight) {
+                    closestWidth = size.width;
+                    closestHeight = size.height;
+                }
+                // NOTE(review): this only updates when BOTH dimensions are strictly smaller,
+                // so "smallest" may not be the true minimum for every size list — confirm.
+                if ( size.width < smallestWidth &&
+                     size.height < smallestHeight) {
+                    smallestWidth = size.width;
+                    smallestHeight = size.height;
+                }
+            }
+            if (closestWidth == -1) {
+                // Requested size is smaller than any listed size; match with smallest possible
+                closestWidth = smallestWidth;
+                closestHeight = smallestHeight;
+            }
+            int[] closestSize = {closestWidth, closestHeight};
+            return closestSize;
+        }
+
+        // Picks the supported preview fps range that contains the requested rate with the
+        // tightest bounds; falls back to the first supported range if none contains it.
+        // Camera.Parameters ranges are in fps x 1000 fixed-point units.
+        // Side effect: records the chosen range's max (in fps) in mActualFramesPerSec.
+        private int[] findClosestFpsRange(int fps, Camera.Parameters params) {
+            List<int[]> supportedFpsRanges = params.getSupportedPreviewFpsRange();
+            int[] closestRange = supportedFpsRanges.get(0);
+            int fpsk = fps * 1000;
+            int minDiff = 1000000;
+            for (int[] range : supportedFpsRanges) {
+                int low = range[Camera.Parameters.PREVIEW_FPS_MIN_INDEX];
+                int high = range[Camera.Parameters.PREVIEW_FPS_MAX_INDEX];
+                if (low <= fpsk && high >= fpsk) {
+                    // Prefer the narrowest range containing the requested rate.
+                    int diff = (fpsk - low) + (high - fpsk);
+                    if (diff < minDiff) {
+                        closestRange = range;
+                        minDiff = diff;
+                    }
+                }
+            }
+            mActualFramesPerSec = closestRange[Camera.Parameters.PREVIEW_FPS_MAX_INDEX] / 1000;
+            return closestRange;
+        }
+
+        // Recomputes the camera rotation for the given display orientation (degrees), then the
+        // mirror-compensated orientation, notifying the frame handler when the latter changes.
+        private void onUpdateOrientation(int orientation) {
+            // First we calculate the camera rotation.
+            int rotation = (mActualFacing == FACING_FRONT)
+                    ? (mCamOrientation + orientation) % 360
+                    : (mCamOrientation - orientation + 360) % 360;
+            if (rotation != mCamRotation) {
+                synchronized (this) {
+                    mCamRotation = rotation;
+                }
+            }
+
+            // We compensate for mirroring in the orientation. This differs from the rotation,
+            // where we are invariant to mirroring.
+            int fixedOrientation = rotation;
+            if (mActualFacing == FACING_FRONT && mCamFrameHandler.isFrontMirrored()) {
+                fixedOrientation = (360 - rotation) % 360;  // compensate the mirror
+            }
+            if (mOrientation != fixedOrientation) {
+                mOrientation = fixedOrientation;
+                mCamFrameHandler.onUpdateCameraOrientation(mOrientation);
+            }
+        }
+
+        /**
+         * Opens the camera, starts the preview, notifies listeners and signals mCameraReady.
+         * Holds mExternalCameraLock during setup so external holders cannot touch the camera
+         * until the preview is running again; mCameraLock stays held until closeCamera().
+         * NOTE(review): if setup throws (e.g. Camera.open fails), mCameraLock and
+         * mExternalCameraLock are not released — confirm callers can tolerate this.
+         */
+        private void openCamera() {
+            // Acquire lock for camera
+            try {
+                if (!mCameraLock.tryLock(MAX_CAMERA_WAIT_TIME, TimeUnit.SECONDS)) {
+                    throw new RuntimeException("Timed out while waiting to acquire camera!");
+                }
+            } catch (InterruptedException e) {
+                throw new RuntimeException("Interrupted while waiting to acquire camera!");
+            }
+
+            // Make sure external entities are not holding camera. We need to hold the lock until
+            // the preview is started again.
+            Object lockContext = new Object();
+            mExternalCameraLock.lock(lockContext);
+
+            // Need to synchronize this as many of the member values are modified during setup.
+            synchronized (this) {
+                updateCamera();
+                updateRotation();
+                mCamFrameHandler.setupServerFrame();
+            }
+
+            mCamera.startPreview();
+
+            // Inform listeners
+            synchronized (mCamListeners) {
+                for (CameraListener listener : mCamListeners) {
+                    listener.onCameraOpened(CameraStreamer.this);
+                }
+            }
+            mExternalCameraLock.unlock(lockContext);
+            // New camera started
+            mCameraReadyLock.lock();
+            mCameraReady.signal();
+            mCameraReadyLock.unlock();
+        }
+
+        /**
+         * Creates an instance of MediaRecorder to be used for the streamer.
+         * User should call the functions in the following sequence:<p>
+         *   {@link #createRecorder}<p>
+         *   {@link #startRecording}<p>
+         *   {@link #stopRecording}<p>
+         *   {@link #releaseRecorder}<p>
+         * @param outputPath the output video path for the recorder
+         * @param profile the recording {@link CamcorderProfile} which has parameters indicating
+         *  the resolution, quality etc.
+         */
+        public void createRecorder(String outputPath, CamcorderProfile profile) {
+            lockCamera(this);
+            mCamera.unlock();  // Hand the camera over to MediaRecorder.
+            if (mRecorder != null) {
+                mRecorder.release();
+            }
+            mRecorder = new MediaRecorder();
+            mRecorder.setCamera(mCamera);
+            mRecorder.setAudioSource(MediaRecorder.AudioSource.CAMCORDER);
+            mRecorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);
+            mRecorder.setProfile(profile);
+            mRecorder.setOutputFile(outputPath);
+            try {
+                mRecorder.prepare();
+            } catch (Exception e) {
+                // NOTE(review): broad catch; on failure the Camera remains unlocked and the
+                // streamer lock stays held — verify the intended recovery path.
+                throw new RuntimeException(e);
+            }
+        }
+
+        /**
+         * Starts recording video using the created MediaRecorder object.
+         * @throws RuntimeException if {@link #createRecorder} has not been called first.
+         */
+        public void startRecording() {
+            if (mRecorder == null) {
+                throw new RuntimeException("No recorder created");
+            }
+            mRecorder.start();
+        }
+
+        /**
+         * Stops recording video.
+         * @throws RuntimeException if {@link #createRecorder} has not been called first.
+         */
+        public void stopRecording() {
+            if (mRecorder == null) {
+                throw new RuntimeException("No recorder created");
+            }
+            mRecorder.stop();
+        }
+
+        /**
+         * Release the resources held by the MediaRecorder, call this after done recording.
+         * Re-locks the Camera and returns it to the streamer (undoes createRecorder()).
+         * @throws RuntimeException if {@link #createRecorder} has not been called first.
+         */
+        public void releaseRecorder() {
+            if (mRecorder == null) {
+                throw new RuntimeException("No recorder created");
+            }
+            mRecorder.release();
+            mRecorder = null;
+            mCamera.lock();
+            unlockCamera(this);
+        }
+
+        // Stops the preview, releases the camera and the frame handler, then informs
+        // listeners. Releases mCameraLock acquired in openCamera().
+        private void closeCamera() {
+            Object lockContext = new Object();
+            mExternalCameraLock.lock(lockContext);
+            if (mCamera != null) {
+                mCamera.stopPreview();
+                mCamera.release();
+                mCamera = null;
+            }
+            mCameraLock.unlock();
+            mCamFrameHandler.release();
+            mExternalCameraLock.unlock(lockContext);
+            // Inform listeners
+            synchronized (mCamListeners) {
+                for (CameraListener listener : mCamListeners) {
+                    listener.onCameraClosed(CameraStreamer.this);
+                }
+            }
+        }
+
+    }
+
+    /**
+     * The frame-client callback interface.
+     * FrameClients that wish to receive Frames from the camera must implement this callback
+     * method.
+     * Note that this method is called on the Camera server thread. However, the
+     * {@code getLatestFrame()} method must be called from the client thread.
+     */
+    public static interface FrameClient {
+        /** Invoked on the camera server thread whenever a new frame is available to fetch. */
+        public void onCameraFrameAvailable();
+    }
+
+    /**
+     * The CameraListener callback interface.
+     * This interface allows observers to monitor the CameraStreamer and respond to stream
+     * open and close events.
+     */
+    public static interface CameraListener {
+        /**
+         * Called when the camera is opened and begins producing frames.
+         * This is also called when settings have changed that caused the camera to be reopened.
+         */
+        public void onCameraOpened(CameraStreamer camera);
+
+        /**
+         * Called when the camera is closed and stops producing frames.
+         */
+        public void onCameraClosed(CameraStreamer camera);
+    }
+
+    /**
+     * Manually update the display rotation.
+     * You do not need to call this if the camera is bound to a display, or your app does not
+     * support multiple orientations.
+     *
+     * @param rotation one of the Surface.ROTATION_ constants.
+     */
+    public void updateDisplayRotation(int rotation) {
+        mCameraRunner.updateDisplayRotation(rotation);
+    }
+
+    /**
+     * Bind the camera to your Activity's display.
+     * Use this, if your Activity supports multiple display orientation, and you would like the
+     * camera to update accordingly when the orientation is changed.
+     */
+    public void bindToDisplay(Display display) {
+        mCameraRunner.bindToDisplay(display);
+    }
+
+    /**
+     * Sets the desired preview size.
+     * Note that the actual width and height may vary.
+     *
+     * @param width The desired width of the preview camera stream.
+     * @param height The desired height of the preview camera stream.
+     */
+    public void setDesiredPreviewSize(int width, int height) {
+        mCameraRunner.setDesiredPreviewSize(width, height);
+    }
+
+    /**
+     * Sets the desired picture size.
+     * Note that the actual width and height may vary.
+     *
+     * @param width The desired picture width.
+     * @param height The desired picture height.
+     */
+    public void setDesiredPictureSize(int width, int height) {
+        mCameraRunner.setDesiredPictureSize(width, height);
+    }
+
+    /**
+     * Sets the desired camera frame-rate.
+     * Note that the actual frame-rate may vary.
+     *
+     * @param fps The desired FPS.
+     */
+    public void setDesiredFrameRate(int fps) {
+        mCameraRunner.setDesiredFrameRate(fps);
+    }
+
+    /**
+     * Sets the camera facing direction.
+     *
+     * Specify {@code FACING_DONTCARE} (default) if you would like the CameraStreamer to choose
+     * the direction. When specifying any other direction be sure to first check whether the
+     * device supports the desired facing.
+     *
+     * @param facing The desired camera facing direction.
+     */
+    public void setFacing(int facing) {
+        mCameraRunner.setFacing(facing);
+    }
+
+    /**
+     * Set whether to flip the camera image horizontally when using the front facing camera.
+     */
+    public void setFlipFrontCamera(boolean flipFront) {
+        mCameraRunner.setFlipFrontCamera(flipFront);
+    }
+
+    /**
+     * Sets the camera flash mode.
+     *
+     * This must be one of the String constants defined in the Camera.Parameters class.
+     *
+     * @param flashMode A String constant specifying the flash mode.
+     */
+    public void setFlashMode(String flashMode) {
+        mCameraRunner.setFlashMode(flashMode);
+    }
+
+    /**
+     * Returns the current flash mode.
+     *
+     * This returns the currently running camera's flash-mode, or NULL if flash modes are not
+     * supported on that camera.
+     *
+     * @return The flash mode String, or NULL if flash modes are not supported.
+     */
+    public String getFlashMode() {
+        return mCameraRunner.getFlashMode();
+    }
+
+    /**
+     * Get the actual camera facing.
+     * Returns 0 if actual facing is not yet known.
+     */
+    public int getCameraFacing() {
+        return mCameraRunner.getCameraFacing();
+    }
+
+    /**
+     * Get the current camera rotation.
+     *
+     * Use this rotation if you want to snap pictures from the camera and need to rotate the
+     * picture to be up-right.
+     *
+     * @return the current camera rotation in degrees.
+     */
+    public int getCameraRotation() {
+        return mCameraRunner.getCameraRotation();
+    }
+
+    /**
+     * Specifies whether or not the camera supports hardware face detection.
+     * @return true, if the camera supports hardware face detection.
+     */
+    public boolean supportsHardwareFaceDetection() {
+        return mCameraRunner.supportsHardwareFaceDetection();
+    }
+
+    /**
+     * Returns the camera facing that is chosen when DONT_CARE is specified.
+     * Returns 0 if neither a front nor back camera could be found.
+     */
+    public static int getDefaultFacing() {
+        int camCount = Camera.getNumberOfCameras();
+        if (camCount == 0) {
+            return 0;
+        } else {
+            // Mirrors CameraRunnable.getCameraId(): camera 0 is the DONT_CARE choice.
+            CameraInfo cameraInfo = new CameraInfo();
+            Camera.getCameraInfo(0, cameraInfo);
+            return (cameraInfo.facing == CameraInfo.CAMERA_FACING_FRONT)
+                ? FACING_FRONT
+                : FACING_BACK;
+        }
+    }
+
+    /**
+     * Get the actual camera width.
+     * Returns 0 if actual width is not yet known.
+     */
+    public int getCameraWidth() {
+        return mCameraRunner.getCameraWidth();
+    }
+
+    /**
+     * Get the actual camera height.
+     * Returns 0 if actual height is not yet known.
+     */
+    public int getCameraHeight() {
+        return mCameraRunner.getCameraHeight();
+    }
+
+    /**
+     * Get the actual camera frame-rate.
+     * Returns 0 if actual frame-rate is not yet known.
+     */
+    public int getCameraFrameRate() {
+        return mCameraRunner.getCameraFrameRate();
+    }
+
+    /**
+     * Returns true if the camera can be started at this point.
+     */
+    public boolean canStart() {
+        return mCameraRunner.canStart();
+    }
+
+    /**
+     * Returns true if the camera is currently running.
+     */
+    public boolean isRunning() {
+        return mCameraRunner.isRunning();
+    }
+
+    /**
+     * Starts the camera (asynchronously, via the runner's event queue).
+     */
+    public void start() {
+        mCameraRunner.pushEvent(Event.START, true);
+    }
+
+    /**
+     * Stops the camera (asynchronously, via the runner's event queue).
+     */
+    public void stop() {
+        mCameraRunner.pushEvent(Event.STOP, true);
+    }
+
+    /**
+     * Stops the camera and waits until it is completely closed. Generally, this should not be
+     * called in the UI thread, but may be necessary if you need the camera to be closed before
+     * performing subsequent steps.
+     */
+    public void stopAndWait() {
+        mCameraRunner.pushEvent(Event.STOP, true);
+        try {
+            if (!mCameraLock.tryLock(MAX_CAMERA_WAIT_TIME, TimeUnit.SECONDS)) {
+                Log.w("CameraStreamer", "Time-out waiting for camera to close!");
+            }
+        } catch (InterruptedException e) {
+            Log.w("CameraStreamer", "Interrupted while waiting for camera to close!");
+        }
+        // NOTE(review): unlock() is reached even when tryLock failed or was interrupted —
+        // verify mCameraLock tolerates an unbalanced unlock.
+        mCameraLock.unlock();
+    }
+
+    /**
+     * Registers a listener to handle camera state changes.
+     */
+    public void addListener(CameraListener listener) {
+        mCameraRunner.addListener(listener);
+    }
+
+    /**
+     * Unregisters a listener to handle camera state changes.
+     */
+    public void removeListener(CameraListener listener) {
+        mCameraRunner.removeListener(listener);
+    }
+
+    /**
+     * Registers the frame-client with the camera.
+     * This MUST be called from the client thread!
+     */
+    public void registerClient(FrameClient client) {
+        mCameraRunner.getCamFrameHandler().registerClient(client);
+    }
+
+    /**
+     * Unregisters the frame-client with the camera.
+     * This MUST be called from the client thread!
+     */
+    public void unregisterClient(FrameClient client) {
+        mCameraRunner.getCamFrameHandler().unregisterClient(client);
+    }
+
+    /**
+     * Gets the latest camera frame for the client.
+     *
+     * This must be called from the same thread as the {@link #registerClient(FrameClient)} call!
+     * The frame passed in will be resized by the camera streamer to fit the camera frame.
+     * Returns false if the frame could not be grabbed. This may happen if the camera has been
+     * closed in the meantime, and its resources let go.
+     *
+     * @return true, if the frame was grabbed successfully.
+     */
+    public boolean getLatestFrame(FrameImage2D targetFrame) {
+        return mCameraRunner.grabFrame(targetFrame);
+    }
+
+    /**
+     * Expose the underlying android.hardware.Camera object.
+     * Use the returned object with care: some camera functions may break the functionality
+     * of CameraStreamer.
+     * @return the Camera object.
+     * @deprecated use {@link #lockCamera(Object)} / {@link #unlockCamera(Object)} instead.
+     */
+    @Deprecated
+    public Camera getCamera() {
+        return mCameraRunner.getCamera();
+    }
+
+    /**
+     * Obtain access to the underlying android.hardware.Camera object.
+     * This grants temporary access to the internal Camera handle. Once you are done using the
+     * handle you must call {@link #unlockCamera(Object)}. While you are holding the Camera,
+     * it will not be modified or released by the CameraStreamer. The Camera object returned is
+     * guaranteed to have the preview running.
+     *
+     * The CameraStreamer does not account for changes you make to the Camera. That is, if you
+     * change the Camera unexpectedly this may cause unintended behavior by the streamer.
+     *
+     * Note that the returned object may be null. This can happen when the CameraStreamer is not
+     * running, or is just transitioning to another Camera, such as during a switch from front to
+     * back Camera.
+     * @param context an object used as a context for locking and unlocking. lockCamera and
+     *   unlockCamera should use the same context object.
+     * @return The Camera object.
+     */
+    public Camera lockCamera(Object context) {
+        return mCameraRunner.lockCamera(context);
+    }
+
+    /**
+     * Release the acquired Camera object.
+     * @param context the context object that was used when lockCamera was called.
+     */
+    public void unlockCamera(Object context) {
+        mCameraRunner.unlockCamera(context);
+    }
+
+    /**
+     * Creates an instance of MediaRecorder to be used for the streamer.
+     * User should call the functions in the following sequence:<p>
+     *   {@link #createRecorder}<p>
+     *   {@link #startRecording}<p>
+     *   {@link #stopRecording}<p>
+     *   {@link #releaseRecorder}<p>
+     * @param path the output video path for the recorder
+     * @param profile the recording {@link CamcorderProfile} which has parameters indicating
+     *  the resolution, quality etc.
+     */
+    public void createRecorder(String path, CamcorderProfile profile) {
+        mCameraRunner.createRecorder(path, profile);
+    }
+
+    /** Releases the recorder and returns the Camera to the streamer. */
+    public void releaseRecorder() {
+        mCameraRunner.releaseRecorder();
+    }
+
+    /** Starts recording; requires a prior {@link #createRecorder} call. */
+    public void startRecording() {
+        mCameraRunner.startRecording();
+    }
+
+    /** Stops recording; requires a prior {@link #createRecorder} call. */
+    public void stopRecording() {
+        mCameraRunner.stopRecording();
+    }
+
+    /**
+     * Retrieve the ID of the currently used camera.
+     * @return the ID of the currently used camera.
+     */
+    public int getCameraId() {
+        return mCameraRunner.getCurrentCameraId();
+    }
+
+    /**
+     * @return The number of cameras available for streaming on this device.
+     */
+    public static int getNumberOfCameras() {
+        // Currently, this is just the number of cameras that are available on the device.
+        return Camera.getNumberOfCameras();
+    }
+
+    /** Package-private: instantiated by the MffContext framework. */
+    CameraStreamer(MffContext context) {
+        mCameraRunner = new CameraRunnable(context);
+    }
+
+    /** Halt is like stop, but may be resumed using restart(). */
+    void halt() {
+        mCameraRunner.pushEvent(Event.HALT, true);
+    }
+
+    /** Restart starts the camera only if previously halted. */
+    void restart() {
+        mCameraRunner.pushEvent(Event.RESTART, true);
+    }
+
+    /** True on pre-ICS (SDK &lt; 15) devices, which need a dummy SurfaceView for preview. */
+    static boolean requireDummySurfaceView() {
+        return VERSION.SDK_INT < 15;
+    }
+
+    /** Queues a TEARDOWN event; only effective once the camera is fully stopped. */
+    void tearDown() {
+        mCameraRunner.pushEvent(Event.TEARDOWN, true);
+    }
+}
+
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/ColorSpace.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/ColorSpace.java
new file mode 100644
index 0000000..f2bfe08
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/ColorSpace.java
@@ -0,0 +1,137 @@
+/*
+ * Copyright 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package androidx.media.filterfw;
+
+import java.nio.ByteBuffer;
+
+/**
+ * Utility functions to convert between color-spaces.
+ *
+ * Currently these methods are all CPU based native methods. These could be updated in the future
+ * to provide other implementations.
+ */
+public class ColorSpace {
+
+    /**
+     * Convert YUV420-Planar data to RGBA8888.
+     *
+     * The input data is expected to be laid out in 3 planes. The width x height Y plane, followed
+     * by the U and V planes, where each chroma value corresponds to a 2x2 luminance value block.
+     * YUV to RGB conversion is done using the ITU-R BT.601 transformation. The output buffer must
+     * be large enough to hold the data, and the dimensions must be multiples of 2.
+     *
+     * @param input data encoded in YUV420-Planar.
+     * @param output buffer to hold RGBA8888 data.
+     * @param width the width of the image (must be a multiple of 2)
+     * @param height the height of the image (must be a multiple of 2)
+     */
+    public static void convertYuv420pToRgba8888(
+            ByteBuffer input, ByteBuffer output, int width, int height) {
+        expectInputSize(input, (3 * width * height) / 2);
+        expectOutputSize(output, width * height * 4);
+        nativeYuv420pToRgba8888(input, output, width, height);
+    }
+
+    /**
+     * Convert ARGB8888 to RGBA8888.
+     *
+     * The input data is expected to be encoded in 8-bit interleaved ARGB channels. The output
+     * buffer must be large enough to hold the data. The output buffer may be the same as the
+     * input buffer.
+     *
+     * @param input data encoded in ARGB8888.
+     * @param output buffer to hold RGBA8888 data.
+     * @param width the width of the image
+     * @param height the height of the image
+     */
+    public static void convertArgb8888ToRgba8888(
+            ByteBuffer input, ByteBuffer output, int width, int height) {
+        expectInputSize(input, width * height * 4);
+        expectOutputSize(output, width * height * 4);
+        nativeArgb8888ToRgba8888(input, output, width, height);
+    }
+
+    /**
+     * Convert RGBA8888 to HSVA8888.
+     *
+     * The input data is expected to be encoded in 8-bit interleaved RGBA channels. The output
+     * buffer must be large enough to hold the data. The output buffer may be the same as the
+     * input buffer.
+     *
+     * @param input data encoded in RGBA8888.
+     * @param output buffer to hold HSVA8888 data.
+     * @param width the width of the image
+     * @param height the height of the image
+     */
+    public static void convertRgba8888ToHsva8888(
+            ByteBuffer input, ByteBuffer output, int width, int height) {
+        expectInputSize(input, width * height * 4);
+        expectOutputSize(output, width * height * 4);
+        nativeRgba8888ToHsva8888(input, output, width, height);
+    }
+
+    /**
+     * Convert RGBA8888 to YCbCrA8888.
+     *
+     * The input data is expected to be encoded in 8-bit interleaved RGBA channels. The output
+     * buffer must be large enough to hold the data. The output buffer may be the same as the
+     * input buffer.
+     *
+     * @param input data encoded in RGBA8888.
+     * @param output buffer to hold YCbCrA8888 data.
+     * @param width the width of the image
+     * @param height the height of the image
+     */
+    public static void convertRgba8888ToYcbcra8888(
+            ByteBuffer input, ByteBuffer output, int width, int height) {
+        expectInputSize(input, width * height * 4);
+        expectOutputSize(output, width * height * 4);
+        nativeRgba8888ToYcbcra8888(input, output, width, height);
+    }
+
+    private static void expectInputSize(ByteBuffer input, int expectedSize) {
+        if (input.remaining() < expectedSize) {
+            throw new IllegalArgumentException("Input buffer's size does not fit given width "
+                    + "and height! Expected: " + expectedSize + ", Got: " + input.remaining()
+                    + ".");
+        }
+    }
+
+    private static void expectOutputSize(ByteBuffer output, int expectedSize) {
+        if (output.remaining() < expectedSize) {
+            throw new IllegalArgumentException("Output buffer's size does not fit given width "
+                    + "and height! Expected: " + expectedSize + ", Got: " + output.remaining()
+                    + ".");
+        }
+    }
+
+    private static native void nativeYuv420pToRgba8888(
+            ByteBuffer input, ByteBuffer output, int width, int height);
+
+    private static native void nativeArgb8888ToRgba8888(
+            ByteBuffer input, ByteBuffer output, int width, int height);
+
+    private static native void nativeRgba8888ToHsva8888(
+            ByteBuffer input, ByteBuffer output, int width, int height);
+
+    private static native void nativeRgba8888ToYcbcra8888(
+            ByteBuffer input, ByteBuffer output, int width, int height);
+
+    static {
+        System.loadLibrary("smartcamera_jni");
+    }
+
+}
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/ColorfulnessFilter.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/ColorfulnessFilter.java
new file mode 100644
index 0000000..5bdf4af
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/ColorfulnessFilter.java
@@ -0,0 +1,93 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Compute a colorfulness score from a chroma histogram.
+
+package androidx.media.filterpacks.colorspace;
+
+import androidx.media.filterfw.FrameValue;
+
+import androidx.media.filterfw.Filter;
+import androidx.media.filterfw.Frame;
+import androidx.media.filterfw.FrameBuffer2D;
+import androidx.media.filterfw.FrameType;
+import androidx.media.filterfw.MffContext;
+import androidx.media.filterfw.OutputPort;
+import androidx.media.filterfw.Signature;
+
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.nio.FloatBuffer;
+
+/**
+ * ColorfulnessFilter takes in a particular Chroma histogram generated by NewChromaHistogramFilter
+ * and compute the colorfulness based on the entropy in Hue space.
+ */
+public final class ColorfulnessFilter extends Filter {
+
+    public ColorfulnessFilter(MffContext context, String name) {
+        super(context, name);
+    }
+
+    @Override
+    public Signature getSignature() {
+        FrameType dataIn = FrameType.buffer2D(FrameType.ELEMENT_FLOAT32);
+        return new Signature()
+            .addInputPort("histogram", Signature.PORT_REQUIRED, dataIn)
+            .addOutputPort("score", Signature.PORT_REQUIRED, FrameType.single(float.class))
+            .disallowOtherPorts();
+    }
+
+    @Override
+    protected void onProcess() {
+        FrameBuffer2D histogramFrame =
+                getConnectedInputPort("histogram").pullFrame().asFrameBuffer2D();
+        ByteBuffer byteBuffer = histogramFrame.lockBytes(Frame.MODE_READ);
+        byteBuffer.order(ByteOrder.nativeOrder());
+        FloatBuffer histogramBuffer = byteBuffer.asFloatBuffer();
+        histogramBuffer.rewind();
+
+        // Create a hue histogram from hue-saturation histogram
+        int hueBins = histogramFrame.getWidth();
+        int saturationBins = histogramFrame.getHeight() - 1;
+        float[] hueHistogram = new float[hueBins];
+        float total = 0;
+        for (int r = 0; r < saturationBins; ++r) {
+            float weight = (float) Math.pow(2, r);
+            for (int c = 0; c < hueBins; c++) {
+                float value = histogramBuffer.get() * weight;
+                hueHistogram[c] += value;
+                total += value;
+            }
+        }
+        float colorful = 0f;
+        for (int c = 0; c < hueBins; ++c) {
+            float value = hueHistogram[c] / total;
+            if (value > 0f) {
+                colorful -= value * ((float) Math.log(value));
+            }
+        }
+
+        colorful /= Math.log(2);
+
+        histogramFrame.unlock();
+        OutputPort outPort = getConnectedOutputPort("score");
+        FrameValue frameValue = outPort.fetchAvailableFrame(null).asFrameValue();
+        frameValue.setValue(colorful);
+        outPort.pushFrame(frameValue);
+    }
+
+}
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/CropFilter.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/CropFilter.java
new file mode 100644
index 0000000..91fe21c
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/CropFilter.java
@@ -0,0 +1,165 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package androidx.media.filterpacks.transform;
+
+import android.graphics.Bitmap;
+import android.graphics.Canvas;
+import android.graphics.Matrix;
+import android.graphics.Paint;
+import android.util.FloatMath;
+
+import androidx.media.filterfw.Filter;
+import androidx.media.filterfw.FrameImage2D;
+import androidx.media.filterfw.FrameType;
+import androidx.media.filterfw.ImageShader;
+import androidx.media.filterfw.InputPort;
+import androidx.media.filterfw.MffContext;
+import androidx.media.filterfw.OutputPort;
+import androidx.media.filterfw.Signature;
+import androidx.media.filterfw.geometry.Quad;
+
+public class CropFilter extends Filter {
+
+    private Quad mCropRect = Quad.fromRect(0f, 0f, 1f, 1f);
+    private int mOutputWidth = 0;
+    private int mOutputHeight = 0;
+    private ImageShader mShader;
+    private boolean mUseMipmaps = false;
+    private FrameImage2D mPow2Frame = null;
+
+    public CropFilter(MffContext context, String name) {
+        super(context, name);
+    }
+
+    @Override
+    public Signature getSignature() {
+        FrameType imageIn = FrameType.image2D(FrameType.ELEMENT_RGBA8888, FrameType.READ_GPU);
+        FrameType imageOut = FrameType.image2D(FrameType.ELEMENT_RGBA8888, FrameType.WRITE_GPU);
+        return new Signature()
+            .addInputPort("image", Signature.PORT_REQUIRED, imageIn)
+            .addInputPort("cropRect", Signature.PORT_REQUIRED, FrameType.single(Quad.class))
+            .addInputPort("outputWidth", Signature.PORT_OPTIONAL, FrameType.single(int.class))
+            .addInputPort("outputHeight", Signature.PORT_OPTIONAL, FrameType.single(int.class))
+            .addInputPort("useMipmaps", Signature.PORT_OPTIONAL, FrameType.single(boolean.class))
+            .addOutputPort("image", Signature.PORT_REQUIRED, imageOut)
+            .disallowOtherPorts();
+    }
+
+    @Override
+    public void onInputPortOpen(InputPort port) {
+        if (port.getName().equals("cropRect")) {
+            port.bindToFieldNamed("mCropRect");
+            port.setAutoPullEnabled(true);
+        } else if (port.getName().equals("outputWidth")) {
+            port.bindToFieldNamed("mOutputWidth");
+            port.setAutoPullEnabled(true);
+        } else if (port.getName().equals("outputHeight")) {
+            port.bindToFieldNamed("mOutputHeight");
+            port.setAutoPullEnabled(true);
+        } else  if (port.getName().equals("useMipmaps")) {
+            port.bindToFieldNamed("mUseMipmaps");
+            port.setAutoPullEnabled(true);
+        }
+    }
+
+    @Override
+    protected void onPrepare() {
+        if (isOpenGLSupported()) {
+            mShader = ImageShader.createIdentity();
+        }
+    }
+
+    @Override
+    protected void onProcess() {
+        OutputPort outPort = getConnectedOutputPort("image");
+
+        // Pull input frame
+        FrameImage2D inputImage = getConnectedInputPort("image").pullFrame().asFrameImage2D();
+        int[] inDims = inputImage.getDimensions();
+        int[] croppedDims = { (int)FloatMath.ceil(mCropRect.xEdge().length() * inDims[0]),
+                              (int)FloatMath.ceil(mCropRect.yEdge().length() * inDims[1]) };
+        int[] outDims = { getOutputWidth(croppedDims[0], croppedDims[1]),
+                getOutputHeight(croppedDims[0], croppedDims[1]) };
+        FrameImage2D outputImage = outPort.fetchAvailableFrame(outDims).asFrameImage2D();
+
+        if (isOpenGLSupported()) {
+            FrameImage2D sourceFrame;
+            Quad sourceQuad = null;
+            boolean scaleDown = (outDims[0] < croppedDims[0]) || (outDims[1] < croppedDims[1]);
+            if (scaleDown && mUseMipmaps) {
+                mPow2Frame = TransformUtils.makeMipMappedFrame(mPow2Frame, croppedDims);
+                int[] extDims = mPow2Frame.getDimensions();
+                float targetWidth = croppedDims[0] / (float)extDims[0];
+                float targetHeight = croppedDims[1] / (float)extDims[1];
+                Quad targetQuad = Quad.fromRect(0f, 0f, targetWidth, targetHeight);
+                mShader.setSourceQuad(mCropRect);
+                mShader.setTargetQuad(targetQuad);
+                mShader.process(inputImage, mPow2Frame);
+                TransformUtils.generateMipMaps(mPow2Frame);
+                sourceFrame = mPow2Frame;
+                sourceQuad = targetQuad;
+            } else {
+                sourceFrame = inputImage;
+                sourceQuad = mCropRect;
+            }
+
+            mShader.setSourceQuad(sourceQuad);
+            mShader.setTargetRect(0f, 0f, 1f, 1f);
+            mShader.process(sourceFrame, outputImage);
+        } else {
+            // Convert quads to canvas coordinate space
+            Quad sourceQuad = mCropRect.scale2(inDims[0], inDims[1]);
+            Quad targetQuad = Quad.fromRect(0f, 0f, inDims[0], inDims[1]);
+
+            // Calculate transform for crop
+            Matrix transform = Quad.getTransform(sourceQuad, targetQuad);
+            transform.postScale(outDims[0] / (float)inDims[0], outDims[1] / (float)inDims[1]);
+
+            // Create target canvas
+            Bitmap.Config config = Bitmap.Config.ARGB_8888;
+            Bitmap cropped = Bitmap.createBitmap(outDims[0], outDims[1], config);
+            Canvas canvas = new Canvas(cropped);
+
+            // Draw source bitmap into target canvas
+            Paint paint = new Paint();
+            paint.setFilterBitmap(true);
+            Bitmap sourceBitmap = inputImage.toBitmap();
+            canvas.drawBitmap(sourceBitmap, transform, paint);
+
+            // Assign bitmap to output frame
+            outputImage.setBitmap(cropped);
+        }
+
+        outPort.pushFrame(outputImage);
+    }
+
+    @Override
+    protected void onClose() {
+        if (mPow2Frame != null){
+            mPow2Frame.release();
+            mPow2Frame = null;
+        }
+    }
+
+    protected int getOutputWidth(int inWidth, int inHeight) {
+        return mOutputWidth <= 0 ? inWidth : mOutputWidth;
+    }
+
+    protected int getOutputHeight(int inWidth, int inHeight) {
+        return mOutputHeight <= 0 ? inHeight : mOutputHeight;
+    }
+}
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/Filter.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/Filter.java
new file mode 100644
index 0000000..9e2eb92
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/Filter.java
@@ -0,0 +1,766 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package androidx.media.filterfw;
+
+import android.os.SystemClock;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.concurrent.atomic.AtomicBoolean;
+
+/**
+ * Filters are the processing nodes of the filter graphs.
+ *
+ * Filters may have any number of input and output ports, through which the data frames flow.
+ * TODO: More documentation on filter life-cycle, port and type checking, GL and RenderScript, ...
+ */
+public abstract class Filter {
+
+    private static class State {
+        private static final int STATE_UNPREPARED = 1;
+        private static final int STATE_PREPARED = 2;
+        private static final int STATE_OPEN = 3;
+        private static final int STATE_CLOSED = 4;
+        private static final int STATE_DESTROYED = 5;
+
+        public int current = STATE_UNPREPARED;
+
+        public synchronized boolean check(int state) {
+            return current == state;
+        }
+
+    }
+
+    private final int REQUEST_FLAG_NONE = 0;
+    private final int REQUEST_FLAG_CLOSE = 1;
+
+    private String mName;
+    private MffContext mContext;
+    private FilterGraph mFilterGraph;
+
+    private State mState = new State();
+    private int mRequests = REQUEST_FLAG_NONE;
+
+    private int mMinimumAvailableInputs = 1;
+    private int mMinimumAvailableOutputs = 1;
+
+    private int mScheduleCount = 0;
+    private long mLastScheduleTime = 0;
+
+    private boolean mIsActive = true;
+    private AtomicBoolean mIsSleeping = new AtomicBoolean(false);
+
+    private long mCurrentTimestamp = Frame.TIMESTAMP_NOT_SET;
+
+    private HashMap<String, InputPort> mConnectedInputPorts = new HashMap<String, InputPort>();
+    private HashMap<String, OutputPort> mConnectedOutputPorts = new HashMap<String, OutputPort>();
+
+    private InputPort[] mConnectedInputPortArray = null;
+    private OutputPort[] mConnectedOutputPortArray = null;
+
+    private ArrayList<Frame> mAutoReleaseFrames = new ArrayList<Frame>();
+
+
+    /**
+     * Constructs a new filter.
+     * A filter is bound to a specific MffContext. Its name can be any String value, but it must
+     * be unique within the filter graph.
+     *
+     * Note that names starting with "$" are reserved for internal use, and should not be used.
+     *
+     * @param context The MffContext in which the filter will live.
+     * @param name The name of the filter.
+     */
+    protected Filter(MffContext context, String name) {
+        mName = name;
+        mContext = context;
+    }
+
+    /**
+     * Checks whether the filter class is available on this platform.
+     * Some filters may not be installed on all platforms and can therefore not be instantiated.
+     * Before instantiating a filter, check if it is available by using this method.
+     *
+     * This method uses the shared FilterFactory to check whether the filter class is available.
+     *
+     * @param filterClassName The fully qualified class name of the Filter class.
+     * @return true, if filters of the specified class name are available.
+     */
+    public static final boolean isAvailable(String filterClassName) {
+        return FilterFactory.sharedFactory().isFilterAvailable(filterClassName);
+    }
+
+    /**
+     * Returns the name of this filter.
+     *
+     * @return the name of the filter (specified during construction).
+     */
+    public String getName() {
+        return mName;
+    }
+
+    /**
+     * Returns the signature of this filter.
+     *
+     * Subclasses should override this and return their filter signature. The default
+     * implementation returns a generic signature with no constraints.
+     *
+     * This method may be called at any time.
+     *
+     * @return the Signature instance for this filter.
+     */
+    public Signature getSignature() {
+        return new Signature();
+    }
+
+    /**
+     * Returns the MffContext that the filter resides in.
+     *
+     * @return the MffContext of the filter.
+     */
+    public MffContext getContext() {
+        return mContext;
+    }
+
+    /**
+     * Returns true, if the filter is active.
+     * TODO: thread safety?
+     *
+     * @return true, if the filter is active.
+     */
+    public boolean isActive() {
+        return mIsActive;
+    }
+
+    /**
+     * Activates the current filter.
+     * Only active filters can be scheduled for execution. This method can only be called if the
+     * GraphRunner that is executing the filter is stopped or paused.
+     */
+    public void activate() {
+        assertIsPaused();
+        if (!mIsActive) {
+            mIsActive = true;
+        }
+    }
+
+    /**
+     * Deactivates the current filter.
+     * Only active filters can be scheduled for execution. This method can only be called if the
+     * GraphRunner that is executing the filter is stopped or paused.
+     */
+    public void deactivate() {
+        // TODO: Support close-on-deactivate (must happen in processing thread).
+        assertIsPaused();
+        if (mIsActive) {
+            mIsActive = false;
+        }
+    }
+
+    /**
+     * Returns the filter's set of input ports.
+     * Note that this contains only the *connected* input ports. To retrieve all
+     * input ports that this filter accepts, one has to go via the filter's Signature.
+     *
+     * @return An array containing all connected input ports.
+     */
+    public final InputPort[] getConnectedInputPorts() {
+        return mConnectedInputPortArray;
+    }
+
+    /**
+     * Returns the filter's set of output ports.
+     * Note that this contains only the *connected* output ports. To retrieve all
+     * output ports that this filter provides, one has to go via the filter's Signature.
+     *
+     * @return An array containing all connected output ports.
+     */
+    public final OutputPort[] getConnectedOutputPorts() {
+        return mConnectedOutputPortArray;
+    }
+
+    /**
+     * Returns the input port with the given name.
+     * Note that this can only access the *connected* input ports. To retrieve all
+     * input ports that this filter accepts, one has to go via the filter's Signature.
+     *
+     * @return the input port with the specified name, or null if no connected input port
+     *  with this name exists.
+     */
+    public final InputPort getConnectedInputPort(String name) {
+        return mConnectedInputPorts.get(name);
+    }
+
+    /**
+     * Returns the output port with the given name.
+     * Note that this can only access the *connected* output ports. To retrieve all
+     * output ports that this filter provides, one has to go via the filter's Signature.
+     *
+     * @return the output port with the specified name, or null if no connected output port
+     *  with this name exists.
+     */
+    public final OutputPort getConnectedOutputPort(String name) {
+        return mConnectedOutputPorts.get(name);
+    }
+
+    /**
+     * Called when an input port has been attached in the graph.
+     * Override this method, in case you want to be informed of any connected input ports, or make
+     * modifications to them. Note that you may not assume that any other ports have been attached
+     * already. If you have dependencies on other ports, override
+     * {@link #onInputPortOpen(InputPort)}. The default implementation does nothing.
+     *
+     * @param port The InputPort instance that was attached.
+     */
+    protected void onInputPortAttached(InputPort port) {
+    }
+
+    /**
+     * Called when an output port has been attached in the graph.
+     * Override this method, in case you want to be informed of any connected output ports, or make
+     * modifications to them. Note that you may not assume that any other ports have been attached
+     * already. If you have dependencies on other ports, override
+     * {@link #onOutputPortOpen(OutputPort)}. The default implementation does nothing.
+     *
+     * @param port The OutputPort instance that was attached.
+     */
+    protected void onOutputPortAttached(OutputPort port) {
+    }
+
+    /**
+     * Called when an input port is opened on this filter.
+     * Input ports are opened by the data producer, that is the filter that is connected to an
+     * input port. Override this if you need to make modifications to the port before processing
+     * begins. Note, that this is only called if the connected filter is scheduled. You may assume
+     * that all ports are attached when this is called.
+     *
+     * @param port The InputPort instance that was opened.
+     */
+    protected void onInputPortOpen(InputPort port) {
+    }
+
+    /**
+     * Called when an output port is opened on this filter.
+     * Output ports are opened when the filter they are attached to is opened. Override this if you
+     * need to make modifications to the port before processing begins. Note, that this is only
+     * called if the filter is scheduled. You may assume that all ports are attached when this is
+     * called.
+     *
+     * @param port The OutputPort instance that was opened.
+     */
+    protected void onOutputPortOpen(OutputPort port) {
+    }
+
+    /**
+     * Returns true, if the filter is currently open.
+     * @return true, if the filter is currently open.
+     */
+    public final boolean isOpen() {
+        return mState.check(State.STATE_OPEN);
+    }
+
+    @Override
+    public String toString() {
+        return mName + " (" + getClass().getSimpleName() + ")";
+    }
+
+    /**
+     * Called when filter is prepared.
+     * Subclasses can override this to prepare the filter for processing. This method gets called
+     * once only just before the filter is scheduled for processing the first time.
+     *
+     * @see #onTearDown()
+     */
+    protected void onPrepare() {
+    }
+
+    /**
+     * Called when the filter is opened.
+     * Subclasses can override this to perform any kind of initialization just before processing
+     * starts. This method may be called any number of times, but is always balanced with an
+     * {@link #onClose()} call.
+     *
+     * @see #onClose()
+     */
+    protected void onOpen() {
+    }
+
+    /**
+     * Called to perform processing on Frame data.
+     * This is the only method subclasses must override. It is called every time the filter is
+     * ready for processing. Typically this is when there is input data to process and available
+     * output ports, but may differ depending on the port configuration.
+     */
+    protected abstract void onProcess();
+
+    /**
+     * Called when the filter is closed.
+     * Subclasses can override this to perform any kind of post-processing steps. Processing will
+     * not resume until {@link #onOpen()} is called again. This method is only called if the filter
+     * is open.
+     *
+     * @see #onOpen()
+     */
+    protected void onClose() {
+    }
+
+    /**
+     * Called when the filter is torn down.
+     * Subclasses can override this to perform clean-up tasks just before the filter is disposed of.
+     * It is called when the filter graph that the filter belongs to is disposed.
+     *
+     * @see #onPrepare()
+     */
+    protected void onTearDown() {
+    }
+
+    /**
+     * Check if the input conditions are met in order to schedule this filter.
+     *
+     * This is used by {@link #canSchedule()} to determine if the input-port conditions given by
+     * the filter are met. Subclasses that override scheduling behavior can make use of this
+     * function.
+     *
+     * @return true, if the filter's input conditions are met.
+     */
+    protected boolean inputConditionsMet() {
+        if (mConnectedInputPortArray.length > 0) {
+            int inputFrames = 0;
+            // [Non-iterator looping]
+            for (int i = 0; i < mConnectedInputPortArray.length; ++i) {
+                if (!mConnectedInputPortArray[i].conditionsMet()) {
+                    return false;
+                } else if (mConnectedInputPortArray[i].hasFrame()) {
+                    ++inputFrames;
+                }
+            }
+            if (inputFrames < mMinimumAvailableInputs) {
+                return false;
+            }
+        }
+        return true;
+    }
+
+    /**
+     * Check if the output conditions are met in order to schedule this filter.
+     *
+     * This is used by {@link #canSchedule()} to determine if the output-port conditions given by
+     * the filter are met. Subclasses that override scheduling behavior can make use of this
+     * function.
+     *
+     * @return true, if the filter's output conditions are met.
+     */
+    protected boolean outputConditionsMet() {
+        if (mConnectedOutputPortArray.length > 0) {
+            int availableOutputs = 0;
+            for (int i = 0; i < mConnectedOutputPortArray.length; ++i) {
+                if (!mConnectedOutputPortArray[i].conditionsMet()) {
+                    return false;
+                } else if (mConnectedOutputPortArray[i].isAvailable()) {
+                    ++availableOutputs;
+                }
+            }
+            if (availableOutputs < mMinimumAvailableOutputs) {
+                return false;
+            }
+        }
+        return true;
+    }
+
+    /**
+     * Check if the Filter is in a state so that it can be scheduled.
+     *
+     * When overriding the filter's {@link #canSchedule()} method, you should never allow
+     * scheduling a filter that is not in a schedulable state. This will result in undefined
+     * behavior.
+     *
+     * @return true, if the filter is in a schedulable state.
+     */
+    protected boolean inSchedulableState() {
+        return (mIsActive && !mState.check(State.STATE_CLOSED));
+    }
+
+    /**
+     * Returns true if the filter can be currently scheduled.
+     *
+     * Filters may override this method if they depend on custom factors that determine whether
+     * they can be scheduled or not. The scheduler calls this method to decide whether a filter
+     * may be scheduled for execution; a true result does not guarantee that it will be executed.
+     * It is strongly recommended to call super's implementation to make sure your filter can be
+     * scheduled based on its state, input and output ports.
+     *
+     * @return true, if the filter can be scheduled.
+     */
+    protected boolean canSchedule() {
+        // Checks run in the same order as before: state, then input, then output conditions.
+        if (!inSchedulableState()) {
+            return false;
+        }
+        return inputConditionsMet() && outputConditionsMet();
+    }
+
+    /**
+     * Returns the current FrameManager instance.
+     * @return the current FrameManager instance or null if there is no FrameManager set up yet.
+     */
+    protected final FrameManager getFrameManager() {
+        return mFilterGraph.mRunner != null ? mFilterGraph.mRunner.getFrameManager() : null;
+    }
+
+    /**
+     * Returns whether the GraphRunner for this filter is running.
+     *
+     * Generally, this method should not be used for performing operations that need to be carried
+     * out before running begins. Use {@link #performPreparation(Runnable)} for this.
+     *
+     * @return true, if the GraphRunner for this filter is running.
+     */
+    protected final boolean isRunning() {
+        return mFilterGraph != null && mFilterGraph.mRunner != null
+                && mFilterGraph.mRunner.isRunning();
+    }
+
+    /**
+     * Performs operations before the filter is running.
+     *
+     * Use this method when your filter requires to perform operations while the graph is not
+     * running. The filter will not be scheduled for execution until your method has completed
+     * execution.
+     *
+     * @param runnable the operations to perform while the filter is not open.
+     * @return true, if the runnable was executed; false, if the filter is already open and the
+     *         runnable was not run.
+     */
+    protected final boolean performPreparation(Runnable runnable) {
+        synchronized (mState) {
+            if (mState.current == State.STATE_OPEN) {
+                return false;
+            } else {
+                runnable.run();
+                return true;
+            }
+        }
+    }
+
+    /**
+     * Request that this filter be closed after the current processing step.
+     *
+     * Implementations may call this within their {@link #onProcess()} calls to indicate that the
+     * filter is done processing and wishes to be closed. After such a request the filter will be
+     * closed and no longer receive {@link #onProcess()} calls.
+     *
+     * @see #onClose()
+     * @see #onProcess()
+     */
+    protected final void requestClose() {
+        // The flag is picked up in execute() after onProcess() returns (see processRequests()).
+        mRequests |= REQUEST_FLAG_CLOSE;
+    }
+
+    /**
+     * Sets the minimum number of input frames required to process.
+     * A filter will not be scheduled unless at least a certain number of input frames are available
+     * on the input ports. This is only relevant if the filter has input ports and is not waiting on
+     * all ports.
+     * The default value is 1.
+     *
+     * @param count the minimum number of frames required to process.
+     * @see #getMinimumAvailableInputs()
+     * @see #setMinimumAvailableOutputs(int)
+     * @see InputPort#setWaitsForFrame(boolean)
+     */
+    protected final void setMinimumAvailableInputs(int count) {
+        // No validation is performed here; callers are expected to pass a positive count.
+        mMinimumAvailableInputs = count;
+    }
+
+    /**
+     * Returns the minimum number of input frames required for this filter to process.
+     * The default value is 1.
+     *
+     * @return the minimum number of input frames required to process.
+     * @see #setMinimumAvailableInputs(int)
+     */
+    protected final int getMinimumAvailableInputs() {
+        return mMinimumAvailableInputs;
+    }
+
+    /**
+     * Sets the minimum number of available output ports required to process.
+     * A filter will not be scheduled unless at least a certain number of output ports are
+     * available. This is only relevant if the filter has output ports and is not waiting on all
+     * ports. The default value is 1.
+     *
+     * @param count the minimum number of available output ports required to process.
+     * @see #getMinimumAvailableOutputs()
+     * @see #setMinimumAvailableInputs(int)
+     * @see OutputPort#setWaitsUntilAvailable(boolean)
+     */
+    protected final void setMinimumAvailableOutputs(int count) {
+        mMinimumAvailableOutputs = count;
+    }
+
+    /**
+     * Returns the minimum number of available outputs required for this filter to process.
+     * The default value is 1.
+     *
+     * @return the minimum number of available outputs required to process.
+     * @see #setMinimumAvailableOutputs(int)
+     */
+    protected final int getMinimumAvailableOutputs() {
+        return mMinimumAvailableOutputs;
+    }
+
+    /**
+     * Puts the filter to sleep so that it is no longer scheduled.
+     * To resume scheduling the filter another thread must call wakeUp() on this filter.
+     * @see #wakeUp()
+     */
+    protected final void enterSleepState() {
+        // The flag is cleared again by wakeUp() and by performClose().
+        mIsSleeping.set(true);
+    }
+
+    /**
+     * Wakes the filter and resumes scheduling.
+     * This is generally called from another thread to signal that this filter should resume
+     * processing. Does nothing if filter is not sleeping.
+     */
+    protected final void wakeUp() {
+        if (mIsSleeping.getAndSet(false)) {
+            if (isRunning()) {
+                mFilterGraph.mRunner.signalWakeUp();
+            }
+        }
+    }
+
+    /**
+     * Returns whether this Filter is allowed to use OpenGL.
+     *
+     * Filters may use OpenGL if the MffContext supports OpenGL and its GraphRunner allows it.
+     *
+     * @return true, if this Filter is allowed to use OpenGL.
+     */
+    protected final boolean isOpenGLSupported() {
+        return mFilterGraph.mRunner.isOpenGLSupported();
+    }
+
+    /**
+     * Connect an output port to an input port of another filter.
+     * Connects the output port with the specified name to the input port with the specified name
+     * of the specified filter. If the input or output ports do not exist already, they are
+     * automatically created and added to the respective filter.
+     *
+     * @param outputName the name of this filter's output port to connect from.
+     * @param targetFilter the filter that owns the input port to connect to.
+     * @param inputName the name of the target filter's input port to connect to.
+     * @throws RuntimeException if either of the two ports is connected already.
+     */
+    final void connect(String outputName, Filter targetFilter, String inputName) {
+        // Make sure not connected already
+        if (getConnectedOutputPort(outputName) != null) {
+            throw new RuntimeException("Attempting to connect already connected output port '"
+                + outputName + "' of filter " + this + "'!");
+        } else if (targetFilter.getConnectedInputPort(inputName) != null) {
+            throw new RuntimeException("Attempting to connect already connected input port '"
+                + inputName + "' of filter " + targetFilter + "'!");
+        }
+
+        // Establish connection
+        InputPort inputPort = targetFilter.newInputPort(inputName);
+        OutputPort outputPort = newOutputPort(outputName);
+        outputPort.setTarget(inputPort);
+
+        // Fire attachment callbacks
+        targetFilter.onInputPortAttached(inputPort);
+        onOutputPortAttached(outputPort);
+
+        // Update array of ports (which is maintained for more efficient access)
+        updatePortArrays();
+    }
+
+    /** Returns the map from port name to connected input port. */
+    final Map<String, InputPort> getConnectedInputPortMap() {
+        return mConnectedInputPorts;
+    }
+
+    /** Returns the map from port name to connected output port. */
+    final Map<String, OutputPort> getConnectedOutputPortMap() {
+        return mConnectedOutputPorts;
+    }
+
+    final void execute() {
+        synchronized (mState) {
+            // Pull frames from any connected input ports that have auto-pulling enabled.
+            autoPullInputs();
+            mLastScheduleTime = SystemClock.elapsedRealtime();
+            // Advance the lifecycle as far as needed in this single call:
+            // unprepared -> prepared -> open -> process. Each stage falls through to the next.
+            if (mState.current == State.STATE_UNPREPARED) {
+                onPrepare();
+                mState.current = State.STATE_PREPARED;
+            }
+            if (mState.current == State.STATE_PREPARED) {
+                openPorts();
+                onOpen();
+                mState.current = State.STATE_OPEN;
+            }
+            if (mState.current == State.STATE_OPEN) {
+                onProcess();
+                // Honor requests (such as requestClose()) made during onProcess().
+                if (mRequests != REQUEST_FLAG_NONE) {
+                    processRequests();
+                }
+            }
+        }
+        // Release auto-release frames outside of the state lock.
+        autoReleaseFrames();
+        ++mScheduleCount;
+    }
+
+    final void performClose() {
+        synchronized (mState) {
+            // Only an open filter can be closed. Closing also clears the sleep flag and
+            // resets the current timestamp to its unset sentinel.
+            if (mState.current == State.STATE_OPEN) {
+                onClose();
+                mIsSleeping.set(false);
+                mState.current = State.STATE_CLOSED;
+                mCurrentTimestamp = Frame.TIMESTAMP_NOT_SET;
+            }
+        }
+    }
+
+    final void softReset() {
+        synchronized (mState) {
+            // Close the filter (if open) and return it to the prepared state so that it can
+            // be opened and processed again.
+            performClose();
+            if (mState.current == State.STATE_CLOSED) {
+                mState.current = State.STATE_PREPARED;
+            }
+        }
+    }
+
+    final void performTearDown() {
+        synchronized (mState) {
+            // An open filter must be closed before it may be torn down.
+            if (mState.current == State.STATE_OPEN) {
+                throw new RuntimeException("Attempting to tear-down filter " + this + " which is "
+                    + "in an open state!");
+            } else if (mState.current != State.STATE_DESTROYED
+                    && mState.current != State.STATE_UNPREPARED) {
+                // Tear-down is skipped for filters that were never prepared or are
+                // destroyed already.
+                onTearDown();
+                mState.current = State.STATE_DESTROYED;
+            }
+        }
+    }
+
+    /** Associates this filter with the given graph and refreshes the cached port arrays. */
+    final void insertIntoFilterGraph(FilterGraph graph) {
+        mFilterGraph = graph;
+        updatePortArrays();
+    }
+
+    /** Returns how many times this filter has been executed since the last reset. */
+    final int getScheduleCount() {
+        return mScheduleCount;
+    }
+
+    /** Resets the execution counter reported by {@code getScheduleCount()} to zero. */
+    final void resetScheduleCount() {
+        mScheduleCount = 0;
+    }
+
+    final void openPorts() {
+        // Opening the output ports will open the connected input ports
+        // (openOutputPort assigns one shared queue to both endpoints).
+        for (OutputPort outputPort : mConnectedOutputPorts.values()) {
+            openOutputPort(outputPort);
+        }
+    }
+
+    /** Registers a frame to be released automatically after the current processing step. */
+    final void addAutoReleaseFrame(Frame frame) {
+        mAutoReleaseFrames.add(frame);
+    }
+
+    /** Returns the largest timestamp pulled so far, or Frame.TIMESTAMP_NOT_SET. */
+    final long getCurrentTimestamp() {
+        return mCurrentTimestamp;
+    }
+
+    final void onPulledFrameWithTimestamp(long timestamp) {
+        if (timestamp > mCurrentTimestamp || mCurrentTimestamp == Frame.TIMESTAMP_NOT_SET) {
+            mCurrentTimestamp = timestamp;
+        }
+    }
+
+    final void openOutputPort(OutputPort outPort) {
+        // Only open ports that do not have a queue assigned yet.
+        if (outPort.getQueue() == null) {
+            try {
+                // Let both endpoints configure the queue builder, then share a single queue
+                // between the output port and its connected input port.
+                FrameQueue.Builder builder = new FrameQueue.Builder();
+                InputPort inPort = outPort.getTarget();
+                outPort.onOpen(builder);
+                inPort.onOpen(builder);
+                Filter targetFilter = inPort.getFilter();
+                // Queue name encodes both endpoints, e.g. "src[out] -> dst[in]".
+                String queueName = mName + "[" + outPort.getName() + "] -> " + targetFilter.mName
+                        + "[" + inPort.getName() + "]";
+                FrameQueue queue = builder.build(queueName);
+                outPort.setQueue(queue);
+                inPort.setQueue(queue);
+            } catch (RuntimeException e) {
+                throw new RuntimeException("Could not open output port " + outPort + "!", e);
+            }
+        }
+    }
+
+    /** Returns whether this filter is currently in its sleep state. */
+    final boolean isSleeping() {
+        return mIsSleeping.get();
+    }
+
+    /** Returns the elapsed-realtime timestamp recorded at the start of the last execution. */
+    final long getLastScheduleTime() {
+        return mLastScheduleTime;
+    }
+
+    private final void autoPullInputs() {
+        // [Non-iterator looping]
+        for (int i = 0; i < mConnectedInputPortArray.length; ++i) {
+            InputPort port = mConnectedInputPortArray[i];
+            if (port.hasFrame() && port.isAutoPullEnabled()) {
+                mConnectedInputPortArray[i].pullFrame();
+            }
+        }
+    }
+
+    /** Releases all frames registered via addAutoReleaseFrame() and clears the list. */
+    private final void autoReleaseFrames() {
+        // [Non-iterator looping]
+        for (int i = 0; i < mAutoReleaseFrames.size(); ++i) {
+            mAutoReleaseFrames.get(i).release();
+        }
+        mAutoReleaseFrames.clear();
+    }
+
+    /** Returns the input port with the given name, lazily creating it from the signature. */
+    private final InputPort newInputPort(String name) {
+        InputPort result = mConnectedInputPorts.get(name);
+        if (result == null) {
+            Signature.PortInfo info = getSignature().getInputPortInfo(name);
+            result = new InputPort(this, name, info);
+            mConnectedInputPorts.put(name, result);
+        }
+        return result;
+    }
+
+    /** Returns the output port with the given name, lazily creating it from the signature. */
+    private final OutputPort newOutputPort(String name) {
+        OutputPort result = mConnectedOutputPorts.get(name);
+        if (result == null) {
+            Signature.PortInfo info = getSignature().getOutputPortInfo(name);
+            result = new OutputPort(this, name, info);
+            mConnectedOutputPorts.put(name, result);
+        }
+        return result;
+    }
+
+    private final void processRequests() {
+        // Currently only the close request is supported; all request flags are cleared
+        // once it has been handled.
+        if ((mRequests & REQUEST_FLAG_CLOSE) != 0) {
+            performClose();
+            mRequests = REQUEST_FLAG_NONE;
+        }
+    }
+
+    private void assertIsPaused() {
+        // Filter state may only be modified while the current runner (if any) is paused or
+        // stopped; fail fast otherwise.
+        GraphRunner runner = GraphRunner.current();
+        if (runner != null && !runner.isPaused() && !runner.isStopped()) {
+            throw new RuntimeException("Attempting to modify filter state while runner is "
+                + "executing. Please pause or stop the runner first!");
+        }
+    }
+
+    private final void updatePortArrays() {
+        // Copy our port-maps to arrays for faster non-iterator access.
+        // Must be called whenever the port maps change (see connect()).
+        mConnectedInputPortArray = mConnectedInputPorts.values().toArray(new InputPort[0]);
+        mConnectedOutputPortArray = mConnectedOutputPorts.values().toArray(new OutputPort[0]);
+    }
+
+}
+
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/FilterFactory.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/FilterFactory.java
new file mode 100644
index 0000000..2c67c79
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/FilterFactory.java
@@ -0,0 +1,150 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *            http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package androidx.media.filterfw;
+
+import android.util.Log;
+
+import dalvik.system.PathClassLoader;
+
+import java.lang.reflect.Constructor;
+import java.util.HashSet;
+
+public class FilterFactory {
+
+    private static FilterFactory mSharedFactory;
+    private HashSet<String> mPackages = new HashSet<String>();
+
+    private static ClassLoader mCurrentClassLoader;
+    private static HashSet<String> mLibraries;
+    private static Object mClassLoaderGuard;
+
+    static {
+        mCurrentClassLoader = Thread.currentThread().getContextClassLoader();
+        mLibraries = new HashSet<String>();
+        mClassLoaderGuard = new Object();
+    }
+
+    private static final String TAG = "FilterFactory";
+    private static boolean mLogVerbose = Log.isLoggable(TAG, Log.VERBOSE);
+
+    /**
+     * Returns the process-wide shared FilterFactory, creating it on first access.
+     * Synchronized so that concurrent first calls cannot create two instances.
+     */
+    public static synchronized FilterFactory sharedFactory() {
+        if (mSharedFactory == null) {
+            mSharedFactory = new FilterFactory();
+        }
+        return mSharedFactory;
+    }
+
+    /**
+     * Adds a new Java library to the list to be scanned for filters.
+     * libraryPath must be an absolute path of the jar file.  This needs to be
+     * static because only one classloader per process can open a shared native
+     * library, which a filter may well have.
+     */
+    public static void addFilterLibrary(String libraryPath) {
+        if (mLogVerbose) Log.v(TAG, "Adding filter library " + libraryPath);
+        synchronized(mClassLoaderGuard) {
+            if (mLibraries.contains(libraryPath)) {
+                if (mLogVerbose) Log.v(TAG, "Library already added");
+                return;
+            }
+            mLibraries.add(libraryPath);
+            // Chain another path loader to the current chain
+            mCurrentClassLoader = new PathClassLoader(libraryPath, mCurrentClassLoader);
+        }
+    }
+
+    /**
+     * Adds a package to the set of packages searched for filter classes.
+     * @param packageName the fully qualified name of the package to search.
+     */
+    public void addPackage(String packageName) {
+        if (mLogVerbose) Log.v(TAG, "Adding package " + packageName);
+        /* TODO: This should use a getPackage call in the caller's context, but no such method
+                 exists.
+        Package pkg = Package.getPackage(packageName);
+        if (pkg == null) {
+            throw new IllegalArgumentException("Unknown filter package '" + packageName + "'!");
+        }
+        */
+        mPackages.add(packageName);
+    }
+
+    /**
+     * Returns whether a Filter subclass with the given simple name can be resolved in
+     * the registered packages.
+     */
+    public boolean isFilterAvailable(String className) {
+        return getFilterClass(className) != null;
+    }
+
+    /**
+     * Resolves the filter class by name and instantiates it.
+     *
+     * @param className the simple class name, resolved against the registered packages.
+     * @param filterName the name given to the new filter instance.
+     * @param context the MffContext the filter is created in.
+     * @return the new Filter instance.
+     * @throws IllegalArgumentException if no matching filter class can be found.
+     */
+    public Filter createFilterByClassName(String className, String filterName, MffContext context) {
+        if (mLogVerbose) Log.v(TAG, "Looking up class " + className);
+        Class<? extends Filter> filterClass = getFilterClass(className);
+        if (filterClass == null) {
+            throw new IllegalArgumentException("Unknown filter class '" + className + "'!");
+        }
+        return createFilterByClass(filterClass, filterName, context);
+    }
+
+    /**
+     * Instantiates a filter of the given class via its (MffContext, String) constructor.
+     *
+     * @return the new Filter instance.
+     * @throws IllegalArgumentException if the class lacks the expected constructor.
+     * @throws RuntimeException if the constructor itself throws.
+     */
+    public Filter createFilterByClass(Class<? extends Filter> filterClass,
+            String filterName, MffContext context) {
+        // Look for the correct constructor
+        Constructor<? extends Filter> filterConstructor = null;
+        try {
+            filterConstructor = filterClass.getConstructor(MffContext.class, String.class);
+        } catch (NoSuchMethodException e) {
+            throw new IllegalArgumentException("The filter class '" + filterClass
+                + "' does not have a constructor of the form <init>(MffContext, String)!");
+        }
+
+        // Construct the filter
+        Filter filter = null;
+        try {
+            filter = filterConstructor.newInstance(context, filterName);
+        } catch (Throwable t) {
+            throw new RuntimeException("Error creating filter " + filterName + "!", t);
+        }
+
+        if (filter == null) {
+            throw new IllegalArgumentException("Could not construct the filter '"
+                + filterName + "'!");
+        }
+        return filter;
+    }
+
+    // Resolves a simple class name against the registered packages and returns it as a
+    // Filter subclass, or null if it cannot be found or is not a Filter.
+    private Class<? extends Filter> getFilterClass(String name) {
+        Class<?> filterClass = null;
+
+        // Look for the class in the imported packages
+        for (String packageName : mPackages) {
+            try {
+                if (mLogVerbose) Log.v(TAG, "Trying "+ packageName + "." + name);
+                synchronized(mClassLoaderGuard) {
+                    filterClass = mCurrentClassLoader.loadClass(packageName + "." + name);
+                }
+            } catch (ClassNotFoundException e) {
+                continue;
+            }
+            // Exit loop if class was found.
+            if (filterClass != null) {
+                break;
+            }
+        }
+        Class<? extends Filter> result = null;
+        try {
+            if (filterClass != null) {
+                result = filterClass.asSubclass(Filter.class);
+            }
+        } catch (ClassCastException e) {
+            // Leave result == null
+        }
+        return result;
+    }
+}
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/FilterGraph.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/FilterGraph.java
new file mode 100644
index 0000000..7d5ed9f
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/FilterGraph.java
@@ -0,0 +1,567 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package androidx.media.filterfw;
+
+import android.util.Log;
+import android.view.View;
+import androidx.media.filterpacks.base.BranchFilter;
+import androidx.media.filterpacks.base.FrameSlotSource;
+import androidx.media.filterpacks.base.FrameSlotTarget;
+import androidx.media.filterpacks.base.GraphInputSource;
+import androidx.media.filterpacks.base.GraphOutputTarget;
+import androidx.media.filterpacks.base.ValueTarget;
+import androidx.media.filterpacks.base.ValueTarget.ValueListener;
+import androidx.media.filterpacks.base.VariableSource;
+
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map.Entry;
+import java.util.Set;
+
+/**
+ * A graph of Filter nodes.
+ *
+ * A FilterGraph instance contains a set of Filter instances connected by their output and input
+ * ports. Every filter belongs to exactly one graph and cannot be moved to another graph.
+ *
+ * FilterGraphs may contain sub-graphs that are dependent on the parent graph. These are typically
+ * used when inserting sub-graphs into MetaFilters. When a parent graph is torn down so are its
+ * sub-graphs. The same applies to flushing frames of a graph.
+ */
+public class FilterGraph {
+
+    private final static boolean DEBUG = false;
+
+    /** The context that this graph lives in */
+    private MffContext mContext;
+
+    /** Map from name of filter to the filter instance */
+    private HashMap<String, Filter> mFilterMap = new HashMap<String, Filter>();
+
+    /** Allows quick access to array of all filters. */
+    private Filter[] mAllFilters = null;
+
+    /** The GraphRunner currently attached to this graph */
+    GraphRunner mRunner;
+
+    /** The set of sub-graphs of this graph */
+    HashSet<FilterGraph> mSubGraphs = new HashSet<FilterGraph>();
+
+    /** The parent graph of this graph, or null if this graph is a root graph. */
+    private FilterGraph mParentGraph;
+
+    public static class Builder {
+
+        /** The context that this builder lives in */
+        private MffContext mContext;
+
+        /** Map from name of filter to the filter instance */
+        private HashMap<String, Filter> mFilterMap = new HashMap<String, Filter>();
+
+        /**
+         * Creates a new builder for specifying a graph structure.
+         * @param context The context the graph will live in.
+         */
+        public Builder(MffContext context) {
+            mContext = context;
+        }
+
+        /**
+         * Add a filter to the graph.
+         *
+         * Adds the specified filter to the set of filters of this graph. The filter must not be in
+         * the graph already, and the filter's name must be unique within the graph.
+         *
+         * @param filter the filter to add to the graph.
+         * @throws IllegalArgumentException if the filter is in the graph already, or its name is
+         *                                  already taken.
+         */
+        public void addFilter(Filter filter) {
+            if (mFilterMap.containsValue(filter)) {
+                throw new IllegalArgumentException("Attempting to add filter " + filter + " that "
+                    + "is in the graph already!");
+            } else if (mFilterMap.containsKey(filter.getName())) {
+                throw new IllegalArgumentException("Graph contains filter with name '"
+                    + filter.getName() + "' already!");
+            } else {
+                mFilterMap.put(filter.getName(), filter);
+            }
+        }
+
+        /**
+         * Adds a variable to the graph.
+         *
+         * TODO: More documentation.
+         *
+         * @param name the name of the variable.
+         * @param value the value of the variable or null if no value is to be set yet.
+         * @return the VariableSource filter that holds the value of this variable.
+         * @throws IllegalArgumentException if a filter with the given name exists already.
+         */
+        public VariableSource addVariable(String name, Object value) {
+            if (getFilter(name) != null) {
+                throw new IllegalArgumentException("Filter named '" + name + "' exists already!");
+            }
+            VariableSource valueSource = new VariableSource(mContext, name);
+            addFilter(valueSource);
+            if (value != null) {
+                valueSource.setValue(value);
+            }
+            return valueSource;
+        }
+
+        /** Adds a FrameSlotSource filter reading from the given slot to the graph. */
+        public FrameSlotSource addFrameSlotSource(String name, String slotName) {
+            FrameSlotSource filter = new FrameSlotSource(mContext, name, slotName);
+            addFilter(filter);
+            return filter;
+        }
+
+        /** Adds a FrameSlotTarget filter writing to the given slot to the graph. */
+        public FrameSlotTarget addFrameSlotTarget(String name, String slotName) {
+            FrameSlotTarget filter = new FrameSlotTarget(mContext, name, slotName);
+            addFilter(filter);
+            return filter;
+        }
+
+        /**
+         * Connect two filters by their ports.
+         * The filters specified must have been previously added to the graph builder.
+         *
+         * @param sourceFilterName The name of the source filter.
+         * @param sourcePort The name of the source port.
+         * @param targetFilterName The name of the target filter.
+         * @param targetPort The name of the target port.
+         * @throws IllegalArgumentException if either filter name is unknown.
+         */
+        public void connect(String sourceFilterName, String sourcePort,
+                            String targetFilterName, String targetPort) {
+            Filter sourceFilter = getFilter(sourceFilterName);
+            Filter targetFilter = getFilter(targetFilterName);
+            if (sourceFilter == null) {
+                throw new IllegalArgumentException("Unknown filter '" + sourceFilterName + "'!");
+            } else if (targetFilter == null) {
+                throw new IllegalArgumentException("Unknown filter '" + targetFilterName + "'!");
+            }
+            connect(sourceFilter, sourcePort, targetFilter, targetPort);
+        }
+
+        /**
+         * Connect two filters by their ports.
+         * The filters specified must have been previously added to the graph builder.
+         *
+         * @param sourceFilter The source filter.
+         * @param sourcePort The name of the source port.
+         * @param targetFilter The target filter.
+         * @param targetPort The name of the target port.
+         */
+        public void connect(Filter sourceFilter, String sourcePort,
+                            Filter targetFilter, String targetPort) {
+            sourceFilter.connect(sourcePort, targetFilter, targetPort);
+        }
+
+        /**
+         * Returns the filter with the specified name.
+         *
+         * @return the filter with the specified name, or null if no such filter exists.
+         */
+        public Filter getFilter(String name) {
+            return mFilterMap.get(name);
+        }
+
+        /**
+         * Builds the graph and checks signatures.
+         *
+         * @return The new graph instance.
+         */
+        public FilterGraph build() {
+            checkSignatures();
+            return buildWithParent(null);
+        }
+
+        /**
+         * Builds the sub-graph and checks signatures.
+         *
+         * @param parentGraph the parent graph of the built sub-graph.
+         * @return The new graph instance.
+         */
+        public FilterGraph buildSubGraph(FilterGraph parentGraph) {
+            if (parentGraph == null) {
+                throw new NullPointerException("Parent graph must be non-null!");
+            }
+            checkSignatures();
+            return buildWithParent(parentGraph);
+        }
+
+        // Creates a VariableSource holding the given value and connects it to the named
+        // input of the named filter. Returns the new VariableSource.
+        VariableSource assignValueToFilterInput(Object value, String filterName, String inputName) {
+            // Get filter to connect to
+            Filter filter = getFilter(filterName);
+            if (filter == null) {
+                throw new IllegalArgumentException("Unknown filter '" + filterName + "'!");
+            }
+
+            // Construct a name for our value source and make sure it does not exist already
+            String valueSourceName = filterName + "." + inputName;
+            if (getFilter(valueSourceName) != null) {
+                throw new IllegalArgumentException("VariableSource for '" + filterName + "' and "
+                    + "input '" + inputName + "' exists already!");
+            }
+
+            // Create new VariableSource and connect it to the target filter and port
+            VariableSource valueSource = new VariableSource(mContext, valueSourceName);
+            addFilter(valueSource);
+            try {
+                ((Filter)valueSource).connect("value", filter, inputName);
+            } catch (RuntimeException e) {
+                throw new RuntimeException("Could not connect VariableSource to input '" + inputName
+                    + "' of filter '" + filterName + "'!", e);
+            }
+
+            // Assign the value to the VariableSource
+            if (value != null) {
+                valueSource.setValue(value);
+            }
+
+            return valueSource;
+        }
+
+        // Connects an existing VariableSource (looked up by name) to the named input of the
+        // named filter, branching its output if it feeds multiple inputs.
+        VariableSource assignVariableToFilterInput(String varName,
+                                                   String filterName,
+                                                   String inputName) {
+            // Get filter to connect to
+            Filter filter = getFilter(filterName);
+            if (filter == null) {
+                throw new IllegalArgumentException("Unknown filter '" + filterName + "'!");
+            }
+
+            // Get variable
+            Filter variable = getFilter(varName);
+            if (variable == null || !(variable instanceof VariableSource)) {
+                throw new IllegalArgumentException("Unknown variable '" + varName + "'!");
+            }
+
+            // Connect variable (and possibly branch) variable to filter
+            try {
+                connectAndBranch(variable, "value", filter, inputName);
+            } catch (RuntimeException e) {
+                throw new RuntimeException("Could not connect VariableSource to input '" + inputName
+                    + "' of filter '" + filterName + "'!", e);
+            }
+
+            return (VariableSource)variable;
+        }
+
+        /**
+         * Builds the graph without checking signatures.
+         * If parent is non-null, build a sub-graph of the specified parent.
+         *
+         * @return The new graph instance.
+         */
+        private FilterGraph buildWithParent(FilterGraph parent) {
+            FilterGraph graph = new FilterGraph(mContext, parent);
+            graph.mFilterMap = mFilterMap;
+            graph.mAllFilters = mFilterMap.values().toArray(new Filter[0]);
+            for (Entry<String, Filter> filterEntry : mFilterMap.entrySet()) {
+                filterEntry.getValue().insertIntoFilterGraph(graph);
+            }
+            return graph;
+        }
+
+        private void checkSignatures() {
+            checkSignaturesForFilters(mFilterMap.values());
+        }
+
+        // TODO: Currently this always branches even if the connection is a 1:1 connection. Later
+        // we may optimize to pass through directly in the 1:1 case (may require disconnecting
+        // ports).
+        private void connectAndBranch(Filter sourceFilter,
+                                      String sourcePort,
+                                      Filter targetFilter,
+                                      String targetPort) {
+            String branchName = "__" + sourceFilter.getName() + "_" + sourcePort + "Branch";
+            Filter branch = getFilter(branchName);
+            if (branch == null) {
+                branch = new BranchFilter(mContext, branchName, false);
+                addFilter(branch);
+                sourceFilter.connect(sourcePort, branch, "input");
+            }
+            String portName = "to" + targetFilter.getName() + "_" + targetPort;
+            branch.connect(portName, targetFilter, targetPort);
+        }
+
+    }
+
+    /**
+     * Attach the graph and its subgraphs to a custom GraphRunner.
+     *
+     * Call this if you want the graph to be executed by a specific GraphRunner. You must call
+     * this before any other runner is set. Note that calls to {@code getRunner()} and
+     * {@code run()} auto-create a GraphRunner.
+     *
+     * @param runner The GraphRunner instance that should execute this graph.
+     * @throws RuntimeException if the graph is already attached to a different GraphRunner.
+     * @see #getRunner()
+     * @see #run()
+     */
+    public void attachToRunner(GraphRunner runner) {
+        if (mRunner == null) {
+            // Attach all sub-graphs first so the whole hierarchy shares one runner.
+            for (FilterGraph subGraph : mSubGraphs) {
+                subGraph.attachToRunner(runner);
+            }
+            runner.attachGraph(this);
+            mRunner = runner;
+        } else if (mRunner != runner) {
+            // Re-attaching to the same runner is a silent no-op; a different one is an error.
+            throw new RuntimeException("Cannot attach FilterGraph to GraphRunner that is already "
+                + "attached to another GraphRunner!");
+        }
+    }
+
+    /**
+     * Forcibly tear down a filter graph.
+     *
+     * Call this to release any resources associated with the filter graph, its filters and any of
+     * its sub-graphs. This method must not be called if the graph (or any sub-graph) is running.
+     *
+     * You may no longer access this graph instance or any of its subgraphs after calling this
+     * method.
+     *
+     * Tearing down of sub-graphs is not supported. You must tear down the root graph, which will
+     * tear down all of its sub-graphs.
+     *
+     * @throws IllegalStateException if the graph is still running.
+     * @throws RuntimeException if you attempt to tear down a sub-graph.
+     */
+    public void tearDown() {
+        assertNotRunning();
+        if (mParentGraph != null) {
+            throw new RuntimeException("Attempting to tear down sub-graph!");
+        }
+        if (mRunner != null) {
+            mRunner.tearDownGraph(this);
+        }
+        for (FilterGraph subGraph : mSubGraphs) {
+            // Detach the sub-graph from its parent first, so its own tearDown()
+            // passes the root-graph check above.
+            subGraph.mParentGraph = null;
+            subGraph.tearDown();
+        }
+        mSubGraphs.clear();
+    }
+
+    /**
+     * Returns the MffContext that this graph is bound to.
+     *
+     * @return the MffContext instance that this graph is bound to.
+     */
+    public MffContext getContext() {
+        return mContext;
+    }
+
+    /**
+     * Returns the filter with the specified name.
+     *
+     * @param name the name of the filter to look up.
+     * @return the filter with the specified name, or null if no such filter exists.
+     */
+    public Filter getFilter(String name) {
+        return mFilterMap.get(name);
+    }
+
+    /**
+     * Returns the VariableSource for the specified variable.
+     *
+     * @param name The name of the VariableSource.
+     * @return The VariableSource filter instance with the specified name.
+     * @throws IllegalArgumentException if no VariableSource filter with the given name exists.
+     */
+    public VariableSource getVariable(String name) {
+        Filter result = mFilterMap.get(name);
+        // instanceof is false for null, so no separate null check is needed.
+        if (result instanceof VariableSource) {
+            return (VariableSource) result;
+        }
+        throw new IllegalArgumentException("Unknown variable '" + name + "' specified!");
+    }
+
+    /**
+     * Returns the GraphOutputTarget with the specified name.
+     *
+     * @param name The name of the target.
+     * @return The GraphOutputTarget instance with the specified name.
+     * @throws IllegalArgumentException if no GraphOutputTarget with the given name exists.
+     */
+    public GraphOutputTarget getGraphOutput(String name) {
+        Filter result = mFilterMap.get(name);
+        // instanceof is false for null, so no separate null check is needed.
+        if (result instanceof GraphOutputTarget) {
+            return (GraphOutputTarget) result;
+        }
+        throw new IllegalArgumentException("Unknown target '" + name + "' specified!");
+    }
+
+    /**
+     * Returns the GraphInputSource with the specified name.
+     *
+     * @param name The name of the source.
+     * @return The GraphInputSource instance with the specified name.
+     * @throws IllegalArgumentException if no GraphInputSource with the given name exists.
+     */
+    public GraphInputSource getGraphInput(String name) {
+        Filter result = mFilterMap.get(name);
+        // instanceof is false for null, so no separate null check is needed.
+        if (result instanceof GraphInputSource) {
+            return (GraphInputSource) result;
+        }
+        throw new IllegalArgumentException("Unknown source '" + name + "' specified!");
+    }
+
+    /**
+     * Binds a filter to a view.
+     *
+     * ViewFilter instances support visualizing their data to a view. See the specific filter
+     * documentation for details. Views may be bound only if the graph is not running.
+     *
+     * @param filterName the name of the filter to bind.
+     * @param view the view to bind to.
+     * @throws IllegalStateException if the filter is in an illegal state.
+     * @throws IllegalArgumentException if no such view-filter exists.
+     */
+    public void bindFilterToView(String filterName, View view) {
+        Filter filter = mFilterMap.get(filterName);
+        // instanceof is false for null, so no separate null check is needed.
+        if (filter instanceof ViewFilter) {
+            ((ViewFilter) filter).bindToView(view);
+        } else {
+            throw new IllegalArgumentException("Unknown view filter '" + filterName + "'!");
+        }
+    }
+
+    /**
+     * Binds a listener to a ValueTarget filter.
+     *
+     * The listener is invoked with values produced by the named ValueTarget filter.
+     *
+     * @param filterName the name of the ValueTarget filter to bind to.
+     * @param listener the listener that receives the filter's values.
+     * @param onCallerThread true to invoke the listener on the caller's thread.
+     * @throws IllegalArgumentException if no ValueTarget filter with the given name exists.
+     */
+    public void bindValueTarget(String filterName, ValueListener listener, boolean onCallerThread) {
+        Filter filter = mFilterMap.get(filterName);
+        // instanceof is false for null, so no separate null check is needed.
+        if (filter instanceof ValueTarget) {
+            ((ValueTarget) filter).setListener(listener, onCallerThread);
+        } else {
+            throw new IllegalArgumentException("Unknown ValueTarget filter '" + filterName + "'!");
+        }
+    }
+
+    // Running Graphs //////////////////////////////////////////////////////////////////////////////
+    /**
+     * Convenience method to run the graph.
+     *
+     * Creates a new runner for this graph in the specified mode and executes it. Returns the
+     * runner to allow control of execution.
+     *
+     * @throws IllegalStateException if the graph is already running.
+     * @return the GraphRunner instance that was used for execution.
+     */
+    public GraphRunner run() {
+        GraphRunner runner = getRunner();
+        // Convenience runs are always non-verbose.
+        runner.setIsVerbose(false);
+        runner.start(this);
+        return runner;
+    }
+
+    /**
+     * Returns the GraphRunner for this graph.
+     *
+     * Every FilterGraph instance has a GraphRunner instance associated with it for executing the
+     * graph. If no runner has been attached yet, one is created and attached on demand.
+     *
+     * @return the GraphRunner instance for this graph.
+     */
+    public GraphRunner getRunner() {
+        if (mRunner == null) {
+            // Lazily create a runner; attachToRunner() stores it in mRunner.
+            attachToRunner(new GraphRunner(mContext));
+        }
+        return mRunner;
+    }
+
+    /**
+     * Returns whether the graph is currently running.
+     *
+     * @return true if the graph has an attached runner that is currently executing it.
+     */
+    public boolean isRunning() {
+        return mRunner != null && mRunner.isRunning();
+    }
+
+    /**
+     * Checks each filter's signature to verify that all requirements are fulfilled.
+     *
+     * This will throw a RuntimeException if any unfulfilled requirements are found.
+     * Note that FilterGraph.Builder also has a checkSignatures() method, which allows
+     * performing the same check /before/ the FilterGraph is built.
+     */
+    public void checkSignatures() {
+        checkSignaturesForFilters(mFilterMap.values());
+    }
+
+    // MFF Internal Methods ////////////////////////////////////////////////////////////////////////
+    /** Returns the internal array of all filters in this graph (not a copy). */
+    Filter[] getAllFilters() {
+        return mAllFilters;
+    }
+
+    /**
+     * Checks the signatures of the given filters, throwing a RuntimeException if any
+     * input or output port does not conform to its filter's declared signature.
+     *
+     * @param filters the filters whose signatures should be checked.
+     */
+    static void checkSignaturesForFilters(Collection<Filter> filters) {
+        for (Filter filter : filters) {
+            if (DEBUG) {
+                Log.d("FilterGraph", "Checking filter " + filter.getName() + "...");
+            }
+            Signature signature = filter.getSignature();
+            signature.checkInputPortsConform(filter);
+            signature.checkOutputPortsConform(filter);
+        }
+    }
+
+    /**
+     * Wipes the filter references in this graph, so that they may be collected.
+     *
+     * This must be called only after a tearDown as this will make the FilterGraph invalid:
+     * any method that touches mFilterMap or mAllFilters will fail afterwards.
+     */
+    void wipe() {
+        mAllFilters = null;
+        mFilterMap = null;
+    }
+
+    /** Clears all frames pending on the connected input and output ports of every filter. */
+    void flushFrames() {
+        for (Filter filter : mFilterMap.values()) {
+            for (InputPort inputPort : filter.getConnectedInputPorts()) {
+                inputPort.clear();
+            }
+            for (OutputPort outputPort : filter.getConnectedOutputPorts()) {
+                outputPort.clear();
+            }
+        }
+    }
+
+    /** Returns the internal set of this graph's sub-graphs (not a copy). */
+    Set<FilterGraph> getSubGraphs() {
+        return mSubGraphs;
+    }
+
+    // Internal Methods ////////////////////////////////////////////////////////////////////////////
+    /**
+     * Creates a new FilterGraph bound to the given context, registering it with the
+     * context and, if parentGraph is non-null, as a sub-graph of that parent.
+     */
+    private FilterGraph(MffContext context, FilterGraph parentGraph) {
+        mContext = context;
+        mContext.addGraph(this);
+        if (parentGraph != null) {
+            mParentGraph = parentGraph;
+            mParentGraph.mSubGraphs.add(this);
+        }
+    }
+
+    /** Throws IllegalStateException if this graph is currently being executed. */
+    private void assertNotRunning() {
+        if (isRunning()) {
+            throw new IllegalStateException("Attempting to modify running graph!");
+        }
+    }
+}
+
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/Frame.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/Frame.java
new file mode 100644
index 0000000..67907d3
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/Frame.java
@@ -0,0 +1,203 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package androidx.media.filterfw;
+
+import java.util.Arrays;
+
+/**
+ * Frames are the data containers that are transported between Filters.
+ *
+ * Frames may be used only within a Filter during filter graph execution. Accessing Frames outside
+ * of graph execution may cause unexpected results.
+ *
+ * There are two ways to obtain new Frame instances. You can call
+ * {@link OutputPort#fetchAvailableFrame(int[])} on an OutputPort to obtain a Frame to pass to an
+ * output. You can also call {@link #create(FrameType, int[])} to obtain
+ * a detached Frame instance that you may hold onto in your filter. If you need to hold on to a
+ * Frame that is owned by an input or output queue, you must call
+ * {@link #retain()} on it.
+ *
+ * When you are done using a detached Frame, you must release it yourself.
+ *
+ * To access frame data, call any of the {@code lock}-methods. This will give you access to the
+ * frame data in the desired format. You must pass in a {@code mode} indicating whether you wish
+ * to read or write to the data. Writing to a read-locked Frame may produce unexpected results and
+ * interfere with other filters. When you are done reading or writing to the data, you must call
+ * {@link #unlock()}. Note, that a Frame must be unlocked before you push it into an output queue.
+ *
+ * Generally, any type of access format to a Frame's data will be granted. However, it is strongly
+ * recommended to specify the access format that you intend to use in your filter's signature or
+ * in the access flags passed to {@code newFrame()}. This will allow the Frame to allocate
+ * the most efficient backings for the intended type of access.
+ *
+ * A frame can be pushed to an OutputPort by calling the {@link OutputPort#pushFrame(Frame)}
+ * method. Frames that have been pushed become read-only, and can no longer be modified.
+ *
+ * On the other end, a Filter can pull in an input Frame by calling {@link InputPort#pullFrame()}
+ * on the desired InputPort. Such frames are always read-only.
+ */
+public class Frame {
+
+    /** Special timestamp value indicating that no time-stamp was set. */
+    public static final long TIMESTAMP_NOT_SET = -1;
+
+    /** Frame data access mode: Read */
+    public static final int MODE_READ = 1;
+    /** Frame data access mode: Write */
+    public static final int MODE_WRITE = 2;
+
+    // The backing store holds the frame's data and reference count.
+    BackingStore mBackingStore;
+    boolean mReadOnly = false;
+
+    // Public API //////////////////////////////////////////////////////////////////////////////////
+    /**
+     * Returns the frame's type.
+     * @return A FrameType instance describing the frame data-type.
+     */
+    public final FrameType getType() {
+        return mBackingStore.getFrameType();
+    }
+
+    /**
+     * Returns the number of elements in the frame, as reported by the backing store.
+     * @return the frame's element count.
+     */
+    public final int getElementCount() {
+        return mBackingStore.getElementCount();
+    }
+
+    /**
+     * Set the frame's timestamp in nanoseconds.
+     *
+     * @param timestamp the timestamp of this frame in nanoseconds.
+     */
+    public final void setTimestamp(long timestamp) {
+        mBackingStore.setTimestamp(timestamp);
+    }
+
+    /**
+     * @return the frame's timestamp in nanoseconds.
+     */
+    public final long getTimestamp() {
+        return mBackingStore.getTimestamp();
+    }
+
+    /**
+     * @return the frame's timestamp in milliseconds.
+     */
+    public final long getTimestampMillis() {
+        // 1 millisecond = 1,000,000 nanoseconds.
+        return mBackingStore.getTimestamp() / 1000000L;
+    }
+
+    /**
+     * @return true if this frame is read-only (e.g. after it has been pushed to an output).
+     */
+    public final boolean isReadOnly() {
+        return mReadOnly;
+    }
+
+    /** Returns a FrameValue view onto this frame's backing store. */
+    public final FrameValue asFrameValue() {
+        return FrameValue.create(mBackingStore);
+    }
+
+    /** Returns a FrameValues view onto this frame's backing store. */
+    public final FrameValues asFrameValues() {
+        return FrameValues.create(mBackingStore);
+    }
+
+    /** Returns a FrameBuffer1D view onto this frame's backing store. */
+    public final FrameBuffer1D asFrameBuffer1D() {
+        return FrameBuffer1D.create(mBackingStore);
+    }
+
+    /** Returns a FrameBuffer2D view onto this frame's backing store. */
+    public final FrameBuffer2D asFrameBuffer2D() {
+        return FrameBuffer2D.create(mBackingStore);
+    }
+
+    /** Returns a FrameImage2D view onto this frame's backing store. */
+    public final FrameImage2D asFrameImage2D() {
+        return FrameImage2D.create(mBackingStore);
+    }
+
+    @Override
+    public String toString() {
+        return "Frame[" + getType().toString() + "]: " + mBackingStore;
+    }
+
+    @Override
+    public boolean equals(Object object) {
+        // Two frames are equal iff they share the same backing store instance.
+        return object instanceof Frame && ((Frame)object).mBackingStore == mBackingStore;
+    }
+
+    @Override
+    public int hashCode() {
+        // Must be consistent with equals(): equality is identity of the backing store,
+        // so the hash is the backing store's identity hash code.
+        return System.identityHashCode(mBackingStore);
+    }
+
+    /**
+     * Creates a new detached Frame of the given type and dimensions.
+     *
+     * Must be called within a FrameManager context (i.e. during graph execution).
+     *
+     * @param type the FrameType describing the frame's data-type.
+     * @param dimensions the dimensions of the frame, or null for a dimension-less frame.
+     * @return the newly created Frame.
+     * @throws IllegalStateException if there is no current FrameManager context.
+     */
+    public static Frame create(FrameType type, int[] dimensions) {
+        FrameManager manager = FrameManager.current();
+        if (manager == null) {
+            throw new IllegalStateException("Attempting to create new Frame outside of "
+                + "FrameManager context!");
+        }
+        return new Frame(type, dimensions, manager);
+    }
+
+    /**
+     * Releases one reference to this frame's data.
+     *
+     * @return this frame if references remain, or null once the data has been released.
+     */
+    public final Frame release() {
+        mBackingStore = mBackingStore.release();
+        return mBackingStore != null ? this : null;
+    }
+
+    /**
+     * Retains an additional reference to this frame's data.
+     *
+     * @return this frame.
+     */
+    public final Frame retain() {
+        mBackingStore = mBackingStore.retain();
+        return this;
+    }
+
+    /**
+     * Unlocks previously locked frame data.
+     *
+     * @throws RuntimeException if the frame is not currently locked.
+     */
+    public void unlock() {
+        if (!mBackingStore.unlock()) {
+            throw new RuntimeException("Attempting to unlock frame that is not locked!");
+        }
+    }
+
+    /**
+     * Returns a copy of the frame's dimensions, or null if it has none.
+     */
+    public int[] getDimensions() {
+        int[] dim = mBackingStore.getDimensions();
+        // Defensive copy so callers cannot mutate the backing store's dimensions.
+        return dim != null ? Arrays.copyOf(dim, dim.length) : null;
+    }
+
+    Frame(FrameType type, int[] dimensions, FrameManager manager) {
+        mBackingStore = new BackingStore(type, dimensions, manager);
+    }
+
+    Frame(BackingStore backingStore) {
+        mBackingStore = backingStore;
+    }
+
+    /** Throws if the requested access mode is a write on a read-only frame. */
+    final void assertAccessible(int mode) {
+        // Make sure frame is in write-mode
+        if (mReadOnly && mode == MODE_WRITE) {
+            throw new RuntimeException("Attempting to write to read-only frame " + this + "!");
+        }
+    }
+
+    final void setReadOnly(boolean readOnly) {
+        mReadOnly = readOnly;
+    }
+
+    /**
+     * Resizes the frame to the given dimensions.
+     *
+     * The number of dimensions must match the frame's current dimension count.
+     */
+    void resize(int[] newDims) {
+        int[] oldDims = mBackingStore.getDimensions();
+        int oldCount = oldDims == null ? 0 : oldDims.length;
+        int newCount = newDims == null ? 0 : newDims.length;
+        if (oldCount != newCount) {
+            throw new IllegalArgumentException("Cannot resize " + oldCount + "-dimensional "
+                + "Frame to " + newCount + "-dimensional Frame!");
+        } else if (newDims != null && !Arrays.equals(oldDims, newDims)) {
+            mBackingStore.resize(newDims);
+        }
+    }
+
+    /** Creates a CPU-backed copy of this frame managed by the given FrameManager. */
+    Frame makeCpuCopy(FrameManager frameManager) {
+        Frame frame = new Frame(getType(), getDimensions(), frameManager);
+        frame.mBackingStore.importStore(mBackingStore);
+        return frame;
+    }
+}
+
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/FrameBuffer1D.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/FrameBuffer1D.java
new file mode 100644
index 0000000..0e24f5b
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/FrameBuffer1D.java
@@ -0,0 +1,103 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package androidx.media.filterfw;
+
+import android.annotation.TargetApi;
+import android.renderscript.Allocation;
+
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+
+public class FrameBuffer1D extends Frame {
+
+    // Total number of elements (product of all dimensions); kept in sync by updateLength().
+    private int mLength = 0;
+
+    /**
+     * Access frame's data using a {@link ByteBuffer}.
+     * This is a convenience method and is equivalent to calling {@code lockData} with an
+     * {@code accessFormat} of {@code ACCESS_BYTES}.
+     * When writing to the {@link ByteBuffer}, the byte order should be always set to
+     * {@link ByteOrder#nativeOrder()}.
+     *
+     * @param mode the access mode, {@code MODE_READ} or {@code MODE_WRITE}.
+     * @return The byte buffer instance holding the Frame's data.
+     */
+    public ByteBuffer lockBytes(int mode) {
+        assertAccessible(mode);
+        return (ByteBuffer)mBackingStore.lockData(mode, BackingStore.ACCESS_BYTES);
+    }
+
+    /**
+     * Access frame's data using a RenderScript {@link Allocation}.
+     * This is a convenience method and is equivalent to calling {@code lockData} with an
+     * {@code accessFormat} of {@code ACCESS_ALLOCATION}.
+     *
+     * @param mode the access mode, {@code MODE_READ} or {@code MODE_WRITE}.
+     * @return The Allocation instance holding the Frame's data.
+     */
+    @TargetApi(11)
+    public Allocation lockAllocation(int mode) {
+        assertAccessible(mode);
+        return (Allocation) mBackingStore.lockData(mode, BackingStore.ACCESS_ALLOCATION);
+    }
+
+    /** Returns the total number of elements in this buffer. */
+    public int getLength() {
+        return mLength;
+    }
+
+    @Override
+    public int[] getDimensions() {
+        // NOTE(review): overridden without behavior changes — presumably kept as a
+        // javadoc/visibility anchor; confirm before removing.
+        return super.getDimensions();
+    }
+
+    /**
+     * Resizes the buffer to the given dimensions.
+     *
+     * TODO: Documentation. Note that frame contents are invalidated.
+     */
+    @Override
+    public void resize(int[] newDimensions) {
+        super.resize(newDimensions);
+    }
+
+    // Creates a FrameBuffer1D view after validating the backing store's type and dimensions.
+    static FrameBuffer1D create(BackingStore backingStore) {
+        assertCanCreate(backingStore);
+        return new FrameBuffer1D(backingStore);
+    }
+
+    FrameBuffer1D(BackingStore backingStore) {
+        super(backingStore);
+        updateLength(backingStore.getDimensions());
+    }
+
+    // Throws if the backing store has a sizeless element type or no dimensions.
+    static void assertCanCreate(BackingStore backingStore) {
+        FrameType type = backingStore.getFrameType();
+        if (type.getElementSize() == 0) {
+            throw new RuntimeException("Cannot access Frame of type " + type + " as a FrameBuffer "
+                + "instance!");
+        }
+        int[] dims = backingStore.getDimensions();
+        if (dims == null || dims.length == 0) {
+            throw new RuntimeException("Cannot access Frame with no dimensions as a FrameBuffer "
+                + "instance!");
+        }
+    }
+
+    // Recomputes mLength as the product of all dimensions.
+    void updateLength(int[] dimensions) {
+        mLength = 1;
+        for (int dim : dimensions) {
+            mLength *= dim;
+        }
+    }
+}
+
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/FrameBuffer2D.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/FrameBuffer2D.java
new file mode 100644
index 0000000..6a7f12a
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/FrameBuffer2D.java
@@ -0,0 +1,48 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package androidx.media.filterfw;
+
+public class FrameBuffer2D extends FrameBuffer1D {
+
+    /** Returns the width (first dimension) of this buffer. */
+    public int getWidth() {
+        int[] dims = mBackingStore.getDimensions();
+        return dims[0];
+    }
+
+    /** Returns the height (second dimension) of this buffer. */
+    public int getHeight() {
+        int[] dims = mBackingStore.getDimensions();
+        return dims[1];
+    }
+
+    // Creates a FrameBuffer2D view after validating that the store is two-dimensional.
+    static FrameBuffer2D create(BackingStore backingStore) {
+        assertCanCreate(backingStore);
+        return new FrameBuffer2D(backingStore);
+    }
+
+    FrameBuffer2D(BackingStore backingStore) {
+        super(backingStore);
+    }
+
+    // Validates the 1D requirements first, then requires exactly two dimensions.
+    static void assertCanCreate(BackingStore backingStore) {
+        FrameBuffer1D.assertCanCreate(backingStore);
+        int[] dims = backingStore.getDimensions();
+        int count = (dims == null) ? 0 : dims.length;
+        if (count != 2) {
+            throw new RuntimeException("Cannot access " + count + "-dimensional Frame as a "
+                + "FrameBuffer2D instance!");
+        }
+    }
+}
+
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/FrameImage2D.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/FrameImage2D.java
new file mode 100644
index 0000000..bca94f7
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/FrameImage2D.java
@@ -0,0 +1,184 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package androidx.media.filterfw;
+
+import android.graphics.Bitmap;
+import android.graphics.Canvas;
+import android.graphics.Paint;
+import android.graphics.Rect;
+import android.graphics.RectF;
+import androidx.media.filterfw.BackingStore.Backing;
+
+public class FrameImage2D extends FrameBuffer2D {
+
+    /**
+     * Access frame's data using a TextureSource.
+     * This is a convenience method and is equivalent to calling {@code lockData} with an
+     * {@code accessFormat} of {@code ACCESS_TEXTURE}.
+     *
+     * @return The TextureSource instance holding the Frame's data.
+     */
+    public TextureSource lockTextureSource() {
+        return (TextureSource)mBackingStore.lockData(MODE_READ, BackingStore.ACCESS_TEXTURE);
+    }
+
+    /**
+     * Access frame's data using a RenderTarget.
+     * This is a convenience method and is equivalent to calling {@code lockData} with an
+     * {@code accessFormat} of {@code ACCESS_RENDERTARGET}.
+     *
+     * @return The RenderTarget instance holding the Frame's data.
+     */
+    public RenderTarget lockRenderTarget() {
+        return (RenderTarget)mBackingStore.lockData(MODE_WRITE, BackingStore.ACCESS_RENDERTARGET);
+    }
+
+    /**
+     * Assigns the pixel data of the specified bitmap.
+     *
+     * The RGBA pixel data will be extracted from the bitmap and assigned to the frame data. Note,
+     * that the colors are premultiplied with the alpha channel. If you wish to have
+     * non-premultiplied colors, you must pass the Frame through an
+     * {@code UnpremultiplyAlphaFilter}.
+     *
+     * @param bitmap The bitmap pixels to assign.
+     * @throws IllegalArgumentException if the bitmap's size does not match the frame's.
+     */
+    public void setBitmap(Bitmap bitmap) {
+        bitmap = convertToFrameType(bitmap, mBackingStore.getFrameType());
+        validateBitmapSize(bitmap, mBackingStore.getDimensions());
+        Backing backing = mBackingStore.lockBacking(MODE_WRITE, BackingStore.ACCESS_BITMAP);
+        backing.setData(bitmap);
+        mBackingStore.unlock();
+    }
+
+    /**
+     * Returns the RGBA image contents as a Bitmap instance.
+     *
+     * @return a Bitmap instance holding the RGBA Frame image content.
+     */
+    public Bitmap toBitmap() {
+        Bitmap result = (Bitmap)mBackingStore.lockData(MODE_READ, BackingStore.ACCESS_BITMAP);
+        mBackingStore.unlock();
+        return result;
+    }
+
+    /**
+     * Copies the image data from one frame to another.
+     *
+     * The source and target rectangles must be given in normalized coordinates, where 0,0 is the
+     * top-left of the image and 1,1 is the bottom-right.
+     *
+     * If the target rectangle is smaller than the target frame, the pixel values outside of the
+     * target rectangle are undefined.
+     *
+     * This method must be called within a Filter during execution. It supports both GL-enabled
+     * and GL-disabled run contexts.
+     *
+     * @param target The target frame to copy to.
+     * @param sourceRect The source rectangle in normalized coordinates.
+     * @param targetRect The target rectangle in normalized coordinates.
+     */
+    public void copyToFrame(FrameImage2D target, RectF sourceRect, RectF targetRect) {
+        if (GraphRunner.current().isOpenGLSupported()) {
+            gpuImageCopy(this, target, sourceRect, targetRect);
+        } else {
+            cpuImageCopy(this, target, sourceRect, targetRect);
+        }
+    }
+
+    // Creates a FrameImage2D view after validating the backing store.
+    static FrameImage2D create(BackingStore backingStore) {
+        assertCanCreate(backingStore);
+        return new FrameImage2D(backingStore);
+    }
+
+    FrameImage2D(BackingStore backingStore) {
+        super(backingStore);
+    }
+
+    static void assertCanCreate(BackingStore backingStore) {
+        FrameBuffer2D.assertCanCreate(backingStore);
+    }
+
+    // Converts the bitmap to the pixel format required by the given frame type (currently
+    // only RGBA8888 / ARGB_8888 is supported), copying only when a conversion is needed.
+    private static Bitmap convertToFrameType(Bitmap bitmap, FrameType type) {
+        Bitmap.Config config = bitmap.getConfig();
+        Bitmap result = bitmap;
+        switch(type.getElementId()) {
+            case FrameType.ELEMENT_RGBA8888:
+                if (config != Bitmap.Config.ARGB_8888) {
+                    result = bitmap.copy(Bitmap.Config.ARGB_8888, false);
+                    if (result == null) {
+                        throw new RuntimeException("Could not convert bitmap to frame-type " +
+                                "RGBA8888!");
+                    }
+                }
+                break;
+            default:
+                throw new IllegalArgumentException("Unsupported frame type '" + type + "' for " +
+                        "bitmap assignment!");
+        }
+        return result;
+    }
+
+    // Throws unless the bitmap's size exactly matches the frame's dimensions.
+    // Static: uses no instance state.
+    private static void validateBitmapSize(Bitmap bitmap, int[] dimensions) {
+        if (bitmap.getWidth() != dimensions[0] || bitmap.getHeight() != dimensions[1]) {
+            throw new IllegalArgumentException("Cannot assign bitmap of size " + bitmap.getWidth()
+                    + "x" + bitmap.getHeight() + " to frame of size " + dimensions[0] + "x"
+                    + dimensions[1] + "!");
+        }
+    }
+
+    private static void gpuImageCopy(
+            FrameImage2D srcImage, FrameImage2D dstImage, RectF srcRect, RectF dstRect) {
+        ImageShader idShader = RenderTarget.currentTarget().getIdentityShader();
+        // We briefly modify the shader
+        // TODO: Implement a safer way to save and restore a shared shader.
+        idShader.setSourceRect(srcRect);
+        idShader.setTargetRect(dstRect);
+        idShader.process(srcImage, dstImage);
+        // And reset it as others may use it as well
+        idShader.setSourceRect(0f, 0f, 1f, 1f);
+        idShader.setTargetRect(0f, 0f, 1f, 1f);
+    }
+
+    private static void cpuImageCopy(
+            FrameImage2D srcImage, FrameImage2D dstImage, RectF srcRect, RectF dstRect) {
+        // Convert normalized rectangles to integer pixel rectangles. The cast must wrap
+        // the whole product: "(int) rect.left * width" would truncate the normalized
+        // coordinate (0..1) to 0 or 1 *before* multiplying, since the cast binds tighter
+        // than '*'. The destination rectangle is scaled by the *destination* image's
+        // dimensions (previously it incorrectly used the source image's dimensions).
+        Rect srcIRect = new Rect((int) (srcRect.left * srcImage.getWidth()),
+                (int) (srcRect.top * srcImage.getHeight()),
+                (int) (srcRect.right * srcImage.getWidth()),
+                (int) (srcRect.bottom * srcImage.getHeight()));
+        Rect dstIRect = new Rect((int) (dstRect.left * dstImage.getWidth()),
+                (int) (dstRect.top * dstImage.getHeight()),
+                (int) (dstRect.right * dstImage.getWidth()),
+                (int) (dstRect.bottom * dstImage.getHeight()));
+
+        // Create target canvas
+        Bitmap.Config config = Bitmap.Config.ARGB_8888;
+        Bitmap dstBitmap = Bitmap.createBitmap(dstImage.getWidth(), dstImage.getHeight(), config);
+        Canvas canvas = new Canvas(dstBitmap);
+
+        // Draw source bitmap into target canvas
+        Paint paint = new Paint();
+        paint.setFilterBitmap(true);
+        Bitmap srcBitmap = srcImage.toBitmap();
+        canvas.drawBitmap(srcBitmap, srcIRect, dstIRect, paint);
+
+        // Assign bitmap to output frame
+        dstImage.setBitmap(dstBitmap);
+    }
+}
+
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/FrameManager.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/FrameManager.java
new file mode 100644
index 0000000..55ed277
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/FrameManager.java
@@ -0,0 +1,473 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package androidx.media.filterfw;
+
+import androidx.media.filterfw.BackingStore.Backing;
+
+import java.util.Arrays;
+import java.util.Comparator;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.PriorityQueue;
+import java.util.Set;
+
+/**
+ * The FrameManager tracks, caches, allocates and deallocates frame data.
+ * All Frame instances are managed by a FrameManager, and belong to exactly one of these. Frames
+ * cannot be shared across FrameManager instances, however multiple MffContexts may use the same
+ * FrameManager.
+ *
+ * Additionally, frame managers allow attaching Frames under a specified key. This allows decoupling
+ * filter-graphs by instructing one node to attach a frame under a specific key, and another to
+ * fetch the frame under the same key.
+ */
+public class FrameManager {
+
+    /** The default max cache size is set to 12 MB */
+    public final static int DEFAULT_MAX_CACHE_SIZE = 12 * 1024 * 1024;
+
+    /** Frame caching policy: No caching */
+    public final static int FRAME_CACHE_NONE = 0;
+    /** Frame caching policy: Drop least recently used frame buffers */
+    public final static int FRAME_CACHE_LRU = 1;
+    /** Frame caching policy: Drop least frequently used frame buffers */
+    public final static int FRAME_CACHE_LFU = 2;
+
+    /** Slot Flag: No flags set */
+    public final static int SLOT_FLAGS_NONE = 0x00;
+    /** Slot Flag: Sticky flag set: Frame will remain in slot after fetch. */
+    public final static int SLOT_FLAG_STICKY = 0x01;
+
+    // The GraphRunner this FrameManager is bound to (set once in the constructor).
+    private GraphRunner mRunner;
+    // All backings this manager has seen and not yet destroyed (cached or in use).
+    private Set<Backing> mBackings = new HashSet<Backing>();
+    // Cache of reusable backings; concrete behavior depends on the cache-type constant.
+    private BackingCache mCache;
+
+    // Named slots used to pass frames between graphs or between runs. Keyed by slot name.
+    private Map<String, FrameSlot> mFrameSlots = new HashMap<String, FrameSlot>();
+
+    /**
+     * A named storage cell that holds at most one Frame of a declared type.
+     * Unless created with {@link #SLOT_FLAG_STICKY}, fetching the frame empties the slot.
+     */
+    static class FrameSlot {
+        private FrameType mType;
+        private int mFlags;
+        private Frame mFrame = null;
+
+        public FrameSlot(FrameType type, int flags) {
+            mType = type;
+            mFlags = flags;
+        }
+
+        public FrameType getType() {
+            return mType;
+        }
+
+        public boolean hasFrame() {
+            return mFrame != null;
+        }
+
+        /** Releases and clears the stored frame, if any. Safe to call on an empty slot. */
+        public void releaseFrame() {
+            if (mFrame != null) {
+                mFrame.release();
+                mFrame = null;
+            }
+        }
+
+        // TODO: Type check
+        /** Stores a retained reference to {@code frame}, releasing any previously stored frame. */
+        public void assignFrame(Frame frame) {
+            // Retain before releasing the old frame, in case both are the same instance.
+            Frame oldFrame = mFrame;
+            mFrame = frame.retain();
+            if (oldFrame != null) {
+                oldFrame.release();
+            }
+        }
+
+        /**
+         * Returns the stored frame, retained on behalf of the caller. Non-sticky slots release
+         * their own reference and become empty.
+         * NOTE(review): throws a NullPointerException if the slot is empty; callers are expected
+         * to check hasFrame() first.
+         */
+        public Frame getFrame() {
+            Frame result = mFrame.retain();
+            if ((mFlags & SLOT_FLAG_STICKY) == 0) {
+                releaseFrame();
+            }
+            return result;
+        }
+
+        /** Clears the read-only flag of the stored frame (called at the start of a graph run). */
+        public void markWritable() {
+            if (mFrame != null) {
+                mFrame.setReadOnly(false);
+            }
+        }
+    }
+
+    /**
+     * Base class for backing caches. A cache hands out compatible backings on fetch and decides
+     * whether to keep a backing that has become available again.
+     */
+    private static abstract class BackingCache {
+
+        protected int mCacheMaxSize = DEFAULT_MAX_CACHE_SIZE;
+
+        public abstract Backing fetchBacking(int mode, int access, int[] dimensions, int elemSize);
+
+        public abstract boolean cacheBacking(Backing backing);
+
+        public abstract void clear();
+
+        public abstract int getSizeLeft();
+
+        public void setSize(int size) {
+            mCacheMaxSize = size;
+        }
+
+        public int getSize() {
+            return mCacheMaxSize;
+        }
+    }
+
+    /** A no-op cache: never stores and never returns a backing ({@link #FRAME_CACHE_NONE}). */
+    private static class BackingCacheNone extends BackingCache {
+
+        @Override
+        public Backing fetchBacking(int mode, int access, int[] dimensions, int elemSize) {
+            return null;
+        }
+
+        @Override
+        public boolean cacheBacking(Backing backing) {
+            return false;
+        }
+
+        @Override
+        public void clear() {
+        }
+
+        @Override
+        public int getSize() {
+            // This cache never holds anything, so it reports a size of 0 regardless of setSize().
+            return 0;
+        }
+
+        @Override
+        public int getSizeLeft() {
+            return 0;
+        }
+    }
+
+    /**
+     * A size-bounded cache that evicts the backing with the lowest {@code cachePriority} first.
+     * Subclasses define how priorities are assigned (LRU vs. LFU).
+     */
+    private static abstract class PriorityBackingCache extends BackingCache {
+        // Total size in bytes of all backings currently in the queue.
+        private int mSize = 0;
+        private PriorityQueue<Backing> mQueue;
+
+        public PriorityBackingCache() {
+            // Lowest priority at the head, so poll() evicts the least valuable backing.
+            mQueue = new PriorityQueue<Backing>(4, new Comparator<Backing>() {
+                @Override
+                public int compare(Backing left, Backing right) {
+                    return left.cachePriority - right.cachePriority;
+                }
+            });
+        }
+
+        @Override
+        public Backing fetchBacking(int mode, int access, int[] dimensions, int elemSize) {
+            // Linear scan for a compatible backing: access bits, dimensions and element size
+            // must all match the request.
+            for (Backing backing : mQueue) {
+                int backingAccess = (mode == Frame.MODE_WRITE)
+                    ? backing.writeAccess()
+                    : backing.readAccess();
+                if ((backingAccess & access) == access
+                    && dimensionsCompatible(backing.getDimensions(), dimensions)
+                    && (elemSize == backing.getElementSize())) {
+                    mQueue.remove(backing);
+                    mSize -= backing.getSize();
+                    onFetchBacking(backing);
+                    return backing;
+                }
+            }
+            //Log.w("FrameManager", "Could not find backing for dimensions " + Arrays.toString(dimensions));
+            return null;
+        }
+
+        @Override
+        public boolean cacheBacking(Backing backing) {
+            if (reserve(backing.getSize())) {
+                onCacheBacking(backing);
+                mQueue.add(backing);
+                return true;
+            }
+            return false;
+        }
+
+        @Override
+        public void clear() {
+            mQueue.clear();
+            mSize = 0;
+        }
+
+        @Override
+        public int getSizeLeft() {
+            return mCacheMaxSize - mSize;
+        }
+
+        /** Hook invoked when a backing enters the cache (assigns its initial priority). */
+        protected abstract void onCacheBacking(Backing backing);
+
+        /** Hook invoked when a backing is fetched from the cache (updates its priority). */
+        protected abstract void onFetchBacking(Backing backing);
+
+        /**
+         * Accounts for {@code size} new bytes, evicting lowest-priority backings until the cache
+         * fits within its maximum size. Returns false if the backing can never fit.
+         */
+        private boolean reserve(int size) {
+            //Log.i("FM", "Reserving " + size + " bytes (max: " + mCacheMaxSize + " bytes).");
+            //Log.i("FM", "Current size " + mSize);
+            if (size > mCacheMaxSize) {
+                return false;
+            }
+            mSize += size;
+            // mSize accounts exactly for the queued backings, so the queue cannot run empty
+            // before mSize drops back under the limit.
+            while (mSize > mCacheMaxSize) {
+                Backing dropped = mQueue.poll();
+                mSize -= dropped.getSize();
+                //Log.i("FM", "Dropping  " + dropped + " with priority "
+                //    + dropped.cachePriority + ". New size: " + mSize + "!");
+                dropped.destroy();
+            }
+            return true;
+        }
+
+
+    }
+
+    /** Least-recently-used eviction: priority is a monotonically increasing fetch timestamp. */
+    private static class BackingCacheLru extends PriorityBackingCache {
+        private int mTimestamp = 0;
+
+        @Override
+        protected void onCacheBacking(Backing backing) {
+            backing.cachePriority = 0;
+        }
+
+        @Override
+        protected void onFetchBacking(Backing backing) {
+            ++mTimestamp;
+            backing.cachePriority = mTimestamp;
+        }
+    }
+
+    /** Least-frequently-used eviction: priority is the number of times the backing was fetched. */
+    private static class BackingCacheLfu extends PriorityBackingCache {
+        @Override
+        protected void onCacheBacking(Backing backing) {
+            backing.cachePriority = 0;
+        }
+
+        @Override
+        protected void onFetchBacking(Backing backing) {
+            ++backing.cachePriority;
+        }
+    }
+
+    /**
+     * Returns the FrameManager of the GraphRunner executing on the current thread, or null if
+     * no runner is active on this thread.
+     */
+    public static FrameManager current() {
+        GraphRunner runner = GraphRunner.current();
+        return runner != null ? runner.getFrameManager() : null;
+    }
+
+    /**
+     * Returns the context that the FrameManager is bound to.
+     *
+     * @return the MffContext instance that the FrameManager is bound to.
+     */
+    public MffContext getContext() {
+        return mRunner.getContext();
+    }
+
+    /**
+     * Returns the GraphRunner that the FrameManager is bound to.
+     *
+     * @return the GraphRunner instance that the FrameManager is bound to.
+     */
+    public GraphRunner getRunner() {
+        return mRunner;
+    }
+
+    /**
+     * Sets the size of the cache.
+     *
+     * Resizes the cache to the specified size in bytes.
+     *
+     * @param bytes the new size in bytes.
+     */
+    public void setCacheSize(int bytes) {
+        mCache.setSize(bytes);
+    }
+
+    /**
+     * Returns the size of the cache.
+     *
+     * @return the size of the cache in bytes.
+     */
+    public int getCacheSize() {
+        return mCache.getSize();
+    }
+
+    /**
+     * Imports a frame from another FrameManager.
+     *
+     * This will return a frame with the contents of the given frame for use in this FrameManager.
+     * Note, that there is a substantial cost involved in moving a Frame from one FrameManager to
+     * another. This may be called from any thread. After the frame has been imported, it may be
+     * used in the runner that uses this FrameManager. As the new frame may share data with the
+     * provided frame, that frame must be read-only.
+     *
+     * @param frame The frame to import
+     */
+    public Frame importFrame(Frame frame) {
+        if (!frame.isReadOnly()) {
+            throw new IllegalArgumentException("Frame " + frame + " must be read-only to import "
+                    + "into another FrameManager!");
+        }
+        return frame.makeCpuCopy(this);
+    }
+
+    /**
+     * Adds a new frame slot to the frame manager.
+     * Filters can reference frame slots to pass frames between graphs or runs. If the name
+     * specified here is already taken the frame slot is overwritten. You can only
+     * modify frame-slots while no graph of the frame manager is running.
+     *
+     * @param name The name of the slot.
+     * @param type The type of Frame that will be assigned to this slot.
+     * @param flags A mask of {@code SLOT} flags.
+     */
+    public void addFrameSlot(String name, FrameType type, int flags) {
+        assertNotRunning();
+        FrameSlot oldSlot = mFrameSlots.get(name);
+        if (oldSlot != null) {
+            // Release any frame held by the old slot before replacing it.
+            removeFrameSlot(name);
+        }
+        FrameSlot slot = new FrameSlot(type, flags);
+        mFrameSlots.put(name, slot);
+    }
+
+    /**
+     * Removes a frame slot from the frame manager.
+     * Any frame within the slot is released. You can only modify frame-slots while no graph
+     * of the frame manager is running.
+     *
+     * @param name The name of the slot
+     * @throws IllegalArgumentException if no such slot exists.
+     */
+    public void removeFrameSlot(String name) {
+        assertNotRunning();
+        FrameSlot slot = getSlot(name);
+        slot.releaseFrame();
+        // Fix: the map is keyed by slot name. The previous remove(slot) passed the FrameSlot
+        // value as the key, which never matched and left the (now empty) slot in the map.
+        mFrameSlots.remove(name);
+    }
+
+    /**
+     * Stores a frame in the slot with the given name.
+     * The slot retains its own reference to the frame; any previously stored frame is released.
+     * May only be called while a graph of this frame manager is running.
+     *
+     * @param frame The frame to store.
+     * @param slotName The name of the slot to store the frame in.
+     * @throws IllegalArgumentException if no such slot exists.
+     */
+    public void storeFrame(Frame frame, String slotName) {
+        assertInGraphRun();
+        getSlot(slotName).assignFrame(frame);
+    }
+
+    /**
+     * Returns the frame stored in the slot with the given name, retained for the caller.
+     * Non-sticky slots are emptied by this call. May only be called while a graph of this
+     * frame manager is running.
+     *
+     * @param slotName The name of the slot to fetch from.
+     * @throws IllegalArgumentException if no such slot exists.
+     */
+    public Frame fetchFrame(String slotName) {
+        assertInGraphRun();
+        return getSlot(slotName).getFrame();
+    }
+
+    /**
+     * Clears the Frame cache.
+     */
+    public void clearCache() {
+        mCache.clear();
+    }
+
+    /**
+     * Create a new FrameManager instance.
+     *
+     * Creates a new FrameManager instance in the specified context and employing a cache with the
+     * specified cache type (see the cache type constants defined by the FrameManager class).
+     *
+     * @param runner the GraphRunner to bind the FrameManager to.
+     * @param cacheType the type of cache to use.
+     */
+    FrameManager(GraphRunner runner, int cacheType) {
+        mRunner = runner;
+        switch (cacheType) {
+            case FRAME_CACHE_NONE:
+                mCache = new BackingCacheNone();
+                break;
+            case FRAME_CACHE_LRU:
+                mCache = new BackingCacheLru();
+                break;
+            case FRAME_CACHE_LFU:
+                mCache = new BackingCacheLfu();
+                break;
+            default:
+                throw new IllegalArgumentException("Unknown cache-type " + cacheType + "!");
+        }
+    }
+
+    /** Returns a cached backing compatible with the request, or null if none is available. */
+    Backing fetchBacking(int mode, int access, int[] dimensions, int elemSize) {
+        return mCache.fetchBacking(mode, access, dimensions, elemSize);
+    }
+
+    /** Tracks a newly created backing so it can be destroyed with this manager. */
+    void onBackingCreated(Backing backing) {
+        if (backing != null) {
+            mBackings.add(backing);
+            // Log.i("FrameManager", "RM: Now have " + mBackings.size() + " backings");
+        }
+    }
+
+    /**
+     * Called when a backing is no longer referenced by any frame. Backings that cannot (or should
+     * not) be cached are destroyed and untracked.
+     */
+    void onBackingAvailable(Backing backing) {
+        if (!backing.shouldCache() || !mCache.cacheBacking(backing)) {
+            backing.destroy();
+            mBackings.remove(backing);
+            //Log.i("FrameManager", "RM: Now have " + mBackings.size() + " backings (" + mCache.getSizeLeft() + ")");
+        }
+    }
+
+    /**
+     * Destroying all references makes any Frames that contain them invalid.
+     */
+    void destroyBackings() {
+        for (Backing backing : mBackings) {
+            backing.destroy();
+        }
+        mBackings.clear();
+        mCache.clear();
+    }
+
+    /** Returns the slot registered under {@code name}, throwing if it does not exist. */
+    FrameSlot getSlot(String name) {
+        FrameSlot slot = mFrameSlots.get(name);
+        if (slot == null) {
+            throw new IllegalArgumentException("Unknown frame slot '" + name + "'!");
+        }
+        return slot;
+    }
+
+    /** Marks all slot-held frames writable at the start of a graph run. */
+    void onBeginRun() {
+        for (FrameSlot slot : mFrameSlots.values()) {
+            slot.markWritable();
+        }
+    }
+
+    // Internals ///////////////////////////////////////////////////////////////////////////////////
+    // A null dimension array acts as a wildcard and matches anything.
+    private static boolean dimensionsCompatible(int[] dimA, int[] dimB) {
+        return dimA == null || dimB == null || Arrays.equals(dimA, dimB);
+    }
+
+    private void assertNotRunning() {
+        if (mRunner.isRunning()) {
+            throw new IllegalStateException("Attempting to modify FrameManager while graph is "
+                + "running!");
+        }
+    }
+
+    private void assertInGraphRun() {
+        if (!mRunner.isRunning() || GraphRunner.current() != mRunner) {
+            throw new IllegalStateException("Attempting to access FrameManager Frame data "
+                + "outside of graph run-loop!");
+        }
+    }
+
+}
+
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/FrameQueue.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/FrameQueue.java
new file mode 100644
index 0000000..c26f937
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/FrameQueue.java
@@ -0,0 +1,184 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
+ * in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License
+ * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
+ * or implied. See the License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package androidx.media.filterfw;
+
+import java.util.Vector;
+
+/**
+ * A FrameQueue transports frames along a connection, from a writing port to a reading port.
+ * The queue owns a retained reference to any frame it currently holds; pulled frames are handed
+ * to the caller without an additional retain.
+ */
+class FrameQueue {
+
+    /**
+     * Builder used to construct a FrameQueue. It merges the writer's and reader's declared
+     * frame-types (and the types of any attached queues) into the queue's final type.
+     */
+    public static class Builder {
+
+        private FrameType mReadType = null;
+        private FrameType mWriteType = null;
+
+        // Queues whose types must also be merged into the built queue's type.
+        private Vector<FrameQueue> mAttachedQueues = new Vector<FrameQueue>();
+
+        public Builder() {}
+
+        public void setWriteType(FrameType type) {
+            mWriteType = type;
+        }
+
+        public void setReadType(FrameType type) {
+            mReadType = type;
+        }
+
+        public void attachQueue(FrameQueue queue) {
+            mAttachedQueues.add(queue);
+        }
+
+        /** Builds a FrameQueue with the merged type. Type validation is currently disabled. */
+        public FrameQueue build(String name) {
+            FrameType type = buildType();
+            // TODO: This currently does not work correctly (Try camera -> branch -> target-slot)
+            //validateType(type, name);
+            FrameQueue result = new FrameQueue(type, name);
+            buildQueueImpl(result);
+            return result;
+        }
+
+        // Currently always installs a single-frame implementation; attached queues do not
+        // influence the chosen implementation, only the merged type.
+        private void buildQueueImpl(FrameQueue queue) {
+            QueueImpl queueImpl = queue.new SingleFrameQueueImpl();
+            queue.mQueueImpl = queueImpl;
+        }
+
+        // Merges write-type, read-type and all attached queues' types into one FrameType.
+        private FrameType buildType() {
+            FrameType result = FrameType.merge(mWriteType, mReadType);
+            for (FrameQueue queue : mAttachedQueues) {
+                result = FrameType.merge(result, queue.mType);
+            }
+            return result;
+        }
+
+        /*
+        private void validateType(FrameType type, String queueName) {
+            if (!type.isSpecified()) {
+                throw new RuntimeException("Cannot build connection queue '" + queueName + "' as "
+                        + "its type (" + type + ") is underspecified!");
+            }
+        }
+         */
+    }
+
+    /** Internal strategy interface for the queue's storage behavior. */
+    private interface QueueImpl {
+        public boolean canPull();
+
+        public boolean canPush();
+
+        public Frame pullFrame();
+
+        public Frame fetchAvailableFrame(int[] dimensions);
+
+        public Frame peek();
+
+        public void pushFrame(Frame frame);
+
+        public void clear();
+    }
+
+    /**
+     * A queue implementation with capacity one: a pushed frame must be pulled before the next
+     * frame can be pushed.
+     */
+    private class SingleFrameQueueImpl implements QueueImpl {
+        private Frame mFrame = null;
+
+        @Override
+        public boolean canPull() {
+            return mFrame != null;
+        }
+
+        @Override
+        public boolean canPush() {
+            return mFrame == null;
+        }
+
+        @Override
+        public Frame pullFrame() {
+            // Ownership of the queue's reference transfers to the caller; the slot empties.
+            Frame result = mFrame;
+            mFrame = null;
+            return result;
+        }
+
+        @Override
+        public Frame peek() {
+            return mFrame;
+        }
+
+        @Override
+        public Frame fetchAvailableFrame(int[] dimensions) {
+            // Note that we cannot use a cached frame here, as we do not know where that cached
+            // instance would end up.
+            FrameManager manager = FrameManager.current();
+            return new Frame(mType, dimensions, manager);
+        }
+
+        @Override
+        public void pushFrame(Frame frame) {
+            // Retain on behalf of the queue; frames in transit are read-only.
+            mFrame = frame.retain();
+            mFrame.setReadOnly(true);
+        }
+
+        @Override
+        public void clear() {
+            if (mFrame != null) {
+                mFrame.release();
+                mFrame = null;
+            }
+        }
+    }
+
+    private QueueImpl mQueueImpl;
+    private FrameType mType;
+    private String mName;
+
+    public FrameType getType() {
+        return mType;
+    }
+
+    public boolean canPull() {
+        return mQueueImpl.canPull();
+    }
+
+    public boolean canPush() {
+        return mQueueImpl.canPush();
+    }
+
+    public Frame pullFrame() {
+        return mQueueImpl.pullFrame();
+    }
+
+    public Frame fetchAvailableFrame(int[] dimensions) {
+        return mQueueImpl.fetchAvailableFrame(dimensions);
+    }
+
+    public void pushFrame(Frame frame) {
+        mQueueImpl.pushFrame(frame);
+    }
+
+    public Frame peek() {
+        return mQueueImpl.peek();
+    }
+
+    @Override
+    public String toString() {
+        return mName;
+    }
+
+    public void clear() {
+        mQueueImpl.clear();
+    }
+
+    // Instances are created through Builder only.
+    private FrameQueue(FrameType type, String name) {
+        mType = type;
+        mName = name;
+    }
+
+}
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/FrameSlotSource.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/FrameSlotSource.java
new file mode 100644
index 0000000..0a093f93
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/FrameSlotSource.java
@@ -0,0 +1,49 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package androidx.media.filterpacks.base;
+
+import androidx.media.filterfw.*;
+
+/**
+ * A source filter that emits the Frame currently stored in a named FrameManager slot.
+ * The filter only becomes schedulable once the slot actually holds a frame.
+ */
+public final class FrameSlotSource extends SlotFilter {
+
+    public FrameSlotSource(MffContext context, String name, String slotName) {
+        super(context, name, slotName);
+    }
+
+    @Override
+    public Signature getSignature() {
+        // TODO: It would be nice if we could return the slot type here. Not currently possible
+        // as getSignature() is typically called before a FrameManager and its slots are setup.
+        Signature signature = new Signature()
+            .addOutputPort("frame", Signature.PORT_REQUIRED, FrameType.any());
+        return signature.disallowOtherPorts();
+    }
+
+    @Override
+    protected boolean canSchedule() {
+        // Only runnable when the base filter allows it and the slot has a frame to emit.
+        if (!super.canSchedule()) {
+            return false;
+        }
+        return slotHasFrame();
+    }
+
+    @Override
+    protected void onProcess() {
+        // fetchFrame returns a retained frame; release our reference once it is pushed out.
+        Frame slotFrame = getFrameManager().fetchFrame(mSlotName);
+        getConnectedOutputPort("frame").pushFrame(slotFrame);
+        slotFrame.release();
+    }
+
+}
+
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/FrameSlotTarget.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/FrameSlotTarget.java
new file mode 100644
index 0000000..55648c68
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/FrameSlotTarget.java
@@ -0,0 +1,43 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package androidx.media.filterpacks.base;
+
+import androidx.media.filterfw.*;
+
+/**
+ * A target filter that stores each incoming Frame into a named FrameManager slot,
+ * replacing whatever frame the slot previously held.
+ */
+public final class FrameSlotTarget extends SlotFilter {
+
+    public FrameSlotTarget(MffContext context, String name, String slotName) {
+        super(context, name, slotName);
+    }
+
+    @Override
+    public Signature getSignature() {
+        // TODO: It would be nice if we could return the slot type here. Not currently possible
+        // as getSignature() is typically called before a FrameManager and its slots are setup.
+        Signature signature = new Signature()
+            .addInputPort("frame", Signature.PORT_REQUIRED, FrameType.any());
+        return signature.disallowOtherPorts();
+    }
+
+    @Override
+    protected void onProcess() {
+        // The slot retains its own reference to the frame; the pulled frame is presumably
+        // owned by the input port (no release here, matching the original behavior).
+        Frame incomingFrame = getConnectedInputPort("frame").pullFrame();
+        getFrameManager().storeFrame(incomingFrame, mSlotName);
+    }
+
+}
+
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/FrameType.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/FrameType.java
new file mode 100644
index 0000000..bfa4018
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/FrameType.java
@@ -0,0 +1,430 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
+ * in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License
+ * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
+ * or implied. See the License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package androidx.media.filterfw;
+
+
+/**
+ * A FrameType instance specifies the data format of a Frame.
+ *
+ * FrameTypes are used mainly by Filters to specify the data type they intend to consume or produce.
+ * When filters are connected, their FrameType information is analyzed and checked for
+ * compatibility. This allows Filter writers to assume a certain data input type. It also helps
+ * filter-graph designers determine which filters can be hooked up to one another.
+ *
+ * A FrameType generally consists of an element type and number of dimensions. The currently
+ * supported element types are:
+ *
+ * <ul>
+ * <li>int8, int16, int32, int64</li>
+ * <li>float32, float64</li>
+ * <li>rgba8888</li>
+ * <li>object</li>
+ * <li>don't-care</li>
+ * </ul>
+ *
+ * If the object element type is used, class information may be appended to the FrameType to
+ * indicate what class of objects are expected. When constructing an object based FrameType, you
+ * have the option of either specifying a type that represents a single object of that class, or
+ * an array of objects (see the {@link #single()} and {@link #array()} constructors). A single
+ * object has a dimensionality of 0, while an array has a dimensionality of 1.
+ *
+ * When constructing a non-object type, you have the option of creating a 1D or 2D buffer, or
+ * a 2D image (see the {@link #buffer1D(int)}, {@link #buffer2D(int)}, and
+ * {@link #image2D(int, int)} constructors). To optimize access, provide access hints when making
+ * an image type.
+ *
+ * Finally, it is possible to create a wild-card type with the {@link #any()} constructor. This
+ * type matches any other type. Note, that this is a more general type than a {@code single(Object)}
+ * type that matches only object-base types (of any Object subclass). You may also specify the
+ * leave the element of any type unspecified by using the {@code ELEMENT_DONTCARE} constant.
+ *
+ * When a graph is connected the types between outputs and inputs are merged to a queue-type. All
+ * Frames in this queue will be of that type. In order for a merge to succeed the following
+ * conditions must hold:
+ *
+ * <ul>
+ * <li>The element types must be identical.</li>
+ * <li>The dimensions must match (except for singles and arrays, see below).</li>
+ * <li>For object-based types: The classes must be compatible.</li>
+ * <li>If one of the types is a wild-card, both types are always compatible.</li>
+ * </ul>
+ *
+ * Class compatibility is determined in an optimistic fashion, i.e. one class must be the subclass
+ * of the other. It does not matter which of the types is the subclass of the other. For instance,
+ * if one Filter outputs a type of class {@code Object}, and the consumer expects a Filter of type
+ * {@code Bitmap}, the connection is considered compatible. (Of course if at runtime a non-Bitmap
+ * object is produced, this will cause a runtime exception to be thrown).
+ *
+ * For convenience, single and array object-based types are compatible with one another. This
+ * in turn means that Frames with a single object can be accessed as an array with a single entry,
+ * and array based Frames can be accessed as a single object of the array class. For this reason
+ * you should prefer consuming objects as array types (if it makes sense for that specific port),
+ * as this will allow your Filter to handle multiple objects in one Frame while not giving up the
+ * possibility to deal with singles.
+ * TODO: This needs to be reworked. An array(int) should not be interchangeable with a single(int),
+ * but rather with a single(int[]). Use ArraySelectFilter for the former!
+ *
+ * After the types are merged, the queue-type must be a fully specified type. This means that the
+ * type must have its element and dimensions specified. This ensures that filters that need to
+ * query their input or output types receive meaningful information.
+ */
+public final class FrameType {
+
+    // Element type constants. 0 and 1 are special markers; 1xx are integer elements,
+    // 2xx are floating-point elements, 3xx are pixel formats.
+    public final static int ELEMENT_DONTCARE = 0;
+    public final static int ELEMENT_OBJECT = 1;
+
+    public final static int ELEMENT_INT8 = 100;
+    public final static int ELEMENT_INT16 = 101;
+    public final static int ELEMENT_INT32 = 102;
+    public final static int ELEMENT_INT64 = 103;
+
+    public final static int ELEMENT_FLOAT32 = 200;
+    public final static int ELEMENT_FLOAT64 = 201;
+
+    public final static int ELEMENT_RGBA8888 = 301;
+
+    // Access hint flags (combined into a bit-mask) used by image types to optimize storage.
+    public final static int READ_CPU = 0x01;
+    public final static int READ_GPU = 0x02;
+    public final static int READ_ALLOCATION = 0x04;
+    public final static int WRITE_CPU = 0x08;
+    public final static int WRITE_GPU = 0x10;
+    public final static int WRITE_ALLOCATION = 0x20;
+
+    private final static int ACCESS_UNKNOWN = 0x00;
+
+    private final int mElementId;
+    // Number of dimensions; -1 means "unspecified" (wild-card).
+    private final int mDimensions;
+    // Bit-mask of READ_*/WRITE_* flags; ACCESS_UNKNOWN when no hints were given.
+    private final int mAccessHints;
+    // Class of object-based types; null for non-object types and class-less object types.
+    private final Class<?> mClass;
+
+    // Interned FrameType instances keyed by keyValueForType(...).
+    // NOTE(review): the check-then-put in fetchType() is not atomic; confirm that types
+    // are only fetched from a single thread, or that SimpleCache tolerates the race.
+    private static final SimpleCache<String, FrameType> mTypeCache =
+            new SimpleCache<String, FrameType>(64);
+
+    /**
+     * Constructs a wild-card FrameType that matches any other FrameType.
+     * @return The wild-card FrameType instance.
+     */
+    public static FrameType any() {
+        return FrameType.fetchType(ELEMENT_DONTCARE, -1, ACCESS_UNKNOWN);
+    }
+
+    /**
+     * Constructs an object-based single FrameType that matches object-based FrameTypes of any
+     * class.
+     * @return A single object-based FrameType instance.
+     */
+    public static FrameType single() {
+        return FrameType.fetchType(null, 0);
+    }
+
+    /**
+     * Constructs an object-based single FrameType of the specified class.
+     * @param clazz The class of the FrameType.
+     * @return A single object-base FrameType instance of the specified class.
+     */
+    public static FrameType single(Class<?> clazz) {
+        return FrameType.fetchType(clazz, 0);
+    }
+
+    /**
+     * Constructs an object-based array FrameType that matches object-based FrameTypes of any class.
+     * @return An array object-based FrameType instance.
+     */
+    public static FrameType array() {
+        return FrameType.fetchType(null, 1);
+    }
+
+    /**
+     * Constructs an object-based array FrameType with elements of the specified class.
+     * @param clazz The class of the array elements (not the array type).
+     * @return An array object-based FrameType instance of the specified class.
+     */
+    public static FrameType array(Class<?> clazz) {
+        return FrameType.fetchType(clazz, 1);
+    }
+
+    /**
+     * Constructs a one-dimensional buffer type of the specified element.
+     * @param elementType One of the {@code ELEMENT} constants.
+     * @return A 1D buffer FrameType instance.
+     */
+    public static FrameType buffer1D(int elementType) {
+        return FrameType.fetchType(elementType, 1, ACCESS_UNKNOWN);
+    }
+
+    /**
+     * Constructs a two-dimensional buffer type of the specified element.
+     * @param elementType One of the {@code ELEMENT} constants.
+     * @return A 2D buffer FrameType instance.
+     */
+    public static FrameType buffer2D(int elementType) {
+        return FrameType.fetchType(elementType, 2, ACCESS_UNKNOWN);
+    }
+
+    /**
+     * Constructs a two-dimensional image type of the specified element.
+     * @param elementType One of the {@code ELEMENT} constants.
+     * @param accessHint A bit-mask of access flags (see {@code READ} and {@code WRITE} constants).
+     * @return A 2D image FrameType instance.
+     */
+    public static FrameType image2D(int elementType, int accessHint) {
+        return FrameType.fetchType(elementType, 2, accessHint);
+    }
+
+    /**
+     * Converts the current array type to a single type.
+     * The type must be an object-based type. If the type is already a single type, this does
+     * nothing.
+     * @return type as a single type.
+     * @throws RuntimeException if this is not an object-based type.
+     */
+    public FrameType asSingle() {
+        if (mElementId != ELEMENT_OBJECT) {
+            throw new RuntimeException("Calling asSingle() on non-object type!");
+        }
+        return FrameType.fetchType(mClass, 0);
+    }
+
+    /**
+     * Converts the current single type to an array type.
+     * The type must be an object-based type. If the type is already an array type, this does
+     * nothing.
+     * @return type as an array type.
+     * @throws RuntimeException if this is not an object-based type.
+     */
+    public FrameType asArray() {
+        if (mElementId != ELEMENT_OBJECT) {
+            throw new RuntimeException("Calling asArray() on non-object type!");
+        }
+        return FrameType.fetchType(mClass, 1);
+    }
+
+    /**
+     * Returns the FrameType's class specifier, or null if no class was set or the receiver is not
+     * an object-based type.
+     * @return The FrameType's class specifier or null.
+     */
+    public Class<?> getContentClass() {
+        return mClass;
+    }
+
+    /**
+     * Returns the FrameType's element id.
+     * @return The element id constant.
+     */
+    public int getElementId() {
+        return mElementId;
+    }
+
+    /**
+     * Returns the number of bytes of the FrameType's element, or 0 if no such size can be
+     * determined.
+     * @return The number of bytes of the FrameType's element.
+     */
+    public int getElementSize() {
+        switch (mElementId) {
+            case ELEMENT_INT8:
+                return 1;
+            case ELEMENT_INT16:
+                return 2;
+            case ELEMENT_INT32:
+            case ELEMENT_FLOAT32:
+            case ELEMENT_RGBA8888:
+                return 4;
+            case ELEMENT_INT64:
+            case ELEMENT_FLOAT64:
+                // 64-bit elements occupy 8 bytes (previously mis-reported as 4).
+                return 8;
+            default:
+                return 0;
+        }
+    }
+
+    /**
+     * Returns the access hints bit-mask of the FrameType.
+     * @return The access hints bit-mask of the FrameType.
+     */
+    public int getAccessHints() {
+        return mAccessHints;
+    }
+
+    /**
+     * Returns the number of dimensions of the FrameType or -1 if no dimensions were set.
+     * @return The number of dimensions of the FrameType.
+     */
+    public int getNumberOfDimensions() {
+        return mDimensions;
+    }
+
+    /**
+     * Returns true, if the FrameType is fully specified.
+     *
+     * A FrameType is fully specified if its element and dimensions are specified.
+     *
+     * @return true, if the FrameType is fully specified.
+     */
+    public boolean isSpecified() {
+        return mElementId != ELEMENT_DONTCARE && mDimensions >= 0;
+    }
+
+    @Override
+    public boolean equals(Object object) {
+        if (object instanceof FrameType) {
+            FrameType type = (FrameType) object;
+            return mElementId == type.mElementId && mDimensions == type.mDimensions
+                    && mAccessHints == type.mAccessHints && mClass == type.mClass;
+        }
+        return false;
+    }
+
+    @Override
+    public int hashCode() {
+        // mClass is null for all non-object types (e.g. any(), buffer1D(...)), so guard
+        // against an NPE here; equals() compares mClass with ==, which handles null.
+        int classHash = (mClass == null) ? 0 : mClass.hashCode();
+        return mElementId ^ mDimensions ^ mAccessHints ^ classHash;
+    }
+
+    @Override
+    public String toString() {
+        String result = elementToString(mElementId, mClass) + "[" + mDimensions + "]";
+        if ((mAccessHints & READ_CPU) != 0) {
+            result += "(rcpu)";
+        }
+        if ((mAccessHints & READ_GPU) != 0) {
+            result += "(rgpu)";
+        }
+        if ((mAccessHints & READ_ALLOCATION) != 0) {
+            result += "(ralloc)";
+        }
+        if ((mAccessHints & WRITE_CPU) != 0) {
+            result += "(wcpu)";
+        }
+        if ((mAccessHints & WRITE_GPU) != 0) {
+            result += "(wgpu)";
+        }
+        if ((mAccessHints & WRITE_ALLOCATION) != 0) {
+            result += "(walloc)";
+        }
+        return result;
+    }
+
+    /** Returns the cache key that uniquely identifies this type in mTypeCache. */
+    String keyString() {
+        return keyValueForType(mElementId, mDimensions, mAccessHints, mClass);
+    }
+
+    /**
+     * Attempts to merge a writer (output) type and a reader (input) type into a queue type.
+     * Returns null if the two types are incompatible.
+     */
+    static FrameType tryMerge(FrameType writer, FrameType reader) {
+        if (writer.mElementId == ELEMENT_DONTCARE) {
+            return reader;
+        } else if (reader.mElementId == ELEMENT_DONTCARE) {
+            return writer;
+        } else if (writer.mElementId == ELEMENT_OBJECT && reader.mElementId == ELEMENT_OBJECT) {
+            return tryMergeObjectTypes(writer, reader);
+        } else if (writer.mDimensions > 0 && writer.mElementId == reader.mElementId) {
+            return tryMergeBuffers(writer, reader);
+        } else {
+            return null;
+        }
+    }
+
+    /**
+     * Merges two object-based types: uses the larger dimensionality (singles and arrays are
+     * interchangeable) and the more specialized of the two classes. Returns null on failure.
+     */
+    static FrameType tryMergeObjectTypes(FrameType writer, FrameType reader) {
+        int dimensions = Math.max(writer.mDimensions, reader.mDimensions);
+        Class<?> mergedClass = mergeClasses(writer.mClass, reader.mClass);
+        boolean success = mergedClass != null || writer.mClass == null;
+        return success ? FrameType.fetchType(mergedClass, dimensions) : null;
+    }
+
+    /**
+     * Merges two buffer types of the same element: dimensions must match exactly, and access
+     * hints are combined. Returns null on a dimension mismatch.
+     */
+    static FrameType tryMergeBuffers(FrameType writer, FrameType reader) {
+        if (writer.mDimensions == reader.mDimensions) {
+            int accessHints = writer.mAccessHints | reader.mAccessHints;
+            return FrameType.fetchType(writer.mElementId, writer.mDimensions, accessHints);
+        }
+        return null;
+    }
+
+    /**
+     * Merges a writer and reader type like {@link #tryMerge}, but throws a RuntimeException
+     * when the types are incompatible instead of returning null.
+     */
+    static FrameType merge(FrameType writer, FrameType reader) {
+        FrameType result = tryMerge(writer, reader);
+        if (result == null) {
+            throw new RuntimeException(
+                    "Incompatible types in connection: " + writer + " vs. " + reader + "!");
+        }
+        return result;
+    }
+
+    /** Builds the canonical cache-key string for a type's components. */
+    private static String keyValueForType(int elemId, int dims, int hints, Class<?> clazz) {
+        return elemId + ":" + dims + ":" + hints + ":" + (clazz != null ? clazz.getName() : "0");
+    }
+
+    /** Returns a human-readable name for an element id, used by toString(). */
+    private static String elementToString(int elemId, Class<?> clazz) {
+        switch (elemId) {
+            case ELEMENT_INT8:
+                return "int8";
+            case ELEMENT_INT16:
+                return "int16";
+            case ELEMENT_INT32:
+                return "int32";
+            case ELEMENT_INT64:
+                return "int64";
+            case ELEMENT_FLOAT32:
+                return "float32";
+            case ELEMENT_FLOAT64:
+                return "float64";
+            case ELEMENT_RGBA8888:
+                return "rgba8888";
+            case ELEMENT_OBJECT:
+                return "<" + (clazz == null ? "*" : clazz.getSimpleName()) + ">";
+            case ELEMENT_DONTCARE:
+                return "*";
+            default:
+                return "?";
+        }
+    }
+
+    /** Returns the more specialized of two classes, or null if neither is assignable. */
+    private static Class<?> mergeClasses(Class<?> classA, Class<?> classB) {
+        // Return the most specialized class.
+        if (classA == null) {
+            return classB;
+        } else if (classB == null) {
+            return classA;
+        } else if (classA.isAssignableFrom(classB)) {
+            return classB;
+        } else if (classB.isAssignableFrom(classA)) {
+            return classA;
+        } else {
+            return null;
+        }
+    }
+
+    /** Interned-instance lookup for non-object types. */
+    private static FrameType fetchType(int elementId, int dimensions, int accessHints) {
+        return fetchType(elementId, dimensions, accessHints, null);
+    }
+
+    /** Interned-instance lookup for object-based types. */
+    private static FrameType fetchType(Class<?> clazz, int dimensions) {
+        return fetchType(ELEMENT_OBJECT, dimensions, ACCESS_UNKNOWN, clazz);
+    }
+
+    /** Returns the cached FrameType for the given components, creating it on a cache miss. */
+    private static FrameType fetchType(
+            int elementId, int dimensions, int accessHints, Class<?> clazz) {
+        String typeKey = FrameType.keyValueForType(elementId, dimensions, accessHints, clazz);
+        FrameType type = mTypeCache.get(typeKey);
+        if (type == null) {
+            type = new FrameType(elementId, dimensions, accessHints, clazz);
+            mTypeCache.put(typeKey, type);
+        }
+        return type;
+    }
+
+    private FrameType(int elementId, int dimensions, int accessHints, Class<?> clazz) {
+        mElementId = elementId;
+        mDimensions = dimensions;
+        mClass = clazz;
+        mAccessHints = accessHints;
+    }
+
+}
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/FrameValue.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/FrameValue.java
new file mode 100644
index 0000000..fb007e2
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/FrameValue.java
@@ -0,0 +1,50 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package androidx.media.filterfw;
+
+import androidx.media.filterfw.BackingStore.Backing;
+
+/**
+ * A Frame holding a single Java object.
+ */
+public class FrameValue extends Frame {
+
+    /**
+     * Returns the object held by this Frame.
+     * Locks the backing store for reading, fetches the object, and unlocks again.
+     */
+    public Object getValue() {
+        Object data = mBackingStore.lockData(MODE_READ, BackingStore.ACCESS_OBJECT);
+        mBackingStore.unlock();
+        return data;
+    }
+
+    /**
+     * Assigns a new object to this Frame.
+     * Locks the backing store for writing, stores the object, and unlocks again.
+     */
+    public void setValue(Object value) {
+        Backing objectBacking = mBackingStore.lockBacking(MODE_WRITE, BackingStore.ACCESS_OBJECT);
+        objectBacking.setData(value);
+        mBackingStore.unlock();
+    }
+
+    /** Creates a FrameValue over the given backing store (must be object-based). */
+    static FrameValue create(BackingStore backingStore) {
+        assertObjectBased(backingStore.getFrameType());
+        FrameValue frame = new FrameValue(backingStore);
+        return frame;
+    }
+
+    FrameValue(BackingStore backingStore) {
+        super(backingStore);
+    }
+
+    /** Throws if the given type is not object-based, i.e. cannot be viewed as a FrameValue. */
+    static void assertObjectBased(FrameType type) {
+        if (type.getElementId() == FrameType.ELEMENT_OBJECT) {
+            return;
+        }
+        throw new RuntimeException("Cannot access non-object based Frame as FrameValue!");
+    }
+}
+
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/FrameValues.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/FrameValues.java
new file mode 100644
index 0000000..fbddcb1
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/FrameValues.java
@@ -0,0 +1,155 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package androidx.media.filterfw;
+
+import java.lang.reflect.Array;
+
+public class FrameValues extends FrameValue {
+
+    /**
+     * Returns the number of values in the Frame.
+     *
+     * This returns 1, if the Frame value is null, or if the value is not an array.
+     *
+     * @return The number of values in the Frame.
+     */
+    public int getCount() {
+        Object value = super.getValue();
+        if (value == null || !value.getClass().isArray()) {
+            return 1;
+        } else {
+            // Reuse the already-fetched value; calling getValue() again would lock and
+            // unlock the backing store a second time for no benefit.
+            return Array.getLength(value);
+        }
+    }
+
+    /**
+     * Returns the values in the Frame as an array.
+     *
+     * Note, that this may be called on Frames that have a non-array object assigned to them. In
+     * that case, this method will wrap the object in an array and return that. This way, filters
+     * can treat any object based frame as arrays.
+     *
+     * @return The array of values in this frame.
+     */
+    public Object getValues() {
+        Object value = super.getValue();
+        if (value == null || value.getClass().isArray()) {
+            // Reuse the already-fetched value (avoids a second backing-store lock).
+            return value;
+        } else {
+            // Allow reading a single as an array.
+            Object[] array = (Object[]) Array.newInstance(value.getClass(), 1);
+            array[0] = value;
+            return array;
+        }
+    }
+
+    /**
+     * Returns the value at the specified index.
+     *
+     * In case the value is null or not an array, the index must be 0, and the value itself is
+     * returned.
+     *
+     * @param index The index to access.
+     * @return The value at that index.
+     * @throws ArrayIndexOutOfBoundsException if the index is out of range.
+     */
+    public Object getValueAtIndex(int index) {
+        Object value = super.getValue();
+        if (value == null || !value.getClass().isArray()) {
+            if (index != 0) {
+                throw new ArrayIndexOutOfBoundsException(index);
+            } else {
+                return value;
+            }
+        } else {
+            return Array.get(value, index);
+        }
+    }
+
+    /**
+     * Returns the value as a FrameValue at the specified index.
+     *
+     * Use this if you want to access elements as FrameValues. You must release the result when
+     * you are done using it.
+     *
+     * @param index The index to access.
+     * @return The value as a FrameValue at that index (must release).
+     */
+    public FrameValue getFrameValueAtIndex(int index) {
+        Object value = getValueAtIndex(index);
+        FrameValue result = Frame.create(getType().asSingle(), new int[0]).asFrameValue();
+        result.setValue(value);
+        return result;
+    }
+
+    /**
+     * Assign the array of values to the frame.
+     *
+     * You may assign null or a non-array object, which are interpreted as a 1-length array.
+     *
+     * @param values The values to assign to the frame.
+     */
+    public void setValues(Object values) {
+        super.setValue(values);
+    }
+
+    /**
+     * Assign a value at the specified index.
+     *
+     * In case the held value is not an array, the index must be 0, and the object will be replaced
+     * by the new object.
+     *
+     * @param value The value to assign.
+     * @param index The index to assign to.
+     * @throws ArrayIndexOutOfBoundsException if the index is out of range.
+     */
+    public void setValueAtIndex(Object value, int index) {
+        super.assertAccessible(MODE_WRITE);
+        Object curValue = super.getValue();
+        if (curValue == null || !curValue.getClass().isArray()) {
+            if (index != 0) {
+                throw new ArrayIndexOutOfBoundsException(index);
+            } else {
+                // Persist the replacement through the backing store. The previous code
+                // assigned to the local variable only, so the write was silently lost.
+                super.setValue(value);
+            }
+        } else {
+            // NOTE(review): this mutates the array after the backing store has been
+            // unlocked by getValue() — confirm that object backings share the array
+            // reference so the mutation is visible.
+            Array.set(curValue, index, value);
+        }
+    }
+
+    /**
+     * Assign a FrameValue's value at the specified index.
+     *
+     * This method unpacks the FrameValue and assigns the unpacked value to the specified index.
+     * This does not affect the retain-count of the passed Frame.
+     *
+     * @param frame The frame value to assign.
+     * @param index The index to assign to.
+     */
+    public void setFrameValueAtIndex(FrameValue frame, int index) {
+        Object value = frame.getValue();
+        setValueAtIndex(value, index);
+    }
+
+    /** Creates a FrameValues over the given backing store (must be object-based). */
+    static FrameValues create(BackingStore backingStore) {
+        assertObjectBased(backingStore.getFrameType());
+        return new FrameValues(backingStore);
+    }
+
+    FrameValues(BackingStore backingStore) {
+        super(backingStore);
+    }
+}
+
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/GLToolbox.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/GLToolbox.java
new file mode 100644
index 0000000..1c3c7e9
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/GLToolbox.java
@@ -0,0 +1,194 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package androidx.media.filterfw;
+
+import android.graphics.Bitmap;
+import android.opengl.GLES20;
+import android.opengl.GLUtils;
+import android.os.Looper;
+
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+
+/**
+ * TODO: Make this package-private as RenderTarget and TextureSource should suffice as public
+ * facing OpenGL utilities.
+ * @hide
+ */
+public class GLToolbox {
+
+    /** Returns the texture id that represents "no texture" (0). */
+    public static int textureNone() {
+        return 0;
+    }
+
+    /** Returns true if texId names a valid GL texture in the current context. */
+    public static boolean isTexture(int texId) {
+        return GLES20.glIsTexture(texId);
+    }
+
+    /** Deletes the given texture. Must not be called on the UI thread. */
+    public static void deleteTexture(int texId) {
+        int[] textures = new int[] { texId };
+        assertNonUiThread("glDeleteTextures");
+        GLES20.glDeleteTextures(1, textures, 0);
+        checkGlError("glDeleteTextures");
+    }
+
+    /** Deletes the given framebuffer object. Must not be called on the UI thread. */
+    public static void deleteFbo(int fboId) {
+        int[] fbos = new int[] { fboId };
+        assertNonUiThread("glDeleteFramebuffers");
+        GLES20.glDeleteFramebuffers(1, fbos, 0);
+        checkGlError("glDeleteFramebuffers");
+    }
+
+    /** Generates and returns a new texture id. */
+    public static int generateTexture() {
+        int[] textures = new int[1];
+        GLES20.glGenTextures(1, textures, 0);
+        checkGlError("glGenTextures");
+        return textures[0];
+    }
+
+    /** Generates and returns a new framebuffer object id. */
+    public static int generateFbo() {
+        int[] fbos = new int[1];
+        GLES20.glGenFramebuffers(1, fbos, 0);
+        checkGlError("glGenFramebuffers");
+        return fbos[0];
+    }
+
+    /**
+     * Reads the RGBA pixels of the given FBO into the provided buffer.
+     * Leaves the FBO bound as the current framebuffer.
+     */
+    public static void readFbo(int fboId, ByteBuffer pixels, int width, int height) {
+        GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, fboId);
+        GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, pixels);
+        checkGlError("glReadPixels");
+    }
+
+    /** Reads the RGBA pixels of the given render target into the provided buffer. */
+    public static void readTarget(RenderTarget target, ByteBuffer pixels, int width, int height) {
+        target.focus();
+        GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, pixels);
+        checkGlError("glReadPixels");
+    }
+
+    /**
+     * Returns the texture attached to the color attachment of the given FBO.
+     * Leaves the FBO bound as the current framebuffer.
+     */
+    public static int attachedTexture(int fboId) {
+        int[] params = new int[1];
+        // Bind the queried FBO first. Previously fboId was ignored, so the query ran
+        // against whatever framebuffer happened to be bound.
+        GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, fboId);
+        GLES20.glGetFramebufferAttachmentParameteriv(
+            GLES20.GL_FRAMEBUFFER,
+            GLES20.GL_COLOR_ATTACHMENT0,
+            GLES20.GL_FRAMEBUFFER_ATTACHMENT_OBJECT_NAME,
+            params, 0);
+        checkGlError("glGetFramebufferAttachmentParameteriv");
+        return params[0];
+    }
+
+    /** Attaches the given texture to the color attachment of the given FBO. */
+    public static void attachTextureToFbo(int texId, int fboId) {
+        GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, fboId);
+        GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER,
+                                      GLES20.GL_COLOR_ATTACHMENT0,
+                                      GLES20.GL_TEXTURE_2D,
+                                      texId,
+                                      0);
+        checkGlError("glFramebufferTexture2D");
+    }
+
+    /** Allocates (uninitialized) RGBA pixel storage of the given size for the texture. */
+    public static void allocateTexturePixels(int texId, int target, int width, int height) {
+        setTexturePixels(texId, target, (ByteBuffer)null, width, height);
+    }
+
+    /** Uploads the bitmap's pixels to the given texture and sets default parameters. */
+    public static void setTexturePixels(int texId, int target, Bitmap bitmap) {
+        GLES20.glBindTexture(target, texId);
+        GLUtils.texImage2D(target, 0, bitmap, 0);
+        checkGlError("glTexImage2D");
+        setDefaultTexParams();
+    }
+
+    /**
+     * Uploads raw RGBA pixels to the given texture and sets default parameters.
+     * A null pixel buffer allocates storage filled with zeroes.
+     */
+    public static void setTexturePixels(int texId, int target, ByteBuffer pixels,
+                                        int width, int height) {
+        GLES20.glBindTexture(target, texId);
+
+        // For some devices, "pixels" being null causes system error.
+        if (pixels == null) {
+            pixels = ByteBuffer.allocateDirect(width * height * 4);
+        }
+        GLES20.glTexImage2D(target, 0, GLES20.GL_RGBA, width, height, 0,
+                            GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, pixels);
+        checkGlError("glTexImage2D");
+        setDefaultTexParams();
+    }
+
+    /** Applies linear filtering and clamp-to-edge wrapping to the bound 2D texture. */
+    public static void setDefaultTexParams() {
+        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D,
+                               GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
+        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D,
+                               GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
+        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D,
+                               GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
+        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D,
+                               GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
+        checkGlError("glTexParameteri");
+    }
+
+    /** Returns the VBO id that represents "no VBO" (0). */
+    public static int vboNone() {
+        return 0;
+    }
+
+    /** Generates and returns a new vertex buffer object id. */
+    public static int generateVbo() {
+        int[] vbos = new int[1];
+        GLES20.glGenBuffers(1, vbos, 0);
+        checkGlError("glGenBuffers");
+        return vbos[0];
+    }
+
+    /** Uploads the buffer's remaining bytes into the given VBO as static draw data. */
+    public static void setVboData(int vboId, ByteBuffer data) {
+        GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vboId);
+        GLES20.glBufferData(GLES20.GL_ARRAY_BUFFER, data.remaining(), data, GLES20.GL_STATIC_DRAW);
+        checkGlError("glBufferData");
+    }
+
+    /** Uploads the float values into the given VBO. */
+    public static void setVboFloats(int vboId, float[] values) {
+        int len = values.length * 4;
+        ByteBuffer buffer = ByteBuffer.allocateDirect(len).order(ByteOrder.nativeOrder());
+        // Copy the values into the buffer before uploading. Previously the buffer was
+        // uploaded without being filled, so the VBO never received the caller's data.
+        buffer.asFloatBuffer().put(values);
+        setVboData(vboId, buffer);
+    }
+
+    /** Returns true if vboId names a valid GL buffer in the current context. */
+    public static boolean isVbo(int vboId) {
+        return GLES20.glIsBuffer(vboId);
+    }
+
+    /** Deletes the given vertex buffer object. */
+    public static void deleteVbo(int vboId) {
+        // NOTE(review): unlike deleteTexture/deleteFbo this does not assert off the UI
+        // thread — confirm whether that asymmetry is intentional.
+        int[] buffers = new int[] { vboId };
+        GLES20.glDeleteBuffers(1, buffers, 0);
+        checkGlError("glDeleteBuffers");
+    }
+
+    /** Throws if any GL error is pending, naming the operation that caused it. */
+    public static void checkGlError(String operation) {
+        int error;
+        while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
+            throw new RuntimeException("GL Operation '" + operation + "' caused error "
+                + Integer.toHexString(error) + "!");
+        }
+    }
+
+    /**
+     * Make sure we are not operating in the UI thread.
+     *
+     * It is often tricky to track down bugs that happen when issuing GL commands in the UI thread.
+     * This is especially true when releasing GL resources. Often this will cause errors much later
+     * on. Therefore we make sure we do not do these dangerous operations on the UI thread.
+     */
+    private static void assertNonUiThread(String operation) {
+        if (Looper.getMainLooper().getThread() == Thread.currentThread()) {
+            throw new RuntimeException("Attempting to perform GL operation '" + operation
+                    + "' on UI thread!");
+        }
+    }
+}
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/GraphExporter.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/GraphExporter.java
new file mode 100644
index 0000000..0013965
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/GraphExporter.java
@@ -0,0 +1,199 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// This class provides functions to export a FilterGraph.
+
+package androidx.media.filterfw;
+
+import android.content.Context;
+
+import java.io.FileOutputStream;
+import java.io.OutputStreamWriter;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map.Entry;
+import java.util.Set;
+
+/**
+ * This class provides functions to export a FilterGraph as a DOT file.
+ */
+public class GraphExporter {
+
+    /**
+     * Exports the graph as DOT (see http://en.wikipedia.org/wiki/DOT_language).
+     * Using the exported file, the graph can be visualized e.g. with the command line tool dot.
+     * Optionally, one may /exclude/ unconnected optional ports (third parameter = false),
+     * since they can quickly clutter the visualization (and, depending on the purpose, may not
+     * be interesting).
+     *
+     * Example workflow:
+     *  1. run application on device, make sure it calls exportAsDot(...);
+     *  2. adb pull /data/data/<application name>/files/<graph filename>.gv graph.gv
+     *  3. dot -Tpng graph.gv -o graph.png
+     *  4. eog graph.png
+     */
+    static public void exportAsDot(FilterGraph graph, String filename,
+            boolean includeUnconnectedOptionalPorts)
+            throws java.io.FileNotFoundException, java.io.IOException {
+        // Initialize, open file stream
+        Context myAppContext = graph.getContext().getApplicationContext();
+        Filter[] filters = graph.getAllFilters();
+        FileOutputStream fOut = myAppContext.openFileOutput(filename, Context.MODE_PRIVATE);
+        OutputStreamWriter dotFile = new OutputStreamWriter(fOut);
+        try {
+            // Write beginning of DOT file
+            dotFile.write("digraph graphname {\n");
+            dotFile.write("  node [shape=record];\n");
+
+            // N.B. For specification and lots of examples of the DOT language, see
+            //   http://www.graphviz.org/Documentation/dotguide.pdf
+
+            // Iterate over all filters of the graph, write corresponding DOT node elements
+
+            for (Filter filter : filters) {
+                dotFile.write(getDotName("  " + filter.getName()) + " [label=\"{");
+
+                // Write upper part of element (i.e., input ports)
+                Set<String> inputPorts = getInputPorts(filter, includeUnconnectedOptionalPorts);
+                if (inputPorts.size() > 0) {
+                    dotFile.write(" { ");
+                    int counter = 0;
+                    for (String p : inputPorts) {
+                        dotFile.write("<" + getDotName(p) + "_IN>" + p);
+                        if (++counter != inputPorts.size()) dotFile.write(" | ");
+                    }
+                    dotFile.write(" } | ");
+                }
+
+                // Write center part of element (i.e., element label)
+                dotFile.write(filter.getName());
+
+                // Write lower part of element (i.e., output ports)
+                Set<String> outputPorts = getOutputPorts(filter, includeUnconnectedOptionalPorts);
+                if (outputPorts.size() > 0) {
+                    dotFile.write(" | { ");
+                    int counter = 0;
+                    for (String p : outputPorts) {
+                        dotFile.write("<" + getDotName(p) + "_OUT>" + p);
+                        if (++counter != outputPorts.size()) dotFile.write(" | ");
+                    }
+                    dotFile.write(" } ");
+                }
+
+                dotFile.write("}\"];\n");
+            }
+            dotFile.write("\n");
+
+            // Iterate over all filters again to collect connections and find unconnected ports
+
+            int dummyNodeCounter = 0;
+            for (Filter filter : filters) {
+                Set<String> outputPorts = getOutputPorts(filter, includeUnconnectedOptionalPorts);
+                for (String portName : outputPorts) {
+                    OutputPort source = filter.getConnectedOutputPort(portName);
+                    if (source != null) {
+                        // Found a connection, draw it
+                        InputPort target = source.getTarget();
+                        dotFile.write("  " +
+                            getDotName(source.getFilter().getName()) + ":" +
+                            getDotName(source.getName()) + "_OUT -> " +
+                            getDotName(target.getFilter().getName()) + ":" +
+                            getDotName(target.getName()) + "_IN;\n" );
+                    } else {
+                        // Found an unconnected output port, add dummy node
+                        String color = filter.getSignature().getOutputPortInfo(portName).isRequired()
+                            ? "red" : "blue";  // red for unconnected, required ports
+                        dotFile.write("  " +
+                            "dummy" + (++dummyNodeCounter) +
+                            " [shape=point,label=\"\",color=" + color + "];\n" +
+                            "  " + getDotName(filter.getName()) + ":" +
+                            getDotName(portName) + "_OUT -> " +
+                            "dummy" + dummyNodeCounter + " [color=" + color + "];\n");
+                    }
+                }
+
+                Set<String> inputPorts = getInputPorts(filter, includeUnconnectedOptionalPorts);
+                for (String portName : inputPorts) {
+                    InputPort target = filter.getConnectedInputPort(portName);
+                    // Connected input ports need no work here: their edges were already
+                    // written while walking the output ports above.
+                    if (target == null) {
+                        // Found an unconnected input port, add dummy node
+                        String color = filter.getSignature().getInputPortInfo(portName).isRequired()
+                            ? "red" : "blue";  // red for unconnected, required ports
+                        dotFile.write("  " +
+                            "dummy" + (++dummyNodeCounter) +
+                            " [shape=point,label=\"\",color=" + color + "];\n" +
+                            "  dummy" + dummyNodeCounter + " -> " +
+                            getDotName(filter.getName()) + ":" +
+                            getDotName(portName) + "_IN [color=" + color + "];\n");
+                    }
+                }
+            }
+
+            // Write end of DOT file
+            dotFile.write("}\n");
+            dotFile.flush();
+        } finally {
+            // Fix: close the writer (and underlying stream) even when a write fails,
+            // so the file descriptor is not leaked on error.
+            dotFile.close();
+        }
+    }
+
+    // Internal methods
+
+    // From element's name in XML, create DOT-allowed element name
+    static private String getDotName(String raw) {
+        return raw.replaceAll("\\.", "___"); // DOT does not allow . in element names
+    }
+
+    // Retrieve all input ports of a filter, including:
+    //  unconnected ports (which can not be retrieved from the filter, only from the signature), and
+    //  additional (connected) ports not listed in the signature (which is allowed by default,
+    //    unless disallowOtherInputs is defined in signature).
+    // With second parameter = false, *omit* unconnected optional ports.
+    static private Set<String> getInputPorts(Filter filter, boolean includeUnconnectedOptional) {
+        // add (connected) ports from filter
+        Set<String> ports = new HashSet<String>();
+        ports.addAll(filter.getConnectedInputPortMap().keySet());
+
+        // add (unconnected) ports from signature
+        HashMap<String, Signature.PortInfo> signaturePorts = filter.getSignature().getInputPorts();
+        if (signaturePorts != null) {
+            for (Entry<String, Signature.PortInfo> e : signaturePorts.entrySet()) {
+                if (includeUnconnectedOptional || e.getValue().isRequired()) {
+                    ports.add(e.getKey());
+                }
+            }
+        }
+        return ports;
+    }
+
+    // Retrieve all output ports of a filter (analogous to above function)
+    static private Set<String> getOutputPorts(Filter filter, boolean includeUnconnectedOptional) {
+        // add (connected) ports from filter
+        Set<String> ports = new HashSet<String>();
+        ports.addAll(filter.getConnectedOutputPortMap().keySet());
+
+        // add (unconnected) ports from signature
+        HashMap<String, Signature.PortInfo> signaturePorts = filter.getSignature().getOutputPorts();
+        if (signaturePorts != null) {
+            for (Entry<String, Signature.PortInfo> e : signaturePorts.entrySet()) {
+                if (includeUnconnectedOptional || e.getValue().isRequired()) {
+                    ports.add(e.getKey());
+                }
+            }
+        }
+        return ports;
+    }
+}
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/GraphInputSource.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/GraphInputSource.java
new file mode 100644
index 0000000..03b3abe
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/GraphInputSource.java
@@ -0,0 +1,58 @@
+// Copyright 2012 Google Inc. All Rights Reserved.
+
+package androidx.media.filterpacks.base;
+
+import androidx.media.filterfw.Filter;
+import androidx.media.filterfw.Frame;
+import androidx.media.filterfw.FrameType;
+import androidx.media.filterfw.MffContext;
+import androidx.media.filterfw.Signature;
+
+/**
+ * A source filter that emits frames pushed into it from Java code.
+ */
+public class GraphInputSource extends Filter {
+
+    // Frame waiting to be emitted; null if none is pending.
+    private Frame mFrame = null;
+
+    public GraphInputSource(MffContext context, String name) {
+        super(context, name);
+    }
+
+    @Override
+    public Signature getSignature() {
+        return new Signature()
+            .addOutputPort("frame", Signature.PORT_REQUIRED, FrameType.any())
+            .disallowOtherInputs();
+    }
+
+    /**
+     * Queues a frame to be emitted on the "frame" output port, replacing (and
+     * releasing) any previously queued frame.
+     *
+     * @param frame the frame to queue; must not be null.
+     */
+    public void pushFrame(Frame frame) {
+        // Fix: validate the argument before releasing the held frame. Previously a
+        // null argument released mFrame but left the stale reference in place,
+        // which onProcess/onTearDown would later use again.
+        if (frame == null) {
+            throw new RuntimeException("Attempting to assign null-frame!");
+        }
+        if (mFrame != null) {
+            mFrame.release();
+        }
+        mFrame = frame.retain();
+    }
+
+    @Override
+    protected void onProcess() {
+        if (mFrame != null) {
+            getConnectedOutputPort("frame").pushFrame(mFrame);
+            mFrame.release();
+            mFrame = null;
+        }
+    }
+
+    @Override
+    protected void onTearDown() {
+        if (mFrame != null) {
+            mFrame.release();
+            mFrame = null;
+        }
+    }
+
+    @Override
+    protected boolean canSchedule() {
+        // Only schedulable while a frame is queued.
+        return super.canSchedule() && mFrame != null;
+    }
+
+}
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/GraphOutputTarget.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/GraphOutputTarget.java
new file mode 100644
index 0000000..1f3be10
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/GraphOutputTarget.java
@@ -0,0 +1,60 @@
+// Copyright 2012 Google Inc. All Rights Reserved.
+
+package androidx.media.filterpacks.base;
+
+import androidx.media.filterfw.Filter;
+import androidx.media.filterfw.Frame;
+import androidx.media.filterfw.FrameType;
+import androidx.media.filterfw.MffContext;
+import androidx.media.filterfw.Signature;
+
+/**
+ * A target filter that holds the most recently received frame until Java code
+ * pulls it out of the graph.
+ */
+public class GraphOutputTarget extends Filter {
+
+    // Most recently received frame, held (retained) until pulled; null if none.
+    private Frame mFrame = null;
+    // Accepted frame type for the input port; defaults to any.
+    private FrameType mType = FrameType.any();
+
+    public GraphOutputTarget(MffContext context, String name) {
+        super(context, name);
+    }
+
+    // TODO: During initialization only?
+    public void setType(FrameType type) {
+        mType = type;
+    }
+
+    public FrameType getType() {
+        return mType;
+    }
+
+    @Override
+    public Signature getSignature() {
+        return new Signature()
+            .addInputPort("frame", Signature.PORT_REQUIRED, mType)
+            .disallowOtherInputs();
+    }
+
+    // Returns a retained frame!
+    public Frame pullFrame() {
+        if (mFrame == null) {
+            return null;
+        }
+        Frame result = mFrame;
+        mFrame = null;
+        return result;
+    }
+
+    @Override
+    protected void onProcess() {
+        Frame incoming = getConnectedInputPort("frame").pullFrame();
+        if (mFrame != null) {
+            mFrame.release();
+        }
+        mFrame = incoming.retain();
+    }
+
+    @Override
+    protected boolean canSchedule() {
+        // Only runs while no frame is waiting to be pulled.
+        return super.canSchedule() && mFrame == null;
+    }
+
+}
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/GraphReader.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/GraphReader.java
new file mode 100644
index 0000000..ef885e3
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/GraphReader.java
@@ -0,0 +1,576 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package androidx.media.filterfw;
+
+import android.text.TextUtils;
+
+import java.io.InputStream;
+import java.io.IOException;
+import java.io.StringReader;
+import java.util.ArrayList;
+
+import javax.xml.parsers.ParserConfigurationException;
+import javax.xml.parsers.SAXParser;
+import javax.xml.parsers.SAXParserFactory;
+
+import org.xml.sax.Attributes;
+import org.xml.sax.InputSource;
+import org.xml.sax.SAXException;
+import org.xml.sax.XMLReader;
+import org.xml.sax.helpers.DefaultHandler;
+
+/**
+ * A GraphReader allows obtaining filter graphs from XML graph files or strings.
+ */
+public class GraphReader {
+
+    /** A single deferred graph-construction step, executed against a CommandStack. */
+    private static interface Command {
+        public void execute(CommandStack stack);
+    }
+
+    private static class CommandStack {
+        private ArrayList<Command> mCommands = new ArrayList<Command>();
+        private FilterGraph.Builder mBuilder;
+        private FilterFactory mFactory;
+        private MffContext mContext;
+
+        public CommandStack(MffContext context) {
+            mContext = context;
+            mBuilder = new FilterGraph.Builder(mContext);
+            mFactory = new FilterFactory();
+        }
+
+        public void execute() {
+            for (Command command : mCommands) {
+                command.execute(this);
+            }
+        }
+
+        public void append(Command command) {
+            mCommands.add(command);
+        }
+
+        public FilterFactory getFactory() {
+            return mFactory;
+        }
+
+        public MffContext getContext() {
+            return mContext;
+        }
+
+        protected FilterGraph.Builder getBuilder() {
+            return mBuilder;
+        }
+    }
+
+    /** Command that registers a filter package with the factory. */
+    private static class ImportPackageCommand implements Command {
+        private String mPackageName;
+
+        public ImportPackageCommand(String packageName) {
+            mPackageName = packageName;
+        }
+
+        @Override
+        public void execute(CommandStack stack) {
+            try {
+                stack.getFactory().addPackage(mPackageName);
+            } catch (IllegalArgumentException e) {
+                // Fix: preserve the original exception as the cause instead of
+                // discarding its stack trace.
+                throw new RuntimeException(e.getMessage(), e);
+            }
+        }
+    }
+
+    /** Command that makes a native filter library available to the factory. */
+    private static class AddLibraryCommand implements Command {
+        private String mLibraryPath;
+
+        public AddLibraryCommand(String libraryName) {
+            mLibraryPath = libraryName;
+        }
+
+        @Override
+        public void execute(CommandStack stack) {
+            // Library registration is global (static), not per-stack.
+            FilterFactory.addFilterLibrary(mLibraryPath);
+        }
+    }
+
+    /** Command that instantiates a filter by class name and adds it to the graph. */
+    private static class AllocateFilterCommand implements Command {
+        private String mClassName;
+        private String mFilterName;
+
+        public AllocateFilterCommand(String className, String filterName) {
+            mClassName = className;
+            mFilterName = filterName;
+        }
+
+        @Override
+        public void execute(CommandStack stack) {
+            // Fix: the method declaration above was tab-indented, inconsistent with
+            // the file's 4-space indentation style.
+            Filter filter = null;
+            try {
+                filter = stack.getFactory().createFilterByClassName(mClassName,
+                                                                    mFilterName,
+                                                                    stack.getContext());
+            } catch (IllegalArgumentException e) {
+                throw new RuntimeException("Error creating filter " + mFilterName + "!", e);
+            }
+            stack.getBuilder().addFilter(filter);
+        }
+    }
+
+    /** Command that adds a frame-slot source filter to the graph. */
+    private static class AddSourceSlotCommand implements Command {
+        private String mFilterName;
+        private String mSlotName;
+
+        public AddSourceSlotCommand(String name, String slotName) {
+            mFilterName = name;
+            mSlotName = slotName;
+        }
+
+        @Override
+        public void execute(CommandStack stack) {
+            stack.getBuilder().addFrameSlotSource(mFilterName, mSlotName);
+        }
+    }
+
+    /** Command that adds a frame-slot target filter to the graph. */
+    private static class AddTargetSlotCommand implements Command {
+        private String mFilterName;
+        private String mSlotName;
+
+        public AddTargetSlotCommand(String name, String slotName) {
+            mFilterName = name;
+            mSlotName = slotName;
+        }
+
+        @Override
+        public void execute(CommandStack stack) {
+            stack.getBuilder().addFrameSlotTarget(mFilterName, mSlotName);
+        }
+    }
+
+    /** Command that declares a named graph variable with an initial value. */
+    private static class AddVariableCommand implements Command {
+        private String mVarName;
+        private Object mVarValue;
+
+        public AddVariableCommand(String name, Object value) {
+            mVarName = name;
+            mVarValue = value;
+        }
+
+        @Override
+        public void execute(CommandStack stack) {
+            stack.getBuilder().addVariable(mVarName, mVarValue);
+        }
+    }
+
+    /** Command that assigns a value (or a variable reference) to a filter input. */
+    private static class SetFilterInputCommand implements Command {
+        private String mFilterName;
+        private String mFilterInput;
+        private Object mValue;
+
+        public SetFilterInputCommand(String filterName, String input, Object value) {
+            mFilterName = filterName;
+            mFilterInput = input;
+            mValue = value;
+        }
+
+        @Override
+        public void execute(CommandStack stack) {
+            // Variables are bound by name; plain values are assigned directly.
+            if (!(mValue instanceof Variable)) {
+                stack.getBuilder().assignValueToFilterInput(mValue, mFilterName, mFilterInput);
+            } else {
+                String varName = ((Variable) mValue).name;
+                stack.getBuilder().assignVariableToFilterInput(varName, mFilterName, mFilterInput);
+            }
+        }
+    }
+
+    /** Command that connects an output port of one filter to an input port of another. */
+    private static class ConnectCommand implements Command {
+        private String mSrcFilter;
+        private String mSrcPort;
+        private String mDstFilter;
+        private String mDstPort;
+
+        public ConnectCommand(String sourceFilter,
+                              String sourcePort,
+                              String targetFilter,
+                              String targetPort) {
+            mSrcFilter = sourceFilter;
+            mSrcPort = sourcePort;
+            mDstFilter = targetFilter;
+            mDstPort = targetPort;
+        }
+
+        @Override
+        public void execute(CommandStack stack) {
+            stack.getBuilder().connect(mSrcFilter, mSrcPort, mDstFilter, mDstPort);
+        }
+    }
+
+    /** Marker wrapper: a value that refers to a named graph variable rather than a literal. */
+    private static class Variable {
+        public String name;  // name of the referenced variable
+
+        public Variable(String name) {
+            this.name = name;
+        }
+    }
+
+    private static class XmlGraphReader {
+
+        private SAXParserFactory mParserFactory;
+
+        /**
+         * SAX content handler that translates graph XML elements into Commands
+         * appended to the supplied CommandStack.
+         */
+        private static class GraphDataHandler extends DefaultHandler {
+
+            private CommandStack mCommandStack;
+            // True while parsing is inside the top-level <graph> element.
+            private boolean mInGraph = false;
+            // Name of the <filter> element currently open, or null if none.
+            private String mCurFilterName = null;
+
+            public GraphDataHandler(CommandStack commandStack) {
+                mCommandStack = commandStack;
+            }
+
+            @Override
+            public void startElement(String uri, String localName, String qName, Attributes attr)
+                    throws SAXException {
+                if (localName.equals("graph")) {
+                    beginGraph();
+                } else {
+                    assertInGraph(localName);
+                    if (localName.equals("import")) {
+                        addImportCommand(attr);
+                    } else if (localName.equals("library")) {
+                        addLibraryCommand(attr);
+                    } else if (localName.equals("connect")) {
+                        addConnectCommand(attr);
+                    } else if (localName.equals("var")) {
+                        addVarCommand(attr);
+                    } else if (localName.equals("filter")) {
+                        beginFilter(attr);
+                    } else if (localName.equals("input")) {
+                        addFilterInput(attr);
+                    } else {
+                        throw new SAXException("Unknown XML element '" + localName + "'!");
+                    }
+                }
+            }
+
+            @Override
+            public void endElement(String uri, String localName, String qName) {
+                if (localName.equals("graph")) {
+                    endGraph();
+                } else if (localName.equals("filter")) {
+                    endFilter();
+                }
+            }
+
+            private void addImportCommand(Attributes attributes) throws SAXException {
+                String packageName = getRequiredAttribute(attributes, "package");
+                mCommandStack.append(new ImportPackageCommand(packageName));
+            }
+
+            private void addLibraryCommand(Attributes attributes) throws SAXException {
+                String libraryName = getRequiredAttribute(attributes, "name");
+                mCommandStack.append(new AddLibraryCommand(libraryName));
+            }
+
+            private void addConnectCommand(Attributes attributes) {
+                String sourcePortName   = null;
+                String sourceFilterName = null;
+                String targetPortName   = null;
+                String targetFilterName = null;
+
+                // check for shorthand: <connect source="filter:port" target="filter:port"/>
+                String sourceTag = attributes.getValue("source");
+                if (sourceTag != null) {
+                    String[] sourceParts = sourceTag.split(":");
+                    if (sourceParts.length == 2) {
+                        sourceFilterName = sourceParts[0];
+                        sourcePortName   = sourceParts[1];
+                    } else {
+                        throw new RuntimeException(
+                            "'source' tag needs to have format \"filter:port\"! " +
+                            "Alternatively, you may use the form " +
+                            "'sourceFilter=\"filter\" sourcePort=\"port\"'.");
+                    }
+                } else {
+                    sourceFilterName = attributes.getValue("sourceFilter");
+                    sourcePortName   = attributes.getValue("sourcePort");
+                }
+
+                String targetTag = attributes.getValue("target");
+                if (targetTag != null) {
+                    String[] targetParts = targetTag.split(":");
+                    if (targetParts.length == 2) {
+                        targetFilterName = targetParts[0];
+                        targetPortName   = targetParts[1];
+                    } else {
+                        throw new RuntimeException(
+                            "'target' tag needs to have format \"filter:port\"! " +
+                            "Alternatively, you may use the form " +
+                            "'targetFilter=\"filter\" targetPort=\"port\"'.");
+                    }
+                } else {
+                    targetFilterName = attributes.getValue("targetFilter");
+                    targetPortName   = attributes.getValue("targetPort");
+                }
+
+                // Slot endpoints synthesize a dedicated slot filter with a fixed "frame" port.
+                String sourceSlotName = attributes.getValue("sourceSlot");
+                String targetSlotName = attributes.getValue("targetSlot");
+                if (sourceSlotName != null) {
+                    sourceFilterName = "sourceSlot_" + sourceSlotName;
+                    mCommandStack.append(new AddSourceSlotCommand(sourceFilterName,
+                                                                  sourceSlotName));
+                    sourcePortName = "frame";
+                }
+                if (targetSlotName != null) {
+                    targetFilterName = "targetSlot_" + targetSlotName;
+                    mCommandStack.append(new AddTargetSlotCommand(targetFilterName,
+                                                                  targetSlotName));
+                    targetPortName = "frame";
+                }
+                assertValueNotNull("sourceFilter", sourceFilterName);
+                assertValueNotNull("sourcePort", sourcePortName);
+                assertValueNotNull("targetFilter", targetFilterName);
+                assertValueNotNull("targetPort", targetPortName);
+                // TODO: Should slot connections auto-branch?
+                mCommandStack.append(new ConnectCommand(sourceFilterName,
+                                                        sourcePortName,
+                                                        targetFilterName,
+                                                        targetPortName));
+            }
+
+            private void addVarCommand(Attributes attributes) throws SAXException {
+                String varName = getRequiredAttribute(attributes, "name");
+                Object varValue = getAssignmentValue(attributes);
+                mCommandStack.append(new AddVariableCommand(varName, varValue));
+            }
+
+            private void beginGraph() throws SAXException {
+                if (mInGraph) {
+                    throw new SAXException("Found more than one graph element in XML!");
+                }
+                mInGraph = true;
+            }
+
+            private void endGraph() {
+                mInGraph = false;
+            }
+
+            private void beginFilter(Attributes attributes) throws SAXException {
+                String className = getRequiredAttribute(attributes, "class");
+                mCurFilterName = getRequiredAttribute(attributes, "name");
+                mCommandStack.append(new AllocateFilterCommand(className, mCurFilterName));
+            }
+
+            private void endFilter() {
+                mCurFilterName = null;
+            }
+
+            private void addFilterInput(Attributes attributes) throws SAXException {
+                // Make sure we are in a filter element
+                if (mCurFilterName == null) {
+                    throw new SAXException("Found 'input' element outside of 'filter' "
+                        + "element!");
+                }
+
+                // Get input name and value
+                String inputName = getRequiredAttribute(attributes, "name");
+                Object inputValue = getAssignmentValue(attributes);
+                if (inputValue == null) {
+                    throw new SAXException("No value specified for input '" + inputName + "' "
+                        + "of filter '" + mCurFilterName + "'!");
+                }
+
+                // Push command
+                mCommandStack.append(new SetFilterInputCommand(mCurFilterName,
+                                                               inputName,
+                                                               inputValue));
+            }
+
+            private void assertInGraph(String localName) throws SAXException {
+                if (!mInGraph) {
+                    throw new SAXException("Encountered '" + localName + "' element outside of "
+                        + "'graph' element!");
+                }
+            }
+
+            // Decodes a typed value attribute (stringValue, booleanValue, intValue,
+            // floatValue, floatsValue, varValue); returns null if none is present.
+            private static Object getAssignmentValue(Attributes attributes) {
+                String strValue = null;
+                if ((strValue = attributes.getValue("stringValue")) != null) {
+                    return strValue;
+                } else if ((strValue = attributes.getValue("booleanValue")) != null) {
+                    return Boolean.parseBoolean(strValue);
+                } else if ((strValue = attributes.getValue("intValue")) != null) {
+                    return Integer.parseInt(strValue);
+                } else if ((strValue = attributes.getValue("floatValue")) != null) {
+                    return Float.parseFloat(strValue);
+                } else if ((strValue = attributes.getValue("floatsValue")) != null) {
+                    String[] floatStrings = TextUtils.split(strValue, ",");
+                    float[] result = new float[floatStrings.length];
+                    for (int i = 0; i < floatStrings.length; ++i) {
+                        result[i] = Float.parseFloat(floatStrings[i]);
+                    }
+                    return result;
+                } else if ((strValue = attributes.getValue("varValue")) != null) {
+                    return new Variable(strValue);
+                } else {
+                    return null;
+                }
+            }
+
+            private static String getRequiredAttribute(Attributes attributes, String name)
+                    throws SAXException {
+                String result = attributes.getValue(name);
+                if (result == null) {
+                    throw new SAXException("Required attribute '" + name + "' not found!");
+                }
+                return result;
+            }
+
+            private static void assertValueNotNull(String valueName, Object value) {
+                if (value == null) {
+                    // Fix: report the *name* of the missing value; previously the message
+                    // interpolated 'value', which is always null here.
+                    throw new NullPointerException(
+                        "Required value '" + valueName + "' not specified!");
+                }
+            }
+
+        }
+
+        public XmlGraphReader() {
+            // Factory used to create a fresh SAX parser for each parse request.
+            mParserFactory = SAXParserFactory.newInstance();
+        }
+
+        /**
+         * Parses the given XML graph string, appending the resulting commands to the
+         * supplied command stack.
+         *
+         * @param graphString the XML graph specification.
+         * @param commandStack the stack that receives the parsed commands.
+         * @throws IOException if the XML could not be parsed.
+         */
+        public void parseString(String graphString, CommandStack commandStack) throws IOException {
+            try {
+                XMLReader reader = getReaderForCommandStack(commandStack);
+                reader.parse(new InputSource(new StringReader(graphString)));
+            } catch (SAXException e) {
+                // Wrapped so callers only need to handle IOException.
+                throw new IOException("XML parse error during graph parsing!", e);
+            }
+        }
+
+        /**
+         * Parses XML graph data from the given stream, appending the resulting commands
+         * to the supplied command stack. The stream is not closed by this method.
+         *
+         * @param inputStream the stream providing the XML graph specification.
+         * @param commandStack the stack that receives the parsed commands.
+         * @throws IOException if reading or parsing fails.
+         */
+        public void parseInput(InputStream inputStream, CommandStack commandStack)
+                throws IOException {
+            try {
+                XMLReader reader = getReaderForCommandStack(commandStack);
+                reader.parse(new InputSource(inputStream));
+            } catch (SAXException e) {
+                // Wrapped so callers only need to handle IOException.
+                throw new IOException("XML parse error during graph parsing!", e);
+            }
+        }
+
+        /**
+         * Creates an XMLReader whose content handler feeds parsed elements into the
+         * given command stack.
+         *
+         * @throws IOException if the SAX parser or reader could not be created.
+         */
+        private XMLReader getReaderForCommandStack(CommandStack commandStack) throws IOException {
+            try {
+                SAXParser parser = mParserFactory.newSAXParser();
+                XMLReader reader = parser.getXMLReader();
+                GraphDataHandler graphHandler = new GraphDataHandler(commandStack);
+                reader.setContentHandler(graphHandler);
+                return reader;
+            } catch (ParserConfigurationException e) {
+                throw new IOException("Error creating SAXParser for graph parsing!", e);
+            } catch (SAXException e) {
+                throw new IOException("Error creating XMLReader for graph parsing!", e);
+            }
+        }
+    }
+
+    /**
+     * Read an XML graph from a String.
+     *
+     * This function automatically checks each filter's signature and throws a RuntimeException
+     * if required ports are unconnected. Use the 3-parameter version to avoid this behavior.
+     *
+     * @param context the MffContext into which to load the graph.
+     * @param xmlSource the graph specified in XML.
+     * @return the FilterGraph instance for the XML source.
+     * @throws IOException if there was an error parsing the source.
+     */
+    public static FilterGraph readXmlGraph(MffContext context, String xmlSource)
+            throws IOException {
+        FilterGraph.Builder builder = getBuilderForXmlString(context, xmlSource);
+        return builder.build();
+    }
+
+    /**
+     * Read an XML sub-graph from a String.
+     *
+     * Unlike {@link #readXmlGraph(MffContext, String)}, this version does not automatically
+     * check filter signatures.
+     *
+     * @param context the MffContext into which to load the graph.
+     * @param xmlSource the graph specified in XML.
+     * @param parentGraph the parent graph.
+     * @return the FilterGraph instance for the XML source.
+     * @throws IOException if there was an error parsing the source.
+     */
+    public static FilterGraph readXmlSubGraph(
+            MffContext context, String xmlSource, FilterGraph parentGraph)
+            throws IOException {
+        FilterGraph.Builder builder = getBuilderForXmlString(context, xmlSource);
+        return builder.buildSubGraph(parentGraph);
+    }
+
+    /**
+     * Read an XML graph from a resource.
+     *
+     * This function automatically checks each filter's signature and throws a RuntimeException
+     * if required ports are unconnected. Use the 3-parameter version to avoid this behavior.
+     *
+     * @param context the MffContext into which to load the graph.
+     * @param resourceId the XML resource ID.
+     * @return the FilterGraph instance for the XML source.
+     * @throws IOException if there was an error reading or parsing the resource.
+     */
+    public static FilterGraph readXmlGraphResource(MffContext context, int resourceId)
+            throws IOException {
+        FilterGraph.Builder builder = getBuilderForXmlResource(context, resourceId);
+        return builder.build();
+    }
+
+    /**
+     * Read an XML sub-graph from a resource.
+     *
+     * Unlike {@link #readXmlGraphResource(MffContext, int)}, this version does not
+     * automatically check filter signatures.
+     *
+     * @param context the MffContext into which to load the graph.
+     * @param resourceId the XML resource ID.
+     * @param parentGraph the parent graph.
+     * @return the FilterGraph instance for the XML resource.
+     * @throws IOException if there was an error reading or parsing the resource.
+     */
+    public static FilterGraph readXmlSubGraphResource(
+            MffContext context, int resourceId, FilterGraph parentGraph)
+            throws IOException {
+        FilterGraph.Builder builder = getBuilderForXmlResource(context, resourceId);
+        return builder.buildSubGraph(parentGraph);
+    }
+
+    /**
+     * Parses {@code source} into graph-building commands, executes them, and returns the
+     * resulting builder.
+     */
+    private static FilterGraph.Builder getBuilderForXmlString(MffContext context, String source)
+            throws IOException {
+        XmlGraphReader reader = new XmlGraphReader();
+        CommandStack commands = new CommandStack(context);
+        reader.parseString(source, commands);
+        commands.execute();
+        return commands.getBuilder();
+    }
+
+    /**
+     * Opens the raw resource {@code resourceId}, parses it into graph-building commands,
+     * executes them, and returns the resulting builder.
+     */
+    private static FilterGraph.Builder getBuilderForXmlResource(MffContext context, int resourceId)
+            throws IOException {
+        InputStream inputStream = context.getApplicationContext().getResources()
+                .openRawResource(resourceId);
+        XmlGraphReader reader = new XmlGraphReader();
+        CommandStack commands = new CommandStack(context);
+        try {
+            reader.parseInput(inputStream, commands);
+        } finally {
+            // Bug fix: the raw-resource stream was previously leaked, including on parse errors.
+            inputStream.close();
+        }
+        commands.execute();
+        return commands.getBuilder();
+    }
+}
+
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/GraphRunner.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/GraphRunner.java
new file mode 100644
index 0000000..36aed63
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/GraphRunner.java
@@ -0,0 +1,1023 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
+ * in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License
+ * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
+ * or implied. See the License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package androidx.media.filterfw;
+
+import android.os.ConditionVariable;
+import android.os.SystemClock;
+import android.util.Log;
+
+import java.util.HashSet;
+import java.util.Set;
+import java.util.Stack;
+import java.util.concurrent.LinkedBlockingQueue;
+
+/**
+ * A GraphRunner schedules and executes the filter nodes of a graph.
+ *
+ * Typically, you create a GraphRunner given a FilterGraph instance, and execute it by calling
+ * {@link #start(FilterGraph)}.
+ *
+ * The scheduling strategy determines how the filter nodes are selected
+ * for scheduling. More precisely, given the set of nodes that can be scheduled, the scheduling
+ * strategy determines which node of this set to select for execution. For instance, an LFU
+ * scheduler (the default) chooses the node that has been executed the least amount of times.
+ */
+public final class GraphRunner {
+
+    private static int PRIORITY_SLEEP = -1;
+    private static int PRIORITY_STOP = -2;
+
+    private static final Event BEGIN_EVENT = new Event(Event.BEGIN, null);
+    private static final Event FLUSH_EVENT = new Event(Event.FLUSH, null);
+    private static final Event HALT_EVENT = new Event(Event.HALT, null);
+    private static final Event KILL_EVENT = new Event(Event.KILL, null);
+    private static final Event PAUSE_EVENT = new Event(Event.PAUSE, null);
+    private static final Event RELEASE_FRAMES_EVENT = new Event(Event.RELEASE_FRAMES, null);
+    private static final Event RESTART_EVENT = new Event(Event.RESTART, null);
+    private static final Event RESUME_EVENT = new Event(Event.RESUME, null);
+    private static final Event STEP_EVENT = new Event(Event.STEP, null);
+    private static final Event STOP_EVENT = new Event(Event.STOP, null);
+
+    /**
+     * Thread-safe bit-set of run-loop states. Several flags may be set simultaneously
+     * (e.g. RUNNING | PAUSED); check(), addState() and removeState() operate on
+     * individual flag bits.
+     */
+    private static class State {
+        public static final int STOPPED = 1;
+        public static final int PREPARING = 2;
+        public static final int RUNNING = 4;
+        public static final int PAUSED = 8;
+        public static final int HALTED = 16;
+
+        private int mCurrent = STOPPED;
+
+        /** Replaces the entire bit-set with newState. */
+        public synchronized void setState(int newState) {
+            mCurrent = newState;
+        }
+
+        /** Returns true if all bits of state are currently set. */
+        public synchronized boolean check(int state) {
+            return ((mCurrent & state) == state);
+        }
+
+        /** Sets the given bits; returns true if they were not already all set. */
+        public synchronized boolean addState(int state) {
+            if ((mCurrent & state) != state) {
+                mCurrent |= state;
+                return true;
+            }
+            return false;
+        }
+
+        /** Clears the given bits; returns true if they were all set beforehand. */
+        public synchronized boolean removeState(int state) {
+            boolean result = (mCurrent & state) == state;
+            mCurrent &= (~state);
+            return result;
+        }
+
+        /** Returns the raw state bit-set. */
+        public synchronized int current() {
+            return mCurrent;
+        }
+    }
+
+    /** A control message processed by the GraphRunLoop's event queue. */
+    private static class Event {
+        public static final int PREPARE = 1;
+        public static final int BEGIN = 2;
+        public static final int STEP = 3;
+        public static final int STOP = 4;
+        // NOTE(review): code 5 is unused — looks like a removed event; confirm before reusing.
+        public static final int PAUSE = 6;
+        public static final int HALT = 7;
+        public static final int RESUME = 8;
+        public static final int RESTART = 9;
+        public static final int FLUSH = 10;
+        public static final int TEARDOWN = 11;
+        public static final int KILL = 12;
+        public static final int RELEASE_FRAMES = 13;
+
+        /** One of the event-code constants above. */
+        public int code;
+        /** Optional payload: the FilterGraph for PREPARE and TEARDOWN events, else null. */
+        public Object object;
+
+        public Event(int code, Object object) {
+            this.code = code;
+            this.object = object;
+        }
+    }
+
+    private final class GraphRunLoop implements Runnable {
+
+        private State mState = new State();
+        private final boolean mAllowOpenGL;
+        private RenderTarget mRenderTarget = null;
+        private LinkedBlockingQueue<Event> mEventQueue = new LinkedBlockingQueue<Event>();
+        private Exception mCaughtException = null;
+        private boolean mClosedSuccessfully = true;
+        private Stack<Filter[]> mFilters = new Stack<Filter[]>();
+        private Stack<SubListener> mSubListeners = new Stack<SubListener>();
+        private Set<FilterGraph> mOpenedGraphs = new HashSet<FilterGraph>();
+        public ConditionVariable mStopCondition = new ConditionVariable(true);
+
+        private void loop() {
+            boolean killed = false;
+            while (!killed) {
+                try {
+                    Event event = nextEvent();
+                    if (event == null) continue;
+                    switch (event.code) {
+                        case Event.PREPARE:
+                            onPrepare((FilterGraph)event.object);
+                            break;
+                        case Event.BEGIN:
+                            onBegin();
+                            break;
+                        case Event.STEP:
+                            onStep();
+                            break;
+                        case Event.STOP:
+                            onStop();
+                            break;
+                        case Event.PAUSE:
+                            onPause();
+                            break;
+                        case Event.HALT:
+                            onHalt();
+                            break;
+                        case Event.RESUME:
+                            onResume();
+                            break;
+                        case Event.RESTART:
+                            onRestart();
+                            break;
+                        case Event.FLUSH:
+                            onFlush();
+                            break;
+                        case Event.TEARDOWN:
+                            onTearDown((FilterGraph)event.object);
+                            break;
+                        case Event.KILL:
+                            killed = true;
+                            break;
+                        case Event.RELEASE_FRAMES:
+                            onReleaseFrames();
+                            break;
+                    }
+                } catch (Exception e) {
+                    if (mCaughtException == null) {
+                        mCaughtException = e;
+                        mClosedSuccessfully = true;
+                        e.printStackTrace();
+                        pushEvent(STOP_EVENT);
+                    } else {
+                        // Exception during exception recovery? Abort all processing. Do not
+                        // overwrite the original exception.
+                        mClosedSuccessfully = false;
+                        mEventQueue.clear();
+                        cleanUp();
+                    }
+                }
+            }
+        }
+
+        public GraphRunLoop(boolean allowOpenGL) {
+            mAllowOpenGL = allowOpenGL;
+        }
+
+        @Override
+        public void run() {
+            onInit();
+            loop();
+            onDestroy();
+        }
+
+        public void enterSubGraph(FilterGraph graph, SubListener listener) {
+            if (mState.check(State.RUNNING)) {
+                onOpenGraph(graph);
+                mSubListeners.push(listener);
+            }
+        }
+
+        public void pushWakeEvent(Event event) {
+            // This is of course not race-condition proof. The worst case is that the event
+            // is pushed even though the queue was not empty, which is acceptable for our cases.
+            if (mEventQueue.isEmpty()) {
+                pushEvent(event);
+            }
+        }
+
+        public void pushEvent(Event event) {
+            mEventQueue.offer(event);
+        }
+
+        public void pushEvent(int eventId, Object object) {
+            mEventQueue.offer(new Event(eventId, object));
+        }
+
+        public boolean checkState(int state) {
+            return mState.check(state);
+        }
+
+        public ConditionVariable getStopCondition() {
+            return mStopCondition;
+        }
+
+        public boolean isOpenGLAllowed() {
+            // Does not need synchronization as mAllowOpenGL flag is final.
+            return mAllowOpenGL;
+        }
+
+        private Event nextEvent() {
+            try {
+                return mEventQueue.take();
+            } catch (InterruptedException e) {
+                // Ignore and keep going.
+                Log.w("GraphRunner", "Event queue processing was interrupted.");
+                return null;
+            }
+        }
+
+        private void onPause() {
+            mState.addState(State.PAUSED);
+        }
+
+        private void onResume() {
+            if (mState.removeState(State.PAUSED)) {
+                if (mState.current() == State.RUNNING) {
+                    pushEvent(STEP_EVENT);
+                }
+            }
+        }
+
+        private void onHalt() {
+            if (mState.addState(State.HALTED) && mState.check(State.RUNNING)) {
+                closeAllFilters();
+            }
+        }
+
+        private void onRestart() {
+            if (mState.removeState(State.HALTED)) {
+                if (mState.current() == State.RUNNING) {
+                    pushEvent(STEP_EVENT);
+                }
+            }
+        }
+
+        private void onDestroy() {
+            mFrameManager.destroyBackings();
+            if (mRenderTarget != null) {
+                mRenderTarget.release();
+                mRenderTarget = null;
+            }
+        }
+
+        private void onReleaseFrames() {
+            mFrameManager.destroyBackings();
+        }
+
+        private void onInit() {
+            mThreadRunner.set(GraphRunner.this);
+            if (getContext().isOpenGLSupported()) {
+                mRenderTarget = RenderTarget.newTarget(1, 1);
+                mRenderTarget.focus();
+            }
+        }
+
+        private void onPrepare(FilterGraph graph) {
+            if (mState.current() == State.STOPPED) {
+                mState.setState(State.PREPARING);
+                mCaughtException = null;
+                onOpenGraph(graph);
+            }
+        }
+
+        private void onOpenGraph(FilterGraph graph) {
+            loadFilters(graph);
+            mOpenedGraphs.add(graph);
+            mScheduler.prepare(currentFilters());
+            pushEvent(BEGIN_EVENT);
+        }
+
+        private void onBegin() {
+            if (mState.current() == State.PREPARING) {
+                mState.setState(State.RUNNING);
+                pushEvent(STEP_EVENT);
+            }
+        }
+
+        private void onStarve() {
+            mFilters.pop();
+            if (mFilters.empty()) {
+                onStop();
+            } else {
+                SubListener listener = mSubListeners.pop();
+                if (listener != null) {
+                    listener.onSubGraphRunEnded(GraphRunner.this);
+                }
+                mScheduler.prepare(currentFilters());
+                pushEvent(STEP_EVENT);
+            }
+        }
+
+        private void onStop() {
+            if (mState.check(State.RUNNING)) {
+                // Close filters if not already halted (and already closed)
+                if (!mState.check(State.HALTED)) {
+                    closeAllFilters();
+                }
+                cleanUp();
+            }
+        }
+
+        private void cleanUp() {
+            mState.setState(State.STOPPED);
+            if (flushOnClose()) {
+                onFlush();
+            }
+            mOpenedGraphs.clear();
+            mFilters.clear();
+            onRunnerStopped(mCaughtException, mClosedSuccessfully);
+            mStopCondition.open();
+        }
+
+        private void onStep() {
+            if (mState.current() == State.RUNNING) {
+                Filter bestFilter = null;
+                long maxPriority = PRIORITY_STOP;
+                mScheduler.beginStep();
+                Filter[] filters = currentFilters();
+                for (int i = 0; i < filters.length; ++i) {
+                    Filter filter = filters[i];
+                    long priority = mScheduler.priorityForFilter(filter);
+                    if (priority > maxPriority) {
+                        maxPriority = priority;
+                        bestFilter = filter;
+                    }
+                }
+                if (maxPriority == PRIORITY_SLEEP) {
+                    // NOOP: When going into sleep mode, we simply do not schedule another node.
+                    // If some other event (such as a resume()) does schedule, then we may schedule
+                    // during sleeping. This is an edge case and irrelevant. (On the other hand,
+                    // going into a dedicated "sleep state" requires highly complex synchronization
+                    // to not "miss" a wake-up event. Thus we choose the more defensive approach
+                    // here).
+                } else if (maxPriority == PRIORITY_STOP) {
+                    onStarve();
+                } else {
+                    scheduleFilter(bestFilter);
+                    pushEvent(STEP_EVENT);
+                }
+            } else {
+                Log.w("GraphRunner", "State is not running! (" + mState.current() + ")");
+            }
+        }
+
+        private void onFlush() {
+           if (mState.check(State.HALTED) || mState.check(State.STOPPED)) {
+               for (FilterGraph graph : mOpenedGraphs) {
+                   graph.flushFrames();
+               }
+           }
+        }
+
+        private void onTearDown(FilterGraph graph) {
+            for (Filter filter : graph.getAllFilters()) {
+                filter.performTearDown();
+            }
+            graph.wipe();
+        }
+
+        private void loadFilters(FilterGraph graph) {
+            Filter[] filters = graph.getAllFilters();
+            mFilters.push(filters);
+        }
+
+        private void closeAllFilters() {
+            for (FilterGraph graph : mOpenedGraphs) {
+                closeFilters(graph);
+            }
+        }
+
+        private void closeFilters(FilterGraph graph) {
+            // [Non-iterator looping]
+            Log.v("GraphRunner", "CLOSING FILTERS");
+            Filter[] filters = graph.getAllFilters();
+            boolean isVerbose = isVerbose();
+            for (int i = 0; i < filters.length; ++i) {
+                if (isVerbose) {
+                    Log.i("GraphRunner", "Closing Filter " + filters[i] + "!");
+                }
+                filters[i].softReset();
+            }
+        }
+
+        private Filter[] currentFilters() {
+            return mFilters.peek();
+        }
+
+        private void scheduleFilter(Filter filter) {
+            long scheduleTime = 0;
+            if (isVerbose()) {
+                scheduleTime = SystemClock.elapsedRealtime();
+                Log.i("GraphRunner", scheduleTime + ": Scheduling " + filter + "!");
+            }
+            filter.execute();
+            if (isVerbose()) {
+                long nowTime = SystemClock.elapsedRealtime();
+                Log.i("GraphRunner",
+                        "-> Schedule time (" + filter + ") = " + (nowTime - scheduleTime) + " ms.");
+            }
+        }
+
+    }
+
+    // GraphRunner.Scheduler classes ///////////////////////////////////////////////////////////////
+    /** Strategy interface that decides which filter to execute in each scheduling step. */
+    private interface Scheduler {
+        /** Called with the filters of a (sub)graph before it starts running. */
+        public void prepare(Filter[] filters);
+
+        /** Returns the STRATEGY_* constant this scheduler implements. */
+        public int getStrategy();
+
+        /** Called at the start of each scheduling step, before priorities are queried. */
+        public void beginStep();
+
+        /** Returns the filter's priority; PRIORITY_SLEEP and PRIORITY_STOP are sentinels. */
+        public long priorityForFilter(Filter filter);
+
+    }
+
+    /**
+     * Scheduler that picks the filter least recently executed: a filter's priority is the
+     * time elapsed since its last scheduling, so older filters win.
+     */
+    private class LruScheduler implements Scheduler {
+
+        // Timestamp (SystemClock.elapsedRealtime) captured at the start of the current step.
+        private long mNow;
+
+        @Override
+        public void prepare(Filter[] filters) {
+        }
+
+        @Override
+        public int getStrategy() {
+            return STRATEGY_LRU;
+        }
+
+        @Override
+        public void beginStep() {
+            // TODO(renn): We could probably do this with a simple GraphRunner counter that would
+            // represent GraphRunner local time. This would allow us to use integers instead of
+            // longs, and save us calls to the system clock.
+            mNow = SystemClock.elapsedRealtime();
+        }
+
+        @Override
+        public long priorityForFilter(Filter filter) {
+            if (filter.isSleeping()) {
+                return PRIORITY_SLEEP;
+            } else if (filter.canSchedule()) {
+                // Time since last schedule: larger means less recently run, i.e. higher priority.
+                return mNow - filter.getLastScheduleTime();
+            } else {
+                return PRIORITY_STOP;
+            }
+        }
+
+    }
+
+    /**
+     * Scheduler that picks the filter executed the least number of times (the default
+     * strategy, per the class comment).
+     */
+    private class LfuScheduler implements Scheduler {
+
+        private final int MAX_PRIORITY = Integer.MAX_VALUE;
+
+        @Override
+        public void prepare(Filter[] filters) {
+            // [Non-iterator looping]
+            for (int i = 0; i < filters.length; ++i) {
+                filters[i].resetScheduleCount();
+            }
+        }
+
+        @Override
+        public int getStrategy() {
+            return STRATEGY_LFU;
+        }
+
+        @Override
+        public void beginStep() {
+        }
+
+        @Override
+        public long priorityForFilter(Filter filter) {
+            // Fewer past schedulings -> higher priority; sleeping/unschedulable use sentinels.
+            return filter.isSleeping() ? PRIORITY_SLEEP
+                    : (filter.canSchedule() ? (MAX_PRIORITY - filter.getScheduleCount())
+                            : PRIORITY_STOP);
+        }
+
+    }
+
+    /**
+     * LFU scheduler variant that schedules each filter at most once per run: once a
+     * filter's schedule count reaches the limit, it reports PRIORITY_STOP.
+     */
+    private class OneShotScheduler extends LfuScheduler {
+        // Maximum number of times each filter may be scheduled during a run.
+        private final int mCurCount = 1;
+
+        // Cleanup: the previous prepare() and beginStep() overrides duplicated the
+        // inherited LfuScheduler implementations verbatim, so they are removed.
+
+        @Override
+        public int getStrategy() {
+            return STRATEGY_ONESHOT;
+        }
+
+        @Override
+        public long priorityForFilter(Filter filter) {
+            return filter.getScheduleCount() < mCurCount ? super.priorityForFilter(filter)
+                    : PRIORITY_STOP;
+        }
+
+    }
+
+    // GraphRunner.Listener callback class /////////////////////////////////////////////////////////
+    public interface Listener {
+        /**
+         * Callback method that is called when the runner completes a run. This method is called
+         * only if the graph completed without an error.
+         */
+        public void onGraphRunnerStopped(GraphRunner runner);
+
+        /**
+         * Callback method that is called when runner encounters an error.
+         *
+         *  Any exceptions thrown in the GraphRunner's thread will cause the run to abort. The
+         * thrown exception is passed to the listener in this method. If no listener is set, the
+         * exception message is logged to the error stream. You will not receive an
+         * {@link #onGraphRunnerStopped(GraphRunner)} callback in case of an error.
+         *
+         * @param exception the exception that was thrown.
+         * @param closedSuccessfully true, if the graph was closed successfully after the error.
+         */
+        public void onGraphRunnerError(Exception exception, boolean closedSuccessfully);
+    }
+
+    public interface SubListener {
+        public void onSubGraphRunEnded(GraphRunner runner);
+    }
+
+    /**
+     * Config class to setup a GraphRunner with a custom configuration.
+     *
+     * The configuration object is passed to the constructor. Any changes to it will not affect
+     * the created GraphRunner instance.
+     */
+    public static class Config {
+        /** The runner's thread priority. */
+        public int threadPriority = Thread.NORM_PRIORITY;
+        /** Whether to allow filters to use OpenGL or not. */
+        public boolean allowOpenGL = true;
+    }
+
+    /** Parameters shared between run-thread and GraphRunner frontend. */
+    private class RunParameters {
+        public Listener listener = null;
+        public boolean isVerbose = false;
+        public boolean flushOnClose = true;
+    }
+
+    // GraphRunner implementation //////////////////////////////////////////////////////////////////
+    /** Schedule strategy: From set of candidates, pick a random one. */
+    public static final int STRATEGY_RANDOM = 1;
+    /** Schedule strategy: From set of candidates, pick the node least recently executed. */
+    public static final int STRATEGY_LRU = 2;
+    /** Schedule strategy: From set of candidates, pick node executed least number of times. */
+    public static final int STRATEGY_LFU = 3;
+    /** Schedule strategy: Schedules no node more than once. */
+    public static final int STRATEGY_ONESHOT = 4;
+
+    private final MffContext mContext;
+
+    private FilterGraph mRunningGraph = null;
+    private Set<FilterGraph> mGraphs = new HashSet<FilterGraph>();
+
+    private Scheduler mScheduler;
+
+    private GraphRunLoop mRunLoop;
+
+    private Thread mRunThread = null;
+
+    private FrameManager mFrameManager = null;
+
+    private static ThreadLocal<GraphRunner> mThreadRunner = new ThreadLocal<GraphRunner>();
+
+    private RunParameters mParams = new RunParameters();
+
+    /**
+     * Creates a new GraphRunner with the default configuration. You must attach FilterGraph
+     * instances to this runner before you can execute any of these graphs.
+     *
+     * @param context The MffContext instance for this runner.
+     */
+    public GraphRunner(MffContext context) {
+        mContext = context;
+        init(new Config());
+    }
+
+    /**
+     * Creates a new GraphRunner with the specified configuration. You must attach FilterGraph
+     * instances to this runner before you can execute any of these graphs.
+     *
+     * @param context The MffContext instance for this runner.
+     * @param config A Config instance with the configuration of this runner.
+     */
+    public GraphRunner(MffContext context, Config config) {
+        mContext = context;
+        init(config);
+    }
+
+    /**
+     * Returns the currently running graph-runner.
+     * @return The currently running graph-runner.
+     */
+    public static GraphRunner current() {
+        return mThreadRunner.get();
+    }
+
+    /**
+     * Returns the graph that this runner is currently executing. Returns null if no graph is
+     * currently being executed by this runner.
+     *
+     * @return the FilterGraph instance that this GraphRunner is executing.
+     */
+    public synchronized FilterGraph getRunningGraph() {
+        return mRunningGraph;
+    }
+
+    /**
+     * Returns the context that this runner is bound to.
+     *
+     * @return the MffContext instance that this runner is bound to.
+     */
+    public MffContext getContext() {
+        return mContext;
+    }
+
+    /**
+     * Begins graph execution. The graph filters are scheduled and executed until processing
+     * finishes or is stopped.
+     */
+    public synchronized void start(FilterGraph graph) {
+        if (graph.mRunner != this) {
+            throw new IllegalArgumentException("Graph must be attached to runner!");
+        }
+        mRunningGraph = graph;
+        mRunLoop.getStopCondition().close();
+        mRunLoop.pushEvent(Event.PREPARE, graph);
+    }
+
+    /**
+     * Begin executing a sub-graph. This only succeeds if the current runner is already
+     * executing.
+     */
+    public void enterSubGraph(FilterGraph graph, SubListener listener) {
+        if (Thread.currentThread() != mRunThread) {
+            throw new RuntimeException("enterSubGraph must be called from the runner's thread!");
+        }
+        mRunLoop.enterSubGraph(graph, listener);
+    }
+
+    /**
+     * Waits until graph execution has finished or stopped with an error.
+     * Care must be taken when using this method to not block the UI thread. This is typically
+     * used when a graph is run in one-shot mode to compute a result.
+     */
+    public void waitUntilStop() {
+        mRunLoop.getStopCondition().block();
+    }
+
+    /**
+     * Pauses graph execution.
+     */
+    public void pause() {
+        mRunLoop.pushEvent(PAUSE_EVENT);
+    }
+
+    /**
+     * Resumes graph execution after pausing.
+     */
+    public void resume() {
+        mRunLoop.pushEvent(RESUME_EVENT);
+    }
+
+    /**
+     * Stops graph execution.
+     */
+    public void stop() {
+        mRunLoop.pushEvent(STOP_EVENT);
+    }
+
+    /**
+     * Returns whether the graph is currently being executed. A graph is considered to be running,
+     * even if it is paused or in the process of being stopped.
+     *
+     * @return true, if the graph is currently being executed.
+     */
+    public boolean isRunning() {
+        return !mRunLoop.checkState(State.STOPPED);
+    }
+
+    /**
+     * Returns whether the graph is currently paused.
+     *
+     * @return true, if the graph is currently paused.
+     */
+    public boolean isPaused() {
+        return mRunLoop.checkState(State.PAUSED);
+    }
+
+    /**
+     * Returns whether the graph is currently stopped.
+     *
+     * @return true, if the graph is currently stopped.
+     */
+    public boolean isStopped() {
+        return mRunLoop.checkState(State.STOPPED);
+    }
+
+    /**
+     * Sets the filter scheduling strategy. This method can not be called when the GraphRunner is
+     * running.
+     *
+     * @param strategy a constant specifying which scheduler strategy to use.
+     * @throws RuntimeException if the GraphRunner is running.
+     * @throws IllegalArgumentException if invalid strategy is specified.
+     * @see #getSchedulerStrategy()
+     */
+    public void setSchedulerStrategy(int strategy) {
+        if (isRunning()) {
+            // Cleanup: merged the needless "... " + "GraphRunner!" literal concatenation.
+            throw new RuntimeException(
+                    "Attempting to change scheduling strategy on running GraphRunner!");
+        }
+        createScheduler(strategy);
+    }
+
+    /**
+     * Returns the current scheduling strategy.
+     *
+     * @return the scheduling strategy used by this GraphRunner.
+     * @see #setSchedulerStrategy(int)
+     */
+    public int getSchedulerStrategy() {
+        return mScheduler.getStrategy();
+    }
+
+    /**
+     * Set whether or not the runner is verbose. When set to true, the runner will output individual
+     * scheduling steps that may help identify and debug problems in the graph structure. The
+     * default is false.
+     *
+     * @param isVerbose true, if the GraphRunner should log scheduling details.
+     * @see #isVerbose()
+     */
+    public void setIsVerbose(boolean isVerbose) {
+        // mParams holds settings shared with the run-loop thread; all access must hold its monitor.
+        synchronized (mParams) {
+            mParams.isVerbose = isVerbose;
+        }
+    }
+
+    /**
+     * Returns whether the GraphRunner is verbose.
+     *
+     * @return true, if the GraphRunner logs scheduling details.
+     * @see #setIsVerbose(boolean)
+     */
+    public boolean isVerbose() {
+        synchronized (mParams) {
+            return mParams.isVerbose;
+        }
+    }
+
+    /**
+     * Returns whether Filters of this GraphRunner can use OpenGL.
+     *
+     * Filters may use OpenGL if the MffContext supports OpenGL and the GraphRunner allows it.
+     *
+     * @return true, if Filters are allowed to use OpenGL.
+     */
+    public boolean isOpenGLSupported() {
+        // Both the run loop and the context must permit GL use.
+        return mRunLoop.isOpenGLAllowed() && mContext.isOpenGLSupported();
+    }
+
+    /**
+     * Enable flushing all frames from the graph when running completes.
+     *
+     * If this is set to false, then frames may remain in the pipeline even after running completes.
+     * The default value is true.
+     *
+     * @param flush true, if the GraphRunner should flush the graph when running completes.
+     * @see #flushOnClose()
+     */
+    public void setFlushOnClose(boolean flush) {
+        synchronized (mParams) {
+            mParams.flushOnClose = flush;
+        }
+    }
+
+    /**
+     * Returns whether the GraphRunner flushes frames when running completes.
+     *
+     * @return true, if the GraphRunner flushes frames when running completes.
+     * @see #setFlushOnClose(boolean)
+     */
+    public boolean flushOnClose() {
+        synchronized (mParams) {
+            return mParams.flushOnClose;
+        }
+    }
+
+    /**
+     * Sets the listener for receiving runtime events. A GraphRunner.Listener instance can be used
+     * to determine when certain events occur during graph execution (and react on them). See the
+     * {@link GraphRunner.Listener} class for details.
+     *
+     * @param listener the GraphRunner.Listener instance to set; may be null, in which case
+     *                 runner errors are logged instead of reported (see onRunnerStopped()).
+     * @see #getListener()
+     */
+    public void setListener(Listener listener) {
+        synchronized (mParams) {
+            mParams.listener = listener;
+        }
+    }
+
+    /**
+     * Returns the currently assigned GraphRunner.Listener.
+     *
+     * @return the currently assigned GraphRunner.Listener instance.
+     * @see #setListener(Listener)
+     */
+    public Listener getListener() {
+        synchronized (mParams) {
+            return mParams.listener;
+        }
+    }
+
+    /**
+     * Returns the FrameManager that manages the runner's frames.
+     *
+     * @return the FrameManager instance that manages the runner's frames.
+     */
+    public FrameManager getFrameManager() {
+        // Created once in init(); not expected to be null afterwards.
+        return mFrameManager;
+    }
+
+    /**
+     * Tear down a GraphRunner and all its resources.
+     * <p>
+     * You must make sure that before calling this, no more graphs are attached to this runner.
+     * Typically, graphs are removed from runners when they are torn down.
+     *
+     * @throws IllegalStateException if there are still graphs attached to this runner.
+     */
+    public void tearDown() {
+        synchronized (mGraphs) {
+            if (!mGraphs.isEmpty()) {
+                throw new IllegalStateException("Attempting to tear down runner with "
+                        + mGraphs.size() + " graphs still attached!");
+            }
+        }
+        mRunLoop.pushEvent(KILL_EVENT);
+        // Wait for thread to complete, so that everything is torn down by the time we return.
+        try {
+            mRunThread.join();
+        } catch (InterruptedException e) {
+            Log.e("GraphRunner", "Error waiting for runner thread to finish!");
+        }
+    }
+
+    /**
+     * Release all frames managed by this runner.
+     * <p>
+     * Note, that you must make sure no graphs are attached to this runner before calling this
+     * method, as otherwise Filters in the graph may reference frames that are now released.
+     *
+     * TODO: Eventually, this method should be removed. Instead we should have better analysis
+     * that catches leaking frames from filters.
+     *
+     * @throws IllegalStateException if there are still graphs attached to this runner.
+     */
+    public void releaseFrames() {
+        final int attachedCount;
+        synchronized (mGraphs) {
+            attachedCount = mGraphs.size();
+        }
+        if (attachedCount > 0) {
+            throw new IllegalStateException("Attempting to release frames with "
+                    + attachedCount + " graphs still attached!");
+        }
+        mRunLoop.pushEvent(RELEASE_FRAMES_EVENT);
+    }
+
+    // Core internal methods ///////////////////////////////////////////////////////////////////////
+    /** Registers a graph with this runner so it is tracked until torn down. */
+    void attachGraph(FilterGraph graph) {
+        synchronized (mGraphs) {
+            mGraphs.add(graph);
+        }
+    }
+
+    /** Nudges the run loop to take another scheduling step (pushWakeEvent presumably also
+     *  wakes an idle loop — verify against GraphRunLoop). */
+    void signalWakeUp() {
+        mRunLoop.pushWakeEvent(STEP_EVENT);
+    }
+
+    /** Asks the run loop to begin executing; asynchronous, like all event pushes here. */
+    void begin() {
+        mRunLoop.pushEvent(BEGIN_EVENT);
+    }
+
+    /** Like pause(), but closes all filters. Can be resumed using restart(). */
+    void halt() {
+        mRunLoop.pushEvent(HALT_EVENT);
+    }
+
+    /** Resumes a previously halted runner, and restores it to its non-halted state. */
+    void restart() {
+        mRunLoop.pushEvent(RESTART_EVENT);
+    }
+
+    /**
+     * Tears down the specified graph.
+     *
+     * The graph must be attached to this runner.
+     */
+    void tearDownGraph(FilterGraph graph) {
+        // Reject graphs that belong to a different runner.
+        GraphRunner owner = graph.getRunner();
+        if (owner != this) {
+            throw new IllegalArgumentException("Attempting to tear down graph with foreign "
+                    + "GraphRunner!");
+        }
+        // Queue the tear-down on the runner thread, then stop tracking the graph.
+        mRunLoop.pushEvent(Event.TEARDOWN, graph);
+        synchronized (mGraphs) {
+            mGraphs.remove(graph);
+        }
+    }
+
+    /**
+     * Remove all frames that are waiting to be processed.
+     *
+     * Removes and releases frames that are waiting in the graph connections of the currently
+     * halted graphs, i.e. frames that are waiting to be processed. This does not include frames
+     * that may be held or cached by filters themselves.
+     *
+     * TODO: With the new sub-graph architecture, this can now be simplified and made public.
+     * It can then no longer rely on opened graphs, and instead flush a graph and all its
+     * sub-graphs.
+     */
+    void flushFrames() {
+        // Asynchronous: the flush happens on the runner thread when the event is processed.
+        mRunLoop.pushEvent(FLUSH_EVENT);
+    }
+
+    // Private methods /////////////////////////////////////////////////////////////////////////////
+    /**
+     * One-time setup (presumably invoked from the constructors, which are not visible here):
+     * creates the frame manager and the default LRU scheduler, starts the run-loop thread,
+     * and registers this runner with its context.
+     */
+    private void init(Config config) {
+        mFrameManager = new FrameManager(this, FrameManager.FRAME_CACHE_LRU);
+        createScheduler(STRATEGY_LRU);
+        mRunLoop = new GraphRunLoop(config.allowOpenGL);
+        mRunThread = new Thread(mRunLoop);
+        mRunThread.setPriority(config.threadPriority);
+        mRunThread.start();
+        mContext.addRunner(this);
+    }
+
+    /** Maps a STRATEGY_* constant to a scheduler instance and installs it as mScheduler. */
+    private void createScheduler(int strategy) {
+        switch (strategy) {
+            case STRATEGY_LRU:
+                mScheduler = new LruScheduler();
+                break;
+            case STRATEGY_LFU:
+                mScheduler = new LfuScheduler();
+                break;
+            case STRATEGY_ONESHOT:
+                mScheduler = new OneShotScheduler();
+                break;
+            default:
+                throw new IllegalArgumentException(
+                        "Unknown schedule-strategy constant " + strategy + "!");
+        }
+    }
+
+    // Called within the runner's thread
+    /**
+     * Reports the end of a run to the registered listener (on the context's handler), or logs
+     * the failure if no listener is installed.
+     *
+     * @param exception the failure that stopped the run, or null for a clean stop.
+     * @param closed whether filters were closed when the error occurred (forwarded to listener).
+     */
+    private void onRunnerStopped(final Exception exception, final boolean closed) {
+        mRunningGraph = null;
+        synchronized (mParams) {
+            if (mParams.listener != null) {
+                // Deliver the callback via the context, not on the runner thread itself.
+                getContext().postRunnable(new Runnable() {
+                    @Override
+                    public void run() {
+                        if (exception == null) {
+                            mParams.listener.onGraphRunnerStopped(GraphRunner.this);
+                        } else {
+                            mParams.listener.onGraphRunnerError(exception, closed);
+                        }
+                    }
+                });
+            } else if (exception != null) {
+                // No listener installed: route the failure (including its stack trace) through
+                // the Android log rather than printing to stderr via printStackTrace().
+                Log.e("GraphRunner", "Uncaught exception during graph execution!", exception);
+            }
+        }
+    }
+}
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/ImageShader.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/ImageShader.java
new file mode 100644
index 0000000..0ec50a3
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/ImageShader.java
@@ -0,0 +1,793 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package androidx.media.filterfw;
+
+import android.graphics.RectF;
+import android.opengl.GLES20;
+import android.util.Log;
+
+import androidx.media.filterfw.geometry.Quad;
+
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.nio.FloatBuffer;
+import java.util.Arrays;
+import java.util.HashMap;
+
+/**
+ * Convenience class to perform GL shader operations on image data.
+ * <p>
+ * The ImageShader class greatly simplifies the task of running GL shader language kernels over
+ * Frame data buffers that contain RGBA image data.
+ * </p><p>
+ * TODO: More documentation
+ * </p>
+ */
+public class ImageShader {
+
+    // GL program handle; 0 means "no valid program" (see checkExecutable()).
+    private int mProgram = 0;
+    // Whether the target is cleared before drawing, and with which color/buffers.
+    private boolean mClearsOutput = false;
+    private float[] mClearColor = { 0f, 0f, 0f, 0f };
+    // Optional blending state applied in pushShaderState().
+    private boolean mBlendEnabled = false;
+    private int mSFactor = GLES20.GL_SRC_ALPHA;
+    private int mDFactor = GLES20.GL_ONE_MINUS_SRC_ALPHA;
+    private int mDrawMode = GLES20.GL_TRIANGLE_STRIP;
+    private int mVertexCount = 4;
+    private int mBaseTexUnit = GLES20.GL_TEXTURE0;
+    private int mClearBuffers = GLES20.GL_COLOR_BUFFER_BIT;
+    // Pending texture/position coordinates; null means "already uploaded" (see the
+    // updateSourceCoordAttribute()/updateTargetCoordAttribute() methods). Defaults cover the
+    // full source quad and, for the target, the full clip-space quad.
+    private float[] mSourceCoords = new float[] { 0f, 0f, 1f, 0f, 0f, 1f, 1f, 1f };
+    private float[] mTargetCoords = new float[] { -1f, -1f, 1f, -1f, -1f, 1f, 1f, 1f };
+
+    // Uniforms discovered by scanUniforms(); attributes assigned by callers.
+    private HashMap<String, ProgramUniform> mUniforms;
+    private HashMap<String, VertexAttribute> mAttributes = new HashMap<String, VertexAttribute>();
+
+    // Size of a float in bytes, used for attribute strides.
+    private final static int FLOAT_SIZE = 4;
+
+    // Default vertex shader: passes positions through and forwards texture coordinates.
+    // NOTE(review): these are constants despite the m-prefix; conventional naming would be
+    // DEFAULT_VERTEX_SHADER / IDENTITY_SHADER, but other methods reference these names.
+    private final static String mDefaultVertexShader =
+        "attribute vec4 a_position;\n" +
+        "attribute vec2 a_texcoord;\n" +
+        "varying vec2 v_texcoord;\n" +
+        "void main() {\n" +
+        "  gl_Position = a_position;\n" +
+        "  v_texcoord = a_texcoord;\n" +
+        "}\n";
+
+    // Fragment shader that copies the input texture unmodified.
+    private final static String mIdentityShader =
+        "precision mediump float;\n" +
+        "uniform sampler2D tex_sampler_0;\n" +
+        "varying vec2 v_texcoord;\n" +
+        "void main() {\n" +
+        "  gl_FragColor = texture2D(tex_sampler_0, v_texcoord);\n" +
+        "}\n";
+
+    /**
+     * Holds the state of a single vertex attribute: either client-side values kept in a direct
+     * FloatBuffer, or a reference into a vertex buffer object (VBO).
+     */
+    private static class VertexAttribute {
+        private String mName;
+        // NOTE(review): mIsConst is only ever assigned false by the set() methods below, so the
+        // constant-attribute branch in push() appears unreachable from the code visible here.
+        private boolean mIsConst;
+        private int mIndex;
+        private boolean mShouldNormalize;
+        private int mOffset;
+        private int mStride;
+        private int mComponents;
+        private int mType;
+        private int mVbo;
+        private int mLength;
+        private FloatBuffer mValues;
+
+        public VertexAttribute(String name, int index) {
+            mName = name;
+            mIndex = index;
+            // -1 forces initBuffer() on the first array-based set() call.
+            mLength = -1;
+        }
+
+        // Assigns client-side attribute data; reuses the direct buffer when the length matches.
+        public void set(boolean normalize, int stride, int components, int type, float[] values) {
+            mIsConst = false;
+            mShouldNormalize = normalize;
+            mStride = stride;
+            mComponents = components;
+            mType = type;
+            mVbo = 0;
+            if (mLength != values.length){
+                initBuffer(values);
+                mLength = values.length;
+            }
+            copyValues(values);
+        }
+
+        // Points the attribute at data stored in a VBO; clears any client-side values so push()
+        // takes the VBO path.
+        public void set(boolean normalize, int offset, int stride, int components, int type,
+                int vbo){
+            mIsConst = false;
+            mShouldNormalize = normalize;
+            mOffset = offset;
+            mStride = stride;
+            mComponents = components;
+            mType = type;
+            mVbo = vbo;
+            mValues = null;
+        }
+
+        // Uploads this attribute's state to GL. Returns false only for an invalid constant
+        // component count; GL errors are reported via GLToolbox.checkGlError().
+        public boolean push() {
+            if (mIsConst) {
+                switch (mComponents) {
+                    case 1:
+                        GLES20.glVertexAttrib1fv(mIndex, mValues);
+                        break;
+                    case 2:
+                        GLES20.glVertexAttrib2fv(mIndex, mValues);
+                        break;
+                    case 3:
+                        GLES20.glVertexAttrib3fv(mIndex, mValues);
+                        break;
+                    case 4:
+                        GLES20.glVertexAttrib4fv(mIndex, mValues);
+                        break;
+                    default:
+                        return false;
+                }
+                GLES20.glDisableVertexAttribArray(mIndex);
+            } else {
+                if (mValues != null) {
+                    // Note that we cannot do any size checking here, as the correct component
+                    // count depends on the drawing step. GL should catch such errors then, and
+                    // we will report them to the user.
+                    GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, 0);
+                    GLES20.glVertexAttribPointer(mIndex,
+                                                 mComponents,
+                                                 mType,
+                                                 mShouldNormalize,
+                                                 mStride,
+                                                 mValues);
+                } else {
+                    GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, mVbo);
+                    GLES20.glVertexAttribPointer(mIndex,
+                                                 mComponents,
+                                                 mType,
+                                                 mShouldNormalize,
+                                                 mStride,
+                                                 mOffset);
+                }
+                GLES20.glEnableVertexAttribArray(mIndex);
+            }
+            GLToolbox.checkGlError("Set vertex-attribute values");
+            return true;
+        }
+
+        @Override
+        public String toString() {
+            return mName;
+        }
+
+        // Allocates a native-order direct buffer sized for the given values.
+        private void initBuffer(float[] values) {
+            mValues = ByteBuffer.allocateDirect(values.length * FLOAT_SIZE)
+                .order(ByteOrder.nativeOrder()).asFloatBuffer();
+        }
+
+        // Copies values into the buffer and rewinds it for GL consumption.
+        private void copyValues(float[] values) {
+            mValues.put(values).position(0);
+        }
+
+    }
+
+    /**
+     * Immutable record of one active uniform in a linked program: its name, location, GL type
+     * and array size, queried via glGetActiveUniform at construction time.
+     */
+    private static final class ProgramUniform {
+        private String mName;
+        private int mLocation;
+        private int mType;
+        private int mSize;
+
+        public ProgramUniform(int program, int index) {
+            // Query the maximum uniform-name length so the name buffer is large enough.
+            int[] len = new int[1];
+            GLES20.glGetProgramiv(program, GLES20.GL_ACTIVE_UNIFORM_MAX_LENGTH, len, 0);
+
+            int[] type = new int[1];
+            int[] size = new int[1];
+            byte[] name = new byte[len[0]];
+            int[] ignore = new int[1];
+
+            GLES20.glGetActiveUniform(program, index, len[0], ignore, 0, size, 0, type, 0, name, 0);
+            // strlen is a helper defined elsewhere in this file; trims the NUL-padded name.
+            mName = new String(name, 0, strlen(name));
+            mLocation = GLES20.glGetUniformLocation(program, mName);
+            mType = type[0];
+            mSize = size[0];
+            GLToolbox.checkGlError("Initializing uniform");
+        }
+
+        public String getName() {
+            return mName;
+        }
+
+        public int getType() {
+            return mType;
+        }
+
+        public int getLocation() {
+            return mLocation;
+        }
+
+        public int getSize() {
+            return mSize;
+        }
+    }
+
+    /** Creates a shader from a fragment shader, using the default pass-through vertex shader. */
+    public ImageShader(String fragmentShader) {
+        this(mDefaultVertexShader, fragmentShader);
+    }
+
+    /** Creates a shader from explicit vertex and fragment shader sources. */
+    public ImageShader(String vertexShader, String fragmentShader) {
+        mProgram = createProgram(vertexShader, fragmentShader);
+        scanUniforms();
+    }
+
+    /** Returns a shader that copies its input texture to the target unmodified. */
+    public static ImageShader createIdentity() {
+        return new ImageShader(mIdentityShader);
+    }
+
+    /** Returns an identity shader that uses the caller-supplied vertex shader. */
+    public static ImageShader createIdentity(String vertexShader) {
+        return new ImageShader(vertexShader, mIdentityShader);
+    }
+
+    /** Copies a texture into the given target using the current target's identity shader. */
+    public static void renderTextureToTarget(TextureSource texture,
+                                             RenderTarget target,
+                                             int width,
+                                             int height) {
+        ImageShader shader = RenderTarget.currentTarget().getIdentityShader();
+        shader.process(texture, target, width, height);
+    }
+
+    /**
+     * Renders the input frame into the output frame using this shader program.
+     * <p>
+     * The frames are unlocked even if the GL processing step throws, so a failure cannot leave
+     * the input or output frame permanently locked.
+     *
+     * @param input the frame to read texture data from.
+     * @param output the frame that receives the rendered result.
+     */
+    public void process(FrameImage2D input, FrameImage2D output) {
+        TextureSource texSource = input.lockTextureSource();
+        RenderTarget renderTarget = output.lockRenderTarget();
+        try {
+            processMulti(new TextureSource[] { texSource },
+                         renderTarget,
+                         output.getWidth(),
+                         output.getHeight());
+        } finally {
+            input.unlock();
+            output.unlock();
+        }
+    }
+
+    /**
+     * Renders the input frames into the output frame using this shader program.
+     * <p>
+     * Inputs are bound to consecutive texture units starting at 0. All locked frames are
+     * unlocked even if the GL processing step throws, so a failure cannot leave a frame
+     * permanently locked.
+     *
+     * @param inputs the frames to read texture data from.
+     * @param output the frame that receives the rendered result.
+     */
+    public void processMulti(FrameImage2D[] inputs, FrameImage2D output) {
+        TextureSource[] texSources = new TextureSource[inputs.length];
+        for (int i = 0; i < inputs.length; ++i) {
+            texSources[i] = inputs[i].lockTextureSource();
+        }
+        RenderTarget renderTarget = output.lockRenderTarget();
+        try {
+            processMulti(texSources,
+                         renderTarget,
+                         output.getWidth(),
+                         output.getHeight());
+        } finally {
+            for (FrameImage2D input : inputs) {
+                input.unlock();
+            }
+            output.unlock();
+        }
+    }
+
+    /** Renders a single texture into the given target using this shader. */
+    public void process(TextureSource texture, RenderTarget target, int width, int height) {
+        processMulti(new TextureSource[] { texture }, target, width, height);
+    }
+
+    /**
+     * Core processing routine: focuses the target, pushes shader state, binds the input
+     * textures to consecutive texture units, and issues the draw call.
+     */
+    public void processMulti(TextureSource[] sources, RenderTarget target, int width, int height) {
+        // Surface any GL error left over from earlier, unrelated operations first.
+        GLToolbox.checkGlError("Unknown Operation");
+        checkExecutable();
+        checkTexCount(sources.length);
+        focusTarget(target, width, height);
+        pushShaderState();
+        bindInputTextures(sources);
+        render();
+    }
+
+    /**
+     * Renders into the output frame without any input textures (e.g. for generator shaders).
+     * The frame is unlocked even if the GL processing step throws.
+     *
+     * @param output the frame that receives the rendered result.
+     */
+    public void processNoInput(FrameImage2D output) {
+        RenderTarget renderTarget = output.lockRenderTarget();
+        try {
+            processNoInput(renderTarget, output.getWidth(), output.getHeight());
+        } finally {
+            output.unlock();
+        }
+    }
+
+    /** Renders into the given target without any input textures. */
+    public void processNoInput(RenderTarget target, int width, int height) {
+        processMulti(new TextureSource[] {}, target, width, height);
+    }
+
+    /** Returns the location of the named uniform, throwing if it does not exist. */
+    public int getUniformLocation(String name) {
+        return getProgramUniform(name, true).getLocation();
+    }
+
+    /** Returns the location of the named vertex attribute, throwing if it does not exist. */
+    public int getAttributeLocation(String name) {
+        boolean isInternal = name.equals(positionAttributeName())
+                || name.equals(texCoordAttributeName());
+        if (isInternal) {
+            // Warn, but still resolve: the position/texcoord attributes are managed internally.
+            Log.w("ImageShader", "Attempting to access internal attribute '" + name
+                + "' directly!");
+        }
+        int location = GLES20.glGetAttribLocation(mProgram, name);
+        if (location < 0) {
+            throw new RuntimeException("Unknown attribute '" + name + "' in shader program!");
+        }
+        return location;
+    }
+
+    /** Assigns a single int to the named uniform. */
+    public void setUniformValue(String uniformName, int value) {
+        useProgram();
+        int location = getUniformLocation(uniformName);
+        GLES20.glUniform1i(location, value);
+        GLToolbox.checkGlError("Set uniform value (" + uniformName + ")");
+    }
+
+    /** Assigns a single float to the named uniform. */
+    public void setUniformValue(String uniformName, float value) {
+        useProgram();
+        int location = getUniformLocation(uniformName);
+        GLES20.glUniform1f(location, value);
+        GLToolbox.checkGlError("Set uniform value (" + uniformName + ")");
+    }
+
+    /**
+     * Assigns an int-array value to the named uniform, dispatching on the uniform's declared
+     * GL type (int, ivec2, ivec3 or ivec4, possibly as arrays).
+     *
+     * @param uniformName the name of the uniform in the shader program.
+     * @param values the values to assign; length must be a multiple of the component count.
+     * @throws RuntimeException if the uniform type is not an int type.
+     */
+    public void setUniformValue(String uniformName, int[] values) {
+        ProgramUniform uniform = getProgramUniform(uniformName, true);
+        useProgram();
+        int len = values.length;
+        switch (uniform.getType()) {
+            case GLES20.GL_INT:
+                checkUniformAssignment(uniform, len, 1);
+                GLES20.glUniform1iv(uniform.getLocation(), len, values, 0);
+                break;
+            case GLES20.GL_INT_VEC2:
+                checkUniformAssignment(uniform, len, 2);
+                GLES20.glUniform2iv(uniform.getLocation(), len / 2, values, 0);
+                break;
+            case GLES20.GL_INT_VEC3:
+                checkUniformAssignment(uniform, len, 3);
+                // Fixed: previously called glUniform2iv, uploading the wrong component count.
+                GLES20.glUniform3iv(uniform.getLocation(), len / 3, values, 0);
+                break;
+            case GLES20.GL_INT_VEC4:
+                checkUniformAssignment(uniform, len, 4);
+                // Fixed: previously called glUniform2iv, uploading the wrong component count.
+                GLES20.glUniform4iv(uniform.getLocation(), len / 4, values, 0);
+                break;
+            default:
+                throw new RuntimeException("Cannot assign int-array to incompatible uniform type "
+                    + "for uniform '" + uniformName + "'!");
+        }
+        GLToolbox.checkGlError("Set uniform value (" + uniformName + ")");
+    }
+
+
+    /**
+     * Assigns a float-array value to the named uniform, dispatching on the uniform's declared
+     * GL type (float, vec2-4 or mat2-4, possibly as arrays).
+     *
+     * @param uniformName the name of the uniform in the shader program.
+     * @param values the values to assign; length must be a multiple of the component count.
+     * @throws RuntimeException if the uniform type is not a float type.
+     */
+    public void setUniformValue(String uniformName, float[] values) {
+        ProgramUniform uniform = getProgramUniform(uniformName, true);
+        useProgram();
+        int len = values.length;
+        switch (uniform.getType()) {
+            case GLES20.GL_FLOAT:
+                checkUniformAssignment(uniform, len, 1);
+                GLES20.glUniform1fv(uniform.getLocation(), len, values, 0);
+                break;
+            case GLES20.GL_FLOAT_VEC2:
+                checkUniformAssignment(uniform, len, 2);
+                GLES20.glUniform2fv(uniform.getLocation(), len / 2, values, 0);
+                break;
+            case GLES20.GL_FLOAT_VEC3:
+                checkUniformAssignment(uniform, len, 3);
+                GLES20.glUniform3fv(uniform.getLocation(), len / 3, values, 0);
+                break;
+            case GLES20.GL_FLOAT_VEC4:
+                checkUniformAssignment(uniform, len, 4);
+                GLES20.glUniform4fv(uniform.getLocation(), len / 4, values, 0);
+                break;
+            case GLES20.GL_FLOAT_MAT2:
+                checkUniformAssignment(uniform, len, 4);
+                GLES20.glUniformMatrix2fv(uniform.getLocation(), len / 4, false, values, 0);
+                break;
+            case GLES20.GL_FLOAT_MAT3:
+                checkUniformAssignment(uniform, len, 9);
+                GLES20.glUniformMatrix3fv(uniform.getLocation(), len / 9, false, values, 0);
+                break;
+            case GLES20.GL_FLOAT_MAT4:
+                checkUniformAssignment(uniform, len, 16);
+                GLES20.glUniformMatrix4fv(uniform.getLocation(), len / 16, false, values, 0);
+                break;
+            default:
+                throw new RuntimeException("Cannot assign float-array to incompatible uniform type "
+                    + "for uniform '" + uniformName + "'!");
+        }
+        GLToolbox.checkGlError("Set uniform value (" + uniformName + ")");
+    }
+
+    /** Assigns client-side float data to the named attribute, tightly packed. */
+    public void setAttributeValues(String attributeName, float[] data, int components) {
+        VertexAttribute attr = getProgramAttribute(attributeName, true);
+        attr.set(false, FLOAT_SIZE * components, components, GLES20.GL_FLOAT, data);
+    }
+
+    /** Points the named attribute at data stored in the given VBO. */
+    public void setAttributeValues(String attributeName, int vbo, int type, int components,
+                                   int stride, int offset, boolean normalize) {
+        VertexAttribute attr = getProgramAttribute(attributeName, true);
+        attr.set(normalize, offset, stride, components, type, vbo);
+    }
+
+    public void setSourceRect(float x, float y, float width, float height) {
+        setSourceCoords(new float[] { x, y, x + width, y, x, y + height, x + width, y + height });
+    }
+
+    public void setSourceRect(RectF rect) {
+        setSourceRect(rect.left, rect.top, rect.right - rect.left, rect.bottom - rect.top);
+    }
+
+    public void setSourceQuad(Quad quad) {
+        setSourceCoords(new float[] { quad.topLeft().x,     quad.topLeft().y,
+                                      quad.topRight().x,    quad.topRight().y,
+                                      quad.bottomLeft().x,  quad.bottomLeft().y,
+                                      quad.bottomRight().x, quad.bottomRight().y });
+    }
+
+    public void setSourceCoords(float[] coords) {
+        if (coords.length != 8) {
+            throw new IllegalArgumentException("Expected 8 coordinates as source coordinates but "
+                + "got " + coords.length + " coordinates!");
+        }
+        mSourceCoords = Arrays.copyOf(coords, 8);
+    }
+
+    /**
+     * Sets the source coordinates by transforming the unit quad's corners through the given
+     * column-major 4x4 matrix. Only the x/y components of the first two basis columns
+     * (indices 0, 1, 4, 5) and the translation (indices 12, 13) are used.
+     */
+    public void setSourceTransform(float[] matrix) {
+        if (matrix.length != 16) {
+            throw new IllegalArgumentException("Expected 4x4 matrix for source transform!");
+        }
+        setSourceCoords(new float[] {
+            matrix[12],
+            matrix[13],
+
+            matrix[0] + matrix[12],
+            matrix[1] + matrix[13],
+
+            matrix[4] + matrix[12],
+            matrix[5] + matrix[13],
+
+            matrix[0] + matrix[4] + matrix[12],
+            matrix[1] + matrix[5] + matrix[13],
+        });
+    }
+
+    public void setTargetRect(float x, float y, float width, float height) {
+        setTargetCoords(new float[] { x, y,
+                                      x + width, y,
+                                      x, y + height,
+                                      x + width, y + height });
+    }
+
+    public void setTargetRect(RectF rect) {
+        setTargetCoords(new float[] { rect.left,    rect.top,
+                                      rect.right,   rect.top,
+                                      rect.left,    rect.bottom,
+                                      rect.right,   rect.bottom });
+    }
+
+    public void setTargetQuad(Quad quad) {
+        setTargetCoords(new float[] { quad.topLeft().x,     quad.topLeft().y,
+                                      quad.topRight().x,    quad.topRight().y,
+                                      quad.bottomLeft().x,  quad.bottomLeft().y,
+                                      quad.bottomRight().x, quad.bottomRight().y });
+    }
+
+    public void setTargetCoords(float[] coords) {
+        if (coords.length != 8) {
+            throw new IllegalArgumentException("Expected 8 coordinates as target coordinates but "
+                + "got " + coords.length + " coordinates!");
+        }
+        mTargetCoords = new float[8];
+        for (int i = 0; i < 8; ++i) {
+            mTargetCoords[i] = coords[i] * 2f - 1f;
+        }
+    }
+
+    /**
+     * Sets the target coordinates by transforming the unit quad's corners through the given
+     * column-major 4x4 matrix. Only the x/y components of the first two basis columns
+     * (indices 0, 1, 4, 5) and the translation (indices 12, 13) are used; setTargetCoords()
+     * then maps the result into clip space.
+     */
+    public void setTargetTransform(float[] matrix) {
+        if (matrix.length != 16) {
+            throw new IllegalArgumentException("Expected 4x4 matrix for target transform!");
+        }
+        setTargetCoords(new float[] {
+            matrix[12],
+            matrix[13],
+
+            matrix[0] + matrix[12],
+            matrix[1] + matrix[13],
+
+            matrix[4] + matrix[12],
+            matrix[5] + matrix[13],
+
+            matrix[0] + matrix[4] + matrix[12],
+            matrix[1] + matrix[5] + matrix[13],
+        });
+    }
+
+    /** Sets whether the output is cleared before drawing (see pushShaderState()). */
+    public void setClearsOutput(boolean clears) {
+        mClearsOutput = clears;
+    }
+
+    public boolean getClearsOutput() {
+        return mClearsOutput;
+    }
+
+    /** Sets the RGBA clear color used when clearing is enabled. */
+    public void setClearColor(float[] rgba) {
+        // NOTE(review): stores the caller's array by reference (no defensive copy); mutating
+        // the array afterwards changes the clear color.
+        mClearColor = rgba;
+    }
+
+    public float[] getClearColor() {
+        // NOTE(review): exposes the internal array by reference.
+        return mClearColor;
+    }
+
+    /** Sets the glClear() buffer mask used when clearing is enabled. */
+    public void setClearBufferMask(int bufferMask) {
+        mClearBuffers = bufferMask;
+    }
+
+    public int getClearBufferMask() {
+        return mClearBuffers;
+    }
+
+    /** Enables or disables alpha blending during the draw (see pushShaderState()). */
+    public void setBlendEnabled(boolean enable) {
+        mBlendEnabled = enable;
+    }
+
+    public boolean getBlendEnabled() {
+        return mBlendEnabled;
+    }
+
+    /** Sets the glBlendFunc() source/destination factors used when blending is enabled. */
+    public void setBlendFunc(int sFactor, int dFactor) {
+        mSFactor = sFactor;
+        mDFactor = dFactor;
+    }
+
+    /** Sets the primitive mode passed to glDrawArrays() (default GL_TRIANGLE_STRIP). */
+    public void setDrawMode(int drawMode) {
+        mDrawMode = drawMode;
+    }
+
+    public int getDrawMode() {
+        return mDrawMode;
+    }
+
+    /** Sets the vertex count passed to glDrawArrays() (default 4, matching one quad). */
+    public void setVertexCount(int count) {
+        mVertexCount = count;
+    }
+
+    public int getVertexCount() {
+        return mVertexCount;
+    }
+
+    /** Sets the first texture unit used when binding input textures (default GL_TEXTURE0). */
+    public void setBaseTextureUnit(int baseTexUnit) {
+        mBaseTexUnit = baseTexUnit;
+    }
+
+    public int baseTextureUnit() {
+        return mBaseTexUnit;
+    }
+
+    /** Name of the texture-coordinate attribute expected by the vertex shader. */
+    public String texCoordAttributeName() {
+        return "a_texcoord";
+    }
+
+    /** Name of the position attribute expected by the vertex shader. */
+    public String positionAttributeName() {
+        return "a_position";
+    }
+
+    /** Name of the sampler uniform for input i; shaders must declare tex_sampler_0..n-1. */
+    public String inputTextureUniformName(int index) {
+        return "tex_sampler_" + index;
+    }
+
+    public static int maxTextureUnits() {
+        return GLES20.GL_MAX_COMBINED_TEXTURE_IMAGE_UNITS;
+    }
+
+    @Override
+    protected void finalize() throws Throwable {
+        try {
+            // NOTE(review): this assumes a GL context is current on the finalizer thread, which
+            // is generally not guaranteed — an explicit release method would be safer.
+            GLES20.glDeleteProgram(mProgram);
+        } finally {
+            // Always chain to the superclass finalizer, even if the GL call throws.
+            super.finalize();
+        }
+    }
+
+    /**
+     * Installs this shader's full GL state for the next draw: activates the program, uploads
+     * any pending source/target coordinates and attributes, optionally clears the target, and
+     * configures blending.
+     */
+    protected void pushShaderState() {
+        useProgram();
+        updateSourceCoordAttribute();
+        updateTargetCoordAttribute();
+        pushAttributes();
+        if (mClearsOutput) {
+            GLES20.glClearColor(mClearColor[0], mClearColor[1], mClearColor[2], mClearColor[3]);
+            GLES20.glClear(mClearBuffers);
+        }
+        if (mBlendEnabled) {
+            GLES20.glEnable(GLES20.GL_BLEND);
+            GLES20.glBlendFunc(mSFactor, mDFactor);
+        } else {
+            GLES20.glDisable(GLES20.GL_BLEND);
+        }
+        GLToolbox.checkGlError("Set render variables");
+    }
+
+    /** Makes the render target current and sizes the viewport to the output dimensions. */
+    private void focusTarget(RenderTarget target, int width, int height) {
+        target.focus();
+        GLES20.glViewport(0, 0, width, height);
+        GLToolbox.checkGlError("glViewport");
+    }
+
+    /**
+     * Binds each input texture to a consecutive texture unit starting at baseTextureUnit(),
+     * and points the matching tex_sampler_i uniform at that unit. Throws if the program does
+     * not declare enough sampler uniforms for the supplied sources.
+     */
+    private void bindInputTextures(TextureSource[] sources) {
+        for (int i = 0; i < sources.length; ++i) {
+            // Activate texture unit i (GL texture-unit enums are consecutive values).
+            GLES20.glActiveTexture(baseTextureUnit() + i);
+
+            // Bind texture
+            sources[i].bind();
+
+            // Assign the texture uniform in the shader to unit i
+            int texUniform = GLES20.glGetUniformLocation(mProgram, inputTextureUniformName(i));
+            if (texUniform >= 0) {
+                GLES20.glUniform1i(texUniform, i);
+            } else {
+                throw new RuntimeException("Shader does not seem to support " + sources.length
+                    + " number of input textures! Missing uniform " + inputTextureUniformName(i)
+                    + "!");
+            }
+            GLToolbox.checkGlError("Binding input texture " + i);
+        }
+    }
+
+    /**
+     * Pushes all cached vertex attributes to the GPU.
+     * Throws RuntimeException if any attribute fails to upload.
+     */
+    private void pushAttributes() {
+        for (VertexAttribute attr : mAttributes.values()) {
+            if (!attr.push()) {
+                throw new RuntimeException("Unable to assign attribute value '" + attr + "'!");
+            }
+        }
+        GLToolbox.checkGlError("Push Attributes");
+    }
+
+    /**
+     * Uploads pending source (texture) coordinates, if any, to the shader's
+     * texture-coordinate attribute, then clears the pending values.
+     */
+    private void updateSourceCoordAttribute() {
+        // If attribute does not exist, simply do nothing (may be custom shader).
+        VertexAttribute attr = getProgramAttribute(texCoordAttributeName(), false);
+        // A non-null value of mSourceCoords indicates new values to be set.
+        if (mSourceCoords != null && attr != null) {
+            // Upload new source coordinates to GPU (2 floats per vertex).
+            attr.set(false, FLOAT_SIZE * 2, 2, GLES20.GL_FLOAT, mSourceCoords);
+        }
+        // Do not set again (even if failed, to not cause endless attempts)
+        mSourceCoords = null;
+    }
+
+    /**
+     * Uploads pending target (position) coordinates, if any, to the shader's
+     * position attribute, then clears the pending values.
+     */
+    private void updateTargetCoordAttribute() {
+        // If attribute does not exist, simply do nothing (may be custom shader).
+        VertexAttribute attr = getProgramAttribute(positionAttributeName(), false);
+        // A non-null value of mTargetCoords indicates new values to be set.
+        if (mTargetCoords != null && attr != null) {
+            // Upload new target coordinates to GPU (2 floats per vertex).
+            attr.set(false, FLOAT_SIZE * 2, 2, GLES20.GL_FLOAT, mTargetCoords);
+        }
+        // Do not set again (even if failed, to not cause endless attempts)
+        mTargetCoords = null;
+    }
+
+    /** Issues the draw call using the configured draw mode and vertex count. */
+    private void render() {
+        GLES20.glDrawArrays(mDrawMode, 0, mVertexCount);
+        GLToolbox.checkGlError("glDrawArrays");
+    }
+
+    /** Throws RuntimeException if no valid GL program (handle 0) is attached. */
+    private void checkExecutable() {
+        if (mProgram == 0) {
+            throw new RuntimeException("Attempting to execute invalid shader-program!");
+        }
+    }
+
+    /** Makes this shader program the current GL program. */
+    private void useProgram() {
+        GLES20.glUseProgram(mProgram);
+        GLToolbox.checkGlError("glUseProgram");
+    }
+
+    /**
+     * Verifies that {@code count} input textures fit in the available texture
+     * units. NOTE(review): confirm maxTextureUnits() returns the actual queried
+     * device limit; otherwise this check is overly permissive.
+     */
+    private static void checkTexCount(int count) {
+        if (count > maxTextureUnits()) {
+            throw new RuntimeException("Number of textures passed (" + count + ") exceeds the "
+                + "maximum number of allowed texture units (" + maxTextureUnits() + ")!");
+        }
+    }
+
+    /**
+     * Compiles a shader of the given type from GLSL source.
+     *
+     * Returns the shader handle, or 0 if glCreateShader itself failed (note:
+     * that failure is returned silently rather than thrown). Compilation
+     * failure throws RuntimeException with the GL info log.
+     */
+    private static int loadShader(int shaderType, String source) {
+        int shader = GLES20.glCreateShader(shaderType);
+        if (shader != 0) {
+            GLES20.glShaderSource(shader, source);
+            GLES20.glCompileShader(shader);
+            int[] compiled = new int[1];
+            GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
+            if (compiled[0] == 0) {
+                String info = GLES20.glGetShaderInfoLog(shader);
+                GLES20.glDeleteShader(shader);
+                shader = 0;  // dead store: the throw below exits first
+                throw new RuntimeException("Could not compile shader " + shaderType + ":" + info);
+            }
+        }
+        return shader;
+    }
+
+    /**
+     * Compiles the vertex and fragment sources and links them into a program.
+     *
+     * Returns the linked program handle, or 0 if glCreateProgram itself failed
+     * (note: that failure is returned silently). Compile or link failures
+     * throw RuntimeException with the GL info log. The intermediate shader
+     * objects are always deleted; GL keeps them alive while attached.
+     */
+    private static int createProgram(String vertexSource, String fragmentSource) {
+        int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
+        if (vertexShader == 0) {
+            throw new RuntimeException("Could not create shader-program as vertex shader "
+                + "could not be compiled!");
+        }
+        int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
+        if (pixelShader == 0) {
+            throw new RuntimeException("Could not create shader-program as fragment shader "
+                + "could not be compiled!");
+        }
+
+        int program = GLES20.glCreateProgram();
+        if (program != 0) {
+            GLES20.glAttachShader(program, vertexShader);
+            GLToolbox.checkGlError("glAttachShader");
+            GLES20.glAttachShader(program, pixelShader);
+            GLToolbox.checkGlError("glAttachShader");
+            GLES20.glLinkProgram(program);
+            int[] linkStatus = new int[1];
+            GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
+            if (linkStatus[0] != GLES20.GL_TRUE) {
+                String info = GLES20.glGetProgramInfoLog(program);
+                GLES20.glDeleteProgram(program);
+                program = 0;  // dead store: the throw below exits first
+                throw new RuntimeException("Could not link program: " + info);
+            }
+        }
+
+        // Shaders are no longer needed once linked into the program.
+        GLES20.glDeleteShader(vertexShader);
+        GLES20.glDeleteShader(pixelShader);
+
+        return program;
+    }
+
+    /**
+     * Queries all active uniforms of the program and caches them by name in
+     * mUniforms.
+     *
+     * NOTE(review): when the program has zero active uniforms, mUniforms is
+     * not (re)assigned here -- confirm it is initialized elsewhere before
+     * getProgramUniform() is called, otherwise that lookup may NPE.
+     */
+    private void scanUniforms() {
+        int uniformCount[] = new int [1];
+        GLES20.glGetProgramiv(mProgram, GLES20.GL_ACTIVE_UNIFORMS, uniformCount, 0);
+        if (uniformCount[0] > 0) {
+            mUniforms = new HashMap<String, ProgramUniform>(uniformCount[0]);
+            for (int i = 0; i < uniformCount[0]; ++i) {
+                ProgramUniform uniform = new ProgramUniform(mProgram, i);
+                mUniforms.put(uniform.getName(), uniform);
+            }
+        }
+    }
+
+    /**
+     * Looks up a cached uniform by name.
+     *
+     * @param name the uniform's name in the shader source.
+     * @param required if true, an unknown name raises IllegalArgumentException;
+     *                 otherwise null is returned.
+     */
+    private ProgramUniform getProgramUniform(String name, boolean required) {
+        ProgramUniform result = mUniforms.get(name);
+        if (result == null && required) {
+            throw new IllegalArgumentException("Unknown uniform '" + name + "'!");
+        }
+        return result;
+    }
+
+    /**
+     * Looks up a vertex attribute by name, lazily querying GL and caching the
+     * result in mAttributes on first access.
+     *
+     * @param name the attribute's name in the shader source.
+     * @param required if true, an unknown name raises IllegalArgumentException;
+     *                 otherwise null is returned.
+     */
+    private VertexAttribute getProgramAttribute(String name, boolean required) {
+        VertexAttribute result = mAttributes.get(name);
+        if (result == null) {
+            int handle = GLES20.glGetAttribLocation(mProgram, name);
+            if (handle >= 0) {
+                result = new VertexAttribute(name, handle);
+                mAttributes.put(name, result);
+            } else if (required) {
+                throw new IllegalArgumentException("Unknown attribute '" + name + "'!");
+            }
+        }
+        return result;
+    }
+
+    /**
+     * Validates that {@code values} scalar values can be assigned to the
+     * uniform: the count must be a multiple of {@code components} (values per
+     * element) and match the uniform's declared array size exactly.
+     */
+    private void checkUniformAssignment(ProgramUniform uniform, int values, int components) {
+        if (values % components != 0) {
+            throw new RuntimeException("Size mismatch: Attempting to assign values of size "
+                + values + " to uniform '" + uniform.getName() + "' (must be multiple of "
+                + components + ")!");
+        } else if (uniform.getSize() != values / components) {
+            throw new RuntimeException("Size mismatch: Cannot assign " + values + " values to "
+                + "uniform '" + uniform.getName() + "'!");
+        }
+    }
+
+    /**
+     * Returns the length of a NUL-terminated byte string: the index of the
+     * first 0 byte, or the full array length if no terminator is present.
+     */
+    private static int strlen(byte[] strVal) {
+        for (int i = 0; i < strVal.length; ++i) {
+            if (strVal[i] == '\0') {
+                return i;
+            }
+        }
+        return strVal.length;
+    }
+}
+
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/InputPort.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/InputPort.java
new file mode 100644
index 0000000..82749c51
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/InputPort.java
@@ -0,0 +1,327 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package androidx.media.filterfw;
+
+import java.lang.reflect.Field;
+
+/**
+ * Input ports are the receiving ports of frames in a filter.
+ * <p>
+ * InputPort instances receive Frame data from connected OutputPort instances of a previous filter.
+ * Frames flow from output ports to input ports. Filters can process frame data by calling
+ * {@link #pullFrame()} on an input port. If the input port is set to wait for an input frame
+ * (see {@link #setWaitsForFrame(boolean)}), there is guaranteed to be a Frame on the port before
+ * {@code onProcess()} is called. This is the default setting. Otherwise, calling
+ * {@link #pullFrame()} may return a value of {@code null}.
+ * </p><p>
+ * InputPorts may be bound to fields of the Filter. When an input port is bound to a field, Frame
+ * values will be assigned to the field once a Frame is received on that port. The Frame value must
+ * be of a type that is compatible with the field type.
+ * </p>
+ */
+public final class InputPort {
+
+    // Filter that owns this port.
+    private Filter mFilter;
+    // Name under which this port was declared/connected.
+    private String mName;
+    // Declared type information for this port.
+    private Signature.PortInfo mInfo;
+    // Optional listener invoked on every pulled frame (also used by field bindings).
+    private FrameListener mListener = null;
+    // Non-null only during the attachment stage (between onOpen and setQueue).
+    private FrameQueue.Builder mQueueBuilder = null;
+    // Queue of incoming frames; null until the port is connected.
+    private FrameQueue mQueue = null;
+    // If true, the filter is not scheduled until a frame waits on this port.
+    private boolean mWaitForFrame = true;
+    // If true, frames are pulled automatically (typically with a binding).
+    private boolean mAutoPullEnabled = false;
+
+    /** Callback interface for observing frames as they are pulled on a port. */
+    public interface FrameListener {
+        public void onFrameReceived(InputPort port, Frame frame);
+    }
+
+    /** FrameListener that assigns incoming frame values to a bound filter field. */
+    private class FieldBinding implements FrameListener {
+        private Field mField;
+
+        public FieldBinding(Field field) {
+            mField = field;
+        }
+
+        @Override
+        public void onFrameReceived(InputPort port, Frame frame) {
+            try {
+                // Array-valued ports (dimensions > 0) assign the whole values
+                // array; scalar ports assign the single value.
+                if(port.mInfo.type.getNumberOfDimensions() > 0) {
+                    FrameValues frameValues = frame.asFrameValues();
+                    mField.set(mFilter, frameValues.getValues());
+                } else {
+                    FrameValue frameValue = frame.asFrameValue();
+                    mField.set(mFilter, frameValue.getValue());
+                }
+            } catch (Exception e) {
+                throw new RuntimeException("Assigning frame " + frame + " to field "
+                    + mField + " of filter " + mFilter + " caused exception!", e);
+            }
+        }
+    }
+
+    /**
+     * Attach this input port to an output port for frame passing.
+     *
+     * Use this method whenever you plan on passing a Frame through from an input port to an
+     * output port. This must be called from inside
+     * {@link Filter#onInputPortAttached(InputPort) onInputPortAttached}.
+     *
+     * @param outputPort the output port that Frames will be pushed to.
+     */
+    public void attachToOutputPort(OutputPort outputPort) {
+        assertInAttachmentStage();
+        mFilter.openOutputPort(outputPort);
+        mQueueBuilder.attachQueue(outputPort.getQueue());
+    }
+
+    /**
+     * Bind this input port to the specified listener.
+     *
+     * Use this when you wish to be notified of incoming frames. The listener method
+     * {@link FrameListener#onFrameReceived(InputPort, Frame)} will be called once a Frame is pulled
+     * on this port. Typically this is called from inside
+     * {@link Filter#onInputPortAttached(InputPort) onInputPortAttached}, and used in
+     * conjunction with {@link #setAutoPullEnabled(boolean)}. Overrides any previous bindings.
+     *
+     * @param listener the listener to handle incoming Frames.
+     */
+    public void bindToListener(FrameListener listener) {
+        assertInAttachmentStage();
+        mListener = listener;
+    }
+
+    /**
+     * Bind this input port to the specified field.
+     *
+     * Use this when you wish to pull frames directly into a field of the filter. This requires
+     * that the input frames can be interpreted as object-based frames of the field's class.
+     * Overrides any previous bindings.
+     *
+     * This is typically called from inside
+     * {@link Filter#onInputPortAttached(InputPort) onInputPortAttached}, and used in
+     * conjunction with {@link #setAutoPullEnabled(boolean)}.
+     *
+     * @param field the field to pull frame data into.
+     * @see #bindToFieldNamed(String)
+     * @see #setAutoPullEnabled(boolean)
+     */
+    public void bindToField(Field field) {
+        assertInAttachmentStage();
+        mListener = new FieldBinding(field);
+    }
+
+    /**
+     * Bind this input port to the field with the specified name.
+     *
+     * Use this when you wish to pull frames directly into a field of the filter. This requires
+     * that the input frames can be interpreted as object-based frames of the field's class.
+     * Overrides any previous bindings.
+     *
+     * This is typically called from inside
+     * {@link Filter#onInputPortAttached(InputPort) onInputPortAttached}, and used in
+     * conjunction with {@link #setAutoPullEnabled(boolean)}.
+     *
+     * @param fieldName the field to pull frame data into.
+     * @see #bindToField(Field)
+     * @see #setAutoPullEnabled(boolean)
+     */
+    public void bindToFieldNamed(String fieldName) {
+        // Searches the filter class and its superclasses (see findFieldNamed).
+        Field field = findFieldNamed(fieldName, mFilter.getClass());
+        if (field == null) {
+            throw new IllegalArgumentException("Attempting to bind to unknown field '"
+                + fieldName + "'!");
+        }
+        bindToField(field);
+    }
+
+    /**
+     * Set whether the InputPort automatically pulls frames.
+     * This is typically only used when the port is bound to another target.
+     * @param enabled true, if frames should be automatically pulled on this port.
+     */
+    public void setAutoPullEnabled(boolean enabled) {
+        mAutoPullEnabled = enabled;
+    }
+
+    /**
+     * Returns whether the InputPort automatically pulls frames.
+     * @return true, if frames are automatically pulled on this port.
+     */
+    public boolean isAutoPullEnabled() {
+        return mAutoPullEnabled;
+    }
+
+    /**
+     * Pull a waiting frame from the port.
+     *
+     * Call this to pull a frame from the input port for processing. If no frame is waiting on the
+     * input port, returns null. After this call the port will have no Frame waiting (empty port).
+     * Note, that this returns a frame owned by the input queue. You must detach the frame if you
+     * wish to hold on to it.
+     *
+     * @return Frame instance, or null if no frame is available for pulling.
+     */
+    public synchronized Frame pullFrame() {
+        if (mQueue == null) {
+            throw new IllegalStateException("Cannot pull frame from closed input port!");
+        }
+        Frame frame = mQueue.pullFrame();
+        if (frame != null) {
+            // Notify any bound listener (or field binding) before auto-release.
+            if (mListener != null) {
+                mListener.onFrameReceived(this, frame);
+            }
+            //Log.i("InputPort", "Adding frame " + frame + " to auto-release pool");
+            mFilter.addAutoReleaseFrame(frame);
+            long timestamp = frame.getTimestamp();
+            if (timestamp != Frame.TIMESTAMP_NOT_SET) {
+                mFilter.onPulledFrameWithTimestamp(frame.getTimestamp());
+            }
+        }
+        return frame;
+    }
+
+    /**
+     * Returns the frame waiting on this port without removing it.
+     * @return the waiting Frame, or null if none is queued.
+     */
+    public synchronized Frame peek() {
+        if (mQueue == null) {
+            throw new IllegalStateException("Cannot pull frame from closed input port!");
+        }
+        return mQueue.peek();
+    }
+
+    /**
+     * Returns true, if the port is connected.
+     * @return true, if there is an output port that connects to this port.
+     */
+    public boolean isConnected() {
+        return mQueue != null;
+    }
+
+    /**
+     * Returns true, if there is a frame waiting on this port.
+     * @return true, if there is a frame waiting on this port.
+     */
+    public synchronized boolean hasFrame() {
+        return mQueue != null && mQueue.canPull();
+    }
+
+    /**
+     * Sets whether to wait for a frame on this port before processing.
+     * When set to true, the Filter will not be scheduled for processing unless there is a Frame
+     * waiting on this port. The default value is true.
+     *
+     * @param wait true, if the Filter should wait for a Frame before processing.
+     * @see #waitsForFrame()
+     */
+    public void setWaitsForFrame(boolean wait) {
+        mWaitForFrame = wait;
+    }
+
+    /**
+     * Returns whether the filter waits for a frame on this port before processing.
+     * @return true, if the filter waits for a frame on this port before processing.
+     * @see #setWaitsForFrame(boolean)
+     */
+    public boolean waitsForFrame() {
+        return mWaitForFrame;
+    }
+
+    /**
+     * Returns the input port's name.
+     * This is the name that was specified when the input port was connected.
+     *
+     * @return the input port's name.
+     */
+    public String getName() {
+        return mName;
+    }
+
+    /**
+     * Returns the FrameType of this port.
+     * This is the type that was specified when the input port was declared.
+     *
+     * NOTE(review): this dereferences getQueue() and so throws a
+     * NullPointerException if the port is not yet connected -- confirm callers.
+     *
+     * @return the input port's FrameType.
+     */
+    public FrameType getType() {
+        return getQueue().getType();
+    }
+
+    /**
+     * Return the filter object that this port belongs to.
+     *
+     * @return the input port's filter.
+     */
+    public Filter getFilter() {
+        return mFilter;
+    }
+
+    @Override
+    public String toString() {
+        return mFilter.getName() + ":" + mName;
+    }
+
+    // Internal only ///////////////////////////////////////////////////////////////////////////////
+    // Instantiated by the framework when a filter's ports are set up.
+    InputPort(Filter filter, String name, Signature.PortInfo info) {
+        mFilter = filter;
+        mName = name;
+        mInfo = info;
+    }
+
+    /** True, if this port does not block scheduling: no wait, or a frame is waiting. */
+    boolean conditionsMet() {
+        return !mWaitForFrame || hasFrame();
+    }
+
+    /**
+     * Begins the attachment stage: stores the queue builder, declares this
+     * port's read type on it, and lets the filter perform its attachments.
+     */
+    void onOpen(FrameQueue.Builder builder) {
+        mQueueBuilder = builder;
+        mQueueBuilder.setReadType(mInfo.type);
+        mFilter.onInputPortOpen(this);
+    }
+
+    /** Installs the built frame queue and ends the attachment stage. */
+    void setQueue(FrameQueue queue) {
+        mQueue = queue;
+        mQueueBuilder = null;
+    }
+
+    FrameQueue getQueue() {
+        return mQueue;
+    }
+
+    /** Drops any frames waiting in this port's queue (no-op if unconnected). */
+    void clear() {
+        if (mQueue != null) {
+            mQueue.clear();
+        }
+    }
+
+    /** Throws if called outside the attachment stage (see onOpen/setQueue). */
+    private void assertInAttachmentStage() {
+        if (mQueueBuilder == null) {
+            throw new IllegalStateException("Attempting to attach port while not in attachment "
+                + "stage!");
+        }
+    }
+
+    /**
+     * Finds a declared field by name on the class or any of its superclasses,
+     * making it accessible. Returns null if no such field exists.
+     */
+    private Field findFieldNamed(String fieldName, Class<?> clazz) {
+        Field field = null;
+        try {
+            field = clazz.getDeclaredField(fieldName);
+            field.setAccessible(true);
+        } catch (NoSuchFieldException e) {
+            // Not declared here: walk up the class hierarchy.
+            Class<?> superClass = clazz.getSuperclass();
+            if (superClass != null) {
+                field = findFieldNamed(fieldName, superClass);
+            }
+        }
+        return field;
+    }
+}
+
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/MffContext.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/MffContext.java
new file mode 100644
index 0000000..b7212f9
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/MffContext.java
@@ -0,0 +1,470 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package androidx.media.filterfw;
+
+import android.annotation.TargetApi;
+import android.app.Activity;
+import android.app.ActivityManager;
+import android.content.Context;
+import android.content.pm.ConfigurationInfo;
+import android.os.Build;
+import android.os.Handler;
+import android.os.Looper;
+import android.renderscript.RenderScript;
+import android.util.Log;
+import android.view.SurfaceHolder;
+import android.view.SurfaceView;
+import android.view.ViewGroup;
+
+import java.util.HashSet;
+import java.util.Set;
+
+/**
+ * The MffContext holds the state and resources of a Mobile Filter Framework processing instance.
+ * Though it is possible to create multiple MffContext instances, typical applications will rely on
+ * a single MffContext to perform all processing within the Mobile Filter Framework.
+ *
+ * The MffContext class declares two methods {@link #onPause()} and {@link #onResume()}, which are
+ * typically called when the application activity is paused and resumed. This will take care of
+ * halting any processing in the context, and releasing resources while the activity is paused.
+ */
+public class MffContext {
+
+    /**
+     * Class to hold configuration information for MffContexts.
+     * Instances are plain value holders; mutate the public fields before
+     * passing the Config to the MffContext constructor.
+     */
+    public static class Config {
+        /**
+         * Set to true, if this context will make use of the camera.
+         * If your application does not require the camera, the context does not guarantee that
+         * a camera is available for streaming. That is, you may only use a CameraStreamer if
+         * the context's {@link #isCameraStreamingSupported()} returns true.
+         */
+        public boolean requireCamera = true;
+
+        /**
+         * Set to true, if this context requires OpenGL.
+         * If your application does not require OpenGL, the context does not guarantee that OpenGL
+         * is available. That is, you may only use OpenGL (within filters running in this context)
+         * if the context's {@link #isOpenGLSupported()} method returns true.
+         */
+        public boolean requireOpenGL = true;
+
+        /**
+         * On older Android versions the Camera may need a SurfaceView to render into in order to
+         * function. You may specify a dummy SurfaceView here if you do not want the context to
+         * create its own view. Note, that your view may or may not be used. You cannot rely on
+         * your dummy view to be used by the Camera. If you pass null, no dummy view will be used.
+         * In this case your application may not run correctly on older devices if you use the
+         * camera. This flag has no effect if you do not require the camera.
+         */
+        public SurfaceView dummySurface = null;
+
+        /** Force MFF to not use OpenGL in its processing. */
+        public boolean forceNoGL = false;
+    }
+
+    /**
+     * Simple lifecycle state holder (RUNNING -> PAUSED -> RUNNING, or
+     * -> DESTROYED). Callers synchronize on the State instance itself.
+     */
+    static private class State {
+        public static final int STATE_RUNNING = 1;
+        public static final int STATE_PAUSED = 2;
+        public static final int STATE_DESTROYED = 3;
+
+        public int current = STATE_RUNNING;
+    }
+
+    /** The application context. */
+    private Context mApplicationContext = null;
+
+    /** The set of filter graphs within this context */
+    private Set<FilterGraph> mGraphs = new HashSet<FilterGraph>();
+
+    /** The set of graph runners within this context */
+    private Set<GraphRunner> mRunners = new HashSet<GraphRunner>();
+
+    /** True, if the context preserves frames when paused (default: false). */
+    private boolean mPreserveFramesOnPause = false;
+
+    /** The shared CameraStreamer that streams camera frames to CameraSource filters. */
+    private CameraStreamer mCameraStreamer = null;
+
+    /** The current context state. Synchronize on this object to read/modify. */
+    private State mState = new State();
+
+    /** A dummy SurfaceView that is required for Camera operation on older devices. */
+    private SurfaceView mDummySurfaceView = null;
+
+    /** Handler to execute code in the context's thread, such as issuing callbacks. */
+    private Handler mHandler = null;
+
+    /** Flag whether OpenGL ES 2 is supported in this context. */
+    private boolean mGLSupport;
+
+    /** Flag whether camera streaming is supported in this context. */
+    private boolean mCameraStreamingSupport;
+
+    /** RenderScript base master class. Lazily created by getRenderScript(). */
+    private RenderScript mRenderScript;
+
+    /**
+     * Creates a new MffContext with the default configuration.
+     *
+     * An MffContext must be attached to a Context object of an application. You may create
+     * multiple MffContexts, however data between them cannot be shared. The context must be
+     * created in a thread with a Looper (such as the main/UI thread).
+     *
+     * On older versions of Android, the MffContext may create a visible dummy view for the
+     * camera to render into. This is a 1x1 SurfaceView that is placed into the top-left corner.
+     *
+     * @param context The application context to attach the MffContext to.
+     * @throws RuntimeException if the calling thread has no Looper, or the default
+     *         configuration's requirements (camera, OpenGL) cannot be met.
+     */
+    public MffContext(Context context) {
+        init(context, new Config());
+    }
+
+    /**
+     * Creates a new MffContext with the specified configuration.
+     *
+     * An MffContext must be attached to a Context object of an application. You may create
+     * multiple MffContexts, however data between them cannot be shared. The context must be
+     * created in a thread with a Looper (such as the main/UI thread).
+     *
+     * On older versions of Android, the MffContext may create a visible dummy view for the
+     * camera to render into. This is a 1x1 SurfaceView that is placed into the top-left corner.
+     * You may alternatively specify your own SurfaceView in the configuration.
+     *
+     * @param context The application context to attach the MffContext to.
+     * @param config The configuration to use.
+     *
+     * @throws RuntimeException If no context for the requested configuration could be created.
+     */
+    public MffContext(Context context, Config config) {
+        init(context, config);
+    }
+
+    /**
+     * Put all processing in the context on hold.
+     * This is typically called from your application's <code>onPause()</code> method, and will
+     * stop all running graphs (closing their filters). If the context does not preserve frames on
+     * pause (see {@link #setPreserveFramesOnPause(boolean)}) all frames attached to this context
+     * are released.
+     */
+    public void onPause() {
+        synchronized (mState) {
+            // Only transition from RUNNING; repeated calls are no-ops.
+            if (mState.current == State.STATE_RUNNING) {
+                if (mCameraStreamer != null) {
+                    mCameraStreamer.halt();
+                }
+                stopRunners(true);
+                mState.current = State.STATE_PAUSED;
+            }
+        }
+    }
+
+    /**
+     * Resumes the processing in this context.
+     * This is typically called from the application's <code>onResume()</code> method, and will
+     * resume processing any of the previously stopped filter graphs.
+     */
+    public void onResume() {
+        synchronized (mState) {
+            // Only transition from PAUSED; repeated calls are no-ops.
+            if (mState.current == State.STATE_PAUSED) {
+                resumeRunners();
+                resumeCamera();
+                mState.current = State.STATE_RUNNING;
+            }
+        }
+    }
+
+    /**
+     * Release all resources associated with this context.
+     * This will also stop any running graphs. Idempotent: subsequent calls
+     * after the context is destroyed are no-ops.
+     */
+    public void release() {
+        synchronized (mState) {
+            if (mState.current != State.STATE_DESTROYED) {
+                if (mCameraStreamer != null) {
+                    mCameraStreamer.stop();
+                    mCameraStreamer.tearDown();
+                }
+                // RenderScript only exists on API 11+ (Honeycomb).
+                if (Build.VERSION.SDK_INT >= 11) {
+                    maybeDestroyRenderScript();
+                }
+                stopRunners(false);
+                waitUntilStopped();
+                tearDown();
+                mState.current = State.STATE_DESTROYED;
+            }
+        }
+    }
+
+    /**
+     * Set whether frames are preserved when the context is paused.
+     * When passing false, all Frames associated with this context are released. The default
+     * value is false.
+     *
+     * @param preserve true, to preserve frames when the context is paused.
+     *
+     * @see #getPreserveFramesOnPause()
+     */
+    public void setPreserveFramesOnPause(boolean preserve) {
+        mPreserveFramesOnPause = preserve;
+    }
+
+    /**
+     * Returns whether frames are preserved when the context is paused.
+     *
+     * @return true, if frames are preserved when the context is paused.
+     *
+     * @see #setPreserveFramesOnPause(boolean)
+     */
+    public boolean getPreserveFramesOnPause() {
+        return mPreserveFramesOnPause;
+    }
+
+    /**
+     * Returns the application context that the MffContext is attached to.
+     *
+     * @return The application context for this context.
+     */
+    public Context getApplicationContext() {
+        return mApplicationContext;
+    }
+
+    /**
+     * Returns the context's shared CameraStreamer.
+     * Use the CameraStreamer to control the Camera. Frames from the Camera are typically streamed
+     * to CameraSource filters.
+     *
+     * NOTE(review): lazy initialization is not synchronized; presumably only
+     * called from the context's Looper thread -- confirm.
+     *
+     * @return The context's CameraStreamer instance.
+     */
+    public CameraStreamer getCameraStreamer() {
+        if (mCameraStreamer == null) {
+            mCameraStreamer = new CameraStreamer(this);
+        }
+        return mCameraStreamer;
+    }
+
+    /**
+     * Set the default EGL config chooser.
+     *
+     * When an EGL context is required by the MFF, the channel sizes specified here are used. The
+     * default sizes are 8 bits per R,G,B,A channel and 0 bits for depth and stencil channels.
+     * Delegates to RenderTarget; affects EGL contexts created afterwards.
+     *
+     * @param redSize The size of the red channel in bits.
+     * @param greenSize The size of the green channel in bits.
+     * @param blueSize The size of the blue channel in bits.
+     * @param alphaSize The size of the alpha channel in bits.
+     * @param depthSize The size of the depth channel in bits.
+     * @param stencilSize The size of the stencil channel in bits.
+     */
+    public static void setEGLConfigChooser(int redSize,
+                                           int greenSize,
+                                           int blueSize,
+                                           int alphaSize,
+                                           int depthSize,
+                                           int stencilSize) {
+        RenderTarget.setEGLConfigChooser(redSize,
+                                         greenSize,
+                                         blueSize,
+                                         alphaSize,
+                                         depthSize,
+                                         stencilSize);
+    }
+
+    /**
+     * Returns true, if this context supports using OpenGL.
+     * @return true, if this context supports using OpenGL.
+     */
+    public final boolean isOpenGLSupported() {
+        return mGLSupport;
+    }
+
+    /**
+     * Returns true, if this context supports camera streaming.
+     * @return true, if this context supports camera streaming.
+     */
+    public final boolean isCameraStreamingSupported() {
+        return mCameraStreamingSupport;
+    }
+
+    /**
+     * Returns the context's RenderScript instance, creating it on first use.
+     * NOTE(review): lazy initialization is not synchronized; presumably only
+     * called from the context's Looper thread -- confirm.
+     */
+    @TargetApi(11)
+    public final RenderScript getRenderScript() {
+        if (mRenderScript == null) {
+            mRenderScript = RenderScript.create(mApplicationContext);
+        }
+        return mRenderScript;
+    }
+
+    /** Throws RuntimeException if this context was created without GL support. */
+    final void assertOpenGLSupported() {
+        if (!isOpenGLSupported()) {
+            throw new RuntimeException("Attempting to use OpenGL ES 2 in a context that does not "
+                    + "support it!");
+        }
+    }
+
+    /** Registers a filter graph with this context (thread-safe). */
+    void addGraph(FilterGraph graph) {
+        synchronized (mGraphs) {
+            mGraphs.add(graph);
+        }
+    }
+
+    /** Registers a graph runner with this context (thread-safe). */
+    void addRunner(GraphRunner runner) {
+        synchronized (mRunners) {
+            mRunners.add(runner);
+        }
+    }
+
+    /** Returns the dummy camera SurfaceView, or null if none was set up. */
+    SurfaceView getDummySurfaceView() {
+        return mDummySurfaceView;
+    }
+
+    /** Posts a runnable to the context's creation (Looper) thread. */
+    void postRunnable(Runnable runnable) {
+        mHandler.post(runnable);
+    }
+
+    /**
+     * One-time setup: detect GL and camera capabilities, bind a Handler to the creating
+     * thread, and fetch a dummy SurfaceView if the camera configuration requires one.
+     */
+    private void init(Context context, Config config) {
+        determineGLSupport(context, config);
+        determineCameraSupport(config);
+        createHandler();
+        mApplicationContext = context.getApplicationContext();
+        fetchDummySurfaceView(context, config);
+    }
+
+    // Uses the caller-supplied surface from the config when present; otherwise creates one.
+    // Only needed when a camera is required and the platform demands a dummy surface.
+    private void fetchDummySurfaceView(Context context, Config config) {
+        if (config.requireCamera && CameraStreamer.requireDummySurfaceView()) {
+            mDummySurfaceView = config.dummySurface != null
+                    ? config.dummySurface
+                    : createDummySurfaceView(context);
+        }
+    }
+
+    // forceNoGL wins over everything; otherwise GL support is probed from the platform,
+    // and requireOpenGL turns a missing capability into a hard failure.
+    private void determineGLSupport(Context context, Config config) {
+        if (config.forceNoGL) {
+            mGLSupport = false;
+        } else {
+            mGLSupport = getPlatformSupportsGLES2(context);
+            if (config.requireOpenGL && !mGLSupport) {
+                throw new RuntimeException("Cannot create context that requires GL support on "
+                        + "this platform!");
+            }
+        }
+    }
+
+    // Camera streaming is supported iff at least one camera is present; requireCamera turns
+    // a missing camera into a hard failure.
+    private void determineCameraSupport(Config config) {
+        mCameraStreamingSupport = (CameraStreamer.getNumberOfCameras() > 0);
+        if (config.requireCamera && !mCameraStreamingSupport) {
+            throw new RuntimeException("Cannot create context that requires a camera on "
+                    + "this platform!");
+        }
+    }
+
+    // reqGlEsVersion encodes the major GLES version in the upper 16 bits, so 0x20000
+    // corresponds to OpenGL ES 2.0.
+    private static boolean getPlatformSupportsGLES2(Context context) {
+        ActivityManager am = (ActivityManager) context.getSystemService(Context.ACTIVITY_SERVICE);
+        ConfigurationInfo configurationInfo = am.getDeviceConfigurationInfo();
+        return configurationInfo.reqGlEsVersion >= 0x20000;
+    }
+
+    // Binds mHandler to the creating thread's Looper; construction therefore must happen
+    // on a Looper thread.
+    private void createHandler() {
+        if (Looper.myLooper() == null) {
+            throw new RuntimeException("MffContext must be created in a thread with a Looper!");
+        }
+        mHandler = new Handler();
+    }
+
+    /**
+     * Halts (haltOnly == true) or fully stops all registered runners, optionally flushing
+     * their frames afterwards (suppressed when mPreserveFramesOnPause is set).
+     */
+    private void stopRunners(boolean haltOnly) {
+        synchronized (mRunners) {
+            // Halt all runners (does nothing if not running)
+            for (GraphRunner runner : mRunners) {
+                if (haltOnly) {
+                    runner.halt();
+                } else {
+                    runner.stop();
+                }
+            }
+            // Flush all graphs if requested (this is queued up after the call to halt)
+            if (!mPreserveFramesOnPause) {
+                for (GraphRunner runner : mRunners) {
+                    runner.flushFrames();
+                }
+            }
+        }
+    }
+
+    /** Restarts all registered runners (guarded by the mRunners lock). */
+    private void resumeRunners() {
+        synchronized (mRunners) {
+            for (GraphRunner runner : mRunners) {
+                runner.restart();
+            }
+        }
+    }
+
+    /** Resumes the camera streamer, if one has been created. */
+    private void resumeCamera() {
+        // Restart only affects previously halted cameras that were running.
+        if (mCameraStreamer != null) {
+            mCameraStreamer.restart();
+        }
+    }
+
+    /**
+     * Blocks until every registered runner has stopped.
+     * NOTE(review): iterates mRunners without synchronizing on it, unlike stopRunners()
+     * and resumeRunners() — confirm no runner can be added concurrently while this runs.
+     */
+    private void waitUntilStopped() {
+        for (GraphRunner runner : mRunners) {
+            runner.waitUntilStop();
+        }
+    }
+
+    /**
+     * Tears down all graphs, then all runners, registered with this context.
+     * NOTE(review): iterates mGraphs/mRunners without their locks (cf. addGraph/addRunner)
+     * — confirm tear-down only happens after all producers have quiesced.
+     */
+    private void tearDown() {
+        // Tear down graphs
+        for (FilterGraph graph : mGraphs) {
+            graph.tearDown();
+        }
+
+        // Tear down runners
+        for (GraphRunner runner : mRunners) {
+            runner.tearDown();
+        }
+    }
+
+    /**
+     * Creates a push-buffer SurfaceView and, when the given context is an Activity, attaches
+     * it as a 1x1 view in the top-left corner of that activity's content view. Used on older
+     * (Gingerbread) devices where the camera requires a visible surface.
+     */
+    @SuppressWarnings("deprecation")
+    private SurfaceView createDummySurfaceView(Context context) {
+        // This is only called on Gingerbread devices, so deprecation warning is unnecessary.
+        SurfaceView dummySurfaceView = new SurfaceView(context);
+        dummySurfaceView.getHolder().setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
+        // If we have an activity for this context we'll add the SurfaceView to it (as a 1x1 view
+        // in the top-left corner). If not, we warn the user that they may need to add one manually.
+        Activity activity = findActivityForContext(context);
+        if (activity != null) {
+            ViewGroup.LayoutParams params = new ViewGroup.LayoutParams(1, 1);
+            activity.addContentView(dummySurfaceView, params);
+        } else {
+            Log.w("MffContext", "Could not find activity for dummy surface! Consider specifying "
+                    + "your own SurfaceView!");
+        }
+        return dummySurfaceView;
+    }
+
+    // Returns the context as an Activity, or null when it is not one (no unwrapping of
+    // ContextWrappers is attempted).
+    private Activity findActivityForContext(Context context) {
+        return (context instanceof Activity) ? (Activity) context : null;
+    }
+
+    /** Destroys the lazily-created RenderScript instance, if any, and clears the reference. */
+    @TargetApi(11)
+    private void maybeDestroyRenderScript() {
+        if (mRenderScript != null) {
+            mRenderScript.destroy();
+            mRenderScript = null;
+        }
+    }
+
+}
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/MotionSensor.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/MotionSensor.java
new file mode 100644
index 0000000..95558f2
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/MotionSensor.java
@@ -0,0 +1,92 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Make values from a motion sensor (e.g., accelerometer) available as filter outputs.
+
+package androidx.media.filterpacks.sensors;
+
+import android.content.Context;
+import android.hardware.Sensor;
+import android.hardware.SensorEvent;
+import android.hardware.SensorEventListener;
+import android.hardware.SensorManager;
+
+import androidx.media.filterfw.Filter;
+import androidx.media.filterfw.FrameType;
+import androidx.media.filterfw.FrameValues;
+import androidx.media.filterfw.MffContext;
+import androidx.media.filterfw.OutputPort;
+import androidx.media.filterfw.Signature;
+
+/**
+ * Filter that exposes the readings of a motion sensor (currently hardcoded to
+ * {@link Sensor#TYPE_LINEAR_ACCELERATION}) as a float-array on its "values" output port.
+ */
+public final class MotionSensor extends Filter implements SensorEventListener {
+
+    private SensorManager mSensorManager = null;
+    private Sensor mSensor = null;
+
+    // Last-seen sensor reading; guarded by synchronizing on the array object itself.
+    private float[] mValues = new float[3];
+
+    public MotionSensor(MffContext context, String name) {
+        super(context, name);
+    }
+
+    @Override
+    public Signature getSignature() {
+        return new Signature()
+            .addOutputPort("values", Signature.PORT_REQUIRED, FrameType.array(float.class))
+            .disallowOtherPorts();
+    }
+
+    @Override
+    protected void onPrepare() {
+        // Register for sensor updates at UI rate; unregistered again in onTearDown().
+        mSensorManager = (SensorManager)getContext().getApplicationContext()
+                            .getSystemService(Context.SENSOR_SERVICE);
+        mSensor = mSensorManager.getDefaultSensor(Sensor.TYPE_LINEAR_ACCELERATION);
+        // TODO: currently, the type of sensor is hardcoded. Should be able to set the sensor
+        //  type as filter input!
+        mSensorManager.registerListener(this, mSensor, SensorManager.SENSOR_DELAY_UI);
+    }
+
+    @Override
+    protected void onTearDown() {
+        mSensorManager.unregisterListener(this);
+    }
+
+    @Override
+    public final void onAccuracyChanged(Sensor sensor, int accuracy) {
+        // (Do we need to do something when sensor accuracy changes?)
+    }
+
+    @Override
+    public final void onSensorChanged(SensorEvent event) {
+        // Copy the latest reading under the lock so onProcess() sees a consistent triple.
+        synchronized(mValues) {
+            mValues[0] = event.values[0];
+            mValues[1] = event.values[1];
+            mValues[2] = event.values[2];
+        }
+    }
+
+    @Override
+    protected void onProcess() {
+        OutputPort outPort = getConnectedOutputPort("values");
+        FrameValues outFrame = outPort.fetchAvailableFrame(null).asFrameValues();
+        synchronized(mValues) {
+            outFrame.setValues(mValues);
+        }
+        // Frame timestamps are in nanoseconds; convert from milliseconds.
+        outFrame.setTimestamp(System.currentTimeMillis() * 1000000L);
+        outPort.pushFrame(outFrame);
+    }
+}
+
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/NewChromaHistogramFilter.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/NewChromaHistogramFilter.java
new file mode 100644
index 0000000..f524be371
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/NewChromaHistogramFilter.java
@@ -0,0 +1,117 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Extract histogram from image.
+
+package androidx.media.filterpacks.histogram;
+
+import androidx.media.filterfw.Filter;
+import androidx.media.filterfw.Frame;
+import androidx.media.filterfw.FrameBuffer2D;
+import androidx.media.filterfw.FrameType;
+import androidx.media.filterfw.InputPort;
+import androidx.media.filterfw.MffContext;
+import androidx.media.filterfw.OutputPort;
+import androidx.media.filterfw.Signature;
+
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.nio.FloatBuffer;
+
+/**
+ * ChromaHistogramFilter takes in an image in HSVA format and computes a 2-D histogram with a
+ * 2 dimensional chroma histogram based on hue (column) and saturation (row) at the top and
+ * a 1-D value histogram in the last row. The number of bins in the value histogram equals
+ * the number of bins in hue.
+ */
+public final class NewChromaHistogramFilter extends Filter {
+
+    // Histogram dimensions; may be overridden through the optional input ports below.
+    private int mHueBins = 6;
+    private int mSaturationBins = 3;
+    private int mValueBins;
+
+    // Thresholds passed to the native histogram extraction, expressed on the 0-255 scale.
+    private int mSaturationThreshold = 26; // 255 * 0.1
+    private int mValueThreshold = 51; // 255 * 0.2
+
+    public NewChromaHistogramFilter(MffContext context, String name) {
+        super(context, name);
+    }
+
+    @Override
+    public Signature getSignature() {
+        FrameType imageIn = FrameType.image2D(FrameType.ELEMENT_RGBA8888, FrameType.READ_CPU);
+        FrameType dataOut = FrameType.buffer2D(FrameType.ELEMENT_FLOAT32);
+
+        return new Signature()
+            .addInputPort("image", Signature.PORT_REQUIRED, imageIn)
+            .addInputPort("huebins", Signature.PORT_OPTIONAL, FrameType.single(int.class))
+            .addInputPort("saturationbins", Signature.PORT_OPTIONAL, FrameType.single(int.class))
+            .addInputPort("saturationthreshold", Signature.PORT_OPTIONAL,
+                    FrameType.single(int.class))
+            .addInputPort("valuethreshold", Signature.PORT_OPTIONAL, FrameType.single(int.class))
+            .addOutputPort("histogram", Signature.PORT_REQUIRED, dataOut)
+            .disallowOtherPorts();
+    }
+
+    @Override
+    public void onInputPortOpen(InputPort port) {
+        // Bind each optional parameter port directly to its backing field and auto-pull it.
+        if (port.getName().equals("huebins")) {
+            port.bindToFieldNamed("mHueBins");
+            port.setAutoPullEnabled(true);
+        } else if (port.getName().equals("saturationbins")) {
+            port.bindToFieldNamed("mSaturationBins");
+            port.setAutoPullEnabled(true);
+        } else if (port.getName().equals("saturationthreshold")) {
+            port.bindToFieldNamed("mSaturationThreshold");
+            port.setAutoPullEnabled(true);
+        } else if (port.getName().equals("valuethreshold")) {
+            port.bindToFieldNamed("mValueThreshold");
+            port.setAutoPullEnabled(true);
+        }
+    }
+
+    @Override
+    protected void onProcess() {
+        FrameBuffer2D imageFrame = getConnectedInputPort("image").pullFrame().asFrameImage2D();
+        OutputPort outPort = getConnectedOutputPort("histogram");
+
+        // The value histogram occupies one extra row and uses as many bins as hue.
+        mValueBins = mHueBins;
+        int[] outDims = new int[] {mHueBins, mSaturationBins + 1};
+        FrameBuffer2D histogramFrame = outPort.fetchAvailableFrame(outDims).asFrameBuffer2D();
+
+        ByteBuffer imageBuffer = imageFrame.lockBytes(Frame.MODE_READ);
+        // Bug fix: the histogram buffer is filled by the native call below, so it must be
+        // locked for writing (it was previously locked with MODE_READ).
+        ByteBuffer histogramBuffer = histogramFrame.lockBytes(Frame.MODE_WRITE);
+        histogramBuffer.order(ByteOrder.nativeOrder());
+        FloatBuffer floatHistogram = histogramBuffer.asFloatBuffer();
+
+        // Run native method
+        extractChromaHistogram(imageBuffer, floatHistogram, mHueBins, mSaturationBins, mValueBins,
+                mSaturationThreshold, mValueThreshold);
+
+        imageFrame.unlock();
+        histogramFrame.unlock();
+
+        outPort.pushFrame(histogramFrame);
+    }
+
+    private static native void extractChromaHistogram(ByteBuffer imageBuffer,
+            FloatBuffer histogramBuffer, int hueBins, int saturationBins, int valueBins,
+            int saturationThreshold, int valueThreshold);
+
+    static {
+        System.loadLibrary("smartcamera_jni");
+    }
+}
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/NormFilter.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/NormFilter.java
new file mode 100644
index 0000000..e816110
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/NormFilter.java
@@ -0,0 +1,63 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package androidx.media.filterpacks.numeric;
+
+import android.util.Log;
+import androidx.media.filterfw.Filter;
+import androidx.media.filterfw.FrameType;
+import androidx.media.filterfw.FrameValue;
+import androidx.media.filterfw.MffContext;
+import androidx.media.filterfw.OutputPort;
+import androidx.media.filterfw.Signature;
+
+/**
+ * Filter to calculate the 2-norm of the inputs. i.e. sqrt(x^2 + y^2)
+ * TODO: Add support for more norms in the future.
+ */
+public final class NormFilter extends Filter {
+   private static final String TAG = "NormFilter";
+   // Log level is captured once at class-load time; toggling it at runtime has no effect.
+   private static boolean mLogVerbose = Log.isLoggable(TAG, Log.VERBOSE);
+
+   public NormFilter(MffContext context, String name) {
+       super(context, name);
+   }
+
+   @Override
+   public Signature getSignature() {
+       FrameType floatT = FrameType.single(float.class);
+       return new Signature()
+           .addInputPort("x", Signature.PORT_REQUIRED, floatT)
+           .addInputPort("y", Signature.PORT_REQUIRED, floatT)
+           .addOutputPort("norm", Signature.PORT_REQUIRED, floatT)
+           .disallowOtherPorts();
+   }
+
+   @Override
+   protected void onProcess() {
+     FrameValue xFrameValue = getConnectedInputPort("x").pullFrame().asFrameValue();
+     float xValue = ((Float)xFrameValue.getValue()).floatValue();
+     FrameValue yFrameValue = getConnectedInputPort("y").pullFrame().asFrameValue();
+     float yValue = ((Float)yFrameValue.getValue()).floatValue();
+
+     // Math.hypot avoids intermediate overflow/underflow of x^2 + y^2.
+     float norm = (float) Math.hypot(xValue, yValue);
+     if (mLogVerbose) Log.v(TAG, "Norm = " + norm);
+     OutputPort outPort = getConnectedOutputPort("norm");
+     FrameValue outFrame = outPort.fetchAvailableFrame(null).asFrameValue();
+     outFrame.setValue(norm);
+     outPort.pushFrame(outFrame);
+   }
+}
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/OutputPort.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/OutputPort.java
new file mode 100644
index 0000000..06117c3
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/OutputPort.java
@@ -0,0 +1,193 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package androidx.media.filterfw;
+
+/**
+ * Output ports are the data emitting ports of filters.
+ * <p>
+ * Filters push data frames onto output-ports, which in turn push them onto their connected input
+ * ports. Output ports must be connected to an input port before data can be pushed onto them.
+ * Input and output ports share their Frame slot, meaning that when a frame is waiting on an output
+ * port, it is also waiting on the connected input port.
+ * </p><p>
+ * Only one frame can be pushed onto an output port at a time. In other words, a Frame must first
+ * be consumed by the target filter before a new frame can be pushed on the output port. If the
+ * output port is set to wait until it becomes free (see {@link #setWaitsUntilAvailable(boolean)}),
+ * it is guaranteed to be available when {@code onProcess()} is called. This is the default setting.
+ * </p>
+ */
+public final class OutputPort {
+
+    private Filter mFilter;
+    private String mName;
+    private Signature.PortInfo mInfo;
+    // Set in onOpen() and used there to configure the write type; cleared once the actual
+    // queue is installed via setQueue().
+    private FrameQueue.Builder mQueueBuilder = null;
+    private FrameQueue mQueue = null;
+    private boolean mWaitsUntilAvailable = true;
+    private InputPort mTarget = null;
+
+    /**
+     * Returns true, if this port is connected to a target port.
+     * @return true, if this port is connected to a target port.
+     */
+    public boolean isConnected() {
+        return mTarget != null;
+    }
+
+    /**
+     * Returns true, if there is no frame waiting on this port.
+     * @return true, if no Frame instance is waiting on this port.
+     */
+    public boolean isAvailable() {
+        return mQueue == null || mQueue.canPush();
+    }
+
+    /**
+     * Returns a frame for writing.
+     *
+     * Call this method to fetch a new frame to write into. When you have finished writing the
+     * frame data, you can push it into the output queue using {@link #pushFrame(Frame)}. Note,
+     * that the Frame returned is owned by the queue. If you wish to hold on to the frame, you
+     * must detach it.
+     *
+     * @param dimensions the size of the Frame you wish to obtain.
+     * @return a writable Frame instance.
+     */
+    public Frame fetchAvailableFrame(int[] dimensions) {
+        // Fetched frames are auto-released by the filter after processing unless detached.
+        Frame frame = getQueue().fetchAvailableFrame(dimensions);
+        if (frame != null) {
+            //Log.i("OutputPort", "Adding frame " + frame + " to auto-release pool");
+            mFilter.addAutoReleaseFrame(frame);
+        }
+        return frame;
+    }
+
+    /**
+     * Pushes a frame onto this output port.
+     *
+     * This is typically a Frame instance you obtained by previously calling
+     * {@link #fetchAvailableFrame(int[])}, but may come from other sources such as an input port
+     * that is attached to this output port.
+     *
+     * Once you have pushed a frame to an output, you may no longer modify it as it may be shared
+     * among other filters.
+     *
+     * @param frame the frame to push to the output queue.
+     */
+    public void pushFrame(Frame frame) {
+        // Some queues allow pushing without fetching, so we need to make sure queue is open
+        // before pushing!
+        // NOTE(review): despite the comment above, no open-check is performed here; getQueue()
+        // returns null for an unopened port and this would NPE — confirm callers guarantee
+        // the port is open.
+        long timestamp = frame.getTimestamp();
+        if (timestamp == Frame.TIMESTAMP_NOT_SET)
+            frame.setTimestamp(mFilter.getCurrentTimestamp());
+        getQueue().pushFrame(frame);
+    }
+
+    /**
+     * Sets whether to wait until this port becomes available before processing.
+     * When set to true, the Filter will not be scheduled for processing unless there is no Frame
+     * waiting on this port. The default value is true.
+     *
+     * @param wait true, if filter should wait for the port to become available before processing.
+     * @see #waitsUntilAvailable()
+     */
+    public void setWaitsUntilAvailable(boolean wait) {
+        mWaitsUntilAvailable = wait;
+    }
+
+    /**
+     * Returns whether the filter waits until this port is available before processing.
+     * @return true, if the filter waits until this port is available before processing.
+     * @see #setWaitsUntilAvailable(boolean)
+     */
+    public boolean waitsUntilAvailable() {
+        return mWaitsUntilAvailable;
+    }
+
+    /**
+     * Returns the output port's name.
+     * This is the name that was specified when the output port was connected.
+     *
+     * @return the output port's name.
+     */
+    public String getName() {
+        return mName;
+    }
+
+    /**
+     * Return the filter object that this port belongs to.
+     *
+     * @return the output port's filter.
+     */
+    public Filter getFilter() {
+        return mFilter;
+    }
+
+    @Override
+    public String toString() {
+        return mFilter.getName() + ":" + mName;
+    }
+
+    OutputPort(Filter filter, String name, Signature.PortInfo info) {
+        mFilter = filter;
+        mName = name;
+        mInfo = info;
+    }
+
+    // Sets the input port this output port feeds into.
+    void setTarget(InputPort target) {
+        mTarget = target;
+    }
+
+    /**
+     * Return the (input) port that this output port is connected to.
+     *
+     * @return the connected port, null if not connected.
+     */
+    public InputPort getTarget() {
+        return mTarget;
+    }
+
+    // Returns the backing frame queue; null until the port has been opened.
+    FrameQueue getQueue() {
+        return mQueue;
+    }
+
+    // Installs the backing queue and drops the builder used during opening.
+    void setQueue(FrameQueue queue) {
+        mQueue = queue;
+        mQueueBuilder = null;
+    }
+
+    // Called when the port opens: configures the queue's write type from the port's
+    // signature info, then notifies the filter.
+    void onOpen(FrameQueue.Builder builder) {
+        mQueueBuilder = builder;
+        mQueueBuilder.setWriteType(mInfo.type);
+        mFilter.onOutputPortOpen(this);
+    }
+
+    boolean isOpen() {
+        return mQueue != null;
+    }
+
+    // True when the filter may be scheduled with respect to this port: either it does not
+    // wait for availability, or the port is currently available.
+    final boolean conditionsMet() {
+        return !mWaitsUntilAvailable || isAvailable();
+    }
+
+    // Clears any frame waiting in the backing queue (no-op if the port is not open).
+    void clear() {
+        if (mQueue != null) {
+            mQueue.clear();
+        }
+    }
+}
+
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/PixelUtils.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/PixelUtils.java
new file mode 100644
index 0000000..88538d4
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/PixelUtils.java
@@ -0,0 +1,79 @@
+/*
+ * Copyright 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package androidx.media.filterfw;
+
+import java.nio.ByteBuffer;
+
+/**
+ * A collection of utilities to deal with pixel operations on ByteBuffers.
+ * <p>Pixels are assumed to be 4 bytes each (e.g. RGBA8888), hence the factor of 4 in the
+ * size checks below.</p>
+ */
+public class PixelUtils {
+
+    /**
+     * Copy pixels from one buffer to another, applying a transformation.
+     *
+     * <p>The transformation is specified by specifying the initial offset in the output buffer, the
+     * stride (in pixels) between each pixel, and the stride (in pixels) between each row. The row
+     * stride is measured as the number of pixels between the start of each row.</p>
+     *
+     * <p>Note that this method is native for efficiency reasons. It does NOT do any bounds checking
+     * other than making sure the buffers are of sufficient size. This means that you can corrupt
+     * memory if specifying incorrect stride values!</p>
+     *
+     * @param input The input buffer containing pixel data.
+     * @param output The output buffer to hold the transformed pixel data.
+     * @param width The width of the input image.
+     * @param height The height of the input image.
+     * @param offset The start offset in the output (in pixels)
+     * @param pixStride The stride between each pixel (in pixels)
+     * @param rowStride The stride between the start of each row (in pixels)
+     * @throws IllegalArgumentException if the buffer sizes mismatch each other, are not
+     *         multiples of 4, or do not match {@code width * height * 4}.
+     */
+    public static void copyPixels(ByteBuffer input,
+            ByteBuffer output,
+            int width,
+            int height,
+            int offset,
+            int pixStride,
+            int rowStride) {
+        // Validate sizes up front; the native copy itself performs no bounds checking.
+        if (input.remaining() != output.remaining()) {
+            throw new IllegalArgumentException("Input and output buffers must have the same size!");
+        } else if (input.remaining() % 4 != 0) {
+            throw new IllegalArgumentException("Input buffer size must be a multiple of 4!");
+        } else if (output.remaining() % 4 != 0) {
+            throw new IllegalArgumentException("Output buffer size must be a multiple of 4!");
+        } else if ((width * height * 4) != input.remaining()) {
+            throw new IllegalArgumentException(
+                    "Input buffer size does not match given dimensions!");
+        } else if ((width * height * 4) != output.remaining()) {
+            throw new IllegalArgumentException(
+                    "Output buffer size does not match given dimensions!");
+        }
+        nativeCopyPixels(input, output, width, height, offset, pixStride, rowStride);
+    }
+
+    private static native void nativeCopyPixels(ByteBuffer input,
+            ByteBuffer output,
+            int width,
+            int height,
+            int offset,
+            int pixStride,
+            int rowStride);
+
+    static {
+        System.loadLibrary("smartcamera_jni");
+    }
+
+}
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/RenderTarget.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/RenderTarget.java
new file mode 100644
index 0000000..ab0546d
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/RenderTarget.java
@@ -0,0 +1,444 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package androidx.media.filterfw;
+
+import android.annotation.TargetApi;
+import android.graphics.SurfaceTexture;
+import android.media.MediaRecorder;
+import android.opengl.GLES20;
+import android.opengl.GLUtils;
+import android.os.Build.VERSION;
+import android.util.Log;
+import android.view.Surface;
+import android.view.SurfaceHolder;
+
+import java.nio.ByteBuffer;
+import java.util.HashMap;
+
+import javax.microedition.khronos.egl.EGL10;
+import javax.microedition.khronos.egl.EGLConfig;
+import javax.microedition.khronos.egl.EGLContext;
+import javax.microedition.khronos.egl.EGLDisplay;
+import javax.microedition.khronos.egl.EGLSurface;
+
+public final class RenderTarget {
+
+    private static final int EGL_CONTEXT_CLIENT_VERSION = 0x3098;
+    private static final int EGL_OPENGL_ES2_BIT = 4;
+
+    // Pre-HC devices do not necessarily support multiple display surfaces.
+    private static boolean mSupportsMultipleDisplaySurfaces = (VERSION.SDK_INT >= 11);
+
+    /** A Map that tracks which objects are wrapped by EGLSurfaces */
+    private static HashMap<Object, EGLSurface> mSurfaceSources = new HashMap<Object, EGLSurface>();
+
+    /** A Map for performing reference counting over shared objects across RenderTargets */
+    private static HashMap<Object, Integer> mRefCounts = new HashMap<Object, Integer>();
+
+    /** Stores the RenderTarget that is focused on the current thread. */
+    private static ThreadLocal<RenderTarget> mCurrentTarget = new ThreadLocal<RenderTarget>();
+
+    /** The source for the surface used in this target (if any) */
+    private Object mSurfaceSource = null;
+
+    /** The cached EGLConfig instance. */
+    private static EGLConfig mEglConfig = null;
+
+    /** The display for which the EGLConfig was chosen. We expect only one. */
+    private static EGLDisplay mConfiguredDisplay;
+
+    private EGL10 mEgl;
+    private EGLDisplay mDisplay;
+    private EGLContext mContext;
+    private EGLSurface mSurface;
+    private int mFbo;
+
+    private boolean mOwnsContext;
+    private boolean mOwnsSurface;
+
+    private static HashMap<EGLContext, ImageShader> mIdShaders
+        = new HashMap<EGLContext, ImageShader>();
+
+    private static HashMap<EGLContext, EGLSurface> mDisplaySurfaces
+        = new HashMap<EGLContext, EGLSurface>();
+
+    private static int sRedSize = 8;
+    private static int sGreenSize = 8;
+    private static int sBlueSize = 8;
+    private static int sAlphaSize = 8;
+    private static int sDepthSize = 0;
+    private static int sStencilSize = 0;
+
+    public static RenderTarget newTarget(int width, int height) {
+        EGL10 egl = (EGL10) EGLContext.getEGL();
+        EGLDisplay eglDisplay = createDefaultDisplay(egl);
+        EGLConfig eglConfig = chooseEglConfig(egl, eglDisplay);
+        EGLContext eglContext = createContext(egl, eglDisplay, eglConfig);
+        EGLSurface eglSurface = createSurface(egl, eglDisplay, width, height);
+        RenderTarget result = new RenderTarget(eglDisplay, eglContext, eglSurface, 0, true, true);
+        result.addReferenceTo(eglSurface);
+        return result;
+    }
+
+    public static RenderTarget currentTarget() {
+        // As RenderTargets are immutable, we can safely return the last focused instance on this
+        // thread, as we know it cannot have changed, and therefore must be current.
+        return mCurrentTarget.get();
+    }
+
+    public RenderTarget forTexture(TextureSource texture, int width, int height) {
+        // NOTE: We do not need to lookup any previous bindings of this texture to an FBO, as
+        // multiple FBOs to a single texture is valid.
+        int fbo = GLToolbox.generateFbo();
+        GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, fbo);
+        GLToolbox.checkGlError("glBindFramebuffer");
+        GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER,
+                                      GLES20.GL_COLOR_ATTACHMENT0,
+                                      texture.getTarget(),
+                                      texture.getTextureId(),
+                                      0);
+        GLToolbox.checkGlError("glFramebufferTexture2D");
+        return new RenderTarget(mDisplay, mContext, surface(), fbo, false, false);
+    }
+
+    public RenderTarget forSurfaceHolder(SurfaceHolder surfaceHolder) {
+        EGLConfig eglConfig = chooseEglConfig(mEgl, mDisplay);
+        EGLSurface eglSurf = null;
+        synchronized (mSurfaceSources) {
+            eglSurf = mSurfaceSources.get(surfaceHolder);
+            if (eglSurf == null) {
+                eglSurf = mEgl.eglCreateWindowSurface(mDisplay, eglConfig, surfaceHolder, null);
+                mSurfaceSources.put(surfaceHolder, eglSurf);
+            }
+        }
+        checkEglError(mEgl, "eglCreateWindowSurface");
+        checkSurface(mEgl, eglSurf);
+        RenderTarget result = new RenderTarget(mDisplay, mContext, eglSurf, 0, false, true);
+        result.addReferenceTo(eglSurf);
+        result.setSurfaceSource(surfaceHolder);
+        return result;
+    }
+
+    @TargetApi(11)
+    public RenderTarget forSurfaceTexture(SurfaceTexture surfaceTexture) {
+        EGLConfig eglConfig = chooseEglConfig(mEgl, mDisplay);
+        EGLSurface eglSurf = null;
+        synchronized (mSurfaceSources) {
+            eglSurf = mSurfaceSources.get(surfaceTexture);
+            if (eglSurf == null) {
+                eglSurf = mEgl.eglCreateWindowSurface(mDisplay, eglConfig, surfaceTexture, null);
+                mSurfaceSources.put(surfaceTexture, eglSurf);
+            }
+        }
+        checkEglError(mEgl, "eglCreateWindowSurface");
+        checkSurface(mEgl, eglSurf);
+        RenderTarget result = new RenderTarget(mDisplay, mContext, eglSurf, 0, false, true);
+        result.setSurfaceSource(surfaceTexture);
+        result.addReferenceTo(eglSurf);
+        return result;
+    }
+
+    @TargetApi(11)
+    public RenderTarget forSurface(Surface surface) {
+        EGLConfig eglConfig = chooseEglConfig(mEgl, mDisplay);
+        EGLSurface eglSurf = null;
+        synchronized (mSurfaceSources) {
+            eglSurf = mSurfaceSources.get(surface);
+            if (eglSurf == null) {
+                eglSurf = mEgl.eglCreateWindowSurface(mDisplay, eglConfig, surface, null);
+                mSurfaceSources.put(surface, eglSurf);
+            }
+        }
+        checkEglError(mEgl, "eglCreateWindowSurface");
+        checkSurface(mEgl, eglSurf);
+        RenderTarget result = new RenderTarget(mDisplay, mContext, eglSurf, 0, false, true);
+        result.setSurfaceSource(surface);
+        result.addReferenceTo(eglSurf);
+        return result;
+    }
+
+    public static RenderTarget forMediaRecorder(MediaRecorder mediaRecorder) {
+        throw new RuntimeException("Not yet implemented MediaRecorder -> RenderTarget!");
+    }
+
+    public static void setEGLConfigChooser(int redSize, int greenSize, int blueSize, int alphaSize,
+            int depthSize, int stencilSize) {
+        sRedSize = redSize;
+        sGreenSize = greenSize;
+        sBlueSize = blueSize;
+        sAlphaSize = alphaSize;
+        sDepthSize = depthSize;
+        sStencilSize = stencilSize;
+    }
+
+    public void registerAsDisplaySurface() {
+        if (!mSupportsMultipleDisplaySurfaces) {
+            // Note that while this does in effect change RenderTarget instances (by modifying
+            // their returned EGLSurface), breaking the immutability requirement, it does not modify
+            // the current target. This is important so that the instance returned in
+            // currentTarget() remains accurate.
+            EGLSurface currentSurface = mDisplaySurfaces.get(mContext);
+            if (currentSurface != null && !currentSurface.equals(mSurface)) {
+                throw new RuntimeException("This device supports only a single display surface!");
+            } else {
+                mDisplaySurfaces.put(mContext, mSurface);
+            }
+        }
+    }
+
+    public void unregisterAsDisplaySurface() {
+        if (!mSupportsMultipleDisplaySurfaces) {
+            mDisplaySurfaces.put(mContext, null);
+        }
+    }
+
+    public void focus() {
+        RenderTarget current = mCurrentTarget.get();
+        // We assume RenderTargets are immutable, so that we do not need to focus if the current
+        // RenderTarget has not changed.
+        if (current != this) {
+            mEgl.eglMakeCurrent(mDisplay, surface(), surface(), mContext);
+            mCurrentTarget.set(this);
+        }
+        if (getCurrentFbo() != mFbo) {
+            GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, mFbo);
+            GLToolbox.checkGlError("glBindFramebuffer");
+        }
+    }
+
+    public static void focusNone() {
+        EGL10 egl = (EGL10) EGLContext.getEGL();
+        egl.eglMakeCurrent(egl.eglGetCurrentDisplay(),
+                           EGL10.EGL_NO_SURFACE,
+                           EGL10.EGL_NO_SURFACE,
+                           EGL10.EGL_NO_CONTEXT);
+        mCurrentTarget.set(null);
+        checkEglError(egl, "eglMakeCurrent");
+    }
+
+    public void swapBuffers() {
+        mEgl.eglSwapBuffers(mDisplay, surface());
+    }
+
+    public EGLContext getContext() {
+        return mContext;
+    }
+
+    public static EGLContext currentContext() {
+        RenderTarget current = RenderTarget.currentTarget();
+        return current != null ? current.getContext() : EGL10.EGL_NO_CONTEXT;
+    }
+
+    public void release() {
+        if (mOwnsContext) {
+            mEgl.eglDestroyContext(mDisplay, mContext);
+            mContext = EGL10.EGL_NO_CONTEXT;
+        }
+        if (mOwnsSurface) {
+            synchronized (mSurfaceSources) {
+                if (removeReferenceTo(mSurface)) {
+                    mEgl.eglDestroySurface(mDisplay, mSurface);
+                    mSurface = EGL10.EGL_NO_SURFACE;
+                    mSurfaceSources.remove(mSurfaceSource);
+                }
+            }
+        }
+        if (mFbo != 0) {
+           GLToolbox.deleteFbo(mFbo);
+       }
+    }
+
+    public void readPixelData(ByteBuffer pixels, int width, int height) {
+        GLToolbox.readTarget(this, pixels, width, height);
+    }
+
+    public ByteBuffer getPixelData(int width, int height) {
+        ByteBuffer pixels = ByteBuffer.allocateDirect(width * height * 4);
+        GLToolbox.readTarget(this, pixels, width, height);
+        return pixels;
+    }
+
+    /**
+     * Returns an identity shader for this context.
+     * You must not modify this shader. Use {@link ImageShader#createIdentity()} if you need to
+     * modify an identity shader.
+     */
+    public ImageShader getIdentityShader() {
+        ImageShader idShader = mIdShaders.get(mContext);
+        if (idShader == null) {
+            idShader = ImageShader.createIdentity();
+            mIdShaders.put(mContext, idShader);
+        }
+        return idShader;
+    }
+
+    @Override
+    public String toString() {
+        return "RenderTarget(" + mDisplay + ", " + mContext + ", " + mSurface + ", " + mFbo + ")";
+    }
+
+    private void setSurfaceSource(Object source) {
+        mSurfaceSource = source;
+    }
+
+    private void addReferenceTo(Object object) {
+        Integer refCount = mRefCounts.get(object);
+        if (refCount != null) {
+            mRefCounts.put(object, refCount + 1);
+        } else {
+            mRefCounts.put(object, 1);
+        }
+    }
+
+    private boolean removeReferenceTo(Object object) {
+        Integer refCount = mRefCounts.get(object);
+        if (refCount != null && refCount > 0) {
+            --refCount;
+            mRefCounts.put(object, refCount);
+            return refCount == 0;
+        } else {
+            Log.e("RenderTarget", "Removing reference of already released: " + object + "!");
+            return false;
+        }
+    }
+
+    private static EGLConfig chooseEglConfig(EGL10 egl, EGLDisplay display) {
+        if (mEglConfig == null || !display.equals(mConfiguredDisplay)) {
+            int[] configsCount = new int[1];
+            EGLConfig[] configs = new EGLConfig[1];
+            int[] configSpec = getDesiredConfig();
+            if (!egl.eglChooseConfig(display, configSpec, configs, 1, configsCount)) {
+                throw new IllegalArgumentException("EGL Error: eglChooseConfig failed " +
+                        getEGLErrorString(egl, egl.eglGetError()));
+            } else if (configsCount[0] > 0) {
+                mEglConfig = configs[0];
+                mConfiguredDisplay = display;
+            }
+        }
+        return mEglConfig;
+    }
+
+    private static int[] getDesiredConfig() {
+        return new int[] {
+                EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
+                EGL10.EGL_RED_SIZE, sRedSize,
+                EGL10.EGL_GREEN_SIZE, sGreenSize,
+                EGL10.EGL_BLUE_SIZE, sBlueSize,
+                EGL10.EGL_ALPHA_SIZE, sAlphaSize,
+                EGL10.EGL_DEPTH_SIZE, sDepthSize,
+                EGL10.EGL_STENCIL_SIZE, sStencilSize,
+                EGL10.EGL_NONE
+        };
+    }
+
+    private RenderTarget(EGLDisplay display, EGLContext context, EGLSurface surface, int fbo,
+                         boolean ownsContext, boolean ownsSurface) {
+        mEgl = (EGL10) EGLContext.getEGL();
+        mDisplay = display;
+        mContext = context;
+        mSurface = surface;
+        mFbo = fbo;
+        mOwnsContext = ownsContext;
+        mOwnsSurface = ownsSurface;
+    }
+
+    private EGLSurface surface() {
+        if (mSupportsMultipleDisplaySurfaces) {
+            return mSurface;
+        } else {
+            EGLSurface displaySurface = mDisplaySurfaces.get(mContext);
+            return displaySurface != null ? displaySurface : mSurface;
+        }
+    }
+
+    private static void initEgl(EGL10 egl, EGLDisplay display) {
+        int[] version = new int[2];
+        if (!egl.eglInitialize(display, version)) {
+            throw new RuntimeException("EGL Error: eglInitialize failed " +
+                    getEGLErrorString(egl, egl.eglGetError()));
+        }
+    }
+
+    private static EGLDisplay createDefaultDisplay(EGL10 egl) {
+        EGLDisplay display = egl.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY);
+        checkDisplay(egl, display);
+        initEgl(egl, display);
+        return display;
+    }
+
+    private static EGLContext createContext(EGL10 egl, EGLDisplay display, EGLConfig config) {
+        int[] attrib_list = { EGL_CONTEXT_CLIENT_VERSION, 2, EGL10.EGL_NONE };
+        EGLContext ctxt = egl.eglCreateContext(display, config, EGL10.EGL_NO_CONTEXT, attrib_list);
+        checkContext(egl, ctxt);
+        return ctxt;
+    }
+
+    private static EGLSurface createSurface(EGL10 egl, EGLDisplay display, int width, int height) {
+        EGLConfig eglConfig = chooseEglConfig(egl, display);
+        int[] attribs = { EGL10.EGL_WIDTH, width, EGL10.EGL_HEIGHT, height, EGL10.EGL_NONE };
+        return egl.eglCreatePbufferSurface(display, eglConfig, attribs);
+    }
+
+    private static int getCurrentFbo() {
+        int[] result = new int[1];
+        GLES20.glGetIntegerv(GLES20.GL_FRAMEBUFFER_BINDING, result, 0);
+        return result[0];
+    }
+
+    private static void checkDisplay(EGL10 egl, EGLDisplay display) {
+        if (display == EGL10.EGL_NO_DISPLAY) {
+            throw new RuntimeException("EGL Error: Bad display: "
+                    + getEGLErrorString(egl, egl.eglGetError()));
+        }
+    }
+
+    private static void checkContext(EGL10 egl, EGLContext context) {
+        if (context == EGL10.EGL_NO_CONTEXT) {
+            throw new RuntimeException("EGL Error: Bad context: "
+                    + getEGLErrorString(egl, egl.eglGetError()));
+        }
+    }
+
+    private static void checkSurface(EGL10 egl, EGLSurface surface) {
+        if (surface == EGL10.EGL_NO_SURFACE) {
+            throw new RuntimeException("EGL Error: Bad surface: "
+                    + getEGLErrorString(egl, egl.eglGetError()));
+        }
+    }
+
+    private static void checkEglError(EGL10 egl, String command) {
+        int error = egl.eglGetError();
+        if (error != EGL10.EGL_SUCCESS) {
+            throw new RuntimeException("Error executing " + command + "! EGL error = 0x"
+                + Integer.toHexString(error));
+        }
+    }
+
+    private static String getEGLErrorString(EGL10 egl, int eglError) {
+        if (VERSION.SDK_INT >= 14) {
+            return getEGLErrorStringICS(egl, eglError);
+        } else {
+            return "EGL Error 0x" + Integer.toHexString(eglError);
+        }
+    }
+
+    @TargetApi(14)
+    private static String getEGLErrorStringICS(EGL10 egl, int eglError) {
+        return GLUtils.getEGLErrorString(egl.eglGetError());
+    }
+}
+
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/ResizeFilter.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/ResizeFilter.java
new file mode 100644
index 0000000..c334c91
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/ResizeFilter.java
@@ -0,0 +1,41 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package androidx.media.filterpacks.transform;
+
+import androidx.media.filterfw.*;
+
+// TODO: In the future this could be done with a meta-filter that simply "hard-codes" the crop
+// parameters.
+/**
+ * Filter that resizes an input image to a given output size.
+ *
+ * Derives from CropFilter (declared elsewhere); presumably the resize is implemented as a
+ * full-frame crop scaled to the requested output dimensions -- confirm against CropFilter.
+ */
+public class ResizeFilter extends CropFilter {
+
+    public ResizeFilter(MffContext context, String name) {
+        super(context, name);
+    }
+
+    @Override
+    public Signature getSignature() {
+        // GPU-backed RGBA image in/out; output size and mipmap use are optional inputs.
+        FrameType imageIn = FrameType.image2D(FrameType.ELEMENT_RGBA8888, FrameType.READ_GPU);
+        FrameType imageOut = FrameType.image2D(FrameType.ELEMENT_RGBA8888, FrameType.WRITE_GPU);
+        return new Signature()
+            .addInputPort("image", Signature.PORT_REQUIRED, imageIn)
+            .addInputPort("outputWidth", Signature.PORT_OPTIONAL, FrameType.single(int.class))
+            .addInputPort("outputHeight", Signature.PORT_OPTIONAL, FrameType.single(int.class))
+            .addInputPort("useMipmaps", Signature.PORT_OPTIONAL, FrameType.single(boolean.class))
+            .addOutputPort("image", Signature.PORT_REQUIRED, imageOut)
+            .disallowOtherPorts();
+    }
+}
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/RotateFilter.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/RotateFilter.java
new file mode 100644
index 0000000..5db20a4
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/RotateFilter.java
@@ -0,0 +1,80 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package androidx.media.filterpacks.transform;
+
+import androidx.media.filterfw.Filter;
+import androidx.media.filterfw.FrameImage2D;
+import androidx.media.filterfw.FrameType;
+import androidx.media.filterfw.ImageShader;
+import androidx.media.filterfw.InputPort;
+import androidx.media.filterfw.MffContext;
+import androidx.media.filterfw.OutputPort;
+import androidx.media.filterfw.Signature;
+import androidx.media.filterfw.geometry.Quad;
+
+/**
+ * Filter that rotates an input image by a given angle (in degrees, as evidenced by the
+ * degrees-to-radians conversion in onProcess()).
+ */
+public class RotateFilter extends Filter {
+
+    // Source quad to sample from, defaulting to the full image in normalized coordinates.
+    private Quad mSourceRect = Quad.fromRect(0f, 0f, 1f, 1f);
+    // Rotation angle in degrees; converted to radians in onProcess().
+    private float mRotateAngle = 0;
+    private ImageShader mShader;
+
+    public RotateFilter(MffContext context, String name) {
+        super(context, name);
+    }
+
+    @Override
+    public Signature getSignature() {
+        // GPU-backed RGBA image in/out; "rotateAngle" is required, "sourceRect" optional.
+        FrameType imageIn = FrameType.image2D(FrameType.ELEMENT_RGBA8888, FrameType.READ_GPU);
+        FrameType imageOut = FrameType.image2D(FrameType.ELEMENT_RGBA8888, FrameType.WRITE_GPU);
+        return new Signature()
+            .addInputPort("image", Signature.PORT_REQUIRED, imageIn)
+            .addInputPort("rotateAngle", Signature.PORT_REQUIRED, FrameType.single(float.class))
+            .addInputPort("sourceRect", Signature.PORT_OPTIONAL, FrameType.single(Quad.class))
+            .addOutputPort("image", Signature.PORT_REQUIRED, imageOut)
+            .disallowOtherPorts();
+    }
+
+    @Override
+    public void onInputPortOpen(InputPort port) {
+        // NOTE: ports are bound to fields by string name; the field names "mRotateAngle"
+        // and "mSourceRect" must not be renamed without updating these bindings.
+        if (port.getName().equals("rotateAngle")) {
+            port.bindToFieldNamed("mRotateAngle");
+            port.setAutoPullEnabled(true);
+        } else if (port.getName().equals("sourceRect")) {
+            port.bindToFieldNamed("mSourceRect");
+            port.setAutoPullEnabled(true);
+        }
+    }
+
+    @Override
+    protected void onPrepare() {
+        mShader = ImageShader.createIdentity();
+    }
+
+    @Override
+    protected void onProcess() {
+        OutputPort outPort = getConnectedOutputPort("image");
+        FrameImage2D inputImage = getConnectedInputPort("image").pullFrame().asFrameImage2D();
+        int[] inDims = inputImage.getDimensions();
+
+        // Output keeps the input dimensions; the rotation is applied by mapping the source
+        // quad onto a rotated target quad in the shader.
+        FrameImage2D outputImage = outPort.fetchAvailableFrame(inDims).asFrameImage2D();
+        mShader.setSourceQuad(mSourceRect);
+        Quad targetQuad = mSourceRect.rotated((float) (mRotateAngle / 180 * Math.PI));
+        mShader.setTargetQuad(targetQuad);
+        mShader.process(inputImage, outputImage);
+        outPort.pushFrame(outputImage);
+    }
+}
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/ScaleFilter.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/ScaleFilter.java
new file mode 100644
index 0000000..1c3f328
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/ScaleFilter.java
@@ -0,0 +1,66 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package androidx.media.filterpacks.transform;
+
+// TODO: scale filter needs to be able to specify output width and height
+import androidx.media.filterfw.FrameType;
+import androidx.media.filterfw.InputPort;
+import androidx.media.filterfw.MffContext;
+import androidx.media.filterfw.Signature;
+
+/**
+ * Filter that scales an input image by a uniform factor, specializing ResizeFilter by
+ * computing the output size as (input size * scale).
+ */
+public class ScaleFilter extends ResizeFilter {
+
+    // Uniform scale factor applied to both dimensions; 1.0 leaves the image unchanged.
+    private float mScale = 1.0f;
+
+    public ScaleFilter(MffContext context, String name) {
+        super(context, name);
+    }
+
+    @Override
+    public Signature getSignature() {
+        // GPU-backed RGBA image in/out; "scale" and "useMipmaps" are optional inputs.
+        FrameType imageIn = FrameType.image2D(FrameType.ELEMENT_RGBA8888, FrameType.READ_GPU);
+        FrameType imageOut = FrameType.image2D(FrameType.ELEMENT_RGBA8888, FrameType.WRITE_GPU);
+        return new Signature()
+            .addInputPort("image", Signature.PORT_REQUIRED, imageIn)
+            .addInputPort("scale", Signature.PORT_OPTIONAL, FrameType.single(float.class))
+            .addInputPort("useMipmaps", Signature.PORT_OPTIONAL, FrameType.single(boolean.class))
+            .addOutputPort("image", Signature.PORT_REQUIRED, imageOut)
+            .disallowOtherPorts();
+    }
+
+    @Override
+    public void onInputPortOpen(InputPort port) {
+        // NOTE: ports are bound to fields by string name. "mScale" is declared above;
+        // "mUseMipmaps" is not declared in this class -- presumably inherited from
+        // ResizeFilter, verify against that class.
+        if (port.getName().equals("scale")) {
+            port.bindToFieldNamed("mScale");
+            port.setAutoPullEnabled(true);
+        } else if (port.getName().equals("useMipmaps")) {
+            port.bindToFieldNamed("mUseMipmaps");
+            port.setAutoPullEnabled(true);
+        }
+    }
+
+    @Override
+    protected int getOutputWidth(int inWidth, int inHeight) {
+        // Truncating cast: a scaled width of e.g. 99.9 becomes 99.
+        return (int)(inWidth * mScale);
+    }
+
+    @Override
+    protected int getOutputHeight(int inWidth, int inHeight) {
+        // Truncating cast, matching getOutputWidth().
+        return (int)(inHeight * mScale);
+    }
+
+}
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/Signature.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/Signature.java
new file mode 100644
index 0000000..2c2916f
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/Signature.java
@@ -0,0 +1,241 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package androidx.media.filterfw;
+
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map.Entry;
+import java.util.Set;
+
+/**
+ * A Signature holds the specification for a filter's input and output ports.
+ *
+ * A Signature instance must be returned by the filter's {@link Filter#getSignature()} method. It
+ * specifies the number and names of the filter's input and output ports, whether or not they
+ * are required, how data for those ports are accessed, and more. A Signature does not change over
+ * time. This makes Signatures useful for understanding how a filter can be integrated into a
+ * graph.
+ *
+ * There are a number of flags that can be specified for each input and output port. The flag
+ * {@code PORT_REQUIRED} indicates that the user must connect the specified port. On the other hand,
+ * {@code PORT_OPTIONAL} indicates that a port may be connected by the user.
+ *
+ * If ports other than the ones in the Signature are allowed, they default to the most generic
+ * format, that allows passing in any type of Frame. Thus, if more granular access is needed to
+ * a frame's data, it must be specified in the Signature.
+ */
+public class Signature {
+
+    // Port maps are allocated lazily on first add; they remain null for a Signature with
+    // no declared ports, so all readers must null-check them.
+    private HashMap<String, PortInfo> mInputPorts = null;
+    private HashMap<String, PortInfo> mOutputPorts = null;
+    private boolean mAllowOtherInputs = true;
+    private boolean mAllowOtherOutputs = true;
+
+    /** Specification of a single port: its flags and the frame type it carries. */
+    static class PortInfo {
+        public int flags;
+        public FrameType type;
+
+        public PortInfo() {
+            flags = 0;
+            type = FrameType.any();
+        }
+
+        public PortInfo(int flags, FrameType type) {
+            this.flags = flags;
+            this.type = type;
+        }
+
+        public boolean isRequired() {
+            return (flags & PORT_REQUIRED) != 0;
+        }
+
+        public String toString(String ioMode, String name) {
+            String ioName = ioMode + " " + name;
+            String modeName = isRequired() ? "required" : "optional";
+            return modeName + " " + ioName + ": " + type.toString();
+        }
+    }
+
+    /** Indicates that the port must be connected in the graph. */
+    public static final int PORT_REQUIRED = 0x02;
+    /** Indicates that the port may be connected in the graph . */
+    public static final int PORT_OPTIONAL = 0x01;
+
+    /**
+     * Creates a new empty Signature.
+     */
+    public Signature() {
+    }
+
+    /**
+     * Adds an input port to the Signature.
+     *
+     * @param name the name of the input port. Must be unique among input port names.
+     * @param flags a combination of port flags.
+     * @param type the type of the input frame.
+     * @return this Signature instance.
+     */
+    public Signature addInputPort(String name, int flags, FrameType type) {
+        addInputPort(name, new PortInfo(flags, type));
+        return this;
+    }
+
+    /**
+     * Adds an output port to the Signature.
+     *
+     * @param name the name of the output port. Must be unique among output port names.
+     * @param flags a combination of port flags.
+     * @param type the type of the output frame.
+     * @return this Signature instance.
+     */
+    public Signature addOutputPort(String name, int flags, FrameType type) {
+        addOutputPort(name, new PortInfo(flags, type));
+        return this;
+    }
+
+    /**
+     * Disallows the user from adding any other input ports.
+     * Adding any input port not explicitly specified in this Signature will cause an error.
+     * @return this Signature instance.
+     */
+    public Signature disallowOtherInputs() {
+        mAllowOtherInputs = false;
+        return this;
+    }
+
+    /**
+     * Disallows the user from adding any other output ports.
+     * Adding any output port not explicitly specified in this Signature will cause an error.
+     * @return this Signature instance.
+     */
+    public Signature disallowOtherOutputs() {
+        mAllowOtherOutputs = false;
+        return this;
+    }
+
+    /**
+     * Disallows the user from adding any other ports.
+     * Adding any input or output port not explicitly specified in this Signature will cause an
+     * error.
+     * @return this Signature instance.
+     */
+    public Signature disallowOtherPorts() {
+        mAllowOtherInputs = false;
+        mAllowOtherOutputs = false;
+        return this;
+    }
+
+    @Override
+    public String toString() {
+        // Guard the lazily-initialized port maps: they are null until the first port is
+        // added, and the previous implementation threw a NullPointerException for a
+        // Signature with no declared inputs or outputs.
+        StringBuilder stringBuilder = new StringBuilder();
+        if (mInputPorts != null) {
+            for (Entry<String, PortInfo> entry : mInputPorts.entrySet()) {
+                stringBuilder.append(entry.getValue().toString("input", entry.getKey()) + "\n");
+            }
+        }
+        if (mOutputPorts != null) {
+            for (Entry<String, PortInfo> entry : mOutputPorts.entrySet()) {
+                stringBuilder.append(entry.getValue().toString("output", entry.getKey()) + "\n");
+            }
+        }
+        if (!mAllowOtherInputs) {
+            stringBuilder.append("disallow other inputs\n");
+        }
+        if (!mAllowOtherOutputs) {
+            stringBuilder.append("disallow other outputs\n");
+        }
+        return stringBuilder.toString();
+    }
+
+    /** Returns the PortInfo for the named input, or a default (optional, any-type) PortInfo. */
+    PortInfo getInputPortInfo(String name) {
+        PortInfo result = mInputPorts != null ? mInputPorts.get(name) : null;
+        return result != null ? result : new PortInfo();
+    }
+
+    /** Returns the PortInfo for the named output, or a default (optional, any-type) PortInfo. */
+    PortInfo getOutputPortInfo(String name) {
+        PortInfo result = mOutputPorts != null ? mOutputPorts.get(name) : null;
+        return result != null ? result : new PortInfo();
+    }
+
+    /**
+     * Verifies that the filter's connected input ports satisfy this Signature: every
+     * required input is connected, and no unknown inputs exist if they are disallowed.
+     */
+    void checkInputPortsConform(Filter filter) {
+        Set<String> filterInputs = new HashSet<String>();
+        filterInputs.addAll(filter.getConnectedInputPortMap().keySet());
+        if (mInputPorts != null) {
+            for (Entry<String, PortInfo> entry : mInputPorts.entrySet()) {
+                String portName = entry.getKey();
+                PortInfo portInfo = entry.getValue();
+                InputPort inputPort = filter.getConnectedInputPort(portName);
+                if (inputPort == null && portInfo.isRequired()) {
+                    throw new RuntimeException("Filter " + filter + " does not have required "
+                        + "input port '" + portName + "'!");
+                }
+                filterInputs.remove(portName);
+            }
+        }
+        if (!mAllowOtherInputs && !filterInputs.isEmpty()) {
+            throw new RuntimeException("Filter " + filter + " has invalid input ports: "
+                + filterInputs + "!");
+        }
+    }
+
+    /**
+     * Verifies that the filter's connected output ports satisfy this Signature: every
+     * required output is connected, and no unknown outputs exist if they are disallowed.
+     */
+    void checkOutputPortsConform(Filter filter) {
+        Set<String> filterOutputs = new HashSet<String>();
+        filterOutputs.addAll(filter.getConnectedOutputPortMap().keySet());
+        if (mOutputPorts != null) {
+            for (Entry<String, PortInfo> entry : mOutputPorts.entrySet()) {
+                String portName = entry.getKey();
+                PortInfo portInfo = entry.getValue();
+                OutputPort outputPort = filter.getConnectedOutputPort(portName);
+                if (outputPort == null && portInfo.isRequired()) {
+                    throw new RuntimeException("Filter " + filter + " does not have required "
+                        + "output port '" + portName + "'!");
+                }
+                filterOutputs.remove(portName);
+            }
+        }
+        if (!mAllowOtherOutputs && !filterOutputs.isEmpty()) {
+            throw new RuntimeException("Filter " + filter + " has invalid output ports: "
+                + filterOutputs + "!");
+        }
+    }
+
+    /** Returns the declared input ports, or null if none were added. */
+    HashMap<String, PortInfo> getInputPorts() {
+        return mInputPorts;
+    }
+
+    /** Returns the declared output ports, or null if none were added. */
+    HashMap<String, PortInfo> getOutputPorts() {
+        return mOutputPorts;
+    }
+
+    private void addInputPort(String name, PortInfo portInfo) {
+        if (mInputPorts == null) {
+            mInputPorts = new HashMap<String, PortInfo>();
+        }
+        if (mInputPorts.containsKey(name)) {
+            throw new RuntimeException("Attempting to add duplicate input port '" + name + "'!");
+        }
+        mInputPorts.put(name, portInfo);
+    }
+
+    private void addOutputPort(String name, PortInfo portInfo) {
+        if (mOutputPorts == null) {
+            mOutputPorts = new HashMap<String, PortInfo>();
+        }
+        if (mOutputPorts.containsKey(name)) {
+            throw new RuntimeException("Attempting to add duplicate output port '" + name + "'!");
+        }
+        mOutputPorts.put(name, portInfo);
+    }
+}
+
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/SimpleCache.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/SimpleCache.java
new file mode 100644
index 0000000..f54621f
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/SimpleCache.java
@@ -0,0 +1,38 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package androidx.media.filterfw;
+
+import java.util.LinkedHashMap;
+import java.util.Map;
+
/**
 * A simple size-bounded LRU cache, used internally for managing repetitive objects.
 *
 * <p>Backed by an access-ordered {@link LinkedHashMap}: whenever the cache holds more
 * than {@code maxEntries} entries, the least-recently-accessed entry is evicted on the
 * next insertion. Not thread-safe.
 */
class SimpleCache<K, V> extends LinkedHashMap<K, V> {

    // LinkedHashMap is Serializable, so declare an explicit version id.
    private static final long serialVersionUID = 1L;

    // Maximum number of entries retained; the eldest entry is evicted beyond this.
    private final int mMaxEntries;

    public SimpleCache(final int maxEntries) {
        // Capacity of maxEntries + 1 avoids a rehash when the map momentarily holds one
        // extra entry before eviction; 'true' selects access-order (LRU) iteration.
        super(maxEntries + 1, 1f, true);
        mMaxEntries = maxEntries;
    }

    @Override
    protected boolean removeEldestEntry(final Map.Entry<K, V> eldest) {
        // Evict the least-recently-used entry whenever the size bound is exceeded.
        return super.size() > mMaxEntries;
    }
}
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/SlotFilter.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/SlotFilter.java
new file mode 100644
index 0000000..aaa87c2
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/SlotFilter.java
@@ -0,0 +1,36 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package androidx.media.filterfw;
+
/**
 * Base class for filters that are bound to a named frame slot of the context's frame
 * manager. Subclasses use {@link #getSlotType()} and {@link #slotHasFrame()} to inspect
 * the bound slot.
 */
public abstract class SlotFilter extends Filter {

    // Name of the frame-manager slot this filter is bound to; fixed at construction.
    protected final String mSlotName;

    /**
     * Creates a SlotFilter bound to the given slot.
     *
     * @param context the MFF context this filter runs in.
     * @param name the name of this filter instance.
     * @param slotName the name of the frame-manager slot to bind to.
     */
    protected SlotFilter(MffContext context, String name, String slotName) {
        super(context, name);
        mSlotName = slotName;
    }

    /** Returns the frame type of the bound slot. */
    protected final FrameType getSlotType() {
        return getFrameManager().getSlot(mSlotName).getType();
    }

    /** Returns true if the bound slot currently holds a frame. */
    protected final boolean slotHasFrame() {
        return getFrameManager().getSlot(mSlotName).hasFrame();
    }
}
+
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/SobelFilter.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/SobelFilter.java
new file mode 100644
index 0000000..a4c39a1
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/SobelFilter.java
@@ -0,0 +1,174 @@
+/*
+ * Copyright 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package androidx.media.filterpacks.image;
+
+import androidx.media.filterfw.Filter;
+import androidx.media.filterfw.Frame;
+import androidx.media.filterfw.FrameImage2D;
+import androidx.media.filterfw.FrameType;
+import androidx.media.filterfw.ImageShader;
+import androidx.media.filterfw.MffContext;
+import androidx.media.filterfw.OutputPort;
+import androidx.media.filterfw.Signature;
+
+import java.nio.ByteBuffer;
+
+public class SobelFilter extends Filter {
+
+    private static final String mGradientXSource =
+              "precision mediump float;\n"
+            + "uniform sampler2D tex_sampler_0;\n"
+            + "uniform vec2 pix;\n"
+            + "varying vec2 v_texcoord;\n"
+            + "void main() {\n"
+            + "  vec4 a1 = -1.0 * texture2D(tex_sampler_0, v_texcoord + vec2(-pix.x, -pix.y));\n"
+            + "  vec4 a2 = -2.0 * texture2D(tex_sampler_0, v_texcoord + vec2(-pix.x, 0.0));\n"
+            + "  vec4 a3 = -1.0 * texture2D(tex_sampler_0, v_texcoord + vec2(-pix.x, +pix.y));\n"
+            + "  vec4 b1 = +1.0 * texture2D(tex_sampler_0, v_texcoord + vec2(+pix.x, -pix.y));\n"
+            + "  vec4 b2 = +2.0 * texture2D(tex_sampler_0, v_texcoord + vec2(+pix.x, 0.0));\n"
+            + "  vec4 b3 = +1.0 * texture2D(tex_sampler_0, v_texcoord + vec2(+pix.x, +pix.y));\n"
+            + "  gl_FragColor = 0.5 + (a1 + a2 + a3 + b1 + b2 + b3) / 8.0;\n"
+            + "}\n";
+
+    private static final String mGradientYSource =
+              "precision mediump float;\n"
+            + "uniform sampler2D tex_sampler_0;\n"
+            + "uniform vec2 pix;\n"
+            + "varying vec2 v_texcoord;\n"
+            + "void main() {\n"
+            + "  vec4 a1 = -1.0 * texture2D(tex_sampler_0, v_texcoord + vec2(-pix.x, -pix.y));\n"
+            + "  vec4 a2 = -2.0 * texture2D(tex_sampler_0, v_texcoord + vec2(0.0,    -pix.y));\n"
+            + "  vec4 a3 = -1.0 * texture2D(tex_sampler_0, v_texcoord + vec2(+pix.x, -pix.y));\n"
+            + "  vec4 b1 = +1.0 * texture2D(tex_sampler_0, v_texcoord + vec2(-pix.x, +pix.y));\n"
+            + "  vec4 b2 = +2.0 * texture2D(tex_sampler_0, v_texcoord + vec2(0.0,    +pix.y));\n"
+            + "  vec4 b3 = +1.0 * texture2D(tex_sampler_0, v_texcoord + vec2(+pix.x, +pix.y));\n"
+            + "  gl_FragColor = 0.5 + (a1 + a2 + a3 + b1 + b2 + b3) / 8.0;\n"
+            + "}\n";
+
+    private static final String mMagnitudeSource =
+            "precision mediump float;\n"
+          + "uniform sampler2D tex_sampler_0;\n"
+          + "uniform sampler2D tex_sampler_1;\n"
+          + "varying vec2 v_texcoord;\n"
+          + "void main() {\n"
+          + "  vec4 gx = 2.0 * texture2D(tex_sampler_0, v_texcoord) - 1.0;\n"
+          + "  vec4 gy = 2.0 * texture2D(tex_sampler_1, v_texcoord) - 1.0;\n"
+          + "  gl_FragColor = vec4(sqrt(gx.rgb * gx.rgb + gy.rgb * gy.rgb), 1.0);\n"
+          + "}\n";
+
+    private static final String mDirectionSource =
+            "precision mediump float;\n"
+          + "uniform sampler2D tex_sampler_0;\n"
+          + "uniform sampler2D tex_sampler_1;\n"
+          + "varying vec2 v_texcoord;\n"
+          + "void main() {\n"
+          + "  vec4 gy = 2.0 * texture2D(tex_sampler_1, v_texcoord) - 1.0;\n"
+          + "  vec4 gx = 2.0 * texture2D(tex_sampler_0, v_texcoord) - 1.0;\n"
+          + "  gl_FragColor = vec4((atan(gy.rgb, gx.rgb) + 3.14) / (2.0 * 3.14), 1.0);\n"
+          + "}\n";
+
+    private ImageShader mGradientXShader;
+    private ImageShader mGradientYShader;
+    private ImageShader mMagnitudeShader;
+    private ImageShader mDirectionShader;
+
+    private FrameType mImageType;
+
+    public SobelFilter(MffContext context, String name) {
+        super(context, name);
+    }
+
+    @Override
+    public Signature getSignature() {
+        // TODO: we will address the issue of READ_GPU / WRITE_GPU when using CPU filters later.
+        FrameType imageIn = FrameType.image2D(FrameType.ELEMENT_RGBA8888, FrameType.READ_GPU);
+        FrameType imageOut = FrameType.image2D(FrameType.ELEMENT_RGBA8888, FrameType.WRITE_GPU);
+        return new Signature().addInputPort("image", Signature.PORT_REQUIRED, imageIn)
+                .addOutputPort("direction", Signature.PORT_OPTIONAL, imageOut)
+                .addOutputPort("magnitude", Signature.PORT_OPTIONAL, imageOut).disallowOtherPorts();
+    }
+
+    @Override
+    protected void onPrepare() {
+        if (isOpenGLSupported()) {
+            mGradientXShader = new ImageShader(mGradientXSource);
+            mGradientYShader = new ImageShader(mGradientYSource);
+            mMagnitudeShader = new ImageShader(mMagnitudeSource);
+            mDirectionShader = new ImageShader(mDirectionSource);
+            mImageType = FrameType.image2D(
+                    FrameType.ELEMENT_RGBA8888, FrameType.READ_GPU | FrameType.WRITE_GPU);
+        }
+    }
+
+    @Override
+    protected void onProcess() {
+        OutputPort magnitudePort = getConnectedOutputPort("magnitude");
+        OutputPort directionPort = getConnectedOutputPort("direction");
+        FrameImage2D inputImage = getConnectedInputPort("image").pullFrame().asFrameImage2D();
+        int[] inputDims = inputImage.getDimensions();
+
+        FrameImage2D magImage = (magnitudePort != null) ?
+                magnitudePort.fetchAvailableFrame(inputDims).asFrameImage2D() : null;
+        FrameImage2D dirImage = (directionPort != null) ?
+                directionPort.fetchAvailableFrame(inputDims).asFrameImage2D() : null;
+        if (isOpenGLSupported()) {
+            FrameImage2D gxFrame = Frame.create(mImageType, inputDims).asFrameImage2D();
+            FrameImage2D gyFrame = Frame.create(mImageType, inputDims).asFrameImage2D();
+            mGradientXShader.setUniformValue("pix", new float[] {1f/inputDims[0], 1f/inputDims[1]});
+            mGradientYShader.setUniformValue("pix", new float[] {1f/inputDims[0], 1f/inputDims[1]});
+            mGradientXShader.process(inputImage, gxFrame);
+            mGradientYShader.process(inputImage, gyFrame);
+            FrameImage2D[] gradientFrames = new FrameImage2D[] { gxFrame, gyFrame };
+            if (magnitudePort != null) {
+                mMagnitudeShader.processMulti(gradientFrames, magImage);
+            }
+            if (directionPort != null) {
+                mDirectionShader.processMulti(gradientFrames, dirImage);
+            }
+            gxFrame.release();
+            gyFrame.release();
+        } else {
+            ByteBuffer inputBuffer  = inputImage.lockBytes(Frame.MODE_READ);
+            ByteBuffer magBuffer  = (magImage != null) ?
+                    magImage.lockBytes(Frame.MODE_WRITE) : null;
+            ByteBuffer dirBuffer  = (dirImage != null) ?
+                    dirImage.lockBytes(Frame.MODE_WRITE) : null;
+            sobelOperator(inputImage.getWidth(), inputImage.getHeight(),
+                    inputBuffer, magBuffer, dirBuffer);
+            inputImage.unlock();
+            if (magImage != null) {
+                magImage.unlock();
+            }
+            if (dirImage != null) {
+                dirImage.unlock();
+            }
+        }
+        if (magImage != null) {
+            magnitudePort.pushFrame(magImage);
+        }
+        if (dirImage != null) {
+            directionPort.pushFrame(dirImage);
+        }
+    }
+
+    private static native boolean sobelOperator(int width, int height,
+            ByteBuffer imageBuffer, ByteBuffer magBuffer, ByteBuffer dirBudder);
+
+    static {
+        System.loadLibrary("smartcamera_jni");
+    }
+}
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/StatsFilter.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/StatsFilter.java
new file mode 100644
index 0000000..94030c3
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/StatsFilter.java
@@ -0,0 +1,120 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Calculates the mean and standard deviation of the values in the input image.
+// It takes in an RGBA image, but assumes that r, g, b, a are all the same values.
+
+package androidx.media.filterpacks.numeric;
+
+import android.util.Log;
+
+import androidx.media.filterfw.Filter;
+import androidx.media.filterfw.Frame;
+import androidx.media.filterfw.FrameBuffer2D;
+import androidx.media.filterfw.FrameType;
+import androidx.media.filterfw.FrameValue;
+import androidx.media.filterfw.InputPort;
+import androidx.media.filterfw.MffContext;
+import androidx.media.filterfw.OutputPort;
+import androidx.media.filterfw.Signature;
+import androidx.media.filterfw.geometry.Quad;
+
+import java.nio.ByteBuffer;
+
+/**
+ * Get the sample mean and variance of a 2-D buffer of bytes over a given rectangle.
+ * TODO: Add more statistics as needed.
+ * TODO: Check if crop rectangle is necessary to be included in this filter.
+ */
+public class StatsFilter extends Filter {
+
+    private static final int MEAN_INDEX = 0;
+    private static final int STDEV_INDEX = 1;
+
+    private final float[] mStats = new float[2];
+
+    private Quad mCropRect = Quad.fromRect(0f, 0f, 1f, 1f);
+    private static final String TAG = "StatsFilter";
+    private static boolean mLogVerbose = Log.isLoggable(TAG, Log.VERBOSE);
+
+    /**
+     * @param context
+     * @param name
+     */
+    public StatsFilter(MffContext context, String name) {
+        super(context, name);
+    }
+
+    @Override
+    public Signature getSignature() {
+        FrameType inputFrame = FrameType.buffer2D(FrameType.ELEMENT_INT8);
+        FrameType floatT = FrameType.single(float.class);
+        return new Signature()
+                .addInputPort("buffer", Signature.PORT_REQUIRED, inputFrame)
+                .addInputPort("cropRect", Signature.PORT_OPTIONAL, FrameType.single(Quad.class))
+                .addOutputPort("mean", Signature.PORT_REQUIRED, floatT)
+                .addOutputPort("stdev", Signature.PORT_REQUIRED, floatT)
+                .disallowOtherPorts();
+    }
+
+    @Override
+    public void onInputPortOpen(InputPort port) {
+        if (port.getName().equals("cropRect")) {
+            port.bindToFieldNamed("mCropRect");
+            port.setAutoPullEnabled(true);
+        }
+    }
+
+    private void calcMeanAndStd(ByteBuffer pixelBuffer, int width, int height, Quad quad) {
+        // Native
+        pixelBuffer.rewind();
+        regionscore(pixelBuffer, width, height, quad.topLeft().x, quad.topLeft().y,
+                quad.bottomRight().x, quad.bottomRight().y, mStats);
+        if (mLogVerbose) {
+            Log.v(TAG, "Native calc stats: Mean = " + mStats[MEAN_INDEX] + ", Stdev = "
+                    + mStats[STDEV_INDEX]);
+        }
+    }
+
+    /**
+     * @see androidx.media.filterfw.Filter#onProcess()
+     */
+    @Override
+    protected void onProcess() {
+        FrameBuffer2D inputFrame = getConnectedInputPort("buffer").pullFrame().asFrameImage2D();
+        ByteBuffer pixelBuffer = inputFrame.lockBytes(Frame.MODE_READ);
+
+        calcMeanAndStd(pixelBuffer, inputFrame.getWidth(), inputFrame.getHeight(), mCropRect);
+        inputFrame.unlock();
+
+        OutputPort outPort = getConnectedOutputPort("mean");
+        FrameValue outFrame = outPort.fetchAvailableFrame(null).asFrameValue();
+        outFrame.setValue(mStats[MEAN_INDEX]);
+        outPort.pushFrame(outFrame);
+
+        OutputPort outPortStdev = getConnectedOutputPort("stdev");
+        FrameValue outFrameStdev = outPortStdev.fetchAvailableFrame(null).asFrameValue();
+        outFrameStdev.setValue(mStats[STDEV_INDEX]);
+        outPortStdev.pushFrame(outFrameStdev);
+    }
+
+    private native void regionscore(ByteBuffer imageBuffer, int width, int height, float left,
+            float top, float right, float bottom, float[] statsArray);
+
+    static {
+        System.loadLibrary("smartcamera_jni");
+    }
+}
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/SurfaceHolderTarget.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/SurfaceHolderTarget.java
new file mode 100644
index 0000000..dac723b
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/SurfaceHolderTarget.java
@@ -0,0 +1,197 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package androidx.media.filterpacks.image;
+
+import android.graphics.Bitmap;
+import android.graphics.Canvas;
+import android.graphics.Color;
+import android.graphics.Paint;
+import android.graphics.Rect;
+import android.graphics.RectF;
+import android.view.Surface;
+import android.view.SurfaceHolder;
+import android.view.SurfaceView;
+import android.view.View;
+
+import androidx.media.filterfw.FrameImage2D;
+import androidx.media.filterfw.FrameType;
+import androidx.media.filterfw.ImageShader;
+import androidx.media.filterfw.InputPort;
+import androidx.media.filterfw.MffContext;
+import androidx.media.filterfw.RenderTarget;
+import androidx.media.filterfw.Signature;
+import androidx.media.filterfw.ViewFilter;
+
+public class SurfaceHolderTarget extends ViewFilter {
+
+    private SurfaceHolder mSurfaceHolder = null;
+    private RenderTarget mRenderTarget = null;
+    private ImageShader mShader = null;
+    private boolean mHasSurface = false;
+
+    private SurfaceHolder.Callback mSurfaceHolderListener = new SurfaceHolder.Callback() {
+        @Override
+        public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
+            // This just makes sure the holder is still the one we expect.
+            onSurfaceCreated(holder);
+        }
+
+        @Override
+        public void surfaceCreated (SurfaceHolder holder) {
+            onSurfaceCreated(holder);
+        }
+
+        @Override
+        public void surfaceDestroyed (SurfaceHolder holder) {
+            onDestroySurface();
+        }
+    };
+
+    public SurfaceHolderTarget(MffContext context, String name) {
+        super(context, name);
+    }
+
+    @Override
+    public void onBindToView(View view) {
+        if (view instanceof SurfaceView) {
+            SurfaceHolder holder = ((SurfaceView)view).getHolder();
+            if (holder == null) {
+                throw new RuntimeException("Could not get SurfaceHolder from SurfaceView "
+                    + view + "!");
+            }
+            setSurfaceHolder(holder);
+        } else {
+            throw new IllegalArgumentException("View must be a SurfaceView!");
+        }
+    }
+
+    public void setSurfaceHolder(SurfaceHolder holder) {
+        if (isRunning()) {
+            throw new IllegalStateException("Cannot set SurfaceHolder while running!");
+        }
+        mSurfaceHolder = holder;
+    }
+
+    public synchronized void onDestroySurface() {
+        if (mRenderTarget != null) {
+            mRenderTarget.release();
+            mRenderTarget = null;
+        }
+        mHasSurface = false;
+    }
+
+    @Override
+    public Signature getSignature() {
+        FrameType imageType = FrameType.image2D(FrameType.ELEMENT_RGBA8888, FrameType.READ_GPU);
+        return super.getSignature()
+            .addInputPort("image", Signature.PORT_REQUIRED, imageType)
+            .disallowOtherPorts();
+    }
+
+    @Override
+    protected void onInputPortOpen(InputPort port) {
+        super.connectViewInputs(port);
+    }
+
+    @Override
+    protected synchronized void onPrepare() {
+        if (isOpenGLSupported()) {
+            mShader = ImageShader.createIdentity();
+        }
+    }
+
+    @Override
+    protected synchronized void onOpen() {
+        mSurfaceHolder.addCallback(mSurfaceHolderListener);
+        Surface surface = mSurfaceHolder.getSurface();
+        mHasSurface = (surface != null) && surface.isValid();
+    }
+
+    @Override
+    protected synchronized void onProcess() {
+        FrameImage2D image = getConnectedInputPort("image").pullFrame().asFrameImage2D();
+        if (mHasSurface) {
+            // Synchronize the surface holder in case another filter is accessing this surface.
+            synchronized (mSurfaceHolder) {
+                if (isOpenGLSupported()) {
+                    renderGL(image);
+                } else {
+                    renderCanvas(image);
+                }
+            }
+        }
+    }
+
+    /**
+     * Renders the given frame to the screen using GLES2.
+     * @param image the image to render
+     */
+    private void renderGL(FrameImage2D image) {
+        if (mRenderTarget == null) {
+            mRenderTarget = RenderTarget.currentTarget().forSurfaceHolder(mSurfaceHolder);
+            mRenderTarget.registerAsDisplaySurface();
+        }
+        Rect frameRect = new Rect(0, 0, image.getWidth(), image.getHeight());
+        Rect surfRect = mSurfaceHolder.getSurfaceFrame();
+        setupShader(mShader, frameRect, surfRect);
+        mShader.process(image.lockTextureSource(),
+                        mRenderTarget,
+                        surfRect.width(),
+                        surfRect.height());
+        image.unlock();
+        mRenderTarget.swapBuffers();
+    }
+
+    /**
+     * Renders the given frame to the screen using a Canvas.
+     * @param image the image to render
+     */
+    private void renderCanvas(FrameImage2D image) {
+        Canvas canvas = mSurfaceHolder.lockCanvas();
+        Bitmap bitmap = image.toBitmap();
+        Rect sourceRect = new Rect(0, 0, bitmap.getWidth(), bitmap.getHeight());
+        Rect surfaceRect = mSurfaceHolder.getSurfaceFrame();
+        RectF targetRect = getTargetRect(sourceRect, surfaceRect);
+        canvas.drawColor(Color.BLACK);
+        if (targetRect.width() > 0 && targetRect.height() > 0) {
+            canvas.scale(surfaceRect.width(), surfaceRect.height());
+            canvas.drawBitmap(bitmap, sourceRect, targetRect, new Paint());
+        }
+        mSurfaceHolder.unlockCanvasAndPost(canvas);
+    }
+
+    @Override
+    protected synchronized void onClose() {
+        if (mRenderTarget != null) {
+            mRenderTarget.unregisterAsDisplaySurface();
+            mRenderTarget.release();
+            mRenderTarget = null;
+        }
+        if (mSurfaceHolder != null) {
+            mSurfaceHolder.removeCallback(mSurfaceHolderListener);
+        }
+    }
+
+    private synchronized void onSurfaceCreated(SurfaceHolder holder) {
+        if (mSurfaceHolder != holder) {
+            throw new RuntimeException("Unexpected Holder!");
+        }
+        mHasSurface = true;
+    }
+
+}
+
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/TextViewTarget.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/TextViewTarget.java
new file mode 100644
index 0000000..5aafced
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/TextViewTarget.java
@@ -0,0 +1,66 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package androidx.media.filterpacks.text;
+
+import android.view.View;
+import android.widget.TextView;
+
+import androidx.media.filterfw.FrameType;
+import androidx.media.filterfw.FrameValue;
+import androidx.media.filterfw.MffContext;
+import androidx.media.filterfw.Signature;
+import androidx.media.filterfw.ViewFilter;
+
+public class TextViewTarget extends ViewFilter {
+
+    private TextView mTextView = null;
+
+    public TextViewTarget(MffContext context, String name) {
+        super(context, name);
+    }
+
+    @Override
+    public void onBindToView(View view) {
+        if (view instanceof TextView) {
+            mTextView = (TextView)view;
+        } else {
+            throw new IllegalArgumentException("View must be a TextView!");
+        }
+    }
+
+    @Override
+    public Signature getSignature() {
+        return new Signature()
+            .addInputPort("text", Signature.PORT_REQUIRED, FrameType.single(String.class))
+            .disallowOtherPorts();
+    }
+
+    @Override
+    protected void onProcess() {
+        FrameValue textFrame = getConnectedInputPort("text").pullFrame().asFrameValue();
+        final String text = (String)textFrame.getValue();
+        if (mTextView != null) {
+            mTextView.post(new Runnable() {
+                @Override
+                public void run() {
+                    mTextView.setText(text);
+                }
+            });
+        }
+    }
+}
+
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/TextureSource.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/TextureSource.java
new file mode 100644
index 0000000..30fda82
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/TextureSource.java
@@ -0,0 +1,121 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package androidx.media.filterfw;
+
+import android.graphics.Bitmap;
+import android.opengl.GLES11Ext;
+import android.opengl.GLES20;
+
+import java.nio.ByteBuffer;
+
+public class TextureSource {
+
+    private int mTexId;
+    private int mTarget;
+    private boolean mIsOwner;
+    private boolean mIsAllocated = false;
+
+    public static TextureSource fromTexture(int texId, int target) {
+        return new TextureSource(texId, target, false);
+    }
+
+    public static TextureSource fromTexture(int texId) {
+        return new TextureSource(texId, GLES20.GL_TEXTURE_2D, false);
+    }
+
+    public static TextureSource newTexture() {
+        return new TextureSource(GLToolbox.generateTexture(), GLES20.GL_TEXTURE_2D, true);
+    }
+
+    public static TextureSource newExternalTexture() {
+        return new TextureSource(GLToolbox.generateTexture(),
+                                 GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
+                                 true);
+    }
+
+    public int getTextureId() {
+        return mTexId;
+    }
+
+    public int getTarget() {
+        return mTarget;
+    }
+
+    public void bind() {
+        GLES20.glBindTexture(mTarget, mTexId);
+        GLToolbox.checkGlError("glBindTexture");
+    }
+
+    public void allocate(int width, int height) {
+        //Log.i("TextureSource", "Allocating empty texture " + mTexId + ": " + width + "x" + height + ".");
+        GLToolbox.allocateTexturePixels(mTexId, mTarget, width, height);
+        mIsAllocated = true;
+    }
+
+    public void allocateWithPixels(ByteBuffer pixels, int width, int height) {
+        //Log.i("TextureSource", "Uploading pixels to texture " + mTexId + ": " + width + "x" + height + ".");
+        GLToolbox.setTexturePixels(mTexId, mTarget, pixels, width, height);
+        mIsAllocated = true;
+    }
+
+    public void allocateWithBitmapPixels(Bitmap bitmap) {
+        //Log.i("TextureSource", "Uploading pixels to texture " + mTexId + "!");
+        GLToolbox.setTexturePixels(mTexId, mTarget, bitmap);
+        mIsAllocated = true;
+    }
+
+    public void generateMipmaps() {
+        GLES20.glBindTexture(mTarget, mTexId);
+        GLES20.glTexParameteri(mTarget,
+                               GLES20.GL_TEXTURE_MIN_FILTER,
+                               GLES20.GL_LINEAR_MIPMAP_LINEAR);
+        GLES20.glGenerateMipmap(mTarget);
+        GLES20.glBindTexture(mTarget, 0);
+    }
+
+    public void setParameter(int parameter, int value) {
+        GLES20.glBindTexture(mTarget, mTexId);
+        GLES20.glTexParameteri(mTarget, parameter, value);
+        GLES20.glBindTexture(mTarget, 0);
+    }
+
+    /**
+     * @hide
+     */
+    public void release() {
+        if (GLToolbox.isTexture(mTexId) && mIsOwner) {
+            GLToolbox.deleteTexture(mTexId);
+        }
+        mTexId = GLToolbox.textureNone();
+    }
+
+    @Override
+    public String toString() {
+        return "TextureSource(id=" + mTexId + ", target=" + mTarget + ")";
+    }
+
+    boolean isAllocated() {
+        return mIsAllocated;
+    }
+
+    private TextureSource(int texId, int target, boolean isOwner) {
+        mTexId = texId;
+        mTarget = target;
+        mIsOwner = isOwner;
+    }
+}
+
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/Throughput.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/Throughput.java
new file mode 100644
index 0000000..c16aae0
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/Throughput.java
@@ -0,0 +1,52 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package androidx.media.filterpacks.performance;
+
/**
 * Immutable snapshot of frame-throughput statistics: a total frame count plus the frame
 * count and elapsed time of the most recent measurement period.
 */
public class Throughput {

    private final int mTotalFrames;
    private final int mPeriodFrames;
    private final long mPeriodTime;

    /**
     * @param totalFrames total number of frames processed so far.
     * @param periodFrames number of frames processed during the last period.
     * @param periodTime duration of the last period in milliseconds.
     * @param size unused; retained only for signature compatibility with existing callers.
     */
    public Throughput(int totalFrames, int periodFrames, long periodTime, int size) {
        mTotalFrames = totalFrames;
        mPeriodFrames = periodFrames;
        mPeriodTime = periodTime;
    }

    /** Returns the total number of frames processed. */
    public int getTotalFrameCount() {
        return mTotalFrames;
    }

    /** Returns the number of frames processed during the last period. */
    public int getPeriodFrameCount() {
        return mPeriodFrames;
    }

    /** Returns the duration of the last period in milliseconds. */
    public long getPeriodTime() {
        return mPeriodTime;
    }

    /**
     * Returns the frame rate of the last period in frames per second, or 0 for an
     * empty (zero-length) period.
     */
    public float getFramesPerSecond() {
        // Guard the division: a zero-length period would otherwise yield Infinity/NaN,
        // which toString() would render as a nonsense FPS value.
        if (mPeriodTime <= 0) {
            return 0f;
        }
        return mPeriodFrames / (mPeriodTime / 1000.0f);
    }

    @Override
    public String toString() {
        return Math.round(getFramesPerSecond()) + " FPS";
    }
}
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/ThroughputFilter.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/ThroughputFilter.java
new file mode 100644
index 0000000..25243a7
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/ThroughputFilter.java
@@ -0,0 +1,95 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package androidx.media.filterpacks.performance;
+
+import android.util.Log;
+import android.os.SystemClock;
+
+import androidx.media.filterfw.*;
+
+/**
+ * Pass-through filter that measures frame throughput. Frames arriving on "frame"
+ * are forwarded unchanged; every mPeriod seconds a {@link Throughput} statistics
+ * object is pushed on the "throughput" output port.
+ */
+public class ThroughputFilter extends Filter {
+
+    private int mPeriod = 3;           // reporting period in seconds (bound to the "period" port)
+    private long mLastTime = 0;        // start of current period (elapsedRealtime ms); 0 = unset
+    private int mTotalFrameCount = 0;  // frames seen since onOpen()
+    private int mPeriodFrameCount = 0; // frames seen during the current period
+
+    public ThroughputFilter(MffContext context, String name) {
+        super(context, name);
+    }
+
+
+    @Override
+    public Signature getSignature() {
+        FrameType throughputType = FrameType.single(Throughput.class);
+        return new Signature()
+            .addInputPort("frame", Signature.PORT_REQUIRED, FrameType.any())
+            .addOutputPort("throughput", Signature.PORT_REQUIRED, throughputType)
+            .addOutputPort("frame", Signature.PORT_REQUIRED, FrameType.any())
+            .addInputPort("period", Signature.PORT_OPTIONAL, FrameType.single(int.class))
+            .disallowOtherPorts();
+    }
+
+    @Override
+    public void onInputPortOpen(InputPort port) {
+        if (port.getName().equals("period")) {
+            // Values arriving on "period" are written directly into mPeriod.
+            port.bindToFieldNamed("mPeriod");
+        } else {
+            // Attach the "frame" input to the "frame" output for pass-through.
+            port.attachToOutputPort(getConnectedOutputPort("frame"));
+        }
+    }
+
+    // Reset all statistics whenever the graph (re)opens this filter.
+    @Override
+    protected void onOpen() {
+        mTotalFrameCount = 0;
+        mPeriodFrameCount = 0;
+        mLastTime = 0;
+    }
+
+    // Synchronized — presumably so updates to mPeriod via the bound "period" port
+    // cannot race with the read below; confirm against InputPort.bindToFieldNamed.
+    @Override
+    protected synchronized void onProcess() {
+        Frame inputFrame = getConnectedInputPort("frame").pullFrame();
+
+        // Update stats
+        ++mTotalFrameCount;
+        ++mPeriodFrameCount;
+
+        // Check clock
+        if (mLastTime == 0) {
+            mLastTime = SystemClock.elapsedRealtime();
+        }
+        long curTime = SystemClock.elapsedRealtime();
+
+        // Output throughput info if time period is up
+        if ((curTime - mLastTime) >= (mPeriod * 1000)) {
+            Log.i("Thru", "It is time!"); // TODO(review): leftover debug log — remove?
+            OutputPort tpPort = getConnectedOutputPort("throughput");
+            Throughput throughput = new Throughput(mTotalFrameCount,
+                                                   mPeriodFrameCount,
+                                                   curTime - mLastTime,
+                                                   inputFrame.getElementCount());
+            FrameValue throughputFrame = tpPort.fetchAvailableFrame(null).asFrameValue();
+            throughputFrame.setValue(throughput);
+            tpPort.pushFrame(throughputFrame);
+            mLastTime = curTime;
+            mPeriodFrameCount = 0;
+        }
+
+        getConnectedOutputPort("frame").pushFrame(inputFrame);
+    }
+}
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/ToGrayValuesFilter.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/ToGrayValuesFilter.java
new file mode 100644
index 0000000..8e0fd6c
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/ToGrayValuesFilter.java
@@ -0,0 +1,124 @@
+/*
+ * Copyright 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package androidx.media.filterpacks.image;
+
+import androidx.media.filterfw.Filter;
+import androidx.media.filterfw.Frame;
+import androidx.media.filterfw.FrameBuffer2D;
+import androidx.media.filterfw.FrameImage2D;
+import androidx.media.filterfw.FrameType;
+import androidx.media.filterfw.ImageShader;
+import androidx.media.filterfw.MffContext;
+import androidx.media.filterfw.OutputPort;
+import androidx.media.filterfw.RenderTarget;
+import androidx.media.filterfw.Signature;
+import androidx.media.filterfw.geometry.Quad;
+
+import java.nio.ByteBuffer;
+
+/**
+ * Filter that converts an RGBA input image into a 2D buffer of 8-bit gray values.
+ * When OpenGL is available, a fragment shader packs four gray pixels into each
+ * RGBA pixel of a quarter-width target texture which is then read back; otherwise
+ * a native routine (libsmartcamera_jni) performs the conversion on the CPU.
+ */
+public class ToGrayValuesFilter extends Filter {
+
+    // Fragment shader computing the luma of 4 adjacent input pixels (standard
+    // 0.299/0.587/0.114 RGB weights) and storing them in the 4 channels of one
+    // output pixel, so the output texture is 1/4 of the input width.
+    private final static String mGrayPackFragment =
+        "precision mediump float;\n" +
+        "const vec4 coeff_y = vec4(0.299, 0.587, 0.114, 0);\n" +
+        "uniform sampler2D tex_sampler_0;\n" +
+        "uniform float pix_stride;\n" +
+        "varying vec2 v_texcoord;\n" +
+        "void main() {\n" +
+        "  for (int i = 0; i < 4; i++) {\n" +
+        // Here is an example showing how this works:
+        // Assuming the input texture is 1x4 while the output texture is 1x1
+        // the coordinates of the 4 input pixels will be:
+        // { (0.125, 0.5), (0.375, 0.5), (0.625, 0.5), (0.875, 0.5) }
+        // and the coordinates of the 1 output pixels will be:
+        // { (0.5, 0.5) }
+        // the equation below locates the 4 input pixels from the coordinate of the output pixel
+        "    vec4 p = texture2D(tex_sampler_0,\n" +
+        "                       v_texcoord + vec2(pix_stride * (float(i) - 1.5), 0.0));\n" +
+        "    gl_FragColor[i] = dot(p, coeff_y);\n" +
+        "  }\n" +
+        "}\n";
+
+    private ImageShader mShader;       // only created/used on the OpenGL path
+
+    private FrameType mImageInType;    // cached input type; reused when creating the GL target
+
+    public ToGrayValuesFilter(MffContext context, String name) {
+        super(context, name);
+    }
+
+    @Override
+    public Signature getSignature() {
+        mImageInType = FrameType.image2D(FrameType.ELEMENT_RGBA8888, FrameType.READ_GPU);
+        FrameType imageOut = FrameType.buffer2D(FrameType.ELEMENT_INT8);
+        return new Signature()
+            .addInputPort("image", Signature.PORT_REQUIRED, mImageInType)
+            .addOutputPort("image", Signature.PORT_REQUIRED, imageOut)
+            .disallowOtherPorts();
+    }
+
+    @Override
+    protected void onPrepare() {
+        // The shader is only needed (and only compilable) when GL is supported.
+        if (isOpenGLSupported()) {
+            mShader = new ImageShader(mGrayPackFragment);
+        }
+    }
+
+    @Override
+    protected void onProcess() {
+        OutputPort outPort = getConnectedOutputPort("image");
+        FrameImage2D inputImage = getConnectedInputPort("image").pullFrame().asFrameImage2D();
+        int[] dim = inputImage.getDimensions();
+        FrameBuffer2D outputFrame;
+        ByteBuffer grayBuffer;
+
+        if (isOpenGLSupported()) {
+            // crop out the portion of inputImage that will be used to generate outputFrame.
+            // Output width is truncated down to a multiple of 4 because the shader packs
+            // 4 gray values into each RGBA pixel of the quarter-width target texture.
+            int modular = dim[0] % 4;
+            int[] outDim = new int[] {dim[0] - modular, dim[1]};
+            outputFrame = outPort.fetchAvailableFrame(outDim).asFrameBuffer2D();
+            grayBuffer = outputFrame.lockBytes(Frame.MODE_WRITE);
+
+            int[] targetDims = new int[] { outDim[0] / 4, outDim[1] };
+            FrameImage2D targetFrame = Frame.create(mImageInType, targetDims).asFrameImage2D();
+            mShader.setSourceQuad(Quad.fromRect(0f, 0f, ((float)outDim[0])/dim[0], 1f));
+            mShader.setUniformValue("pix_stride", 1f / outDim[0]);
+            mShader.process(inputImage, targetFrame);
+            // Read the packed gray pixels back from the GPU into the output buffer.
+            RenderTarget grayTarget = targetFrame.lockRenderTarget();
+            grayTarget.readPixelData(grayBuffer, targetDims[0], targetDims[1]);
+            targetFrame.unlock();
+            targetFrame.release();
+        } else {
+            // CPU fallback: convert the full-size image via the native routine.
+            outputFrame = outPort.fetchAvailableFrame(dim).asFrameBuffer2D();
+            grayBuffer = outputFrame.lockBytes(Frame.MODE_WRITE);
+            ByteBuffer inputBuffer  = inputImage.lockBytes(Frame.MODE_READ);
+            if (!toGrayValues(inputBuffer, grayBuffer)) {
+                throw new RuntimeException(
+                        "Native implementation encountered an error during processing!");
+            }
+            inputImage.unlock();
+        }
+        outputFrame.unlock();
+        outPort.pushFrame(outputFrame);
+    }
+
+    // Converts RGBA bytes in imageBuffer to gray bytes in grayBuffer; returns false on error.
+    private static native boolean toGrayValues(ByteBuffer imageBuffer, ByteBuffer grayBuffer);
+
+    static {
+        // Native implementation of toGrayValues() lives in libsmartcamera_jni.
+        System.loadLibrary("smartcamera_jni");
+    }
+}
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/ToStringFilter.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/ToStringFilter.java
new file mode 100644
index 0000000..7306b61
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/ToStringFilter.java
@@ -0,0 +1,51 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package androidx.media.filterpacks.text;
+
+import androidx.media.filterfw.Filter;
+import androidx.media.filterfw.FrameType;
+import androidx.media.filterfw.FrameValue;
+import androidx.media.filterfw.MffContext;
+import androidx.media.filterfw.OutputPort;
+import androidx.media.filterfw.Signature;
+
+/**
+ * Filter that converts any single-value frame on "object" into its String
+ * representation (via toString()) and emits it on "string".
+ */
+public class ToStringFilter extends Filter {
+
+    public ToStringFilter(MffContext context, String name) {
+        super(context, name);
+    }
+
+    @Override
+    public Signature getSignature() {
+        return new Signature()
+            .addInputPort("object", Signature.PORT_REQUIRED, FrameType.single())
+            .addOutputPort("string", Signature.PORT_REQUIRED, FrameType.single(String.class))
+            .disallowOtherPorts();
+    }
+
+    @Override
+    protected void onProcess() {
+        FrameValue objectFrame = getConnectedInputPort("object").pullFrame().asFrameValue();
+        // NOTE(review): getValue().toString() throws NPE if the incoming frame holds
+        // a null value — confirm upstream filters never emit null.
+        String outStr = objectFrame.getValue().toString();
+        OutputPort outPort = getConnectedOutputPort("string");
+        FrameValue stringFrame = outPort.fetchAvailableFrame(null).asFrameValue();
+        stringFrame.setValue(outStr);
+        outPort.pushFrame(stringFrame);
+    }
+
+}
+
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/TransformUtils.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/TransformUtils.java
new file mode 100644
index 0000000..8dd1949
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/TransformUtils.java
@@ -0,0 +1,78 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package androidx.media.filterpacks.transform;
+
+import androidx.media.filterfw.Frame;
+import androidx.media.filterfw.FrameImage2D;
+import androidx.media.filterfw.FrameType;
+import androidx.media.filterfw.TextureSource;
+
+import java.util.Arrays;
+
+/** Internal class that contains utility functions used by the transform filters. **/
+class TransformUtils {
+
+    /**
+     * Returns the smallest power of two that is greater than or equal to x.
+     * Assumes x > 0 and that the result does not overflow an int.
+     */
+    public static int powOf2(int x) {
+        --x;
+        // Fill with 1s
+        x |= x >> 1;
+        x |= x >> 2;
+        x |= x >> 4;
+        x |= x >> 8;
+        x |= x >> 16;
+        // Next int is now pow-of-2
+        return x + 1;
+    }
+
+    /**
+     * Returns a frame suitable for mipmapping, sized to the given dimensions rounded
+     * up to powers of two. Reuses (resizing if necessary) the frame passed in as
+     * current, or allocates a new GPU-readable/writable frame if current is null.
+     */
+    public static FrameImage2D makeMipMappedFrame(FrameImage2D current, int[] dimensions) {
+        // Note: Future versions of GLES will support NPOT mipmapping. When these become more
+        // widely used, we can add a check here to disable frame expansion on such devices.
+        int[] pow2Dims = new int[] { powOf2(dimensions[0]), powOf2(dimensions[1]) };
+        if (current == null) {
+            FrameType imageType = FrameType.image2D(FrameType.ELEMENT_RGBA8888,
+                                                    FrameType.READ_GPU | FrameType.WRITE_GPU);
+            current = Frame.create(imageType, pow2Dims).asFrameImage2D();
+        } else if (!Arrays.equals(pow2Dims, current.getDimensions())) {
+            // Fix: compare against the expanded (pow-of-2) dimensions, not the raw input
+            // dimensions. The frame is always allocated at pow-of-2 size, so comparing
+            // against the raw dimensions triggered a redundant resize on every call
+            // whenever the input was not already a power of two.
+            current.resize(pow2Dims);
+        }
+        return current;
+    }
+
+    /**
+     * Returns a scratch frame of exactly the given dimensions, reusing (resizing if
+     * necessary) the frame passed in as current, or allocating a new one if null.
+     */
+    public static FrameImage2D makeTempFrame(FrameImage2D current, int[] dimensions) {
+        if (current == null) {
+            FrameType imageType = FrameType.image2D(FrameType.ELEMENT_RGBA8888,
+                                                    FrameType.READ_GPU | FrameType.WRITE_GPU);
+            current = Frame.create(imageType, dimensions).asFrameImage2D();
+        } else if (!Arrays.equals(dimensions, current.getDimensions())) {
+            current.resize(dimensions);
+        }
+        return current;
+    }
+
+    /** Generates the mipmap chain for the frame's backing texture. */
+    public static void generateMipMaps(FrameImage2D frame) {
+        TextureSource texture = frame.lockTextureSource();
+        texture.generateMipmaps();
+        frame.unlock();
+    }
+
+    /** Sets a GL texture parameter on the frame's backing texture. */
+    public static void setTextureParameter(FrameImage2D frame, int param, int value) {
+        TextureSource texture = frame.lockTextureSource();
+        texture.setParameter(param, value);
+        frame.unlock();
+    }
+
+}
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/ValueTarget.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/ValueTarget.java
new file mode 100644
index 0000000..e2db8af
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/ValueTarget.java
@@ -0,0 +1,81 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package androidx.media.filterpacks.base;
+
+import android.os.Handler;
+import android.os.Looper;
+
+import androidx.media.filterfw.*;
+
+/**
+ * Sink filter that delivers each value received on its "value" port to a
+ * registered {@link ValueListener} — either on the filter's processing thread,
+ * or, if requested, on the (Looper) thread that registered the listener.
+ */
+public final class ValueTarget extends Filter {
+
+    /** Callback invoked once per value frame received on the "value" port. */
+    public static interface ValueListener {
+        public void onReceivedValue(Object value);
+    }
+
+    private ValueListener mListener = null;  // null => incoming values are dropped
+    private Handler mHandler = null;         // non-null => deliver on registering thread
+
+    public ValueTarget(MffContext context, String name) {
+        super(context, name);
+    }
+
+    /**
+     * Registers the listener that receives incoming values. Must not be called
+     * while the filter's graph is running.
+     *
+     * @param listener the callback to invoke for each received value.
+     * @param onCallerThread if true, callbacks are posted to the calling thread
+     *     (which must have a Looper); otherwise they run on the processing thread.
+     * @throws IllegalStateException if the filter is running.
+     * @throws IllegalArgumentException if onCallerThread is true and the calling
+     *     thread has no Looper.
+     */
+    public void setListener(ValueListener listener, boolean onCallerThread) {
+        if (isRunning()) {
+            throw new IllegalStateException("Attempting to bind filter to callback while it is "
+                + "running!");
+        }
+        mListener = listener;
+        if (onCallerThread) {
+            if (Looper.myLooper() == null) {
+                throw new IllegalArgumentException("Attempting to set callback on thread which "
+                    + "has no looper!");
+            }
+            // Handler is bound to the caller's Looper; used to marshal callbacks there.
+            mHandler = new Handler();
+        }
+    }
+
+    @Override
+    public Signature getSignature() {
+        return new Signature()
+            .addInputPort("value", Signature.PORT_REQUIRED, FrameType.single())
+            .disallowOtherPorts();
+    }
+
+    @Override
+    protected void onProcess() {
+        FrameValue valueFrame = getConnectedInputPort("value").pullFrame().asFrameValue();
+        if (mListener != null) {
+            if (mHandler != null) {
+                postValueToUiThread(valueFrame.getValue());
+            } else {
+                mListener.onReceivedValue(valueFrame.getValue());
+            }
+        }
+    }
+
+    // Posts the value to the handler captured in setListener(). Despite the name,
+    // this is whatever thread registered the listener — not necessarily the UI thread.
+    private void postValueToUiThread(final Object value) {
+        mHandler.post(new Runnable() {
+            @Override
+            public void run() {
+                mListener.onReceivedValue(value);
+            }
+        });
+    }
+
+}
+
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/VariableSource.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/VariableSource.java
new file mode 100644
index 0000000..69060cb
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/VariableSource.java
@@ -0,0 +1,60 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package androidx.media.filterpacks.base;
+
+import androidx.media.filterfw.*;
+
+// TODO: Rename back to ValueSource? Seems to make more sense even if we use it as a Variable
+// in some contexts.
+/**
+ * Source filter that emits the value last set via {@link #setValue(Object)} on its
+ * "value" output port each time it is processed. The value may be updated at any
+ * time from any thread; each process call pushes the value current at that moment.
+ */
+public final class VariableSource extends Filter {
+
+    private Object mValue = null;           // guarded by "this"
+    private OutputPort mOutputPort = null;  // cached in onPrepare()
+
+    public VariableSource(MffContext context, String name) {
+        super(context, name);
+    }
+
+    /** Sets the value emitted on subsequent process calls. Thread-safe. */
+    public synchronized void setValue(Object value) {
+        mValue = value;
+    }
+
+    /** Returns the most recently set value (may be null). Thread-safe. */
+    public synchronized Object getValue() {
+        return mValue;
+    }
+
+    @Override
+    public Signature getSignature() {
+        return new Signature()
+            .addOutputPort("value", Signature.PORT_REQUIRED, FrameType.single())
+            .disallowOtherPorts();
+    }
+
+    @Override
+    protected void onPrepare() {
+        mOutputPort = getConnectedOutputPort("value");
+    }
+
+    // Synchronized so the value cannot change between being read and pushed.
+    @Override
+    protected synchronized void onProcess() {
+        FrameValue frame = mOutputPort.fetchAvailableFrame(null).asFrameValue();
+        frame.setValue(mValue);
+        mOutputPort.pushFrame(frame);
+    }
+
+}
+
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/ViewFilter.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/ViewFilter.java
new file mode 100644
index 0000000..ddb7222
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/ViewFilter.java
@@ -0,0 +1,159 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package androidx.media.filterfw;
+
+import android.graphics.Rect;
+import android.graphics.RectF;
+import android.view.View;
+
+/**
+ * Base class for filters that render their output into an Android {@link View}.
+ * Provides shared handling of the scale mode ("stretch"/"fit"/"fill"), an optional
+ * vertical flip, and helpers for computing the target rectangle and shader setup.
+ *
+ * TODO: Move this to filterpacks/base?
+ */
+public abstract class ViewFilter extends Filter {
+
+    public static final int SCALE_STRETCH = 1;  // ignore aspect ratio, fill the whole buffer
+    public static final int SCALE_FIT = 2;      // keep aspect ratio, letterbox/pillarbox
+    public static final int SCALE_FILL = 3;     // keep aspect ratio, crop the overflow
+
+    protected int mScaleMode = SCALE_FIT;
+    protected float[] mClearColor = new float[] { 0f, 0f, 0f, 1f };  // RGBA, defaults to opaque black
+    protected boolean mFlipVertically = true;
+
+    private String mRequestedScaleMode = null;  // last string seen on the "scaleMode" port
+
+    protected ViewFilter(MffContext context, String name) {
+        super(context, name);
+    }
+
+    /**
+     * Binds the filter to a view.
+     * View filters support visualizing data to a view. Check the specific filter documentation
+     * for details. The view may be bound only if the filter's graph is not running.
+     *
+     * @param view the view to bind to.
+     * @throws IllegalStateException if the method is called while the graph is running.
+     */
+    public void bindToView(View view) {
+        if (isRunning()) {
+            throw new IllegalStateException("Attempting to bind filter to view while it is "
+                + "running!");
+        }
+        onBindToView(view);
+    }
+
+    /**
+     * Sets the scale mode (one of SCALE_STRETCH, SCALE_FIT, SCALE_FILL).
+     *
+     * @throws IllegalStateException if called while the graph is running.
+     */
+    public void setScaleMode(int scaleMode) {
+        if (isRunning()) {
+            throw new IllegalStateException("Attempting to change scale mode while filter is "
+                + "running!");
+        }
+        mScaleMode = scaleMode;
+    }
+
+    @Override
+    public Signature getSignature() {
+        // Note: no disallowOtherPorts() — subclasses add their own ports on top.
+        return new Signature()
+            .addInputPort("scaleMode", Signature.PORT_OPTIONAL, FrameType.single(String.class))
+            .addInputPort("flip", Signature.PORT_OPTIONAL, FrameType.single(boolean.class));
+    }
+
+    /**
+     * Subclasses must override this method to bind their filter to the specified view.
+     *
+     * When this method is called, Filter implementations may assume that the graph is not
+     * currently running.
+     */
+    protected abstract void onBindToView(View view);
+
+    /**
+     * Computes the normalized (0..1) rectangle within the buffer at which a frame
+     * with frameRect's aspect ratio should be drawn, according to the current scale
+     * mode: SCALE_STRETCH covers the buffer exactly; SCALE_FIT keeps the whole frame
+     * visible inside the buffer; SCALE_FILL covers the buffer, letting the frame
+     * overflow on one axis. Returns an empty rectangle if the buffer has no area.
+     */
+    protected RectF getTargetRect(Rect frameRect, Rect bufferRect) {
+        RectF result = new RectF();
+        if (bufferRect.width() > 0 && bufferRect.height() > 0) {
+            float frameAR = (float)frameRect.width() / frameRect.height();
+            float bufferAR = (float)bufferRect.width() / bufferRect.height();
+            float relativeAR = bufferAR / frameAR;
+            switch (mScaleMode) {
+                case SCALE_STRETCH:
+                    result.set(0f, 0f, 1f, 1f);
+                    break;
+                case SCALE_FIT:
+                    // Shrink the target on one axis so the whole frame stays visible.
+                    if (relativeAR > 1.0f) {
+                        float x = 0.5f - 0.5f / relativeAR;
+                        float y = 0.0f;
+                        result.set(x, y, x + 1.0f / relativeAR, y + 1.0f);
+                    } else {
+                        float x = 0.0f;
+                        float y = 0.5f - 0.5f * relativeAR;
+                        result.set(x, y, x + 1.0f, y + relativeAR);
+                    }
+                    break;
+                case SCALE_FILL:
+                    // Expand the target beyond [0,1] on one axis so the buffer is covered.
+                    if (relativeAR > 1.0f) {
+                        float x = 0.0f;
+                        float y = 0.5f - 0.5f * relativeAR;
+                        result.set(x, y, x + 1.0f, y + relativeAR);
+                    } else {
+                        float x = 0.5f - 0.5f / relativeAR;
+                        float y = 0.0f;
+                        result.set(x, y, x + 1.0f / relativeAR, y + 1.0f);
+                    }
+                    break;
+            }
+        }
+        return result;
+    }
+
+    /**
+     * Wires the shared "scaleMode" and "flip" ports; subclasses call this from
+     * their onInputPortOpen() for ports they do not handle themselves.
+     */
+    protected void connectViewInputs(InputPort port) {
+        if (port.getName().equals("scaleMode")) {
+            port.bindToListener(mScaleModeListener);
+            port.setAutoPullEnabled(true);
+        } else if (port.getName().equals("flip")) {
+            port.bindToFieldNamed("mFlipVertically");
+            port.setAutoPullEnabled(true);
+        }
+    }
+
+    /** Applies target rect, clear color, and the optional vertical flip to the shader. */
+    protected void setupShader(ImageShader shader, Rect frameRect, Rect outputRect) {
+        shader.setTargetRect(getTargetRect(frameRect, outputRect));
+        shader.setClearsOutput(true);
+        shader.setClearColor(mClearColor);
+        if (mFlipVertically) {
+            // Negative height flips the source vertically.
+            shader.setSourceRect(0f, 1f, 1f, -1f);
+        }
+    }
+
+    // Translates "stretch"/"fit"/"fill" strings from the port into SCALE_* constants.
+    private InputPort.FrameListener mScaleModeListener = new InputPort.FrameListener() {
+        @Override
+        public void onFrameReceived(InputPort port, Frame frame) {
+            String scaleMode = (String)frame.asFrameValue().getValue();
+            if (!scaleMode.equals(mRequestedScaleMode)) {
+                mRequestedScaleMode = scaleMode;
+                if (scaleMode.equals("stretch")) {
+                    mScaleMode = SCALE_STRETCH;
+                } else if (scaleMode.equals("fit")) {
+                    mScaleMode = SCALE_FIT;
+                } else if (scaleMode.equals("fill")) {
+                    mScaleMode = SCALE_FILL;
+                } else {
+                    throw new RuntimeException("Unknown scale-mode '" + scaleMode + "'!");
+                }
+            }
+        }
+    };
+}
+
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/decoder/AudioSample.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/decoder/AudioSample.java
new file mode 100644
index 0000000..c7eec26
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/decoder/AudioSample.java
@@ -0,0 +1,31 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package androidx.media.filterfw.decoder;
+
+/**
+ * Immutable container for a chunk of decoded audio.
+ * NOTE(review): the encoding of {@code bytes} is not specified here —
+ * AudioTrackDecoder carries a TODO about confirming 16 bits per channel;
+ * verify before interpreting the data.
+ */
+public class AudioSample {
+
+    public final int sampleRate;    // sample rate (Hz, per MediaFormat.KEY_SAMPLE_RATE)
+    public final int channelCount;  // number of channels
+    public final byte[] bytes;      // raw decoded audio data (not defensively copied)
+
+    public AudioSample(int sampleRate, int channelCount, byte[] bytes) {
+        this.sampleRate = sampleRate;
+        this.channelCount = channelCount;
+        this.bytes = bytes;
+    }
+
+}
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/decoder/AudioTrackDecoder.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/decoder/AudioTrackDecoder.java
new file mode 100644
index 0000000..0219fd78
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/decoder/AudioTrackDecoder.java
@@ -0,0 +1,119 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package androidx.media.filterfw.decoder;
+
+import android.annotation.TargetApi;
+import android.media.MediaCodec;
+import android.media.MediaCodec.BufferInfo;
+import android.media.MediaFormat;
+
+import androidx.media.filterfw.FrameValue;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+
+/**
+ * {@link TrackDecoder} for decoding audio tracks. Decoded bytes are accumulated
+ * in an internal buffer and handed out in bulk via {@link #grabSample}.
+ *
+ * TODO: find out if we always get 16 bits per channel and document.
+ */
+@TargetApi(16)
+public class AudioTrackDecoder extends TrackDecoder {
+
+    private final ByteArrayOutputStream mAudioByteStream; // Guarded by mAudioByteStreamLock.
+    private final Object mAudioByteStreamLock;
+
+    private int mAudioSampleRate;            // from MediaFormat.KEY_SAMPLE_RATE
+    private int mAudioChannelCount;          // from MediaFormat.KEY_CHANNEL_COUNT
+    private long mAudioPresentationTimeUs;   // timestamp (us) of the first byte in the stream
+
+    /**
+     * @throws IllegalArgumentException if format does not describe an audio track.
+     */
+    public AudioTrackDecoder(int trackIndex, MediaFormat format, Listener listener) {
+        super(trackIndex, format, listener);
+
+        if (!DecoderUtil.isAudioFormat(format)) {
+            throw new IllegalArgumentException(
+                    "AudioTrackDecoder can only be used with audio formats");
+        }
+
+        mAudioByteStream = new ByteArrayOutputStream();
+        mAudioByteStreamLock = new Object();
+
+        mAudioSampleRate = format.getInteger(MediaFormat.KEY_SAMPLE_RATE);
+        mAudioChannelCount = format.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
+    }
+
+    /** Creates and configures a decoder for the track's MIME type (no output surface). */
+    @Override
+    protected MediaCodec initMediaCodec(MediaFormat format) {
+        MediaCodec mediaCodec = MediaCodec.createDecoderByType(
+                format.getString(MediaFormat.KEY_MIME));
+        mediaCodec.configure(format, null, null, 0);
+        return mediaCodec;
+    }
+
+    /**
+     * Appends one decoded output buffer to the internal byte stream, recording its
+     * presentation time if the stream was empty, then releases the codec buffer and
+     * notifies the listener. Always returns true (the buffer is always consumed).
+     */
+    @Override
+    protected boolean onDataAvailable(
+            MediaCodec codec, ByteBuffer[] buffers, int bufferIndex, BufferInfo info) {
+        ByteBuffer buffer = buffers[bufferIndex];
+        byte[] data = new byte[info.size];
+        buffer.position(info.offset);
+        buffer.get(data, 0, info.size);
+
+        synchronized (mAudioByteStreamLock) {
+            try {
+                if (mAudioByteStream.size() == 0 && data.length > 0) {
+                    // Remember when the (now oldest) data in the stream was presented.
+                    mAudioPresentationTimeUs = info.presentationTimeUs;
+                }
+
+                mAudioByteStream.write(data);
+            } catch (IOException e) {
+                // Just drop the audio sample.
+            }
+        }
+
+        buffer.clear();
+        codec.releaseOutputBuffer(bufferIndex, false);
+        notifyListener();
+        return true;
+    }
+
+    /**
+     * Fills the argument {@link FrameValue} with an audio sample containing the audio that was
+     * decoded since the last call of this method. The decoder's buffer is cleared as a result.
+     */
+    public void grabSample(FrameValue audioFrame) {
+        synchronized (mAudioByteStreamLock) {
+            if (audioFrame != null) {
+                AudioSample sample = new AudioSample(
+                        mAudioSampleRate, mAudioChannelCount, mAudioByteStream.toByteArray());
+                audioFrame.setValue(sample);
+                // Frame timestamps are in nanoseconds; convert from microseconds.
+                audioFrame.setTimestamp(mAudioPresentationTimeUs * 1000);
+            }
+            clearBuffer();  // safe: monitor is reentrant on this thread
+        }
+    }
+
+    /**
+     * Clears the decoder's buffer.
+     */
+    public void clearBuffer() {
+        synchronized (mAudioByteStreamLock) {
+            mAudioByteStream.reset();
+        }
+    }
+
+}
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/decoder/CpuVideoTrackDecoder.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/decoder/CpuVideoTrackDecoder.java
new file mode 100644
index 0000000..96f3059
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/decoder/CpuVideoTrackDecoder.java
@@ -0,0 +1,243 @@
+/*
+ * Copyright 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package androidx.media.filterfw.decoder;
+
+import android.annotation.TargetApi;
+import android.graphics.SurfaceTexture;
+import android.media.MediaCodec;
+import android.media.MediaCodec.BufferInfo;
+import android.media.MediaCodecInfo;
+import android.media.MediaCodecInfo.CodecCapabilities;
+import android.media.MediaCodecList;
+import android.media.MediaFormat;
+import android.util.SparseIntArray;
+import androidx.media.filterfw.ColorSpace;
+import androidx.media.filterfw.Frame;
+import androidx.media.filterfw.FrameImage2D;
+import androidx.media.filterfw.PixelUtils;
+
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.Set;
+import java.util.TreeMap;
+
+/**
+ * {@link TrackDecoder} that decodes a video track and renders the frames onto a
+ * {@link SurfaceTexture}.
+ *
+ * This implementation purely uses CPU based methods to decode and color-convert the frames.
+ */
+@TargetApi(16)
+public class CpuVideoTrackDecoder extends VideoTrackDecoder {
+
+    // Sentinel meaning the codec's output color format has not been read yet.
+    private static final int COLOR_FORMAT_UNSET = -1;
+
+    private final int mWidth;
+    private final int mHeight;
+
+    private int mColorFormat = COLOR_FORMAT_UNSET;
+    private long mCurrentPresentationTimeUs;
+    private ByteBuffer mDecodedBuffer;
+    private ByteBuffer mUnrotatedBytes;
+
+    protected CpuVideoTrackDecoder(int trackIndex, MediaFormat format, Listener listener) {
+        super(trackIndex, format, listener);
+
+        mWidth = format.getInteger(MediaFormat.KEY_WIDTH);
+        mHeight = format.getInteger(MediaFormat.KEY_HEIGHT);
+    }
+
+    @Override
+    protected MediaCodec initMediaCodec(MediaFormat format) {
+        // Find a codec for our video that can output to one of our supported color-spaces
+        MediaCodec mediaCodec = findDecoderCodec(format, new int[] {
+                CodecCapabilities.COLOR_Format32bitARGB8888,
+                CodecCapabilities.COLOR_FormatYUV420Planar});
+        if (mediaCodec == null) {
+            throw new RuntimeException(
+                    "Could not find a suitable decoder for format: " + format + "!");
+        }
+        mediaCodec.configure(format, null, null, 0);
+        return mediaCodec;
+    }
+
+    @Override
+    protected boolean onDataAvailable(
+            MediaCodec codec, ByteBuffer[] buffers, int bufferIndex, BufferInfo info) {
+
+        mCurrentPresentationTimeUs = info.presentationTimeUs;
+        mDecodedBuffer = buffers[bufferIndex];
+
+        // The output color format is only known once the codec has produced data, so read it
+        // lazily on the first available buffer.
+        if (mColorFormat == COLOR_FORMAT_UNSET) {
+            mColorFormat = codec.getOutputFormat().getInteger(MediaFormat.KEY_COLOR_FORMAT);
+        }
+
+        markFrameAvailable();
+        notifyListener();
+
+        // Wait for the grab before we release this buffer.
+        waitForFrameGrab();
+
+        codec.releaseOutputBuffer(bufferIndex, false);
+
+        return false;
+    }
+
+    @Override
+    protected void copyFrameDataTo(FrameImage2D outputVideoFrame, int rotation) {
+        // Calculate output dimensions: a 90-degree rotation swaps width and height.
+        int outputWidth = mWidth;
+        int outputHeight = mHeight;
+        if (needSwapDimension(rotation)) {
+            outputWidth = mHeight;
+            outputHeight = mWidth;
+        }
+
+        // Create output frame
+        outputVideoFrame.resize(new int[] {outputWidth, outputHeight});
+        outputVideoFrame.setTimestamp(mCurrentPresentationTimeUs * 1000);
+        ByteBuffer outBytes = outputVideoFrame.lockBytes(Frame.MODE_WRITE);
+
+        // Set data
+        if (rotation == MediaDecoder.ROTATE_NONE) {
+            convertImage(mDecodedBuffer, outBytes, mColorFormat, mWidth, mHeight);
+        } else {
+            if (mUnrotatedBytes == null) {
+                mUnrotatedBytes = ByteBuffer.allocateDirect(mWidth * mHeight * 4);
+            }
+            // TODO: This could be optimized by including the rotation in the color conversion.
+            convertImage(mDecodedBuffer, mUnrotatedBytes, mColorFormat, mWidth, mHeight);
+            copyRotate(mUnrotatedBytes, outBytes, rotation);
+        }
+        outputVideoFrame.unlock();
+    }
+
+    /**
+     * Copy the input data to the output data applying the specified rotation.
+     *
+     * @param input The input image data
+     * @param output Buffer for the output image data
+     * @param rotation The rotation to apply
+     */
+    private void copyRotate(ByteBuffer input, ByteBuffer output, int rotation) {
+        int offset;
+        int pixStride;
+        int rowStride;
+        switch (rotation) {
+            case MediaDecoder.ROTATE_NONE:
+                offset = 0;
+                pixStride = 1;
+                rowStride = mWidth;
+                break;
+            case MediaDecoder.ROTATE_90_LEFT:
+                offset = (mWidth - 1) * mHeight;
+                pixStride = -mHeight;
+                rowStride = 1;
+                break;
+            case MediaDecoder.ROTATE_90_RIGHT:
+                offset = mHeight - 1;
+                pixStride = mHeight;
+                rowStride = -1;
+                break;
+            case MediaDecoder.ROTATE_180:
+                offset = mWidth * mHeight - 1;
+                pixStride = -1;
+                rowStride = -mWidth;
+                break;
+            default:
+                throw new IllegalArgumentException("Unsupported rotation " + rotation + "!");
+        }
+        PixelUtils.copyPixels(input, output, mWidth, mHeight, offset, pixStride, rowStride);
+    }
+
+    /**
+     * Looks for a codec with the specified requirements.
+     *
+     * The set of codecs will be filtered down to those that meet the following requirements:
+     * <ol>
+     *   <li>The codec is a decoder.</li>
+     *   <li>The codec can decode a video of the specified format.</li>
+     *   <li>The codec can decode to one of the specified color formats.</li>
+     * </ol>
+     * If multiple codecs are found, the one with the preferred color-format is taken. Color format
+     * preference is determined by the order of their appearance in the color format array.
+     *
+     * @param format The format the codec must decode.
+     * @param requiredColorFormats Array of target color spaces ordered by preference.
+     * @return A codec that meets the requirements, or null if no such codec was found.
+     */
+    private static MediaCodec findDecoderCodec(MediaFormat format, int[] requiredColorFormats) {
+        TreeMap<Integer, String> candidateCodecs = new TreeMap<Integer, String>();
+        SparseIntArray colorPriorities = intArrayToPriorityMap(requiredColorFormats);
+        for (int i = 0; i < MediaCodecList.getCodecCount(); ++i) {
+            // Get next codec
+            MediaCodecInfo info = MediaCodecList.getCodecInfoAt(i);
+
+            // Check that this is a decoder
+            if (info.isEncoder()) {
+                continue;
+            }
+
+            // Check if this codec can decode the video in question
+            String requiredType = format.getString(MediaFormat.KEY_MIME);
+            String[] supportedTypes = info.getSupportedTypes();
+            Set<String> typeSet = new HashSet<String>(Arrays.asList(supportedTypes));
+
+            // Check if it can decode to one of the required color formats
+            if (typeSet.contains(requiredType)) {
+                CodecCapabilities capabilities = info.getCapabilitiesForType(requiredType);
+                for (int supportedColorFormat : capabilities.colorFormats) {
+                    if (colorPriorities.indexOfKey(supportedColorFormat) >= 0) {
+                        int priority = colorPriorities.get(supportedColorFormat);
+                        candidateCodecs.put(priority, info.getName());
+                    }
+                }
+            }
+        }
+
+        // Pick the best codec: the smallest key is the highest-preference color format.
+        if (candidateCodecs.isEmpty()) {
+            return null;
+        } else {
+            String bestCodec = candidateCodecs.firstEntry().getValue();
+            return MediaCodec.createByCodecName(bestCodec);
+        }
+    }
+
+    /**
+     * Maps each value to its index in the array, so lower indices denote higher preference.
+     */
+    private static SparseIntArray intArrayToPriorityMap(int[] values) {
+        SparseIntArray result = new SparseIntArray();
+        for (int priority = 0; priority < values.length; ++priority) {
+            result.append(values[priority], priority);
+        }
+        return result;
+    }
+
+    /**
+     * Color-converts the decoded image in {@code input} to RGBA8888 in {@code output}.
+     *
+     * @throws RuntimeException if {@code colorFormat} is not one of the supported formats.
+     */
+    private static void convertImage(
+            ByteBuffer input, ByteBuffer output, int colorFormat, int width, int height) {
+        switch (colorFormat) {
+            case CodecCapabilities.COLOR_Format32bitARGB8888:
+                ColorSpace.convertArgb8888ToRgba8888(input, output, width, height);
+                break;
+            case CodecCapabilities.COLOR_FormatYUV420Planar:
+                ColorSpace.convertYuv420pToRgba8888(input, output, width, height);
+                break;
+            default:
+                throw new RuntimeException("Unsupported color format: " + colorFormat + "!");
+        }
+    }
+
+}
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/decoder/DecoderUtil.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/decoder/DecoderUtil.java
new file mode 100644
index 0000000..ec0ead0
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/decoder/DecoderUtil.java
@@ -0,0 +1,33 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package androidx.media.filterfw.decoder;
+
+import android.annotation.TargetApi;
+import android.media.MediaFormat;
+
+@TargetApi(16)
+public class DecoderUtil {
+
+    /**
+     * Returns whether {@code format} describes an audio track (MIME type starts with "audio/").
+     */
+    public static boolean isAudioFormat(MediaFormat format) {
+        return format.getString(MediaFormat.KEY_MIME).startsWith("audio/");
+    }
+
+    /**
+     * Returns whether {@code format} describes a video track (MIME type starts with "video/").
+     */
+    public static boolean isVideoFormat(MediaFormat format) {
+        return format.getString(MediaFormat.KEY_MIME).startsWith("video/");
+    }
+
+    private DecoderUtil() {
+        // Static utility class; not meant to be instantiated.
+    }
+
+}
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/decoder/GpuVideoTrackDecoder.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/decoder/GpuVideoTrackDecoder.java
new file mode 100644
index 0000000..bbba9d8
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/decoder/GpuVideoTrackDecoder.java
@@ -0,0 +1,210 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package androidx.media.filterfw.decoder;
+
+import android.annotation.TargetApi;
+import android.graphics.SurfaceTexture;
+import android.graphics.SurfaceTexture.OnFrameAvailableListener;
+import android.media.MediaCodec;
+import android.media.MediaCodec.BufferInfo;
+import android.media.MediaFormat;
+import android.view.Surface;
+
+import androidx.media.filterfw.FrameImage2D;
+import androidx.media.filterfw.ImageShader;
+import androidx.media.filterfw.TextureSource;
+
+import java.nio.ByteBuffer;
+
+/**
+ * {@link TrackDecoder} that decodes a video track and renders the frames onto a
+ * {@link SurfaceTexture}.
+ *
+ * This implementation uses the GPU for image operations such as copying
+ * and color-space conversion.
+ */
+@TargetApi(16)
+public class GpuVideoTrackDecoder extends VideoTrackDecoder {
+
+    /**
+     * Identity fragment shader for external textures.
+     */
+    private static final String COPY_FRAGMENT_SHADER =
+            "#extension GL_OES_EGL_image_external : require\n" +
+            "precision mediump float;\n" +
+            "uniform samplerExternalOES tex_sampler_0;\n" +
+            "varying vec2 v_texcoord;\n" +
+            "void main() {\n" +
+            "  gl_FragColor = texture2D(tex_sampler_0, v_texcoord);\n" +
+            "}\n";
+
+    private final TextureSource mTextureSource;
+    private final SurfaceTexture mSurfaceTexture; // Access guarded by mFrameMonitor.
+    private final float[] mTransformMatrix;
+
+    private final int mOutputWidth;
+    private final int mOutputHeight;
+
+    private ImageShader mImageShader;
+
+    private long mCurrentPresentationTimeUs;
+
+    public GpuVideoTrackDecoder(
+            int trackIndex, MediaFormat format, Listener listener) {
+        super(trackIndex, format, listener);
+
+        // Create a surface texture to be used by the video track decoder.
+        mTextureSource = TextureSource.newExternalTexture();
+        mSurfaceTexture = new SurfaceTexture(mTextureSource.getTextureId());
+        mSurfaceTexture.detachFromGLContext();
+        mSurfaceTexture.setOnFrameAvailableListener(new OnFrameAvailableListener() {
+            @Override
+            public void onFrameAvailable(SurfaceTexture surfaceTexture) {
+                markFrameAvailable();
+            }
+        });
+
+        mOutputWidth = format.getInteger(MediaFormat.KEY_WIDTH);
+        mOutputHeight = format.getInteger(MediaFormat.KEY_HEIGHT);
+
+        mTransformMatrix = new float[16];
+    }
+
+    @Override
+    protected MediaCodec initMediaCodec(MediaFormat format) {
+        Surface surface = new Surface(mSurfaceTexture);
+        MediaCodec mediaCodec = MediaCodec.createDecoderByType(
+                format.getString(MediaFormat.KEY_MIME));
+        mediaCodec.configure(format, surface, null, 0);
+        surface.release();
+        return mediaCodec;
+    }
+
+    @Override
+    protected boolean onDataAvailable(
+            MediaCodec codec, ByteBuffer[] buffers, int bufferIndex, BufferInfo info) {
+        boolean textureAvailable = waitForFrameGrab();
+
+        mCurrentPresentationTimeUs = info.presentationTimeUs;
+
+        // Only render the next frame if we weren't interrupted.
+        codec.releaseOutputBuffer(bufferIndex, textureAvailable);
+
+        if (textureAvailable) {
+            if (updateTexture()) {
+                notifyListener();
+            }
+        }
+
+        return false;
+    }
+
+    /**
+     * Waits for the texture's {@link OnFrameAvailableListener} to be notified and then updates
+     * the internal {@link SurfaceTexture}.
+     *
+     * @return true if the texture was updated, false if the wait was interrupted.
+     */
+    private boolean updateTexture() {
+        // Wait for the frame we just released to appear in the texture.
+        synchronized (mFrameMonitor) {
+            try {
+                while (!mFrameAvailable) {
+                    mFrameMonitor.wait();
+                }
+                mSurfaceTexture.attachToGLContext(mTextureSource.getTextureId());
+                mSurfaceTexture.updateTexImage();
+                mSurfaceTexture.detachFromGLContext();
+                return true;
+            } catch (InterruptedException e) {
+                // Restore the interrupt status so callers up the stack can observe it.
+                Thread.currentThread().interrupt();
+                return false;
+            }
+        }
+    }
+
+    @Override
+    protected void copyFrameDataTo(FrameImage2D outputVideoFrame, int rotation) {
+        TextureSource targetTexture = TextureSource.newExternalTexture();
+        mSurfaceTexture.attachToGLContext(targetTexture.getTextureId());
+        mSurfaceTexture.getTransformMatrix(mTransformMatrix);
+
+        ImageShader imageShader = getImageShader();
+        imageShader.setSourceTransform(mTransformMatrix);
+
+        // A 90-degree rotation swaps the output width and height.
+        int outputWidth = mOutputWidth;
+        int outputHeight = mOutputHeight;
+        if (rotation != 0) {
+            float[] targetCoords = getRotationCoords(rotation);
+            imageShader.setTargetCoords(targetCoords);
+            if (needSwapDimension(rotation)) {
+                outputWidth = mOutputHeight;
+                outputHeight = mOutputWidth;
+            }
+        }
+        outputVideoFrame.resize(new int[] { outputWidth, outputHeight });
+        imageShader.process(
+                targetTexture,
+                outputVideoFrame.lockRenderTarget(),
+                outputWidth,
+                outputHeight);
+        outputVideoFrame.setTimestamp(mCurrentPresentationTimeUs * 1000);
+        outputVideoFrame.unlock();
+        targetTexture.release();
+
+        mSurfaceTexture.detachFromGLContext();
+    }
+
+    @Override
+    public void release() {
+        super.release();
+        synchronized (mFrameMonitor) {
+            mTextureSource.release();
+            mSurfaceTexture.release();
+        }
+    }
+
+    /*
+     * This method has to be called on the MFF processing thread.
+     */
+    private ImageShader getImageShader() {
+        if (mImageShader == null) {
+            mImageShader = new ImageShader(COPY_FRAGMENT_SHADER);
+            mImageShader.setTargetRect(0f, 1f, 1f, -1f);
+        }
+        return mImageShader;
+    }
+
+    /**
+     * Get the quad coords for rotation.
+     * @param rotation applied to the frame, value is one of
+     *   {ROTATE_NONE, ROTATE_90_RIGHT, ROTATE_180, ROTATE_90_LEFT}
+     * @return coords the calculated quad coords for the given rotation
+     */
+    private static float[] getRotationCoords(int rotation) {
+        switch (rotation) {
+            case MediaDecoder.ROTATE_90_RIGHT:
+                return new float[] { 0f, 0f, 0f, 1f, 1f, 0f, 1f, 1f };
+            case MediaDecoder.ROTATE_180:
+                return new float[] { 1f, 0f, 0f, 0f, 1f, 1f, 0f, 1f };
+            case MediaDecoder.ROTATE_90_LEFT:
+                return new float[] { 1f, 1f, 1f, 0f, 0f, 1f, 0f, 0f };
+            case MediaDecoder.ROTATE_NONE:
+                return new float[] { 0f, 1f, 1f, 1f, 0f, 0f, 1f, 0f };
+            default:
+                throw new IllegalArgumentException("Unsupported rotation angle.");
+        }
+    }
+
+}
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/decoder/MediaDecoder.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/decoder/MediaDecoder.java
new file mode 100644
index 0000000..aa57394
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/decoder/MediaDecoder.java
@@ -0,0 +1,426 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package androidx.media.filterfw.decoder;
+
+import android.annotation.TargetApi;
+import android.content.Context;
+import android.media.MediaExtractor;
+import android.media.MediaFormat;
+import android.media.MediaMetadataRetriever;
+import android.net.Uri;
+import android.os.Build;
+import android.util.Log;
+import androidx.media.filterfw.FrameImage2D;
+import androidx.media.filterfw.FrameValue;
+import androidx.media.filterfw.RenderTarget;
+
+import java.util.concurrent.LinkedBlockingQueue;
+
+@TargetApi(16)
+public class MediaDecoder implements
+        Runnable,
+        TrackDecoder.Listener {
+
+    public interface Listener {
+        /**
+         * Notifies a listener when a decoded video frame is available. The listener should use
+         * {@link MediaDecoder#grabVideoFrame(FrameImage2D, int)} to grab the video data for this
+         * frame.
+         */
+        void onVideoFrameAvailable();
+
+        /**
+         * Notifies a listener when one or more audio samples are available. The listener should use
+         * {@link MediaDecoder#grabAudioSamples(FrameValue)} to grab the audio samples.
+         */
+        void onAudioSamplesAvailable();
+
+        /**
+         * Notifies a listener that decoding has started. This method is called on the decoder
+         * thread.
+         */
+        void onDecodingStarted();
+
+        /**
+         * Notifies a listener that decoding has stopped. This method is called on the decoder
+         * thread.
+         */
+        void onDecodingStopped();
+
+        /**
+         * Notifies a listener that an error occurred. If an error occurs, {@link MediaDecoder} is
+         * stopped and no more events are reported to this {@link Listener}'s callbacks.
+         * This method is called on the decoder thread.
+         */
+        void onError(Exception e);
+    }
+
+    public static final int ROTATE_NONE = 0;
+    public static final int ROTATE_90_RIGHT = 90;
+    public static final int ROTATE_180 = 180;
+    public static final int ROTATE_90_LEFT = 270;
+
+    private static final String LOG_TAG = "MediaDecoder";
+    private static final boolean DEBUG = false;
+
+    private static final int MAX_EVENTS = 32;
+    private static final int EVENT_START = 0;
+    private static final int EVENT_STOP = 1;
+    private static final int EVENT_EOF = 2;
+
+    private final Listener mListener;
+    private final Uri mUri;
+    private final Context mContext;
+
+    private final LinkedBlockingQueue<Integer> mEventQueue;
+
+    private final Thread mDecoderThread;
+
+    private MediaExtractor mMediaExtractor;
+
+    private RenderTarget mRenderTarget;
+
+    private int mDefaultRotation;
+    private int mVideoTrackIndex;
+    private int mAudioTrackIndex;
+
+    private VideoTrackDecoder mVideoTrackDecoder;
+    private AudioTrackDecoder mAudioTrackDecoder;
+
+    private boolean mStarted;
+
+    private long mStartMicros;
+
+    private boolean mOpenGLEnabled = true;
+
+    private boolean mSignaledEndOfInput;
+    private boolean mSeenEndOfAudioOutput;
+    private boolean mSeenEndOfVideoOutput;
+
+    public MediaDecoder(Context context, Uri uri, Listener listener) {
+        this(context, uri, 0, listener);
+    }
+
+    public MediaDecoder(Context context, Uri uri, long startMicros, Listener listener) {
+        if (context == null) {
+            throw new NullPointerException("context cannot be null");
+        }
+        mContext = context;
+
+        if (uri == null) {
+            throw new NullPointerException("uri cannot be null");
+        }
+        mUri = uri;
+
+        if (startMicros < 0) {
+            throw new IllegalArgumentException("startMicros cannot be negative");
+        }
+        mStartMicros = startMicros;
+
+        if (listener == null) {
+            throw new NullPointerException("listener cannot be null");
+        }
+        mListener = listener;
+
+        mEventQueue = new LinkedBlockingQueue<Integer>(MAX_EVENTS);
+        mDecoderThread = new Thread(this);
+    }
+
+    /**
+     * Set whether decoder may use OpenGL for decoding.
+     *
+     * This must be called before {@link #start()}.
+     *
+     * @param enabled flag whether to enable OpenGL decoding (default is true).
+     */
+    public void setOpenGLEnabled(boolean enabled) {
+        // If event-queue already has events, we have started already.
+        if (mEventQueue.isEmpty()) {
+            mOpenGLEnabled = enabled;
+        } else {
+            throw new IllegalStateException(
+                    "Must call setOpenGLEnabled() before calling start()!");
+        }
+    }
+
+    /**
+     * Returns whether OpenGL is enabled for decoding.
+     *
+     * @return whether OpenGL is enabled for decoding.
+     */
+    public boolean isOpenGLEnabled() {
+        return mOpenGLEnabled;
+    }
+
+    /**
+     * Starts the decoder thread and begins decoding.
+     */
+    public void start() {
+        mEventQueue.offer(EVENT_START);
+        mDecoderThread.start();
+    }
+
+    /**
+     * Requests that decoding stop as soon as possible.
+     */
+    public void stop() {
+        stop(true);
+    }
+
+    /**
+     * @param manual true when stopping on user request (interrupts the decoder thread),
+     *   false when stopping because the end of the streams was reached.
+     */
+    private void stop(boolean manual) {
+        if (manual) {
+            mEventQueue.offer(EVENT_STOP);
+            mDecoderThread.interrupt();
+        } else {
+            mEventQueue.offer(EVENT_EOF);
+        }
+    }
+
+    @Override
+    public void run() {
+        Integer event;
+        try {
+            while (true) {
+                event = mEventQueue.poll();
+                boolean shouldStop = false;
+                if (event != null) {
+                    switch (event) {
+                        case EVENT_START:
+                            onStart();
+                            break;
+                        case EVENT_EOF:
+                            if (mVideoTrackDecoder != null) {
+                                mVideoTrackDecoder.waitForFrameGrab();
+                            }
+                            // once the last frame has been grabbed, fall through and stop
+                        case EVENT_STOP:
+                            onStop(true);
+                            shouldStop = true;
+                            break;
+                    }
+                } else if (mStarted) {
+                    decode();
+                }
+                if (shouldStop) {
+                    break;
+                }
+
+            }
+        } catch (Exception e) {
+            mListener.onError(e);
+            onStop(false);
+        }
+    }
+
+    /**
+     * Opens the media, selects the first video and audio tracks, creates the track decoders
+     * and seeks to the requested start position. Runs on the decoder thread.
+     */
+    private void onStart() throws Exception {
+        if (mOpenGLEnabled) {
+            getRenderTarget().focus();
+        }
+
+        mMediaExtractor = new MediaExtractor();
+        mMediaExtractor.setDataSource(mContext, mUri, null);
+
+        mVideoTrackIndex = -1;
+        mAudioTrackIndex = -1;
+
+        for (int i = 0; i < mMediaExtractor.getTrackCount(); i++) {
+            MediaFormat format = mMediaExtractor.getTrackFormat(i);
+            if (DEBUG) {
+                Log.i(LOG_TAG, "Uri " + mUri + ", track " + i + ": " + format);
+            }
+            if (DecoderUtil.isVideoFormat(format) && mVideoTrackIndex == -1) {
+                mVideoTrackIndex = i;
+            } else if (DecoderUtil.isAudioFormat(format) && mAudioTrackIndex == -1) {
+                mAudioTrackIndex = i;
+            }
+        }
+
+        if (mVideoTrackIndex == -1 && mAudioTrackIndex == -1) {
+            throw new IllegalArgumentException(
+                    "Couldn't find a video or audio track in the provided file");
+        }
+
+        if (mVideoTrackIndex != -1) {
+            MediaFormat videoFormat = mMediaExtractor.getTrackFormat(mVideoTrackIndex);
+            mVideoTrackDecoder = mOpenGLEnabled
+                    ? new GpuVideoTrackDecoder(mVideoTrackIndex, videoFormat, this)
+                    : new CpuVideoTrackDecoder(mVideoTrackIndex, videoFormat, this);
+            mVideoTrackDecoder.init();
+            mMediaExtractor.selectTrack(mVideoTrackIndex);
+            if (Build.VERSION.SDK_INT >= 17) {
+                retrieveDefaultRotation();
+            }
+        }
+
+        if (mAudioTrackIndex != -1) {
+            MediaFormat audioFormat = mMediaExtractor.getTrackFormat(mAudioTrackIndex);
+            mAudioTrackDecoder = new AudioTrackDecoder(mAudioTrackIndex, audioFormat, this);
+            mAudioTrackDecoder.init();
+            mMediaExtractor.selectTrack(mAudioTrackIndex);
+        }
+
+        if (mStartMicros > 0) {
+            mMediaExtractor.seekTo(mStartMicros, MediaExtractor.SEEK_TO_PREVIOUS_SYNC);
+        }
+
+        mStarted = true;
+        mListener.onDecodingStarted();
+    }
+
+    @TargetApi(17)
+    private void retrieveDefaultRotation() {
+        MediaMetadataRetriever metadataRetriever = new MediaMetadataRetriever();
+        try {
+            metadataRetriever.setDataSource(mContext, mUri);
+            String rotationString = metadataRetriever.extractMetadata(
+                    MediaMetadataRetriever.METADATA_KEY_VIDEO_ROTATION);
+            mDefaultRotation = rotationString == null ? 0 : Integer.parseInt(rotationString);
+        } finally {
+            // Always free the retriever's native resources, even if metadata extraction fails.
+            metadataRetriever.release();
+        }
+    }
+
+    /**
+     * Releases the extractor, track decoders and render target. Runs on the decoder thread.
+     */
+    private void onStop(boolean notifyListener) {
+        mMediaExtractor.release();
+        mMediaExtractor = null;
+
+        if (mVideoTrackDecoder != null) {
+            mVideoTrackDecoder.release();
+            mVideoTrackDecoder = null;
+        }
+
+        if (mAudioTrackDecoder != null) {
+            mAudioTrackDecoder.release();
+            mAudioTrackDecoder = null;
+        }
+
+        if (mOpenGLEnabled) {
+            if (mRenderTarget != null) {
+                getRenderTarget().release();
+            }
+            RenderTarget.focusNone();
+        }
+
+        mVideoTrackIndex = -1;
+        mAudioTrackIndex = -1;
+
+        mEventQueue.clear();
+        mStarted = false;
+        if (notifyListener) {
+            mListener.onDecodingStopped();
+        }
+    }
+
+    /**
+     * Feeds one extractor sample to the matching track decoder, signals end-of-input once the
+     * extractor is exhausted, and drains any pending decoder output.
+     */
+    private void decode() {
+        int sampleTrackIndex = mMediaExtractor.getSampleTrackIndex();
+        if (sampleTrackIndex >= 0) {
+            if (sampleTrackIndex == mVideoTrackIndex) {
+                mVideoTrackDecoder.feedInput(mMediaExtractor);
+            } else if (sampleTrackIndex == mAudioTrackIndex) {
+                mAudioTrackDecoder.feedInput(mMediaExtractor);
+            }
+        } else if (!mSignaledEndOfInput) {
+            if (mVideoTrackDecoder != null) {
+                mVideoTrackDecoder.signalEndOfInput();
+            }
+            if (mAudioTrackDecoder != null) {
+                mAudioTrackDecoder.signalEndOfInput();
+            }
+            mSignaledEndOfInput = true;
+        }
+
+        if (mVideoTrackDecoder != null) {
+            mVideoTrackDecoder.drainOutputBuffer();
+        }
+        if (mAudioTrackDecoder != null) {
+            mAudioTrackDecoder.drainOutputBuffer();
+        }
+    }
+
+    /**
+     * Fills the argument frame with the video data, using the rotation hint obtained from the
+     * file's metadata, if any.
+     *
+     * @see #grabVideoFrame(FrameImage2D, int)
+     */
+    public void grabVideoFrame(FrameImage2D outputVideoFrame) {
+        grabVideoFrame(outputVideoFrame, mDefaultRotation);
+    }
+
+    /**
+     * Fills the argument frame with the video data, the frame will be returned with the given
+     * rotation applied.
+     *
+     * @param outputVideoFrame the output video frame.
+     * @param videoRotation the rotation angle that is applied to the raw decoded frame.
+     *   Value is one of {ROTATE_NONE, ROTATE_90_RIGHT, ROTATE_180, ROTATE_90_LEFT}.
+     */
+    public void grabVideoFrame(FrameImage2D outputVideoFrame, int videoRotation) {
+        if (mVideoTrackDecoder != null && outputVideoFrame != null) {
+            mVideoTrackDecoder.grabFrame(outputVideoFrame, videoRotation);
+        }
+    }
+
+    /**
+     * Fills the argument frame with the audio data.
+     *
+     * @param outputAudioFrame the output audio frame.
+     */
+    public void grabAudioSamples(FrameValue outputAudioFrame) {
+        if (mAudioTrackDecoder != null) {
+            if (outputAudioFrame != null) {
+                mAudioTrackDecoder.grabSample(outputAudioFrame);
+            } else {
+                mAudioTrackDecoder.clearBuffer();
+            }
+        }
+    }
+
+    /**
+     * Gets the duration, in nanoseconds.
+     *
+     * @throws IllegalStateException if called before decoding has started.
+     */
+    public long getDuration() {
+        if (!mStarted) {
+            throw new IllegalStateException("MediaDecoder has not been started");
+        }
+
+        MediaFormat mediaFormat = mMediaExtractor.getTrackFormat(
+                mVideoTrackIndex != -1 ? mVideoTrackIndex : mAudioTrackIndex);
+        return mediaFormat.getLong(MediaFormat.KEY_DURATION) * 1000;
+    }
+
+    private RenderTarget getRenderTarget() {
+        if (mRenderTarget == null) {
+            mRenderTarget = RenderTarget.newTarget(1, 1);
+        }
+        return mRenderTarget;
+    }
+
+    @Override
+    public void onDecodedOutputAvailable(TrackDecoder decoder) {
+        if (decoder == mVideoTrackDecoder) {
+            mListener.onVideoFrameAvailable();
+        } else if (decoder == mAudioTrackDecoder) {
+            mListener.onAudioSamplesAvailable();
+        }
+    }
+
+    @Override
+    public void onEndOfStream(TrackDecoder decoder) {
+        if (decoder == mAudioTrackDecoder) {
+            mSeenEndOfAudioOutput = true;
+        } else if (decoder == mVideoTrackDecoder) {
+            mSeenEndOfVideoOutput = true;
+        }
+
+        // Stop automatically once every selected track has reached end of stream.
+        if ((mAudioTrackDecoder == null || mSeenEndOfAudioOutput)
+                && (mVideoTrackDecoder == null || mSeenEndOfVideoOutput)) {
+            stop(false);
+        }
+    }
+
+}
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/decoder/TrackDecoder.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/decoder/TrackDecoder.java
new file mode 100644
index 0000000..c81e8b4
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/decoder/TrackDecoder.java
@@ -0,0 +1,178 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package androidx.media.filterfw.decoder;
+
+import android.annotation.TargetApi;
+import android.media.MediaCodec;
+import android.media.MediaCodec.BufferInfo;
+import android.media.MediaExtractor;
+import android.media.MediaFormat;
+import android.util.Log;
+
+import java.nio.ByteBuffer;
+
+@TargetApi(16)
+abstract class TrackDecoder {
+
+    /**
+     * Receives notifications when decoded output is available and when the track
+     * reaches end-of-stream.
+     */
+    interface Listener {
+        void onDecodedOutputAvailable(TrackDecoder decoder);
+
+        void onEndOfStream(TrackDecoder decoder);
+    }
+
+    private static final String LOG_TAG = "TrackDecoder";
+
+    private static final long TIMEOUT_US = 50; // Timeout for en-queueing and de-queueing buffers.
+
+    // MediaCodec.dequeueInputBuffer() returns a negative value (INFO_TRY_AGAIN_LATER)
+    // when no input buffer is currently available.
+    private static final int NO_INPUT_BUFFER = -1;
+
+    private final int mTrackIndex;
+    private final MediaFormat mMediaFormat;
+    private final Listener mListener;
+
+    private MediaCodec mMediaCodec;
+    private MediaFormat mOutputFormat;
+
+    private ByteBuffer[] mCodecInputBuffers;
+    private ByteBuffer[] mCodecOutputBuffers;
+
+    // True when end-of-input has been signaled but the EOS marker could not be queued
+    // yet (no free input buffer); retried from drainOutputBuffer().
+    private boolean mShouldEnqueueEndOfStream;
+
+    /**
+     * @return a configured {@link MediaCodec}.
+     */
+    protected abstract MediaCodec initMediaCodec(MediaFormat format);
+
+    /**
+     * Called when decoded output is available. The implementer is responsible for releasing the
+     * assigned buffer.
+     *
+     * @return {@code true} if any further decoding should be attempted at the moment.
+     */
+    protected abstract boolean onDataAvailable(
+            MediaCodec codec, ByteBuffer[] buffers, int bufferIndex, BufferInfo info);
+
+    /**
+     * @param trackIndex index of the track this decoder reads from the extractor
+     * @param mediaFormat format of that track, must not be null
+     * @param listener receives output and end-of-stream notifications, must not be null
+     */
+    protected TrackDecoder(int trackIndex, MediaFormat mediaFormat, Listener listener) {
+        mTrackIndex = trackIndex;
+
+        if (mediaFormat == null) {
+            throw new NullPointerException("mediaFormat cannot be null");
+        }
+        mMediaFormat = mediaFormat;
+
+        if (listener == null) {
+            throw new NullPointerException("listener cannot be null");
+        }
+        mListener = listener;
+    }
+
+    /**
+     * Creates and starts the underlying codec. Must be called before any decoding.
+     */
+    public void init() {
+        mMediaCodec = initMediaCodec(mMediaFormat);
+        mMediaCodec.start();
+        mCodecInputBuffers = mMediaCodec.getInputBuffers();
+        mCodecOutputBuffers = mMediaCodec.getOutputBuffers();
+    }
+
+    /**
+     * Signals that no further input will be fed. The EOS marker is queued immediately if
+     * an input buffer is free, otherwise on a later drainOutputBuffer() call.
+     */
+    public void signalEndOfInput() {
+        mShouldEnqueueEndOfStream = true;
+        tryEnqueueEndOfStream();
+    }
+
+    /**
+     * Stops and releases the underlying codec, if one was created. Safe to call more
+     * than once.
+     */
+    public void release() {
+        if (mMediaCodec != null) {
+            mMediaCodec.stop();
+            mMediaCodec.release();
+            // Null out so a second release() call does not stop() an already-released
+            // codec, which would throw.
+            mMediaCodec = null;
+        }
+    }
+
+    protected MediaCodec getMediaCodec() {
+        return mMediaCodec;
+    }
+
+    // Notifies the listener that decoded output is ready to be consumed.
+    protected void notifyListener() {
+        mListener.onDecodedOutputAvailable(this);
+    }
+
+    /**
+     * Feeds one sample from the extractor into the codec, if an input buffer is free.
+     *
+     * @param mediaExtractor the extractor positioned at this track's next sample
+     * @return {@code true} if the extractor advanced and the next sample still belongs
+     *         to this track; {@code false} if no input buffer was free, the sample was
+     *         empty, or this track has no further samples.
+     */
+    public boolean feedInput(MediaExtractor mediaExtractor) {
+        int inputBufferIndex = mMediaCodec.dequeueInputBuffer(TIMEOUT_US);
+        if (inputBufferIndex != NO_INPUT_BUFFER) {
+            ByteBuffer destinationBuffer = mCodecInputBuffers[inputBufferIndex];
+            int sampleSize = mediaExtractor.readSampleData(destinationBuffer, 0);
+            // We don't expect to get a sample without any data, so this should never happen.
+            if (sampleSize < 0) {
+                Log.w(LOG_TAG, "Media extractor had sample but no data.");
+
+                // Signal the end of the track immediately anyway, using the buffer.
+                mMediaCodec.queueInputBuffer(
+                        inputBufferIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
+                return false;
+            }
+
+            // Declared at point of use (was hoisted to the top of the method with a
+            // never-read initializer of 0).
+            long presentationTimeUs = mediaExtractor.getSampleTime();
+            mMediaCodec.queueInputBuffer(
+                    inputBufferIndex,
+                    0,
+                    sampleSize,
+                    presentationTimeUs,
+                    0);
+
+            return mediaExtractor.advance()
+                    && mediaExtractor.getSampleTrackIndex() == mTrackIndex;
+        }
+        return false;
+    }
+
+    private void tryEnqueueEndOfStream() {
+        int inputBufferIndex = mMediaCodec.dequeueInputBuffer(TIMEOUT_US);
+        // We will always eventually have an input buffer, because we keep trying until the last
+        // decoded frame is output.
+        // The EoS does not need to be signaled if the application stops decoding.
+        if (inputBufferIndex != NO_INPUT_BUFFER) {
+            mMediaCodec.queueInputBuffer(
+                    inputBufferIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
+            mShouldEnqueueEndOfStream = false;
+        }
+    }
+
+    /**
+     * Dequeues and dispatches one output buffer, handling codec state-change results.
+     *
+     * @return {@code true} if more decoding should be attempted right away.
+     */
+    public boolean drainOutputBuffer() {
+        BufferInfo outputInfo = new BufferInfo();
+        int outputBufferIndex = mMediaCodec.dequeueOutputBuffer(outputInfo, TIMEOUT_US);
+
+        if ((outputInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
+            // The EOS buffer must still be returned to the codec; previously it was
+            // never released, leaking an output buffer.
+            if (outputBufferIndex >= 0) {
+                mMediaCodec.releaseOutputBuffer(outputBufferIndex, false);
+            }
+            mListener.onEndOfStream(this);
+            return false;
+        }
+        if (mShouldEnqueueEndOfStream) {
+            tryEnqueueEndOfStream();
+        }
+        if (outputBufferIndex >= 0) {
+            return onDataAvailable(
+                    mMediaCodec, mCodecOutputBuffers, outputBufferIndex, outputInfo);
+        } else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
+            mCodecOutputBuffers = mMediaCodec.getOutputBuffers();
+            return true;
+        } else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
+            mOutputFormat = mMediaCodec.getOutputFormat();
+            Log.d(LOG_TAG, "Output format has changed to " + mOutputFormat);
+            return true;
+        }
+        return false;
+    }
+
+}
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/decoder/VideoTrackDecoder.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/decoder/VideoTrackDecoder.java
new file mode 100644
index 0000000..06a4305
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/decoder/VideoTrackDecoder.java
@@ -0,0 +1,108 @@
+/*
+ * Copyright 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package androidx.media.filterfw.decoder;
+
+import android.annotation.TargetApi;
+import android.media.MediaFormat;
+import android.util.Log;
+
+import androidx.media.filterfw.FrameImage2D;
+
+/**
+ * Base class for all {@link TrackDecoder} classes that decode video.
+ */
+@TargetApi(16)
+public abstract class VideoTrackDecoder extends TrackDecoder {
+
+    private static final String LOG_TAG = "VideoTrackDecoder";
+
+    // Coordinates hand-off of a decoded frame between the decoder thread and the MFF
+    // thread: the decoder marks a frame available and waits, the MFF thread grabs it.
+    protected final Object mFrameMonitor = new Object();
+    protected volatile boolean mFrameAvailable; // Access guarded by mFrameMonitor.
+
+    protected VideoTrackDecoder(int trackIndex, MediaFormat format, Listener listener) {
+        super(trackIndex, format, listener);
+        if (!DecoderUtil.isVideoFormat(format)) {
+            throw new IllegalArgumentException(
+                    "VideoTrackDecoder can only be used with video formats");
+        }
+    }
+
+    /**
+     * Copies the pending decoded frame into the given output frame with the requested
+     * rotation, then releases it so the decoder thread can proceed. Logs a warning and
+     * does nothing if no frame is pending.
+     *
+     * @param outputVideoFrame the destination frame
+     * @param rotation one of the MediaDecoder.ROTATE_* values
+     */
+    public void grabFrame(FrameImage2D outputVideoFrame, int rotation) {
+        synchronized (mFrameMonitor) {
+            if (!mFrameAvailable) {
+                // Message previously referred to a nonexistent "onDecodedFrameAvailable()"
+                // callback; the client-facing notification is onVideoFrameAvailable().
+                Log.w(LOG_TAG, "frame is not ready - the caller has to wait for a corresponding " +
+                        "onVideoFrameAvailable() call");
+                return;
+            }
+
+            copyFrameDataTo(outputVideoFrame, rotation);
+
+            mFrameAvailable = false;
+            mFrameMonitor.notifyAll();
+        }
+    }
+
+
+    /**
+     * Waits for the frame to be picked up by the MFF thread, i.e. blocks until the
+     * {@link #grabFrame(FrameImage2D, int)}) method is called.
+     *
+     * @return {@code true} if the frame was grabbed; {@code false} if interrupted.
+     */
+    public boolean waitForFrameGrab() {
+        synchronized (mFrameMonitor) {
+            try {
+                while (mFrameAvailable) {
+                    mFrameMonitor.wait();
+                }
+                return true;
+            } catch (InterruptedException e) {
+                // Restore the interrupt status so callers up the stack can observe it
+                // (it was previously swallowed).
+                Thread.currentThread().interrupt();
+                return false;
+            }
+        }
+    }
+
+    // Called by the decoder thread to publish a newly decoded frame.
+    protected final void markFrameAvailable() {
+        synchronized (mFrameMonitor) {
+            mFrameAvailable = true;
+            mFrameMonitor.notifyAll();
+        }
+    }
+
+    /**
+     * @return if the frame dimension needs to be swapped,
+     *   i.e. (width,height) becomes (height, width)
+     */
+    protected static boolean needSwapDimension(int rotation) {
+        switch(rotation) {
+            case MediaDecoder.ROTATE_90_RIGHT:
+            case MediaDecoder.ROTATE_90_LEFT:
+                return true;
+            case MediaDecoder.ROTATE_NONE:
+            case MediaDecoder.ROTATE_180:
+                return false;
+            default:
+                throw new IllegalArgumentException("Unsupported rotation angle.");
+        }
+    }
+
+    /**
+     * Subclasses must implement this to copy the video frame data to an MFF frame.
+     *
+     * @param outputVideoFrame The destination frame
+     * @param rotation The desired rotation of the frame
+     */
+    protected abstract void copyFrameDataTo(FrameImage2D outputVideoFrame, int rotation);
+
+}
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/geometry/Quad.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/geometry/Quad.java
new file mode 100644
index 0000000..4035f7f
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/geometry/Quad.java
@@ -0,0 +1,346 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package androidx.media.filterfw.geometry;
+
+import android.annotation.SuppressLint;
+import android.graphics.Matrix;
+import android.graphics.PointF;
+import android.graphics.RectF;
+
+/**
+ * The Quad class specifies a (possibly affine transformed) rectangle.
+ *
+ * A Quad instance holds 4 points that define its shape. The points may represent any rectangle that
+ * has been transformed by an affine transformation. This means that Quads can represent translated,
+ * scaled, rotated and sheared/skewed rectangles. As such, Quads are restricted to the set of
+ * parallelograms.
+ *
+ * Each point in the Quad represents a specific corner of the Quad. These are top-left, top-right,
+ * bottom-left, and bottom-right. These labels allow mapping a transformed Quad back to an up-right
+ * Quad, with the point-to-point mapping well-defined. They do not necessarily indicate that e.g.
+ * the top-left corner is actually at the top-left of coordinate space.
+ */
+@SuppressLint("FloatMath")
+public class Quad {
+
+    private final PointF mTopLeft;
+    private final PointF mTopRight;
+    private final PointF mBottomLeft;
+    private final PointF mBottomRight;
+
+    /**
+     * Returns the unit Quad.
+     * The unit Quad has its top-left point at (0, 0) and bottom-right point at (1, 1).
+     * @return the unit Quad.
+     */
+    public static Quad unitQuad() {
+        return new Quad(0f, 0f, 1f, 0f, 0f, 1f, 1f, 1f);
+    }
+
+    /**
+     * Return a Quad from the specified rectangle.
+     *
+     * @param rect a RectF instance.
+     * @return Quad that represents the passed rectangle.
+     */
+    public static Quad fromRect(RectF rect) {
+        return new Quad(new PointF(rect.left, rect.top),
+                        new PointF(rect.right, rect.top),
+                        new PointF(rect.left, rect.bottom),
+                        new PointF(rect.right, rect.bottom));
+    }
+
+    /**
+     * Return a Quad from the specified rectangle coordinates.
+     *
+     * @param x the top left x coordinate
+     * @param y the top left y coordinate
+     * @param width the width of the rectangle
+     * @param height the height of the rectangle
+     * @return Quad that represents the passed rectangle.
+     */
+    public static Quad fromRect(float x, float y, float width, float height) {
+        return new Quad(new PointF(x, y),
+                        new PointF(x + width, y),
+                        new PointF(x, y + height),
+                        new PointF(x + width, y + height));
+    }
+
+    /**
+     * Return a Quad that spans the specified points and height.
+     *
+     * The returned Quad has the specified top-left and top-right points, and the specified height
+     * while maintaining 90 degree angles on all 4 corners.
+     *
+     * @param topLeft the top-left of the quad
+     * @param topRight the top-right of the quad
+     * @param height the height of the quad
+     * @return Quad that spans the specified points and height.
+     */
+    public static Quad fromLineAndHeight(PointF topLeft, PointF topRight, float height) {
+        PointF dp = new PointF(topRight.x - topLeft.x, topRight.y - topLeft.y);
+        float len = dp.length();
+        PointF np = new PointF(height * (dp.y / len), height * (dp.x / len));
+        PointF p2 = new PointF(topLeft.x - np.x, topLeft.y + np.y);
+        PointF p3 = new PointF(topRight.x - np.x, topRight.y + np.y);
+        return new Quad(topLeft, topRight, p2, p3);
+    }
+
+    /**
+     * Return a Quad that represents the specified rotated rectangle.
+     *
+     * The Quad is rotated counter-clockwise around its centroid.
+     *
+     * @param rect the source rectangle
+     * @param angle the angle to rotate the source rectangle in radians
+     * @return the Quad representing the source rectangle rotated by the given angle.
+     */
+    public static Quad fromRotatedRect(RectF rect, float angle) {
+        return Quad.fromRect(rect).rotated(angle);
+    }
+
+    /**
+     * Return a Quad that represents the specified transformed rectangle.
+     *
+     * The transform is applied by multiplying each point (x, y, 1) by the matrix.
+     *
+     * @param rect the source rectangle
+     * @param matrix the transformation matrix
+     * @return the Quad representing the source rectangle transformed by the matrix
+     */
+    public static Quad fromTransformedRect(RectF rect, Matrix matrix) {
+        return Quad.fromRect(rect).transformed(matrix);
+    }
+
+    /**
+     * Returns the transformation matrix to transform the source Quad to the target Quad.
+     *
+     * @param source the source quad
+     * @param target the target quad
+     * @return the transformation matrix to map source to target.
+     */
+    public static Matrix getTransform(Quad source, Quad target) {
+        // We only use the first 3 points as they sufficiently specify the transform
+        Matrix transform = new Matrix();
+        transform.setPolyToPoly(source.asCoords(), 0, target.asCoords(), 0, 3);
+        return transform;
+    }
+
+    /**
+     * The top-left point of the Quad.
+     * @return top-left point of the Quad.
+     */
+    public PointF topLeft() {
+        return mTopLeft;
+    }
+
+    /**
+     * The top-right point of the Quad.
+     * @return top-right point of the Quad.
+     */
+    public PointF topRight() {
+        return mTopRight;
+    }
+
+    /**
+     * The bottom-left point of the Quad.
+     * @return bottom-left point of the Quad.
+     */
+    public PointF bottomLeft() {
+        return mBottomLeft;
+    }
+
+    /**
+     * The bottom-right point of the Quad.
+     * @return bottom-right point of the Quad.
+     */
+    public PointF bottomRight() {
+        return mBottomRight;
+    }
+
+    /**
+     * Rotate the quad by the given angle.
+     *
+     * The Quad is rotated counter-clockwise around its centroid.
+     *
+     * @param angle the angle to rotate in radians
+     * @return the rotated Quad
+     */
+    public Quad rotated(float angle) {
+        PointF center = center();
+        float cosa = (float) Math.cos(angle);
+        float sina = (float) Math.sin(angle);
+
+        PointF topLeft = rotatePoint(topLeft(), center, cosa, sina);
+        PointF topRight = rotatePoint(topRight(), center, cosa, sina);
+        PointF bottomLeft = rotatePoint(bottomLeft(), center, cosa, sina);
+        PointF bottomRight = rotatePoint(bottomRight(), center, cosa, sina);
+
+        return new Quad(topLeft, topRight, bottomLeft, bottomRight);
+    }
+
+    /**
+     * Transform the quad with the given transformation matrix.
+     *
+     * The transform is applied by multiplying each point (x, y, 1) by the matrix.
+     *
+     * @param matrix the transformation matrix
+     * @return the transformed Quad
+     */
+    public Quad transformed(Matrix matrix) {
+        float[] points = asCoords();
+        matrix.mapPoints(points);
+        return new Quad(points);
+    }
+
+    /**
+     * Returns the centroid of the Quad.
+     *
+     * The centroid of the Quad is where the two inner diagonals connecting the opposite corners
+     * meet.
+     *
+     * @return the centroid of the Quad.
+     */
+    public PointF center() {
+        // As the diagonals bisect each other, we can simply return the center of one of the
+        // diagonals.
+        return new PointF((mTopLeft.x + mBottomRight.x) / 2f,
+                          (mTopLeft.y + mBottomRight.y) / 2f);
+    }
+
+    /**
+     * Returns the quad as a float-array of coordinates.
+     * The order of coordinates is top-left, top-right, bottom-left, bottom-right. This is the
+     * default order of coordinates used in ImageShaders, so this method can be used to bind
+     * an attribute to the Quad.
+     */
+    public float[] asCoords() {
+        return new float[] { mTopLeft.x, mTopLeft.y,
+                             mTopRight.x, mTopRight.y,
+                             mBottomLeft.x, mBottomLeft.y,
+                             mBottomRight.x, mBottomRight.y };
+    }
+
+    /**
+     * Grow the Quad outwards by the specified factor.
+     *
+     * This method moves the corner points of the Quad outward along the diagonals that connect
+     * them to the centroid. A factor of 1.0 moves the quad outwards by the distance of the corners
+     * to the centroid.
+     *
+     * @param factor the growth factor
+     * @return the Quad grown by the specified amount
+     */
+    public Quad grow(float factor) {
+        PointF pc = center();
+        return new Quad(factor * (mTopLeft.x - pc.x) + pc.x,
+                        factor * (mTopLeft.y - pc.y) + pc.y,
+                        factor * (mTopRight.x - pc.x) + pc.x,
+                        factor * (mTopRight.y - pc.y) + pc.y,
+                        factor * (mBottomLeft.x - pc.x) + pc.x,
+                        factor * (mBottomLeft.y - pc.y) + pc.y,
+                        factor * (mBottomRight.x - pc.x) + pc.x,
+                        factor * (mBottomRight.y - pc.y) + pc.y);
+    }
+
+    /**
+     * Scale the Quad by the specified factor.
+     *
+     * @param factor the scaling factor
+     * @return the Quad instance scaled by the specified factor.
+     */
+    public Quad scale(float factor) {
+        return new Quad(mTopLeft.x * factor, mTopLeft.y * factor,
+                        mTopRight.x * factor, mTopRight.y * factor,
+                        mBottomLeft.x * factor, mBottomLeft.y * factor,
+                        mBottomRight.x * factor, mBottomRight.y * factor);
+    }
+
+    /**
+     * Scale the Quad by the specified factors in the x and y factors.
+     *
+     * @param sx the x scaling factor
+     * @param sy the y scaling factor
+     * @return the Quad instance scaled by the specified factors.
+     */
+    public Quad scale2(float sx, float sy) {
+        return new Quad(mTopLeft.x * sx, mTopLeft.y * sy,
+                        mTopRight.x * sx, mTopRight.y * sy,
+                        mBottomLeft.x * sx, mBottomLeft.y * sy,
+                        mBottomRight.x * sx, mBottomRight.y * sy);
+    }
+
+    /**
+     * Returns the Quad's left-to-right edge.
+     *
+     * Returns a vector that goes from the Quad's top-left to top-right (or bottom-left to
+     * bottom-right).
+     *
+     * @return the edge vector as a PointF.
+     */
+    public PointF xEdge() {
+        return new PointF(mTopRight.x - mTopLeft.x, mTopRight.y - mTopLeft.y);
+    }
+
+    /**
+     * Returns the Quad's top-to-bottom edge.
+     *
+     * Returns a vector that goes from the Quad's top-left to bottom-left (or top-right to
+     * bottom-right).
+     *
+     * @return the edge vector as a PointF.
+     */
+    public PointF yEdge() {
+        return new PointF(mBottomLeft.x - mTopLeft.x, mBottomLeft.y - mTopLeft.y);
+    }
+
+    @Override
+    public String toString() {
+        return "Quad(" + mTopLeft.x + ", " + mTopLeft.y + ", "
+                       + mTopRight.x + ", " + mTopRight.y + ", "
+                       + mBottomLeft.x + ", " + mBottomLeft.y + ", "
+                       + mBottomRight.x + ", " + mBottomRight.y + ")";
+    }
+
+    private Quad(PointF topLeft, PointF topRight, PointF bottomLeft, PointF bottomRight) {
+        mTopLeft = topLeft;
+        mTopRight = topRight;
+        mBottomLeft = bottomLeft;
+        mBottomRight = bottomRight;
+    }
+
+    private Quad(float x0, float y0, float x1, float y1, float x2, float y2, float x3, float y3) {
+        mTopLeft = new PointF(x0, y0);
+        mTopRight = new PointF(x1, y1);
+        mBottomLeft = new PointF(x2, y2);
+        mBottomRight = new PointF(x3, y3);
+    }
+
+    private Quad(float[] points) {
+        mTopLeft = new PointF(points[0], points[1]);
+        mTopRight = new PointF(points[2], points[3]);
+        mBottomLeft = new PointF(points[4], points[5]);
+        mBottomRight = new PointF(points[6], points[7]);
+    }
+
+    private static PointF rotatePoint(PointF p, PointF c, float cosa, float sina) {
+        float x = (p.x - c.x) * cosa - (p.y - c.y) * sina + c.x;
+        float y = (p.x - c.x) * sina + (p.y - c.y) * cosa + c.y;
+        return new PointF(x,y);
+    }
+}
+
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/samples/simplecamera/AverageFilter.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/samples/simplecamera/AverageFilter.java
new file mode 100644
index 0000000..d873e0a
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/samples/simplecamera/AverageFilter.java
@@ -0,0 +1,72 @@
+/*
+ * Copyright 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+// Takes sharpness scores in RT and averages them over time
+
+package androidx.media.filterfw.samples.simplecamera;
+
+import android.util.Log;
+import androidx.media.filterfw.Filter;
+import androidx.media.filterfw.FrameType;
+import androidx.media.filterfw.FrameValue;
+import androidx.media.filterfw.MffContext;
+import androidx.media.filterfw.OutputPort;
+import androidx.media.filterfw.Signature;
+
+public class AverageFilter extends Filter {
+
+    private static final String TAG = "AverageFilter";
+    // Was a mutable static; the flag is read-only after class load, so make it final.
+    private static final boolean mLogVerbose = Log.isLoggable(TAG, Log.VERBOSE);
+
+    // Number of most-recent sharpness scores averaged together.
+    private static final int NUM_FRAMES = 5;
+    // Ring-buffer write index, always in [0, NUM_FRAMES). (Renamed from "counter" to
+    // follow the file's mName field convention.)
+    private int mCounter = 0;
+    // Ring buffer of the last NUM_FRAMES sharpness scores; zero-filled until warm, so
+    // the average ramps up over the first NUM_FRAMES frames.
+    private float[] mTemp = new float[NUM_FRAMES];
+
+    /**
+     * @param context the MFF context this filter runs in
+     * @param name the name of this filter instance
+     */
+    public AverageFilter(MffContext context, String name) {
+        super(context, name);
+    }
+
+    @Override
+    public Signature getSignature() {
+        FrameType floatT = FrameType.single(float.class);
+        return new Signature()
+        .addInputPort("sharpness", Signature.PORT_REQUIRED, floatT)
+        .addOutputPort("avg", Signature.PORT_REQUIRED, floatT)
+        .disallowOtherPorts();
+    }
+
+    @Override
+    protected void onProcess() {
+        FrameValue inFrameValue = getConnectedInputPort("sharpness").pullFrame().asFrameValue();
+        // mCounter is kept within [0, NUM_FRAMES) below, so the previous
+        // "counter < NUM_FRAMES && counter >= 0" guard was always true and is dropped.
+        mTemp[mCounter] = ((Float) inFrameValue.getValue()).floatValue();
+
+        mCounter = (mCounter + 1) % NUM_FRAMES;
+
+        float output = (mTemp[0] + mTemp[1] + mTemp[2] + mTemp[3] + mTemp[4]) / NUM_FRAMES;
+        if (mLogVerbose) Log.v(TAG, "Avg= " + output + "temp1= " + mTemp[0] + "temp2= " +
+                mTemp[1] + "temp3= " + mTemp[2] + "temp4=" + mTemp[3] + "temp5=" + mTemp[4]);
+
+        OutputPort outPort = getConnectedOutputPort("avg");
+        FrameValue outFrame = outPort.fetchAvailableFrame(null).asFrameValue();
+        outFrame.setValue(output);
+        outPort.pushFrame(outFrame);
+    }
+}
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/samples/simplecamera/AvgBrightnessFilter.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/samples/simplecamera/AvgBrightnessFilter.java
new file mode 100644
index 0000000..88cd44a
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/samples/simplecamera/AvgBrightnessFilter.java
@@ -0,0 +1,75 @@
+/*
+ * Copyright 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package androidx.media.filterfw.samples.simplecamera;
+
+import android.util.Log;
+import androidx.media.filterfw.Filter;
+import androidx.media.filterfw.Frame;
+import androidx.media.filterfw.FrameImage2D;
+import androidx.media.filterfw.FrameType;
+import androidx.media.filterfw.FrameValue;
+import androidx.media.filterfw.MffContext;
+import androidx.media.filterfw.OutputPort;
+import androidx.media.filterfw.Signature;
+
+import java.nio.ByteBuffer;
+
+public class AvgBrightnessFilter extends Filter {
+
+    private static final String TAG = "AvgBrightnessFilter";
+    // Was a mutable static; the flag is read-only after class load, so make it final.
+    private static final boolean mLogVerbose = Log.isLoggable(TAG, Log.VERBOSE);
+
+    /**
+     * @param context the MFF context this filter runs in
+     * @param name the name of this filter instance
+     */
+    public AvgBrightnessFilter(MffContext context, String name) {
+        super(context, name);
+    }
+
+    @Override
+    public Signature getSignature() {
+        FrameType imageIn = FrameType.image2D(FrameType.ELEMENT_RGBA8888, FrameType.READ_CPU);
+        FrameType floatT = FrameType.single(float.class);
+        return new Signature().addInputPort("image", Signature.PORT_REQUIRED, imageIn)
+                .addOutputPort("brightnessRating", Signature.PORT_OPTIONAL, floatT)
+                .disallowOtherPorts();
+
+    }
+
+    @Override
+    protected void onProcess() {
+        FrameImage2D inputImage = getConnectedInputPort("image").pullFrame().asFrameImage2D();
+
+        // Lock the pixels only for the duration of the native call.
+        ByteBuffer inputBuffer = inputImage.lockBytes(Frame.MODE_READ);
+        float brightness =
+                brightnessOperator(inputImage.getWidth(), inputImage.getHeight(), inputBuffer);
+        inputImage.unlock();
+
+        // Log label fixed: it previously said "contrastRatio" for a brightness value.
+        if (mLogVerbose) Log.v(TAG, "brightness: " + brightness);
+
+        OutputPort brightnessPort = getConnectedOutputPort("brightnessRating");
+        FrameValue brightnessOutFrame = brightnessPort.fetchAvailableFrame(null).asFrameValue();
+        brightnessOutFrame.setValue(brightness);
+        brightnessPort.pushFrame(brightnessOutFrame);
+    }
+
+    // Implemented in smartcamera_jni; presumably computes the average brightness of the
+    // RGBA8888 image — confirm against the JNI source.
+    private static native float brightnessOperator(int width, int height, ByteBuffer imageBuffer);
+
+    static {
+        System.loadLibrary("smartcamera_jni");
+    }
+
+}
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/samples/simplecamera/CSVWriterFilter.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/samples/simplecamera/CSVWriterFilter.java
new file mode 100644
index 0000000..ca16c27
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/samples/simplecamera/CSVWriterFilter.java
@@ -0,0 +1,147 @@
+/*
+ * Copyright 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package androidx.media.filterfw.samples.simplecamera;
+
+import android.util.Log;
+
+import java.io.BufferedWriter;
+import java.io.FileWriter;
+import java.io.IOException;
+
+import androidx.media.filterfw.Filter;
+import androidx.media.filterfw.FrameBuffer2D;
+import androidx.media.filterfw.FrameImage2D;
+import androidx.media.filterfw.FrameType;
+import androidx.media.filterfw.FrameValue;
+import androidx.media.filterfw.MffContext;
+import androidx.media.filterfw.OutputPort;
+import androidx.media.filterfw.Signature;
+
+
+/**
+ * Sink filter that appends one CSV row per processed frame, combining the
+ * image quality ratings produced by the other SmartCamera filters. The file
+ * is created (with a header row) under the directory supplied on the
+ * "csvFilePath" input port.
+ */
+public class CSVWriterFilter extends Filter {
+
+    private static final String TAG = "CSVWriterFilter";
+    private static boolean mLogVerbose = Log.isLoggable(TAG, Log.VERBOSE);
+    // True until this instance has (re)written the header, truncating any file
+    // left over from a previous run.
+    private boolean mFirstTime = true;
+    private final static int NUM_FRAMES = 3;
+    // Appended to the "csvFilePath" directory to form the full output path.
+    private final String mFileName = "/CSVFile.csv";
+    private static final String CSV_HEADER =
+            "FileName,Sharpness,OverExposure,UnderExposure,Colorfulness," +
+            "ContrastRating,Brightness,Motion";
+
+    public CSVWriterFilter(MffContext context, String name) {
+
+        super(context, name);
+    }
+
+    /**
+     * All inputs are required: six float quality ratings, a float[3] motion
+     * vector, the image file name, and the CSV output directory.
+     */
+    @Override
+    public Signature getSignature() {
+        FrameType floatT = FrameType.single(float.class);
+        FrameType stringT = FrameType.single(String.class);
+        FrameType floatArrayT = FrameType.array(float.class);
+
+        return new Signature()
+                .addInputPort("sharpness", Signature.PORT_REQUIRED, floatT)
+                .addInputPort("overExposure", Signature.PORT_REQUIRED, floatT)
+                .addInputPort("underExposure", Signature.PORT_REQUIRED, floatT)
+                .addInputPort("colorfulness", Signature.PORT_REQUIRED, floatT)
+                .addInputPort("contrastRating", Signature.PORT_REQUIRED, floatT)
+                .addInputPort("brightness", Signature.PORT_REQUIRED, floatT)
+                .addInputPort("motionValues", Signature.PORT_REQUIRED, floatArrayT)
+                .addInputPort("imageFileName", Signature.PORT_REQUIRED, stringT)
+                .addInputPort("csvFilePath", Signature.PORT_REQUIRED, stringT)
+                .disallowOtherPorts();
+    }
+
+    /**
+     * Pulls one frame from every input and appends a CSV row for the image,
+     * writing the header first on the initial call.
+     */
+    @Override
+    protected void onProcess() {
+        Log.v(TAG,"in csv writer on process");
+        float sharpness = pullFloat("sharpness");
+        float overExposure = pullFloat("overExposure");
+        float underExposure = pullFloat("underExposure");
+        float colorfulness = pullFloat("colorfulness");
+        float contrast = pullFloat("contrastRating");
+        float brightness = pullFloat("brightness");
+
+        FrameValue motionValuesFrameValue =
+                getConnectedInputPort("motionValues").pullFrame().asFrameValue();
+        float[] motionValues = (float[]) motionValuesFrameValue.getValue();
+        // Magnitude of the acceleration vector — a camera-shake estimate.
+        float vectorAccel = (float) Math.sqrt(Math.pow(motionValues[0], 2) +
+                Math.pow(motionValues[1], 2) + Math.pow(motionValues[2], 2));
+
+        String imageFileName = pullString("imageFileName");
+        String csvFilePath = pullString("csvFilePath");
+
+        if (mFirstTime) {
+            // Was hard-coded to "/CSVFile.csv" here while the row write used
+            // mFileName; both now share mFileName so they cannot diverge.
+            writeLine(csvFilePath + mFileName, CSV_HEADER, false /* append */);
+            mFirstTime = false;
+        }
+
+        Log.v(TAG,"about to write to file");
+        String row = imageFileName + "," + sharpness + "," + overExposure + "," +
+                underExposure + "," + colorfulness + "," + contrast + "," + brightness +
+                "," + vectorAccel;
+        Log.v(TAG, "" + row);
+        writeLine(csvFilePath + mFileName, row, true /* append */);
+    }
+
+    // Pulls a single boxed Float from the named required input port.
+    private float pullFloat(String portName) {
+        FrameValue value = getConnectedInputPort(portName).pullFrame().asFrameValue();
+        return ((Float) value.getValue()).floatValue();
+    }
+
+    // Pulls a single String from the named required input port.
+    private String pullString(String portName) {
+        FrameValue value = getConnectedInputPort(portName).pullFrame().asFrameValue();
+        return (String) value.getValue();
+    }
+
+    // Writes one line (plus newline) to the CSV file. The writer is closed even
+    // on failure, and an IOException is always fatal — previously a failed
+    // header write was silently swallowed while a failed row write rethrew.
+    private void writeLine(String path, String line, boolean append) {
+        try {
+            BufferedWriter csvWriter = new BufferedWriter(new FileWriter(path, append));
+            try {
+                csvWriter.write(line);
+                csvWriter.newLine();
+            } finally {
+                csvWriter.close();
+            }
+        } catch (IOException e) {
+            e.printStackTrace();
+            throw new RuntimeException(e);
+        }
+    }
+}
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/samples/simplecamera/Camera2Source.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/samples/simplecamera/Camera2Source.java
new file mode 100644
index 0000000..fa0f995
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/samples/simplecamera/Camera2Source.java
@@ -0,0 +1,265 @@
+/*
+ * Copyright 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package androidx.media.filterfw.samples.simplecamera;
+
+import android.content.Context;
+import android.graphics.Bitmap;
+import android.graphics.ImageFormat;
+import android.hardware.camera2.CameraAccessException;
+import android.hardware.camera2.CameraCharacteristics;
+import android.hardware.camera2.CameraDevice;
+import android.hardware.camera2.CameraManager;
+import android.hardware.camera2.CaptureFailure;
+import android.hardware.camera2.CaptureRequest;
+import android.hardware.camera2.CaptureResult;
+import android.os.Handler;
+import android.renderscript.Allocation;
+import android.renderscript.Element;
+import android.renderscript.RenderScript;
+import android.renderscript.ScriptIntrinsicYuvToRGB;
+import android.renderscript.Type;
+import android.util.Log;
+import android.view.Surface;
+import com.android.ex.camera2.blocking.BlockingCameraManager;
+import com.android.ex.camera2.blocking.BlockingCameraManager.BlockingOpenException;
+import androidx.media.filterfw.Filter;
+import androidx.media.filterfw.Frame;
+import androidx.media.filterfw.FrameImage2D;
+import androidx.media.filterfw.FrameType;
+import androidx.media.filterfw.FrameValue;
+import androidx.media.filterfw.MffContext;
+import androidx.media.filterfw.OutputPort;
+import androidx.media.filterfw.Signature;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Source filter that streams preview frames from camera id "0" via the
+ * camera2 API into a YUV Allocation, converts them to RGBA with a
+ * RenderScript intrinsic, and pushes them out as bitmap-backed image frames
+ * together with the sensor orientation.
+ */
+public class Camera2Source extends Filter implements Allocation.OnBufferAvailableListener {
+
+    // Written by onBufferAvailable() (camera callback thread) and consumed by
+    // nextFrame() (filter thread); both sides synchronize on this instance.
+    private boolean mNewFrameAvailable = false;
+    private FrameType mOutputType;
+    private static final String TAG = "Camera2Source";
+    private CameraManager mCameraManager;
+    private CameraDevice mCamera;
+    private RenderScript mRS;
+    // Surface backed by mAllocationIn; the camera renders preview frames into it.
+    private Surface mSurface;
+    private CameraCharacteristics mProperties;
+    private CameraTestThread mLooperThread;
+    // Fixed preview size; the YUV input and RGBA output allocations match it.
+    private int mHeight = 480;
+    private int mWidth = 640;
+    private Allocation mAllocationIn;
+    private ScriptIntrinsicYuvToRGB rgbConverter;
+    private Allocation mAllocationOut;
+    private Bitmap mBitmap;
+
+    // Logs camera availability changes; not used for control flow.
+    class MyCameraListener extends CameraManager.AvailabilityListener {
+
+        @Override
+        public void onCameraAvailable(String cameraId) {
+            // Log-only implementation.
+            Log.v(TAG, "camera available to open");
+        }
+
+        @Override
+        public void onCameraUnavailable(String cameraId) {
+            // Log-only implementation.
+            Log.v(TAG, "camera unavailable to open");
+        }
+
+    }
+
+    // Logs per-capture results from the repeating preview request; the actual
+    // frame data arrives through onBufferAvailable(), not through this listener.
+    class MyCaptureListener extends CameraDevice.CaptureListener {
+
+        @Override
+        public void onCaptureCompleted(CameraDevice camera, CaptureRequest request,
+                CaptureResult result) {
+            // Log-only implementation.
+            Log.v(TAG, "in onCaptureComplete");
+
+        }
+
+        @Override
+        public void onCaptureFailed(CameraDevice camera, CaptureRequest request,
+                CaptureFailure failure) {
+            // Log-only implementation; failures are not propagated.
+            Log.v(TAG, "onCaptureFailed is being called");
+        }
+
+    }
+
+    public Camera2Source(MffContext context, String name) {
+        super(context, name);
+        mOutputType = FrameType.image2D(FrameType.ELEMENT_RGBA8888, FrameType.WRITE_GPU);
+
+        Context ctx = context.getApplicationContext();
+        mCameraManager = (CameraManager) ctx.getSystemService(Context.CAMERA_SERVICE);
+
+        mRS = RenderScript.create(context.getApplicationContext());
+    }
+
+    /**
+     * Outputs: required RGBA "video" frames, required float "orientation"
+     * (sensor orientation in degrees), optional long "timestamp".
+     */
+    @Override
+    public Signature getSignature() {
+        return new Signature()
+                .addOutputPort("timestamp", Signature.PORT_OPTIONAL, FrameType.single(long.class))
+                .addOutputPort("video", Signature.PORT_REQUIRED, mOutputType)
+                .addOutputPort("orientation", Signature.PORT_REQUIRED,
+                        FrameType.single(float.class))
+                .disallowOtherPorts();
+    }
+
+    @Override
+    protected void onClose() {
+        Log.v(TAG, "onClose being called");
+        try {
+            // Release in acquisition order: device, its surface, then the
+            // callback looper thread.
+            mCamera.close();
+            mSurface.release();
+            mLooperThread.close();
+        } catch (Exception e) {
+            // Best-effort teardown: failures are logged but not rethrown.
+            e.printStackTrace();
+        }
+    }
+
+    @Override
+    protected void onOpen() {
+        // A dedicated looper thread services all camera callbacks.
+        mLooperThread = new CameraTestThread();
+        Handler mHandler;
+        try {
+            mHandler = mLooperThread.start();
+        } catch (Exception e) {
+            e.printStackTrace();
+            throw new RuntimeException(e);
+        }
+
+        // Open camera "0" synchronously on the callback handler.
+        try {
+            String backCameraId = "0";
+            BlockingCameraManager blkManager = new BlockingCameraManager(mCameraManager);
+            mCamera = blkManager.openCamera(backCameraId, /*listener*/null, mHandler);
+        } catch (CameraAccessException e) {
+            e.printStackTrace();
+            throw new RuntimeException(e);
+        } catch (BlockingOpenException e) {
+            e.printStackTrace();
+            throw new RuntimeException(e);
+        }
+
+        // Build a YUV_420_888 input Allocation sized to the preview, and an
+        // intrinsic that converts it to RGBA.
+        Element ele = Element.createPixel(mRS, Element.DataType.UNSIGNED_8,
+                Element.DataKind.PIXEL_YUV);
+
+        rgbConverter = ScriptIntrinsicYuvToRGB.create(mRS,ele);
+        Type.Builder yuvBuilder = new Type.Builder(mRS,ele);
+
+        yuvBuilder.setYuvFormat(ImageFormat.YUV_420_888);
+        yuvBuilder.setX(mWidth);
+        yuvBuilder.setY(mHeight);
+        mAllocationIn = Allocation.createTyped(mRS, yuvBuilder.create(),
+                Allocation.USAGE_SCRIPT | Allocation.USAGE_IO_INPUT);
+        mSurface = mAllocationIn.getSurface();
+        mAllocationIn.setOnBufferAvailableListener(this);
+        rgbConverter.setInput(mAllocationIn);
+
+        // RGBA destination: an Allocation backed by the reusable bitmap.
+        mBitmap = Bitmap.createBitmap(mWidth, mHeight, Bitmap.Config.ARGB_8888);
+        mAllocationOut = Allocation.createFromBitmap(mRS, mBitmap);
+
+
+        Log.v(TAG, "mcamera: " + mCamera);
+
+        // Point a repeating preview request at the allocation's surface.
+        List<Surface> surfaces = new ArrayList<Surface>();
+        surfaces.add(mSurface);
+        CaptureRequest.Builder mCaptureRequest = null;
+        try {
+            mCamera.configureOutputs(surfaces);
+            mCaptureRequest = mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
+            mCaptureRequest.addTarget(mSurface);
+        } catch (CameraAccessException e) {
+            e.printStackTrace();
+            throw new RuntimeException(e);
+        }
+
+        try {
+            mCamera.setRepeatingRequest(mCaptureRequest.build(), new MyCaptureListener(),
+                    mHandler);
+        } catch (CameraAccessException e) {
+            e.printStackTrace();
+            throw new RuntimeException(e);
+        }
+        mProperties = null;
+        try {
+            mProperties = mCameraManager.getCameraCharacteristics(mCamera.getId());
+        } catch (CameraAccessException e) {
+            e.printStackTrace();
+            throw new RuntimeException(e);
+        }
+
+    }
+
+    @Override
+    protected void onProcess() {
+        Log.v(TAG, "on Process");
+        // nextFrame() sleeps the filter when no new camera buffer has arrived.
+        if (nextFrame()) {
+            OutputPort outPort = getConnectedOutputPort("video");
+
+            // Create a 2D frame that will hold the output
+            int[] dims = new int[] {
+                    mWidth, mHeight
+            };
+            FrameImage2D outputFrame = Frame.create(mOutputType, dims).asFrameImage2D();
+            // YUV -> RGBA via the intrinsic, then copy into the bitmap that
+            // backs the output frame.
+            rgbConverter.forEach(mAllocationOut);
+            mAllocationOut.copyTo(mBitmap);
+            outputFrame.setBitmap(mBitmap);
+            outPort.pushFrame(outputFrame);
+            outputFrame.release();
+
+            OutputPort orientationPort = getConnectedOutputPort("orientation");
+            FrameValue orientationFrame = orientationPort.fetchAvailableFrame(null).asFrameValue();
+
+            // FIXME: Hardcoded value because ORIENTATION returns null, Qualcomm
+            // bug
+            Integer orientation = mProperties.get(CameraCharacteristics.SENSOR_ORIENTATION);
+            float temp;
+            if (orientation != null) {
+                temp = orientation.floatValue();
+            } else {
+                temp = 90.0f;
+            }
+            orientationFrame.setValue(temp);
+            orientationPort.pushFrame(orientationFrame);
+        }
+    }
+
+    // Consumes the "new frame" flag. Returns true if a camera buffer arrived
+    // since the last call; otherwise puts the filter to sleep (wakeUp() in
+    // onBufferAvailable() resumes it) and returns false.
+    private synchronized boolean nextFrame() {
+        boolean frameAvailable = mNewFrameAvailable;
+        if (frameAvailable) {
+            mNewFrameAvailable = false;
+        } else {
+            enterSleepState();
+        }
+        return frameAvailable;
+    }
+
+    // Called on the camera callback thread when the camera has rendered a new
+    // buffer into mAllocationIn.
+    public void onBufferAvailable(Allocation a) {
+        Log.v(TAG, "on Buffer Available");
+        // Latch the buffer into the allocation before flagging it for onProcess().
+        a.ioReceive();
+        synchronized (this) {
+            mNewFrameAvailable = true;
+        }
+        wakeUp();
+    }
+
+
+}
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/samples/simplecamera/CameraTestThread.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/samples/simplecamera/CameraTestThread.java
new file mode 100644
index 0000000..8a0fced
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/samples/simplecamera/CameraTestThread.java
@@ -0,0 +1,93 @@
+/*
+ * Copyright 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package androidx.media.filterfw.samples.simplecamera;
+
+import android.os.ConditionVariable;
+import android.os.Handler;
+import android.os.Looper;
+import android.util.Log;
+
+import java.util.concurrent.TimeoutException;
+
+/**
+ * Camera test thread wrapper for handling camera callbacks
+ */
+public class CameraTestThread implements AutoCloseable {
+    private static final String TAG = "CameraTestThread";
+    private static final boolean VERBOSE = Log.isLoggable(TAG, Log.VERBOSE);
+    // Timeout for initializing looper and opening camera in Milliseconds.
+    private static final long WAIT_FOR_COMMAND_TO_COMPLETE = 5000;
+    private Looper mLooper = null;
+    private Handler mHandler = null;
+
+    /**
+     * Create and start a looper thread, return the Handler
+     */
+    public synchronized Handler start() throws Exception {
+        final ConditionVariable startDone = new ConditionVariable();
+        if (mLooper != null || mHandler !=null) {
+            Log.w(TAG, "Looper thread already started");
+            return mHandler;
+        }
+
+        new Thread() {
+            @Override
+            public void run() {
+                if (VERBOSE) Log.v(TAG, "start loopRun");
+                Looper.prepare();
+                // Save the looper so that we can terminate this thread
+                // after we are done with it.
+                mLooper = Looper.myLooper();
+                mHandler = new Handler();
+                startDone.open();
+                Looper.loop();
+                if (VERBOSE) Log.v(TAG, "createLooperThread: finished");
+            }
+        }.start();
+
+        if (VERBOSE) Log.v(TAG, "start waiting for looper");
+        if (!startDone.block(WAIT_FOR_COMMAND_TO_COMPLETE)) {
+            throw new TimeoutException("createLooperThread: start timeout");
+        }
+        return mHandler;
+    }
+
+    /**
+     * Terminate the looper thread
+     */
+    public synchronized void close() throws Exception {
+        if (mLooper == null || mHandler == null) {
+            Log.w(TAG, "Looper thread doesn't start yet");
+            return;
+        }
+
+        if (VERBOSE) Log.v(TAG, "Terminate looper thread");
+        mLooper.quit();
+        mLooper.getThread().join();
+        mLooper = null;
+        mHandler = null;
+    }
+
+    @Override
+    protected void finalize() throws Throwable {
+        try {
+            close();
+        } finally {
+            super.finalize();
+        }
+    }
+}
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/samples/simplecamera/ContrastRatioFilter.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/samples/simplecamera/ContrastRatioFilter.java
new file mode 100644
index 0000000..d918437
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/samples/simplecamera/ContrastRatioFilter.java
@@ -0,0 +1,79 @@
+/*
+ * Copyright 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package androidx.media.filterfw.samples.simplecamera;
+
+import android.util.Log;
+import androidx.media.filterfw.Filter;
+import androidx.media.filterfw.Frame;
+import androidx.media.filterfw.FrameImage2D;
+import androidx.media.filterfw.FrameType;
+import androidx.media.filterfw.FrameValue;
+import androidx.media.filterfw.MffContext;
+import androidx.media.filterfw.OutputPort;
+import androidx.media.filterfw.Signature;
+
+import java.nio.ByteBuffer;
+
+public class ContrastRatioFilter extends Filter {
+
+    private static final String TAG = "ContrastRatioFilter";
+    private static boolean mLogVerbose = Log.isLoggable(TAG, Log.VERBOSE);
+
+    public ContrastRatioFilter(MffContext context, String name) {
+        super(context, name);
+    }
+
+    /**
+     * One required CPU-readable RGBA8888 image input; one required float
+     * output ("contrastRatingToGoodness") carrying the contrast rating.
+     */
+    @Override
+    public Signature getSignature() {
+        FrameType imageType = FrameType.image2D(FrameType.ELEMENT_RGBA8888, FrameType.READ_CPU);
+        FrameType ratingType = FrameType.single(float.class);
+        return new Signature()
+                .addInputPort("image", Signature.PORT_REQUIRED, imageType)
+                .addOutputPort("contrastRatingToGoodness", Signature.PORT_REQUIRED, ratingType)
+                .disallowOtherPorts();
+    }
+
+    /**
+     * Runs the native contrast operator over the input frame and pushes the
+     * resulting rating downstream.
+     */
+    @Override
+    protected void onProcess() {
+        FrameImage2D image = getConnectedInputPort("image").pullFrame().asFrameImage2D();
+
+        // Lock the pixels only while the native operator runs.
+        ByteBuffer pixels = image.lockBytes(Frame.MODE_READ);
+        float contrastRatio = contrastOperator(image.getWidth(), image.getHeight(), pixels);
+        image.unlock();
+
+        if (mLogVerbose) Log.v(TAG, "contrastRatio: " + contrastRatio);
+
+        OutputPort ratingPort = getConnectedOutputPort("contrastRatingToGoodness");
+        FrameValue ratingFrame = ratingPort.fetchAvailableFrame(null).asFrameValue();
+        ratingFrame.setValue(contrastRatio);
+        ratingPort.pushFrame(ratingFrame);
+    }
+
+    // JNI entry point; implemented in libsmartcamera_jni (loaded below).
+    private static native float contrastOperator(int width, int height, ByteBuffer imageBuffer);
+
+    static {
+        System.loadLibrary("smartcamera_jni");
+    }
+
+}
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/samples/simplecamera/ExposureFilter.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/samples/simplecamera/ExposureFilter.java
new file mode 100644
index 0000000..6128718
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/samples/simplecamera/ExposureFilter.java
@@ -0,0 +1,111 @@
+/*
+ * Copyright 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package androidx.media.filterfw.samples.simplecamera;
+
+import android.util.Log;
+import androidx.media.filterfw.Filter;
+import androidx.media.filterfw.Frame;
+import androidx.media.filterfw.FrameImage2D;
+import androidx.media.filterfw.FrameType;
+import androidx.media.filterfw.FrameValue;
+import androidx.media.filterfw.MffContext;
+import androidx.media.filterfw.OutputPort;
+import androidx.media.filterfw.Signature;
+
+import java.nio.ByteBuffer;
+
+/**
+ * Measures over- and under-exposure of the input frame via native operators.
+ * The "...Rating" ports (required) carry the raw operator results; the
+ * "...Num" ports (optional) carry those results scaled by the pixel count.
+ */
+public class ExposureFilter extends Filter {
+
+    // Removed unused private fields mImageType and OVER_EXPOSURE_TOLERANCE:
+    // neither was referenced anywhere in the class.
+    private static final String TAG = "ExposureFilter";
+    private static boolean mLogVerbose = Log.isLoggable(TAG, Log.VERBOSE);
+
+    public ExposureFilter(MffContext context, String name) {
+        super(context, name);
+    }
+
+    @Override
+    public Signature getSignature() {
+        FrameType imageIn = FrameType.image2D(FrameType.ELEMENT_RGBA8888, FrameType.READ_GPU);
+        FrameType floatT = FrameType.single(float.class);
+        return new Signature().addInputPort("image", Signature.PORT_REQUIRED, imageIn)
+                .addOutputPort("overExposedNum", Signature.PORT_OPTIONAL, floatT)
+                .addOutputPort("overExposureRating", Signature.PORT_REQUIRED, floatT)
+                .addOutputPort("underExposedNum", Signature.PORT_OPTIONAL, floatT)
+                .addOutputPort("underExposureRating", Signature.PORT_REQUIRED, floatT)
+                .disallowOtherPorts();
+    }
+
+    @Override
+    protected void onProcess() {
+        FrameImage2D inputImage = getConnectedInputPort("image").pullFrame().asFrameImage2D();
+
+        // Both native operators read the same locked buffer; unlock after both.
+        ByteBuffer inputBuffer = inputImage.lockBytes(Frame.MODE_READ);
+        float overExposedPixels = overExposureOperator(inputImage.getWidth(),
+                                                       inputImage.getHeight(),
+                                                       inputBuffer);
+        float underExposedPixels = underExposureOperator(inputImage.getWidth(),
+                                                         inputImage.getHeight(),
+                                                         inputBuffer);
+        inputImage.unlock();
+
+        if (mLogVerbose) Log.v(TAG, "underExposedPixelCount: " + underExposedPixels);
+        // Optional port: scaled by the frame's pixel count, same as before.
+        pushToOptionalPort("underExposedNum",
+                underExposedPixels * inputImage.getWidth() * inputImage.getHeight());
+        pushValue(getConnectedOutputPort("underExposureRating"), underExposedPixels);
+
+        if (mLogVerbose) Log.v(TAG, "overExposedPixelCount: " + overExposedPixels);
+        pushToOptionalPort("overExposedNum",
+                overExposedPixels * inputImage.getWidth() * inputImage.getHeight());
+        pushValue(getConnectedOutputPort("overExposureRating"), overExposedPixels);
+    }
+
+    // Pushes a float to an optional output port, skipping it when unconnected
+    // (getConnectedOutputPort returns null for unconnected optional ports).
+    private void pushToOptionalPort(String portName, float value) {
+        OutputPort port = getConnectedOutputPort(portName);
+        if (port != null) {
+            pushValue(port, value);
+        }
+    }
+
+    // Fetches a value frame on the given (non-null) port and pushes the float.
+    private void pushValue(OutputPort port, float value) {
+        FrameValue frame = port.fetchAvailableFrame(null).asFrameValue();
+        frame.setValue(value);
+        port.pushFrame(frame);
+    }
+
+    // JNI entry points; implemented in libsmartcamera_jni (loaded below).
+    private static native float overExposureOperator(int width, int height,
+            ByteBuffer imageBuffer);
+    private static native float underExposureOperator(int width, int height,
+            ByteBuffer imageBuffer);
+
+    static {
+        System.loadLibrary("smartcamera_jni");
+    }
+}
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/samples/simplecamera/FaceSquareFilter.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/samples/simplecamera/FaceSquareFilter.java
new file mode 100644
index 0000000..c4a39e8
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/samples/simplecamera/FaceSquareFilter.java
@@ -0,0 +1,159 @@
+/*
+ * Copyright 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+// Takes in an array, returns the size of the array
+
+package androidx.media.filterfw.samples.simplecamera;
+
+import android.graphics.Rect;
+import android.hardware.Camera;
+import android.hardware.Camera.Face;
+import android.util.Log;
+import androidx.media.filterfw.Filter;
+import androidx.media.filterfw.Frame;
+import androidx.media.filterfw.FrameImage2D;
+import androidx.media.filterfw.FrameType;
+import androidx.media.filterfw.FrameValues;
+import androidx.media.filterfw.MffContext;
+import androidx.media.filterfw.OutputPort;
+import androidx.media.filterfw.Signature;
+
+import java.nio.ByteBuffer;
+
+public class FaceSquareFilter extends Filter {
+
+    private static final String TAG = "FaceSquareFilter";
+    private static boolean mLogVerbose = Log.isLoggable(TAG, Log.VERBOSE);
+
+    private static int FACE_X_RANGE = 2000;
+    private static int WIDTH_OFFSET = 1000;
+    private static int HEIGHT_OFFSET = 1000;
+
+    public FaceSquareFilter(MffContext context, String name) {
+        super(context, name);
+    }
+
+    @Override
+    public Signature getSignature() {
+        FrameType imageType = FrameType.buffer2D(FrameType.ELEMENT_RGBA8888);
+        FrameType facesType = FrameType.array(Camera.Face.class);
+        return new Signature()
+                .addInputPort("image", Signature.PORT_REQUIRED, imageType)
+                .addInputPort("faces", Signature.PORT_REQUIRED, facesType)
+                .addOutputPort("image", Signature.PORT_REQUIRED, imageType)
+                .disallowOtherPorts();
+    }
+
+    /**
+     * @see androidx.media.filterfw.Filter#onProcess()
+     */
+    @Override
+    protected void onProcess() {
+        // Get inputs
+        FrameImage2D imageFrame = getConnectedInputPort("image").pullFrame().asFrameImage2D();
+        FrameValues facesFrame = getConnectedInputPort("faces").pullFrame().asFrameValues();
+        Face[] faces = (Face[]) facesFrame.getValues();
+        int[] dims = imageFrame.getDimensions();
+        ByteBuffer buffer = imageFrame.lockBytes(Frame.MODE_WRITE);
+        byte[] pixels = buffer.array();
+
+        // For every face in faces, draw a white rect around the
+        // face following the rect member of the Face
+        drawBoxes(pixels, faces, dims);
+
+        imageFrame.unlock();
+
+        OutputPort outPort = getConnectedOutputPort("image");
+        outPort.pushFrame(imageFrame);
+    }
+
+    public void drawBoxes(byte[] pixels, Face[] faces, int[] dims) {
+        for(int i = 0; i < faces.length; i++) {
+            Rect tempRect = faces[i].rect;
+            int top = (tempRect.top+HEIGHT_OFFSET)*dims[1]/FACE_X_RANGE;
+            int bottom = (tempRect.bottom+HEIGHT_OFFSET)*dims[1]/FACE_X_RANGE;
+            int left = (tempRect.left+WIDTH_OFFSET)*dims[0]/FACE_X_RANGE;
+            int right = (tempRect.right+WIDTH_OFFSET)*dims[0]/FACE_X_RANGE;
+
+            if (top < 0) {
+                top = 0;
+            } else if (top > dims[1]) {
+                top = dims[1];
+            }
+            if (left < 0) {
+                left = 0;
+            } else if (left > dims[0]) {
+                left = dims[0];
+            }
+            if (bottom > dims[1]) {
+                bottom = dims[1];
+            } else if (bottom < 0) {
+                bottom = 0;
+            }
+            if (right > dims[0]) {
+                right = dims[0];
+            } else if (right < 0) {
+                right = 0;
+            }
+
+            for (int j = 0; j < (bottom - top); j++) {
+                // Left edge
+                if (left > 0 && top > 0) {
+                    pixels[ImageConstants.PIX_CHANNELS * (dims[0] * (top + j) + left) +
+                           ImageConstants.RED_OFFSET] = (byte) ImageConstants.MAX_BYTE;
+                    pixels[ImageConstants.PIX_CHANNELS * (dims[0] * (top + j) + left) +
+                           ImageConstants.GREEN_OFFSET] = (byte) ImageConstants.MAX_BYTE;
+                    pixels[ImageConstants.PIX_CHANNELS * (dims[0] * (top + j) + left) +
+                           ImageConstants.BLUE_OFFSET] = (byte) ImageConstants.MAX_BYTE;
+                }
+
+                // Right edge
+                if (right > 0 && top > 0) {
+                    pixels[ImageConstants.PIX_CHANNELS * (dims[0] * (top + j) + right) +
+                           ImageConstants.RED_OFFSET] = (byte) ImageConstants.MAX_BYTE;
+                    pixels[ImageConstants.PIX_CHANNELS * (dims[0] * (top + j) + right) +
+                           ImageConstants.GREEN_OFFSET] = (byte) ImageConstants.MAX_BYTE;
+                    pixels[ImageConstants.PIX_CHANNELS * (dims[0] * (top + j) + right) +
+                           ImageConstants.BLUE_OFFSET] = (byte) ImageConstants.MAX_BYTE;
+                }
+
+            }
+            for (int k = 0; k < (right - left); k++) {
+                // Top edge
+                if (top < dims[1]) {
+                    pixels[ImageConstants.PIX_CHANNELS * (dims[0] * top + left + k) +
+                           ImageConstants.RED_OFFSET] = (byte) ImageConstants.MAX_BYTE;
+                    pixels[ImageConstants.PIX_CHANNELS * (dims[0] * top + left + k) +
+                           ImageConstants.GREEN_OFFSET] = (byte) ImageConstants.MAX_BYTE;
+                    pixels[ImageConstants.PIX_CHANNELS * (dims[0] * top + left + k) +
+                           ImageConstants.BLUE_OFFSET] = (byte) ImageConstants.MAX_BYTE;
+
+                }
+                // Bottom edge
+                if (bottom < dims[1]) {
+                    pixels[ImageConstants.PIX_CHANNELS * (dims[0] * bottom + left + k) +
+                           ImageConstants.RED_OFFSET] = (byte) ImageConstants.MAX_BYTE;
+                    pixels[ImageConstants.PIX_CHANNELS * (dims[0] * bottom + left + k) +
+                           ImageConstants.GREEN_OFFSET] = (byte) ImageConstants.MAX_BYTE;
+                    pixels[ImageConstants.PIX_CHANNELS * (dims[0] * bottom + left + k) +
+                           ImageConstants.BLUE_OFFSET] = (byte) ImageConstants.MAX_BYTE;
+                }
+
+
+            }
+
+        }
+    }
+}
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/samples/simplecamera/FloatArrayToSizeFilter.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/samples/simplecamera/FloatArrayToSizeFilter.java
new file mode 100644
index 0000000..72452b3
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/samples/simplecamera/FloatArrayToSizeFilter.java
@@ -0,0 +1,68 @@
+/*
+ * Copyright 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+// Takes in an array, returns the size of the array
+
+package androidx.media.filterfw.samples.simplecamera;
+
+import android.util.Log;
+import androidx.media.filterfw.Filter;
+import androidx.media.filterfw.FrameType;
+import androidx.media.filterfw.FrameValue;
+import androidx.media.filterfw.MffContext;
+import androidx.media.filterfw.OutputPort;
+import androidx.media.filterfw.Signature;
+
+import java.lang.reflect.Array;
+
+public class FloatArrayToSizeFilter extends Filter {
+
+    private static final String TAG = "FloatArrayToSizeFilter";
+    private static boolean mLogVerbose = Log.isLoggable(TAG, Log.VERBOSE);
+    /**
+     * @param context
+     * @param name
+     */
+    public FloatArrayToSizeFilter(MffContext context, String name) {
+        super(context, name);
+    }
+
+    @Override
+    public Signature getSignature() {
+        FrameType intT = FrameType.single(int.class);
+        FrameType floatType = FrameType.array(float.class);
+
+        return new Signature()
+                .addInputPort("array", Signature.PORT_REQUIRED, floatType)
+                .addOutputPort("size", Signature.PORT_REQUIRED, intT)
+                .disallowOtherPorts();
+    }
+
+    /**
+     * @see androidx.media.filterfw.Filter#onProcess()
+     */
+    @Override
+    protected void onProcess() {
+        FrameValue arrayFrame = getConnectedInputPort("array").pullFrame().asFrameValues();
+        Object array = arrayFrame.getValue();
+        int size = Array.getLength(array);
+
+        OutputPort outPort = getConnectedOutputPort("size");
+        FrameValue sizeFrame = outPort.fetchAvailableFrame(null).asFrameValue();
+        sizeFrame.setValue(size);
+        outPort.pushFrame(sizeFrame);
+
+    }
+}
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/samples/simplecamera/FloatArrayToStrFilter.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/samples/simplecamera/FloatArrayToStrFilter.java
new file mode 100644
index 0000000..4606cfb
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/samples/simplecamera/FloatArrayToStrFilter.java
@@ -0,0 +1,69 @@
+/*
+ * Copyright 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+// Takes in a float array and converts it to its String representation
+
+package androidx.media.filterfw.samples.simplecamera;
+
+import android.util.Log;
+import androidx.media.filterfw.Filter;
+import androidx.media.filterfw.FrameType;
+import androidx.media.filterfw.FrameValue;
+import androidx.media.filterfw.MffContext;
+import androidx.media.filterfw.OutputPort;
+import androidx.media.filterfw.Signature;
+
+import java.lang.reflect.Array;
+import java.util.Arrays;
+
+public class FloatArrayToStrFilter extends Filter {
+
+    private static final String TAG = "FloatArrayToStrFilter";
+    private static boolean mLogVerbose = Log.isLoggable(TAG, Log.VERBOSE);
+
+    /**
+     * @param context
+     * @param name
+     */
+    public FloatArrayToStrFilter(MffContext context, String name) {
+        super(context, name);
+    }
+
+    @Override
+    public Signature getSignature() {
+        FrameType floatType = FrameType.array(float.class);
+
+        return new Signature()
+                .addInputPort("array", Signature.PORT_REQUIRED, floatType)
+                .addOutputPort("string", Signature.PORT_REQUIRED, FrameType.single(String.class))
+                .disallowOtherPorts();
+    }
+
+    /**
+     * @see androidx.media.filterfw.Filter#onProcess()
+     */
+    @Override
+    protected void onProcess() {
+        FrameValue arrayFrame = getConnectedInputPort("array").pullFrame().asFrameValues();
+        float[] array = (float[]) arrayFrame.getValue();
+        String outstr = Arrays.toString(array);
+
+        OutputPort outPort = getConnectedOutputPort("string");
+        FrameValue stringFrame = outPort.fetchAvailableFrame(null).asFrameValue();
+        stringFrame.setValue(outstr);
+        outPort.pushFrame(stringFrame);
+
+    }
+}
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/samples/simplecamera/IfElseFilter.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/samples/simplecamera/IfElseFilter.java
new file mode 100644
index 0000000..9553b75
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/samples/simplecamera/IfElseFilter.java
@@ -0,0 +1,70 @@
+/*
+ * Copyright 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package androidx.media.filterfw.samples.simplecamera;
+
+import android.util.Log;
+import androidx.media.filterfw.Filter;
+import androidx.media.filterfw.FrameBuffer2D;
+import androidx.media.filterfw.FrameImage2D;
+import androidx.media.filterfw.FrameType;
+import androidx.media.filterfw.FrameValue;
+import androidx.media.filterfw.MffContext;
+import androidx.media.filterfw.OutputPort;
+import androidx.media.filterfw.Signature;
+
+
+public class IfElseFilter extends Filter {
+
+    private static final String TAG = "IfElseFilter";
+    private static boolean mLogVerbose = Log.isLoggable(TAG, Log.VERBOSE);
+
+    public IfElseFilter(MffContext context, String name) {
+        super(context, name);
+    }
+
+    @Override
+    public Signature getSignature() {
+        FrameType imageIn = FrameType.image2D(FrameType.ELEMENT_RGBA8888, FrameType.READ_GPU);
+        FrameType videoIn = FrameType.image2D(FrameType.ELEMENT_RGBA8888, FrameType.READ_GPU);
+        FrameType imageOut = FrameType.image2D(FrameType.ELEMENT_RGBA8888, FrameType.WRITE_GPU);
+
+        return new Signature().addInputPort("falseResult", Signature.PORT_REQUIRED, imageIn)
+                .addInputPort("trueResult", Signature.PORT_REQUIRED, videoIn)
+                .addInputPort("condition", Signature.PORT_REQUIRED, FrameType.single(boolean.class))
+                .addOutputPort("output", Signature.PORT_REQUIRED, imageOut)
+                .disallowOtherPorts();
+    }
+
+    @Override
+    protected void onProcess() {
+        OutputPort outPort = getConnectedOutputPort("output");
+        FrameImage2D trueFrame = getConnectedInputPort("trueResult").pullFrame().asFrameImage2D();
+        FrameImage2D falseFrame = getConnectedInputPort("falseResult").pullFrame().asFrameImage2D();
+        FrameValue boolFrameValue = getConnectedInputPort("condition").pullFrame().asFrameValue();
+        boolean condition = (Boolean) boolFrameValue.getValue();
+        FrameBuffer2D outputFrame;
+        // If the condition is true, then we want to use the camera, else use the gallery
+        if (condition) {
+            outputFrame = trueFrame;
+        } else {
+            outputFrame = falseFrame;
+        }
+        outPort.pushFrame(outputFrame);
+
+    }
+
+}
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/samples/simplecamera/ImageConstants.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/samples/simplecamera/ImageConstants.java
new file mode 100644
index 0000000..cfae8fc
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/samples/simplecamera/ImageConstants.java
@@ -0,0 +1,26 @@
+/*
+ * Copyright 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package androidx.media.filterfw.samples.simplecamera;
+
/**
 * Pixel-layout constants for the 4-channel (RGBA) byte buffers used by the
 * SmartCamera sample filters.
 */
public final class ImageConstants {

    /** Maximum value of an unsigned byte channel. */
    public static final int MAX_BYTE = 255;
    /** Byte offset of the red channel within a pixel. */
    public static final int RED_OFFSET = 0;
    /** Byte offset of the green channel within a pixel. */
    public static final int GREEN_OFFSET = 1;
    /** Byte offset of the blue channel within a pixel. */
    public static final int BLUE_OFFSET = 2;
    /** Number of bytes (channels) per pixel. */
    public static final int PIX_CHANNELS = 4;

    // Utility class; not instantiable.
    private ImageConstants() {}
}
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/samples/simplecamera/ImageGoodnessFilter.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/samples/simplecamera/ImageGoodnessFilter.java
new file mode 100644
index 0000000..14ec762
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/samples/simplecamera/ImageGoodnessFilter.java
@@ -0,0 +1,410 @@
+/*
+ * Copyright 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+// Combines image-quality features into a score and rates the picture from "Awful" to "Great"
+
+package androidx.media.filterfw.samples.simplecamera;
+
+import android.graphics.Bitmap;
+import android.os.AsyncTask;
+import android.util.Log;
+import android.widget.ImageView;
+import androidx.media.filterfw.Filter;
+import androidx.media.filterfw.FrameImage2D;
+import androidx.media.filterfw.FrameType;
+import androidx.media.filterfw.FrameValue;
+import androidx.media.filterfw.MffContext;
+import androidx.media.filterfw.OutputPort;
+import androidx.media.filterfw.Signature;
+
+public class ImageGoodnessFilter extends Filter {
+
+    private static final String TAG = "ImageGoodnessFilter";
+    private static boolean mLogVerbose = Log.isLoggable(TAG, Log.VERBOSE);
+
+    private final static String GREAT = "Great Picture!";
+    private final static String GOOD = "Good Picture!";
+    private final static String OK = "Ok Picture";
+    private final static String BAD = "Bad Picture";
+    private final static String AWFUL = "Awful Picture";
+    private final static float SMALL_SCORE_INC = 0.25f;
+    private final static float BIG_SCORE_INC = 0.5f;
+    private final static float LOW_VARIANCE = 0.1f;
+    private final static float MEDIUM_VARIANCE = 10;
+    private final static float HIGH_VARIANCE = 100;
+    private float sharpnessMean = 0;
+    private float sharpnessVar = 0;
+    private float underExposureMean = 0;
+    private float underExposureVar = 0;
+    private float overExposureMean = 0;
+    private float overExposureVar = 0;
+    private float contrastMean = 0;
+    private float contrastVar = 0;
+    private float colorfulnessMean = 0;
+    private float colorfulnessVar = 0;
+    private float brightnessMean = 0;
+    private float brightnessVar = 0;
+
+    private float motionMean = 0;
+    private float scoreMean = 0;
+    private static final float DECAY = 0.03f;
+    /**
+     * @param context
+     * @param name
+     */
+    public ImageGoodnessFilter(MffContext context, String name) {
+        super(context, name);
+    }
+
+    @Override
+    public Signature getSignature() {
+        FrameType floatT = FrameType.single(float.class);
+        FrameType imageIn = FrameType.image2D(FrameType.ELEMENT_RGBA8888, FrameType.READ_GPU);
+
+        return new Signature()
+                .addInputPort("sharpness", Signature.PORT_REQUIRED, floatT)
+                .addInputPort("overExposure", Signature.PORT_REQUIRED, floatT)
+                .addInputPort("underExposure", Signature.PORT_REQUIRED, floatT)
+                .addInputPort("colorfulness", Signature.PORT_REQUIRED, floatT)
+                .addInputPort("contrastRating", Signature.PORT_REQUIRED, floatT)
+                .addInputPort("motionValues", Signature.PORT_REQUIRED, FrameType.array(float.class))
+                .addInputPort("brightness", Signature.PORT_REQUIRED, floatT)
+                .addInputPort("capturing", Signature.PORT_REQUIRED, FrameType.single(boolean.class))
+                .addInputPort("image", Signature.PORT_REQUIRED, imageIn)
+                .addOutputPort("goodOrBadPic", Signature.PORT_REQUIRED,
+                        FrameType.single(String.class))
+                .addOutputPort("score", Signature.PORT_OPTIONAL, floatT)
+                .disallowOtherPorts();
+    }
+
+    /**
+     * @see androidx.media.filterfw.Filter#onProcess()
+     */
+    @Override
+    protected void onProcess() {
+        FrameValue sharpnessFrameValue =
+                getConnectedInputPort("sharpness").pullFrame().asFrameValue();
+        float sharpness = ((Float)sharpnessFrameValue.getValue()).floatValue();
+
+        FrameValue overExposureFrameValue =
+                getConnectedInputPort("overExposure").pullFrame().asFrameValue();
+        float overExposure = ((Float)overExposureFrameValue.getValue()).floatValue();
+
+        FrameValue underExposureFrameValue =
+                getConnectedInputPort("underExposure").pullFrame().asFrameValue();
+        float underExposure = ((Float)underExposureFrameValue.getValue()).floatValue();
+
+        FrameValue colorfulnessFrameValue =
+                getConnectedInputPort("colorfulness").pullFrame().asFrameValue();
+        float colorfulness = ((Float)colorfulnessFrameValue.getValue()).floatValue();
+
+        FrameValue contrastRatingFrameValue =
+                getConnectedInputPort("contrastRating").pullFrame().asFrameValue();
+        float contrastRating = ((Float)contrastRatingFrameValue.getValue()).floatValue();
+
+        FrameValue brightnessFrameValue =
+                getConnectedInputPort("brightness").pullFrame().asFrameValue();
+        float brightness = ((Float)brightnessFrameValue.getValue()).floatValue();
+
+        FrameValue motionValuesFrameValue =
+                getConnectedInputPort("motionValues").pullFrame().asFrameValue();
+        float[] motionValues = (float[]) motionValuesFrameValue.getValue();
+
+
+        float vectorAccel = (float) Math.sqrt(Math.pow(motionValues[0], 2) +
+                Math.pow(motionValues[1], 2) + Math.pow(motionValues[2], 2));
+        String outStr;
+
+        FrameValue capturingFrameValue =
+                getConnectedInputPort("capturing").pullFrame().asFrameValue();
+        boolean capturing = (Boolean) capturingFrameValue.getValue();
+
+        FrameImage2D inputImage = getConnectedInputPort("image").pullFrame().asFrameImage2D();
+
+
+        // TODO: get rid of magic numbers
+        float score = 0.0f;
+        score = computePictureScore(vectorAccel, sharpness, underExposure, overExposure,
+                    contrastRating, colorfulness, brightness);
+        if (scoreMean == 0) scoreMean = score;
+        else scoreMean = scoreMean * (1 - DECAY) + score * DECAY;
+
+        if (motionMean == 0) motionMean = vectorAccel;
+        else motionMean = motionMean * (1 - DECAY) + vectorAccel * DECAY;
+
+        float classifierScore = classifierComputeScore(vectorAccel, sharpness, underExposure,
+                colorfulness, contrastRating, score);
+
+//        Log.v(TAG, "ClassifierScore:: " + classifierScore);
+        final float GREAT_SCORE = 3.5f;
+        final float GOOD_SCORE = 2.5f;
+        final float OK_SCORE = 1.5f;
+        final float BAD_SCORE = 0.5f;
+
+        if (score >= GREAT_SCORE) {
+            outStr = GREAT;
+        } else if (score >= GOOD_SCORE) {
+            outStr = GOOD;
+        } else if (score >= OK_SCORE) {
+            outStr = OK;
+        } else if (score >= BAD_SCORE) {
+            outStr = BAD;
+        } else {
+            outStr = AWFUL;
+        }
+
+        if(capturing) {
+            if (outStr.equals(GREAT)) {
+                // take a picture
+                Bitmap bitmap = inputImage.toBitmap();
+
+                new AsyncOperation().execute(bitmap);
+                final float RESET_FEATURES = 0.01f;
+                sharpnessMean = RESET_FEATURES;
+                underExposureMean = RESET_FEATURES;
+                overExposureMean = RESET_FEATURES;
+                contrastMean = RESET_FEATURES;
+                colorfulnessMean = RESET_FEATURES;
+                brightnessMean = RESET_FEATURES;
+            }
+        }
+
+        OutputPort outPort = getConnectedOutputPort("goodOrBadPic");
+        FrameValue stringFrame = outPort.fetchAvailableFrame(null).asFrameValue();
+        stringFrame.setValue(outStr);
+        outPort.pushFrame(stringFrame);
+
+        OutputPort scoreOutPort = getConnectedOutputPort("score");
+        FrameValue scoreFrame = scoreOutPort.fetchAvailableFrame(null).asFrameValue();
+        scoreFrame.setValue(score);
+        scoreOutPort.pushFrame(scoreFrame);
+
+    }
+
+    private class AsyncOperation extends AsyncTask<Bitmap, Void, String> {
+        private Bitmap b;
+        protected void onPostExecute(String result) {
+            ImageView view = SmartCamera.getImageView();
+            view.setImageBitmap(b);
+        }
+
+        @Override
+        protected String doInBackground(Bitmap... params) {
+            // TODO Auto-generated method stub
+            b = params[0];
+            return null;
+        }
+
+    }
+    // Returns a number between -1 and 1
+    private float classifierComputeScore(float vectorAccel, float sharpness, float underExposure,
+           float colorfulness, float contrast, float score) {
+        float result = (-0.0223f * sharpness + -0.0563f * underExposure + 0.0137f * colorfulness
+                + 0.3102f * contrast + 0.0314f * vectorAccel + -0.0094f * score + 0.0227f *
+                sharpnessMean + 0.0459f * underExposureMean + -0.3934f * contrastMean +
+                -0.0697f * motionMean + 0.0091f * scoreMean + -0.0152f);
+        return result;
+    }
+
+    // Returns a number between -1 and 4 representing the score for this picture
+    private float computePictureScore(float vector_accel, float sharpness,
+            float underExposure, float overExposure, float contrastRating, float colorfulness,
+            float brightness) {
+        final float ACCELERATION_THRESHOLD_VERY_STEADY = 0.1f;
+        final float ACCELERATION_THRESHOLD_STEADY = 0.3f;
+        final float ACCELERATION_THRESHOLD_MOTION = 2f;
+
+        float score = 0.0f;
+        if (vector_accel > ACCELERATION_THRESHOLD_MOTION) {
+            score -= (BIG_SCORE_INC + BIG_SCORE_INC); // set score to -1, bad pic
+        } else if (vector_accel > ACCELERATION_THRESHOLD_STEADY) {
+            score -= BIG_SCORE_INC;
+            score = subComputeScore(sharpness, underExposure, overExposure, contrastRating,
+                    colorfulness, brightness, score);
+        } else if (vector_accel < ACCELERATION_THRESHOLD_VERY_STEADY) {
+            score += BIG_SCORE_INC;
+            score = subComputeScore(sharpness, underExposure, overExposure, contrastRating,
+                    colorfulness, brightness, score);
+        } else {
+            score = subComputeScore(sharpness, underExposure, overExposure, contrastRating,
+                    colorfulness, brightness, score);
+        }
+        return score;
+    }
+
+    // Changes the score by at most +/- 3.5
+    private float subComputeScore(float sharpness, float underExposure, float overExposure,
+                float contrastRating, float colorfulness, float brightness, float score) {
+        // The score methods return values -0.5 to 0.5
+        final float SHARPNESS_WEIGHT = 2;
+        score += SHARPNESS_WEIGHT * sharpnessScore(sharpness);
+        score += underExposureScore(underExposure);
+        score += overExposureScore(overExposure);
+        score += contrastScore(contrastRating);
+        score += colorfulnessScore(colorfulness);
+        score += brightnessScore(brightness);
+        return score;
+    }
+
+    private float sharpnessScore(float sharpness) {
+        if (sharpnessMean == 0) {
+            sharpnessMean = sharpness;
+            sharpnessVar = 0;
+            return 0;
+        } else {
+            sharpnessMean = sharpnessMean * (1 - DECAY) + sharpness * DECAY;
+            sharpnessVar = sharpnessVar * (1 - DECAY) + (sharpness - sharpnessMean) *
+                    (sharpness - sharpnessMean) * DECAY;
+            if (sharpnessVar < LOW_VARIANCE) {
+                return BIG_SCORE_INC;
+            } else if (sharpness < sharpnessMean && sharpnessVar > MEDIUM_VARIANCE) {
+                return -BIG_SCORE_INC;
+            } else if (sharpness < sharpnessMean) {
+                return -SMALL_SCORE_INC;
+            } else if (sharpness > sharpnessMean && sharpnessVar > HIGH_VARIANCE) {
+                return 0;
+            } else if (sharpness > sharpnessMean && sharpnessVar > MEDIUM_VARIANCE) {
+                return SMALL_SCORE_INC;
+            } else  {
+                return BIG_SCORE_INC; // low variance, sharpness above the mean
+            }
+        }
+    }
+
+    private float underExposureScore(float underExposure) {
+        if (underExposureMean == 0) {
+            underExposureMean = underExposure;
+            underExposureVar = 0;
+            return 0;
+        } else {
+            underExposureMean = underExposureMean * (1 - DECAY) + underExposure * DECAY;
+            underExposureVar = underExposureVar * (1 - DECAY) + (underExposure - underExposureMean)
+                    * (underExposure - underExposureMean) * DECAY;
+            if (underExposureVar < LOW_VARIANCE) {
+                return BIG_SCORE_INC;
+            } else if (underExposure > underExposureMean && underExposureVar > MEDIUM_VARIANCE) {
+                return -BIG_SCORE_INC;
+            } else if (underExposure > underExposureMean) {
+                return -SMALL_SCORE_INC;
+            } else if (underExposure < underExposureMean && underExposureVar > HIGH_VARIANCE) {
+                return 0;
+            } else if (underExposure < underExposureMean && underExposureVar > MEDIUM_VARIANCE) {
+                return SMALL_SCORE_INC;
+            } else {
+                return BIG_SCORE_INC; // low variance, underExposure below the mean
+            }
+        }
+    }
+
+    private float overExposureScore(float overExposure) {
+        if (overExposureMean == 0) {
+            overExposureMean = overExposure;
+            overExposureVar = 0;
+            return 0;
+        } else {
+            overExposureMean = overExposureMean * (1 - DECAY) + overExposure * DECAY;
+            overExposureVar = overExposureVar * (1 - DECAY) + (overExposure - overExposureMean) *
+                    (overExposure - overExposureMean) * DECAY;
+            if (overExposureVar < LOW_VARIANCE) {
+                return BIG_SCORE_INC;
+            } else if (overExposure > overExposureMean && overExposureVar > MEDIUM_VARIANCE) {
+                return -BIG_SCORE_INC;
+            } else if (overExposure > overExposureMean) {
+                return -SMALL_SCORE_INC;
+            } else if (overExposure < overExposureMean && overExposureVar > HIGH_VARIANCE) {
+                return 0;
+            } else if (overExposure < overExposureMean && overExposureVar > MEDIUM_VARIANCE) {
+                return SMALL_SCORE_INC;
+            } else {
+                return BIG_SCORE_INC; // low variance, overExposure below the mean
+            }
+        }
+    }
+
+    private float contrastScore(float contrast) {
+        if (contrastMean == 0) {
+            contrastMean = contrast;
+            contrastVar = 0;
+            return 0;
+        } else {
+            contrastMean = contrastMean * (1 - DECAY) + contrast * DECAY;
+            contrastVar = contrastVar * (1 - DECAY) + (contrast - contrastMean) *
+                    (contrast - contrastMean) * DECAY;
+            if (contrastVar < LOW_VARIANCE) {
+                return BIG_SCORE_INC;
+            } else if (contrast < contrastMean && contrastVar > MEDIUM_VARIANCE) {
+                return -BIG_SCORE_INC;
+            } else if (contrast < contrastMean) {
+                return -SMALL_SCORE_INC;
+            } else if (contrast > contrastMean && contrastVar > 100) {
+                return 0;
+            } else if (contrast > contrastMean && contrastVar > MEDIUM_VARIANCE) {
+                return SMALL_SCORE_INC;
+            } else {
+                return BIG_SCORE_INC; // low variance, contrast above the mean
+            }
+        }
+    }
+
+    private float colorfulnessScore(float colorfulness) {
+        if (colorfulnessMean == 0) {
+            colorfulnessMean = colorfulness;
+            colorfulnessVar = 0;
+            return 0;
+        } else {
+            colorfulnessMean = colorfulnessMean * (1 - DECAY) + colorfulness * DECAY;
+            colorfulnessVar = colorfulnessVar * (1 - DECAY) + (colorfulness - colorfulnessMean) *
+                    (colorfulness - colorfulnessMean) * DECAY;
+            if (colorfulnessVar < LOW_VARIANCE) {
+                return BIG_SCORE_INC;
+            } else if (colorfulness < colorfulnessMean && colorfulnessVar > MEDIUM_VARIANCE) {
+                return -BIG_SCORE_INC;
+            } else if (colorfulness < colorfulnessMean) {
+                return -SMALL_SCORE_INC;
+            } else if (colorfulness > colorfulnessMean && colorfulnessVar > 100) {
+                return 0;
+            } else if (colorfulness > colorfulnessMean && colorfulnessVar > MEDIUM_VARIANCE) {
+                return SMALL_SCORE_INC;
+            } else {
+                return BIG_SCORE_INC; // low variance, colorfulness above the mean
+            }
+        }
+    }
+
+    private float brightnessScore(float brightness) {
+        if (brightnessMean == 0) {
+            brightnessMean = brightness;
+            brightnessVar = 0;
+            return 0;
+        } else {
+            brightnessMean = brightnessMean * (1 - DECAY) + brightness * DECAY;
+            brightnessVar = brightnessVar * (1 - DECAY) + (brightness - brightnessMean) *
+                    (brightness - brightnessMean) * DECAY;
+            if (brightnessVar < LOW_VARIANCE) {
+                return BIG_SCORE_INC;
+            } else if (brightness < brightnessMean && brightnessVar > MEDIUM_VARIANCE) {
+                return -BIG_SCORE_INC;
+            } else if (brightness < brightnessMean) {
+                return -SMALL_SCORE_INC;
+            } else if (brightness > brightnessMean && brightnessVar > 100) {
+                return 0;
+            } else if (brightness > brightnessMean && brightnessVar > MEDIUM_VARIANCE) {
+                return SMALL_SCORE_INC;
+            } else {
+                return BIG_SCORE_INC; // low variance, brightness above the mean
+            }
+        }
+    }
+}
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/samples/simplecamera/MotionSensorWTime.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/samples/simplecamera/MotionSensorWTime.java
new file mode 100644
index 0000000..64f3ef3
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/samples/simplecamera/MotionSensorWTime.java
@@ -0,0 +1,118 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Make values from a motion sensor (e.g., accelerometer) available as filter outputs.
+
+package androidx.media.filterfw.samples.simplecamera;
+
+import android.content.Context;
+import android.hardware.Sensor;
+import android.hardware.SensorEvent;
+import android.hardware.SensorEventListener;
+import android.hardware.SensorManager;
+import android.util.Log;
+
+import androidx.media.filterfw.Filter;
+import androidx.media.filterfw.FrameType;
+import androidx.media.filterfw.FrameValue;
+import androidx.media.filterfw.FrameValues;
+import androidx.media.filterfw.MffContext;
+import androidx.media.filterfw.OutputPort;
+import androidx.media.filterfw.Signature;
+
+public final class MotionSensorWTime extends Filter implements SensorEventListener {
+
+    private SensorManager mSensorManager = null;
+    private Sensor mSensor = null;
+
+    private float[] mValues = new float[3];
+    private float[][] mTemp = new float[3][3];
+    private float[] mAvgValues = new float[3];
+    private int mCounter = 0;
+
+    public MotionSensorWTime(MffContext context, String name) {
+        super(context, name);
+    }
+
+    @Override
+    public Signature getSignature() {
+        return new Signature()
+            .addOutputPort("values", Signature.PORT_REQUIRED, FrameType.array(float.class))
+            .addOutputPort("timestamp", Signature.PORT_OPTIONAL, FrameType.single(long.class))
+            .disallowOtherPorts();
+    }
+
+    @Override
+    protected void onPrepare() {
+        mSensorManager = (SensorManager)getContext().getApplicationContext()
+                            .getSystemService(Context.SENSOR_SERVICE);
+        mSensor = mSensorManager.getDefaultSensor(Sensor.TYPE_LINEAR_ACCELERATION);
+        // TODO: currently, the type of sensor is hardcoded. Should be able to set the sensor
+        //  type as filter input!
+        mSensorManager.registerListener(this, mSensor, SensorManager.SENSOR_DELAY_UI);
+    }
+
+    @Override
+    protected void onTearDown() {
+        mSensorManager.unregisterListener(this);
+    }
+
+    @Override
+    public final void onAccuracyChanged(Sensor sensor, int accuracy) {
+        // (Do we need to do something when sensor accuracy changes?)
+    }
+
+    @Override
+    public final void onSensorChanged(SensorEvent event) {
+        synchronized(mValues) {
+            mValues[0] = event.values[0];
+            mValues[1] = event.values[1];
+            mValues[2] = event.values[2];
+        }
+    }
+
+    @Override
+    protected void onProcess() {
+        OutputPort outPort = getConnectedOutputPort("values");
+        FrameValues outFrame = outPort.fetchAvailableFrame(null).asFrameValues();
+        synchronized(mValues) {
+            if (mCounter < 3 && mCounter >= 0) {
+                mTemp[0][mCounter] = mValues[0];
+                mTemp[1][mCounter] = mValues[1];
+                mTemp[2][mCounter] = mValues[2];
+            }
+
+            mCounter = (mCounter + 1) % 3;
+
+            mAvgValues[0] = (mTemp[0][0] + mTemp[0][1] + mTemp[0][2]) / 3;
+            mAvgValues[1] = (mTemp[1][0] + mTemp[1][1] + mTemp[1][2]) / 3;
+            mAvgValues[2] = (mTemp[2][0] + mTemp[2][1] + mTemp[2][2]) / 3;
+            outFrame.setValues(mAvgValues);
+        }
+        outFrame.setTimestamp(System.currentTimeMillis() * 1000000L);
+        outPort.pushFrame(outFrame);
+
+        OutputPort timeOutPort = getConnectedOutputPort("timestamp");
+        if (timeOutPort != null) {
+            long timestamp = System.nanoTime();
+            Log.v("MotionSensor", "Timestamp is: " + timestamp);
+            FrameValue timeStampFrame = timeOutPort.fetchAvailableFrame(null).asFrameValue();
+            timeStampFrame.setValue(timestamp);
+            timeOutPort.pushFrame(timeStampFrame);
+        }
+    }
+}
+
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/samples/simplecamera/SmartCamera.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/samples/simplecamera/SmartCamera.java
new file mode 100644
index 0000000..ba0333a
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/samples/simplecamera/SmartCamera.java
@@ -0,0 +1,288 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package androidx.media.filterfw.samples.simplecamera;
+
+import android.app.Activity;
+import android.content.Intent;
+import android.graphics.Bitmap;
+import android.graphics.drawable.BitmapDrawable;
+import android.graphics.drawable.Drawable;
+import android.os.Bundle;
+import android.os.Handler;
+import android.provider.MediaStore;
+import android.util.Log;
+import android.view.LayoutInflater;
+import android.view.SurfaceView;
+import android.view.View;
+import android.view.View.OnClickListener;
+import android.widget.AdapterView;
+import android.widget.Button;
+import android.widget.ImageView;
+import android.widget.LinearLayout;
+import android.widget.Spinner;
+import android.widget.TextView;
+import androidx.media.filterfw.FilterGraph;
+import androidx.media.filterfw.GraphReader;
+import androidx.media.filterfw.GraphRunner;
+import androidx.media.filterfw.MffContext;
+
+import java.io.IOException;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Calendar;
+
+
+public class SmartCamera extends Activity {
+
+    private SurfaceView mCameraView;
+    private TextView mGoodBadTextView;
+    private TextView mFPSTextView;
+    private TextView mEyesTextView;
+    private TextView mSmilesTextView;
+    private TextView mScoreTextView;
+    private static ImageView mImageView1;
+    private static ImageView mImageView2;
+    private static ImageView mImageView3;
+    private static ImageView mImageView4;
+    private static ImageView mImageView5;
+    private Button mStartStopButton;
+    private TextView mImagesSavedTextView;
+    private Spinner mSpinner;
+    private LinearLayout mLinearLayout;
+
+    private MffContext mContext;
+    private FilterGraph mGraph;
+    private GraphRunner mRunner;
+    private Handler mHandler = new Handler();
+
+    private static final String TAG = "SmartCamera";
+    private static final boolean sUseFacialExpression = false;
+    private boolean isPendingRunGraph = false;
+
+    private static ArrayList<ImageView> mImages;
+    private static int count = -1;
+    private static boolean countHasReachedMax = false;
+    private static int numImages = 0;
+
+    // Function to return the correct image view to display the current bitmap
+    public static ImageView getImageView() {
+        if (count == numImages-1) countHasReachedMax = true;
+        count = (count+1) % numImages;
+        return mImages.get(count);
+    }
+
+    // Function used to run images through the graph, mainly for CSV data generation
+    public void runGraphOnImage(String filePath, String fileName) {
+        if(fileName.endsWith(".jpg") == false) {
+            return;
+        }
+        mGraph.getVariable("gallerySource").setValue(filePath + "/" + fileName);
+        Log.v(TAG, "runGraphOnImage : : " + filePath + " name: " + fileName);
+        mGraph.getVariable("imageName").setValue(fileName);
+        mGraph.getVariable("filePath").setValue(filePath); // wrong
+        try {
+            Thread.sleep(400);
+        } catch (InterruptedException e) {
+            // TODO Auto-generated catch block
+            e.printStackTrace();
+        }
+    }
+
+    // Function to clear the "Images Saved" text off the screen
+    private void clearImagesSavedTextView() {
+        mImagesSavedTextView.setText("");
+    }
+
+    // Function to capture the images in the current imageviews and save them to the gallery
+    private void captureImages() {
+        ((WaveTriggerFilter) mGraph.getFilter("snapEffect")).trigger();
+        mGraph.getVariable("startCapture").setValue(false);
+        Bitmap bitmap = null;
+        Drawable res = getResources().getDrawable(R.drawable.black_screen);
+        Calendar cal = Calendar.getInstance();
+        SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd_HH:mm:ss");
+
+        Log.v(TAG, "numImages: " + numImages + " count: " + count +
+                " hasReachedMax: " + countHasReachedMax);
+        int maxI = countHasReachedMax ? numImages : count+1;
+        if(maxI != 0) {
+            if (maxI == 1) mImagesSavedTextView.setText("Image Saved");
+            else {
+                mImagesSavedTextView.setText("" + maxI + " Images Saved");
+            }
+        }
+        for (int i = 0; i < maxI; i++) {
+            bitmap = ((BitmapDrawable)mImages.get(i).getDrawable()).getBitmap();
+            mImages.get(i).setImageDrawable(res);
+            MediaStore.Images.Media.insertImage(getContentResolver(), bitmap,
+                    sdf.format(cal.getTime()) + "_image" + i + ".jpg", "image " + i);
+        }
+        mStartStopButton.setText("Start");
+        count = -1;
+        countHasReachedMax = false;
+        mSpinner.setEnabled(true);
+        mHandler.postDelayed(new Runnable() {
+            public void run() {
+                clearImagesSavedTextView();
+            }
+        }, 5000);
+    }
+
+    @Override
+    public void onCreate(Bundle savedInstanceState) {
+        super.onCreate(savedInstanceState);
+        setContentView(R.layout.simplecamera);
+        setTitle("Smart Camera");
+
+        mContext = new MffContext(this);
+
+        mCameraView = (SurfaceView) findViewById(R.id.cameraView);
+        mGoodBadTextView = (TextView) findViewById(R.id.goodOrBadTextView);
+        mFPSTextView = (TextView) findViewById(R.id.fpsTextView);
+        mScoreTextView = (TextView) findViewById(R.id.scoreTextView);
+        mStartStopButton = (Button) findViewById(R.id.startButton);
+        mImagesSavedTextView = (TextView) findViewById(R.id.imagesSavedTextView);
+        mImagesSavedTextView.setText("");
+        mSpinner = (Spinner) findViewById(R.id.spinner);
+        mLinearLayout = (LinearLayout) findViewById(R.id.scrollViewLinearLayout);
+        mImages = new ArrayList<ImageView>();
+
+        // Spinner is used to determine how many image views are displayed at the bottom
+        // of the screen. Based on the item position that is selected, we inflate that
+        // many imageviews into the bottom linear layout.
+        mSpinner.setOnItemSelectedListener(new AdapterView.OnItemSelectedListener() {
+            @Override
+            public void onItemSelected(AdapterView<?> parentView, View selectedItemView,
+                    int position, long id) {
+                mLinearLayout.removeViews(0,numImages);
+                numImages = position+1;
+                mImages.clear();
+                LayoutInflater inflater = getLayoutInflater();
+                for (int i = 0; i < numImages; i++) {
+                    ImageView tmp = (ImageView) inflater.inflate(R.layout.imageview, null);
+                    mImages.add(tmp);
+                    mLinearLayout.addView(tmp);
+                }
+            }
+
+            @Override
+            public void onNothingSelected(AdapterView<?> parentView) {
+            }
+        });
+
+        numImages = mSpinner.getSelectedItemPosition()+1;
+        mImages.clear();
+        LayoutInflater inflater = getLayoutInflater();
+        for (int i = 0; i < numImages; i++) {
+            ImageView tmp = (ImageView) inflater.inflate(R.layout.imageview, null);
+            mImages.add(tmp);
+            mLinearLayout.addView(tmp);
+
+        }
+
+        // Button used to start and stop the capture of images when they are deemed great
+        mStartStopButton.setOnClickListener(new OnClickListener() {
+            @Override
+            public void onClick(View v) {
+                if (mStartStopButton.getText().equals("Start")) {
+                    mGraph.getVariable("startCapture").setValue(true);
+                    mStartStopButton.setText("Stop");
+                    mSpinner.setEnabled(false);
+                } else {
+                    boolean tmp = (Boolean) mGraph.getVariable("startCapture").getValue();
+                    if (tmp == false) {
+                        return;
+                    }
+                    if (count == numImages-1) countHasReachedMax = true;
+                    captureImages();
+                }
+            }
+        });
+
+        // Button to open the gallery to show the images in there
+        Button galleryOpen = (Button) findViewById(R.id.galleryOpenButton);
+        galleryOpen.setOnClickListener(new OnClickListener() {
+           @Override
+           public void onClick(View v) {
+               Intent openGalleryIntent = new Intent(Intent.ACTION_MAIN);
+               openGalleryIntent.addCategory(Intent.CATEGORY_APP_GALLERY);
+               startActivity(openGalleryIntent);
+           }
+        });
+
+        loadGraph();
+        mGraph.getVariable("startCapture").setValue(false);
+        runGraph();
+    }
+
+    @Override
+    public void onPause() {
+        super.onPause();
+        Log.i(TAG, "onPause");
+        if (mContext != null) {
+            mContext.onPause();
+        }
+    }
+
+    @Override
+    public void onResume() {
+        super.onResume();
+        Log.i(TAG, "onResume");
+        if (mContext != null) {
+            mContext.onResume();
+        }
+        if (isPendingRunGraph) {
+            isPendingRunGraph = false;
+            runGraph();
+        }
+    }
+
+    @Override
+    public void onStop() {
+        super.onStop();
+        Log.i(TAG, "onStop");
+    }
+
+    // Build the Filtergraph for Camera
+    private void loadGraph() {
+        try {
+            mGraph = GraphReader.readXmlGraphResource(mContext, R.raw.camera_graph);
+            mRunner = mGraph.getRunner();
+
+            // Connect views
+            mGraph.bindFilterToView("camViewTarget", mCameraView);
+            mGraph.bindFilterToView("goodOrBadTextView", mGoodBadTextView);
+            mGraph.bindFilterToView("fpsTextView", mFPSTextView);
+            mGraph.bindFilterToView("scoreTextView", mScoreTextView);
+
+            // Used for Facial Expressions
+            if (sUseFacialExpression) {
+                mGraph.bindFilterToView("eyesTextView", mEyesTextView);
+                mGraph.bindFilterToView("smilesTextView", mSmilesTextView);
+            }
+
+        } catch (IOException e) {
+            e.printStackTrace();
+        }
+    }
+
+    // Asynchronously run the filtergraph
+    private void runGraph() {
+        mRunner.setIsVerbose(true);
+        mRunner.start(mGraph);
+    }
+}
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/samples/simplecamera/WaveTriggerFilter.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/samples/simplecamera/WaveTriggerFilter.java
new file mode 100644
index 0000000..9f72940
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/samples/simplecamera/WaveTriggerFilter.java
@@ -0,0 +1,83 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package androidx.media.filterfw.samples.simplecamera;
+
+import androidx.media.filterfw.Filter;
+import androidx.media.filterfw.FrameType;
+import androidx.media.filterfw.FrameValue;
+import androidx.media.filterfw.InputPort;
+import androidx.media.filterfw.MffContext;
+import androidx.media.filterfw.OutputPort;
+import androidx.media.filterfw.Signature;
+
+// The Filter is used to generate the camera snap effect.
+// The effect is to give the image a sudden white appearance.
+public final class WaveTriggerFilter extends Filter {
+
+    // Set by trigger(); consumed (and cleared) on the next onProcess() call.
+    private boolean mTrigger = false;
+    // True while the flash wave is being played out.
+    private boolean mInWaveMode = false;
+    // Phase of the wave; starts at 0.5, advances 0.2 per frame, ends at 2.0.
+    private float mTime = 0f;
+
+    public WaveTriggerFilter(MffContext context, String name) {
+        super(context, name);
+    }
+
+    @Override
+    public Signature getSignature() {
+        // NOTE(review): only the "value" output is declared here, although
+        // onInputPortOpen() below expects an optional "trigger" input port --
+        // confirm whether the signature should also declare that input.
+        return new Signature()
+            .addOutputPort("value", Signature.PORT_REQUIRED, FrameType.single())
+            .disallowOtherPorts();
+    }
+
+    /** Starts the snap effect on the next processed frame. */
+    public synchronized void trigger() {
+        mTrigger = true;
+    }
+
+    @Override
+    public void onInputPortOpen(InputPort port) {
+        if (port.getName().equals("trigger")) {
+            // Lets values pushed to "trigger" set mTrigger directly (bound by
+            // field name, so mTrigger must not be renamed).
+            port.bindToFieldNamed("mTrigger");
+            port.setAutoPullEnabled(true);
+        }
+    }
+
+    @Override
+    protected synchronized void onProcess() {
+        // Check if we were triggered
+        if (mTrigger) {
+            mInWaveMode = true;
+            mTrigger = false;
+            mTime = 0.5f;
+        }
+
+        // Calculate output value: steady 0.5 normally; during the wave it
+        // ramps 0.5 -> 1.0 -> 0.0 as mTime goes 0.5 -> 1.0 -> 2.0.
+        float value = 0.5f;
+        if (mInWaveMode) {
+            value = -Math.abs(mTime - 1f) + 1f;
+            mTime += 0.2f;
+            if (mTime >= 2f) {
+                mInWaveMode = false;
+            }
+        }
+
+        // Push Value
+        OutputPort outPort = getConnectedOutputPort("value");
+        FrameValue frame = outPort.fetchAvailableFrame(null).asFrameValue();
+        frame.setValue(value);
+        outPort.pushFrame(frame);
+    }
+}
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/tests/Android.mk b/tests/Camera2Tests/SmartCamera/SimpleCamera/tests/Android.mk
new file mode 100644
index 0000000..50926a6
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/tests/Android.mk
@@ -0,0 +1,39 @@
+# Copyright (C) 2013 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+LOCAL_PATH := $(call my-dir)
+include $(CLEAR_VARS)
+
+LOCAL_MODULE_TAGS := tests
+
+# LOCAL_SDK_VERSION := current
+
+LOCAL_PACKAGE_NAME := SmartCamera-tests
+
+LOCAL_SRC_FILES += $(call all-java-files-under, src)
+
+LOCAL_JAVA_LIBRARIES := android.test.runner
+#LOCAL_JAVA_LIBRARIES := filterframework-test-lib
+#LOCAL_STATIC_JAVA_LIBRARIES := filterframework-test-lib
+
+# Guava is the only static Java library needed. (Earlier revisions assigned
+# this variable three times -- +=, :=, then an empty += -- with the same net
+# effect; consolidated into a single assignment.)
+LOCAL_STATIC_JAVA_LIBRARIES := guava
+
+LOCAL_PROGUARD_ENABLED := disabled
+
+LOCAL_INSTRUMENTATION_FOR := SmartCamera
+
+include $(BUILD_PACKAGE)
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/tests/AndroidManifest.xml b/tests/Camera2Tests/SmartCamera/SimpleCamera/tests/AndroidManifest.xml
new file mode 100644
index 0000000..3363af4
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/tests/AndroidManifest.xml
@@ -0,0 +1,31 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright 2013 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+    package="androidx.media.filterfw.samples.simplecamera.tests"
+    android:versionCode="1"
+    android:versionName="1.0" >
+
+    <uses-sdk android:minSdkVersion="9" android:targetSdkVersion="17" />
+
+    <instrumentation
+        android:name="android.test.InstrumentationTestRunner"
+        android:targetPackage="androidx.media.filterfw.samples.simplecamera" />
+
+    <application>
+        <uses-library android:name="android.test.runner" />
+    </application>
+
+</manifest>
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/tests/project.properties b/tests/Camera2Tests/SmartCamera/SimpleCamera/tests/project.properties
new file mode 100644
index 0000000..4653837a
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/tests/project.properties
@@ -0,0 +1,16 @@
+# This file is automatically generated by Android Tools.
+# Do not modify this file -- YOUR CHANGES WILL BE ERASED!
+#
+# This file must be checked in Version Control Systems.
+#
+# To customize properties used by the Ant build system edit
+# "ant.properties", and override values to adapt the script to your
+# project structure.
+#
+# To enable ProGuard to shrink and obfuscate your code, uncomment this (available properties: sdk.dir, user.home):
+#proguard.config=${sdk.dir}/tools/proguard/proguard-android.txt:proguard-project.txt
+
+# Project target.
+target=android-17
+android.library.reference.1=../filterfw-test-lib
+android.library.reference.2=../filterfw
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/tests/res/.README b/tests/Camera2Tests/SmartCamera/SimpleCamera/tests/res/.README
new file mode 100644
index 0000000..c29cd87
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/tests/res/.README
@@ -0,0 +1,3 @@
+The res directory is needed for Eclipse to correctly build the project, but it
+is not possible to check an empty directory into git. This placeholder file
+guarantees that the res directory exists.
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/tests/src/androidx/media/filterfw/FrameSourceFilter.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/tests/src/androidx/media/filterfw/FrameSourceFilter.java
new file mode 100644
index 0000000..7daa03f
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/tests/src/androidx/media/filterfw/FrameSourceFilter.java
@@ -0,0 +1,69 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package androidx.media.filterfw;
+
+import java.util.LinkedList;
+import java.util.Queue;
+
+/**
+ * A {@link Filter} that pushes out externally injected frames.
+ * <p> Frames handed in through {@link #injectFrame(Frame)} are queued and
+ * emitted one per execution in FIFO order. When the queue is empty the filter
+ * sleeps until the next injection wakes it up.
+ */
+class FrameSourceFilter extends Filter {
+
+    private final Queue<Frame> mPendingFrames = new LinkedList<Frame>();
+
+    FrameSourceFilter(MffContext context, String name) {
+        super(context, name);
+    }
+
+    @Override
+    public Signature getSignature() {
+        return new Signature()
+                .addOutputPort("output", Signature.PORT_REQUIRED, FrameType.any())
+                .disallowOtherPorts();
+    }
+
+    /**
+     * Injects a frame to be pushed by a future execution of the filter.
+     * <p> If multiple frames are injected they are pushed one per execution,
+     * in FIFO order; injecting also wakes the filter if it is sleeping.
+     */
+    public synchronized void injectFrame(Frame frame) {
+        mPendingFrames.add(frame);
+        wakeUp();
+    }
+
+    @Override
+    protected void onProcess() {
+        Frame next = nextFrameOrSleep();
+        if (next != null) {
+            getConnectedOutputPort("output").pushFrame(next);
+        }
+    }
+
+    // Dequeues the next pending frame, or puts the filter to sleep and
+    // returns null when there is nothing to push.
+    private synchronized Frame nextFrameOrSleep() {
+        Frame next = mPendingFrames.poll();
+        if (next == null) {
+            enterSleepState();
+        }
+        return next;
+    }
+
+}
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/tests/src/androidx/media/filterfw/FrameTargetFilter.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/tests/src/androidx/media/filterfw/FrameTargetFilter.java
new file mode 100644
index 0000000..1f0e267
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/tests/src/androidx/media/filterfw/FrameTargetFilter.java
@@ -0,0 +1,58 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package androidx.media.filterfw;
+
+/**
+ * A {@link Filter} that consumes frames and notifies an optional listener
+ * each time one arrives.
+ */
+class FrameTargetFilter extends Filter {
+
+    interface Listener {
+        /**
+         * Invoked for every frame this filter pulls. The implementer takes
+         * ownership of the frame and is responsible for releasing it.
+         */
+        void onFramePushed(String filterName, Frame frame);
+    }
+
+    private Listener mListener;
+
+    FrameTargetFilter(MffContext context, String name) {
+        super(context, name);
+    }
+
+    @Override
+    public Signature getSignature() {
+        return new Signature()
+                .addInputPort("input", Signature.PORT_REQUIRED, FrameType.any())
+                .disallowOtherPorts();
+    }
+
+    public synchronized void setListener(Listener listener) {
+        mListener = listener;
+    }
+
+    @Override
+    protected synchronized void onProcess() {
+        Frame incoming = getConnectedInputPort("input").pullFrame();
+        if (mListener == null) {
+            return;
+        }
+        // Retain on behalf of the listener, which must release the frame.
+        incoming.retain();
+        mListener.onFramePushed(getName(), incoming);
+    }
+
+}
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/tests/src/androidx/media/filterfw/MffFilterTestCase.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/tests/src/androidx/media/filterfw/MffFilterTestCase.java
new file mode 100644
index 0000000..84efd28
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/tests/src/androidx/media/filterfw/MffFilterTestCase.java
@@ -0,0 +1,179 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package androidx.media.filterfw;
+
+import androidx.media.filterfw.GraphRunner.Listener;
+import androidx.media.filterfw.Signature.PortInfo;
+
+import com.google.common.util.concurrent.SettableFuture;
+
+import junit.framework.TestCase;
+
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
+
+/**
+ * A {@link TestCase} for testing single MFF filter runs. Implementers should extend this class and
+ * implement the {@link #createFilter(MffContext)} method to create the filter under test. Inside
+ * each test method, the implementer should supply one or more frames for all the filter inputs
+ * (calling {@link #injectInputFrame(String, Frame)}) and then invoke {@link #process()}. Once the
+ * processing finishes, one should call {@link #getOutputFrame(String)} to get and inspect the
+ * output frames.
+ *
+ * TODO: extend this to deal with filters that push multiple output frames.
+ * TODO: relax the requirement that all output ports should be pushed (the implementer should be
+ *       able to tell which ports to wait for before process() returns).
+ * TODO: handle undeclared inputs and outputs.
+ */
+public abstract class MffFilterTestCase extends MffTestCase {
+
+    // How long process() waits for the graph run to finish before timing out.
+    private static final long DEFAULT_TIMEOUT_MS = 1000;
+
+    private FilterGraph mGraph;
+    private GraphRunner mRunner;
+    // Frames pushed by the filter under test, keyed by "out_<portName>".
+    private Map<String, Frame> mOutputFrames;
+    // Output ports that have not yet pushed a frame in the current run.
+    private Set<String> mEmptyOutputPorts;
+
+    // Completed (or failed) when the graph runner stops; process() blocks on it.
+    private SettableFuture<Void> mProcessResult;
+
+    protected abstract Filter createFilter(MffContext mffContext);
+
+    @Override
+    protected void setUp() throws Exception {
+        super.setUp();
+        // Build a graph consisting of the filter under test, one source filter
+        // per declared input port, and one target filter per output port.
+        MffContext mffContext = getMffContext();
+        FilterGraph.Builder graphBuilder = new FilterGraph.Builder(mffContext);
+        Filter filterUnderTest = createFilter(mffContext);
+        graphBuilder.addFilter(filterUnderTest);
+
+        connectInputPorts(mffContext, graphBuilder, filterUnderTest);
+        connectOutputPorts(mffContext, graphBuilder, filterUnderTest);
+
+        mGraph = graphBuilder.build();
+        mRunner = mGraph.getRunner();
+        // Resolve mProcessResult when the run stops, so process() can unblock.
+        mRunner.setListener(new Listener() {
+            @Override
+            public void onGraphRunnerStopped(GraphRunner runner) {
+                mProcessResult.set(null);
+            }
+
+            @Override
+            public void onGraphRunnerError(Exception exception, boolean closedSuccessfully) {
+                mProcessResult.setException(exception);
+            }
+        });
+
+        mOutputFrames = new HashMap<String, Frame>();
+        mProcessResult = SettableFuture.create();
+    }
+
+    @Override
+    protected void tearDown() throws Exception {
+        // Release the retained output frames before dropping the references.
+        for (Frame frame : mOutputFrames.values()) {
+            frame.release();
+        }
+        mOutputFrames = null;
+
+        mRunner.stop();
+        mRunner = null;
+        mGraph = null;
+
+        mProcessResult = null;
+        super.tearDown();
+    }
+
+    // Hands a frame to the source filter feeding the named input port.
+    protected void injectInputFrame(String portName, Frame frame) {
+        FrameSourceFilter filter = (FrameSourceFilter) mGraph.getFilter("in_" + portName);
+        filter.injectFrame(frame);
+    }
+
+    /**
+     * Returns the frame pushed out by the filter under test. Should only be called after
+     * {@link #process(long)} has returned.
+     */
+    protected Frame getOutputFrame(String outputPortName) {
+        return mOutputFrames.get("out_" + outputPortName);
+    }
+
+    // Runs the graph and blocks until it stops or the timeout elapses.
+    protected void process(long timeoutMs)
+            throws ExecutionException, TimeoutException, InterruptedException {
+        mRunner.start(mGraph);
+        mProcessResult.get(timeoutMs, TimeUnit.MILLISECONDS);
+    }
+
+    protected void process() throws ExecutionException, TimeoutException, InterruptedException {
+        process(DEFAULT_TIMEOUT_MS);
+    }
+
+    /**
+     * This method should be called to create the input frames inside the test cases (instead of
+     * {@link Frame#create(FrameType, int[])}). This is required to work around a requirement for
+     * the latter method to be called on the MFF thread.
+     */
+    protected Frame createFrame(FrameType type, int[] dimensions) {
+        return new Frame(type, dimensions, mRunner.getFrameManager());
+    }
+
+    // Adds a FrameSourceFilter "in_<port>" for each declared input port.
+    private void connectInputPorts(
+            MffContext mffContext, FilterGraph.Builder graphBuilder, Filter filter) {
+        Signature signature = filter.getSignature();
+        for (Entry<String, PortInfo> inputPortEntry : signature.getInputPorts().entrySet()) {
+            Filter inputFilter = new FrameSourceFilter(mffContext, "in_" + inputPortEntry.getKey());
+            graphBuilder.addFilter(inputFilter);
+            graphBuilder.connect(inputFilter, "output", filter, inputPortEntry.getKey());
+        }
+    }
+
+    // Adds a FrameTargetFilter "out_<port>" for each declared output port and
+    // tracks the set of ports that still owe a frame.
+    private void connectOutputPorts(
+            MffContext mffContext, FilterGraph.Builder graphBuilder, Filter filter) {
+        Signature signature = filter.getSignature();
+        mEmptyOutputPorts = new HashSet<String>();
+        OutputFrameListener outputFrameListener = new OutputFrameListener();
+        for (Entry<String, PortInfo> outputPortEntry : signature.getOutputPorts().entrySet()) {
+            FrameTargetFilter outputFilter = new FrameTargetFilter(
+                    mffContext, "out_" + outputPortEntry.getKey());
+            graphBuilder.addFilter(outputFilter);
+            graphBuilder.connect(filter, outputPortEntry.getKey(), outputFilter, "input");
+            outputFilter.setListener(outputFrameListener);
+            mEmptyOutputPorts.add("out_" + outputPortEntry.getKey());
+        }
+    }
+
+    // Collects output frames and stops the graph once every port has pushed.
+    private class OutputFrameListener implements FrameTargetFilter.Listener {
+
+        @Override
+        public void onFramePushed(String filterName, Frame frame) {
+            mOutputFrames.put(filterName, frame);
+            boolean alreadyPushed = !mEmptyOutputPorts.remove(filterName);
+            if (alreadyPushed) {
+                throw new IllegalStateException(
+                        "A frame has been pushed twice to the same output port.");
+            }
+            if (mEmptyOutputPorts.isEmpty()) {
+                // All outputs have been pushed, stop the graph.
+                mRunner.stop();
+            }
+        }
+
+    }
+
+}
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/tests/src/androidx/media/filterfw/MffTestCase.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/tests/src/androidx/media/filterfw/MffTestCase.java
new file mode 100644
index 0000000..2f33a5c
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/tests/src/androidx/media/filterfw/MffTestCase.java
@@ -0,0 +1,71 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package androidx.media.filterfw;
+
+import android.os.Handler;
+import android.os.HandlerThread;
+import android.test.AndroidTestCase;
+
+import junit.framework.TestCase;
+
+import java.util.concurrent.Callable;
+import java.util.concurrent.FutureTask;
+
+/**
+ * A {@link TestCase} for testing objects requiring {@link MffContext}. This test case can only be
+ * used to test the functionality that does not rely on GL support and camera.
+ */
+public class MffTestCase extends AndroidTestCase {
+
+    private HandlerThread mMffContextHandlerThread;
+    private MffContext mMffContext;
+
+    @Override
+    protected void setUp() throws Exception {
+        super.setUp();
+        // MffContext needs to be created on a separate thread to allow MFF to post Runnable's.
+        mMffContextHandlerThread = new HandlerThread("MffContextThread");
+        mMffContextHandlerThread.start();
+        Handler handler = new Handler(mMffContextHandlerThread.getLooper());
+        FutureTask<MffContext> task = new FutureTask<MffContext>(new Callable<MffContext>() {
+            @Override
+            public MffContext call() throws Exception {
+                // GL and camera are explicitly disabled so these tests can run on any device
+                // or emulator (see class javadoc).
+                MffContext.Config config = new MffContext.Config();
+                config.requireCamera = false;
+                config.requireOpenGL = false;
+                config.forceNoGL = true;
+                return new MffContext(getContext(), config);
+            }
+        });
+        handler.post(task);
+        // Wait for the context to be created on the handler thread.
+        mMffContext = task.get();
+    }
+
+    @Override
+    protected void tearDown() throws Exception {
+        // Release the context while its handler thread is still alive, in case release()
+        // needs to post to that thread; only then quit the looper. The null checks keep
+        // tearDown from throwing (and masking the original failure) when setUp aborted
+        // part-way through.
+        if (mMffContext != null) {
+            mMffContext.release();
+            mMffContext = null;
+        }
+        if (mMffContextHandlerThread != null) {
+            mMffContextHandlerThread.getLooper().quit();
+            mMffContextHandlerThread = null;
+        }
+        super.tearDown();
+    }
+
+    /** Returns the context created in {@link #setUp()}; valid between setUp and tearDown. */
+    protected MffContext getMffContext() {
+        return mMffContext;
+    }
+
+}
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/tests/src/androidx/media/filterfw/samples/simplecamera/AverageFilterTest.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/tests/src/androidx/media/filterfw/samples/simplecamera/AverageFilterTest.java
new file mode 100644
index 0000000..37b5eb8
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/tests/src/androidx/media/filterfw/samples/simplecamera/AverageFilterTest.java
@@ -0,0 +1,53 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package androidx.media.filterfw.samples.simplecamera;
+
+import androidx.media.filterfw.Filter;
+import androidx.media.filterfw.FrameType;
+import androidx.media.filterfw.FrameValue;
+import androidx.media.filterfw.MffContext;
+import androidx.media.filterfw.MffFilterTestCase;
+
+
+public class AverageFilterTest extends MffFilterTestCase {
+
+    @Override
+    protected Filter createFilter(MffContext mffContext) {
+        return new AverageFilter(mffContext, "averageFilter");
+    }
+
+    public void testAverageFilter() throws Exception {
+        FrameValue frame = createFrame(FrameType.single(), new int[] { 1 }).asFrameValue();
+        frame.setValue(5f);
+
+        injectInputFrame("sharpness", frame);
+
+        process();
+        assertEquals(1f, ((Float) getOutputFrame("avg").asFrameValue().getValue()).floatValue(),
+                0.001f);
+    }
+
+    public void testAverageFilter2() throws Exception{
+        FrameValue frame = createFrame(FrameType.single(), new int[] { 1 }).asFrameValue();
+        frame.setValue(4f);
+
+        injectInputFrame("sharpness", frame);
+
+        process();
+        assertEquals(0.8f, ((Float) getOutputFrame("avg").asFrameValue().getValue()).floatValue(),
+                0.001f);
+    }
+}
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/tests/src/androidx/media/filterfw/samples/simplecamera/AvgBrightnessFilterTest.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/tests/src/androidx/media/filterfw/samples/simplecamera/AvgBrightnessFilterTest.java
new file mode 100644
index 0000000..3c8d127
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/tests/src/androidx/media/filterfw/samples/simplecamera/AvgBrightnessFilterTest.java
@@ -0,0 +1,57 @@
+/*
+ * Copyright 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package androidx.media.filterfw.samples.simplecamera;
+
+import android.content.res.AssetManager;
+import android.graphics.Bitmap;
+import android.graphics.BitmapFactory;
+
+import android.net.Uri;
+import android.provider.MediaStore;
+import androidx.media.filterfw.Filter;
+import androidx.media.filterfw.FrameImage2D;
+import androidx.media.filterfw.FrameType;
+import androidx.media.filterfw.MffContext;
+import androidx.media.filterfw.MffFilterTestCase;
+
+
+public class AvgBrightnessFilterTest extends MffFilterTestCase {
+    private AssetManager assetMgr = null;
+    @Override
+    protected Filter createFilter(MffContext mffContext) {
+        assetMgr = mffContext.getApplicationContext().getAssets();
+        return new AvgBrightnessFilter(mffContext, "brightnessFilter");
+    }
+
+    public void testBrightnessFilter() throws Exception{
+        final int INPUT_WIDTH = 480;
+        final int INPUT_HEIGHT = 640;
+        FrameImage2D image =
+                createFrame(FrameType.image2D(FrameType.ELEMENT_RGBA8888, FrameType.READ_CPU),
+                        new int[] {INPUT_WIDTH,INPUT_HEIGHT}).asFrameImage2D();
+
+        Bitmap bitmap = BitmapFactory.decodeStream(assetMgr.open("0002_000390.jpg"));
+        image.setBitmap(bitmap);
+
+        injectInputFrame("image", image);
+
+        process();
+        final float EXPECTED_RESULT = 0.35f;
+        assertEquals(EXPECTED_RESULT, ((Float) getOutputFrame("brightnessRating").
+                asFrameValue().getValue()).floatValue(), 0.01f);
+    }
+}
\ No newline at end of file
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/tests/src/androidx/media/filterfw/samples/simplecamera/ContrastRatioFilterTest.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/tests/src/androidx/media/filterfw/samples/simplecamera/ContrastRatioFilterTest.java
new file mode 100644
index 0000000..6072755
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/tests/src/androidx/media/filterfw/samples/simplecamera/ContrastRatioFilterTest.java
@@ -0,0 +1,67 @@
+/*
+ * Copyright 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package androidx.media.filterfw.samples.simplecamera;
+
+import androidx.media.filterfw.Filter;
+import androidx.media.filterfw.FrameImage2D;
+import androidx.media.filterfw.FrameType;
+import androidx.media.filterfw.MffContext;
+import androidx.media.filterfw.MffFilterTestCase;
+import androidx.media.filterfw.samples.simplecamera.ContrastRatioFilter;
+
+import android.content.res.AssetManager;
+import android.graphics.Bitmap;
+import android.graphics.BitmapFactory;
+import android.net.Uri;
+import android.provider.MediaStore;
+
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.TimeoutException;
+
+
+public class ContrastRatioFilterTest extends MffFilterTestCase {
+    private AssetManager assetMgr = null;
+
+    @Override
+    protected Filter createFilter(MffContext mffContext) {
+        assetMgr = mffContext.getApplicationContext().getAssets();
+        return new ContrastRatioFilter(mffContext, "contrastFilter");
+    }
+
+    public void testContrastFilter() throws Exception {
+
+        final int INPUT_WIDTH = 480;
+        final int INPUT_HEIGHT = 640;
+        FrameImage2D image =
+                createFrame(FrameType.image2D(FrameType.ELEMENT_RGBA8888, FrameType.READ_CPU),
+                        new int[] {INPUT_WIDTH,INPUT_HEIGHT}).asFrameImage2D();
+
+        Bitmap bitmap = BitmapFactory.decodeStream(assetMgr.open("0002_000390.jpg"));
+        image.setBitmap(bitmap);
+
+        injectInputFrame("image", image);
+
+        process();
+        final float EXPECTED_RESULT = 0.29901487f;
+        assertEquals(EXPECTED_RESULT, ((Float) getOutputFrame("contrastRating").
+                asFrameValue().getValue()).floatValue(), 0.001f);
+
+
+    }
+}
\ No newline at end of file
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/tests/src/androidx/media/filterfw/samples/simplecamera/ExposureFilterTest.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/tests/src/androidx/media/filterfw/samples/simplecamera/ExposureFilterTest.java
new file mode 100644
index 0000000..25ac212
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/tests/src/androidx/media/filterfw/samples/simplecamera/ExposureFilterTest.java
@@ -0,0 +1,64 @@
+/*
+ * Copyright 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package androidx.media.filterfw.samples.simplecamera;
+
+import android.content.res.AssetManager;
+import android.graphics.Bitmap;
+import android.graphics.BitmapFactory;
+import android.net.Uri;
+import android.provider.MediaStore;
+import androidx.media.filterfw.Filter;
+import androidx.media.filterfw.FrameImage2D;
+import androidx.media.filterfw.FrameType;
+import androidx.media.filterfw.MffContext;
+import androidx.media.filterfw.MffFilterTestCase;
+
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.TimeoutException;
+
+
+/** Unit test for ExposureFilter's over/under exposure ratings on a known asset image. */
+public class ExposureFilterTest extends MffFilterTestCase {
+
+    private AssetManager assetMgr = null;
+
+    @Override
+    protected Filter createFilter(MffContext mffContext) {
+        assetMgr = mffContext.getApplicationContext().getAssets();
+        return new ExposureFilter(mffContext, "exposureFilter");
+    }
+
+    /** Unwraps the float carried by the named output port of the filter under test. */
+    private float outputValue(String portName) throws Exception {
+        return ((Float) getOutputFrame(portName).asFrameValue().getValue()).floatValue();
+    }
+
+    public void testExposureFilter() throws Exception {
+        final int INPUT_WIDTH = 480;
+        final int INPUT_HEIGHT = 640;
+        FrameImage2D image = createFrame(
+                FrameType.image2D(FrameType.ELEMENT_RGBA8888, FrameType.READ_CPU),
+                new int[] { INPUT_WIDTH, INPUT_HEIGHT }).asFrameImage2D();
+        image.setBitmap(BitmapFactory.decodeStream(assetMgr.open("0002_000390.jpg")));
+
+        injectInputFrame("image", image);
+        process();
+
+        final float EXPECTED_OVEREXPOSURE = 0.00757f;
+        assertEquals(EXPECTED_OVEREXPOSURE, outputValue("overExposureRating"), 0.001f);
+        final float EXPECTED_UNDEREXPOSURE = 0.2077f;
+        assertEquals(EXPECTED_UNDEREXPOSURE, outputValue("underExposureRating"), 0.001f);
+    }
+}
\ No newline at end of file
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/tests/src/androidx/media/filterfw/samples/simplecamera/FaceSquareFilterTest.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/tests/src/androidx/media/filterfw/samples/simplecamera/FaceSquareFilterTest.java
new file mode 100644
index 0000000..02387fe
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/tests/src/androidx/media/filterfw/samples/simplecamera/FaceSquareFilterTest.java
@@ -0,0 +1,172 @@
+/*
+ * Copyright 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package androidx.media.filterfw.samples.simplecamera;
+
+
+import android.content.res.AssetManager;
+import android.graphics.Bitmap;
+import android.graphics.BitmapFactory;
+import android.net.Uri;
+import android.provider.MediaStore;
+
+import androidx.media.filterfw.Filter;
+import androidx.media.filterfw.FrameImage2D;
+import androidx.media.filterfw.FrameType;
+import androidx.media.filterfw.FrameValues;
+import androidx.media.filterfw.MffContext;
+import androidx.media.filterfw.MffFilterTestCase;
+
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.TimeoutException;
+
+import android.hardware.Camera;
+import android.hardware.Camera.Face;
+import android.graphics.Rect;
+
+
+/**
+ * Tests FaceSquareFilter by feeding it a known image plus one hard-coded face rectangle,
+ * then re-drawing the expected rectangle edges locally and comparing the result against
+ * the filter's output pixel-by-pixel (allowing up to 5% mismatch).
+ */
+public class FaceSquareFilterTest extends MffFilterTestCase {
+
+    private AssetManager assetMgr = null;
+    @Override
+    protected Filter createFilter(MffContext mffContext) {
+        assetMgr = mffContext.getApplicationContext().getAssets();
+        return new FaceSquareFilter(mffContext, "faceSquareFilter");
+    }
+
+    public void testFaceSquareFilter() throws Exception{
+        final int INPUT_WIDTH = 1536;
+        final int INPUT_HEIGHT = 2048;
+        FrameImage2D image =
+                createFrame(FrameType.image2D(FrameType.ELEMENT_RGBA8888, FrameType.READ_CPU),
+                        new int[] {INPUT_WIDTH,INPUT_HEIGHT}).asFrameImage2D();
+
+        FrameValues facesFrame = createFrame(FrameType.array(Camera.Face.class), new int[] {1,1}).
+                asFrameValues();
+
+        Bitmap bitmap = BitmapFactory.decodeStream(assetMgr.open("XZZ019.jpg"));
+        image.setBitmap(bitmap);
+        injectInputFrame("image", image);
+
+        // Camera.Face coordinates use the [-1000, 1000] driver coordinate space, hence the
+        // negative values and the offset/scale below — TODO confirm against FaceSquareFilter.
+        Face face = new Face();
+        Rect faceRect = new Rect();
+        // These are the values for image 141 with 1 face
+        faceRect.set(-533, -453, 369, 224);
+        face.rect = faceRect;
+        Face[] faces = new Face[1];
+        faces[0] = face;
+        facesFrame.setValue(faces);
+        injectInputFrame("faces", facesFrame);
+        process();
+
+        // ensure the output image has the rectangle in the right place
+        FrameImage2D outputImage = getOutputFrame("image").asFrameImage2D();
+        // NOTE(review): pixels is sized by getByteCount() (bytes, 4x the pixel count for
+        // ARGB_8888), but getPixels() only fills width*height ints — the tail stays zero
+        // and the comparison loop below also iterates over byte count, not pixel count.
+        int[] pixels = new int[bitmap.getByteCount()];
+        bitmap.getPixels(pixels, 0, bitmap.getWidth(), 0, 0, bitmap.getWidth(),
+                bitmap.getHeight());
+
+        // Map the face rect from the [-1000, 1000] space into bitmap pixel coordinates.
+        final int FACE_X_RANGE = 2000;
+        final int WIDTH_OFFSET = 1000;
+        final int HEIGHT_OFFSET = 1000;
+
+        int top = (faceRect.top+HEIGHT_OFFSET)*bitmap.getHeight()/FACE_X_RANGE;
+        int bottom = (faceRect.bottom+HEIGHT_OFFSET)*bitmap.getHeight()/FACE_X_RANGE;
+        int left = (faceRect.left+WIDTH_OFFSET)*bitmap.getWidth()/FACE_X_RANGE;
+        int right = (faceRect.right+WIDTH_OFFSET)*bitmap.getWidth()/FACE_X_RANGE;
+
+        // Clamp all four edges to the bitmap bounds.
+        if (top < 0) {
+            top = 0;
+        } else if (top > bitmap.getHeight()) {
+            top = bitmap.getHeight();
+        }
+        if (left < 0) {
+            left = 0;
+        } else if (left > bitmap.getWidth()) {
+            left = bitmap.getWidth();
+        }
+        if (bottom > bitmap.getHeight()) {
+            bottom = bitmap.getHeight();
+        } else if (bottom < 0) {
+            bottom = 0;
+        }
+        if (right > bitmap.getWidth()) {
+            right = bitmap.getWidth();
+        } else if (right < 0) {
+            right = 0;
+        }
+
+        // Draw the expected rectangle into the local pixel buffer, channel by channel.
+        // NOTE(review): the (byte) cast makes MAX_BYTE (presumably 255) sign-extend to -1
+        // when stored in this int[]; verify this matches how the filter writes its edges.
+        for (int j = 0; j < (bottom - top); j++) {
+            // Left edge
+            if (left > 0 && top > 0) {
+                pixels[ImageConstants.PIX_CHANNELS * (bitmap.getWidth() * (top + j) + left) +
+                       ImageConstants.RED_OFFSET] = (byte) ImageConstants.MAX_BYTE;
+                pixels[ImageConstants.PIX_CHANNELS * (bitmap.getWidth() * (top + j) + left) +
+                       ImageConstants.GREEN_OFFSET] = (byte) ImageConstants.MAX_BYTE;
+                pixels[ImageConstants.PIX_CHANNELS * (bitmap.getWidth() * (top + j) + left) +
+                       ImageConstants.BLUE_OFFSET] = (byte) ImageConstants.MAX_BYTE;
+            }
+
+            // Right edge
+            if (right > 0 && top > 0) {
+                pixels[ImageConstants.PIX_CHANNELS * (bitmap.getWidth() * (top + j) + right) +
+                       ImageConstants.RED_OFFSET] = (byte) ImageConstants.MAX_BYTE;
+                pixels[ImageConstants.PIX_CHANNELS * (bitmap.getWidth() * (top + j) + right) +
+                       ImageConstants.GREEN_OFFSET] = (byte) ImageConstants.MAX_BYTE;
+                pixels[ImageConstants.PIX_CHANNELS * (bitmap.getWidth() * (top + j) + right) +
+                       ImageConstants.BLUE_OFFSET] = (byte) ImageConstants.MAX_BYTE;
+            }
+
+        }
+        for (int k = 0; k < (right - left); k++) {
+            // Top edge
+            if (top < bitmap.getHeight()) {
+                pixels[ImageConstants.PIX_CHANNELS * (bitmap.getWidth() * top + left + k) +
+                       ImageConstants.RED_OFFSET] = (byte) ImageConstants.MAX_BYTE;
+                pixels[ImageConstants.PIX_CHANNELS * (bitmap.getWidth() * top + left + k) +
+                       ImageConstants.GREEN_OFFSET] = (byte) ImageConstants.MAX_BYTE;
+                pixels[ImageConstants.PIX_CHANNELS * (bitmap.getWidth() * top + left + k) +
+                       ImageConstants.BLUE_OFFSET] = (byte) ImageConstants.MAX_BYTE;
+
+            }
+            // Bottom edge
+            if (bottom < bitmap.getHeight()) {
+                pixels[ImageConstants.PIX_CHANNELS * (bitmap.getWidth() * bottom + left + k) +
+                       ImageConstants.RED_OFFSET] = (byte) ImageConstants.MAX_BYTE;
+                pixels[ImageConstants.PIX_CHANNELS * (bitmap.getWidth() * bottom + left + k) +
+                       ImageConstants.GREEN_OFFSET] = (byte) ImageConstants.MAX_BYTE;
+                pixels[ImageConstants.PIX_CHANNELS * (bitmap.getWidth() * bottom + left + k) +
+                       ImageConstants.BLUE_OFFSET] = (byte) ImageConstants.MAX_BYTE;
+            }
+        }
+
+        // Compare the locally drawn expectation against the filter's actual output.
+        Bitmap outputBitmap = outputImage.toBitmap();
+        int[] outputPixels = new int[outputBitmap.getByteCount()];
+        outputBitmap.getPixels(outputPixels, 0, outputBitmap.getWidth(), 0, 0,
+                outputBitmap.getWidth(), outputBitmap.getHeight());
+        int equalCount = 0;
+        for ( int i = 0; i < outputBitmap.getByteCount(); i++) {
+            if (pixels[i] == outputPixels[i])
+                equalCount++;
+        }
+
+        // Tolerate up to 5% mismatching entries before failing.
+        if (equalCount + (0.05f*outputBitmap.getByteCount()) < outputBitmap.getByteCount()) {
+            // Assertion will fail if condition is true
+            // NOTE(review): assertEquals takes (expected, actual) — these arguments look
+            // swapped, though the assertion still fails as intended when reached.
+            assertEquals(equalCount, outputBitmap.getByteCount());
+        }
+    }
+}
\ No newline at end of file
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/tests/src/androidx/media/filterfw/samples/simplecamera/FloatArrayToSizeFilterTest.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/tests/src/androidx/media/filterfw/samples/simplecamera/FloatArrayToSizeFilterTest.java
new file mode 100644
index 0000000..0f4c6d0
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/tests/src/androidx/media/filterfw/samples/simplecamera/FloatArrayToSizeFilterTest.java
@@ -0,0 +1,49 @@
+/*
+ * Copyright 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package androidx.media.filterfw.samples.simplecamera;
+
+import androidx.media.filterfw.samples.simplecamera.FloatArrayToSizeFilter;
+import androidx.media.filterfw.Filter;
+import androidx.media.filterfw.FrameValue;
+import androidx.media.filterfw.FrameType;
+import androidx.media.filterfw.MffContext;
+import androidx.media.filterfw.MffFilterTestCase;
+
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.TimeoutException;
+
+
+/** Unit test for FloatArrayToSizeFilter: the "size" port reports the array length. */
+public class FloatArrayToSizeFilterTest extends MffFilterTestCase {
+
+    @Override
+    protected Filter createFilter(MffContext mffContext) {
+        return new FloatArrayToSizeFilter(mffContext, "floatArrayToSizeFilter");
+    }
+
+    public void testToSize() throws Exception {
+        float[] values = { 10f, 15f, 25f };
+        FrameValue arrayFrame =
+                createFrame(FrameType.array(float.class), new int[] { 1 }).asFrameValue();
+        arrayFrame.setValue(values);
+        injectInputFrame("array", arrayFrame);
+
+        process();
+
+        Integer size = (Integer) getOutputFrame("size").asFrameValue().getValue();
+        assertEquals(3, size.intValue());
+    }
+}
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/tests/src/androidx/media/filterfw/samples/simplecamera/FloatArrayToStrFilterTest.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/tests/src/androidx/media/filterfw/samples/simplecamera/FloatArrayToStrFilterTest.java
new file mode 100644
index 0000000..bf6a197
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/tests/src/androidx/media/filterfw/samples/simplecamera/FloatArrayToStrFilterTest.java
@@ -0,0 +1,50 @@
+/*
+ * Copyright 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package androidx.media.filterfw.samples.simplecamera;
+
+import androidx.media.filterfw.Filter;
+import androidx.media.filterfw.FrameValue;
+import androidx.media.filterfw.FrameType;
+import androidx.media.filterfw.MffContext;
+import androidx.media.filterfw.MffFilterTestCase;
+import androidx.media.filterfw.samples.simplecamera.FloatArrayToStrFilter;
+
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.TimeoutException;
+
+
+/** Unit test for FloatArrayToStrFilter: the "string" port renders the array as text. */
+public class FloatArrayToStrFilterTest extends MffFilterTestCase {
+
+    @Override
+    protected Filter createFilter(MffContext mffContext) {
+        return new FloatArrayToStrFilter(mffContext, "floatArrayToStrFilter");
+    }
+
+    public void testToStr() throws Exception {
+        float[] values = { 10f, 15f, 25f };
+        FrameValue arrayFrame =
+                createFrame(FrameType.array(float.class), new int[] { 1 }).asFrameValue();
+        arrayFrame.setValue(values);
+        injectInputFrame("array", arrayFrame);
+
+        process();
+
+        String rendered = (String) getOutputFrame("string").asFrameValue().getValue();
+        assertEquals("[10.0, 15.0, 25.0]", rendered);
+    }
+}
\ No newline at end of file
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/tests/src/androidx/media/filterfw/samples/simplecamera/IfElseFilterTest.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/tests/src/androidx/media/filterfw/samples/simplecamera/IfElseFilterTest.java
new file mode 100644
index 0000000..30835ea
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/tests/src/androidx/media/filterfw/samples/simplecamera/IfElseFilterTest.java
@@ -0,0 +1,111 @@
+/*
+ * Copyright 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package androidx.media.filterfw.samples.simplecamera;
+
+
+import android.content.res.AssetManager;
+import android.graphics.Bitmap;
+import android.graphics.BitmapFactory;
+import android.net.Uri;
+import android.provider.MediaStore;
+import androidx.media.filterfw.Filter;
+import androidx.media.filterfw.FrameImage2D;
+import androidx.media.filterfw.FrameType;
+import androidx.media.filterfw.FrameValue;
+import androidx.media.filterfw.MffContext;
+import androidx.media.filterfw.MffFilterTestCase;
+
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.TimeoutException;
+
+/**
+ * Tests IfElseFilter: the boolean "condition" input selects between the "trueResult"
+ * frame (here a small video-sized image) and the "falseResult" frame (a large image).
+ */
+public class IfElseFilterTest extends MffFilterTestCase {
+    private final static int BIG_INPUT_WIDTH = 1536;
+    private final static int BIG_INPUT_HEIGHT = 2048;
+    private final static int SMALL_INPUT_WIDTH = 480;
+    private final static int SMALL_INPUT_HEIGHT = 640;
+
+    private AssetManager assetMgr = null;
+    @Override
+    protected Filter createFilter(MffContext mffContext) {
+        assetMgr = mffContext.getApplicationContext().getAssets();
+        return new IfElseFilter(mffContext, "ifElseFilter");
+    }
+
+    public void testIfElseFilterTrue() throws Exception {
+        FrameImage2D image =
+                createFrame(FrameType.image2D(FrameType.ELEMENT_RGBA8888, FrameType.READ_CPU),
+                        new int[] {BIG_INPUT_WIDTH,BIG_INPUT_HEIGHT}).asFrameImage2D();
+        FrameImage2D video =
+                createFrame(FrameType.image2D(FrameType.ELEMENT_RGBA8888, FrameType.READ_CPU),
+                        new int[] {SMALL_INPUT_WIDTH,SMALL_INPUT_HEIGHT}).asFrameImage2D();
+
+        // Image of legs
+        Bitmap videoBitmap = BitmapFactory.decodeStream(assetMgr.open("0002_000390.jpg"));
+        // Image of a face
+        Bitmap imageBitmap = BitmapFactory.decodeStream(assetMgr.open("XZZ019.jpg"));
+
+        image.setBitmap(imageBitmap);
+        injectInputFrame("falseResult", image);
+        video.setBitmap(videoBitmap);
+        injectInputFrame("trueResult", video);
+
+        FrameValue conditionFrame = createFrame(FrameType.single(boolean.class), new int[] {1}).
+                asFrameValue();
+        conditionFrame.setValue(true);
+        injectInputFrame("condition", conditionFrame);
+
+        process();
+
+        // Ensure that for true, we use the video input
+        FrameImage2D outputImage = getOutputFrame("output").asFrameImage2D();
+        assertEquals(outputImage, video);
+    }
+
+    public void testIfElseFilterFalse() throws Exception {
+
+        FrameImage2D image =
+                createFrame(FrameType.image2D(FrameType.ELEMENT_RGBA8888, FrameType.READ_CPU),
+                        new int[] {BIG_INPUT_WIDTH,BIG_INPUT_HEIGHT}).asFrameImage2D();
+        FrameImage2D video =
+                createFrame(FrameType.image2D(FrameType.ELEMENT_RGBA8888, FrameType.READ_CPU),
+                        new int[] {SMALL_INPUT_WIDTH,SMALL_INPUT_HEIGHT}).asFrameImage2D();
+
+        // Image of legs
+        Bitmap videoBitmap = BitmapFactory.decodeStream(assetMgr.open("0002_000390.jpg"));
+        // Image of a face
+        Bitmap imageBitmap = BitmapFactory.decodeStream(assetMgr.open("XZZ019.jpg"));
+
+        image.setBitmap(imageBitmap);
+        injectInputFrame("falseResult", image);
+        video.setBitmap(videoBitmap);
+        injectInputFrame("trueResult", video);
+
+
+        FrameValue conditionFrame = createFrame(FrameType.single(boolean.class), new int[] {1}).
+                asFrameValue();
+        conditionFrame.setValue(false);
+        injectInputFrame("condition", conditionFrame);
+
+        process();
+
+        // Ensure that for false, we use the image ("falseResult") input
+        FrameImage2D outputImage = getOutputFrame("output").asFrameImage2D();
+        assertEquals(outputImage, image);
+    }
+}
diff --git a/tests/Camera2Tests/SmartCamera/SimpleCamera/tests/src/androidx/media/filterfw/samples/simplecamera/ImageGoodnessFilterTest.java b/tests/Camera2Tests/SmartCamera/SimpleCamera/tests/src/androidx/media/filterfw/samples/simplecamera/ImageGoodnessFilterTest.java
new file mode 100644
index 0000000..43bc090
--- /dev/null
+++ b/tests/Camera2Tests/SmartCamera/SimpleCamera/tests/src/androidx/media/filterfw/samples/simplecamera/ImageGoodnessFilterTest.java
@@ -0,0 +1,179 @@
+/*
+ * Copyright 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package androidx.media.filterfw.samples.simplecamera;
+
+import androidx.media.filterfw.Filter;
+import androidx.media.filterfw.FrameType;
+import androidx.media.filterfw.FrameValue;
+import androidx.media.filterfw.MffContext;
+import androidx.media.filterfw.MffFilterTestCase;
+
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.TimeoutException;
+
+/**
+ * Tests ImageGoodnessFilter: feeds it the six quality metrics it consumes and
+ * checks the rating string it emits on the "goodOrBadPic" output port.
+ */
+public class ImageGoodnessFilterTest extends MffFilterTestCase {
+
+    // Expected rating strings produced by the filter, ordered worst to best.
+    private static final String AWFUL_STRING = "Awful Picture";
+    private static final String BAD_STRING = "Bad Picture";
+    private static final String OK_STRING = "Ok Picture";
+    private static final String GOOD_STRING = "Good Picture!";
+    private static final String GREAT_STRING = "Great Picture!";
+
+    @Override
+    protected Filter createFilter(MffContext mffContext) {
+        return new ImageGoodnessFilter(mffContext, "goodnessFilter");
+    }
+
+    // Wraps a single float in a one-element FrameValue.
+    private FrameValue newFloatFrame(float value) {
+        FrameValue frame = createFrame(FrameType.single(), new int[] { 1 }).asFrameValue();
+        frame.setValue(value);
+        return frame;
+    }
+
+    /**
+     * Injects the six input frames the goodness filter consumes, runs the
+     * graph, and asserts the rating string on the "goodOrBadPic" output.
+     */
+    private void assertGoodness(String expected, float sharpness, float overExposure,
+            float underExposure, float colorfulness, float contrastRating,
+            float[] motionValues) throws Exception {
+        injectInputFrame("sharpness", newFloatFrame(sharpness));
+        injectInputFrame("overExposure", newFloatFrame(overExposure));
+        injectInputFrame("underExposure", newFloatFrame(underExposure));
+        injectInputFrame("colorfulness", newFloatFrame(colorfulness));
+        injectInputFrame("contrastRating", newFloatFrame(contrastRating));
+
+        FrameValue motionFrame = createFrame(FrameType.array(), new int[] { 1 }).asFrameValue();
+        motionFrame.setValue(motionValues);
+        injectInputFrame("motionValues", motionFrame);
+
+        process();
+        assertEquals(expected,
+                (String) getOutputFrame("goodOrBadPic").asFrameValue().getValue());
+    }
+
+    public void testAwfulPicture() throws Exception {
+        // High motion values drag an already-poor shot down to the lowest rating.
+        assertGoodness(AWFUL_STRING, 10f, 0.39f, 0.25f, 2.1f, 0.18f,
+                new float[] { 9.0f, 3.0f, 2.0f });
+    }
+
+    public void testBadPicture() throws Exception {
+        // Same poor metrics as the "awful" case but with a steady camera.
+        assertGoodness(BAD_STRING, 10f, 0.39f, 0.25f, 2.1f, 0.18f,
+                new float[] { 0.0f, 0.0f, 0.0f });
+    }
+
+    public void testOkPicture() throws Exception {
+        // Improved sharpness (30 vs 10) lifts the rating one step.
+        assertGoodness(OK_STRING, 30f, 0.39f, 0.25f, 2.1f, 0.18f,
+                new float[] { 0.0f, 0.0f, 0.0f });
+    }
+
+    public void testGoodPicture() throws Exception {
+        // Sharp (50) and well exposed on both ends.
+        assertGoodness(GOOD_STRING, 50f, 0.01f, 0.01f, 2.1f, 0.18f,
+                new float[] { 0.0f, 0.0f, 0.0f });
+    }
+
+    public void testGreatPicture() throws Exception {
+        // Sharp, well exposed, and higher contrast (0.25) earns the top rating.
+        assertGoodness(GREAT_STRING, 50f, 0.01f, 0.02f, 2.1f, 0.25f,
+                new float[] { 0.0f, 0.0f, 0.0f });
+    }
+}