Adding Recognition Service and Listener wrappers
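
A minimal usage sketch of the new public API follows (illustrative only, not
part of this change; the SpeechDemo class name and log tag are hypothetical):

    import android.content.Context;
    import android.content.Intent;
    import android.os.Bundle;
    import android.speech.RecognitionListener;
    import android.speech.RecognitionManager;
    import android.speech.RecognizerIntent;
    import android.util.Log;

    import java.util.ArrayList;

    public class SpeechDemo implements RecognitionListener {
        private static final String TAG = "SpeechDemo";

        private final Intent mIntent;
        private RecognitionManager mManager;

        public SpeechDemo(Context context) {
            mIntent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
            mIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,
                    RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
            if (RecognitionManager.isRecognitionAvailable(context)) {
                mManager = RecognitionManager.createRecognitionManager(
                        context, this, mIntent);
            }
        }

        // Recognition may only be started after onInit() has been delivered.
        public void onInit() {
            mManager.startListening(mIntent);
        }

        public void onResults(Bundle results) {
            ArrayList<String> hypotheses = results.getStringArrayList(
                    RecognitionManager.RECOGNITION_RESULTS_STRING_ARRAY);
            if (hypotheses != null && !hypotheses.isEmpty()) {
                // The first element is the most likely recognition result.
                Log.d(TAG, "best result: " + hypotheses.get(0));
            }
        }

        public void onError(int error) {
            Log.e(TAG, "recognition error: " + error);
        }

        // The remaining callbacks are no-ops in this sketch.
        public void onReadyForSpeech(Bundle params) {}
        public void onBeginningOfSpeech() {}
        public void onRmsChanged(float rmsdB) {}
        public void onBufferReceived(byte[] buffer) {}
        public void onEndOfSpeech() {}
        public void onPartialResults(Bundle partialResults) {}
    }

All callbacks are delivered on the application main thread; startListening()
must not be called before onInit() has fired.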

Change-Id: Ib5068fb6d42b6752d09b0828964b6cbe92d015d3
diff --git a/api/current.xml b/api/current.xml
index 04bfe98..00213d6 100644
--- a/api/current.xml
+++ b/api/current.xml
@@ -134157,6 +134157,320 @@
 </package>
 <package name="android.speech"
 >
+<interface name="RecognitionListener"
+ abstract="true"
+ static="false"
+ final="false"
+ deprecated="not deprecated"
+ visibility="public"
+>
+<method name="onBeginningOfSpeech"
+ return="void"
+ abstract="true"
+ native="false"
+ synchronized="false"
+ static="false"
+ final="false"
+ deprecated="not deprecated"
+ visibility="public"
+>
+</method>
+<method name="onBufferReceived"
+ return="void"
+ abstract="true"
+ native="false"
+ synchronized="false"
+ static="false"
+ final="false"
+ deprecated="not deprecated"
+ visibility="public"
+>
+<parameter name="buffer" type="byte[]">
+</parameter>
+</method>
+<method name="onEndOfSpeech"
+ return="void"
+ abstract="true"
+ native="false"
+ synchronized="false"
+ static="false"
+ final="false"
+ deprecated="not deprecated"
+ visibility="public"
+>
+</method>
+<method name="onError"
+ return="void"
+ abstract="true"
+ native="false"
+ synchronized="false"
+ static="false"
+ final="false"
+ deprecated="not deprecated"
+ visibility="public"
+>
+<parameter name="error" type="int">
+</parameter>
+</method>
+<method name="onInit"
+ return="void"
+ abstract="true"
+ native="false"
+ synchronized="false"
+ static="false"
+ final="false"
+ deprecated="not deprecated"
+ visibility="public"
+>
+</method>
+<method name="onPartialResults"
+ return="void"
+ abstract="true"
+ native="false"
+ synchronized="false"
+ static="false"
+ final="false"
+ deprecated="not deprecated"
+ visibility="public"
+>
+<parameter name="partialResults" type="android.os.Bundle">
+</parameter>
+</method>
+<method name="onReadyForSpeech"
+ return="void"
+ abstract="true"
+ native="false"
+ synchronized="false"
+ static="false"
+ final="false"
+ deprecated="not deprecated"
+ visibility="public"
+>
+<parameter name="params" type="android.os.Bundle">
+</parameter>
+</method>
+<method name="onResults"
+ return="void"
+ abstract="true"
+ native="false"
+ synchronized="false"
+ static="false"
+ final="false"
+ deprecated="not deprecated"
+ visibility="public"
+>
+<parameter name="results" type="android.os.Bundle">
+</parameter>
+</method>
+<method name="onRmsChanged"
+ return="void"
+ abstract="true"
+ native="false"
+ synchronized="false"
+ static="false"
+ final="false"
+ deprecated="not deprecated"
+ visibility="public"
+>
+<parameter name="rmsdB" type="float">
+</parameter>
+</method>
+</interface>
+<class name="RecognitionManager"
+ extends="java.lang.Object"
+ abstract="false"
+ static="false"
+ final="false"
+ deprecated="not deprecated"
+ visibility="public"
+>
+<method name="cancel"
+ return="void"
+ abstract="false"
+ native="false"
+ synchronized="false"
+ static="false"
+ final="false"
+ deprecated="not deprecated"
+ visibility="public"
+>
+</method>
+<method name="createRecognitionManager"
+ return="android.speech.RecognitionManager"
+ abstract="false"
+ native="false"
+ synchronized="false"
+ static="true"
+ final="false"
+ deprecated="not deprecated"
+ visibility="public"
+>
+<parameter name="context" type="android.content.Context">
+</parameter>
+<parameter name="listener" type="android.speech.RecognitionListener">
+</parameter>
+<parameter name="recognizerIntent" type="android.content.Intent">
+</parameter>
+</method>
+<method name="destroy"
+ return="void"
+ abstract="false"
+ native="false"
+ synchronized="false"
+ static="false"
+ final="false"
+ deprecated="not deprecated"
+ visibility="public"
+>
+</method>
+<method name="isRecognitionAvailable"
+ return="boolean"
+ abstract="false"
+ native="false"
+ synchronized="false"
+ static="true"
+ final="false"
+ deprecated="not deprecated"
+ visibility="public"
+>
+<parameter name="context" type="android.content.Context">
+</parameter>
+</method>
+<method name="startListening"
+ return="void"
+ abstract="false"
+ native="false"
+ synchronized="false"
+ static="false"
+ final="false"
+ deprecated="not deprecated"
+ visibility="public"
+>
+<parameter name="recognizerIntent" type="android.content.Intent">
+</parameter>
+</method>
+<method name="stopListening"
+ return="void"
+ abstract="false"
+ native="false"
+ synchronized="false"
+ static="false"
+ final="false"
+ deprecated="not deprecated"
+ visibility="public"
+>
+</method>
+<field name="AUDIO_ERROR"
+ type="int"
+ transient="false"
+ volatile="false"
+ value="3"
+ static="true"
+ final="true"
+ deprecated="not deprecated"
+ visibility="public"
+>
+</field>
+<field name="CLIENT_ERROR"
+ type="int"
+ transient="false"
+ volatile="false"
+ value="5"
+ static="true"
+ final="true"
+ deprecated="not deprecated"
+ visibility="public"
+>
+</field>
+<field name="MANAGER_NOT_INITIALIZED_ERROR"
+ type="int"
+ transient="false"
+ volatile="false"
+ value="9"
+ static="true"
+ final="true"
+ deprecated="not deprecated"
+ visibility="public"
+>
+</field>
+<field name="NETWORK_ERROR"
+ type="int"
+ transient="false"
+ volatile="false"
+ value="2"
+ static="true"
+ final="true"
+ deprecated="not deprecated"
+ visibility="public"
+>
+</field>
+<field name="NETWORK_TIMEOUT_ERROR"
+ type="int"
+ transient="false"
+ volatile="false"
+ value="1"
+ static="true"
+ final="true"
+ deprecated="not deprecated"
+ visibility="public"
+>
+</field>
+<field name="NO_MATCH_ERROR"
+ type="int"
+ transient="false"
+ volatile="false"
+ value="7"
+ static="true"
+ final="true"
+ deprecated="not deprecated"
+ visibility="public"
+>
+</field>
+<field name="RECOGNITION_RESULTS_STRING_ARRAY"
+ type="java.lang.String"
+ transient="false"
+ volatile="false"
+ value="&quot;recognition_results_string_array&quot;"
+ static="true"
+ final="true"
+ deprecated="not deprecated"
+ visibility="public"
+>
+</field>
+<field name="SERVER_BUSY_ERROR"
+ type="int"
+ transient="false"
+ volatile="false"
+ value="8"
+ static="true"
+ final="true"
+ deprecated="not deprecated"
+ visibility="public"
+>
+</field>
+<field name="SERVER_ERROR"
+ type="int"
+ transient="false"
+ volatile="false"
+ value="4"
+ static="true"
+ final="true"
+ deprecated="not deprecated"
+ visibility="public"
+>
+</field>
+<field name="SPEECH_TIMEOUT_ERROR"
+ type="int"
+ transient="false"
+ volatile="false"
+ value="6"
+ static="true"
+ final="true"
+ deprecated="not deprecated"
+ visibility="public"
+>
+</field>
+</class>
 <class name="RecognizerIntent"
  extends="java.lang.Object"
  abstract="false"
@@ -134275,6 +134589,39 @@
  visibility="public"
 >
 </field>
+<field name="EXTRA_SPEECH_INPUT_COMPLETE_SILENCE_LENGTH_MILLIS"
+ type="java.lang.String"
+ transient="false"
+ volatile="false"
+ value="&quot;android.speech.extras.SPEECH_INPUT_COMPLETE_SILENCE_LENGTH_MILLIS&quot;"
+ static="true"
+ final="true"
+ deprecated="not deprecated"
+ visibility="public"
+>
+</field>
+<field name="EXTRA_SPEECH_INPUT_MINIMUM_LENGTH_MILLIS"
+ type="java.lang.String"
+ transient="false"
+ volatile="false"
+ value="&quot;android.speech.extras.SPEECH_INPUT_MINIMUM_LENGTH_MILLIS&quot;"
+ static="true"
+ final="true"
+ deprecated="not deprecated"
+ visibility="public"
+>
+</field>
+<field name="EXTRA_SPEECH_INPUT_POSSIBLY_COMPLETE_SILENCE_LENGTH_MILLIS"
+ type="java.lang.String"
+ transient="false"
+ volatile="false"
+ value="&quot;android.speech.extras.SPEECH_INPUT_POSSIBLY_COMPLETE_SILENCE_LENGTH_MILLIS&quot;"
+ static="true"
+ final="true"
+ deprecated="not deprecated"
+ visibility="public"
+>
+</field>
 <field name="LANGUAGE_MODEL_FREE_FORM"
  type="java.lang.String"
  transient="false"
diff --git a/core/java/android/speech/IRecognitionListener.aidl b/core/java/android/speech/IRecognitionListener.aidl
index 2da2258..5b48bd2 100644
--- a/core/java/android/speech/IRecognitionListener.aidl
+++ b/core/java/android/speech/IRecognitionListener.aidl
@@ -17,7 +17,6 @@
 package android.speech;
 
 import android.os.Bundle;
-import android.speech.RecognitionResult;
 
 /**
  * Listener for speech recognition events, used with RecognitionService.
@@ -26,35 +25,55 @@
  *  {@hide}
  */
 interface IRecognitionListener {
-    /** Called when the endpointer is ready for the user to start speaking. */
-    void onReadyForSpeech(in Bundle noiseParams);
+    /**
+     * Called when the endpointer is ready for the user to start speaking.
+     *
+     * @param params parameters set by the recognition service. Reserved for future use.
+     */
+    void onReadyForSpeech(in Bundle params);
 
-    /** The user has started to speak. */
+    /**
+     * The user has started to speak.
+     */
     void onBeginningOfSpeech();
 
-    /** The sound level in the audio stream has changed. */
+    /**
+     * The sound level in the audio stream has changed.
+     *
+     * @param rmsdB the new RMS dB value
+     */
     void onRmsChanged(in float rmsdB);
 
     /**
-     * More sound has been received. Buffer is a byte buffer containing
-     * a sequence of 16-bit shorts. 
+     * More sound has been received.
+     *
+     * @param buffer the byte buffer containing a sequence of 16-bit shorts.
      */
     void onBufferReceived(in byte[] buffer);
 
-    /** Called after the user stops speaking. */
+    /**
+     * Called after the user stops speaking.
+     */
     void onEndOfSpeech();
 
     /**
-     * A network or recognition error occurred. The code is defined in
-     * {@link android.speech.RecognitionResult}
+     * A network or recognition error occurred.
+     *
+     * @param error the error code, defined in {@link RecognitionManager}
      */
     void onError(in int error);
 
-    /** 
+    /**
      * Called when recognition results are ready.
-     * @param results: an ordered list of the most likely results (N-best list).
-     * @param key: a key associated with the results. The same results can
-     * be retrieved asynchronously later using the key, if available. 
+     *
+     * @param results a Bundle containing the most likely results (N-best list).
      */
-    void onResults(in List<RecognitionResult> results, long key);
+    void onResults(in Bundle results);
+
+    /**
+     * Called when partial recognition results are ready.
+     *
+     * @param results a Bundle containing the current most likely result.
+     */
+    void onPartialResults(in Bundle results);
 }
diff --git a/core/java/android/speech/IRecognitionService.aidl b/core/java/android/speech/IRecognitionService.aidl
index a18c380..ca9af15 100644
--- a/core/java/android/speech/IRecognitionService.aidl
+++ b/core/java/android/speech/IRecognitionService.aidl
@@ -16,22 +16,41 @@
 
 package android.speech;
 
+import android.os.Bundle;
 import android.content.Intent;
 import android.speech.IRecognitionListener;
-import android.speech.RecognitionResult;
 
-// A Service interface to speech recognition. Call startListening when
-// you want to begin capturing audio; RecognitionService will automatically
-// determine when the user has finished speaking, stream the audio to the
-// recognition servers, and notify you when results are ready.
-/** {@hide} */
+/**
+ * A Service interface to speech recognition. Call startListening when
+ * you want to begin capturing audio; RecognitionService will automatically
+ * determine when the user has finished speaking, stream the audio to the
+ * recognition servers, and notify you when results are ready. In most cases,
+ * this class should not be used directly; instead, use {@link RecognitionManager}
+ * to access the recognition service.
+ * {@hide}
+ */
 interface IRecognitionService {
-    // Start listening for speech. Can only call this from one thread at once.
-    // see RecognizerIntent.java for constants used to specify the intent.
-    void startListening(in Intent recognizerIntent,
-        in IRecognitionListener listener);
-        
-    List<RecognitionResult> getRecognitionResults(in long key);
+    /**
+     * Starts listening for speech. Note that the recognition service supports only one
+     * listener; if this method is called from two different threads, only the most recent
+     * caller will receive the notifications.
+     *
+     * @param recognizerIntent the intent from which the invocation occurred. Additionally,
+     *        this intent can contain extra parameters to manipulate the behavior of the recognition
+     *        client. For more information see {@link RecognizerIntent}.
+     * @param listener to receive callbacks
+     */
+    void startListening(in Intent recognizerIntent, in IRecognitionListener listener);
 
+    /**
+     * Stops listening for speech. Speech captured so far will be recognized as
+     * if the user had stopped speaking at this point. This method has no effect unless it
+     * is called while speech is being captured.
+     */
+    void stopListening();
+
+    /**
+     * Cancels the speech recognition.
+     */
     void cancel();
 }
diff --git a/core/java/android/speech/RecognitionListener.java b/core/java/android/speech/RecognitionListener.java
new file mode 100644
index 0000000..eab3f40
--- /dev/null
+++ b/core/java/android/speech/RecognitionListener.java
@@ -0,0 +1,96 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.speech;
+
+import android.content.Intent;
+import android.os.Bundle;
+
+/**
+ * Used for receiving notifications from the RecognitionManager when
+ * recognition-related events occur. All the callbacks are executed on the
+ * application main thread.
+ */
+public interface RecognitionListener {
+
+    /**
+     * Called when RecognitionManager is successfully initialized
+     */
+    void onInit();
+
+    /**
+     * Called when the endpointer is ready for the user to start speaking.
+     * 
+     * @param params parameters set by the recognition service. Reserved for future use.
+     */
+    void onReadyForSpeech(Bundle params);
+
+    /**
+     * The user has started to speak.
+     */
+    void onBeginningOfSpeech();
+
+    /**
+     * The sound level in the audio stream has changed. There is no guarantee that this method will
+     * be called.
+     * 
+     * @param rmsdB the new RMS dB value
+     */
+    void onRmsChanged(float rmsdB);
+
+    /**
+     * More sound has been received. The purpose of this method is to allow the application to
+     * give feedback to the user regarding the captured audio. There is no guarantee that this
+     * method will be called.
+     * 
+     * @param buffer a buffer containing a sequence of big-endian 16-bit integers representing a
+     *        single channel audio stream. The sample rate is implementation dependent.
+     */
+    void onBufferReceived(byte[] buffer);
+
+    /**
+     * Called after the user stops speaking.
+     */
+    void onEndOfSpeech();
+
+    /**
+     * A network or recognition error occurred.
+     * 
+     * @param error the error code, defined in {@link RecognitionManager}
+     */
+    void onError(int error);
+
+    /**
+     * Called when recognition results are ready.
+     * 
+     * @param results the recognition results. To retrieve the results in {@code
+     *        ArrayList&lt;String&gt;} format use {@link Bundle#getStringArrayList(String)} with
+     *        {@link RecognitionManager#RECOGNITION_RESULTS_STRING_ARRAY} as a parameter
+     */
+    void onResults(Bundle results);
+
+    /**
+     * Called when partial recognition results are available. The callback might be called at any
+     * time between {@link #onBeginningOfSpeech()} and {@link #onResults(Bundle)} when partial
+     * results are ready. This method may be called zero, one or multiple times for each call to
+     * {@link RecognitionManager#startListening(Intent)}, depending on the speech recognition
+     * service implementation.
+     * 
+     * @param partialResults the returned results. To retrieve the results in
+     *        ArrayList&lt;String&gt; format use {@link Bundle#getStringArrayList(String)} with
+     *        {@link RecognitionManager#RECOGNITION_RESULTS_STRING_ARRAY} as a parameter
+     */
+    void onPartialResults(Bundle partialResults);
+
+}
diff --git a/core/java/android/speech/RecognitionManager.java b/core/java/android/speech/RecognitionManager.java
new file mode 100644
index 0000000..79ae480
--- /dev/null
+++ b/core/java/android/speech/RecognitionManager.java
@@ -0,0 +1,321 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.speech;
+
+import android.content.ComponentName;
+import android.content.Context;
+import android.content.Intent;
+import android.content.ServiceConnection;
+import android.content.pm.ResolveInfo;
+import android.os.Bundle;
+import android.os.IBinder;
+import android.os.RemoteException;
+import android.util.Log;
+
+import java.util.List;
+
+/**
+ * This class provides access to the speech recognition service. Do not instantiate this class
+ * directly; instead, call
+ * {@link RecognitionManager#createRecognitionManager(Context, RecognitionListener, Intent)}. This
+ * class is not thread safe and must be synchronized externally if accessed from multiple threads.
+ */
+public class RecognitionManager {
+    /** DEBUG value to enable verbose debug prints */
+    private final static boolean DBG = false;
+
+    /** Log messages identifier */
+    private static final String TAG = "RecognitionManager";
+
+    /**
+     * Used to retrieve an {@code ArrayList&lt;String&gt;} from the {@link Bundle} passed to the
+     * {@link RecognitionListener#onResults(Bundle)} and
+     * {@link RecognitionListener#onPartialResults(Bundle)} methods. These strings are the possible
+     * recognition results, where the first element is the most likely candidate.
+     */
+    public static final String RECOGNITION_RESULTS_STRING_ARRAY =
+            "recognition_results_string_array";
+
+    /** The actual RecognitionService endpoint */
+    private IRecognitionService mService;
+
+    /** The connection to the actual service */
+    private Connection mConnection;
+
+    /** Context with which the manager was created */
+    private final Context mContext;
+
+    /** Listener that will receive all the callbacks */
+    private final RecognitionListener mListener;
+
+    /** Helper class wrapping the IRecognitionListener */
+    private final InternalRecognitionListener mInternalRecognitionListener;
+
+    /** Network operation timed out. */
+    public static final int NETWORK_TIMEOUT_ERROR = 1;
+
+    /** Other network related errors. */
+    public static final int NETWORK_ERROR = 2;
+
+    /** Audio recording error. */
+    public static final int AUDIO_ERROR = 3;
+
+    /** Server sends error status. */
+    public static final int SERVER_ERROR = 4;
+
+    /** Other client side errors. */
+    public static final int CLIENT_ERROR = 5;
+
+    /** No speech input */
+    public static final int SPEECH_TIMEOUT_ERROR = 6;
+
+    /** No recognition result matched. */
+    public static final int NO_MATCH_ERROR = 7;
+
+    /** RecognitionService busy. */
+    public static final int SERVER_BUSY_ERROR = 8;
+
+    /**
+     * The RecognitionManager is not initialized yet, most probably because
+     * {@link RecognitionListener#onInit()} has not been called.
+     */
+    public static final int MANAGER_NOT_INITIALIZED_ERROR = 9;
+
+    /**
+     * The right way to create a RecognitionManager is to use the
+     * {@link #createRecognitionManager} static factory method.
+     */
+    private RecognitionManager(final RecognitionListener listener, final Context context) {
+        mInternalRecognitionListener = new InternalRecognitionListener();
+        mContext = context;
+        mListener = listener;
+    }
+
+    /**
+     * Basic ServiceConnection which just records mService variable.
+     */
+    private class Connection implements ServiceConnection {
+
+        public synchronized void onServiceConnected(final ComponentName name,
+                final IBinder service) {
+            mService = IRecognitionService.Stub.asInterface(service);
+            if (mListener != null) {
+                mListener.onInit();
+            }
+            if (DBG) Log.d(TAG, "onServiceConnected - Success");
+        }
+
+        public void onServiceDisconnected(final ComponentName name) {
+            mService = null;
+            mConnection = null;
+            if (DBG) Log.d(TAG, "onServiceDisconnected - Success");
+        }
+    }
+
+    /**
+     * Checks whether a speech recognition service is available on the system. If this method
+     * returns {@code false},
+     * {@link RecognitionManager#createRecognitionManager(Context, RecognitionListener, Intent)}
+     * will fail.
+     * 
+     * @param context with which RecognitionManager will be created
+     * @return {@code true} if recognition is available, {@code false} otherwise
+     */
+    public static boolean isRecognitionAvailable(final Context context) {
+        final List<ResolveInfo> list = context.getPackageManager().queryIntentServices(
+                new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH), 0);
+        return list != null && list.size() != 0;
+    }
+
+    /**
+     * Factory method to create a new RecognitionManager
+     * 
+     * @param context in which to create RecognitionManager
+     * @param listener that will receive all the callbacks from the created
+     *        {@link RecognitionManager}
+     * @param recognizerIntent contains initialization parameters for the speech recognizer. The
+     *        intent action should be {@link RecognizerIntent#ACTION_RECOGNIZE_SPEECH}. Future
+     *        versions of this API may add startup parameters for the speech recognizer.
+     * @return null if a recognition service implementation is not installed or if speech
+     *         recognition is not supported by the device, otherwise a new RecognitionManager is
+     *         returned. The created RecognitionManager can only be used after the
+     *         {@link RecognitionListener#onInit()} method has been called.
+     */
+    public static RecognitionManager createRecognitionManager(final Context context,
+            final RecognitionListener listener, final Intent recognizerIntent) {
+        if (context == null || recognizerIntent == null) {
+            throw new IllegalArgumentException(
+                    "Context and recognizerIntent arguments cannot be null");
+        }
+        RecognitionManager manager = new RecognitionManager(listener, context);
+        manager.mConnection = manager.new Connection();
+        if (!context.bindService(recognizerIntent, manager.mConnection, Context.BIND_AUTO_CREATE)) {
+            Log.e(TAG, "bind to recognition service failed");
+            listener.onError(CLIENT_ERROR);
+            return null;
+        }
+        return manager;
+    }
+
+    /**
+     * Checks whether the service is connected.
+     *
+     * @param functionName from which the call originated
+     * @return {@code true} if the service was successfully initialized, {@code false} otherwise
+     */
+    private boolean connectToService(final String functionName) {
+        if (mService != null) {
+            return true;
+        }
+        if (mConnection == null) {
+            if (DBG) Log.d(TAG, "restarting connection to the recognition service");
+            mConnection = new Connection();
+            mContext.bindService(new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH), mConnection,
+                    Context.BIND_AUTO_CREATE);
+        }
+        mInternalRecognitionListener.onError(MANAGER_NOT_INITIALIZED_ERROR);
+        Log.e(TAG, functionName + " was called before service connection was initialized");
+        return false;
+    }
+
+    /**
+     * Starts listening for speech.
+     * 
+     * @param recognizerIntent contains parameters for the recognition to be performed. The intent
+     *        action should be {@link RecognizerIntent#ACTION_RECOGNIZE_SPEECH}. The intent may also
+     *        contain optional extras, see {@link RecognizerIntent}. If these values are not set
+     *        explicitly, default values will be used by the recognizer.
+     */
+    public void startListening(Intent recognizerIntent) {
+        if (recognizerIntent == null) {
+            throw new IllegalArgumentException("recognizerIntent argument cannot be null");
+        }
+        if (!connectToService("startListening")) {
+            return; // service is not connected yet, reconnect in progress
+        }
+        try {
+            mService.startListening(recognizerIntent, mInternalRecognitionListener);
+            if (DBG) Log.d(TAG, "service start listening command succeeded");
+        } catch (final RemoteException e) {
+            Log.e(TAG, "startListening() failed", e);
+            mInternalRecognitionListener.onError(CLIENT_ERROR);
+        }
+    }
+
+    /**
+     * Stops listening for speech. Speech captured so far will be recognized as if the user had
+     * stopped speaking at this point. Note that in the default case, this does not need to be
+     * called, as the speech endpointer will automatically stop the recognizer listening when it
+     * determines speech has completed. However, you can manipulate endpointer parameters directly
+     * using the intent extras defined in {@link RecognizerIntent}, in which case you may sometimes
+     * want to manually call this method to stop listening sooner.
+     */
+    public void stopListening() {
+        if (mService == null) {
+            return; // service is not connected, but no need to reconnect at this point
+        }
+        try {
+            mService.stopListening();
+            if (DBG) Log.d(TAG, "service stop listening command succeeded");
+        } catch (final RemoteException e) {
+            Log.e(TAG, "stopListening() failed", e);
+            mInternalRecognitionListener.onError(CLIENT_ERROR);
+        }
+    }
+
+    /**
+     * Cancels the speech recognition.
+     */
+    public void cancel() {
+        if (mService == null) {
+            return; // service is not connected, but no need to reconnect at this point
+        }
+        try {
+            mService.cancel();
+            if (DBG) Log.d(TAG, "service cancel command succeeded");
+        } catch (final RemoteException e) {
+            Log.e(TAG, "cancel() failed", e);
+            mInternalRecognitionListener.onError(CLIENT_ERROR);
+        }
+    }
+
+    /**
+     * Destroys the RecognitionManager object. Note that after calling this method all method calls
+     * on this object will fail, triggering {@link RecognitionListener#onError}.
+     */
+    public void destroy() {
+        if (mConnection != null) {
+            mContext.unbindService(mConnection);
+        }
+        mService = null;
+    }
+
+    /**
+     * Internal wrapper of IRecognitionListener that propagates the callbacks
+     * to the RecognitionListener.
+     */
+    private class InternalRecognitionListener extends IRecognitionListener.Stub {
+
+        public void onBeginningOfSpeech() {
+            if (mListener != null) {
+                mListener.onBeginningOfSpeech();
+            }
+        }
+
+        public void onBufferReceived(final byte[] buffer) {
+            if (mListener != null) {
+                mListener.onBufferReceived(buffer);
+            }
+        }
+
+        public void onEndOfSpeech() {
+            if (mListener != null) {
+                mListener.onEndOfSpeech();
+            }
+        }
+
+        public void onError(final int error) {
+            if (mListener != null) {
+                mListener.onError(error);
+            }
+        }
+
+        public void onReadyForSpeech(final Bundle noiseParams) {
+            if (mListener != null) {
+                mListener.onReadyForSpeech(noiseParams);
+            }
+        }
+
+        public void onResults(final Bundle results) {
+            if (mListener != null) {
+                mListener.onResults(results);
+            }
+        }
+
+        public void onPartialResults(final Bundle results) {
+            if (mListener != null) {
+                mListener.onPartialResults(results);
+            }
+        }
+
+        public void onRmsChanged(final float rmsdB) {
+            if (mListener != null) {
+                mListener.onRmsChanged(rmsdB);
+            }
+        }
+    }
+}
diff --git a/core/java/android/speech/RecognitionResult.aidl b/core/java/android/speech/RecognitionResult.aidl
deleted file mode 100644
index 59e53ab..0000000
--- a/core/java/android/speech/RecognitionResult.aidl
+++ /dev/null
@@ -1,19 +0,0 @@
-/*
- * Copyright (C) 2009 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.speech;
-
-parcelable RecognitionResult;
diff --git a/core/java/android/speech/RecognitionResult.java b/core/java/android/speech/RecognitionResult.java
deleted file mode 100644
index 95715ee..0000000
--- a/core/java/android/speech/RecognitionResult.java
+++ /dev/null
@@ -1,220 +0,0 @@
-/*
- * Copyright (C) 2009 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.speech;
-
-import android.os.Parcel;
-import android.os.Parcelable;
-
-/**
- * RecognitionResult is a passive object that stores a single recognized query
- * and its search result.
- * 
- * TODO: Revisit and improve this class, reconciling the different types of actions and
- * the different ways they are represented. Maybe we should have a separate result object
- * for each type, and put them (type/value) in bundle?
- * {@hide}
- */
-public class RecognitionResult implements Parcelable {
-    /**
-     * Status of the recognize request.
-     */
-    public static final int NETWORK_TIMEOUT = 1; // Network operation timed out.
-
-    public static final int NETWORK_ERROR = 2; // Other network related errors.
-
-    public static final int AUDIO_ERROR = 3; // Audio recording error.
-
-    public static final int SERVER_ERROR = 4; // Server sends error status.
-
-    public static final int CLIENT_ERROR = 5; // Other client side errors.
-
-    public static final int SPEECH_TIMEOUT = 6; // No speech input
-
-    public static final int NO_MATCH = 7; // No recognition result matched.
-
-    public static final int SERVICE_BUSY = 8; // RecognitionService busy.
-
-    /**
-     * Type of the recognition results.
-     */
-    public static final int RAW_RECOGNITION_RESULT = 0;
-
-    public static final int WEB_SEARCH_RESULT = 1;
-
-    public static final int CONTACT_RESULT = 2;
-    
-    public static final int ACTION_RESULT = 3;
-
-    /**
-     * A factory method to create a raw RecognitionResult
-     * 
-     * @param sentence the recognized text.
-     */
-    public static RecognitionResult newRawRecognitionResult(String sentence) {
-        return new RecognitionResult(RAW_RECOGNITION_RESULT, sentence, null, null);
-    }
-
-    /**
-     * A factory method to create a RecognitionResult for contacts.
-     * 
-     * @param contact the contact name.
-     * @param phoneType the phone type.
-     * @param callAction whether this result included a command to "call", or
-     *            just the contact name.
-     */
-    public static RecognitionResult newContactResult(String contact, int phoneType,
-            boolean callAction) {
-        return new RecognitionResult(CONTACT_RESULT, contact, phoneType, callAction);
-    }
-
-    /**
-     * A factory method to create a RecognitionResult for a web search query.
-     * 
-     * @param query the query string.
-     * @param html the html page of the search result.
-     * @param url the url that performs the search with the query.
-     */
-    public static RecognitionResult newWebResult(String query, String html, String url) {
-        return new RecognitionResult(WEB_SEARCH_RESULT, query, html, url);
-    }
-    
-    /**
-     * A factory method to create a RecognitionResult for an action.
-     * 
-     * @param action the action type
-     * @param query the query string associated with that action.
-     */
-    public static RecognitionResult newActionResult(int action, String query) {
-        return new RecognitionResult(ACTION_RESULT, action, query);
-    }
-
-    public static final Parcelable.Creator<RecognitionResult> CREATOR =
-            new Parcelable.Creator<RecognitionResult>() {
-
-                public RecognitionResult createFromParcel(Parcel in) {
-                    return new RecognitionResult(in);
-                }
-        
-                public RecognitionResult[] newArray(int size) {
-                    return new RecognitionResult[size];
-                }
-            };
-
-    /**
-     * Result type.
-     */
-    public final int mResultType;
-
-    /**
-     * The recognized string when mResultType is WEB_SEARCH_RESULT. The name of
-     * the contact when mResultType is CONTACT_RESULT. The relevant query when
-     * mResultType is ACTION_RESULT.
-     */
-    public final String mText;
-
-    /**
-     * The HTML result page for the query. If this is null, then the application
-     * must use the url field to get the HTML result page.
-     */
-    public final String mHtml;
-
-    /**
-     * The url to get the result page for the query string. The application must
-     * use this url instead of performing the search with the query.
-     */
-    public final String mUrl;
-
-    /**
-     * Phone number type. This is valid only when mResultType == CONTACT_RESULT.
-     */
-    public final int mPhoneType;
-    
-    /**
-     * Action type.  This is valid only when mResultType == ACTION_RESULT.
-     */
-    public final int mAction;
-
-    /**
-     * Whether a contact recognition result included a command to "call". This
-     * is valid only when mResultType == CONTACT_RESULT.
-     */
-    public final boolean mCallAction;
-
-    private RecognitionResult(int type, int action, String query) {
-        mResultType = type;
-        mAction = action;
-        mText = query;
-        mHtml = null;
-        mUrl = null;
-        mPhoneType = -1;
-        mCallAction = false;
-    }
-    
-    private RecognitionResult(int type, String query, String html, String url) {
-        mResultType = type;
-        mText = query;
-        mHtml = html;
-        mUrl = url;
-        mPhoneType = -1;
-        mAction = -1;
-        mCallAction = false;
-    }
-
-    private RecognitionResult(int type, String query, int phoneType, boolean callAction) {
-        mResultType = type;
-        mText = query;
-        mPhoneType = phoneType;
-        mHtml = null;
-        mUrl = null;
-        mAction = -1;
-        mCallAction = callAction;
-    }
-
-    private RecognitionResult(Parcel in) {
-        mResultType = in.readInt();
-        mText = in.readString();
-        mHtml = in.readString();
-        mUrl = in.readString();
-        mPhoneType = in.readInt();
-        mAction = in.readInt();
-        mCallAction = (in.readInt() == 1);
-    }
-
-    public void writeToParcel(Parcel out, int flags) {
-        out.writeInt(mResultType);
-        out.writeString(mText);
-        out.writeString(mHtml);
-        out.writeString(mUrl);
-        out.writeInt(mPhoneType);
-        out.writeInt(mAction);
-        out.writeInt(mCallAction ? 1 : 0);
-    }
-
-    @Override
-    public String toString() {
-        String resultType[] = {
-                "RAW", "WEB", "CONTACT", "ACTION"
-        };
-        return "[type=" + resultType[mResultType] + ", text=" + mText + ", mUrl=" + mUrl
-                + ", html=" + mHtml + ", mAction=" + mAction + ", mCallAction=" + mCallAction + "]";
-    }
-
-    public int describeContents() {
-        // no special description
-        return 0;
-    }
-}
diff --git a/core/java/android/speech/RecognitionServiceUtil.java b/core/java/android/speech/RecognitionServiceUtil.java
deleted file mode 100644
index 4207543..0000000
--- a/core/java/android/speech/RecognitionServiceUtil.java
+++ /dev/null
@@ -1,101 +0,0 @@
-/*
- * Copyright (C) 2009 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.speech;
-
-import android.content.ComponentName;
-import android.content.Intent;
-import android.content.ServiceConnection;
-import android.os.Bundle;
-import android.os.IBinder;
-import android.os.RemoteException;
-import android.speech.RecognitionResult;
-import android.util.Log;
-
-import java.util.List;
-
-/**
- * Utils for Google's network-based speech recognizer, which lets you perform
- * speech-to-text translation through RecognitionService. IRecognitionService
- * and IRecognitionListener are the core interfaces; you begin recognition
- * through IRecognitionService and subscribe to callbacks about when the user
- * stopped speaking, results come in, errors, etc. through IRecognitionListener.
- * RecognitionServiceUtil includes default IRecognitionListener and
- * ServiceConnection implementations to reduce the amount of boilerplate.
- *
- * The Service provides no user interface. See RecognitionActivity if you
- * want the standard voice search UI.
- *
- * Below is a small skeleton of how to use the recognizer:
- *
- * ServiceConnection conn = new RecognitionServiceUtil.Connection();
- * mContext.bindService(RecognitionServiceUtil.sDefaultIntent,
- *     conn, Context.BIND_AUTO_CREATE);
- * IRecognitionListener listener = new RecognitionServiceWrapper.NullListener() {
- *         public void onResults(List<String> results) {
- *             // Do something with recognition transcripts
- *         }
- *     }
- *
- * // Must wait for conn.mService to be populated, then call below
- * conn.mService.startListening(null, listener);
- *
- * {@hide}
- */
-public class RecognitionServiceUtil {
-    public static final Intent sDefaultIntent = new Intent(
-            RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
-
-    // Recognize request parameters
-    public static final String USE_LOCATION = "useLocation";
-    public static final String CONTACT_AUTH_TOKEN = "contactAuthToken";
-    
-    // Bundles
-    public static final String NOISE_LEVEL = "NoiseLevel";
-    public static final String SIGNAL_NOISE_RATIO = "SignalNoiseRatio";
-
-    private RecognitionServiceUtil() {}
-
-    /**
-     * IRecognitionListener which does nothing in response to recognition
-     * callbacks. You can subclass from this and override only the methods
-     * whose events you want to respond to.
-     */
-    public static class NullListener extends IRecognitionListener.Stub {
-        public void onReadyForSpeech(Bundle bundle) {}
-        public void onBeginningOfSpeech() {}
-        public void onRmsChanged(float rmsdB) {}
-        public void onBufferReceived(byte[] buf) {}
-        public void onEndOfSpeech() {}
-        public void onError(int error) {}
-        public void onResults(List<RecognitionResult> results, long key) {}
-    }
-
-    /**
-     * Basic ServiceConnection which just records mService variable.
-     */
-    public static class Connection implements ServiceConnection {
-        public IRecognitionService mService;
-
-        public synchronized void onServiceConnected(ComponentName name, IBinder service) {
-            mService = IRecognitionService.Stub.asInterface(service);
-        }
-
-        public void onServiceDisconnected(ComponentName name) {
-            mService = null;
-        }
-    }
-}
diff --git a/core/java/android/speech/RecognizerIntent.java b/core/java/android/speech/RecognizerIntent.java
index ba06744..49991bd 100644
--- a/core/java/android/speech/RecognizerIntent.java
+++ b/core/java/android/speech/RecognizerIntent.java
@@ -84,6 +84,42 @@
     public static final String ACTION_WEB_SEARCH = "android.speech.action.WEB_SEARCH";
 
     /**
+     * The minimum length of an utterance. We will not stop recording before this amount of time.
+     * 
+     * Note that it is extremely rare you'd want to specify this value in an intent. If you don't
+     * have a very good reason to change these, you should leave them as they are. Note also that
+     * certain values may cause undesired or unexpected results - use judiciously! Additionally,
+     * depending on the recognizer implementation, these values may have no effect.
+     */
+    public static final String EXTRA_SPEECH_INPUT_MINIMUM_LENGTH_MILLIS =
+            "android.speech.extras.SPEECH_INPUT_MINIMUM_LENGTH_MILLIS";
+
+    /**
+     * The amount of time that it should take after we stop hearing speech to consider the input
+     * complete. 
+     * 
+     * Note that it is extremely rare you'd want to specify this value in an intent. If
+     * you don't have a very good reason to change these, you should leave them as they are. Note
+     * also that certain values may cause undesired or unexpected results - use judiciously!
+     * Additionally, depending on the recognizer implementation, these values may have no effect.
+     */
+    public static final String EXTRA_SPEECH_INPUT_COMPLETE_SILENCE_LENGTH_MILLIS =
+            "android.speech.extras.SPEECH_INPUT_COMPLETE_SILENCE_LENGTH_MILLIS";
+
+    /**
+     * The amount of time that it should take after we stop hearing speech to consider the input
+     * possibly complete. This is used to prevent the endpointer cutting off during very short
+     * mid-speech pauses. 
+     * 
+     * Note that it is extremely rare you'd want to specify this value in an intent. If
+     * you don't have a very good reason to change these, you should leave them as they are. Note
+     * also that certain values may cause undesired or unexpected results - use judiciously!
+     * Additionally, depending on the recognizer implementation, these values may have no effect.
+     */
+    public static final String EXTRA_SPEECH_INPUT_POSSIBLY_COMPLETE_SILENCE_LENGTH_MILLIS =
+            "android.speech.extras.SPEECH_INPUT_POSSIBLY_COMPLETE_SILENCE_LENGTH_MILLIS";
+
+    /**
      * Informs the recognizer which speech model to prefer when performing
      * {@link #ACTION_RECOGNIZE_SPEECH}. The recognizer uses this
      * information to fine tune the results. This extra is required. Activities implementing
@@ -111,8 +147,9 @@
     public static final String EXTRA_PROMPT = "android.speech.extra.PROMPT";
 
     /**
-     * Optional language override to inform the recognizer that it should expect speech in
-     * a language different than the one set in the {@link java.util.Locale#getDefault()}. 
+     * Optional IETF language tag (as defined by BCP 47), for example "en-US". This tag informs the
+     * recognizer to perform speech recognition in a language different from the one set in
+     * {@link java.util.Locale#getDefault()}.
      */
     public static final String EXTRA_LANGUAGE = "android.speech.extra.LANGUAGE";
 
@@ -121,7 +158,7 @@
      * will choose how many results to return. Must be an integer.
      */
     public static final String EXTRA_MAX_RESULTS = "android.speech.extra.MAX_RESULTS";
-    
+
     /**
      * When the intent is {@link #ACTION_RECOGNIZE_SPEECH}, the speech input activity will
      * return results to you via the activity results mechanism.  Alternatively, if you use this