/*
* Copyright (C) 2008 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example.android.compass;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import javax.microedition.khronos.egl.EGL10;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
import static javax.microedition.khronos.opengles.GL10.*;
import android.app.Activity;
import android.content.Context;
import android.hardware.Sensor;
import android.hardware.SensorEvent;
import android.hardware.SensorEventListener;
import android.hardware.SensorManager;
import android.opengl.GLSurfaceView;
import android.opengl.GLSurfaceView.Renderer;
import android.os.Bundle;
import android.util.Log;
/**
* This class provides a basic demonstration of how to use the
* {@link android.hardware.SensorManager SensorManager} API to draw
* a 3D compass.
*/
public class CompassActivity extends Activity implements Renderer, SensorEventListener {
private GLSurfaceView mGLSurfaceView;
private SensorManager mSensorManager;
private float[] mGData = new float[3];          // latest accelerometer sample (m/s^2)
private float[] mMData = new float[3];          // latest magnetic field sample (uT)
private float[] mR = new float[16];             // rotation matrix from getRotationMatrix()
private float[] mI = new float[16];             // inclination matrix from getRotationMatrix()
private FloatBuffer mVertexBuffer;              // vertices of the three axis lines
private FloatBuffer mColorBuffer;               // per-vertex colors
private ByteBuffer mIndexBuffer;                // line indices
private float[] mOrientation = new float[3];    // azimuth/pitch/roll from getOrientation()
private int mCount;                             // throttles logging in onSensorChanged()
public CompassActivity() {
}
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
mSensorManager = (SensorManager)getSystemService(Context.SENSOR_SERVICE);
mGLSurfaceView = new GLSurfaceView(this);
mGLSurfaceView.setRenderer(this);
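// setRenderer() starts a dedicated rendering thread; by default GLSurfaceView
// renders continuously, so onDrawFrame() below is called for every frame.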
setContentView(mGLSurfaceView);
}
@Override
protected void onResume() {
// Ideally a game should implement onResume() and onPause()
// to take appropriate action when the activity loses focus
super.onResume();
mGLSurfaceView.onResume();
Sensor gsensor = mSensorManager.getDefaultSensor(Sensor.TYPE_ACCELEROMETER);
Sensor msensor = mSensorManager.getDefaultSensor(Sensor.TYPE_MAGNETIC_FIELD);
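// Register for both sensors at SENSOR_DELAY_GAME, a fast update rate suited
// to interactive rendering; the listeners are removed again in onPause().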
mSensorManager.registerListener(this, gsensor, SensorManager.SENSOR_DELAY_GAME);
mSensorManager.registerListener(this, msensor, SensorManager.SENSOR_DELAY_GAME);
}
@Override
protected void onPause() {
// Ideally a game should implement onResume() and onPause()
// to take appropriate action when the activity loses focus
super.onPause();
mGLSurfaceView.onPause();
mSensorManager.unregisterListener(this);
}
public void onDrawFrame(GL10 gl) {
/*
* Usually, the first thing one might want to do is to clear
* the screen. The most efficient way of doing this is to use
* glClear().
*/
gl.glClear(GL10.GL_COLOR_BUFFER_BIT | GL10.GL_DEPTH_BUFFER_BIT);
/*
* Now we're ready to draw some 3D objects
*/
gl.glMatrixMode(GL10.GL_MODELVIEW);
gl.glLoadIdentity();
gl.glTranslatef(0, 0, -2);
/*
* All the magic happens here. The rotation matrix mR reported by
* SensorManager.getRotationMatrix() is a 4x4 row-major matrix.
* We need its inverse for rendering. Because mR is a pure rotation
* (orthonormal), its inverse is simply its transpose. And since
* glMultMatrixf() expects a column-major matrix, passing the
* row-major mR directly gives us exactly that transpose.
*/
gl.glMultMatrixf(mR, 0);
// Test code left over from development: uncomment to render using the
// inclination matrix mI instead of the rotation matrix.
//gl.glMultMatrixf(mI, 0);
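// Bind the vertex and color arrays set up in onSurfaceCreated() and draw the
// three axes (6 indices, read pairwise as GL_LINES segments).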
gl.glVertexPointer(3, GL_FLOAT, 0, mVertexBuffer);
gl.glColorPointer(4, GL_FLOAT, 0, mColorBuffer);
gl.glDrawElements(GL_LINES, 6, GL_UNSIGNED_BYTE, mIndexBuffer);
}
public void onSurfaceChanged(GL10 gl, int width, int height) {
gl.glViewport(0, 0, width, height);
/*
* Set our projection matrix. This doesn't have to be done
* each time we draw, but usually a new projection needs to
* be set when the viewport is resized.
*/
float ratio = (float) width / height;
gl.glMatrixMode(GL10.GL_PROJECTION);
gl.glLoadIdentity();
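// glFrustumf(left, right, bottom, top, zNear, zFar): keep the vertical extent
// fixed at [-1, 1] and scale the horizontal extent by the aspect ratio so the
// compass is not stretched when the surface is resized.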
gl.glFrustumf(-ratio, ratio, -1, 1, 1, 10);
}
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
/*
* By default, OpenGL enables features that improve quality
* but reduce performance. One might want to tweak that
* especially on a software renderer.
*/
gl.glDisable(GL10.GL_DITHER);
/*
* Some one-time OpenGL initialization can be done here,
* possibly based on features of this particular context.
*/
gl.glClearColor(1,1,1,1);
gl.glEnable(GL10.GL_CULL_FACE);
gl.glShadeModel(GL10.GL_SMOOTH);
gl.glEnable(GL10.GL_DEPTH_TEST);
/*
* Create / load our 3D models here.
*/
gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
gl.glEnableClientState(GL10.GL_COLOR_ARRAY);
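// The model is three unit-length line segments that start at the origin
// (vertex 0) and point along the +X, +Y and +Z world axes. Each axis fades
// from black at the origin to red, green or blue at its tip, thanks to
// GL_SMOOTH shading and the per-vertex colors below.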
float vertices[] = {
0,0,0,
1,0,0,
0,1,0,
0,0,1
};
float colors[] = {
0,0,0,0,
1,0,0,1,
0,1,0,1,
0,0,1,1
};
byte indices[] = { 0, 1, 0, 2, 0, 3 };
// Buffers to be passed to gl*Pointer() functions
// must be direct, i.e., they must be placed on the
// native heap where the garbage collector cannot
// move them.
//
// Buffers with multi-byte datatypes (e.g., short, int, float)
// must have their byte order set to native order.
ByteBuffer vbb;
vbb = ByteBuffer.allocateDirect(vertices.length*4);
vbb.order(ByteOrder.nativeOrder());
mVertexBuffer = vbb.asFloatBuffer();
mVertexBuffer.put(vertices);
mVertexBuffer.position(0);
vbb = ByteBuffer.allocateDirect(colors.length*4);
vbb.order(ByteOrder.nativeOrder());
mColorBuffer = vbb.asFloatBuffer();
mColorBuffer.put(colors);
mColorBuffer.position(0);
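// Index data is a plain byte array, so no byte-order adjustment is needed.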
mIndexBuffer = ByteBuffer.allocateDirect(indices.length);
mIndexBuffer.put(indices);
mIndexBuffer.position(0);
}
public void onAccuracyChanged(Sensor sensor, int accuracy) {
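// Accuracy changes are not used by this sample.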
}
public void onSensorChanged(SensorEvent event) {
int type = event.sensor.getType();
float[] data;
if (type == Sensor.TYPE_ACCELEROMETER) {
data = mGData;
} else if (type == Sensor.TYPE_MAGNETIC_FIELD) {
data = mMData;
} else {
// we should not be here.
return;
}
for (int i=0 ; i<3 ; i++)
data[i] = event.values[i];
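// Combine the latest accelerometer and magnetometer samples into the rotation
// matrix mR and the inclination matrix mI. getRotationMatrix() returns false
// when it cannot compute a valid result (for example, while the device is in
// free fall); this sample simply ignores the return value.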
SensorManager.getRotationMatrix(mR, mI, mGData, mMData);
// Test code left over from development: remapCoordinateSystem() can be used
// to adjust mR when the device is used in a different orientation or mounting.
// SensorManager.remapCoordinateSystem(mR,
// SensorManager.AXIS_X, SensorManager.AXIS_Z, mR);
// SensorManager.remapCoordinateSystem(mR,
// SensorManager.AXIS_Y, SensorManager.AXIS_MINUS_X, mR);
SensorManager.getOrientation(mR, mOrientation);
float incl = SensorManager.getInclination(mI);
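// mOrientation now holds azimuth (yaw), pitch and roll in radians, and
// getInclination() returns the magnetic inclination angle in radians.
// Log a reading converted to degrees roughly once every 50 sensor events.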
if (mCount++ > 50) {
final float rad2deg = (float)(180.0f/Math.PI);
mCount = 0;
Log.d("Compass", "yaw: " + (int)(mOrientation[0]*rad2deg) +
" pitch: " + (int)(mOrientation[1]*rad2deg) +
" roll: " + (int)(mOrientation[2]*rad2deg) +
" incl: " + (int)(incl*rad2deg)
);
}
}
}