sdl-ios-xcode: view android-project/src/org/libsdl/app/SDLActivity.java @ 5204:523409574510
Added an OpenGL ES 2.0 renderer, contributed by itsnotabigtruck
This compiles, but is untested.
author   | Sam Lantinga <slouken@libsdl.org>
date     | Sun, 06 Feb 2011 00:00:13 -0800
parents  | a514bfe6952a
children | c66b2a778b7e
line source
package org.libsdl.app;

import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
import javax.microedition.khronos.egl.*;

import android.app.*;
import android.content.*;
import android.view.*;
import android.os.*;
import android.util.Log;
import android.graphics.*;
import android.text.method.*;
import android.text.*;
import android.media.*;
import android.hardware.*;

import java.lang.*;


/**
    SDL Activity
*/
public class SDLActivity extends Activity {

    // Main components
    private static SDLActivity mSingleton;
    private static SDLSurface mSurface;

    // Audio
    private static Thread mAudioThread;
    private static AudioTrack mAudioTrack;

    // Load the .so
    static {
        System.loadLibrary("SDL");
        //System.loadLibrary("SDL_image");
        //System.loadLibrary("SDL_mixer");
        //System.loadLibrary("SDL_ttf");
        System.loadLibrary("main");
    }

    // Setup
    protected void onCreate(Bundle savedInstanceState) {
        //Log.v("SDL", "onCreate()");
        super.onCreate(savedInstanceState);

        // So we can call stuff from static callbacks
        mSingleton = this;

        // Set up the surface
        mSurface = new SDLSurface(getApplication());
        setContentView(mSurface);
        SurfaceHolder holder = mSurface.getHolder();
        holder.setType(SurfaceHolder.SURFACE_TYPE_GPU);
    }

    // Events
    protected void onPause() {
        //Log.v("SDL", "onPause()");
        super.onPause();
    }

    protected void onResume() {
        //Log.v("SDL", "onResume()");
        super.onResume();
    }

    // Messages from the SDLMain thread
    static int COMMAND_CHANGE_TITLE = 1;

    // Handler for the messages
    Handler commandHandler = new Handler() {
        public void handleMessage(Message msg) {
            if (msg.arg1 == COMMAND_CHANGE_TITLE) {
                setTitle((String)msg.obj);
            }
        }
    };

    // Send a message from the SDLMain thread
    void sendCommand(int command, Object data) {
        Message msg = commandHandler.obtainMessage();
        msg.arg1 = command;
        msg.obj = data;
        commandHandler.sendMessage(msg);
    }

    // C functions we call
    public static native void nativeInit();
    public static native void nativeQuit();
    public static native void onNativeResize(int x, int y, int format);
    public static native void onNativeKeyDown(int keycode);
    public static native void onNativeKeyUp(int keycode);
    public static native void onNativeTouch(int action, float x, float y, float p);
    public static native void onNativeAccel(float x, float y, float z);
    public static native void nativeRunAudioThread();

    // Java functions called from C
    public static void createGLContext() {
        mSurface.initEGL();
    }

    public static void flipBuffers() {
        mSurface.flipEGL();
    }

    public static void setActivityTitle(String title) {
        // Called from SDLMain() thread and can't directly affect the view
        mSingleton.sendCommand(COMMAND_CHANGE_TITLE, title);
    }

    // Audio
    private static Object buf;

    public static Object audioInit(int sampleRate, boolean is16Bit, boolean isStereo, int desiredFrames) {
        int channelConfig = isStereo ? AudioFormat.CHANNEL_CONFIGURATION_STEREO : AudioFormat.CHANNEL_CONFIGURATION_MONO;
        int audioFormat = is16Bit ? AudioFormat.ENCODING_PCM_16BIT : AudioFormat.ENCODING_PCM_8BIT;
        int frameSize = (isStereo ? 2 : 1) * (is16Bit ? 2 : 1);

        Log.v("SDL", "SDL audio: wanted " + (isStereo ? "stereo" : "mono") + " " + (is16Bit ? "16-bit" : "8-bit") + " " + ((float)sampleRate / 1000f) + "kHz, " + desiredFrames + " frames buffer");

        // Let the user pick a larger buffer if they really want -- but ye
        // gods they probably shouldn't, the minimums are horrifyingly high
        // latency already
        desiredFrames = Math.max(desiredFrames, (AudioTrack.getMinBufferSize(sampleRate, channelConfig, audioFormat) + frameSize - 1) / frameSize);

        mAudioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate,
                channelConfig, audioFormat, desiredFrames * frameSize, AudioTrack.MODE_STREAM);

        audioStartThread();

        Log.v("SDL", "SDL audio: got " + ((mAudioTrack.getChannelCount() >= 2) ? "stereo" : "mono") + " " + ((mAudioTrack.getAudioFormat() == AudioFormat.ENCODING_PCM_16BIT) ? "16-bit" : "8-bit") + " " + ((float)mAudioTrack.getSampleRate() / 1000f) + "kHz, " + desiredFrames + " frames buffer");

        if (is16Bit) {
            buf = new short[desiredFrames * (isStereo ? 2 : 1)];
        } else {
            buf = new byte[desiredFrames * (isStereo ? 2 : 1)];
        }
        return buf;
    }

    public static void audioStartThread() {
        mAudioThread = new Thread(new Runnable() {
            public void run() {
                mAudioTrack.play();
                nativeRunAudioThread();
            }
        });

        // I'd take REALTIME if I could get it!
        mAudioThread.setPriority(Thread.MAX_PRIORITY);
        mAudioThread.start();
    }

    public static void audioWriteShortBuffer(short[] buffer) {
        for (int i = 0; i < buffer.length; ) {
            int result = mAudioTrack.write(buffer, i, buffer.length - i);
            if (result > 0) {
                i += result;
            } else if (result == 0) {
                try {
                    Thread.sleep(1);
                } catch(InterruptedException e) {
                    // Nom nom
                }
            } else {
                Log.w("SDL", "SDL audio: error return from write(short)");
                return;
            }
        }
    }

    public static void audioWriteByteBuffer(byte[] buffer) {
        for (int i = 0; i < buffer.length; ) {
            int result = mAudioTrack.write(buffer, i, buffer.length - i);
            if (result > 0) {
                i += result;
            } else if (result == 0) {
                try {
                    Thread.sleep(1);
                } catch(InterruptedException e) {
                    // Nom nom
                }
            } else {
                Log.w("SDL", "SDL audio: error return from write(byte)");
                return;
            }
        }
    }

    public static void audioQuit() {
        if (mAudioThread != null) {
            try {
                mAudioThread.join();
            } catch(Exception e) {
                Log.v("SDL", "Problem stopping audio thread: " + e);
            }
            mAudioThread = null;

            //Log.v("SDL", "Finished waiting for audio thread");
        }

        if (mAudioTrack != null) {
            mAudioTrack.stop();
            mAudioTrack = null;
        }
    }
}

/**
    Simple nativeInit() runnable
*/
class SDLMain implements Runnable {
    public void run() {
        // Runs SDL_main()
        SDLActivity.nativeInit();

        //Log.v("SDL", "SDL thread terminated");
    }
}


/**
    SDLSurface. This is what we draw on, so we need to know when it's created
    in order to do anything useful.

    Because of this, that's where we set up the SDL thread
*/
class SDLSurface extends SurfaceView implements SurfaceHolder.Callback,
        View.OnKeyListener, View.OnTouchListener, SensorEventListener {

    // This is what SDL runs in. It invokes SDL_main(), eventually
    private Thread mSDLThread;

    // EGL private objects
    private EGLContext mEGLContext;
    private EGLSurface mEGLSurface;
    private EGLDisplay mEGLDisplay;

    // Sensors
    private static SensorManager mSensorManager;

    // Startup
    public SDLSurface(Context context) {
        super(context);
        getHolder().addCallback(this);

        setFocusable(true);
        setFocusableInTouchMode(true);
        requestFocus();
        setOnKeyListener(this);
        setOnTouchListener(this);

        mSensorManager = (SensorManager)context.getSystemService("sensor");
    }

    // Called when we have a valid drawing surface
    public void surfaceCreated(SurfaceHolder holder) {
        //Log.v("SDL", "surfaceCreated()");

        enableSensor(Sensor.TYPE_ACCELEROMETER, true);
    }

    // Called when we lose the surface
    public void surfaceDestroyed(SurfaceHolder holder) {
        //Log.v("SDL", "surfaceDestroyed()");

        // Send a quit message to the application
        SDLActivity.nativeQuit();

        // Now wait for the SDL thread to quit
        if (mSDLThread != null) {
            try {
                mSDLThread.join();
            } catch(Exception e) {
                Log.v("SDL", "Problem stopping thread: " + e);
            }
            mSDLThread = null;

            //Log.v("SDL", "Finished waiting for SDL thread");
        }

        enableSensor(Sensor.TYPE_ACCELEROMETER, false);
    }

    // Called when the surface is resized
    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
        //Log.v("SDL", "surfaceChanged()");

        int sdlFormat = 0x85151002; // SDL_PIXELFORMAT_RGB565 by default
        switch (format) {
        case PixelFormat.A_8:
            Log.v("SDL", "pixel format A_8");
            break;
        case PixelFormat.LA_88:
            Log.v("SDL", "pixel format LA_88");
            break;
        case PixelFormat.L_8:
            Log.v("SDL", "pixel format L_8");
            break;
        case PixelFormat.RGBA_4444:
            Log.v("SDL", "pixel format RGBA_4444");
            sdlFormat = 0x85421002; // SDL_PIXELFORMAT_RGBA4444
            break;
        case PixelFormat.RGBA_5551:
            Log.v("SDL", "pixel format RGBA_5551");
            sdlFormat = 0x85441002; // SDL_PIXELFORMAT_RGBA5551
            break;
        case PixelFormat.RGBA_8888:
            Log.v("SDL", "pixel format RGBA_8888");
            sdlFormat = 0x86462004; // SDL_PIXELFORMAT_RGBA8888
            break;
        case PixelFormat.RGBX_8888:
            Log.v("SDL", "pixel format RGBX_8888");
            sdlFormat = 0x86262004; // SDL_PIXELFORMAT_RGBX8888
            break;
        case PixelFormat.RGB_332:
            Log.v("SDL", "pixel format RGB_332");
            sdlFormat = 0x84110801; // SDL_PIXELFORMAT_RGB332
            break;
        case PixelFormat.RGB_565:
            Log.v("SDL", "pixel format RGB_565");
            sdlFormat = 0x85151002; // SDL_PIXELFORMAT_RGB565
            break;
        case PixelFormat.RGB_888:
            Log.v("SDL", "pixel format RGB_888");
            // Not sure this is right, maybe SDL_PIXELFORMAT_RGB24 instead?
            sdlFormat = 0x86161804; // SDL_PIXELFORMAT_RGB888
            break;
        default:
            Log.v("SDL", "pixel format unknown " + format);
            break;
        }
        SDLActivity.onNativeResize(width, height, sdlFormat);

        // Now start up the C app thread
        if (mSDLThread == null) {
            mSDLThread = new Thread(new SDLMain(), "SDLThread");
            mSDLThread.start();
        }
    }

    // unused
    public void onDraw(Canvas canvas) {}


    // EGL functions
    public boolean initEGL() {
        Log.v("SDL", "Starting up");

        try {
            EGL10 egl = (EGL10)EGLContext.getEGL();

            EGLDisplay dpy = egl.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY);

            int[] version = new int[2];
            egl.eglInitialize(dpy, version);

            int[] configSpec = {
                //EGL10.EGL_DEPTH_SIZE, 16,
                EGL10.EGL_NONE
            };
            EGLConfig[] configs = new EGLConfig[1];
            int[] num_config = new int[1];
            egl.eglChooseConfig(dpy, configSpec, configs, 1, num_config);
            EGLConfig config = configs[0];

            EGLContext ctx = egl.eglCreateContext(dpy, config, EGL10.EGL_NO_CONTEXT, null);

            EGLSurface surface = egl.eglCreateWindowSurface(dpy, config, this, null);

            egl.eglMakeCurrent(dpy, surface, surface, ctx);

            mEGLContext = ctx;
            mEGLDisplay = dpy;
            mEGLSurface = surface;
        } catch(Exception e) {
            Log.v("SDL", e + "");
            for (StackTraceElement s : e.getStackTrace()) {
                Log.v("SDL", s.toString());
            }
        }

        return true;
    }

    // EGL buffer flip
    public void flipEGL() {
        try {
            EGL10 egl = (EGL10)EGLContext.getEGL();

            egl.eglWaitNative(EGL10.EGL_NATIVE_RENDERABLE, null);

            // drawing here

            egl.eglWaitGL();

            egl.eglSwapBuffers(mEGLDisplay, mEGLSurface);

        } catch(Exception e) {
            Log.v("SDL", "flipEGL(): " + e);
            for (StackTraceElement s : e.getStackTrace()) {
                Log.v("SDL", s.toString());
            }
        }
    }

    // Key events
    public boolean onKey(View v, int keyCode, KeyEvent event) {
        if (event.getAction() == KeyEvent.ACTION_DOWN) {
            //Log.v("SDL", "key down: " + keyCode);
            SDLActivity.onNativeKeyDown(keyCode);
            return true;
        }
        else if (event.getAction() == KeyEvent.ACTION_UP) {
            //Log.v("SDL", "key up: " + keyCode);
            SDLActivity.onNativeKeyUp(keyCode);
            return true;
        }

        return false;
    }

    // Touch events
    public boolean onTouch(View v, MotionEvent event) {
        int action = event.getAction();
        float x = event.getX();
        float y = event.getY();
        float p = event.getPressure();

        // TODO: Anything else we need to pass?
        SDLActivity.onNativeTouch(action, x, y, p);
        return true;
    }

    // Sensor events
    public void enableSensor(int sensortype, boolean enabled) {
        // TODO: This uses getDefaultSensor - what if we have >1 accels?
        if (enabled) {
            mSensorManager.registerListener(this,
                    mSensorManager.getDefaultSensor(sensortype),
                    SensorManager.SENSOR_DELAY_GAME, null);
        } else {
            mSensorManager.unregisterListener(this,
                    mSensorManager.getDefaultSensor(sensortype));
        }
    }

    public void onAccuracyChanged(Sensor sensor, int accuracy) {
        // TODO
    }

    public void onSensorChanged(SensorEvent event) {
        if (event.sensor.getType() == Sensor.TYPE_ACCELEROMETER) {
            SDLActivity.onNativeAccel(event.values[0],
                                      event.values[1],
                                      event.values[2]);
        }
    }
}
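
Note on the commit message: the OpenGL ES 2.0 renderer referred to above lives on the native side, while the initEGL() in this revision passes an empty attribute list to eglChooseConfig() and a null attribute list to eglCreateContext(), which per the EGL specification produces an OpenGL ES 1.x context. The sketch below is illustrative only and not part of this changeset; it shows how an ES 2.0 context is typically requested through the same EGL10 API. The class name EGL2ContextSketch is made up for this example, and the two numeric constants are standard EGL values that the javax.microedition.khronos.egl.EGL10 binding does not define.

import javax.microedition.khronos.egl.*;

class EGL2ContextSketch {
    // Standard EGL constants missing from the EGL10 binding
    private static final int EGL_CONTEXT_CLIENT_VERSION = 0x3098;
    private static final int EGL_OPENGL_ES2_BIT = 0x0004;

    static EGLContext createES2Context(EGL10 egl, EGLDisplay dpy) {
        // Only consider configs that can back an OpenGL ES 2.0 context
        int[] configSpec = {
            EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
            EGL10.EGL_NONE
        };
        EGLConfig[] configs = new EGLConfig[1];
        int[] numConfig = new int[1];
        egl.eglChooseConfig(dpy, configSpec, configs, 1, numConfig);

        // Request client API version 2; with a null attribute list, as in
        // initEGL() above, the context defaults to version 1
        int[] contextAttrs = {
            EGL_CONTEXT_CLIENT_VERSION, 2,
            EGL10.EGL_NONE
        };
        return egl.eglCreateContext(dpy, configs[0], EGL10.EGL_NO_CONTEXT, contextAttrs);
    }
}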