Commit ab688d00 authored by Paul Hunkin's avatar Paul Hunkin

Cleanups.

- Moved to 'project' instead of 'testproject'
- Removed extraneous .c files
- Removed the android_libs folder (that was against the NDK agreement anyway)
parent beeccf04
============================================================================== ==============================================================================
Building the Simple DirectMedia Layer for Android Simple DirectMedia Layer for Android
============================================================================== ==============================================================================
Requirements: Android NDK r4 or later Requirements: Android NDK r4 or later
...@@ -11,6 +11,6 @@ Instructions: ...@@ -11,6 +11,6 @@ Instructions:
4. Run 'ndk-build' (a script provided by the NDK). This compiles the C source 4. Run 'ndk-build' (a script provided by the NDK). This compiles the C source
5. Run 'ant' in android/testproject. This compiles the .java and eventually 5. Run 'ant' in android/testproject. This compiles the .java and eventually
creates a .apk with the C source embedded creates a .apk with the C source embedded
6. 'ant install' will push the apk to the device 6. 'ant install' will push the apk to the device or emulator (if connected)
<?xml version="1.0" encoding="utf-8"?>
<!-- Application manifest for the SDL test app: declares the single
     SDLActivity as the launcher entry point. -->
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
      package="org.libsdl.app"
      android:versionCode="1"
      android:versionName="1.0">
    <application android:label="@string/app_name" android:icon="@drawable/icon">
        <!-- SDLActivity hosts the drawing surface and boots the native SDL thread -->
        <activity android:name="SDLActivity"
                  android:label="@string/app_name">
            <intent-filter>
                <action android:name="android.intent.action.MAIN" />
                <category android:name="android.intent.category.LAUNCHER" />
            </intent-filter>
        </activity>
    </application>
</manifest>
# This file is used to override default values used by the Ant build system.
#
# This file must be checked in to Version Control Systems, as it is
# integral to the build system of your project.
# This file is only used by the Ant script.
# You can use this to override default values such as
# 'source.dir' for the location of your java source folder and
# 'out.dir' for the location of your output folder.
# You can also use it define how the release builds are signed by declaring
# the following properties:
# 'key.store' for the location of your keystore and
# 'key.alias' for the name of the key to use.
# The password will be asked during the build when you use the 'release' target.
<?xml version="1.0" encoding="UTF-8"?>
<project name="SDLApp" default="help">
<!-- The local.properties file is created and updated by the 'android' tool.
It contains the path to the SDK. It should *NOT* be checked in in Version
Control Systems. -->
<property file="local.properties" />
<!-- The build.properties file can be created by you and is never touched
by the 'android' tool. This is the place to change some of the default property values
used by the Ant rules.
Here are some properties you may want to change/update:
application.package
the name of your application package as defined in the manifest. Used by the
'uninstall' rule.
source.dir
the name of the source directory. Default is 'src'.
out.dir
the name of the output directory. Default is 'bin'.
Properties related to the SDK location or the project target should be updated
using the 'android' tool with the 'update' action.
This file is an integral part of the build system for your application and
should be checked in in Version Control Systems.
-->
<property file="build.properties" />
<!-- The default.properties file is created and updated by the 'android' tool, as well
as ADT.
This file is an integral part of the build system for your application and
should be checked in in Version Control Systems. -->
<property file="default.properties" />
<!-- Custom Android task to deal with the project target, and import the proper rules.
This requires ant 1.6.0 or above. -->
<path id="android.antlibs">
<pathelement path="${sdk.dir}/tools/lib/anttasks.jar" />
<pathelement path="${sdk.dir}/tools/lib/sdklib.jar" />
<pathelement path="${sdk.dir}/tools/lib/androidprefs.jar" />
<pathelement path="${sdk.dir}/tools/lib/apkbuilder.jar" />
<pathelement path="${sdk.dir}/tools/lib/jarutils.jar" />
</path>
<taskdef name="setup"
classname="com.android.ant.SetupTask"
classpathref="android.antlibs" />
<!-- Execute the Android Setup task that will setup some properties specific to the target,
and import the build rules files.
The rules file is imported from
<SDK>/platforms/<target_platform>/templates/android_rules.xml
To customize some build steps for your project:
- copy the content of the main node <project> from android_rules.xml
- paste it in this build.xml below the <setup /> task.
- disable the import by changing the setup task below to <setup import="false" />
This will ensure that the properties are setup correctly but that your customized
build steps are used.
-->
<setup />
</project>
# This file is automatically generated by Android Tools.
# Do not modify this file -- YOUR CHANGES WILL BE ERASED!
#
# This file must be checked in Version Control Systems.
#
# To customize properties used by the Ant build system use,
# "build.properties", and override values to adapt the script to your
# project structure.
# Project target.
target=android-7
# NDK build script for the native half of the SDL test app (libsdlapp.so).
LOCAL_PATH := $(call my-dir)

include $(CLEAR_VARS)

LOCAL_MODULE := sdlapp

# Relative path back to the SDL source tree
SDL := ../../../

LOCAL_CFLAGS := -DANDROID_NDK \
                -DDISABLE_IMPORTGL \
                -I$(SDL)/include

# BUGFIX: no trailing backslash after the last source file.  The original
# stray continuation merged the LOCAL_LDLIBS assignment into
# LOCAL_SRC_FILES, so the module was built without its link flags.
LOCAL_SRC_FILES := \
    android-support.cpp \
    lesson05.c

LOCAL_LDLIBS := -lGLESv1_CM -ldl -llog -lSDL -lgcc -L$(SDL)

include $(BUILD_SHARED_LIBRARY)
/*******************************************************************************
This file links the Java side of Android with libsdl
*******************************************************************************/
#include <jni.h>
#include <sys/time.h>
#include <time.h>
#include <android/log.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <math.h>
#include <pthread.h>
#define DEBUG
/*******************************************************************************
Globals
*******************************************************************************/
/* Return the current wall-clock time in milliseconds. */
static long _getTime(void){
    struct timeval tv;
    gettimeofday(&tv, NULL);
    return (long)(tv.tv_sec * 1000 + tv.tv_usec / 1000);
}
//JNIEnv of the SDL/main thread (a JNIEnv is only valid on its own thread)
JNIEnv* mEnv = NULL;
JNIEnv* mAudioThreadEnv = NULL; //See the note below for why this is necessary
//JavaVM handle, used to attach the SDL audio thread to the VM
JavaVM* mVM = NULL;

//Main activity
//NOTE(review): this holds the SDLActivity *class* object, not an instance
jclass mActivityInstance;

//method signatures
//Cached method ids of the static Java callbacks, looked up in JNI_OnLoad
jmethodID midCreateGLContext;
jmethodID midFlipBuffers;
jmethodID midEnableFeature;
jmethodID midUpdateAudio;

//Entry points provided by the SDL library / application side
extern "C" int SDL_main();
extern "C" int Android_OnKeyDown(int keycode);
extern "C" int Android_OnKeyUp(int keycode);
extern "C" void Android_SetScreenResolution(int width, int height);
extern "C" void Android_OnResize(int width, int height, int format);
extern "C" int SDL_SendQuit();
extern "C" void Android_EnableFeature(int featureid, bool enabled);

//If we're not the active app, don't try to render
bool bRenderingEnabled = false;

//Feature IDs
//NOTE(review): values must stay in sync with FEATURE_* in SDLActivity.java
static const int FEATURE_AUDIO = 1;
static const int FEATURE_ACCEL = 2;

//Accelerometer data storage
//Latest x/y/z sample, written by onNativeAccel from the Java sensor callback
float fLastAccelerometer[3];
/*******************************************************************************
Functions called by JNI
*******************************************************************************/
//Library init
/* Called by the VM when libsdlapp is loaded.  Caches the JavaVM pointer
 * (needed later to attach the audio thread), resolves the SDLActivity class
 * and the static callback method ids, and returns the JNI version needed. */
extern "C" jint JNI_OnLoad(JavaVM* vm, void* reserved){
    JNIEnv* env = NULL;
    jint result = -1;

    if (vm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
        return result;
    }
    mEnv = env;
    //BUGFIX: mVM was never stored, so Android_UpdateAudioBuffer dereferenced
    //a NULL JavaVM when attaching the audio thread.
    mVM = vm;

    __android_log_print(ANDROID_LOG_INFO, "SDL", "JNI: OnLoad");

    jclass cls = mEnv->FindClass ("org/libsdl/app/SDLActivity");
    //BUGFIX: promote the local class reference to a global one.  A local ref
    //is only valid for the duration of this call; caching it raw would leave
    //mActivityInstance dangling for every later CallStaticVoidMethod.
    mActivityInstance = (jclass)mEnv->NewGlobalRef(cls);

    midCreateGLContext = mEnv->GetStaticMethodID(cls,"createGLContext","()V");
    midFlipBuffers = mEnv->GetStaticMethodID(cls,"flipBuffers","()V");
    midEnableFeature = mEnv->GetStaticMethodID(cls,"enableFeature","(II)V");
    midUpdateAudio = mEnv->GetStaticMethodID(cls,"updateAudio","([B)V");

    if(!midCreateGLContext || !midFlipBuffers || !midEnableFeature ||
       !midUpdateAudio){
        __android_log_print(ANDROID_LOG_INFO, "SDL", "SDL: Bad mids\n");
    }else{
#ifdef DEBUG
        __android_log_print(ANDROID_LOG_INFO, "SDL", "SDL: Good mids\n");
#endif
    }

    return JNI_VERSION_1_4;
}
//Start up the SDL app
//Called from Java on the dedicated SDL thread.  Caches that thread's JNIEnv,
//turns rendering on, asks Java to enable the accelerometer, then runs the
//application's SDL_main() (which does not return until the app finishes).
extern "C" void Java_org_libsdl_app_SDLActivity_nativeInit( JNIEnv* env,
                                                            jobject obj ){
    __android_log_print(ANDROID_LOG_INFO, "SDL", "SDL: Native Init");

    mEnv = env;
    bRenderingEnabled = true;

    Android_EnableFeature(FEATURE_ACCEL, true);

    SDL_main();
}
//Keydown: forward a Java key-down event into the SDL input layer
extern "C" void Java_org_libsdl_app_SDLActivity_onNativeKeyDown(JNIEnv* env,
                                                jobject obj, jint keycode){
    int handled = Android_OnKeyDown(keycode);
#ifdef DEBUG
    __android_log_print(ANDROID_LOG_INFO, "SDL",
                        "SDL: native key down %d, %d\n", keycode, handled);
#endif
}

//Keyup: forward a Java key-up event into the SDL input layer
extern "C" void Java_org_libsdl_app_SDLActivity_onNativeKeyUp(JNIEnv* env,
                                                jobject obj, jint keycode){
    int handled = Android_OnKeyUp(keycode);
#ifdef DEBUG
    __android_log_print(ANDROID_LOG_INFO, "SDL",
                        "SDL: native key up %d, %d\n", keycode, handled);
#endif
}
//Touch: receive a touch event from Java (currently only logged)
extern "C" void Java_org_libsdl_app_SDLActivity_onNativeTouch(JNIEnv* env,
                jobject obj, jint action, jfloat x, jfloat y, jfloat p){
#ifdef DEBUG
    __android_log_print(ANDROID_LOG_INFO, "SDL",
                        "SDL: native touch event %d @ %f/%f, pressure %f\n",
                        action, x, y, p);
#endif

    //TODO: Pass this off to the SDL multitouch stuff
}
//Quit: the Java side lost its surface, so shut the SDL app down
extern "C" void Java_org_libsdl_app_SDLActivity_nativeQuit( JNIEnv* env,
                                                            jobject obj ){
    //Stop rendering as we're no longer in the foreground
    bRenderingEnabled = false;

    //Inject a SDL_QUIT event so SDL_main()'s event loop exits
    int sent = SDL_SendQuit();
    __android_log_print(ANDROID_LOG_INFO, "SDL", "SDL: Native quit %d", sent);
}

//Screen size: record the initial surface dimensions before SDL starts
extern "C" void Java_org_libsdl_app_SDLActivity_nativeSetScreenSize(
                        JNIEnv* env, jobject obj, jint width, jint height){
    __android_log_print(ANDROID_LOG_INFO, "SDL",
                        "SDL: Set screen size on init: %d/%d\n", width, height);
    Android_SetScreenResolution(width, height);
}
//Resize: the Java surface changed size/format; tell SDL about it
extern "C" void Java_org_libsdl_app_SDLActivity_onNativeResize(
                                    JNIEnv* env, jobject obj, jint width,
                                    jint height, jint format){
    Android_OnResize(width, height, format);
}

//Accelerometer: cache the newest x/y/z sample for SDL to read
extern "C" void Java_org_libsdl_app_SDLActivity_onNativeAccel(
                                    JNIEnv* env, jobject obj,
                                    jfloat x, jfloat y, jfloat z){
    fLastAccelerometer[0] = x;
    fLastAccelerometer[1] = y;
    fLastAccelerometer[2] = z;
}
/*******************************************************************************
                 Functions called by SDL into Java
*******************************************************************************/
//Ask the Java side to create the GL context; rendering is enabled from here on
extern "C" void Android_CreateContext(){
    __android_log_print(ANDROID_LOG_INFO, "SDL", "SDL: sdl_create_context()\n");

    bRenderingEnabled = true;

    mEnv->CallStaticVoidMethod(mActivityInstance, midCreateGLContext);
}

//Present the frame SDL has just finished rendering
extern "C" void Android_Render(){
    if(bRenderingEnabled){
        //When we get here, we've accumulated a full frame
        mEnv->CallStaticVoidMethod(mActivityInstance, midFlipBuffers);
    }
}

//Tell the Java side to turn a feature (audio, accelerometer, ...) on or off
extern "C" void Android_EnableFeature(int featureid, bool enabled){
    mEnv->CallStaticVoidMethod(mActivityInstance, midEnableFeature,
                               featureid, (int)enabled);
}
//Push a buffer of mixed audio samples up to the Java AudioTrack.
//Runs on the SDL audio thread, which the JVM does not know about, so the
//thread is lazily attached and its JNIEnv cached in mAudioThreadEnv.
extern "C" void Android_UpdateAudioBuffer(unsigned char *buf, int len){
    //Annoyingly we can't just call into Java from any thread. Because the audio
    //callback is dispatched from the SDL audio thread (that wasn't made from
    //java, we have to do some magic here to let the JVM know about the thread.
    //Because everything it touches on the Java side is static anyway, it's
    //not a big deal, just annoying.
    if(!mAudioThreadEnv){
        __android_log_print(ANDROID_LOG_INFO, "SDL", "SDL: Need to set up audio thread env\n");
        mVM->AttachCurrentThread(&mAudioThreadEnv, NULL);
        __android_log_print(ANDROID_LOG_INFO, "SDL", "SDL: ok\n");
    }

    jbyteArray arr = mAudioThreadEnv->NewByteArray(len);

    //blah. We probably should rework this so we avoid the copy.
    mAudioThreadEnv->SetByteArrayRegion(arr, 0, len, (jbyte *)buf);
    __android_log_print(ANDROID_LOG_INFO, "SDL", "SDL: copied\n");

    mAudioThreadEnv->CallStaticVoidMethod( mActivityInstance,
                                           midUpdateAudio, arr );

    //BUGFIX: release the local reference.  This callback fires for the
    //lifetime of the app on a thread that never returns to Java, so without
    //this the local-reference table fills up and the process aborts.
    mAudioThreadEnv->DeleteLocalRef(arr);

    __android_log_print(ANDROID_LOG_INFO, "SDL", "SDL: invoked\n");
}
/*
* This code was created by Jeff Molofee '99
* (ported to Linux/SDL by Ti Leggett '01)
*
* If you've found this code useful, please let me know.
*
* Visit Jeff at http://nehe.gamedev.net/
*
* or for port-specific comments, questions, bugreports etc.
* email to leggett@eecs.tulane.edu
*/
#include <stdio.h>
#include <stdlib.h>
#include <math.h>
#include <signal.h>
#include <android/log.h>
#ifdef ANDROID
#include <GLES/gl.h>
#else
#include <GL/gl.h>
#include <GL/glu.h>
#endif
#include "SDL.h"
/* screen width, height, and bit depth */
#define SCREEN_WIDTH 320
#define SCREEN_HEIGHT 430
#define SCREEN_BPP 16
/* Define our booleans */
#define TRUE 1
#define FALSE 0
/* This is our SDL surface */
SDL_Surface *surface;
int rotation = 0;
/**************************************
 gluperspective implementation
**************************************/
/* GLES replacement for GLU's gluPerspective: derives the near-plane frustum
 * bounds from the vertical field of view (degrees) and the aspect ratio,
 * then loads them into the projection matrix via glFrustumf. */
void gluPerspective(double fovy, double aspect, double zNear, double zFar){
    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();

    double ymax = zNear * tan(fovy * M_PI / 360.0);
    double ymin = -ymax;

    glFrustumf(ymin * aspect, ymax * aspect, ymin, ymax, zNear, zFar);
}
/**************************************
 glulookat implementation
**************************************/
/* GLES replacement for GLU's gluLookAt (the classic Mesa implementation).
 * Builds an orthonormal camera basis from the eye position, the look-at
 * target and the up vector, multiplies the resulting rotation onto the
 * current matrix, then translates the eye to the origin. */
void gluLookAt(GLfloat eyex, GLfloat eyey, GLfloat eyez,
               GLfloat centerx, GLfloat centery, GLfloat centerz,
               GLfloat upx, GLfloat upy, GLfloat upz)
{
    GLfloat m[16];
    GLfloat x[3], y[3], z[3];
    GLfloat mag;

    /* Make rotation matrix */

    /* Z vector: from target back towards the eye (view direction negated) */
    z[0] = eyex - centerx;
    z[1] = eyey - centery;
    z[2] = eyez - centerz;
    mag = sqrt(z[0] * z[0] + z[1] * z[1] + z[2] * z[2]);
    if (mag) { /* mpichler, 19950515 */
        z[0] /= mag;
        z[1] /= mag;
        z[2] /= mag;
    }

    /* Y vector: caller-supplied up direction (not yet orthogonal to Z) */
    y[0] = upx;
    y[1] = upy;
    y[2] = upz;

    /* X vector = Y cross Z */
    x[0] = y[1] * z[2] - y[2] * z[1];
    x[1] = -y[0] * z[2] + y[2] * z[0];
    x[2] = y[0] * z[1] - y[1] * z[0];

    /* Recompute Y = Z cross X so the basis is truly orthogonal */
    y[0] = z[1] * x[2] - z[2] * x[1];
    y[1] = -z[0] * x[2] + z[2] * x[0];
    y[2] = z[0] * x[1] - z[1] * x[0];

    /* mpichler, 19950515 */
    /* cross product gives area of parallelogram, which is < 1.0 for
     * non-perpendicular unit-length vectors; so normalize x, y here
     */
    mag = sqrt(x[0] * x[0] + x[1] * x[1] + x[2] * x[2]);
    if (mag) {
        x[0] /= mag;
        x[1] /= mag;
        x[2] /= mag;
    }

    mag = sqrt(y[0] * y[0] + y[1] * y[1] + y[2] * y[2]);
    if (mag) {
        y[0] /= mag;
        y[1] /= mag;
        y[2] /= mag;
    }

    /* Column-major layout expected by glMultMatrixf */
#define M(row,col) m[col*4+row]
    M(0, 0) = x[0];
    M(0, 1) = x[1];
    M(0, 2) = x[2];
    M(0, 3) = 0.0;
    M(1, 0) = y[0];
    M(1, 1) = y[1];
    M(1, 2) = y[2];
    M(1, 3) = 0.0;
    M(2, 0) = z[0];
    M(2, 1) = z[1];
    M(2, 2) = z[2];
    M(2, 3) = 0.0;
    M(3, 0) = 0.0;
    M(3, 1) = 0.0;
    M(3, 2) = 0.0;
    M(3, 3) = 1.0;
#undef M

    glMultMatrixf(m);

    /* Translate Eye to Origin */
    glTranslatef(-eyex, -eyey, -eyez);
}
/* function to release/destroy our resources and restoring the old desktop */
/* Shuts SDL down and terminates the process with the given code; never
 * returns. */
void Quit( int returnCode )
{
    SDL_Quit( );
    exit( returnCode );
}
/* function to reset our viewport after a window resize */
/* Rebuilds the viewport and the projection matrix for a new window size and
 * leaves the modelview matrix current and reset.  Always returns TRUE. */
int resizeWindow( int width, int height )
{
    GLfloat aspect;

    /* Guard against dividing by a zero height */
    if ( height == 0 )
        height = 1;

    aspect = ( GLfloat )width / ( GLfloat )height;

    /* Viewport covers the whole window */
    glViewport( 0, 0, ( GLsizei )width, ( GLsizei )height );

    /* Rebuild the projection matrix with a 45 degree vertical FOV */
    glMatrixMode( GL_PROJECTION );
    glLoadIdentity( );
    gluPerspective( 45.0f, aspect, 0.1f, 100.0f );

    /* Make sure we're changing the model view and not the projection */
    glMatrixMode( GL_MODELVIEW );
    glLoadIdentity( );

    return( TRUE );
}
/* function to handle key press events */
/* Dispatches a single key press: ESC quits, F1 toggles fullscreen, and the
 * left/right arrows spin the scene by 30 degrees per press. */
void handleKeyPress( SDL_keysym *keysym )
{
    if ( keysym->sym == SDLK_ESCAPE ) {
        /* ESC key was pressed */
        Quit( 0 );
    } else if ( keysym->sym == SDLK_F1 ) {
        /* F1 toggles fullscreen mode */
        SDL_WM_ToggleFullScreen( surface );
    } else if ( keysym->sym == SDLK_LEFT ) {
        rotation -= 30;
    } else if ( keysym->sym == SDLK_RIGHT ) {
        rotation += 30;
    }

    __android_log_print(ANDROID_LOG_INFO, "SDL","Keycode: %d, %d, %d\n", keysym->sym, SDLK_LEFT, SDLK_RIGHT);

    return;
}
/* general OpenGL initialization function */
/* One-time GL state setup: smooth shading, a black clear colour, and depth
 * testing with GL_LEQUAL.  Always returns TRUE. */
int initGL( GLvoid )
{
    glShadeModel( GL_SMOOTH );                /* Enable smooth shading */
    glClearColor( 0.0f, 0.0f, 0.0f, 0.0f );   /* Black background */

    /* Depth buffer setup */
    //glClearDepth( 1.0f );
    glEnable( GL_DEPTH_TEST );                /* Enables depth testing */
    glDepthFunc( GL_LEQUAL );                 /* The type of depth test to do */

    /* Really nice perspective calculations */
    glHint( GL_PERSPECTIVE_CORRECTION_HINT, GL_NICEST );

    return( TRUE );
}
/* Here goes our drawing code */
/* Renders one frame: clears the buffers, positions the camera, draws the
 * four sides of a colour-interpolated pyramid with vertex arrays, swaps the
 * buffers, updates the FPS counter (logged every 5 seconds), and advances
 * the rotation by one degree.  Always returns TRUE. */
int drawGLScene( GLvoid )
{
    /* Frame counter and timestamp for the periodic FPS log below */
    static int Frames = 0;
    static int T0 = 0;

    glViewport(0, 0, SCREEN_WIDTH, SCREEN_HEIGHT);
    glClearColorx(0,0,0,255);
    glClear(GL_DEPTH_BUFFER_BIT | GL_COLOR_BUFFER_BIT);

    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
    gluPerspective(45, (float)SCREEN_WIDTH / SCREEN_HEIGHT, 0.5f, 150);

    glMatrixMode(GL_MODELVIEW);
    glLoadIdentity();

    //Camera
    gluLookAt(0,0,5, 0,0,0, 0,1,0);

    //Draw a triangle
    //glRotatef(iRot, 0, 1, 0);
    glRotatef( rotation, 0.0f, 1.0f, 0.0f );

    glEnableClientState (GL_VERTEX_ARRAY);
    glEnableClientState (GL_COLOR_ARRAY);

    /* Rotate The Triangle On The Y axis ( NEW ) */
    //glRotatef( Frames % 360, 0.0f, 1.0f, 0.0f );

    /* GLES variant of drawing a triangle */
    /* Four triangles forming the sides of a pyramid, one 9-float row each */
    const GLfloat triVertices[][9] = {
        { /* Front Triangle */
            0.0f, 1.0f, 0.0f,               /* Top Of Triangle */
            -1.0f, -1.0f, 1.0f,             /* Left Of Triangle */
            1.0f, -1.0f, 1.0f               /* Right Of Triangle */
        }, { /* Right Triangle */
            0.0f, 1.0f, 0.0f,               /* Top Of Triangle */
            1.0f, -1.0f, 1.0f,              /* Left Of Triangle */
            1.0f, -1.0f, -1.0f              /* Right Of Triangle */
        }, { /* Back Triangle */
            0.0f, 1.0f, 0.0f,               /* Top Of Triangle */
            1.0f, -1.0f, -1.0f,             /* Left Of Triangle */
            -1.0f, -1.0f, -1.0f             /* Right Of Triangle */
        }, { /* Left Triangle */
            0.0f, 1.0f, 0.0f,               /* Top Of Triangle */
            -1.0f, -1.0f, -1.0f,            /* Left Of Triangle */
            -1.0f, -1.0f, 1.0f              /* Right Of Triangle */
        }
    };

    /* unlike GL, GLES does not support RGB. We have to use RGBA instead */
    /* One RGBA colour per vertex, matching triVertices row for row */
    const GLfloat triColors[][12] = {
        { /* Front triangle */
            1.0f, 0.0f, 0.0f, 1.0f,         /* Red */
            0.0f, 1.0f, 0.0f, 1.0f,         /* Green */
            0.0f, 0.0f, 1.0f, 1.0f          /* Blue */
        }, { /* Right triangle */
            1.0f, 0.0f, 0.0f, 1.0f,         /* Red */
            0.0f, 0.0f, 1.0f, 1.0f,         /* Blue */
            0.0f, 1.0f, 0.0f, 1.0f          /* Green */
        }, { /* Back triangle */
            1.0f, 0.0f, 0.0f, 1.0f,         /* Red */
            0.0f, 1.0f, 0.0f, 1.0f,         /* Green */
            0.0f, 0.0f, 1.0f, 1.0f          /* Blue */
        }, { /* Left triangle */
            1.0f, 0.0f, 0.0f, 1.0f,         /* Red */
            0.0f, 0.0f, 1.0f, 1.0f,         /* Blue */
            0.0f, 1.0f, 0.0f, 1.0f          /* Green */
        }
    };

    /* NOTE(review): redundant -- GL_COLOR_ARRAY is already enabled above */
    glEnableClientState(GL_COLOR_ARRAY);

    int tri=0;

    /* Loop through all Triangles */
    for(tri=0;tri<sizeof(triVertices)/(9*sizeof(GLfloat));tri++)
    {
        glVertexPointer(3, GL_FLOAT, 0, triVertices[tri]);
        glColorPointer(4, GL_FLOAT, 0, triColors[tri]);
        glDrawArrays(GL_TRIANGLE_STRIP, 0, 3);
    }

    //__android_log_print(ANDROID_LOG_INFO, "SDL", "render %d", Frames++);

    /* Draw it to the screen */
    SDL_GL_SwapBuffers( );

    /* Gather our frames per second */
    Frames++;
    {
        GLint t = SDL_GetTicks();
        if (t - T0 >= 5000) {
            GLfloat seconds = (t - T0) / 1000.0;
            GLfloat fps = Frames / seconds;
            __android_log_print(ANDROID_LOG_INFO, "SDL","%d frames in %g seconds = %g FPS\n", Frames, seconds, fps);
            T0 = t;
            Frames = 0;
        }
    }

    /* Advance the idle spin by one degree per frame */
    rotation++;

    return( TRUE );
}
/* Wave playback state shared between testAudio() and the fillerup()
 * audio callback. */
struct
{
    SDL_AudioSpec spec;       /* Format of the loaded wave */
    Uint8 *sound;             /* Pointer to wave data */
    Uint32 soundlen;          /* Length of wave data */
    int soundpos;             /* Current play position */
} wave;
/* SDL audio callback: fills 'stream' with 'len' bytes of wave data,
 * looping back to the start of the sample whenever the end is reached. */
void SDLCALL
fillerup(void *unused, Uint8 * stream, int len)
{
    Uint8 *waveptr;
    int waveleft;

    /* FIX: declarations moved above the first statement -- the original
     * declared them after the log call, which is invalid C89 and breaks
     * stricter compilers. */
    __android_log_print(ANDROID_LOG_INFO, "SDL","FILLERUP\n");

    /* Set up the pointers */
    waveptr = wave.sound + wave.soundpos;
    waveleft = wave.soundlen - wave.soundpos;

    /* Go! Copy whole remaining chunks, wrapping to the start each time */
    while (waveleft <= len) {
        SDL_memcpy(stream, waveptr, waveleft);
        stream += waveleft;
        len -= waveleft;
        waveptr = wave.sound;
        waveleft = wave.soundlen;
        wave.soundpos = 0;
    }
    /* Partial copy for whatever is left of the request */
    SDL_memcpy(stream, waveptr, len);
    wave.soundpos += len;
}
/* Smoke test for SDL audio on Android: loads /sdcard/sample.wav, plays it
 * to completion via the fillerup() callback, then shuts the audio system
 * down.  Returns early (after logging) on any SDL error. */
void testAudio(){
    const char *file = "/sdcard/sample.wav";

    /* Load the SDL library */
    if (SDL_Init(SDL_INIT_AUDIO) < 0) {
        __android_log_print(ANDROID_LOG_INFO, "SDL","Couldn't initialize SDL Audio: %s\n", SDL_GetError());
        return;
    }else{
        __android_log_print(ANDROID_LOG_INFO, "SDL","Init audio ok\n");
    }

    /* Load the wave file into memory */
    if (SDL_LoadWAV(file, &wave.spec, &wave.sound, &wave.soundlen) == NULL) {
        __android_log_print(ANDROID_LOG_INFO, "SDL", "Couldn't load %s: %s\n", file, SDL_GetError());
        return;
    }

    wave.spec.callback = fillerup;

    __android_log_print(ANDROID_LOG_INFO, "SDL","Loaded: %d\n", wave.soundlen);

    /* Initialize fillerup() variables */
    if (SDL_OpenAudio(&wave.spec, NULL) < 0) {
        __android_log_print(ANDROID_LOG_INFO, "SDL", "Couldn't open audio: %s\n", SDL_GetError());
        SDL_FreeWAV(wave.sound);
        return;
    }

    __android_log_print(ANDROID_LOG_INFO, "SDL","Using audio driver: %s\n", SDL_GetCurrentAudioDriver());

    /* Let the audio run */
    SDL_PauseAudio(0);

    __android_log_print(ANDROID_LOG_INFO, "SDL","Playing\n");

    /* Poll until the sample has finished playing */
    while (SDL_GetAudioStatus() == SDL_AUDIO_PLAYING){
        //__android_log_print(ANDROID_LOG_INFO, "SDL","Still playing\n");
        SDL_Delay(100);
    }

    __android_log_print(ANDROID_LOG_INFO, "SDL","Closing down\n");

    /* Clean up on signal */
    SDL_CloseAudio();
    SDL_FreeWAV(wave.sound);
}
/* Application entry point (invoked from nativeInit on the SDL thread).
 * Initializes SDL video, creates a GL-capable surface, then runs the main
 * event loop: resize/focus/key/quit handling plus one drawGLScene() call
 * per iteration while the app is active.  Exits via Quit(). */
int SDL_main( int argc, char **argv )
{
    __android_log_print(ANDROID_LOG_INFO, "SDL","entry\n");

    /* Flags to pass to SDL_SetVideoMode */
    int videoFlags;
    /* main loop variable */
    int done = FALSE;
    /* used to collect events */
    SDL_Event event;
    /* this holds some info about our display */
    const SDL_VideoInfo *videoInfo;
    /* whether or not the window is active */
    int isActive = TRUE;

    /* initialize SDL */
    if ( SDL_Init( SDL_INIT_VIDEO ) < 0 )
    {
        __android_log_print(ANDROID_LOG_INFO, "SDL", "Video initialization failed: %s\n",
                 SDL_GetError( ) );
        Quit( 1 );
    }

    /* Fetch the video info */
    videoInfo = SDL_GetVideoInfo( );

    if ( !videoInfo )
    {
        __android_log_print(ANDROID_LOG_INFO, "SDL", "Video query failed: %s\n",
                 SDL_GetError( ) );
        Quit( 1 );
    }

    /* the flags to pass to SDL_SetVideoMode */
    videoFlags  = SDL_OPENGL;          /* Enable OpenGL in SDL */
    videoFlags |= SDL_GL_DOUBLEBUFFER; /* Enable double buffering */
    videoFlags |= SDL_HWPALETTE;       /* Store the palette in hardware */
    videoFlags |= SDL_RESIZABLE;       /* Enable window resizing */

    /* This checks to see if surfaces can be stored in memory */
    if ( videoInfo->hw_available )
        videoFlags |= SDL_HWSURFACE;
    else
        videoFlags |= SDL_SWSURFACE;

    /* This checks if hardware blits can be done */
    if ( videoInfo->blit_hw )
        videoFlags |= SDL_HWACCEL;

    /* Sets up OpenGL double buffering */
    SDL_GL_SetAttribute( SDL_GL_DOUBLEBUFFER, 1 );

    /* get a SDL surface */
    surface = SDL_SetVideoMode( SCREEN_WIDTH, SCREEN_HEIGHT, SCREEN_BPP,
                videoFlags );

    /* Verify there is a surface */
    if ( !surface )
    {
        __android_log_print(ANDROID_LOG_INFO, "SDL", "Video mode set failed: %s\n", SDL_GetError( ) );
        Quit( 1 );
    }

    __android_log_print(ANDROID_LOG_INFO, "SDL","Made a video mode!\n");

    /* initialize OpenGL */
    initGL( );

    /* resize the initial window */
    resizeWindow( SCREEN_WIDTH, SCREEN_HEIGHT );

    //testAudio();

    /* wait for events */
    while ( !done )
    {
        /* handle the events in the queue */
        while ( SDL_PollEvent( &event ) )
        {
            switch( event.type )
            {
            case SDL_ACTIVEEVENT:
                /* Something's happend with our focus
                 * If we lost focus or we are iconified, we
                 * shouldn't draw the screen
                 */
                if ( event.active.gain == 0 )
                    isActive = FALSE;
                else
                    isActive = TRUE;
                break;
            case SDL_VIDEORESIZE:
                /* handle resize event: recreate the surface at the new size */
                surface = SDL_SetVideoMode( event.resize.w,
                            event.resize.h,
                            16, videoFlags );
                if ( !surface )
                {
                    __android_log_print(ANDROID_LOG_INFO, "SDL","Could not get a surface after resize: %s\n", SDL_GetError( ) );
                    Quit( 1 );
                }
                resizeWindow( event.resize.w, event.resize.h );
                break;
            case SDL_KEYDOWN:
                /* handle key presses */
                handleKeyPress( &event.key.keysym );
                break;
            case SDL_QUIT:
                /* handle quit requests */
                done = TRUE;
                __android_log_print(ANDROID_LOG_INFO, "SDL","App is shutting down\n");
                break;
            default:
                break;
            }
        }

        /* draw the scene, but only while we hold focus */
        if ( isActive )
            drawGLScene( );
    }

    /* clean ourselves up and exit */
    Quit( 0 );

    /* Should never get here */
    return( 0 );
}
# This file is automatically generated by Android Tools.
# Do not modify this file -- YOUR CHANGES WILL BE ERASED!
#
# This file must *NOT* be checked in Version Control Systems,
# as it contains information specific to your local configuration.
# location of the SDK. This is only used by Ant
# For customization when using a Version Control System, please read the
# header note.
sdk.dir=/home/paul/Projects/gsoc/sdk/android-sdk-linux_86
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:orientation="vertical"
android:layout_width="fill_parent"
android:layout_height="fill_parent"
>
<TextView
android:layout_width="fill_parent"
android:layout_height="wrap_content"
android:text="Hello World, SDLActivity"
/>
</LinearLayout>
<?xml version="1.0" encoding="utf-8"?>
<resources>
<string name="app_name">SDLActivity</string>
</resources>
package org.libsdl.app;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
import javax.microedition.khronos.egl.*;
import android.app.*;
import android.content.*;
import android.view.*;
import android.os.*;
import android.util.Log;
import android.graphics.*;
import android.text.method.*;
import android.text.*;
import android.media.*;
import android.hardware.*;
import android.content.*;
import java.lang.*;
/**
SDL Activity
*/
public class SDLActivity extends Activity {
//Main components
private static SDLActivity mSingleton;
private static SDLSurface mSurface;
//Audio
private static AudioTrack mAudioTrack;
private static boolean bAudioIsEnabled;
//Sensors
private static boolean bAccelIsEnabled;
//feature IDs. Must match up on the C side as well.
private static int FEATURE_AUDIO = 1;
private static int FEATURE_ACCEL = 2;
//Load the .so
static {
System.loadLibrary("sdlapp");
}
//Setup
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
//So we can call stuff from static callbacks
mSingleton = this;
//Set up the surface
mSurface = new SDLSurface(getApplication());
setContentView(mSurface);
SurfaceHolder holder = mSurface.getHolder();
holder.setType(SurfaceHolder.SURFACE_TYPE_GPU);
}
//Audio
public static boolean initAudio(){
//blah. Hardcoded things are bad. FIXME when we have more sound stuff
//working properly.
mAudioTrack = new AudioTrack(AudioManager.STREAM_MUSIC,
11025,
AudioFormat.CHANNEL_CONFIGURATION_MONO,
AudioFormat.ENCODING_PCM_8BIT,
2048,
AudioTrack.MODE_STREAM);
bAudioIsEnabled = true;
return true;
}
//Accel
public static boolean initAccel(){
mSurface.enableSensor(Sensor.TYPE_ACCELEROMETER, true);
bAccelIsEnabled = true;
return true;
}
public static boolean closeAccel(){
mSurface.enableSensor(Sensor.TYPE_ACCELEROMETER, false);
bAccelIsEnabled = false;
return true;
}
//Events
protected void onPause() {
super.onPause();
}
protected void onResume() {
super.onResume();
}
//C functions we call
public static native void nativeInit();
public static native void nativeQuit();
public static native void nativeSetScreenSize(int width, int height);
public static native void onNativeKeyDown(int keycode);
public static native void onNativeKeyUp(int keycode);
public static native void onNativeTouch(int action, float x,
float y, float p);
public static native void onNativeResize(int x, int y, int format);
public static native void onNativeAccel(float x, float y, float z);
//Java functions called from C
private static void createGLContext(){
mSurface.initEGL();
}
public static void flipBuffers(){
mSurface.flipEGL();
}
public static void updateAudio(byte [] buf){
if(mAudioTrack == null){
return;
}
mAudioTrack.write(buf, 0, buf.length);
mAudioTrack.play();
Log.v("SDL","Played some audio");
}
public static void enableFeature(int featureid, int enabled){
Log.v("SDL","Feature " + featureid + " = " + enabled);
//Yuck. This is all horribly inelegent. If it gets to more than a few
//'features' I'll rip this out and make something nicer, I promise :)
if(featureid == FEATURE_AUDIO){
if(enabled == 1){
initAudio();
}else{
//We don't have one of these yet...
//closeAudio();
}
}
else if(featureid == FEATURE_ACCEL){
if(enabled == 1){
initAccel();
}else{
closeAccel();
}
}
}
}
/**
    Runnable that hosts the native SDL application.  run() calls into C via
    nativeInit() and blocks there until SDL_main() returns.
*/
class SDLRunner implements Runnable{
    public void run(){
        // Runs SDL_main(); does not return until the app finishes
        SDLActivity.nativeInit();

        Log.v("SDL","SDL thread terminated");
    }
}
/**
SDLSurface. This is what we draw on, so we need to know when it's created
in order to do anything useful.
Because of this, that's where we set up the SDL thread
*/
class SDLSurface extends SurfaceView implements SurfaceHolder.Callback,
View.OnKeyListener, View.OnTouchListener, SensorEventListener {
//This is what SDL runs in. It invokes SDL_main(), eventually
private Thread mSDLThread;
//EGL private objects
private EGLContext mEGLContext;
private EGLSurface mEGLSurface;
private EGLDisplay mEGLDisplay;
//Sensors
private static SensorManager mSensorManager;
//Startup
public SDLSurface(Context context) {
super(context);
getHolder().addCallback(this);
setFocusable(true);
setFocusableInTouchMode(true);
requestFocus();
setOnKeyListener(this);
setOnTouchListener(this);
mSensorManager = (SensorManager)context.getSystemService("sensor");
}
//Called when we have a valid drawing surface
public void surfaceCreated(SurfaceHolder holder) {
Log.v("SDL","Surface created");
int width = getWidth();
int height = getHeight();
//Set the width and height variables in C before we start SDL so we have
//it available on init
SDLActivity.nativeSetScreenSize(width, height);
//Now start up the C app thread
mSDLThread = new Thread(new SDLRunner(), "SDLThread");
mSDLThread.start();
}
//Called when we lose the surface
public void surfaceDestroyed(SurfaceHolder holder) {
Log.v("SDL","Surface destroyed");
SDLActivity.nativeQuit();
//Now wait for the SDL thread to quit
try{
mSDLThread.wait();
}catch(Exception e){
Log.v("SDL","Problem stopping thread: " + e);
}
}
//Called when the surface is resized
public void surfaceChanged(SurfaceHolder holder, int format,
int width, int height) {
Log.v("SDL","Surface resized");
SDLActivity.onNativeResize(width, height, format);
}
//unused
public void onDraw(Canvas canvas) {}
//EGL functions
public boolean initEGL(){
Log.v("SDL","Starting up");
try{
EGL10 egl = (EGL10)EGLContext.getEGL();
EGLDisplay dpy = egl.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY);
int[] version = new int[2];
egl.eglInitialize(dpy, version);
int[] configSpec = {
//EGL10.EGL_DEPTH_SIZE, 16,
EGL10.EGL_NONE
};
EGLConfig[] configs = new EGLConfig[1];
int[] num_config = new int[1];
egl.eglChooseConfig(dpy, configSpec, configs, 1, num_config);
EGLConfig config = configs[0];
EGLContext ctx = egl.eglCreateContext(dpy, config, EGL10.EGL_NO_CONTEXT, null);
EGLSurface surface = egl.eglCreateWindowSurface(dpy, config, this, null);
egl.eglMakeCurrent(dpy, surface, surface, ctx);
mEGLContext = ctx;
mEGLDisplay = dpy;
mEGLSurface = surface;
}catch(Exception e){
Log.v("SDL", e + "");
for(StackTraceElement s : e.getStackTrace()){
Log.v("SDL", s.toString());
}
}
Log.v("SDL","Done making!");
return true;
}
//EGL buffer flip
public void flipEGL(){
try{
EGL10 egl = (EGL10)EGLContext.getEGL();
GL10 gl = (GL10)mEGLContext.getGL();
egl.eglWaitNative(EGL10.EGL_NATIVE_RENDERABLE, null);
//drawing here
egl.eglWaitGL();
egl.eglSwapBuffers(mEGLDisplay, mEGLSurface);
}catch(Exception e){
Log.v("SDL", "flipEGL(): " + e);
for(StackTraceElement s : e.getStackTrace()){
Log.v("SDL", s.toString());
}
}
}
//Key events
public boolean onKey(View v, int keyCode, KeyEvent event){
if(event.getAction() == KeyEvent.ACTION_DOWN){
SDLActivity.onNativeKeyDown(keyCode);
return true;
}
else if(event.getAction() == KeyEvent.ACTION_UP){
SDLActivity.onNativeKeyUp(keyCode);
return true;
}
return false;
}
//Touch events
public boolean onTouch(View v, MotionEvent event){
int action = event.getAction();
float x = event.getX();
float y = event.getY();
float p = event.getPressure();
//TODO: Anything else we need to pass?
SDLActivity.onNativeTouch(action, x, y, p);
return true;
}
//Sensor events
public void enableSensor(int sensortype, boolean enabled){
//TODO: This uses getDefaultSensor - what if we have >1 accels?
if(enabled){
mSensorManager.registerListener(this,
mSensorManager.getDefaultSensor(sensortype),
SensorManager.SENSOR_DELAY_GAME, null);
}else{
mSensorManager.unregisterListener(this,
mSensorManager.getDefaultSensor(sensortype));
}
}
public void onAccuracyChanged(Sensor sensor, int accuracy){
//TODO
}
public void onSensorChanged(SensorEvent event){
if(event.sensor.getType() == Sensor.TYPE_ACCELEROMETER){
SDLActivity.onNativeAccel( event.values[0],
event.values[1],
event.values[2] );
}
}
}
...@@ -13,8 +13,8 @@ ...@@ -13,8 +13,8 @@
#include <pthread.h> #include <pthread.h>
#include "importgl.h" //#include "importgl.h"
#include "egl.h" //#include "egl.h"
/******************************************************************************* /*******************************************************************************
Globals Globals
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment