Build the SDL library as a shared object on Android, so it will work correctly with SDL_image and SDL_ttf.

Sam Lantinga 2011-01-06 16:11:21 -08:00
parent e92a0428c3
commit 606b0cda7f
20 changed files with 179 additions and 1286 deletions

Android.mk (new executable file)

@@ -0,0 +1,41 @@
LOCAL_PATH := $(call my-dir)
###########################
#
# SDL shared library
#
###########################
include $(CLEAR_VARS)
LOCAL_MODULE := SDL
LOCAL_C_INCLUDES := $(LOCAL_PATH)/include
LOCAL_SRC_FILES := src/SDL_android.cpp \
$(subst $(LOCAL_PATH)/,, \
$(wildcard $(LOCAL_PATH)/src/*.c) \
$(wildcard $(LOCAL_PATH)/src/audio/*.c) \
$(wildcard $(LOCAL_PATH)/src/cpuinfo/*.c) \
$(wildcard $(LOCAL_PATH)/src/events/*.c) \
$(wildcard $(LOCAL_PATH)/src/file/*.c) \
$(wildcard $(LOCAL_PATH)/src/joystick/*.c) \
$(wildcard $(LOCAL_PATH)/src/haptic/*.c) \
$(wildcard $(LOCAL_PATH)/src/stdlib/*.c) \
$(wildcard $(LOCAL_PATH)/src/thread/*.c) \
$(wildcard $(LOCAL_PATH)/src/timer/*.c) \
$(wildcard $(LOCAL_PATH)/src/video/*.c) \
$(wildcard $(LOCAL_PATH)/src/power/*.c) \
$(wildcard $(LOCAL_PATH)/src/audio/android/*.c) \
$(wildcard $(LOCAL_PATH)/src/audio/dummy/*.c) \
$(wildcard $(LOCAL_PATH)/src/video/android/*.c) \
$(wildcard $(LOCAL_PATH)/src/joystick/android/*.c) \
$(wildcard $(LOCAL_PATH)/src/haptic/dummy/*.c) \
$(wildcard $(LOCAL_PATH)/src/atomic/dummy/*.c) \
$(wildcard $(LOCAL_PATH)/src/thread/pthread/*.c) \
$(wildcard $(LOCAL_PATH)/src/timer/unix/*.c) \
$(wildcard $(LOCAL_PATH)/src/loadso/dlopen/*.c))
LOCAL_LDLIBS := -ldl -lGLESv1_CM -llog
include $(BUILD_SHARED_LIBRARY)
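
Because SDL is now built with BUILD_SHARED_LIBRARY, other ndk-build modules can link against the resulting libSDL.so dynamically instead of absorbing a static archive. The following is a minimal sketch of what an application module's Android.mk might look like under that scheme; the module name "main", the source file name, and the include path are illustrative assumptions, not part of this commit.

# Hypothetical application module (not part of this commit) that links
# against the shared SDL module defined above.
LOCAL_PATH := $(call my-dir)

include $(CLEAR_VARS)

LOCAL_MODULE := main

# Assumed location of the SDL headers relative to this module
LOCAL_C_INCLUDES := $(LOCAL_PATH)/../SDL/include

# Placeholder source file name
LOCAL_SRC_FILES := YourSourceHere.c

# Link dynamically against the "SDL" module; SDL_image and SDL_ttf modules
# could be referenced the same way once they are also built as shared libraries.
LOCAL_SHARED_LIBRARIES := SDL

LOCAL_LDLIBS := -lGLESv1_CM -llog

include $(BUILD_SHARED_LIBRARY)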

@@ -1,58 +0,0 @@
# Makefile to build the SDL library
include ./android/config.cfg #get ANDROID_NDK, ANDROID_NDK_HOST
# For NDK R4
TOOLS_PATH=$(ANDROID_NDK)/build/prebuilt/$(ANDROID_NDK_HOST)/arm-eabi-4.2.1/bin
ANDROID_INCLUDES = -I$(ANDROID_NDK)/build/platforms/android-4/arch-arm/usr/include
# For NDK R5
#TOOLS_PATH=$(ANDROID_NDK)/toolchains/arm-eabi-4.4.0/prebuilt/$(ANDROID_NDK_HOST)/bin
#ANDROID_INCLUDES = -I$(ANDROID_NDK)/platforms/android-4/arch-arm/usr/include
INCLUDE = -I./include
CFLAGS = -g -O2 -fno-short-enums $(INCLUDE) $(ANDROID_INCLUDES) -DANDROID -DANDROID_NDK -static
AR = $(TOOLS_PATH)/arm-eabi-ar
RANLIB = $(TOOLS_PATH)/arm-eabi-ranlib
CC = $(TOOLS_PATH)/arm-eabi-gcc
CONFIG_H = include/SDL_config.h
TARGET = libSDL.a
SOURCES = \
src/*.c \
src/audio/*.c \
src/cpuinfo/*.c \
src/events/*.c \
src/file/*.c \
src/joystick/*.c \
src/haptic/*.c \
src/stdlib/*.c \
src/thread/*.c \
src/timer/*.c \
src/video/*.c \
src/power/*.c \
src/audio/android/*.c \
src/audio/dummy/*.c \
src/video/android/*.c \
src/joystick/android/*.c \
src/haptic/dummy/*.c \
src/atomic/dummy/*.c \
src/thread/pthread/*.c \
src/timer/unix/*.c \
src/loadso/dummy/*.c \
OBJECTS = $(shell echo $(SOURCES) | sed -e 's,\.c,\.o,g')
all: $(TARGET)
$(TARGET): $(CONFIG_H) $(OBJECTS)
$(AR) crv $@ $^
$(RANLIB) $@
$(CONFIG_H):
cp $(CONFIG_H).default $(CONFIG_H)
clean:
rm -f $(TARGET) $(OBJECTS)

@@ -2,7 +2,8 @@
Simple DirectMedia Layer for Android
================================================================================
Requirements: Android NDK r4 or later
Requirements: Android SDK and Android NDK r4 or later
http://developer.android.com/
================================================================================
How the port works
@@ -16,26 +17,44 @@ Java project, along with some C support code that communicates with Java
- This eventually produces a standard Android .apk package
================================================================================
Building an app
================================================================================
Instructions:
1. Edit android/config.cfg to point to the location of the NDK
2. Run 'make -f Makefile.android'. If all goes well, libsdl.a should be created
3. Place your application source files in android/project/jni
4. Edit the Android.mk to include your source files
1. Copy the android-project directory wherever you want your Android project to go
2. Move this SDL directory into the <project>/jni directory
3. Place your application source files in the <project>/jni/src directory
4. Edit <project>/jni/src/Android.mk to include your source files
5. Run 'ndk-build' (a script provided by the NDK). This compiles the C source
6. Edit project/local.properties to point to the SDK directory
6. Run 'ant debug' in android/project. This compiles the .java and eventually
creates a .apk with the C source embedded
7. 'ant install' will push the apk to the device or emulator (if connected)
If you want to use the Eclipse IDE, skip to the Eclipse section below.
6. Edit <project>/local.properties to point to the Android SDK directory
7. Run 'ant debug' in android/project. This compiles the .java and eventually
creates a .apk with the native code embedded
8. 'ant install' will push the apk to the device or emulator (if connected)
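
The ndk-build and Ant steps above are ordinary command invocations, so they chain naturally. As a rough sketch (not part of the project or this commit), a small wrapper makefile run from the <project> directory, with local.properties already pointing at the SDK, could drive them:

# Hypothetical convenience wrapper for the build steps above; assumes
# local.properties already points at the Android SDK.
.PHONY: native apk install

# Compile the C sources with the NDK
native:
	ndk-build

# Compile the Java side and package the .apk with the native code
apk: native
	ant debug

# Push the .apk to a connected device or emulator
install: apk
	ant install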
================================================================================
Using Eclipse
================================================================================
NEED CONTENT
================================================================================
Loading files
================================================================================
NEED CONTENT
================================================================================
Troubleshooting
================================================================================
NEED CONTENT
================================================================================

@@ -1,8 +0,0 @@
# This file sets up paths needed to find the NDK build tools.
# Edit it appropriately for your configuration and save it as "config.cfg".
# This should be the full path to the Android NDK
ANDROID_NDK := /Users/hercules/eclipse/android-ndk-r5
# This should be "linux-x86" for linux, "darwin-x86" for mac
ANDROID_NDK_HOST := darwin-x86

@@ -1,15 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="org.libsdl.app"
android:versionCode="1"
android:versionName="1.0">
<application android:label="@string/app_name" android:icon="@drawable/icon">
<activity android:name="SDLActivity"
android:label="@string/app_name">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
</application>
</manifest>

@@ -1,17 +0,0 @@
# This file is used to override default values used by the Ant build system.
#
# This file must be checked in Version Control Systems, as it is
# integral to the build system of your project.
# This file is only used by the Ant script.
# You can use this to override default values such as
# 'source.dir' for the location of your java source folder and
# 'out.dir' for the location of your output folder.
# You can also use it define how the release builds are signed by declaring
# the following properties:
# 'key.store' for the location of your keystore and
# 'key.alias' for the name of the key to use.
# The password will be asked during the build when you use the 'release' target.

@@ -1,67 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project name="SDLApp" default="help">
<!-- The local.properties file is created and updated by the 'android' tool.
It contains the path to the SDK. It should *NOT* be checked in in Version
Control Systems. -->
<property file="local.properties" />
<!-- The build.properties file can be created by you and is never touched
by the 'android' tool. This is the place to change some of the default property values
used by the Ant rules.
Here are some properties you may want to change/update:
application.package
the name of your application package as defined in the manifest. Used by the
'uninstall' rule.
source.dir
the name of the source directory. Default is 'src'.
out.dir
the name of the output directory. Default is 'bin'.
Properties related to the SDK location or the project target should be updated
using the 'android' tool with the 'update' action.
This file is an integral part of the build system for your application and
should be checked in in Version Control Systems.
-->
<property file="build.properties" />
<!-- The default.properties file is created and updated by the 'android' tool, as well
as ADT.
This file is an integral part of the build system for your application and
should be checked in in Version Control Systems. -->
<property file="default.properties" />
<!-- Custom Android task to deal with the project target, and import the proper rules.
This requires ant 1.6.0 or above. -->
<path id="android.antlibs">
<pathelement path="${sdk.dir}/tools/lib/anttasks.jar" />
<pathelement path="${sdk.dir}/tools/lib/sdklib.jar" />
<pathelement path="${sdk.dir}/tools/lib/androidprefs.jar" />
<pathelement path="${sdk.dir}/tools/lib/apkbuilder.jar" />
<pathelement path="${sdk.dir}/tools/lib/jarutils.jar" />
</path>
<taskdef name="setup"
classname="com.android.ant.SetupTask"
classpathref="android.antlibs" />
<!-- Execute the Android Setup task that will setup some properties specific to the target,
and import the build rules files.
The rules file is imported from
<SDK>/platforms/<target_platform>/templates/android_rules.xml
To customize some build steps for your project:
- copy the content of the main node <project> from android_rules.xml
- paste it in this build.xml below the <setup /> task.
- disable the import by changing the setup task below to <setup import="false" />
This will ensure that the properties are setup correctly but that your customized
build steps are used.
-->
<setup />
</project>

@@ -1,11 +0,0 @@
# This file is automatically generated by Android Tools.
# Do not modify this file -- YOUR CHANGES WILL BE ERASED!
#
# This file must be checked in Version Control Systems.
#
# To customize properties used by the Ant build system use,
# "build.properties", and override values to adapt the script to your
# project structure.
# Project target.
target=android-4

@@ -1,18 +0,0 @@
LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)
LOCAL_MODULE := sdlapp
SDL := ../../../
LOCAL_CFLAGS := -DANDROID_NDK \
-DDISABLE_IMPORTGL \
-I$(SDL)/include
LOCAL_SRC_FILES := \
android-support.cpp \
lesson05.c \
LOCAL_LDLIBS := -lGLESv1_CM -ldl -llog -lSDL -lgcc -L$(SDL)
include $(BUILD_SHARED_LIBRARY)

@@ -1,574 +0,0 @@
/*
* This code was created by Jeff Molofee '99
* (ported to Linux/SDL by Ti Leggett '01)
*
* If you've found this code useful, please let me know.
*
* Visit Jeff at http://nehe.gamedev.net/
*
* or for port-specific comments, questions, bugreports etc.
* email to leggett@eecs.tulane.edu
*/
#include <stdio.h>
#include <stdlib.h>
#include <math.h>
#include <signal.h>
#include <android/log.h>
#ifdef ANDROID
#include <GLES/gl.h>
#else
#include <GL/gl.h>
#include <GL/glu.h>
#endif
#include "SDL.h"
/* screen width, height, and bit depth */
#define SCREEN_WIDTH 320
#define SCREEN_HEIGHT 430
#define SCREEN_BPP 16
/* Define our booleans */
#define TRUE 1
#define FALSE 0
/* This is our SDL surface */
SDL_Surface *surface;
int rotation = 0;
/**************************************
gluperspective implementation
**************************************/
void gluPerspective(double fovy, double aspect, double zNear, double zFar){
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
double xmin, xmax, ymin, ymax;
ymax = zNear * tan(fovy * M_PI / 360.0);
ymin = -ymax;
xmin = ymin * aspect;
xmax = ymax * aspect;
glFrustumf(xmin, xmax, ymin, ymax, zNear, zFar);
}
/**************************************
glulookat implementation
**************************************/
void gluLookAt(GLfloat eyex, GLfloat eyey, GLfloat eyez,
GLfloat centerx, GLfloat centery, GLfloat centerz,
GLfloat upx, GLfloat upy, GLfloat upz)
{
GLfloat m[16];
GLfloat x[3], y[3], z[3];
GLfloat mag;
/* Make rotation matrix */
/* Z vector */
z[0] = eyex - centerx;
z[1] = eyey - centery;
z[2] = eyez - centerz;
mag = sqrt(z[0] * z[0] + z[1] * z[1] + z[2] * z[2]);
if (mag) { /* mpichler, 19950515 */
z[0] /= mag;
z[1] /= mag;
z[2] /= mag;
}
/* Y vector */
y[0] = upx;
y[1] = upy;
y[2] = upz;
/* X vector = Y cross Z */
x[0] = y[1] * z[2] - y[2] * z[1];
x[1] = -y[0] * z[2] + y[2] * z[0];
x[2] = y[0] * z[1] - y[1] * z[0];
/* Recompute Y = Z cross X */
y[0] = z[1] * x[2] - z[2] * x[1];
y[1] = -z[0] * x[2] + z[2] * x[0];
y[2] = z[0] * x[1] - z[1] * x[0];
/* mpichler, 19950515 */
/* cross product gives area of parallelogram, which is < 1.0 for
* non-perpendicular unit-length vectors; so normalize x, y here
*/
mag = sqrt(x[0] * x[0] + x[1] * x[1] + x[2] * x[2]);
if (mag) {
x[0] /= mag;
x[1] /= mag;
x[2] /= mag;
}
mag = sqrt(y[0] * y[0] + y[1] * y[1] + y[2] * y[2]);
if (mag) {
y[0] /= mag;
y[1] /= mag;
y[2] /= mag;
}
#define M(row,col) m[col*4+row]
M(0, 0) = x[0];
M(0, 1) = x[1];
M(0, 2) = x[2];
M(0, 3) = 0.0;
M(1, 0) = y[0];
M(1, 1) = y[1];
M(1, 2) = y[2];
M(1, 3) = 0.0;
M(2, 0) = z[0];
M(2, 1) = z[1];
M(2, 2) = z[2];
M(2, 3) = 0.0;
M(3, 0) = 0.0;
M(3, 1) = 0.0;
M(3, 2) = 0.0;
M(3, 3) = 1.0;
#undef M
glMultMatrixf(m);
/* Translate Eye to Origin */
glTranslatef(-eyex, -eyey, -eyez);
}
/* function to release/destroy our resources and restoring the old desktop */
void Quit( int returnCode )
{
/* clean up the window */
SDL_Quit( );
/* and exit appropriately */
exit( returnCode );
}
/* function to reset our viewport after a window resize */
int resizeWindow( int width, int height )
{
/* Height / width ration */
GLfloat ratio;
/* Protect against a divide by zero */
if ( height == 0 )
height = 1;
ratio = ( GLfloat )width / ( GLfloat )height;
/* Setup our viewport. */
glViewport( 0, 0, ( GLsizei )width, ( GLsizei )height );
/* change to the projection matrix and set our viewing volume. */
glMatrixMode( GL_PROJECTION );
glLoadIdentity( );
/* Set our perspective */
gluPerspective( 45.0f, ratio, 0.1f, 100.0f );
/* Make sure we're chaning the model view and not the projection */
glMatrixMode( GL_MODELVIEW );
/* Reset The View */
glLoadIdentity( );
return( TRUE );
}
/* function to handle key press events */
void handleKeyPress( SDL_keysym *keysym )
{
switch ( keysym->sym )
{
case SDLK_ESCAPE:
/* ESC key was pressed */
Quit( 0 );
break;
case SDLK_F1:
/* F1 key was pressed
* this toggles fullscreen mode
*/
SDL_WM_ToggleFullScreen( surface );
break;
case SDLK_LEFT:
rotation -= 30;
break;
case SDLK_RIGHT:
rotation += 30;
break;
default:
break;
}
__android_log_print(ANDROID_LOG_INFO, "SDL","Keycode: %d, %d, %d\n", keysym->sym, SDLK_LEFT, SDLK_RIGHT);
return;
}
/* general OpenGL initialization function */
int initGL( GLvoid )
{
/* Enable smooth shading */
glShadeModel( GL_SMOOTH );
/* Set the background black */
glClearColor( 0.0f, 0.0f, 0.0f, 0.0f );
/* Depth buffer setup */
//glClearDepth( 1.0f );
/* Enables Depth Testing */
glEnable( GL_DEPTH_TEST );
/* The Type Of Depth Test To Do */
glDepthFunc( GL_LEQUAL );
/* Really Nice Perspective Calculations */
glHint( GL_PERSPECTIVE_CORRECTION_HINT, GL_NICEST );
return( TRUE );
}
/* Here goes our drawing code */
int drawGLScene( GLvoid )
{
static int Frames = 0;
static int T0 = 0;
glViewport(0, 0, SCREEN_WIDTH, SCREEN_HEIGHT);
glClearColorx(0,0,0,255);
glClear(GL_DEPTH_BUFFER_BIT | GL_COLOR_BUFFER_BIT);
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
gluPerspective(45, (float)SCREEN_WIDTH / SCREEN_HEIGHT, 0.5f, 150);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
//Camera
gluLookAt(0,0,5, 0,0,0, 0,1,0);
//Draw a triangle
//glRotatef(iRot, 0, 1, 0);
glRotatef( rotation, 0.0f, 1.0f, 0.0f );
glEnableClientState (GL_VERTEX_ARRAY);
glEnableClientState (GL_COLOR_ARRAY);
/* Rotate The Triangle On The Y axis ( NEW ) */
//glRotatef( Frames % 360, 0.0f, 1.0f, 0.0f );
/* GLES variant of drawing a triangle */
const GLfloat triVertices[][9] = {
{ /* Front Triangle */
0.0f, 1.0f, 0.0f, /* Top Of Triangle */
-1.0f, -1.0f, 1.0f, /* Left Of Triangle */
1.0f, -1.0f, 1.0f /* Right Of Triangle */
}, { /* Right Triangle */
0.0f, 1.0f, 0.0f, /* Top Of Triangle */
1.0f, -1.0f, 1.0f, /* Left Of Triangle */
1.0f, -1.0f, -1.0f /* Right Of Triangle */
}, { /* Back Triangle */
0.0f, 1.0f, 0.0f, /* Top Of Triangle */
1.0f, -1.0f, -1.0f, /* Left Of Triangle */
-1.0f, -1.0f, -1.0f /* Right Of Triangle */
}, { /* Left Triangle */
0.0f, 1.0f, 0.0f, /* Top Of Triangle */
-1.0f, -1.0f, -1.0f, /* Left Of Triangle */
-1.0f, -1.0f, 1.0f /* Right Of Triangle */
}
};
/* unlike GL, GLES does not support RGB. We have to use RGBA instead */
const GLfloat triColors[][12] = {
{ /* Front triangle */
1.0f, 0.0f, 0.0f, 1.0f, /* Red */
0.0f, 1.0f, 0.0f, 1.0f, /* Green */
0.0f, 0.0f, 1.0f, 1.0f /* Blue */
}, { /* Right triangle */
1.0f, 0.0f, 0.0f, 1.0f, /* Red */
0.0f, 0.0f, 1.0f, 1.0f, /* Blue */
0.0f, 1.0f, 0.0f, 1.0f /* Green */
}, { /* Back triangle */
1.0f, 0.0f, 0.0f, 1.0f, /* Red */
0.0f, 1.0f, 0.0f, 1.0f, /* Green */
0.0f, 0.0f, 1.0f, 1.0f /* Blue */
}, { /* Left triangle */
1.0f, 0.0f, 0.0f, 1.0f, /* Red */
0.0f, 0.0f, 1.0f, 1.0f, /* Blue */
0.0f, 1.0f, 0.0f, 1.0f /* Green */
}
};
glEnableClientState(GL_COLOR_ARRAY);
int tri=0;
/* Loop through all Triangles */
for(tri=0;tri<sizeof(triVertices)/(9*sizeof(GLfloat));tri++)
{
glVertexPointer(3, GL_FLOAT, 0, triVertices[tri]);
glColorPointer(4, GL_FLOAT, 0, triColors[tri]);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 3);
}
//__android_log_print(ANDROID_LOG_INFO, "SDL", "render %d", Frames++);
/* Draw it to the screen */
SDL_GL_SwapBuffers( );
/* Gather our frames per second */
Frames++;
{
GLint t = SDL_GetTicks();
if (t - T0 >= 5000) {
GLfloat seconds = (t - T0) / 1000.0;
GLfloat fps = Frames / seconds;
__android_log_print(ANDROID_LOG_INFO, "SDL","%d frames in %g seconds = %g FPS\n", Frames, seconds, fps);
T0 = t;
Frames = 0;
}
}
rotation++;
return( TRUE );
}
struct
{
SDL_AudioSpec spec;
Uint8 *sound; /* Pointer to wave data */
Uint32 soundlen; /* Length of wave data */
int soundpos; /* Current play position */
} wave;
void SDLCALL
fillerup(void *unused, Uint8 * stream, int len)
{
__android_log_print(ANDROID_LOG_INFO, "SDL","FILLERUP\n");
Uint8 *waveptr;
int waveleft;
/* Set up the pointers */
waveptr = wave.sound + wave.soundpos;
waveleft = wave.soundlen - wave.soundpos;
/* Go! */
while (waveleft <= len) {
SDL_memcpy(stream, waveptr, waveleft);
stream += waveleft;
len -= waveleft;
waveptr = wave.sound;
waveleft = wave.soundlen;
wave.soundpos = 0;
}
SDL_memcpy(stream, waveptr, len);
wave.soundpos += len;
}
void testAudio(){
const char *file = "/sdcard/sample.wav";
/* Load the SDL library */
if (SDL_Init(SDL_INIT_AUDIO) < 0) {
__android_log_print(ANDROID_LOG_INFO, "SDL","Couldn't initialize SDL Audio: %s\n", SDL_GetError());
return;
}else{
__android_log_print(ANDROID_LOG_INFO, "SDL","Init audio ok\n");
}
/* Load the wave file into memory */
if (SDL_LoadWAV(file, &wave.spec, &wave.sound, &wave.soundlen) == NULL) {
__android_log_print(ANDROID_LOG_INFO, "SDL", "Couldn't load %s: %s\n", file, SDL_GetError());
return;
}
wave.spec.callback = fillerup;
__android_log_print(ANDROID_LOG_INFO, "SDL","Loaded: %d\n", wave.soundlen);
/* Initialize fillerup() variables */
if (SDL_OpenAudio(&wave.spec, NULL) < 0) {
__android_log_print(ANDROID_LOG_INFO, "SDL", "Couldn't open audio: %s\n", SDL_GetError());
SDL_FreeWAV(wave.sound);
return;
}
__android_log_print(ANDROID_LOG_INFO, "SDL","Using audio driver: %s\n", SDL_GetCurrentAudioDriver());
/* Let the audio run */
SDL_PauseAudio(0);
__android_log_print(ANDROID_LOG_INFO, "SDL","Playing\n");
while (SDL_GetAudioStatus() == SDL_AUDIO_PLAYING){
//__android_log_print(ANDROID_LOG_INFO, "SDL","Still playing\n");
SDL_Delay(100);
}
__android_log_print(ANDROID_LOG_INFO, "SDL","Closing down\n");
/* Clean up on signal */
SDL_CloseAudio();
SDL_FreeWAV(wave.sound);
}
int SDL_main( int argc, char **argv )
{
__android_log_print(ANDROID_LOG_INFO, "SDL","entry\n");
/* Flags to pass to SDL_SetVideoMode */
int videoFlags;
/* main loop variable */
int done = FALSE;
/* used to collect events */
SDL_Event event;
/* this holds some info about our display */
const SDL_VideoInfo *videoInfo;
/* whether or not the window is active */
int isActive = TRUE;
/* initialize SDL */
if ( SDL_Init( SDL_INIT_VIDEO ) < 0 )
{
__android_log_print(ANDROID_LOG_INFO, "SDL", "Video initialization failed: %s\n",
SDL_GetError( ) );
Quit( 1 );
}
/* Fetch the video info */
videoInfo = SDL_GetVideoInfo( );
if ( !videoInfo )
{
__android_log_print(ANDROID_LOG_INFO, "SDL", "Video query failed: %s\n",
SDL_GetError( ) );
Quit( 1 );
}
/* the flags to pass to SDL_SetVideoMode */
videoFlags = SDL_OPENGL; /* Enable OpenGL in SDL */
videoFlags |= SDL_GL_DOUBLEBUFFER; /* Enable double buffering */
videoFlags |= SDL_HWPALETTE; /* Store the palette in hardware */
videoFlags |= SDL_RESIZABLE; /* Enable window resizing */
/* This checks to see if surfaces can be stored in memory */
if ( videoInfo->hw_available )
videoFlags |= SDL_HWSURFACE;
else
videoFlags |= SDL_SWSURFACE;
/* This checks if hardware blits can be done */
if ( videoInfo->blit_hw )
videoFlags |= SDL_HWACCEL;
/* Sets up OpenGL double buffering */
SDL_GL_SetAttribute( SDL_GL_DOUBLEBUFFER, 1 );
/* get a SDL surface */
surface = SDL_SetVideoMode( SCREEN_WIDTH, SCREEN_HEIGHT, SCREEN_BPP,
videoFlags );
/* Verify there is a surface */
if ( !surface )
{
__android_log_print(ANDROID_LOG_INFO, "SDL", "Video mode set failed: %s\n", SDL_GetError( ) );
Quit( 1 );
}
__android_log_print(ANDROID_LOG_INFO, "SDL","Made a video mode!\n");
/* initialize OpenGL */
initGL( );
/* resize the initial window */
resizeWindow( SCREEN_WIDTH, SCREEN_HEIGHT );
//testAudio();
/* wait for events */
while ( !done )
{
/* handle the events in the queue */
while ( SDL_PollEvent( &event ) )
{
switch( event.type )
{
case SDL_ACTIVEEVENT:
/* Something's happend with our focus
* If we lost focus or we are iconified, we
* shouldn't draw the screen
*/
if ( event.active.gain == 0 )
isActive = FALSE;
else
isActive = TRUE;
break;
case SDL_VIDEORESIZE:
/* handle resize event */
surface = SDL_SetVideoMode( event.resize.w,
event.resize.h,
16, videoFlags );
if ( !surface )
{
__android_log_print(ANDROID_LOG_INFO, "SDL","Could not get a surface after resize: %s\n", SDL_GetError( ) );
Quit( 1 );
}
resizeWindow( event.resize.w, event.resize.h );
break;
case SDL_KEYDOWN:
/* handle key presses */
handleKeyPress( &event.key.keysym );
break;
case SDL_QUIT:
/* handle quit requests */
done = TRUE;
__android_log_print(ANDROID_LOG_INFO, "SDL","App is shutting down\n");
break;
default:
break;
}
}
/* draw the scene */
if ( isActive )
drawGLScene( );
}
/* clean ourselves up and exit */
Quit( 0 );
/* Should never get here */
return( 0 );
}

@@ -1,10 +0,0 @@
# This file is automatically generated by Android Tools.
# Do not modify this file -- YOUR CHANGES WILL BE ERASED!
#
# This file must *NOT* be checked in Version Control Systems,
# as it contains information specific to your local configuration.
# location of the SDK. This is only used by Ant
# For customization when using a Version Control System, please read the
# header note.
sdk.dir=/home/paul/Projects/gsoc/sdk/android-sdk-linux_86

Three binary image files removed (not shown): 4.0 KiB, 1.7 KiB, and 2.5 KiB.

@@ -1,13 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:orientation="vertical"
android:layout_width="fill_parent"
android:layout_height="fill_parent"
>
<TextView
android:layout_width="fill_parent"
android:layout_height="wrap_content"
android:text="Hello World, SDLActivity"
/>
</LinearLayout>

@@ -1,4 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<string name="app_name">SDLActivity</string>
</resources>

@@ -1,388 +0,0 @@
package org.libsdl.app;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
import javax.microedition.khronos.egl.*;
import android.app.*;
import android.content.*;
import android.view.*;
import android.os.*;
import android.util.Log;
import android.graphics.*;
import android.text.method.*;
import android.text.*;
import android.media.*;
import android.hardware.*;
import android.content.*;
import java.lang.*;
/**
SDL Activity
*/
public class SDLActivity extends Activity {
//Main components
private static SDLActivity mSingleton;
private static SDLSurface mSurface;
//Audio
private static AudioTrack mAudioTrack;
private static boolean bAudioIsEnabled;
//Sensors
private static boolean bAccelIsEnabled;
//feature IDs. Must match up on the C side as well.
private static int FEATURE_AUDIO = 1;
private static int FEATURE_ACCEL = 2;
//Load the .so
static {
System.loadLibrary("sdlapp");
}
//Setup
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
//So we can call stuff from static callbacks
mSingleton = this;
//Set up the surface
mSurface = new SDLSurface(getApplication());
setContentView(mSurface);
SurfaceHolder holder = mSurface.getHolder();
holder.setType(SurfaceHolder.SURFACE_TYPE_GPU);
}
//Audio
public static boolean initAudio(){
//blah. Hardcoded things are bad. FIXME when we have more sound stuff
//working properly.
mAudioTrack = new AudioTrack(AudioManager.STREAM_MUSIC,
11025,
AudioFormat.CHANNEL_CONFIGURATION_MONO,
AudioFormat.ENCODING_PCM_8BIT,
2048,
AudioTrack.MODE_STREAM);
bAudioIsEnabled = true;
return true;
}
//Accel
public static boolean initAccel(){
mSurface.enableSensor(Sensor.TYPE_ACCELEROMETER, true);
bAccelIsEnabled = true;
return true;
}
public static boolean closeAccel(){
mSurface.enableSensor(Sensor.TYPE_ACCELEROMETER, false);
bAccelIsEnabled = false;
return true;
}
//Events
protected void onPause() {
super.onPause();
}
protected void onResume() {
super.onResume();
}
//C functions we call
public static native void nativeInit();
public static native void nativeQuit();
public static native void nativeSetScreenSize(int width, int height);
public static native void onNativeKeyDown(int keycode);
public static native void onNativeKeyUp(int keycode);
public static native void onNativeTouch(int action, float x,
float y, float p);
public static native void onNativeResize(int x, int y, int format);
public static native void onNativeAccel(float x, float y, float z);
//Java functions called from C
private static void createGLContext(){
mSurface.initEGL();
}
public static void flipBuffers(){
mSurface.flipEGL();
}
public static void updateAudio(byte [] buf){
if(mAudioTrack == null){
return;
}
mAudioTrack.write(buf, 0, buf.length);
mAudioTrack.play();
Log.v("SDL","Played some audio");
}
public static void enableFeature(int featureid, int enabled){
Log.v("SDL","Feature " + featureid + " = " + enabled);
//Yuck. This is all horribly inelegent. If it gets to more than a few
//'features' I'll rip this out and make something nicer, I promise :)
if(featureid == FEATURE_AUDIO){
if(enabled == 1){
initAudio();
}else{
//We don't have one of these yet...
//closeAudio();
}
}
else if(featureid == FEATURE_ACCEL){
if(enabled == 1){
initAccel();
}else{
closeAccel();
}
}
}
}
/**
Simple nativeInit() runnable
*/
class SDLRunner implements Runnable{
public void run(){
//SDLActivity.initAudio();
//Runs SDL_main()
SDLActivity.nativeInit();
Log.v("SDL","SDL thread terminated");
}
}
/**
SDLSurface. This is what we draw on, so we need to know when it's created
in order to do anything useful.
Because of this, that's where we set up the SDL thread
*/
class SDLSurface extends SurfaceView implements SurfaceHolder.Callback,
View.OnKeyListener, View.OnTouchListener, SensorEventListener {
//This is what SDL runs in. It invokes SDL_main(), eventually
private Thread mSDLThread;
//EGL private objects
private EGLContext mEGLContext;
private EGLSurface mEGLSurface;
private EGLDisplay mEGLDisplay;
//Sensors
private static SensorManager mSensorManager;
//Startup
public SDLSurface(Context context) {
super(context);
getHolder().addCallback(this);
setFocusable(true);
setFocusableInTouchMode(true);
requestFocus();
setOnKeyListener(this);
setOnTouchListener(this);
mSensorManager = (SensorManager)context.getSystemService("sensor");
}
//Called when we have a valid drawing surface
public void surfaceCreated(SurfaceHolder holder) {
Log.v("SDL","Surface created");
int width = getWidth();
int height = getHeight();
//Set the width and height variables in C before we start SDL so we have
//it available on init
SDLActivity.nativeSetScreenSize(width, height);
//Now start up the C app thread
mSDLThread = new Thread(new SDLRunner(), "SDLThread");
mSDLThread.start();
}
//Called when we lose the surface
public void surfaceDestroyed(SurfaceHolder holder) {
Log.v("SDL","Surface destroyed");
SDLActivity.nativeQuit();
//Now wait for the SDL thread to quit
try{
mSDLThread.wait();
}catch(Exception e){
Log.v("SDL","Problem stopping thread: " + e);
}
}
//Called when the surface is resized
public void surfaceChanged(SurfaceHolder holder, int format,
int width, int height) {
Log.v("SDL","Surface resized");
SDLActivity.onNativeResize(width, height, format);
}
//unused
public void onDraw(Canvas canvas) {}
//EGL functions
public boolean initEGL(){
Log.v("SDL","Starting up");
try{
EGL10 egl = (EGL10)EGLContext.getEGL();
EGLDisplay dpy = egl.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY);
int[] version = new int[2];
egl.eglInitialize(dpy, version);
int[] configSpec = {
//EGL10.EGL_DEPTH_SIZE, 16,
EGL10.EGL_NONE
};
EGLConfig[] configs = new EGLConfig[1];
int[] num_config = new int[1];
egl.eglChooseConfig(dpy, configSpec, configs, 1, num_config);
EGLConfig config = configs[0];
EGLContext ctx = egl.eglCreateContext(dpy, config, EGL10.EGL_NO_CONTEXT, null);
EGLSurface surface = egl.eglCreateWindowSurface(dpy, config, this, null);
egl.eglMakeCurrent(dpy, surface, surface, ctx);
mEGLContext = ctx;
mEGLDisplay = dpy;
mEGLSurface = surface;
}catch(Exception e){
Log.v("SDL", e + "");
for(StackTraceElement s : e.getStackTrace()){
Log.v("SDL", s.toString());
}
}
Log.v("SDL","Done making!");
return true;
}
//EGL buffer flip
public void flipEGL(){
try{
EGL10 egl = (EGL10)EGLContext.getEGL();
GL10 gl = (GL10)mEGLContext.getGL();
egl.eglWaitNative(EGL10.EGL_NATIVE_RENDERABLE, null);
//drawing here
egl.eglWaitGL();
egl.eglSwapBuffers(mEGLDisplay, mEGLSurface);
}catch(Exception e){
Log.v("SDL", "flipEGL(): " + e);
for(StackTraceElement s : e.getStackTrace()){
Log.v("SDL", s.toString());
}
}
}
//Key events
public boolean onKey(View v, int keyCode, KeyEvent event){
if(event.getAction() == KeyEvent.ACTION_DOWN){
SDLActivity.onNativeKeyDown(keyCode);
return true;
}
else if(event.getAction() == KeyEvent.ACTION_UP){
SDLActivity.onNativeKeyUp(keyCode);
return true;
}
return false;
}
//Touch events
public boolean onTouch(View v, MotionEvent event){
int action = event.getAction();
float x = event.getX();
float y = event.getY();
float p = event.getPressure();
//TODO: Anything else we need to pass?
SDLActivity.onNativeTouch(action, x, y, p);
return true;
}
//Sensor events
public void enableSensor(int sensortype, boolean enabled){
//TODO: This uses getDefaultSensor - what if we have >1 accels?
if(enabled){
mSensorManager.registerListener(this,
mSensorManager.getDefaultSensor(sensortype),
SensorManager.SENSOR_DELAY_GAME, null);
}else{
mSensorManager.unregisterListener(this,
mSensorManager.getDefaultSensor(sensortype));
}
}
public void onAccuracyChanged(Sensor sensor, int accuracy){
//TODO
}
public void onSensorChanged(SensorEvent event){
if(event.sensor.getType() == Sensor.TYPE_ACCELEROMETER){
SDLActivity.onNativeAccel( event.values[0],
event.values[1],
event.values[2] );
}
}
}

@@ -127,7 +127,7 @@ typedef unsigned int size_t;
#define SDL_HAPTIC_DUMMY 1
/* Enable various shared object loading systems */
#define SDL_LOADSO_DUMMY 1
#define SDL_LOADSO_DLOPEN 1
/* Enable various threading systems */
#define SDL_THREAD_PTHREAD 1

@@ -1,31 +1,39 @@
/*
SDL - Simple DirectMedia Layer
Copyright (C) 1997-2010 Sam Lantinga
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
Sam Lantinga
slouken@libsdl.org
*/
#include "SDL_config.h"
/*******************************************************************************
This file links the Java side of Android with libsdl
*******************************************************************************/
#include <jni.h>
#include <sys/time.h>
#include <time.h>
#include <android/log.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <math.h>
#include <pthread.h>
#define DEBUG
/*******************************************************************************
Globals
*******************************************************************************/
static long _getTime(void){
struct timeval now;
gettimeofday(&now, NULL);
return (long)(now.tv_sec*1000 + now.tv_usec/1000);
}
JavaVM* mVM = NULL;
JNIEnv* mEnv = NULL;
JNIEnv* mAudioThreadEnv = NULL; //See the note below for why this is necessary
JavaVM* mVM = NULL;
//Main activity
jclass mActivityInstance;
@@ -36,7 +44,6 @@ jmethodID midFlipBuffers;
jmethodID midEnableFeature;
jmethodID midUpdateAudio;
extern "C" int SDL_main(int argc, char *argv[]);
extern "C" int Android_OnKeyDown(int keycode);
extern "C" int Android_OnKeyUp(int keycode);
extern "C" void Android_SetScreenResolution(int width, int height);
@@ -57,21 +64,22 @@ float fLastAccelerometer[3];
/*******************************************************************************
Functions called by JNI
*******************************************************************************/
*******************************************************************************/
//Library init
extern "C" jint JNI_OnLoad(JavaVM* vm, void* reserved){
// Library init
extern "C" jint JNI_OnLoad(JavaVM* vm, void* reserved)
{
mVM = vm;
JNIEnv* env = NULL;
jint result = -1;
if (vm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
return result;
}
return JNI_VERSION_1_4;
}
// Called before SDL_main() to initialize JNI bindings
extern "C" void SDL_Android_Init(JNIEnv* env)
{
mEnv = env;
__android_log_print(ANDROID_LOG_INFO, "SDL", "JNI: OnLoad");
__android_log_print(ANDROID_LOG_INFO, "SDL", "SDL_Android_Init()");
jclass cls = mEnv->FindClass ("org/libsdl/app/SDLActivity");
mActivityInstance = cls;
@@ -81,62 +89,41 @@ extern "C" jint JNI_OnLoad(JavaVM* vm, void* reserved){
midUpdateAudio = mEnv->GetStaticMethodID(cls,"updateAudio","([B)V");
if(!midCreateGLContext || !midFlipBuffers || !midEnableFeature ||
!midUpdateAudio){
!midUpdateAudio) {
__android_log_print(ANDROID_LOG_INFO, "SDL", "SDL: Bad mids\n");
}else{
} else {
#ifdef DEBUG
__android_log_print(ANDROID_LOG_INFO, "SDL", "SDL: Good mids\n");
#endif
}
return JNI_VERSION_1_4;
}
//Start up the SDL app
extern "C" void Java_org_libsdl_app_SDLActivity_nativeInit( JNIEnv* env,
jobject obj ){
__android_log_print(ANDROID_LOG_INFO, "SDL", "SDL: Native Init");
mEnv = env;
bRenderingEnabled = true;
Android_EnableFeature(FEATURE_ACCEL, true);
char *argv[2];
argv[0] = strdup("SDL_app");
argv[1] = NULL;
SDL_main(1, argv);
}
//Keydown
// Keydown
extern "C" void Java_org_libsdl_app_SDLActivity_onNativeKeyDown(JNIEnv* env,
jobject obj, jint keycode){
jobject obj, jint keycode)
{
int r = Android_OnKeyDown(keycode);
#ifdef DEBUG
__android_log_print(ANDROID_LOG_INFO, "SDL",
"SDL: native key down %d, %d\n", keycode, r);
#endif
}
//Keyup
// Keyup
extern "C" void Java_org_libsdl_app_SDLActivity_onNativeKeyUp(JNIEnv* env,
jobject obj, jint keycode){
jobject obj, jint keycode)
{
int r = Android_OnKeyUp(keycode);
#ifdef DEBUG
__android_log_print(ANDROID_LOG_INFO, "SDL",
"SDL: native key up %d, %d\n", keycode, r);
#endif
}
//Touch
// Touch
extern "C" void Java_org_libsdl_app_SDLActivity_onNativeTouch(JNIEnv* env,
jobject obj, jint action, jfloat x, jfloat y, jfloat p){
jobject obj, jint action, jfloat x, jfloat y, jfloat p)
{
#ifdef DEBUG
__android_log_print(ANDROID_LOG_INFO, "SDL",
"SDL: native touch event %d @ %f/%f, pressure %f\n",
@@ -144,42 +131,40 @@ extern "C" void Java_org_libsdl_app_SDLActivity_onNativeTouch(JNIEnv* env,
#endif
//TODO: Pass this off to the SDL multitouch stuff
}
//Quit
// Quit
extern "C" void Java_org_libsdl_app_SDLActivity_nativeQuit( JNIEnv* env,
jobject obj ){
jobject obj )
{
// Stop rendering as we're no longer in the foreground
bRenderingEnabled = false;
//Stop rendering as we're no longer in the foreground
bRenderingEnabled = false;
//Inject a SDL_QUIT event
int r = SDL_SendQuit();
__android_log_print(ANDROID_LOG_INFO, "SDL", "SDL: Native quit %d", r);
// Inject a SDL_QUIT event
SDL_SendQuit();
}
//Screen size
// Screen size
extern "C" void Java_org_libsdl_app_SDLActivity_nativeSetScreenSize(
JNIEnv* env, jobject obj, jint width, jint height){
JNIEnv* env, jobject obj, jint width, jint height)
{
__android_log_print(ANDROID_LOG_INFO, "SDL",
"SDL: Set screen size on init: %d/%d\n", width, height);
Android_SetScreenResolution(width, height);
}
//Resize
// Resize
extern "C" void Java_org_libsdl_app_SDLActivity_onNativeResize(
JNIEnv* env, jobject obj, jint width,
jint height, jint format){
jint height, jint format)
{
Android_OnResize(width, height, format);
}
extern "C" void Java_org_libsdl_app_SDLActivity_onNativeAccel(
JNIEnv* env, jobject obj,
jfloat x, jfloat y, jfloat z){
jfloat x, jfloat y, jfloat z)
{
fLastAccelerometer[0] = x;
fLastAccelerometer[1] = y;
fLastAccelerometer[2] = z;
@@ -190,38 +175,39 @@ extern "C" void Java_org_libsdl_app_SDLActivity_onNativeAccel(
/*******************************************************************************
Functions called by SDL into Java
*******************************************************************************/
extern "C" void Android_CreateContext(){
__android_log_print(ANDROID_LOG_INFO, "SDL", "SDL: sdl_create_context()\n");
extern "C" void Android_CreateContext()
{
__android_log_print(ANDROID_LOG_INFO, "SDL", "SDL: sdl_create_context()\n");
bRenderingEnabled = true;
bRenderingEnabled = true;
mEnv->CallStaticVoidMethod(mActivityInstance, midCreateGLContext );
}
extern "C" void Android_Render(){
if(!bRenderingEnabled){
extern "C" void Android_Render()
{
if (!bRenderingEnabled) {
return;
}
//When we get here, we've accumulated a full frame
mEnv->CallStaticVoidMethod(mActivityInstance, midFlipBuffers );
// When we get here, we've accumulated a full frame
mEnv->CallStaticVoidMethod(mActivityInstance, midFlipBuffers);
}
extern "C" void Android_EnableFeature(int featureid, bool enabled){
extern "C" void Android_EnableFeature(int featureid, bool enabled)
{
mEnv->CallStaticVoidMethod(mActivityInstance, midEnableFeature,
featureid, (int)enabled);
}
extern "C" void Android_UpdateAudioBuffer(unsigned char *buf, int len){
extern "C" void Android_UpdateAudioBuffer(unsigned char *buf, int len)
{
//Annoyingly we can't just call into Java from any thread. Because the audio
//callback is dispatched from the SDL audio thread (that wasn't made from
//java, we have to do some magic here to let the JVM know about the thread.
//Because everything it touches on the Java side is static anyway, it's
//not a big deal, just annoying.
if(!mAudioThreadEnv){
if(!mAudioThreadEnv) {
__android_log_print(ANDROID_LOG_INFO, "SDL", "SDL: Need to set up audio thread env\n");
mVM->AttachCurrentThread(&mAudioThreadEnv, NULL);

@@ -0,0 +1,30 @@
/* Include the SDL main definition header */
#include "SDL_main.h"
/*******************************************************************************
Functions called by JNI
*******************************************************************************/
#include <jni.h>
// Called before SDL_main() to initialize JNI bindings in SDL library
extern "C" void SDL_Android_Init(JNIEnv* env);
// Library init
extern "C" jint JNI_OnLoad(JavaVM* vm, void* reserved)
{
return JNI_VERSION_1_4;
}
// Start up the SDL app
extern "C" void Java_org_libsdl_app_SDLActivity_nativeInit( JNIEnv* env, jobject obj )
{
/* This interface could expand with ABI negotiation, calbacks, etc. */
SDL_Android_Init(env);
/* Run the application code! */
char *argv[2];
argv[0] = strdup("SDL_app");
argv[1] = NULL;
SDL_main(1, argv);
}