Merge branch 'master' of https://github.com/jtraynham/ppsspp into jtraynham-master

Conflicts:
	lang
	pspautotests
This commit is contained in:
Henrik Rydgard 2013-06-26 22:56:53 +02:00
commit cc414296d6
9 changed files with 279 additions and 10 deletions

View File

@ -173,6 +173,8 @@ if(NOT MSVC)
if(IOS)
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -stdlib=libstdc++")
# armv7s (without resorting to FastMemory) is still a work in progress
# comment out the next line to enable default/"standard" architectures (which is a fat armv7/armv7s binary)
set(CMAKE_OSX_ARCHITECTURES "armv7")
elseif(APPLE)
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -stdlib=libc++ -U__STRICT_ANSI__")
@ -460,8 +462,8 @@ elseif(IOS)
ios/AppDelegate.h
ios/ViewController.mm
ios/ViewController.h
ios/AudioEngine.mm
ios/AudioEngine.h
ios/iOSCoreAudio.cpp
ios/iOSCoreAudio.h
ios/iCade/iCadeReaderView.h
ios/iCade/iCadeReaderView.m
ios/iCade/iCadeState.h)
@ -1143,9 +1145,12 @@ if(USE_FFMPEG)
endif()
if(APPLE)
set(FFMPEG_LIBRARIES ${FFMPEG_LIBRARIES} bz2 "-framework VideoDecodeAcceleration" "-framework CoreVideo")
set(FFMPEG_LIBRARIES ${FFMPEG_LIBRARIES} bz2 "-framework CoreVideo")
if (NOT IOS)
set(FFMPEG_LIBRARIES ${FFMPEG_LIBRARIES} "-framework VideoDecodeAcceleration")
endif()
endif(APPLE)
set(LinkCommon ${LinkCommon} ${FFMPEG_LIBRARIES})
add_definitions(-DUSE_FFMPEG)
endif(USE_FFMPEG)

View File

@ -132,7 +132,10 @@ void CPUInfo::Detect()
bool isVFP4 = false;
#ifdef IOS
isVFP3 = true;
// TODO: Check for swift arch (VFP4)
// Check for swift arch (VFP4)
#ifdef __ARM_ARCH_7S__
isVFP4 = true;
#endif // #ifdef __ARM_ARCH_7S__
#elif defined(BLACKBERRY)
isVFP3 = true;
const char cpuInfoPath[] = "/pps/services/hw_info/inventory";

View File

@ -105,6 +105,17 @@ void Jit::Comp_FPULS(u32 op)
}
ADD(R0, R0, R11);
}
#ifdef __ARM_ARCH_7S__
FixupBranch skip;
if (doCheck) {
skip = B_CC(CC_EQ);
}
VLDR(fpr.R(ft), R0, 0);
if (doCheck) {
SetJumpTarget(skip);
SetCC(CC_AL);
}
#else
VLDR(fpr.R(ft), R0, 0);
if (doCheck) {
SetCC(CC_EQ);
@ -112,6 +123,7 @@ void Jit::Comp_FPULS(u32 op)
VMOV(fpr.R(ft), R0);
SetCC(CC_AL);
}
#endif
break;
case 57: //Memory::Write_U32(FI(ft), addr); break; //swc1
@ -129,10 +141,22 @@ void Jit::Comp_FPULS(u32 op)
}
ADD(R0, R0, R11);
}
#ifdef __ARM_ARCH_7S__
FixupBranch skip2;
if (doCheck) {
skip2 = B_CC(CC_EQ);
}
VSTR(fpr.R(ft), R0, 0);
if (doCheck) {
SetJumpTarget(skip2);
SetCC(CC_AL);
}
#else
VSTR(fpr.R(ft), R0, 0);
if (doCheck) {
SetCC(CC_AL);
}
#endif
break;
default:

View File

@ -216,12 +216,24 @@ namespace MIPSComp
}
ADD(R0, R0, R11);
}
#ifdef __ARM_ARCH_7S__
FixupBranch skip;
if (doCheck) {
skip = B_CC(CC_EQ);
}
VLDR(fpr.V(vt), R0, 0);
if (doCheck) {
SetJumpTarget(skip);
SetCC(CC_AL);
}
#else
VLDR(fpr.V(vt), R0, 0);
if (doCheck) {
SetCC(CC_EQ);
MOVI2F(fpr.V(vt), 0.0f, R0);
SetCC(CC_AL);
}
#endif
}
break;
@ -243,10 +255,22 @@ namespace MIPSComp
}
ADD(R0, R0, R11);
}
#ifdef __ARM_ARCH_7S__
FixupBranch skip;
if (doCheck) {
skip = B_CC(CC_EQ);
}
VSTR(fpr.V(vt), R0, 0);
if (doCheck) {
SetJumpTarget(skip);
SetCC(CC_AL);
}
#else
VSTR(fpr.V(vt), R0, 0);
if (doCheck) {
SetCC(CC_AL);
}
#endif
}
break;
@ -289,6 +313,20 @@ namespace MIPSComp
ADD(R0, R0, R11);
}
#ifdef __ARM_ARCH_7S__
FixupBranch skip;
if (doCheck) {
skip = B_CC(CC_EQ);
}
for (int i = 0; i < 4; i++)
VLDR(fpr.V(vregs[i]), R0, i * 4);
if (doCheck) {
SetJumpTarget(skip);
SetCC(CC_AL);
}
#else
for (int i = 0; i < 4; i++)
VLDR(fpr.V(vregs[i]), R0, i * 4);
@ -299,6 +337,7 @@ namespace MIPSComp
VMOV(fpr.V(vregs[i]), R0);
SetCC(CC_AL);
}
#endif
}
break;
@ -324,12 +363,27 @@ namespace MIPSComp
ADD(R0, R0, R11);
}
#ifdef __ARM_ARCH_7S__
FixupBranch skip;
if (doCheck) {
skip = B_CC(CC_EQ);
}
for (int i = 0; i < 4; i++)
VSTR(fpr.V(vregs[i]), R0, i * 4);
if (doCheck) {
SetJumpTarget(skip);
SetCC(CC_AL);
}
#else
for (int i = 0; i < 4; i++)
VSTR(fpr.V(vregs[i]), R0, i * 4);
if (doCheck) {
SetCC(CC_AL);
}
#endif
}
break;

View File

@ -34,7 +34,7 @@
#include "base/NativeApp.h"
#include "file/vfs.h"
#include "file/zip_read.h"
#include "ext/jpge/jpge.h"
#include "native/ext/jpge/jpge.h"
#include "gfx_es2/gl_state.h"
#include "gfx/gl_lost_manager.h"
#include "gfx/texture.h"
@ -79,6 +79,10 @@ static UI::Theme ui_theme;
#include <mach-o/dyld.h>
#endif
#ifdef IOS
#include "ios/iOSCoreAudio.h"
#endif
Texture *uiTexture;
ScreenManager *screenManager;
@ -171,9 +175,17 @@ std::string boot_filename = "";
void NativeHost::InitSound(PMixer *mixer) {
g_mixer = mixer;
#ifdef IOS
iOSCoreAudioInit();
#endif
}
void NativeHost::ShutdownSound() {
#ifdef IOS
iOSCoreAudioShutdown();
#endif
g_mixer = 0;
}

View File

@ -7,7 +7,10 @@ Prerequisites:
* Xcode (from the Mac App Store) with command line tools installed
* MacPorts (from macports.org); easiest to install with their package installers
* cmake build system (from MacPorts); run "sudo port install cmake" from the command line
* gas-preprocessor (to compile ffmpeg libs); download the zip from https://github.com/mansr/gas-preprocessor, unzip and from the command line run:
If you need to build ffmpeg yourself too, then you'll also need:
* gas-preprocessor; download the zip from https://github.com/mansr/gas-preprocessor, unzip and from the command line run:
sudo cp gas-preprocessor.pl /usr/bin/
sudo chmod +rw /usr/bin/gas-preprocessor.pl
@ -25,7 +28,7 @@ Change directory to the newly created ppsspp directory and run:
git submodule update --init
Change directory into ffmpeg and run (this will take a while):
The above command will pull in the submodules required by PPSSPP, including the native, ffmpeg, and lang directories. Included in the ffmpeg directory should be the necessary libs and includes for ffmpeg, so most people can skip the next command. However, if you need to recompile ffmpeg for some reason, change directory into ffmpeg and run (this will take a while):
./ios-build.sh

View File

@ -131,8 +131,6 @@ ViewController* sharedViewController;
dp_xscale = (float)dp_xres / (float)pixel_xres;
dp_yscale = (float)dp_yres / (float)pixel_yres;
if (g_Config.bEnableSound)
self.audioEngine = [[[AudioEngine alloc] init] autorelease];
/*
UISwipeGestureRecognizer* gesture = [[[UISwipeGestureRecognizer alloc] initWithTarget:self action:@selector(swipeGesture:)] autorelease];
[self.view addGestureRecognizer:gesture];

148
ios/iOSCoreAudio.cpp Normal file
View File

@ -0,0 +1,148 @@
// Copyright (c) 2012- PPSSPP Project.
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, version 2.0 or later versions.
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License 2.0 for more details.
// A copy of the GPL 2.0 should have been included with the program.
// If not, see http://www.gnu.org/licenses/
// Official git repository and contact information can be found at
// https://github.com/hrydgard/ppsspp and http://www.ppsspp.org/.
// This code implements the emulated audio using CoreAudio for iOS
// Originally written by jtraynham
#include "iOSCoreAudio.h"
#include <AudioToolbox/AudioToolbox.h>
#define SAMPLE_RATE 44100
#define STREAM_MAX_FRAME_COUNT 2048
// Intermediate mix buffer: filled by NativeMix() and copied into CoreAudio's
// output buffer by the render callback.
static short stream[STREAM_MAX_FRAME_COUNT * 2 * 2]; // frames * sample size * number of channels
// The single CoreAudio output unit; non-nil only between iOSCoreAudioInit()
// and iOSCoreAudioShutdown().
AudioComponentInstance audioInstance = nil;
// Provided by the app core: mixes up to num_samples stereo frames into 'audio'
// and (as used by the callback below) returns the number of frames produced.
int NativeMix(short *audio, int num_samples);
// Render callback invoked by CoreAudio whenever it needs more output data.
//
// inNumberFrames  number of stereo frames requested this cycle
// ioData          output buffer list; buffer 0 receives interleaved s16 stereo
//
// Returns noErr always; on underrun the output is flagged as silence.
OSStatus iOSCoreAudioCallback(void *inRefCon,
                              AudioUnitRenderActionFlags *ioActionFlags,
                              const AudioTimeStamp *inTimeStamp,
                              UInt32 inBusNumber,
                              UInt32 inNumberFrames,
                              AudioBufferList *ioData)
{
	// Never mix more frames than our intermediate buffer can hold.
	UInt32 frames = (inNumberFrames > STREAM_MAX_FRAME_COUNT ? STREAM_MAX_FRAME_COUNT : inNumberFrames);
	UInt32 framesReady = NativeMix(stream, frames);
	if (framesReady == 0) {
		// No sound available right now. Per the AURenderCallback contract,
		// setting kAudioUnitRenderAction_OutputIsSilence is only a hint --
		// the buffers must ALSO be zeroed, or downstream units may render
		// whatever stale data is left in them.
		for (UInt32 i = 0; i < ioData->mNumberBuffers; i++) {
			memset(ioData->mBuffers[i].mData, 0, ioData->mBuffers[i].mDataByteSize);
		}
		*ioActionFlags |= kAudioUnitRenderAction_OutputIsSilence;
		return noErr;
	}
	// Copy the interleaved stereo samples into the output buffer and tell
	// CoreAudio how much of it is valid audio data.
	AudioSampleType *output = (AudioSampleType *)ioData->mBuffers[0].mData;
	UInt32 bytesReady = framesReady * sizeof(short) * 2; // frames * sample size * channels
	memcpy(output, stream, bytesReady);
	ioData->mBuffers[0].mDataByteSize = bytesReady;
	return noErr;
}
void iOSCoreAudioInit()
{
if (!audioInstance) {
OSErr err;
// first, grab the default output
AudioComponentDescription defaultOutputDescription;
defaultOutputDescription.componentType = kAudioUnitType_Output;
defaultOutputDescription.componentSubType = kAudioUnitSubType_RemoteIO;
defaultOutputDescription.componentManufacturer = kAudioUnitManufacturer_Apple;
defaultOutputDescription.componentFlags = 0;
defaultOutputDescription.componentFlagsMask = 0;
AudioComponent defaultOutput = AudioComponentFindNext(NULL, &defaultOutputDescription);
// create our instance
err = AudioComponentInstanceNew(defaultOutput, &audioInstance);
if (err != noErr) {
audioInstance = nil;
return;
}
// create our callback so we can give it the audio data
AURenderCallbackStruct input;
input.inputProc = iOSCoreAudioCallback;
input.inputProcRefCon = NULL;
err = AudioUnitSetProperty(audioInstance,
kAudioUnitProperty_SetRenderCallback,
kAudioUnitScope_Input,
0,
&input,
sizeof(input));
if (err != noErr) {
AudioComponentInstanceDispose(audioInstance);
audioInstance = nil;
return;
}
// setup the audio format we'll be using (stereo pcm)
AudioStreamBasicDescription streamFormat;
memset(&streamFormat, 0, sizeof(streamFormat));
streamFormat.mSampleRate = SAMPLE_RATE;
streamFormat.mFormatID = kAudioFormatLinearPCM;
streamFormat.mFormatFlags = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked;
streamFormat.mBitsPerChannel = sizeof(AudioSampleType) * 8;
streamFormat.mChannelsPerFrame = 2;
streamFormat.mFramesPerPacket = 1;
streamFormat.mBytesPerFrame = (streamFormat.mBitsPerChannel / 8) * streamFormat.mChannelsPerFrame;
streamFormat.mBytesPerPacket = streamFormat.mBytesPerFrame * streamFormat.mFramesPerPacket;
err = AudioUnitSetProperty(audioInstance,
kAudioUnitProperty_StreamFormat,
kAudioUnitScope_Input,
0,
&streamFormat,
sizeof(AudioStreamBasicDescription));
if (err != noErr) {
AudioComponentInstanceDispose(audioInstance);
audioInstance = nil;
return;
}
// k, all setup, so init
err = AudioUnitInitialize(audioInstance);
if (err != noErr) {
AudioComponentInstanceDispose(audioInstance);
audioInstance = nil;
return;
}
// finally start playback
err = AudioOutputUnitStart(audioInstance);
if (err != noErr) {
AudioUnitUninitialize(audioInstance);
AudioComponentInstanceDispose(audioInstance);
audioInstance = nil;
return;
}
// we're good to go
}
}
// Tears down the CoreAudio output unit created by iOSCoreAudioInit().
// Safe to call when audio was never initialized (no-op in that case).
void iOSCoreAudioShutdown()
{
	if (!audioInstance)
		return;
	// Undo setup in reverse order: stop playback, uninitialize, dispose.
	AudioOutputUnitStop(audioInstance);
	AudioUnitUninitialize(audioInstance);
	AudioComponentInstanceDispose(audioInstance);
	audioInstance = nil;
}

22
ios/iOSCoreAudio.h Normal file
View File

@ -0,0 +1,22 @@
// Copyright (c) 2012- PPSSPP Project.
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, version 2.0 or later versions.
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License 2.0 for more details.
// A copy of the GPL 2.0 should have been included with the program.
// If not, see http://www.gnu.org/licenses/
// Official git repository and contact information can be found at
// https://github.com/hrydgard/ppsspp and http://www.ppsspp.org/.
// This code implements the emulated audio using CoreAudio for iOS
// Originally written by jtraynham

// Guard against multiple inclusion (the header previously had no guard).
#pragma once

// Creates and starts the CoreAudio output unit; idempotent.
void iOSCoreAudioInit();
// Stops playback and releases the output unit; safe to call when not initialized.
void iOSCoreAudioShutdown();