mirror of https://github.com/Relintai/sdl2_frt.git
synced 2024-12-25 09:17:12 +01:00

android: implement audio capture support.

commit 8f0af77354 (parent b78ec97496)
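Before the file-by-file diff, here is a rough sketch of what this change enables on the application side: recording microphone audio through SDL's regular audio API. This is only an illustration, not part of the commit; it assumes an SDL build that includes this capture backend (the SDL 2.0.5-era recording API) and an app whose manifest has the RECORD_AUDIO permission uncommented, as the AndroidManifest.xml hunk below notes. The sample rate, format and buffer sizes are arbitrary.

/* Minimal sketch: record ~2 seconds of mono S16 audio through SDL's capture
 * API. Assumes SDL headers that provide SDL_DequeueAudio (2.0.5-era) and
 * that android.permission.RECORD_AUDIO is enabled in the manifest. */
#include "SDL.h"

int main(int argc, char *argv[])
{
    SDL_AudioSpec want, have;
    SDL_AudioDeviceID dev;
    Uint8 buf[4096];
    Uint32 total = 0;

    if (SDL_Init(SDL_INIT_AUDIO) != 0) {
        SDL_Log("SDL_Init failed: %s", SDL_GetError());
        return 1;
    }

    SDL_zero(want);
    want.freq = 48000;
    want.format = AUDIO_S16SYS;
    want.channels = 1;
    want.samples = 1024;
    want.callback = NULL;  /* no callback: pull captured data with SDL_DequeueAudio */

    /* The second argument (iscapture = 1) requests a recording device. */
    dev = SDL_OpenAudioDevice(NULL, 1, &want, &have, 0);
    if (dev == 0) {
        SDL_Log("Couldn't open capture device: %s", SDL_GetError());
        SDL_Quit();
        return 1;
    }

    SDL_PauseAudioDevice(dev, 0);  /* start recording */
    while (total < have.freq * have.channels * sizeof(Sint16) * 2) {
        Uint32 got = SDL_DequeueAudio(dev, buf, sizeof(buf));
        total += got;
        /* ...process `got` bytes of captured PCM here... */
        SDL_Delay(10);
    }

    SDL_CloseAudioDevice(dev);
    SDL_Quit();
    return 0;
}

The same device can instead be opened with a callback; a sketch of that variant follows the diff.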
--- a/android-project/AndroidManifest.xml
+++ b/android-project/AndroidManifest.xml
@@ -17,6 +17,9 @@
     <!-- Allow writing to external storage -->
     <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
 
+    <!-- if you want to capture audio, uncomment this. -->
+    <!-- <uses-permission android:name="android.permission.RECORD_AUDIO" /> -->
+
     <!-- Create a Java class extending SDLActivity and place it in a
          directory under src matching the package, e.g.
          src/com/gamemaker/game/MyGame.java
--- a/android-project/src/org/libsdl/app/SDLActivity.java
+++ b/android-project/src/org/libsdl/app/SDLActivity.java
@@ -59,6 +59,7 @@ public class SDLActivity extends Activity {
 
     // Audio
     protected static AudioTrack mAudioTrack;
+    protected static AudioRecord mAudioRecord;
 
     /**
      * This method is called by SDL before loading the native shared libraries.
@@ -106,6 +107,7 @@ public class SDLActivity extends Activity {
         mJoystickHandler = null;
         mSDLThread = null;
         mAudioTrack = null;
+        mAudioRecord = null;
         mExitCalledFromJava = false;
         mBrokenLibraries = false;
         mIsPaused = false;
@@ -544,7 +546,7 @@ public class SDLActivity extends Activity {
     /**
      * This method is called by SDL using JNI.
      */
-    public static int audioInit(int sampleRate, boolean is16Bit, boolean isStereo, int desiredFrames) {
+    public static int audioOpen(int sampleRate, boolean is16Bit, boolean isStereo, int desiredFrames) {
         int channelConfig = isStereo ? AudioFormat.CHANNEL_CONFIGURATION_STEREO : AudioFormat.CHANNEL_CONFIGURATION_MONO;
         int audioFormat = is16Bit ? AudioFormat.ENCODING_PCM_16BIT : AudioFormat.ENCODING_PCM_8BIT;
         int frameSize = (isStereo ? 2 : 1) * (is16Bit ? 2 : 1);
@@ -623,13 +625,72 @@ public class SDLActivity extends Activity {
     /**
      * This method is called by SDL using JNI.
      */
-    public static void audioQuit() {
+    public static int captureOpen(int sampleRate, boolean is16Bit, boolean isStereo, int desiredFrames) {
+        int channelConfig = isStereo ? AudioFormat.CHANNEL_CONFIGURATION_STEREO : AudioFormat.CHANNEL_CONFIGURATION_MONO;
+        int audioFormat = is16Bit ? AudioFormat.ENCODING_PCM_16BIT : AudioFormat.ENCODING_PCM_8BIT;
+        int frameSize = (isStereo ? 2 : 1) * (is16Bit ? 2 : 1);
+
+        Log.v(TAG, "SDL capture: wanted " + (isStereo ? "stereo" : "mono") + " " + (is16Bit ? "16-bit" : "8-bit") + " " + (sampleRate / 1000f) + "kHz, " + desiredFrames + " frames buffer");
+
+        // Let the user pick a larger buffer if they really want -- but ye
+        // gods they probably shouldn't, the minimums are horrifyingly high
+        // latency already
+        desiredFrames = Math.max(desiredFrames, (AudioRecord.getMinBufferSize(sampleRate, channelConfig, audioFormat) + frameSize - 1) / frameSize);
+
+        if (mAudioRecord == null) {
+            mAudioRecord = new AudioRecord(MediaRecorder.AudioSource.DEFAULT, sampleRate,
+                    channelConfig, audioFormat, desiredFrames * frameSize);
+
+            // see notes about AudioTrack state in audioOpen(), above. Probably also applies here.
+            if (mAudioRecord.getState() != AudioRecord.STATE_INITIALIZED) {
+                Log.e(TAG, "Failed during initialization of AudioRecord");
+                mAudioRecord.release();
+                mAudioRecord = null;
+                return -1;
+            }
+
+            mAudioRecord.startRecording();
+        }
+
+        Log.v(TAG, "SDL capture: got " + ((mAudioRecord.getChannelCount() >= 2) ? "stereo" : "mono") + " " + ((mAudioRecord.getAudioFormat() == AudioFormat.ENCODING_PCM_16BIT) ? "16-bit" : "8-bit") + " " + (mAudioRecord.getSampleRate() / 1000f) + "kHz, " + desiredFrames + " frames buffer");
+
+        return 0;
+    }
+
+    /** This method is called by SDL using JNI. */
+    public static int captureReadShortBuffer(short[] buffer, boolean blocking) {
+        // !!! FIXME: this is available in API Level 23. Until then, we always block. :(
+        //return mAudioRecord.read(buffer, 0, buffer.length, blocking ? AudioRecord.READ_BLOCKING : AudioRecord.READ_NON_BLOCKING);
+        return mAudioRecord.read(buffer, 0, buffer.length);
+    }
+
+    /** This method is called by SDL using JNI. */
+    public static int captureReadByteBuffer(byte[] buffer, boolean blocking) {
+        // !!! FIXME: this is available in API Level 23. Until then, we always block. :(
+        //return mAudioRecord.read(buffer, 0, buffer.length, blocking ? AudioRecord.READ_BLOCKING : AudioRecord.READ_NON_BLOCKING);
+        return mAudioRecord.read(buffer, 0, buffer.length);
+    }
+
+
+    /** This method is called by SDL using JNI. */
+    public static void audioClose() {
         if (mAudioTrack != null) {
             mAudioTrack.stop();
+            mAudioTrack.release();
             mAudioTrack = null;
         }
     }
+
+    /** This method is called by SDL using JNI. */
+    public static void captureClose() {
+        if (mAudioRecord != null) {
+            mAudioRecord.stop();
+            mAudioRecord.release();
+            mAudioRecord = null;
+        }
+    }
+
+
     // Input
 
     /**
--- a/src/audio/android/SDL_androidaudio.c
+++ b/src/audio/android/SDL_androidaudio.c
@@ -24,6 +24,7 @@
 
 /* Output audio to Android */
 
+#include "SDL_assert.h"
 #include "SDL_audio.h"
 #include "../SDL_audio_c.h"
 #include "SDL_androidaudio.h"
@@ -33,23 +34,21 @@
 #include <android/log.h>
 
 static SDL_AudioDevice* audioDevice = NULL;
+static SDL_AudioDevice* captureDevice = NULL;
 
 static int
 AndroidAUD_OpenDevice(_THIS, void *handle, const char *devname, int iscapture)
 {
     SDL_AudioFormat test_format;
 
+    SDL_assert((captureDevice == NULL) || !iscapture);
+    SDL_assert((audioDevice == NULL) || iscapture);
+
     if (iscapture) {
-        /* TODO: implement capture */
-        return SDL_SetError("Capture not supported on Android");
-    }
-
-    /* !!! FIXME: higher level will prevent this now. Lose this check (and global?). */
-    if (audioDevice != NULL) {
-        return SDL_SetError("Only one audio device at a time please!");
-    }
-
+        captureDevice = this;
+    } else {
         audioDevice = this;
+    }
 
     this->hidden = (struct SDL_PrivateAudioData *) SDL_calloc(1, (sizeof *this->hidden));
     if (this->hidden == NULL) {
@@ -83,15 +82,16 @@ AndroidAUD_OpenDevice(_THIS, void *handle, const char *devname, int iscapture)
         this->spec.freq = 48000;
     }
 
-    /* TODO: pass in/return a (Java) device ID, also whether we're opening for input or output */
-    this->spec.samples = Android_JNI_OpenAudioDevice(this->spec.freq, this->spec.format == AUDIO_U8 ? 0 : 1, this->spec.channels, this->spec.samples);
-    SDL_CalculateAudioSpec(&this->spec);
+    /* TODO: pass in/return a (Java) device ID */
+    this->spec.samples = Android_JNI_OpenAudioDevice(iscapture, this->spec.freq, this->spec.format == AUDIO_U8 ? 0 : 1, this->spec.channels, this->spec.samples);
 
     if (this->spec.samples == 0) {
         /* Init failed? */
        return SDL_SetError("Java-side initialization failed!");
     }
 
+    SDL_CalculateAudioSpec(&this->spec);
+
     return 0;
 }
 
@@ -107,18 +107,33 @@ AndroidAUD_GetDeviceBuf(_THIS)
     return Android_JNI_GetAudioBuffer();
 }
 
+static int
+AndroidAUD_CaptureFromDevice(_THIS, void *buffer, int buflen)
+{
+    return Android_JNI_CaptureAudioBuffer(buffer, buflen);
+}
+
+static void
+AndroidAUD_FlushCapture(_THIS)
+{
+    Android_JNI_FlushCapturedAudio();
+}
+
 static void
 AndroidAUD_CloseDevice(_THIS)
 {
     /* At this point SDL_CloseAudioDevice via close_audio_device took care of terminating the audio thread
        so it's safe to terminate the Java side buffer and AudioTrack
      */
-    Android_JNI_CloseAudioDevice();
+    Android_JNI_CloseAudioDevice(this->iscapture);
 
-    if (audioDevice == this) {
-        SDL_free(this->hidden);
+    if (this->iscapture) {
+        SDL_assert(captureDevice == this);
+        captureDevice = NULL;
+    } else {
+        SDL_assert(audioDevice == this);
         audioDevice = NULL;
     }
+
+    SDL_free(this->hidden);
 }
 
 static int
@@ -129,9 +144,11 @@ AndroidAUD_Init(SDL_AudioDriverImpl * impl)
     impl->PlayDevice = AndroidAUD_PlayDevice;
     impl->GetDeviceBuf = AndroidAUD_GetDeviceBuf;
     impl->CloseDevice = AndroidAUD_CloseDevice;
+    impl->CaptureFromDevice = AndroidAUD_CaptureFromDevice;
+    impl->FlushCapture = AndroidAUD_FlushCapture;
 
     /* and the capabilities */
-    impl->HasCaptureSupport = 0; /* TODO */
+    impl->HasCaptureSupport = SDL_TRUE;
     impl->OnlyHasDefaultOutputDevice = 1;
     impl->OnlyHasDefaultCaptureDevice = 1;
 
@@ -159,6 +176,19 @@ void AndroidAUD_PauseDevices(void)
             private->resume = SDL_TRUE;
         }
     }
+
+    if(captureDevice != NULL && captureDevice->hidden != NULL) {
+        private = (struct SDL_PrivateAudioData *) captureDevice->hidden;
+        if (SDL_AtomicGet(&captureDevice->paused)) {
+            /* The device is already paused, leave it alone */
+            private->resume = SDL_FALSE;
+        }
+        else {
+            SDL_LockMutex(captureDevice->mixer_lock);
+            SDL_AtomicSet(&captureDevice->paused, 1);
+            private->resume = SDL_TRUE;
+        }
+    }
 }
 
 /* Resume (unblock) all non already paused audio devices by releasing their mixer lock */
@@ -174,6 +204,15 @@ void AndroidAUD_ResumeDevices(void)
             SDL_UnlockMutex(audioDevice->mixer_lock);
         }
     }
+
+    if(captureDevice != NULL && captureDevice->hidden != NULL) {
+        private = (struct SDL_PrivateAudioData *) captureDevice->hidden;
+        if (private->resume) {
+            SDL_AtomicSet(&captureDevice->paused, 0);
+            private->resume = SDL_FALSE;
+            SDL_UnlockMutex(captureDevice->mixer_lock);
+        }
+    }
 }
 
 
--- a/src/core/android/SDL_android.c
+++ b/src/core/android/SDL_android.c
@@ -71,10 +71,14 @@ static jclass mActivityClass;
 
 /* method signatures */
 static jmethodID midGetNativeSurface;
-static jmethodID midAudioInit;
+static jmethodID midAudioOpen;
 static jmethodID midAudioWriteShortBuffer;
 static jmethodID midAudioWriteByteBuffer;
-static jmethodID midAudioQuit;
+static jmethodID midAudioClose;
+static jmethodID midCaptureOpen;
+static jmethodID midCaptureReadShortBuffer;
+static jmethodID midCaptureReadByteBuffer;
+static jmethodID midCaptureClose;
 static jmethodID midPollInputDevices;
 
 /* Accelerometer data storage */
@@ -118,21 +122,31 @@ JNIEXPORT void JNICALL SDL_Android_Init(JNIEnv* mEnv, jclass cls)
 
     midGetNativeSurface = (*mEnv)->GetStaticMethodID(mEnv, mActivityClass,
                                 "getNativeSurface","()Landroid/view/Surface;");
-    midAudioInit = (*mEnv)->GetStaticMethodID(mEnv, mActivityClass,
-                                "audioInit", "(IZZI)I");
+    midAudioOpen = (*mEnv)->GetStaticMethodID(mEnv, mActivityClass,
+                                "audioOpen", "(IZZI)I");
     midAudioWriteShortBuffer = (*mEnv)->GetStaticMethodID(mEnv, mActivityClass,
                                 "audioWriteShortBuffer", "([S)V");
     midAudioWriteByteBuffer = (*mEnv)->GetStaticMethodID(mEnv, mActivityClass,
                                 "audioWriteByteBuffer", "([B)V");
-    midAudioQuit = (*mEnv)->GetStaticMethodID(mEnv, mActivityClass,
-                                "audioQuit", "()V");
+    midAudioClose = (*mEnv)->GetStaticMethodID(mEnv, mActivityClass,
+                                "audioClose", "()V");
+    midCaptureOpen = (*mEnv)->GetStaticMethodID(mEnv, mActivityClass,
+                                "captureOpen", "(IZZI)I");
+    midCaptureReadShortBuffer = (*mEnv)->GetStaticMethodID(mEnv, mActivityClass,
+                                "captureReadShortBuffer", "([SZ)I");
+    midCaptureReadByteBuffer = (*mEnv)->GetStaticMethodID(mEnv, mActivityClass,
+                                "captureReadByteBuffer", "([BZ)I");
+    midCaptureClose = (*mEnv)->GetStaticMethodID(mEnv, mActivityClass,
+                                "captureClose", "()V");
     midPollInputDevices = (*mEnv)->GetStaticMethodID(mEnv, mActivityClass,
                                 "pollInputDevices", "()V");
 
     bHasNewData = SDL_FALSE;
 
-    if (!midGetNativeSurface || !midAudioInit ||
-       !midAudioWriteShortBuffer || !midAudioWriteByteBuffer || !midAudioQuit || !midPollInputDevices) {
+    if (!midGetNativeSurface ||
+       !midAudioOpen || !midAudioWriteShortBuffer || !midAudioWriteByteBuffer || !midAudioClose ||
+       !midCaptureOpen || !midCaptureReadShortBuffer || !midCaptureReadByteBuffer || !midCaptureClose ||
+       !midPollInputDevices) {
         __android_log_print(ANDROID_LOG_WARN, "SDL", "SDL: Couldn't locate Java callbacks, check that they're named and typed correctly");
     }
     __android_log_print(ANDROID_LOG_INFO, "SDL", "SDL_Android_Init() finished!");
@@ -556,11 +570,14 @@ int Android_JNI_SetupThread(void)
 static jboolean audioBuffer16Bit = JNI_FALSE;
 static jobject audioBuffer = NULL;
 static void* audioBufferPinned = NULL;
+static jboolean captureBuffer16Bit = JNI_FALSE;
+static jobject captureBuffer = NULL;
 
-int Android_JNI_OpenAudioDevice(int sampleRate, int is16Bit, int channelCount, int desiredBufferFrames)
+int Android_JNI_OpenAudioDevice(int iscapture, int sampleRate, int is16Bit, int channelCount, int desiredBufferFrames)
 {
     jboolean audioBufferStereo;
     int audioBufferFrames;
+    jobject jbufobj = NULL;
     jboolean isCopy;
 
     JNIEnv *env = Android_JNI_GetEnv();
@@ -570,15 +587,25 @@ int Android_JNI_OpenAudioDevice(int sampleRate, int is16Bit, int channelCount, int desiredBufferFrames)
     }
     Android_JNI_SetupThread();
 
-    __android_log_print(ANDROID_LOG_VERBOSE, "SDL", "SDL audio: opening device");
-    audioBuffer16Bit = is16Bit;
     audioBufferStereo = channelCount > 1;
 
-    if ((*env)->CallStaticIntMethod(env, mActivityClass, midAudioInit, sampleRate, audioBuffer16Bit, audioBufferStereo, desiredBufferFrames) != 0) {
-        /* Error during audio initialization */
-        __android_log_print(ANDROID_LOG_WARN, "SDL", "SDL audio: error on AudioTrack initialization!");
-        return 0;
-    }
+    if (iscapture) {
+        __android_log_print(ANDROID_LOG_VERBOSE, "SDL", "SDL audio: opening device for capture");
+        captureBuffer16Bit = is16Bit;
+        if ((*env)->CallStaticIntMethod(env, mActivityClass, midCaptureOpen, sampleRate, audioBuffer16Bit, audioBufferStereo, desiredBufferFrames) != 0) {
+            /* Error during audio initialization */
+            __android_log_print(ANDROID_LOG_WARN, "SDL", "SDL audio: error on AudioRecord initialization!");
+            return 0;
+        }
+    } else {
+        __android_log_print(ANDROID_LOG_VERBOSE, "SDL", "SDL audio: opening device for output");
+        audioBuffer16Bit = is16Bit;
+        if ((*env)->CallStaticIntMethod(env, mActivityClass, midAudioOpen, sampleRate, audioBuffer16Bit, audioBufferStereo, desiredBufferFrames) != 0) {
+            /* Error during audio initialization */
+            __android_log_print(ANDROID_LOG_WARN, "SDL", "SDL audio: error on AudioTrack initialization!");
+            return 0;
+        }
+    }
 
     /* Allocating the audio buffer from the Java side and passing it as the return value for audioInit no longer works on
      * Android >= 4.2 due to a "stale global reference" error. So now we allocate this buffer directly from this side. */
@@ -586,31 +613,43 @@
     if (is16Bit) {
         jshortArray audioBufferLocal = (*env)->NewShortArray(env, desiredBufferFrames * (audioBufferStereo ? 2 : 1));
         if (audioBufferLocal) {
-            audioBuffer = (*env)->NewGlobalRef(env, audioBufferLocal);
+            jbufobj = (*env)->NewGlobalRef(env, audioBufferLocal);
             (*env)->DeleteLocalRef(env, audioBufferLocal);
         }
     }
     else {
         jbyteArray audioBufferLocal = (*env)->NewByteArray(env, desiredBufferFrames * (audioBufferStereo ? 2 : 1));
         if (audioBufferLocal) {
-            audioBuffer = (*env)->NewGlobalRef(env, audioBufferLocal);
+            jbufobj = (*env)->NewGlobalRef(env, audioBufferLocal);
             (*env)->DeleteLocalRef(env, audioBufferLocal);
         }
     }
 
-    if (audioBuffer == NULL) {
+    if (jbufobj == NULL) {
         __android_log_print(ANDROID_LOG_WARN, "SDL", "SDL audio: could not allocate an audio buffer!");
         return 0;
     }
 
+    if (iscapture) {
+        captureBuffer = jbufobj;
+    } else {
+        audioBuffer = jbufobj;
+    }
+
     isCopy = JNI_FALSE;
-    if (audioBuffer16Bit) {
+
+    if (is16Bit) {
+        if (!iscapture) {
         audioBufferPinned = (*env)->GetShortArrayElements(env, (jshortArray)audioBuffer, &isCopy);
+        }
         audioBufferFrames = (*env)->GetArrayLength(env, (jshortArray)audioBuffer);
     } else {
+        if (!iscapture) {
         audioBufferPinned = (*env)->GetByteArrayElements(env, (jbyteArray)audioBuffer, &isCopy);
+        }
         audioBufferFrames = (*env)->GetArrayLength(env, (jbyteArray)audioBuffer);
     }
 
     if (audioBufferStereo) {
         audioBufferFrames /= 2;
     }
@@ -638,18 +677,75 @@ void Android_JNI_WriteAudioBuffer(void)
     /* JNI_COMMIT means the changes are committed to the VM but the buffer remains pinned */
 }
 
-void Android_JNI_CloseAudioDevice(void)
+int Android_JNI_CaptureAudioBuffer(void *buffer, int buflen)
+{
+    JNIEnv *env = Android_JNI_GetEnv();
+    jboolean isCopy = JNI_FALSE;
+    jint br;
+
+    if (captureBuffer16Bit) {
+        SDL_assert((*env)->GetArrayLength(env, (jshortArray)captureBuffer) == (buflen / 2));
+        br = (*env)->CallStaticIntMethod(env, mActivityClass, midCaptureReadShortBuffer, (jshortArray)captureBuffer, JNI_TRUE);
+        if (br > 0) {
+            jshort *ptr = (*env)->GetShortArrayElements(env, (jshortArray)captureBuffer, &isCopy);
+            br *= 2;
+            SDL_memcpy(buffer, ptr, br);
+            (*env)->ReleaseShortArrayElements(env, (jshortArray)captureBuffer, (jshort *)ptr, JNI_ABORT);
+        }
+    } else {
+        SDL_assert((*env)->GetArrayLength(env, (jshortArray)captureBuffer) == buflen);
+        br = (*env)->CallStaticIntMethod(env, mActivityClass, midCaptureReadByteBuffer, (jbyteArray)captureBuffer, JNI_TRUE);
+        if (br > 0) {
+            jbyte *ptr = (*env)->GetByteArrayElements(env, (jbyteArray)captureBuffer, &isCopy);
+            SDL_memcpy(buffer, ptr, br);
+            (*env)->ReleaseByteArrayElements(env, (jbyteArray)captureBuffer, (jbyte *)ptr, JNI_ABORT);
+        }
+    }
+
+    return (int) br;
+}
+
+void Android_JNI_FlushCapturedAudio(void)
+{
+    JNIEnv *env = Android_JNI_GetEnv();
+#if 0  /* !!! FIXME: this needs API 23, or it'll do blocking reads and never end. */
+    if (captureBuffer16Bit) {
+        const jint len = (*env)->GetArrayLength(env, (jshortArray)captureBuffer);
+        while ((*env)->CallStaticIntMethod(env, mActivityClass, midCaptureReadShortBuffer, (jshortArray)captureBuffer, JNI_FALSE) == len) { /* spin */ }
+    } else {
+        const jint len = (*env)->GetArrayLength(env, (jbyteArray)captureBuffer);
+        while ((*env)->CallStaticIntMethod(env, mActivityClass, midCaptureReadByteBuffer, (jbyteArray)captureBuffer, JNI_FALSE) == len) { /* spin */ }
+    }
+#else
+    if (captureBuffer16Bit) {
+        const jint len = (*env)->GetArrayLength(env, (jshortArray)captureBuffer);
+        (*env)->CallStaticIntMethod(env, mActivityClass, midCaptureReadShortBuffer, (jshortArray)captureBuffer, JNI_FALSE);
+    } else {
+        const jint len = (*env)->GetArrayLength(env, (jbyteArray)captureBuffer);
+        (*env)->CallStaticIntMethod(env, mActivityClass, midCaptureReadByteBuffer, (jbyteArray)captureBuffer, JNI_FALSE);
+    }
+#endif
+}
+
+void Android_JNI_CloseAudioDevice(const int iscapture)
 {
     JNIEnv *env = Android_JNI_GetEnv();
 
-    (*env)->CallStaticVoidMethod(env, mActivityClass, midAudioQuit);
-
-    if (audioBuffer) {
-        (*env)->DeleteGlobalRef(env, audioBuffer);
-        audioBuffer = NULL;
-        audioBufferPinned = NULL;
+    if (iscapture) {
+        (*env)->CallStaticVoidMethod(env, mActivityClass, midCaptureClose);
+        if (captureBuffer) {
+            (*env)->DeleteGlobalRef(env, captureBuffer);
+            captureBuffer = NULL;
+        }
+    } else {
+        (*env)->CallStaticVoidMethod(env, mActivityClass, midAudioClose);
+        if (audioBuffer) {
+            (*env)->DeleteGlobalRef(env, audioBuffer);
+            audioBuffer = NULL;
+            audioBufferPinned = NULL;
+        }
     }
 }
 
 /* Test for an exception and call SDL_SetError with its detail if one occurs */
 /* If the parameter silent is truthy then SDL_SetError() will not be called. */
--- a/src/core/android/SDL_android.h
+++ b/src/core/android/SDL_android.h
@@ -40,10 +40,12 @@ extern void Android_JNI_HideTextInput(void);
 extern ANativeWindow* Android_JNI_GetNativeWindow(void);
 
 /* Audio support */
-extern int Android_JNI_OpenAudioDevice(int sampleRate, int is16Bit, int channelCount, int desiredBufferFrames);
+extern int Android_JNI_OpenAudioDevice(int iscapture, int sampleRate, int is16Bit, int channelCount, int desiredBufferFrames);
 extern void* Android_JNI_GetAudioBuffer(void);
 extern void Android_JNI_WriteAudioBuffer(void);
-extern void Android_JNI_CloseAudioDevice(void);
+extern int Android_JNI_CaptureAudioBuffer(void *buffer, int buflen);
+extern void Android_JNI_FlushCapturedAudio(void);
+extern void Android_JNI_CloseAudioDevice(const int iscapture);
 
 #include "SDL_rwops.h"
 
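For completeness, the callback-driven variant referenced above might look roughly like this; it is what ends up exercising the new AndroidAUD_CaptureFromDevice / AndroidAUD_FlushCapture hooks from SDL's capture thread. Again a sketch against the 2.0.5-era API, not part of this commit; the empty callback body and the two-second duration are placeholders.

/* Sketch: callback-style capture. SDL's capture thread pulls data from the
 * backend and delivers it to this callback. */
#include "SDL.h"

static void record_cb(void *userdata, Uint8 *stream, int len)
{
    /* `stream` holds `len` bytes of freshly captured PCM; copy it out here.
     * Avoid blocking: this runs on SDL's capture thread. */
    (void) userdata;
    (void) stream;
    (void) len;
}

int main(int argc, char *argv[])
{
    SDL_AudioSpec want, have;
    SDL_AudioDeviceID dev;

    if (SDL_Init(SDL_INIT_AUDIO) != 0) {
        return 1;
    }

    SDL_zero(want);
    want.freq = 44100;
    want.format = AUDIO_S16SYS;
    want.channels = 1;
    want.samples = 1024;
    want.callback = record_cb;

    /* The Android backend reports only a default capture device
     * (OnlyHasDefaultCaptureDevice), so NULL selects it. */
    dev = SDL_OpenAudioDevice(NULL, 1, &want, &have, 0);
    if (dev != 0) {
        SDL_PauseAudioDevice(dev, 0);   /* start capturing */
        SDL_Delay(2000);                /* record for ~2 seconds */
        SDL_CloseAudioDevice(dev);      /* stops and releases the Java-side AudioRecord */
    }

    SDL_Quit();
    return 0;
}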