1.1 --- a/android-project/app/src/main/java/org/libsdl/app/SDLAudioManager.java Tue Oct 09 20:12:40 2018 -0700
1.2 +++ b/android-project/app/src/main/java/org/libsdl/app/SDLAudioManager.java Tue Oct 09 20:12:43 2018 -0700
1.3 @@ -1,6 +1,7 @@
1.4 package org.libsdl.app;
1.5
1.6 import android.media.*;
1.7 +import android.os.Build;
1.8 import android.util.Log;
1.9
1.10 public class SDLAudioManager
1.11 @@ -17,41 +18,250 @@
1.12
1.13 // Audio
1.14
1.15 - /**
1.16 - * This method is called by SDL using JNI.
1.17 - */
1.18 - public static int audioOpen(int sampleRate, boolean is16Bit, boolean isStereo, int desiredFrames) {
1.19 - int channelConfig = isStereo ? AudioFormat.CHANNEL_CONFIGURATION_STEREO : AudioFormat.CHANNEL_CONFIGURATION_MONO;
1.20 - int audioFormat = is16Bit ? AudioFormat.ENCODING_PCM_16BIT : AudioFormat.ENCODING_PCM_8BIT;
1.21 - int frameSize = (isStereo ? 2 : 1) * (is16Bit ? 2 : 1);
1.22 + protected static String getAudioFormatString(int audioFormat) {
1.23 + switch (audioFormat) {
1.24 + case AudioFormat.ENCODING_PCM_8BIT:
1.25 + return "8-bit";
1.26 + case AudioFormat.ENCODING_PCM_16BIT:
1.27 + return "16-bit";
1.28 + case AudioFormat.ENCODING_PCM_FLOAT:
1.29 + return "float";
1.30 + default:
1.31 + return Integer.toString(audioFormat);
1.32 + }
1.33 + }
1.34
1.35 - Log.v(TAG, "SDL audio: wanted " + (isStereo ? "stereo" : "mono") + " " + (is16Bit ? "16-bit" : "8-bit") + " " + (sampleRate / 1000f) + "kHz, " + desiredFrames + " frames buffer");
1.36 + protected static int[] open(boolean isCapture, int sampleRate, int audioFormat, int desiredChannels, int desiredFrames) {
1.37 + int channelConfig;
1.38 + int sampleSize;
1.39 + int frameSize;
1.40 +
1.41 + Log.v(TAG, "Opening " + (isCapture ? "capture" : "playback") + ", requested " + desiredFrames + " frames of " + desiredChannels + " channel " + getAudioFormatString(audioFormat) + " audio at " + sampleRate + " Hz");
1.42 +
1.43 + /* On older devices let's use known good settings */
1.44 + if (Build.VERSION.SDK_INT < 21) {
1.45 + if (desiredChannels > 2) {
1.46 + desiredChannels = 2;
1.47 + }
1.48 + if (sampleRate < 8000) {
1.49 + sampleRate = 8000;
1.50 + } else if (sampleRate > 48000) {
1.51 + sampleRate = 48000;
1.52 + }
1.53 + }
1.54 +
1.55 + if (audioFormat == AudioFormat.ENCODING_PCM_FLOAT) {
1.56 + int minSDKVersion = (isCapture ? 23 : 21);
1.57 + if (Build.VERSION.SDK_INT < minSDKVersion) {
1.58 + audioFormat = AudioFormat.ENCODING_PCM_16BIT;
1.59 + }
1.60 + }
1.61 + switch (audioFormat)
1.62 + {
1.63 + case AudioFormat.ENCODING_PCM_8BIT:
1.64 + sampleSize = 1;
1.65 + break;
1.66 + case AudioFormat.ENCODING_PCM_16BIT:
1.67 + sampleSize = 2;
1.68 + break;
1.69 + case AudioFormat.ENCODING_PCM_FLOAT:
1.70 + sampleSize = 4;
1.71 + break;
1.72 + default:
1.73 + Log.v(TAG, "Requested format " + audioFormat + ", getting ENCODING_PCM_16BIT");
1.74 + audioFormat = AudioFormat.ENCODING_PCM_16BIT;
1.75 + sampleSize = 2;
1.76 + break;
1.77 + }
1.78 +
1.79 + if (isCapture) {
1.80 + switch (desiredChannels) {
1.81 + case 1:
1.82 + channelConfig = AudioFormat.CHANNEL_IN_MONO;
1.83 + break;
1.84 + case 2:
1.85 + channelConfig = AudioFormat.CHANNEL_IN_STEREO;
1.86 + break;
1.87 + default:
1.88 + Log.v(TAG, "Requested " + desiredChannels + " channels, getting stereo");
1.89 + desiredChannels = 2;
1.90 + channelConfig = AudioFormat.CHANNEL_IN_STEREO;
1.91 + break;
1.92 + }
1.93 + } else {
1.94 + switch (desiredChannels) {
1.95 + case 1:
1.96 + channelConfig = AudioFormat.CHANNEL_OUT_MONO;
1.97 + break;
1.98 + case 2:
1.99 + channelConfig = AudioFormat.CHANNEL_OUT_STEREO;
1.100 + break;
1.101 + case 3:
1.102 + channelConfig = AudioFormat.CHANNEL_OUT_STEREO | AudioFormat.CHANNEL_OUT_FRONT_CENTER;
1.103 + break;
1.104 + case 4:
1.105 + channelConfig = AudioFormat.CHANNEL_OUT_QUAD;
1.106 + break;
1.107 + case 5:
1.108 + channelConfig = AudioFormat.CHANNEL_OUT_QUAD | AudioFormat.CHANNEL_OUT_FRONT_CENTER;
1.109 + break;
1.110 + case 6:
1.111 + channelConfig = AudioFormat.CHANNEL_OUT_5POINT1;
1.112 + break;
1.113 + case 7:
1.114 + channelConfig = AudioFormat.CHANNEL_OUT_5POINT1 | AudioFormat.CHANNEL_OUT_BACK_CENTER;
1.115 + break;
1.116 + case 8:
1.117 + if (Build.VERSION.SDK_INT >= 23) {
1.118 + channelConfig = AudioFormat.CHANNEL_OUT_7POINT1_SURROUND;
1.119 + } else {
1.120 + Log.v(TAG, "Requested " + desiredChannels + " channels, getting 5.1 surround");
1.121 + desiredChannels = 6;
1.122 + channelConfig = AudioFormat.CHANNEL_OUT_5POINT1;
1.123 + }
1.124 + break;
1.125 + default:
1.126 + Log.v(TAG, "Requested " + desiredChannels + " channels, getting stereo");
1.127 + desiredChannels = 2;
1.128 + channelConfig = AudioFormat.CHANNEL_OUT_STEREO;
1.129 + break;
1.130 + }
1.131 +
1.132 +/*
1.133 + Log.v(TAG, "Speaker configuration (and order of channels):");
1.134 +
1.135 + if ((channelConfig & 0x00000004) != 0) {
1.136 + Log.v(TAG, " CHANNEL_OUT_FRONT_LEFT");
1.137 + }
1.138 + if ((channelConfig & 0x00000008) != 0) {
1.139 + Log.v(TAG, " CHANNEL_OUT_FRONT_RIGHT");
1.140 + }
1.141 + if ((channelConfig & 0x00000010) != 0) {
1.142 + Log.v(TAG, " CHANNEL_OUT_FRONT_CENTER");
1.143 + }
1.144 + if ((channelConfig & 0x00000020) != 0) {
1.145 + Log.v(TAG, " CHANNEL_OUT_LOW_FREQUENCY");
1.146 + }
1.147 + if ((channelConfig & 0x00000040) != 0) {
1.148 + Log.v(TAG, " CHANNEL_OUT_BACK_LEFT");
1.149 + }
1.150 + if ((channelConfig & 0x00000080) != 0) {
1.151 + Log.v(TAG, " CHANNEL_OUT_BACK_RIGHT");
1.152 + }
1.153 + if ((channelConfig & 0x00000100) != 0) {
1.154 + Log.v(TAG, " CHANNEL_OUT_FRONT_LEFT_OF_CENTER");
1.155 + }
1.156 + if ((channelConfig & 0x00000200) != 0) {
1.157 + Log.v(TAG, " CHANNEL_OUT_FRONT_RIGHT_OF_CENTER");
1.158 + }
1.159 + if ((channelConfig & 0x00000400) != 0) {
1.160 + Log.v(TAG, " CHANNEL_OUT_BACK_CENTER");
1.161 + }
1.162 + if ((channelConfig & 0x00000800) != 0) {
1.163 + Log.v(TAG, " CHANNEL_OUT_SIDE_LEFT");
1.164 + }
1.165 + if ((channelConfig & 0x00001000) != 0) {
1.166 + Log.v(TAG, " CHANNEL_OUT_SIDE_RIGHT");
1.167 + }
1.168 +*/
1.169 + }
1.170 + frameSize = (sampleSize * desiredChannels);
1.171
1.172 // Let the user pick a larger buffer if they really want -- but ye
1.173 // gods they probably shouldn't, the minimums are horrifyingly high
1.174 // latency already
1.175 - desiredFrames = Math.max(desiredFrames, (AudioTrack.getMinBufferSize(sampleRate, channelConfig, audioFormat) + frameSize - 1) / frameSize);
1.176 + int minBufferSize;
1.177 + if (isCapture) {
1.178 + minBufferSize = AudioRecord.getMinBufferSize(sampleRate, channelConfig, audioFormat);
1.179 + } else {
1.180 + minBufferSize = AudioTrack.getMinBufferSize(sampleRate, channelConfig, audioFormat);
1.181 + }
1.182 + desiredFrames = Math.max(desiredFrames, (minBufferSize + frameSize - 1) / frameSize);
1.183
1.184 - if (mAudioTrack == null) {
1.185 - mAudioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate,
1.186 - channelConfig, audioFormat, desiredFrames * frameSize, AudioTrack.MODE_STREAM);
1.187 + int[] results = new int[4];
1.188
1.189 - // Instantiating AudioTrack can "succeed" without an exception and the track may still be invalid
1.190 - // Ref: https://android.googlesource.com/platform/frameworks/base/+/refs/heads/master/media/java/android/media/AudioTrack.java
1.191 - // Ref: http://developer.android.com/reference/android/media/AudioTrack.html#getState()
1.192 + if (isCapture) {
1.193 + if (mAudioRecord == null) {
1.194 + mAudioRecord = new AudioRecord(MediaRecorder.AudioSource.DEFAULT, sampleRate,
1.195 + channelConfig, audioFormat, desiredFrames * frameSize);
1.196
1.197 - if (mAudioTrack.getState() != AudioTrack.STATE_INITIALIZED) {
1.198 - Log.e(TAG, "Failed during initialization of Audio Track");
1.199 - mAudioTrack = null;
1.200 - return -1;
1.201 +            // See the notes about AudioTrack state in the playback branch below; they probably also apply to AudioRecord here.
1.202 + if (mAudioRecord.getState() != AudioRecord.STATE_INITIALIZED) {
1.203 + Log.e(TAG, "Failed during initialization of AudioRecord");
1.204 + mAudioRecord.release();
1.205 + mAudioRecord = null;
1.206 + return null;
1.207 + }
1.208 +
1.209 + mAudioRecord.startRecording();
1.210 }
1.211
1.212 - mAudioTrack.play();
1.213 + results[0] = mAudioRecord.getSampleRate();
1.214 + results[1] = mAudioRecord.getAudioFormat();
1.215 + results[2] = mAudioRecord.getChannelCount();
1.216 + results[3] = desiredFrames;
1.217 +
1.218 + } else {
1.219 + if (mAudioTrack == null) {
1.220 + mAudioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate, channelConfig, audioFormat, desiredFrames * frameSize, AudioTrack.MODE_STREAM);
1.221 +
1.222 + // Instantiating AudioTrack can "succeed" without an exception and the track may still be invalid
1.223 + // Ref: https://android.googlesource.com/platform/frameworks/base/+/refs/heads/master/media/java/android/media/AudioTrack.java
1.224 + // Ref: http://developer.android.com/reference/android/media/AudioTrack.html#getState()
1.225 + if (mAudioTrack.getState() != AudioTrack.STATE_INITIALIZED) {
1.226 +                    /* TODO: retry with safer (lower rate / fewer channels) values; for now we just fail */
1.227 +
1.228 + Log.e(TAG, "Failed during initialization of Audio Track");
1.229 + mAudioTrack.release();
1.230 + mAudioTrack = null;
1.231 + return null;
1.232 + }
1.233 +
1.234 + mAudioTrack.play();
1.235 + }
1.236 +
1.237 + results[0] = mAudioTrack.getSampleRate();
1.238 + results[1] = mAudioTrack.getAudioFormat();
1.239 + results[2] = mAudioTrack.getChannelCount();
1.240 + results[3] = desiredFrames;
1.241 }
1.242
1.243 - Log.v(TAG, "SDL audio: got " + ((mAudioTrack.getChannelCount() >= 2) ? "stereo" : "mono") + " " + ((mAudioTrack.getAudioFormat() == AudioFormat.ENCODING_PCM_16BIT) ? "16-bit" : "8-bit") + " " + (mAudioTrack.getSampleRate() / 1000f) + "kHz, " + desiredFrames + " frames buffer");
1.244 + Log.v(TAG, "Opening " + (isCapture ? "capture" : "playback") + ", got " + results[3] + " frames of " + results[2] + " channel " + getAudioFormatString(results[1]) + " audio at " + results[0] + " Hz");
1.245
1.246 - return 0;
1.247 + return results;
1.248 + }
1.249 +
1.250 + /**
1.251 + * This method is called by SDL using JNI.
1.252 + */
1.253 + public static int[] audioOpen(int sampleRate, int audioFormat, int desiredChannels, int desiredFrames) {
1.254 + return open(false, sampleRate, audioFormat, desiredChannels, desiredFrames);
1.255 + }
1.256 +
1.257 + /**
1.258 + * This method is called by SDL using JNI.
1.259 + */
1.260 + public static void audioWriteFloatBuffer(float[] buffer) {
1.261 + if (mAudioTrack == null) {
1.262 + Log.e(TAG, "Attempted to make audio call with uninitialized audio!");
1.263 + return;
1.264 + }
1.265 +
1.266 + for (int i = 0; i < buffer.length;) {
1.267 + int result = mAudioTrack.write(buffer, i, buffer.length - i, AudioTrack.WRITE_BLOCKING);
1.268 + if (result > 0) {
1.269 + i += result;
1.270 + } else if (result == 0) {
1.271 + try {
1.272 + Thread.sleep(1);
1.273 + } catch(InterruptedException e) {
1.274 + // Nom nom
1.275 + }
1.276 + } else {
1.277 + Log.w(TAG, "SDL audio: error return from write(float)");
1.278 + return;
1.279 + }
1.280 + }
1.281 }
1.282
1.283 /**
1.284 @@ -63,7 +273,7 @@
1.285 return;
1.286 }
1.287
1.288 - for (int i = 0; i < buffer.length; ) {
1.289 + for (int i = 0; i < buffer.length;) {
1.290 int result = mAudioTrack.write(buffer, i, buffer.length - i);
1.291 if (result > 0) {
1.292 i += result;
1.293 @@ -109,53 +319,33 @@
1.294 /**
1.295 * This method is called by SDL using JNI.
1.296 */
1.297 - public static int captureOpen(int sampleRate, boolean is16Bit, boolean isStereo, int desiredFrames) {
1.298 - int channelConfig = isStereo ? AudioFormat.CHANNEL_CONFIGURATION_STEREO : AudioFormat.CHANNEL_CONFIGURATION_MONO;
1.299 - int audioFormat = is16Bit ? AudioFormat.ENCODING_PCM_16BIT : AudioFormat.ENCODING_PCM_8BIT;
1.300 - int frameSize = (isStereo ? 2 : 1) * (is16Bit ? 2 : 1);
1.301 + public static int[] captureOpen(int sampleRate, int audioFormat, int desiredChannels, int desiredFrames) {
1.302 + return open(true, sampleRate, audioFormat, desiredChannels, desiredFrames);
1.303 + }
1.304
1.305 - Log.v(TAG, "SDL capture: wanted " + (isStereo ? "stereo" : "mono") + " " + (is16Bit ? "16-bit" : "8-bit") + " " + (sampleRate / 1000f) + "kHz, " + desiredFrames + " frames buffer");
1.306 -
1.307 - // Let the user pick a larger buffer if they really want -- but ye
1.308 - // gods they probably shouldn't, the minimums are horrifyingly high
1.309 - // latency already
1.310 - desiredFrames = Math.max(desiredFrames, (AudioRecord.getMinBufferSize(sampleRate, channelConfig, audioFormat) + frameSize - 1) / frameSize);
1.311 -
1.312 - if (mAudioRecord == null) {
1.313 - mAudioRecord = new AudioRecord(MediaRecorder.AudioSource.DEFAULT, sampleRate,
1.314 - channelConfig, audioFormat, desiredFrames * frameSize);
1.315 -
1.316 - // see notes about AudioTrack state in audioOpen(), above. Probably also applies here.
1.317 - if (mAudioRecord.getState() != AudioRecord.STATE_INITIALIZED) {
1.318 - Log.e(TAG, "Failed during initialization of AudioRecord");
1.319 - mAudioRecord.release();
1.320 - mAudioRecord = null;
1.321 - return -1;
1.322 - }
1.323 -
1.324 - mAudioRecord.startRecording();
1.325 - }
1.326 -
1.327 - Log.v(TAG, "SDL capture: got " + ((mAudioRecord.getChannelCount() >= 2) ? "stereo" : "mono") + " " + ((mAudioRecord.getAudioFormat() == AudioFormat.ENCODING_PCM_16BIT) ? "16-bit" : "8-bit") + " " + (mAudioRecord.getSampleRate() / 1000f) + "kHz, " + desiredFrames + " frames buffer");
1.328 -
1.329 - return 0;
1.330 + /** This method is called by SDL using JNI. */
1.331 + public static int captureReadFloatBuffer(float[] buffer, boolean blocking) {
1.332 + return mAudioRecord.read(buffer, 0, buffer.length, blocking ? AudioRecord.READ_BLOCKING : AudioRecord.READ_NON_BLOCKING);
1.333 }
1.334
1.335 /** This method is called by SDL using JNI. */
1.336 public static int captureReadShortBuffer(short[] buffer, boolean blocking) {
1.337 - // !!! FIXME: this is available in API Level 23. Until then, we always block. :(
1.338 - //return mAudioRecord.read(buffer, 0, buffer.length, blocking ? AudioRecord.READ_BLOCKING : AudioRecord.READ_NON_BLOCKING);
1.339 - return mAudioRecord.read(buffer, 0, buffer.length);
1.340 + if (Build.VERSION.SDK_INT < 23) {
1.341 + return mAudioRecord.read(buffer, 0, buffer.length);
1.342 + } else {
1.343 + return mAudioRecord.read(buffer, 0, buffer.length, blocking ? AudioRecord.READ_BLOCKING : AudioRecord.READ_NON_BLOCKING);
1.344 + }
1.345 }
1.346
1.347 /** This method is called by SDL using JNI. */
1.348 public static int captureReadByteBuffer(byte[] buffer, boolean blocking) {
1.349 - // !!! FIXME: this is available in API Level 23. Until then, we always block. :(
1.350 - //return mAudioRecord.read(buffer, 0, buffer.length, blocking ? AudioRecord.READ_BLOCKING : AudioRecord.READ_NON_BLOCKING);
1.351 - return mAudioRecord.read(buffer, 0, buffer.length);
1.352 + if (Build.VERSION.SDK_INT < 23) {
1.353 + return mAudioRecord.read(buffer, 0, buffer.length);
1.354 + } else {
1.355 + return mAudioRecord.read(buffer, 0, buffer.length, blocking ? AudioRecord.READ_BLOCKING : AudioRecord.READ_NON_BLOCKING);
1.356 + }
1.357 }
1.358
1.359 -
1.360 /** This method is called by SDL using JNI. */
1.361 public static void audioClose() {
1.362 if (mAudioTrack != null) {
2.1 --- a/src/audio/SDL_audio.c Tue Oct 09 20:12:40 2018 -0700
2.2 +++ b/src/audio/SDL_audio.c Tue Oct 09 20:12:43 2018 -0700
2.3 @@ -1130,8 +1130,9 @@
2.4 }
2.5 case 1: /* Mono */
2.6 case 2: /* Stereo */
2.7 - case 4: /* surround */
2.8 - case 6: /* surround with center and lfe */
2.9 + case 4: /* Quadrophonic */
2.10 + case 6: /* 5.1 surround */
2.11 + case 8: /* 7.1 surround */
2.12 break;
2.13 default:
2.14 SDL_SetError("Unsupported number of audio channels.");
3.1 --- a/src/audio/android/SDL_androidaudio.c Tue Oct 09 20:12:40 2018 -0700
3.2 +++ b/src/audio/android/SDL_androidaudio.c Tue Oct 09 20:12:43 2018 -0700
3.3 @@ -57,7 +57,9 @@
3.4
3.5 test_format = SDL_FirstAudioFormat(this->spec.format);
3.6 while (test_format != 0) { /* no "UNKNOWN" constant */
3.7 - if ((test_format == AUDIO_U8) || (test_format == AUDIO_S16LSB)) {
3.8 + if ((test_format == AUDIO_U8) ||
3.9 + (test_format == AUDIO_S16) ||
3.10 + (test_format == AUDIO_F32)) {
3.11 this->spec.format = test_format;
3.12 break;
3.13 }
3.14 @@ -69,25 +71,8 @@
3.15 return SDL_SetError("No compatible audio format!");
3.16 }
3.17
3.18 - if (this->spec.channels > 1) {
3.19 - this->spec.channels = 2;
3.20 - } else {
3.21 - this->spec.channels = 1;
3.22 - }
3.23 -
3.24 - if (this->spec.freq < 8000) {
3.25 - this->spec.freq = 8000;
3.26 - }
3.27 - if (this->spec.freq > 48000) {
3.28 - this->spec.freq = 48000;
3.29 - }
3.30 -
3.31 - /* TODO: pass in/return a (Java) device ID */
3.32 - this->spec.samples = Android_JNI_OpenAudioDevice(iscapture, this->spec.freq, this->spec.format == AUDIO_U8 ? 0 : 1, this->spec.channels, this->spec.samples);
3.33 -
3.34 - if (this->spec.samples == 0) {
3.35 - /* Init failed? */
3.36 - return SDL_SetError("Java-side initialization failed!");
3.37 + if (Android_JNI_OpenAudioDevice(iscapture, &this->spec) < 0) {
3.38 + return -1;
3.39 }
3.40
3.41 SDL_CalculateAudioSpec(&this->spec);
4.1 --- a/src/core/android/SDL_android.c Tue Oct 09 20:12:40 2018 -0700
4.2 +++ b/src/core/android/SDL_android.c Tue Oct 09 20:12:43 2018 -0700
4.3 @@ -61,6 +61,10 @@
4.4 #define SDL_JAVA_CONTROLLER_INTERFACE(function) CONCAT1(SDL_JAVA_PREFIX, SDLControllerManager, function)
4.5 #define SDL_JAVA_INTERFACE_INPUT_CONNECTION(function) CONCAT1(SDL_JAVA_PREFIX, SDLInputConnection, function)
4.6
4.7 +/* Audio encoding definitions */
4.8 +#define ENCODING_PCM_8BIT 3
4.9 +#define ENCODING_PCM_16BIT 2
4.10 +#define ENCODING_PCM_FLOAT 4
4.11
4.12 /* Java class SDLActivity */
4.13 JNIEXPORT void JNICALL SDL_JAVA_INTERFACE(nativeSetupJNI)(
4.14 @@ -248,12 +252,14 @@
4.15
4.16 /* method signatures */
4.17 static jmethodID midAudioOpen;
4.18 +static jmethodID midAudioWriteByteBuffer;
4.19 static jmethodID midAudioWriteShortBuffer;
4.20 -static jmethodID midAudioWriteByteBuffer;
4.21 +static jmethodID midAudioWriteFloatBuffer;
4.22 static jmethodID midAudioClose;
4.23 static jmethodID midCaptureOpen;
4.24 +static jmethodID midCaptureReadByteBuffer;
4.25 static jmethodID midCaptureReadShortBuffer;
4.26 -static jmethodID midCaptureReadByteBuffer;
4.27 +static jmethodID midCaptureReadFloatBuffer;
4.28 static jmethodID midCaptureClose;
4.29
4.30 /* controller manager */
4.31 @@ -397,24 +403,28 @@
4.32 mAudioManagerClass = (jclass)((*mEnv)->NewGlobalRef(mEnv, cls));
4.33
4.34 midAudioOpen = (*mEnv)->GetStaticMethodID(mEnv, mAudioManagerClass,
4.35 - "audioOpen", "(IZZI)I");
4.36 + "audioOpen", "(IIII)[I");
4.37 + midAudioWriteByteBuffer = (*mEnv)->GetStaticMethodID(mEnv, mAudioManagerClass,
4.38 + "audioWriteByteBuffer", "([B)V");
4.39 midAudioWriteShortBuffer = (*mEnv)->GetStaticMethodID(mEnv, mAudioManagerClass,
4.40 "audioWriteShortBuffer", "([S)V");
4.41 - midAudioWriteByteBuffer = (*mEnv)->GetStaticMethodID(mEnv, mAudioManagerClass,
4.42 - "audioWriteByteBuffer", "([B)V");
4.43 + midAudioWriteFloatBuffer = (*mEnv)->GetStaticMethodID(mEnv, mAudioManagerClass,
4.44 + "audioWriteFloatBuffer", "([F)V");
4.45 midAudioClose = (*mEnv)->GetStaticMethodID(mEnv, mAudioManagerClass,
4.46 "audioClose", "()V");
4.47 midCaptureOpen = (*mEnv)->GetStaticMethodID(mEnv, mAudioManagerClass,
4.48 - "captureOpen", "(IZZI)I");
4.49 + "captureOpen", "(IIII)[I");
4.50 + midCaptureReadByteBuffer = (*mEnv)->GetStaticMethodID(mEnv, mAudioManagerClass,
4.51 + "captureReadByteBuffer", "([BZ)I");
4.52 midCaptureReadShortBuffer = (*mEnv)->GetStaticMethodID(mEnv, mAudioManagerClass,
4.53 "captureReadShortBuffer", "([SZ)I");
4.54 - midCaptureReadByteBuffer = (*mEnv)->GetStaticMethodID(mEnv, mAudioManagerClass,
4.55 - "captureReadByteBuffer", "([BZ)I");
4.56 + midCaptureReadFloatBuffer = (*mEnv)->GetStaticMethodID(mEnv, mAudioManagerClass,
4.57 + "captureReadFloatBuffer", "([FZ)I");
4.58 midCaptureClose = (*mEnv)->GetStaticMethodID(mEnv, mAudioManagerClass,
4.59 "captureClose", "()V");
4.60
4.61 - if (!midAudioOpen || !midAudioWriteShortBuffer || !midAudioWriteByteBuffer || !midAudioClose ||
4.62 - !midCaptureOpen || !midCaptureReadShortBuffer || !midCaptureReadByteBuffer || !midCaptureClose) {
4.63 + if (!midAudioOpen || !midAudioWriteByteBuffer || !midAudioWriteShortBuffer || !midAudioWriteFloatBuffer || !midAudioClose ||
4.64 + !midCaptureOpen || !midCaptureReadByteBuffer || !midCaptureReadShortBuffer || !midCaptureReadFloatBuffer || !midCaptureClose) {
4.65 __android_log_print(ANDROID_LOG_WARN, "SDL", "Missing some Java callbacks, do you have the latest version of SDLAudioManager.java?");
4.66 }
4.67
4.68 @@ -1043,17 +1053,19 @@
4.69 /*
4.70 * Audio support
4.71 */
4.72 -static jboolean audioBuffer16Bit = JNI_FALSE;
4.73 +static int audioBufferFormat = 0;
4.74 static jobject audioBuffer = NULL;
4.75 static void* audioBufferPinned = NULL;
4.76 -static jboolean captureBuffer16Bit = JNI_FALSE;
4.77 +static int captureBufferFormat = 0;
4.78 static jobject captureBuffer = NULL;
4.79
4.80 -int Android_JNI_OpenAudioDevice(int iscapture, int sampleRate, int is16Bit, int channelCount, int desiredBufferFrames)
4.81 +int Android_JNI_OpenAudioDevice(int iscapture, SDL_AudioSpec *spec)
4.82 {
4.83 - jboolean isStereo;
4.84 + int audioformat;
4.85 int numBufferFrames;
4.86 jobject jbufobj = NULL;
4.87 + jobject result;
4.88 + int *resultElements;
4.89 jboolean isCopy;
4.90
4.91 JNIEnv *env = Android_JNI_GetEnv();
4.92 @@ -1063,78 +1075,123 @@
4.93 }
4.94 Android_JNI_SetupThread();
4.95
4.96 - isStereo = channelCount > 1;
4.97 + switch (spec->format) {
4.98 + case AUDIO_U8:
4.99 + audioformat = ENCODING_PCM_8BIT;
4.100 + break;
4.101 + case AUDIO_S16:
4.102 + audioformat = ENCODING_PCM_16BIT;
4.103 + break;
4.104 + case AUDIO_F32:
4.105 + audioformat = ENCODING_PCM_FLOAT;
4.106 + break;
4.107 + default:
4.108 + return SDL_SetError("Unsupported audio format: 0x%x", spec->format);
4.109 + }
4.110
4.111 if (iscapture) {
4.112 __android_log_print(ANDROID_LOG_VERBOSE, "SDL", "SDL audio: opening device for capture");
4.113 - captureBuffer16Bit = is16Bit;
4.114 - if ((*env)->CallStaticIntMethod(env, mAudioManagerClass, midCaptureOpen, sampleRate, is16Bit, isStereo, desiredBufferFrames) != 0) {
4.115 - /* Error during audio initialization */
4.116 - __android_log_print(ANDROID_LOG_WARN, "SDL", "SDL audio: error on AudioRecord initialization!");
4.117 - return 0;
4.118 - }
4.119 + result = (*env)->CallStaticObjectMethod(env, mAudioManagerClass, midCaptureOpen, spec->freq, audioformat, spec->channels, spec->samples);
4.120 } else {
4.121 __android_log_print(ANDROID_LOG_VERBOSE, "SDL", "SDL audio: opening device for output");
4.122 - audioBuffer16Bit = is16Bit;
4.123 - if ((*env)->CallStaticIntMethod(env, mAudioManagerClass, midAudioOpen, sampleRate, is16Bit, isStereo, desiredBufferFrames) != 0) {
4.124 - /* Error during audio initialization */
4.125 - __android_log_print(ANDROID_LOG_WARN, "SDL", "SDL audio: error on AudioTrack initialization!");
4.126 - return 0;
4.127 - }
4.128 + result = (*env)->CallStaticObjectMethod(env, mAudioManagerClass, midAudioOpen, spec->freq, audioformat, spec->channels, spec->samples);
4.129 }
4.130 + if (result == NULL) {
4.131 + /* Error during audio initialization, error printed from Java */
4.132 + return SDL_SetError("Java-side initialization failed");
4.133 + }
4.134 +
4.135 + if ((*env)->GetArrayLength(env, (jintArray)result) != 4) {
4.136 + return SDL_SetError("Unexpected results from Java, expected 4, got %d", (*env)->GetArrayLength(env, (jintArray)result));
4.137 + }
4.138 + isCopy = JNI_FALSE;
4.139 + resultElements = (*env)->GetIntArrayElements(env, (jintArray)result, &isCopy);
4.140 + spec->freq = resultElements[0];
4.141 + audioformat = resultElements[1];
4.142 + switch (audioformat) {
4.143 + case ENCODING_PCM_8BIT:
4.144 + spec->format = AUDIO_U8;
4.145 + break;
4.146 + case ENCODING_PCM_16BIT:
4.147 + spec->format = AUDIO_S16;
4.148 + break;
4.149 + case ENCODING_PCM_FLOAT:
4.150 + spec->format = AUDIO_F32;
4.151 + break;
4.152 + default:
4.153 + return SDL_SetError("Unexpected audio format from Java: %d\n", audioformat);
4.154 + }
4.155 + spec->channels = resultElements[2];
4.156 + spec->samples = resultElements[3];
4.157 + (*env)->ReleaseIntArrayElements(env, (jintArray)result, resultElements, JNI_ABORT);
4.158 + (*env)->DeleteLocalRef(env, result);
4.159
4.160 /* Allocating the audio buffer from the Java side and passing it as the return value for audioInit no longer works on
4.161 * Android >= 4.2 due to a "stale global reference" error. So now we allocate this buffer directly from this side. */
4.162 -
4.163 - if (is16Bit) {
4.164 - jshortArray audioBufferLocal = (*env)->NewShortArray(env, desiredBufferFrames * (isStereo ? 2 : 1));
4.165 - if (audioBufferLocal) {
4.166 - jbufobj = (*env)->NewGlobalRef(env, audioBufferLocal);
4.167 - (*env)->DeleteLocalRef(env, audioBufferLocal);
4.168 + switch (audioformat) {
4.169 + case ENCODING_PCM_8BIT:
4.170 + {
4.171 + jbyteArray audioBufferLocal = (*env)->NewByteArray(env, spec->samples * spec->channels);
4.172 + if (audioBufferLocal) {
4.173 + jbufobj = (*env)->NewGlobalRef(env, audioBufferLocal);
4.174 + (*env)->DeleteLocalRef(env, audioBufferLocal);
4.175 + }
4.176 }
4.177 - }
4.178 - else {
4.179 - jbyteArray audioBufferLocal = (*env)->NewByteArray(env, desiredBufferFrames * (isStereo ? 2 : 1));
4.180 - if (audioBufferLocal) {
4.181 - jbufobj = (*env)->NewGlobalRef(env, audioBufferLocal);
4.182 - (*env)->DeleteLocalRef(env, audioBufferLocal);
4.183 + break;
4.184 + case ENCODING_PCM_16BIT:
4.185 + {
4.186 + jshortArray audioBufferLocal = (*env)->NewShortArray(env, spec->samples * spec->channels);
4.187 + if (audioBufferLocal) {
4.188 + jbufobj = (*env)->NewGlobalRef(env, audioBufferLocal);
4.189 + (*env)->DeleteLocalRef(env, audioBufferLocal);
4.190 + }
4.191 }
4.192 + break;
4.193 + case ENCODING_PCM_FLOAT:
4.194 + {
4.195 + jfloatArray audioBufferLocal = (*env)->NewFloatArray(env, spec->samples * spec->channels);
4.196 + if (audioBufferLocal) {
4.197 + jbufobj = (*env)->NewGlobalRef(env, audioBufferLocal);
4.198 + (*env)->DeleteLocalRef(env, audioBufferLocal);
4.199 + }
4.200 + }
4.201 + break;
4.202 + default:
4.203 + return SDL_SetError("Unexpected audio format from Java: %d\n", audioformat);
4.204 }
4.205
4.206 if (jbufobj == NULL) {
4.207 - __android_log_print(ANDROID_LOG_WARN, "SDL", "SDL audio: could not allocate an audio buffer!");
4.208 - return 0;
4.209 + __android_log_print(ANDROID_LOG_WARN, "SDL", "SDL audio: could not allocate an audio buffer");
4.210 + return SDL_OutOfMemory();
4.211 }
4.212
4.213 if (iscapture) {
4.214 + captureBufferFormat = audioformat;
4.215 captureBuffer = jbufobj;
4.216 } else {
4.217 + audioBufferFormat = audioformat;
4.218 audioBuffer = jbufobj;
4.219 }
4.220 + numBufferFrames = (*env)->GetArrayLength(env, (jarray)jbufobj);
4.221
4.222 - isCopy = JNI_FALSE;
4.223 + if (!iscapture) {
4.224 + isCopy = JNI_FALSE;
4.225
4.226 - if (is16Bit) {
4.227 - if (iscapture) {
4.228 - numBufferFrames = (*env)->GetArrayLength(env, (jshortArray)captureBuffer);
4.229 - } else {
4.230 + switch (audioformat) {
4.231 + case ENCODING_PCM_8BIT:
4.232 + audioBufferPinned = (*env)->GetByteArrayElements(env, (jbyteArray)audioBuffer, &isCopy);
4.233 + break;
4.234 + case ENCODING_PCM_16BIT:
4.235 audioBufferPinned = (*env)->GetShortArrayElements(env, (jshortArray)audioBuffer, &isCopy);
4.236 - numBufferFrames = (*env)->GetArrayLength(env, (jshortArray)audioBuffer);
4.237 - }
4.238 - } else {
4.239 - if (iscapture) {
4.240 - numBufferFrames = (*env)->GetArrayLength(env, (jbyteArray)captureBuffer);
4.241 - } else {
4.242 - audioBufferPinned = (*env)->GetByteArrayElements(env, (jbyteArray)audioBuffer, &isCopy);
4.243 - numBufferFrames = (*env)->GetArrayLength(env, (jbyteArray)audioBuffer);
4.244 + break;
4.245 + case ENCODING_PCM_FLOAT:
4.246 + audioBufferPinned = (*env)->GetFloatArrayElements(env, (jfloatArray)audioBuffer, &isCopy);
4.247 + break;
4.248 + default:
4.249 + return SDL_SetError("Unexpected audio format from Java: %d\n", audioformat);
4.250 }
4.251 }
4.252 -
4.253 - if (isStereo) {
4.254 - numBufferFrames /= 2;
4.255 - }
4.256 -
4.257 - return numBufferFrames;
4.258 + return 0;
4.259 }
4.260
4.261 int Android_JNI_GetDisplayDPI(float *ddpi, float *xdpi, float *ydpi)
4.262 @@ -1178,12 +1235,22 @@
4.263 {
4.264 JNIEnv *mAudioEnv = Android_JNI_GetEnv();
4.265
4.266 - if (audioBuffer16Bit) {
4.267 + switch (audioBufferFormat) {
4.268 + case ENCODING_PCM_8BIT:
4.269 + (*mAudioEnv)->ReleaseByteArrayElements(mAudioEnv, (jbyteArray)audioBuffer, (jbyte *)audioBufferPinned, JNI_COMMIT);
4.270 + (*mAudioEnv)->CallStaticVoidMethod(mAudioEnv, mAudioManagerClass, midAudioWriteByteBuffer, (jbyteArray)audioBuffer);
4.271 + break;
4.272 + case ENCODING_PCM_16BIT:
4.273 (*mAudioEnv)->ReleaseShortArrayElements(mAudioEnv, (jshortArray)audioBuffer, (jshort *)audioBufferPinned, JNI_COMMIT);
4.274 (*mAudioEnv)->CallStaticVoidMethod(mAudioEnv, mAudioManagerClass, midAudioWriteShortBuffer, (jshortArray)audioBuffer);
4.275 - } else {
4.276 - (*mAudioEnv)->ReleaseByteArrayElements(mAudioEnv, (jbyteArray)audioBuffer, (jbyte *)audioBufferPinned, JNI_COMMIT);
4.277 - (*mAudioEnv)->CallStaticVoidMethod(mAudioEnv, mAudioManagerClass, midAudioWriteByteBuffer, (jbyteArray)audioBuffer);
4.278 + break;
4.279 + case ENCODING_PCM_FLOAT:
4.280 + (*mAudioEnv)->ReleaseFloatArrayElements(mAudioEnv, (jfloatArray)audioBuffer, (jfloat *)audioBufferPinned, JNI_COMMIT);
4.281 + (*mAudioEnv)->CallStaticVoidMethod(mAudioEnv, mAudioManagerClass, midAudioWriteFloatBuffer, (jfloatArray)audioBuffer);
4.282 + break;
4.283 + default:
4.284 + __android_log_print(ANDROID_LOG_WARN, "SDL", "SDL audio: unhandled audio buffer format");
4.285 + break;
4.286 }
4.287
4.288 /* JNI_COMMIT means the changes are committed to the VM but the buffer remains pinned */
4.289 @@ -1195,16 +1262,8 @@
4.290 jboolean isCopy = JNI_FALSE;
4.291 jint br;
4.292
4.293 - if (captureBuffer16Bit) {
4.294 - SDL_assert((*env)->GetArrayLength(env, (jshortArray)captureBuffer) == (buflen / 2));
4.295 - br = (*env)->CallStaticIntMethod(env, mAudioManagerClass, midCaptureReadShortBuffer, (jshortArray)captureBuffer, JNI_TRUE);
4.296 - if (br > 0) {
4.297 - jshort *ptr = (*env)->GetShortArrayElements(env, (jshortArray)captureBuffer, &isCopy);
4.298 - br *= 2;
4.299 - SDL_memcpy(buffer, ptr, br);
4.300 - (*env)->ReleaseShortArrayElements(env, (jshortArray)captureBuffer, (jshort *)ptr, JNI_ABORT);
4.301 - }
4.302 - } else {
4.303 + switch (captureBufferFormat) {
4.304 + case ENCODING_PCM_8BIT:
4.305 SDL_assert((*env)->GetArrayLength(env, (jshortArray)captureBuffer) == buflen);
4.306 br = (*env)->CallStaticIntMethod(env, mAudioManagerClass, midCaptureReadByteBuffer, (jbyteArray)captureBuffer, JNI_TRUE);
4.307 if (br > 0) {
4.308 @@ -1212,27 +1271,75 @@
4.309 SDL_memcpy(buffer, ptr, br);
4.310 (*env)->ReleaseByteArrayElements(env, (jbyteArray)captureBuffer, (jbyte *)ptr, JNI_ABORT);
4.311 }
4.312 + break;
4.313 + case ENCODING_PCM_16BIT:
4.314 + SDL_assert((*env)->GetArrayLength(env, (jshortArray)captureBuffer) == (buflen / sizeof(Sint16)));
4.315 + br = (*env)->CallStaticIntMethod(env, mAudioManagerClass, midCaptureReadShortBuffer, (jshortArray)captureBuffer, JNI_TRUE);
4.316 + if (br > 0) {
4.317 + jshort *ptr = (*env)->GetShortArrayElements(env, (jshortArray)captureBuffer, &isCopy);
4.318 + br *= sizeof(Sint16);
4.319 + SDL_memcpy(buffer, ptr, br);
4.320 + (*env)->ReleaseShortArrayElements(env, (jshortArray)captureBuffer, (jshort *)ptr, JNI_ABORT);
4.321 + }
4.322 + break;
4.323 + case ENCODING_PCM_FLOAT:
4.324 + SDL_assert((*env)->GetArrayLength(env, (jfloatArray)captureBuffer) == (buflen / sizeof(float)));
4.325 + br = (*env)->CallStaticIntMethod(env, mAudioManagerClass, midCaptureReadFloatBuffer, (jfloatArray)captureBuffer, JNI_TRUE);
4.326 + if (br > 0) {
4.327 + jfloat *ptr = (*env)->GetFloatArrayElements(env, (jfloatArray)captureBuffer, &isCopy);
4.328 + br *= sizeof(float);
4.329 + SDL_memcpy(buffer, ptr, br);
4.330 + (*env)->ReleaseFloatArrayElements(env, (jfloatArray)captureBuffer, (jfloat *)ptr, JNI_ABORT);
4.331 + }
4.332 + break;
4.333 + default:
4.334 + __android_log_print(ANDROID_LOG_WARN, "SDL", "SDL audio: unhandled capture buffer format");
4.335 + break;
4.336 }
4.337 -
4.338 - return (int) br;
4.339 + return br;
4.340 }
4.341
4.342 void Android_JNI_FlushCapturedAudio(void)
4.343 {
4.344 JNIEnv *env = Android_JNI_GetEnv();
4.345 #if 0 /* !!! FIXME: this needs API 23, or it'll do blocking reads and never end. */
4.346 - if (captureBuffer16Bit) {
4.347 - const jint len = (*env)->GetArrayLength(env, (jshortArray)captureBuffer);
4.348 - while ((*env)->CallStaticIntMethod(env, mActivityClass, midCaptureReadShortBuffer, (jshortArray)captureBuffer, JNI_FALSE) == len) { /* spin */ }
4.349 - } else {
4.350 - const jint len = (*env)->GetArrayLength(env, (jbyteArray)captureBuffer);
4.351 - while ((*env)->CallStaticIntMethod(env, mActivityClass, midCaptureReadByteBuffer, (jbyteArray)captureBuffer, JNI_FALSE) == len) { /* spin */ }
4.352 + switch (captureBufferFormat) {
4.353 + case ENCODING_PCM_8BIT:
4.354 + {
4.355 + const jint len = (*env)->GetArrayLength(env, (jbyteArray)captureBuffer);
4.356 + while ((*env)->CallStaticIntMethod(env, mActivityClass, midCaptureReadByteBuffer, (jbyteArray)captureBuffer, JNI_FALSE) == len) { /* spin */ }
4.357 + }
4.358 + break;
4.359 + case ENCODING_PCM_16BIT:
4.360 + {
4.361 + const jint len = (*env)->GetArrayLength(env, (jshortArray)captureBuffer);
4.362 + while ((*env)->CallStaticIntMethod(env, mActivityClass, midCaptureReadShortBuffer, (jshortArray)captureBuffer, JNI_FALSE) == len) { /* spin */ }
4.363 + }
4.364 + break;
4.365 + case ENCODING_PCM_FLOAT:
4.366 + {
4.367 + const jint len = (*env)->GetArrayLength(env, (jfloatArray)captureBuffer);
4.368 + while ((*env)->CallStaticIntMethod(env, mActivityClass, midCaptureReadFloatBuffer, (jfloatArray)captureBuffer, JNI_FALSE) == len) { /* spin */ }
4.369 + }
4.370 + break;
4.371 + default:
4.372 + __android_log_print(ANDROID_LOG_WARN, "SDL", "SDL audio: flushing unhandled capture buffer format");
4.373 + break;
4.374 }
4.375 #else
4.376 - if (captureBuffer16Bit) {
4.377 + switch (captureBufferFormat) {
4.378 + case ENCODING_PCM_8BIT:
4.379 + (*env)->CallStaticIntMethod(env, mAudioManagerClass, midCaptureReadByteBuffer, (jbyteArray)captureBuffer, JNI_FALSE);
4.380 + break;
4.381 + case ENCODING_PCM_16BIT:
4.382 (*env)->CallStaticIntMethod(env, mAudioManagerClass, midCaptureReadShortBuffer, (jshortArray)captureBuffer, JNI_FALSE);
4.383 - } else {
4.384 - (*env)->CallStaticIntMethod(env, mAudioManagerClass, midCaptureReadByteBuffer, (jbyteArray)captureBuffer, JNI_FALSE);
4.385 + break;
4.386 + case ENCODING_PCM_FLOAT:
4.387 + (*env)->CallStaticIntMethod(env, mAudioManagerClass, midCaptureReadFloatBuffer, (jfloatArray)captureBuffer, JNI_FALSE);
4.388 + break;
4.389 + default:
4.390 + __android_log_print(ANDROID_LOG_WARN, "SDL", "SDL audio: flushing unhandled capture buffer format");
4.391 + break;
4.392 }
4.393 #endif
4.394 }
5.1 --- a/src/core/android/SDL_android.h Tue Oct 09 20:12:40 2018 -0700
5.2 +++ b/src/core/android/SDL_android.h Tue Oct 09 20:12:43 2018 -0700
5.3 @@ -31,6 +31,7 @@
5.4 #include <EGL/eglplatform.h>
5.5 #include <android/native_window_jni.h>
5.6
5.7 +#include "SDL_audio.h"
5.8 #include "SDL_rect.h"
5.9
5.10 /* Interface from the SDL library into the Android Java activity */
5.11 @@ -47,7 +48,7 @@
5.12 extern int Android_JNI_GetDisplayDPI(float *ddpi, float *xdpi, float *ydpi);
5.13
5.14 /* Audio support */
5.15 -extern int Android_JNI_OpenAudioDevice(int iscapture, int sampleRate, int is16Bit, int channelCount, int desiredBufferFrames);
5.16 +extern int Android_JNI_OpenAudioDevice(int iscapture, SDL_AudioSpec *spec);
5.17 extern void* Android_JNI_GetAudioBuffer(void);
5.18 extern void Android_JNI_WriteAudioBuffer(void);
5.19 extern int Android_JNI_CaptureAudioBuffer(void *buffer, int buflen);