Added support for surround sound and float audio on Android
slouken committed Oct 10, 2018
1 parent 4679f68 commit f5a21eb
Showing 5 changed files with 449 additions and 165 deletions.
308 changes: 249 additions & 59 deletions android-project/app/src/main/java/org/libsdl/app/SDLAudioManager.java
@@ -1,6 +1,7 @@
package org.libsdl.app;

import android.media.*;
import android.os.Build;
import android.util.Log;

public class SDLAudioManager
@@ -17,41 +18,250 @@ public static void initialize() {

// Audio

- /**
-  * This method is called by SDL using JNI.
-  */
- public static int audioOpen(int sampleRate, boolean is16Bit, boolean isStereo, int desiredFrames) {
- int channelConfig = isStereo ? AudioFormat.CHANNEL_CONFIGURATION_STEREO : AudioFormat.CHANNEL_CONFIGURATION_MONO;
- int audioFormat = is16Bit ? AudioFormat.ENCODING_PCM_16BIT : AudioFormat.ENCODING_PCM_8BIT;
- int frameSize = (isStereo ? 2 : 1) * (is16Bit ? 2 : 1);
protected static String getAudioFormatString(int audioFormat) {
switch (audioFormat) {
case AudioFormat.ENCODING_PCM_8BIT:
return "8-bit";
case AudioFormat.ENCODING_PCM_16BIT:
return "16-bit";
case AudioFormat.ENCODING_PCM_FLOAT:
return "float";
default:
return Integer.toString(audioFormat);
}
}
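// Illustrative note (not in the commit): getAudioFormatString(AudioFormat.ENCODING_PCM_FLOAT)
// yields "float", while an unknown encoding falls through to its raw integer value.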

protected static int[] open(boolean isCapture, int sampleRate, int audioFormat, int desiredChannels, int desiredFrames) {
int channelConfig;
int sampleSize;
int frameSize;

Log.v(TAG, "Opening " + (isCapture ? "capture" : "playback") + ", requested " + desiredFrames + " frames of " + desiredChannels + " channel " + getAudioFormatString(audioFormat) + " audio at " + sampleRate + " Hz");

/* On older devices let's use known good settings */
if (Build.VERSION.SDK_INT < 21) {
if (desiredChannels > 2) {
desiredChannels = 2;
}
if (sampleRate < 8000) {
sampleRate = 8000;
} else if (sampleRate > 48000) {
sampleRate = 48000;
}
}
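// Illustrative note (not in the commit): on API < 21 a request for 6-channel
// audio at 96000 Hz is clamped here to 2 channels at 48000 Hz before a
// channel mask is chosen below.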

Log.v(TAG, "SDL audio: wanted " + (isStereo ? "stereo" : "mono") + " " + (is16Bit ? "16-bit" : "8-bit") + " " + (sampleRate / 1000f) + "kHz, " + desiredFrames + " frames buffer");
if (audioFormat == AudioFormat.ENCODING_PCM_FLOAT) {
int minSDKVersion = (isCapture ? 23 : 21);
if (Build.VERSION.SDK_INT < minSDKVersion) {
audioFormat = AudioFormat.ENCODING_PCM_16BIT;
}
}
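// Illustrative note (not in the commit): ENCODING_PCM_FLOAT is available to
// AudioTrack from API 21 and to AudioRecord from API 23, hence the
// per-direction minimum; older devices fall back to 16-bit PCM here.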
switch (audioFormat)
{
case AudioFormat.ENCODING_PCM_8BIT:
sampleSize = 1;
break;
case AudioFormat.ENCODING_PCM_16BIT:
sampleSize = 2;
break;
case AudioFormat.ENCODING_PCM_FLOAT:
sampleSize = 4;
break;
default:
Log.v(TAG, "Requested format " + audioFormat + ", getting ENCODING_PCM_16BIT");
audioFormat = AudioFormat.ENCODING_PCM_16BIT;
sampleSize = 2;
break;
}

if (isCapture) {
switch (desiredChannels) {
case 1:
channelConfig = AudioFormat.CHANNEL_IN_MONO;
break;
case 2:
channelConfig = AudioFormat.CHANNEL_IN_STEREO;
break;
default:
Log.v(TAG, "Requested " + desiredChannels + " channels, getting stereo");
desiredChannels = 2;
channelConfig = AudioFormat.CHANNEL_IN_STEREO;
break;
}
} else {
switch (desiredChannels) {
case 1:
channelConfig = AudioFormat.CHANNEL_OUT_MONO;
break;
case 2:
channelConfig = AudioFormat.CHANNEL_OUT_STEREO;
break;
case 3:
channelConfig = AudioFormat.CHANNEL_OUT_STEREO | AudioFormat.CHANNEL_OUT_FRONT_CENTER;
break;
case 4:
channelConfig = AudioFormat.CHANNEL_OUT_QUAD;
break;
case 5:
channelConfig = AudioFormat.CHANNEL_OUT_QUAD | AudioFormat.CHANNEL_OUT_FRONT_CENTER;
break;
case 6:
channelConfig = AudioFormat.CHANNEL_OUT_5POINT1;
break;
case 7:
channelConfig = AudioFormat.CHANNEL_OUT_5POINT1 | AudioFormat.CHANNEL_OUT_BACK_CENTER;
break;
case 8:
if (Build.VERSION.SDK_INT >= 23) {
channelConfig = AudioFormat.CHANNEL_OUT_7POINT1_SURROUND;
} else {
Log.v(TAG, "Requested " + desiredChannels + " channels, getting 5.1 surround");
desiredChannels = 6;
channelConfig = AudioFormat.CHANNEL_OUT_5POINT1;
}
break;
default:
Log.v(TAG, "Requested " + desiredChannels + " channels, getting stereo");
desiredChannels = 2;
channelConfig = AudioFormat.CHANNEL_OUT_STEREO;
break;
}

/*
Log.v(TAG, "Speaker configuration (and order of channels):");
if ((channelConfig & 0x00000004) != 0) {
Log.v(TAG, " CHANNEL_OUT_FRONT_LEFT");
}
if ((channelConfig & 0x00000008) != 0) {
Log.v(TAG, " CHANNEL_OUT_FRONT_RIGHT");
}
if ((channelConfig & 0x00000010) != 0) {
Log.v(TAG, " CHANNEL_OUT_FRONT_CENTER");
}
if ((channelConfig & 0x00000020) != 0) {
Log.v(TAG, " CHANNEL_OUT_LOW_FREQUENCY");
}
if ((channelConfig & 0x00000040) != 0) {
Log.v(TAG, " CHANNEL_OUT_BACK_LEFT");
}
if ((channelConfig & 0x00000080) != 0) {
Log.v(TAG, " CHANNEL_OUT_BACK_RIGHT");
}
if ((channelConfig & 0x00000100) != 0) {
Log.v(TAG, " CHANNEL_OUT_FRONT_LEFT_OF_CENTER");
}
if ((channelConfig & 0x00000200) != 0) {
Log.v(TAG, " CHANNEL_OUT_FRONT_RIGHT_OF_CENTER");
}
if ((channelConfig & 0x00000400) != 0) {
Log.v(TAG, " CHANNEL_OUT_BACK_CENTER");
}
if ((channelConfig & 0x00000800) != 0) {
Log.v(TAG, " CHANNEL_OUT_SIDE_LEFT");
}
if ((channelConfig & 0x00001000) != 0) {
Log.v(TAG, " CHANNEL_OUT_SIDE_RIGHT");
}
*/
}
frameSize = (sampleSize * desiredChannels);

// Let the user pick a larger buffer if they really want -- but ye
// gods they probably shouldn't, the minimums are horrifyingly high
// latency already
- desiredFrames = Math.max(desiredFrames, (AudioTrack.getMinBufferSize(sampleRate, channelConfig, audioFormat) + frameSize - 1) / frameSize);
int minBufferSize;
if (isCapture) {
minBufferSize = AudioRecord.getMinBufferSize(sampleRate, channelConfig, audioFormat);
} else {
minBufferSize = AudioTrack.getMinBufferSize(sampleRate, channelConfig, audioFormat);
}
desiredFrames = Math.max(desiredFrames, (minBufferSize + frameSize - 1) / frameSize);
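// Illustrative note (not in the commit): this is a ceiling division from
// bytes to frames. With minBufferSize = 15376 bytes and frameSize = 24
// (6 channels * 4-byte float samples), (15376 + 23) / 24 = 641 frames,
// so any smaller request is rounded up to the device minimum.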

- if (mAudioTrack == null) {
- mAudioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate,
- channelConfig, audioFormat, desiredFrames * frameSize, AudioTrack.MODE_STREAM);
int[] results = new int[4];

- // Instantiating AudioTrack can "succeed" without an exception and the track may still be invalid
- // Ref: https://android.googlesource.com/platform/frameworks/base/+/refs/heads/master/media/java/android/media/AudioTrack.java
- // Ref: http://developer.android.com/reference/android/media/AudioTrack.html#getState()
if (isCapture) {
if (mAudioRecord == null) {
mAudioRecord = new AudioRecord(MediaRecorder.AudioSource.DEFAULT, sampleRate,
channelConfig, audioFormat, desiredFrames * frameSize);

- if (mAudioTrack.getState() != AudioTrack.STATE_INITIALIZED) {
- Log.e(TAG, "Failed during initialization of Audio Track");
- mAudioTrack = null;
- return -1;
// see notes about AudioTrack state in audioOpen(), above. Probably also applies here.
if (mAudioRecord.getState() != AudioRecord.STATE_INITIALIZED) {
Log.e(TAG, "Failed during initialization of AudioRecord");
mAudioRecord.release();
mAudioRecord = null;
return null;
}

mAudioRecord.startRecording();
}

results[0] = mAudioRecord.getSampleRate();
results[1] = mAudioRecord.getAudioFormat();
results[2] = mAudioRecord.getChannelCount();
results[3] = desiredFrames;

} else {
if (mAudioTrack == null) {
mAudioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate, channelConfig, audioFormat, desiredFrames * frameSize, AudioTrack.MODE_STREAM);

// Instantiating AudioTrack can "succeed" without an exception and the track may still be invalid
// Ref: https://android.googlesource.com/platform/frameworks/base/+/refs/heads/master/media/java/android/media/AudioTrack.java
// Ref: http://developer.android.com/reference/android/media/AudioTrack.html#getState()
if (mAudioTrack.getState() != AudioTrack.STATE_INITIALIZED) {
/* Try again, with safer values */

Log.e(TAG, "Failed during initialization of Audio Track");
mAudioTrack.release();
mAudioTrack = null;
return null;
}

mAudioTrack.play();
}

mAudioTrack.play();
results[0] = mAudioTrack.getSampleRate();
results[1] = mAudioTrack.getAudioFormat();
results[2] = mAudioTrack.getChannelCount();
results[3] = desiredFrames;
}

Log.v(TAG, "SDL audio: got " + ((mAudioTrack.getChannelCount() >= 2) ? "stereo" : "mono") + " " + ((mAudioTrack.getAudioFormat() == AudioFormat.ENCODING_PCM_16BIT) ? "16-bit" : "8-bit") + " " + (mAudioTrack.getSampleRate() / 1000f) + "kHz, " + desiredFrames + " frames buffer");
Log.v(TAG, "Opening " + (isCapture ? "capture" : "playback") + ", got " + results[3] + " frames of " + results[2] + " channel " + getAudioFormatString(results[1]) + " audio at " + results[0] + " Hz");

return results;
}

- return 0;
/**
* This method is called by SDL using JNI.
*/
public static int[] audioOpen(int sampleRate, int audioFormat, int desiredChannels, int desiredFrames) {
return open(false, sampleRate, audioFormat, desiredChannels, desiredFrames);
}

/**
* This method is called by SDL using JNI.
*/
public static void audioWriteFloatBuffer(float[] buffer) {
if (mAudioTrack == null) {
Log.e(TAG, "Attempted to make audio call with uninitialized audio!");
return;
}

for (int i = 0; i < buffer.length;) {
int result = mAudioTrack.write(buffer, i, buffer.length - i, AudioTrack.WRITE_BLOCKING);
if (result > 0) {
i += result;
} else if (result == 0) {
try {
Thread.sleep(1);
} catch(InterruptedException e) {
// Nom nom
}
} else {
Log.w(TAG, "SDL audio: error return from write(float)");
return;
}
}
}
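// Illustrative note (not in the commit): samples are interleaved, so 1024
// stereo frames arrive as a float[2048]; write() can consume only part of
// the buffer, which is why the loop above resumes from index i.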

/**
@@ -63,7 +273,7 @@ public static void audioWriteShortBuffer(short[] buffer) {
return;
}

- for (int i = 0; i < buffer.length; ) {
for (int i = 0; i < buffer.length;) {
int result = mAudioTrack.write(buffer, i, buffer.length - i);
if (result > 0) {
i += result;
@@ -109,53 +319,33 @@ public static void audioWriteByteBuffer(byte[] buffer) {
/**
* This method is called by SDL using JNI.
*/
- public static int captureOpen(int sampleRate, boolean is16Bit, boolean isStereo, int desiredFrames) {
- int channelConfig = isStereo ? AudioFormat.CHANNEL_CONFIGURATION_STEREO : AudioFormat.CHANNEL_CONFIGURATION_MONO;
- int audioFormat = is16Bit ? AudioFormat.ENCODING_PCM_16BIT : AudioFormat.ENCODING_PCM_8BIT;
- int frameSize = (isStereo ? 2 : 1) * (is16Bit ? 2 : 1);

- Log.v(TAG, "SDL capture: wanted " + (isStereo ? "stereo" : "mono") + " " + (is16Bit ? "16-bit" : "8-bit") + " " + (sampleRate / 1000f) + "kHz, " + desiredFrames + " frames buffer");

- // Let the user pick a larger buffer if they really want -- but ye
- // gods they probably shouldn't, the minimums are horrifyingly high
- // latency already
- desiredFrames = Math.max(desiredFrames, (AudioRecord.getMinBufferSize(sampleRate, channelConfig, audioFormat) + frameSize - 1) / frameSize);

- if (mAudioRecord == null) {
- mAudioRecord = new AudioRecord(MediaRecorder.AudioSource.DEFAULT, sampleRate,
- channelConfig, audioFormat, desiredFrames * frameSize);

- // see notes about AudioTrack state in audioOpen(), above. Probably also applies here.
- if (mAudioRecord.getState() != AudioRecord.STATE_INITIALIZED) {
- Log.e(TAG, "Failed during initialization of AudioRecord");
- mAudioRecord.release();
- mAudioRecord = null;
- return -1;
- }

- mAudioRecord.startRecording();
- }

Log.v(TAG, "SDL capture: got " + ((mAudioRecord.getChannelCount() >= 2) ? "stereo" : "mono") + " " + ((mAudioRecord.getAudioFormat() == AudioFormat.ENCODING_PCM_16BIT) ? "16-bit" : "8-bit") + " " + (mAudioRecord.getSampleRate() / 1000f) + "kHz, " + desiredFrames + " frames buffer");
public static int[] captureOpen(int sampleRate, int audioFormat, int desiredChannels, int desiredFrames) {
return open(true, sampleRate, audioFormat, desiredChannels, desiredFrames);
}

- return 0;
/** This method is called by SDL using JNI. */
public static int captureReadFloatBuffer(float[] buffer, boolean blocking) {
return mAudioRecord.read(buffer, 0, buffer.length, blocking ? AudioRecord.READ_BLOCKING : AudioRecord.READ_NON_BLOCKING);
}

/** This method is called by SDL using JNI. */
public static int captureReadShortBuffer(short[] buffer, boolean blocking) {
- // !!! FIXME: this is available in API Level 23. Until then, we always block. :(
- //return mAudioRecord.read(buffer, 0, buffer.length, blocking ? AudioRecord.READ_BLOCKING : AudioRecord.READ_NON_BLOCKING);
- return mAudioRecord.read(buffer, 0, buffer.length);
if (Build.VERSION.SDK_INT < 23) {
return mAudioRecord.read(buffer, 0, buffer.length);
} else {
return mAudioRecord.read(buffer, 0, buffer.length, blocking ? AudioRecord.READ_BLOCKING : AudioRecord.READ_NON_BLOCKING);
}
}

/** This method is called by SDL using JNI. */
public static int captureReadByteBuffer(byte[] buffer, boolean blocking) {
- // !!! FIXME: this is available in API Level 23. Until then, we always block. :(
- //return mAudioRecord.read(buffer, 0, buffer.length, blocking ? AudioRecord.READ_BLOCKING : AudioRecord.READ_NON_BLOCKING);
- return mAudioRecord.read(buffer, 0, buffer.length);
if (Build.VERSION.SDK_INT < 23) {
return mAudioRecord.read(buffer, 0, buffer.length);
} else {
return mAudioRecord.read(buffer, 0, buffer.length, blocking ? AudioRecord.READ_BLOCKING : AudioRecord.READ_NON_BLOCKING);
}
}
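// Illustrative note (not in the commit): with READ_NON_BLOCKING (API 23+)
// read() may legitimately return 0 when no samples are buffered yet;
// callers should treat that as "try again" rather than an error.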


/** This method is called by SDL using JNI. */
public static void audioClose() {
if (mAudioTrack != null) {
…
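
For context, a minimal caller-side sketch of the new Java interface (a hypothetical harness, not part of the commit; the class name AudioOpenDemo and the request values are illustrative, and in SDL itself these methods are invoked from native code over JNI):

import android.media.AudioFormat;
import org.libsdl.app.SDLAudioManager;

class AudioOpenDemo {
    static void demo() {
        // Ask for 5.1 float playback; open() may grant something narrower,
        // so read the actual configuration back out of the result array.
        int[] r = SDLAudioManager.audioOpen(48000, AudioFormat.ENCODING_PCM_FLOAT, 6, 1024);
        if (r == null) {
            return; // the AudioTrack failed to initialize
        }
        int format       = r[1]; // may fall back to ENCODING_PCM_16BIT pre-Lollipop
        int channelCount = r[2]; // e.g. 6, or 2 after clamping on old devices
        int frames       = r[3]; // rounded up to the device minimum
        if (format == AudioFormat.ENCODING_PCM_FLOAT) {
            // One buffer of interleaved silence: frames * channels samples.
            SDLAudioManager.audioWriteFloatBuffer(new float[frames * channelCount]);
        }
        SDLAudioManager.audioClose();
    }
}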
5 changes: 3 additions & 2 deletions src/audio/SDL_audio.c
@@ -1130,8 +1130,9 @@ prepare_audiospec(const SDL_AudioSpec * orig, SDL_AudioSpec * prepared)
}
case 1: /* Mono */
case 2: /* Stereo */
- case 4: /* surround */
- case 6: /* surround with center and lfe */
case 4: /* Quadrophonic */
case 6: /* 5.1 surround */
case 8: /* 7.1 surround */
break;
default:
SDL_SetError("Unsupported number of audio channels.");
…
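
The C hunk above widens the channel counts SDL's prepare_audiospec() accepts to match the new Android channel masks; the same check, sketched in Java for comparison (an illustrative helper, not in the commit):

class ChannelCheck {
    // Mirrors the switch in prepare_audiospec(): any other count is rejected
    // with "Unsupported number of audio channels."
    static boolean isSupportedChannelCount(int channels) {
        switch (channels) {
            case 1: // mono
            case 2: // stereo
            case 4: // quadraphonic
            case 6: // 5.1 surround
            case 8: // 7.1 surround
                return true;
            default:
                return false;
        }
    }
}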
