julioz / audiocapturesample Goto Github PK
View Code? Open in Web Editor NEW. Sample project to test the AudioCapture API introduced in Android 10 (Q).
Sample project to test the AudioCapture API introduced in Android 10 (Q)
My Android emulator doesn't show the .pcm files of recorded audio after I press the stop button. When I restart the emulator and check the directory, the files appear, but I can't open them. Here is my code to record the screen with MediaProjection and record audio.
`
/**
 * Foreground service that screen-records via MediaProjection/MediaRecorder and
 * simultaneously captures device audio playback (Android 10+) into a raw PCM file.
 *
 * Fixes applied in review:
 *  - Orientation hint looked up ORIENTATIONS.get(rotation + 90); rotation is already a
 *    Surface.ROTATION_* key (0..3), so the lookup always missed and returned 0.
 *  - Output container was THREE_GPP while the file is named ".mp4"; use MPEG_4.
 *  - stopRecording() NPE'd when audio capture never started; InterruptedException was
 *    swallowed without restoring the interrupt flag.
 *  - MediaProjection onStop() stopped the recorder and then called stopRecording(),
 *    stopping it a second time (IllegalStateException).
 *  - On SDK < Q the capture thread was started with a null AudioRecord (NPE).
 *  - writeAudioToFile() ignored the read() return count and logged every buffer,
 *    despite its own warning that the loop must stay fast.
 *  - Notification PendingIntent used getActivity() for a Service target.
 */
public class RecordingService extends Service {
    private static final String TAG = "hung";
    private static final int NOTIFICATION_ID = 123;

    /** Capture resolutions offered for the virtual display (landscape width x height). */
    private static final List<Resolution> RESOLUTIONS = new ArrayList<Resolution>() {{
        add(new Resolution(640, 360));
        add(new Resolution(960, 540));
        add(new Resolution(1366, 768));
        add(new Resolution(1600, 900));
    }};

    /** Maps a Surface.ROTATION_* constant to the degrees passed to setOrientationHint(). */
    private static final SparseIntArray ORIENTATIONS = new SparseIntArray();
    static {
        ORIENTATIONS.append(Surface.ROTATION_0, 90);
        ORIENTATIONS.append(Surface.ROTATION_90, 0);
        ORIENTATIONS.append(Surface.ROTATION_180, 270);
        ORIENTATIONS.append(Surface.ROTATION_270, 180);
    }

    // Audio buffer sizing; PCM 16-bit mono is hardcoded in startAudioCapture().
    private static final int NUM_SAMPLES_PER_READ = 1024;
    private static final int BYTES_PER_SAMPLE = 2; // 2 bytes since we hardcoded the PCM 16-bit format
    private static final int BUFFER_SIZE_IN_BYTES = NUM_SAMPLES_PER_READ * BYTES_PER_SAMPLE;

    private final IBinder mIBinder = new RecordingBinder();

    private int mScreenDensity;
    private MediaProjectionManager mProjectionManager;
    private int mDisplayWidth;
    private int mDisplayHeight;
    private boolean mScreenSharing;
    private MediaProjection mMediaProjection;
    private VirtualDisplay mVirtualDisplay;
    private MediaProjectionCallback mMediaProjectionCallback;
    private MediaRecorder mMediaRecorder;
    WindowManager mWindowManager;
    private boolean mIsRecording = false;
    private Resolution mResolution;
    private Intent mScreenCaptureIntent;
    private int mScreenCaptureResultCode;
    private AudioRecord audioRecord;
    private Thread audioCaptureThread;

    public RecordingService() {
    }

    /** Binder giving bound clients direct access to this service instance. */
    public class RecordingBinder extends Binder {
        public RecordingService getService() {
            return RecordingService.this;
        }
    }

    @Override
    public IBinder onBind(Intent intent) {
        Log.d(TAG, "RecordingService: onBind()");
        // The client forwards the screen-capture permission intent (from
        // createScreenCaptureIntent()'s onActivityResult) inside EXTRA_INTENT.
        mScreenCaptureIntent = intent.getParcelableExtra(Intent.EXTRA_INTENT);
        mScreenCaptureResultCode = mScreenCaptureIntent.getIntExtra(UiUtils.SCREEN_CAPTURE_INTENT_RESULT_CODE, UiUtils.RESULT_CODE_FAILED);
        Log.d(TAG, "onBind: " + mScreenCaptureIntent);
        return mIBinder;
    }

    @Override
    public void onCreate() {
        super.onCreate();
        Log.d(TAG, "RecordingService: onCreate()");
        mWindowManager = (WindowManager) getSystemService(WINDOW_SERVICE);
        DisplayMetrics metrics = new DisplayMetrics();
        mWindowManager.getDefaultDisplay().getMetrics(metrics);
        mMediaRecorder = new MediaRecorder();
        mScreenDensity = metrics.densityDpi;
        mProjectionManager =
                (MediaProjectionManager) getSystemService(Context.MEDIA_PROJECTION_SERVICE);
        // TODO: choose resolution and orientation from UI instead of hardcoding.
        mResolution = RESOLUTIONS.get(3);
        // NOTE(review): width takes y and height takes x, i.e. the 1600x900 entry is
        // used as portrait 900x1600 — confirm this swap is intentional.
        mDisplayWidth = mResolution.y;
        mDisplayHeight = mResolution.x;
    }

    /** Prepares the MediaRecorder and acquires the MediaProjection. */
    public void prepareToRecording() {
        Log.d(TAG, "RecordingService: prepareToRecording()");
        initRecorder();
        shareScreen();
    }

    /** Enters foreground, starts the screen recording and the audio capture thread. */
    public void startRecording() {
        mIsRecording = true;
        startForeground(NOTIFICATION_ID, buildNotification());
        prepareToRecording();
        mVirtualDisplay = createVirtualDisplay();
        mMediaRecorder.start();
        startAudioCapture();
    }

    /**
     * Stops audio capture and the MediaRecorder, releases the projection and leaves
     * foreground state. Safe to call even if startRecording() failed part-way.
     */
    public void stopRecording() {
        // audioCaptureThread is null when audio capture never started
        // (permission denied, SDK < Q, or AudioRecord init failure).
        if (audioCaptureThread != null) {
            audioCaptureThread.interrupt();
            try {
                audioCaptureThread.join();
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt(); // restore interrupt status
            }
            audioCaptureThread = null;
        }
        if (audioRecord != null) {
            audioRecord.stop();
            audioRecord.release();
            audioRecord = null;
        }
        if (mIsRecording) {
            mIsRecording = false;
            try {
                mMediaRecorder.stop();
            } catch (RuntimeException e) {
                // stop() throws when no valid frames were recorded; log and continue teardown.
                Log.w(TAG, "MediaRecorder.stop() failed", e);
            }
            mMediaRecorder.reset();
        }
        Log.d(TAG, "Stopping Recording");
        stopScreenSharing();
        stopForeground(true);
    }

    /** Releases the virtual display and the MediaProjection. */
    private void stopScreenSharing() {
        Log.d(TAG, "RecordingService: stopScreenSharing()");
        mScreenSharing = false;
        if (mVirtualDisplay == null) {
            return;
        }
        mVirtualDisplay.release();
        destroyMediaProjection();
    }

    /** Configures the MediaRecorder for surface-sourced H.264 video into an MP4 file. */
    private void initRecorder() {
        Log.d(TAG, "RecordingService: initRecorder()");
        String timeStamp = new SimpleDateFormat("yyyy-MM-dd-HH-mm").format(new Date());
        try {
            mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.SURFACE);
            // FIX: the file is named ".mp4", so write an MPEG-4 container; the
            // original wrote a 3GPP container into the .mp4 file.
            mMediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
            // NOTE(review): getExternalStoragePublicDirectory() is deprecated and
            // needs legacy storage on Android 10+ — confirm manifest settings.
            mMediaRecorder.setOutputFile(Environment
                    .getExternalStoragePublicDirectory(Environment
                            .DIRECTORY_DOWNLOADS) + "/SCREC-" + timeStamp + ".mp4");
            mMediaRecorder.setVideoSize(mDisplayWidth, mDisplayHeight);
            mMediaRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.H264);
            mMediaRecorder.setVideoEncodingBitRate(512 * 1000);
            mMediaRecorder.setVideoFrameRate(30);
            int rotation = mWindowManager.getDefaultDisplay().getRotation();
            // FIX: rotation is already a Surface.ROTATION_* key (0..3); the original
            // looked up (rotation + 90), which never matched and always yielded 0.
            int orientation = ORIENTATIONS.get(rotation);
            mMediaRecorder.setOrientationHint(orientation);
            mMediaRecorder.prepare();
        } catch (IOException e) {
            Log.e(TAG, "MediaRecorder.prepare() failed", e);
        }
    }

    /** Obtains the MediaProjection from the stored permission result (idempotent). */
    private void shareScreen() {
        Log.d(TAG, "RecordingService: shareScreen()");
        mScreenSharing = true;
        if (mMediaProjection == null) {
            mMediaProjectionCallback = new MediaProjectionCallback();
            mMediaProjection = mProjectionManager.getMediaProjection(mScreenCaptureResultCode, mScreenCaptureIntent);
            mMediaProjection.registerCallback(mMediaProjectionCallback, null);
        }
    }

    /** Creates the mirrored virtual display backed by the recorder's input surface. */
    private VirtualDisplay createVirtualDisplay() {
        Log.d(TAG, "RecordingService: createVirtualDisplay()");
        return mMediaProjection.createVirtualDisplay("ScreenSharingDemo",
                mDisplayWidth, mDisplayHeight, mScreenDensity,
                DisplayManager.VIRTUAL_DISPLAY_FLAG_AUTO_MIRROR,
                mMediaRecorder.getSurface(), null /*Callbacks*/, null /*Handler*/);
    }

    /** Unregisters the callback and stops the MediaProjection, if any. */
    private void destroyMediaProjection() {
        Log.d(TAG, "RecordingService: destroyMediaProjection()");
        if (mMediaProjection != null) {
            mMediaProjection.unregisterCallback(mMediaProjectionCallback);
            mMediaProjection.stop();
            mMediaProjection = null;
        }
        Log.i(TAG, "MediaProjection Stopped");
    }

    /** Invoked by the system when the projection is revoked (e.g. via the status bar). */
    private class MediaProjectionCallback extends MediaProjection.Callback {
        @Override
        public void onStop() {
            // FIX: the original stopped the recorder here AND inside stopRecording(),
            // stopping it twice and crashing. Tear everything down exactly once.
            mMediaProjection = null;
            stopRecording();
        }
    }

    /** Simple width/height pair, e.g. "1600x900". */
    private static class Resolution {
        int x;
        int y;

        public Resolution(int x, int y) {
            this.x = x;
            this.y = y;
        }

        @Override
        public String toString() {
            return x + "x" + y;
        }
    }

    /** Builds the foreground notification (creates the channel on O+). */
    private Notification buildNotification() {
        NotificationCompat.Builder builder;
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
            String channelId = "channel_01";
            CharSequence channelName = "Channel Name";
            int importance = android.app.NotificationManager.IMPORTANCE_DEFAULT;
            NotificationChannel notificationChannel = new NotificationChannel(channelId, channelName, importance);
            notificationChannel.setLightColor(Color.GREEN);
            notificationChannel.setLockscreenVisibility(Notification.VISIBILITY_PRIVATE);
            android.app.NotificationManager notificationManager = (android.app.NotificationManager) getSystemService(Context.NOTIFICATION_SERVICE);
            assert notificationManager != null;
            notificationManager.createNotificationChannel(notificationChannel);
            builder = new NotificationCompat.Builder(this, channelId);
        } else {
            builder = new NotificationCompat.Builder(this);
        }
        Intent notificationIntent = new Intent(this, RecordingService.class);
        // FIX: the intent targets a Service, so use getService(); the original used
        // getActivity(), which can never resolve RecordingService and tapping did nothing.
        PendingIntent pendingIntent = PendingIntent.getService(this, 0, notificationIntent, PendingIntent.FLAG_IMMUTABLE);
        builder.setContentTitle("Recording in progress")
                .setContentText("Tap to open")
                .setSmallIcon(R.drawable.ic_rec)
                .setLargeIcon(BitmapFactory.decodeResource(getResources(), R.drawable.ic_rec))
                .setContentIntent(pendingIntent);
        return builder.build();
    }

    /**
     * Starts capturing this device's audio playback (USAGE_MEDIA/UNKNOWN) on a
     * background thread. No-op below Android Q, without RECORD_AUDIO permission,
     * or if the AudioRecord fails to initialize.
     */
    private void startAudioCapture() {
        // FIX: below Q there is no playback capture; the original still started the
        // writer thread with a null AudioRecord and crashed with an NPE.
        if (Build.VERSION.SDK_INT < Build.VERSION_CODES.Q) {
            Log.w(TAG, "Audio playback capture requires Android 10 (Q)+");
            return;
        }
        if (ActivityCompat.checkSelfPermission(this, Manifest.permission.RECORD_AUDIO) != PackageManager.PERMISSION_GRANTED) {
            return;
        }
        AudioPlaybackCaptureConfiguration config = new AudioPlaybackCaptureConfiguration.Builder(mMediaProjection)
                .addMatchingUsage(AudioAttributes.USAGE_MEDIA)
                .addMatchingUsage(AudioAttributes.USAGE_UNKNOWN) // TODO provide UI options for inclusion/exclusion
                .build();
        AudioFormat audioFormat = new AudioFormat.Builder()
                .setEncoding(AudioFormat.ENCODING_PCM_16BIT)
                .setSampleRate(8000)
                .setChannelMask(AudioFormat.CHANNEL_IN_MONO)
                .build();
        audioRecord = new AudioRecord.Builder()
                .setAudioFormat(audioFormat)
                .setBufferSizeInBytes(BUFFER_SIZE_IN_BYTES)
                .setAudioPlaybackCaptureConfig(config)
                .build();
        // Guard against startRecording() on an uninitialized native AudioRecord.
        if (audioRecord.getState() != AudioRecord.STATE_INITIALIZED) {
            Log.e(TAG, "AudioRecord failed to initialize; skipping audio capture");
            audioRecord.release();
            audioRecord = null;
            return;
        }
        audioCaptureThread = new Thread(() -> {
            audioRecord.startRecording();
            File outputFile = createAudioFile();
            Log.d(TAG, "Created file for capture target: " + outputFile.getAbsolutePath());
            writeAudioToFile(outputFile);
        });
        audioCaptureThread.start();
    }

    /** Creates Downloads/AudioCaptures/Capture-&lt;timestamp&gt;.pcm (raw headerless PCM). */
    private File createAudioFile() {
        File audioCapturesDirectory = new File(Environment
                .getExternalStoragePublicDirectory(Environment
                        .DIRECTORY_DOWNLOADS), "/AudioCaptures");
        if (!audioCapturesDirectory.exists() && !audioCapturesDirectory.mkdirs()) {
            Log.w(TAG, "Could not create " + audioCapturesDirectory.getAbsolutePath());
        }
        String timestamp = new SimpleDateFormat("dd-MM-yyyy-hh-mm-ss", Locale.US).format(new Date());
        String fileName = "Capture-" + timestamp + ".pcm";
        File res = new File(audioCapturesDirectory, fileName);
        Log.v(TAG, res.getAbsolutePath());
        return res;
    }

    /**
     * Pulls samples from the AudioRecord and appends them to outputFile as
     * little-endian 16-bit PCM until this thread is interrupted or a read fails.
     */
    private void writeAudioToFile(File outputFile) {
        try (FileOutputStream fileOutputStream = new FileOutputStream(outputFile)) {
            short[] capturedAudioSamples = new short[NUM_SAMPLES_PER_READ];
            while (!Thread.currentThread().isInterrupted()) {
                // FIX: honor the actual sample count; the original ignored it and
                // wrote the whole buffer even on short or failed reads.
                int samplesRead = audioRecord.read(capturedAudioSamples, 0, NUM_SAMPLES_PER_READ);
                if (samplesRead <= 0) {
                    // Negative values are AudioRecord.ERROR_* codes.
                    Log.e(TAG, "AudioRecord.read() returned " + samplesRead);
                    break;
                }
                // This loop must stay fast to avoid artifacts, so no per-buffer logging.
                byte[] bytes = toByteArray(capturedAudioSamples);
                fileOutputStream.write(bytes, 0, samplesRead * BYTES_PER_SAMPLE);
            }
        } catch (IOException e) {
            Log.e(TAG, "Failed writing captured audio", e);
        }
        Log.d(TAG, "Audio capture finished for " + outputFile.getAbsolutePath() + ". File size is " + outputFile.length() + " bytes.");
    }

    /** Converts 16-bit samples to little-endian byte pairs (low byte first). */
    private byte[] toByteArray(short[] shorts) {
        byte[] bytes = new byte[shorts.length * 2];
        for (int i = 0; i < shorts.length; i++) {
            bytes[i * 2] = (byte) (shorts[i] & 0x00FF);
            bytes[i * 2 + 1] = (byte) ((shorts[i] & 0xFF00) >> 8);
        }
        return bytes;
    }
}
`
Can you give me this project named by 《AudioCaptureSample》 with Java code?
I am trying to record YouTube audio playback. When I play the .pcm file, I cannot hear anything. I am playing the recordings in a PCM player and my audio encoding is the same as in the Java code. My Android Studio version is 3.5.0 and I am running the code on Android Q.
I have been implementing this functionality to record audio playbacks played on the device.
It is working fine when my app is in foreground, and I am getting pretty fine audio recordings, but it is throwing the below error whenever my app goes to background.
I have implemented this as a service and starting this service from another service which is a foreground service. I have tried all of the configuration for audioFormat but audioRecord object is failing to initialize.
Below is the function where app is crashed at run time.
/**
 * Starts capturing device audio playback into a PCM file on a background thread.
 *
 * FIX for the reported crash ("startRecording() called on an uninitialized
 * AudioRecord"): when AudioFlinger refuses to create the record track (status -1),
 * the AudioRecord object is constructed but left uninitialized, and calling
 * startRecording() on it throws the fatal IllegalStateException. Check getState()
 * and bail out gracefully instead of crashing the service.
 *
 * NOTE(review): the underlying createRecord error -1 in the background on
 * Android 11+ is a platform restriction — capture must be started from a
 * foreground service declared with foregroundServiceType "mediaProjection"
 * (and microphone access is blocked while the app is in the background).
 * Confirm the manifest and service startup order; this guard only prevents
 * the crash, it does not lift the restriction.
 */
private final void startAudioCapture() {
    AudioPlaybackCaptureConfiguration config = new AudioPlaybackCaptureConfiguration.Builder(mediaProjection)
            .addMatchingUsage(AudioAttributes.USAGE_MEDIA)
            .build();
    AudioFormat audioFormat = new AudioFormat.Builder()
            .setEncoding(AudioFormat.ENCODING_PCM_16BIT)
            .setSampleRate(8000)
            .setChannelMask(AudioFormat.CHANNEL_IN_MONO)
            .build();
    if (ActivityCompat.checkSelfPermission(this, Manifest.permission.RECORD_AUDIO) != PackageManager.PERMISSION_GRANTED) {
        return;
    }
    int bufferSize = AudioRecord.getMinBufferSize(8000, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
    audioRecord = new AudioRecord.Builder()
            .setAudioFormat(audioFormat)
            .setBufferSizeInBytes(bufferSize)
            .setAudioPlaybackCaptureConfig(config)
            .build();
    // Guard: native initialization can fail (e.g. capture attempted from the
    // background on Android 11); starting an uninitialized record is fatal.
    if (audioRecord.getState() != AudioRecord.STATE_INITIALIZED) {
        audioRecord.release();
        audioRecord = null;
        return;
    }
    audioRecord.startRecording();
    audioCaptureThread = new Thread() {
        @Override
        public void run() {
            File outputFile = createAudioFile();
            writeAudioToFile(outputFile);
        }
    };
    audioCaptureThread.start();
    mIsRecording = true;
}
Errors:- E/IAudioFlinger: createRecord returned error -1 E/AudioRecord: createRecord_l(1): AudioFlinger could not create record track, status: -1 E/AudioRecord-JNI: Error creating AudioRecord instance: initialization check failed with status -1. E/android.media.AudioRecord: Error code -20 when initializing native AudioRecord object.
E/AndroidRuntime: FATAL EXCEPTION: main Process: com.Myapp.app.internal, PID: 18240 java.lang.RuntimeException: Unable to start service com.Myapp.app.AudioService@bfz0813 with Intent { act=AudioService:Start cmp=com.Myapp.app.internal/com.Myapp.app.AudioService }: java.lang.IllegalStateException: startRecording() called on an uninitialized AudioRecord.
Note:- This works perfect for all android version less than 11.
I am a student.
I want to rewrite this code in java.
for Microphone audio recording I get permission like the following.
/**
 * Activity that requests the RECORD_AUDIO runtime permission on startup.
 *
 * FIX: onRequestPermissionsResult indexed grantResults[0] unconditionally; the
 * platform documents that the array is EMPTY when the permission dialog is
 * cancelled, so the original could throw ArrayIndexOutOfBoundsException.
 */
public class Microphone extends AppCompatActivity {
    // Latest known grant state; readable by other components.
    public static boolean permissionToRecordAccepted = false;
    private static final int REQUEST_RECORD_AUDIO_PERMISSION = 200;
    private final String[] permissions = {Manifest.permission.RECORD_AUDIO};

    @SuppressLint("ClickableViewAccessibility")
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_microphone);
        if (this.checkSelfPermission(Manifest.permission.RECORD_AUDIO) != PackageManager.PERMISSION_GRANTED) {
            // Result is delivered asynchronously to onRequestPermissionsResult().
            requestPermissions(permissions, REQUEST_RECORD_AUDIO_PERMISSION);
        } else {
            permissionToRecordAccepted = true;
        }
        if (permissionToRecordAccepted) {
            System.out.println("permission granted.");
        }
    }

    @Override
    public void onRequestPermissionsResult(int requestCode, String[] permissions, int[] grantResults) {
        super.onRequestPermissionsResult(requestCode, permissions, grantResults);
        if (requestCode == REQUEST_RECORD_AUDIO_PERMISSION) {
            // Empty grantResults means the request was interrupted/cancelled;
            // treat that as "not granted" instead of crashing on [0].
            permissionToRecordAccepted = grantResults.length > 0
                    && grantResults[0] == PackageManager.PERMISSION_GRANTED;
        }
    }
}
I want to rewrite this code in Java, but I am confused.
I don't know how to get permission for internal audio capturing in Java.
Can you help me get permission for internal audio?
When I run this code in the emulator when playing music with another app, nothing is recorded. I tried several music apps.
All permissions are given.
I do get the popup message about the recording and I see the icon on top about the recording.
The recording file is saved, but when I open it, it only contains zeros.
I tried it on android 10 and 11.
Android 10: Automotive emulator and Polestar 2
Android 11: Automotive emulator (both with headset on and off)
A declarative, efficient, and flexible JavaScript library for building user interfaces.
🖖 Vue.js is a progressive, incrementally-adoptable JavaScript framework for building UI on the web.
TypeScript is a superset of JavaScript that compiles to clean JavaScript output.
An Open Source Machine Learning Framework for Everyone
The Web framework for perfectionists with deadlines.
A PHP framework for web artisans
Bring data to life with SVG, Canvas and HTML. 📊📈🎉
JavaScript (JS) is a lightweight interpreted programming language with first-class functions.
Some thing interesting about web. New door for the world.
A server is a program made to process requests and deliver data to clients.
Machine learning is a way of modeling and interpreting data that allows a piece of software to respond intelligently.
Some thing interesting about visualization, use data art
Some thing interesting about game, make everyone happy.
We are working to build community through open source technology. NB: members must have two-factor auth.
Open source projects and samples from Microsoft.
Google ❤️ Open Source for everyone.
Alibaba Open Source for everyone
Data-Driven Documents codes.
China tencent open source team.