Compare commits
40 Commits
| SHA1 |
|---|
| 80af4b83aa |
| ccf4ad9013 |
| 9946350df8 |
| b9e941b768 |
| 2e650fa39d |
| 3ac7e7d412 |
| d199117b43 |
| 940f40444f |
| e9037a3c71 |
| 4acae87db4 |
| c333131c7f |
| 4844d975d4 |
| b696846f6c |
| e8df8242e6 |
| e5b7280a06 |
| 34cf80845c |
| 426a796066 |
| 1b4aca9bd0 |
| a1edaab9fd |
| 295c0320c8 |
| f3c81f18a8 |
| 2220e4a523 |
| 7ba2da9662 |
| 802f380768 |
| f0c74a7670 |
| e40eb28970 |
| 33847be76d |
| bb7f45c36e |
| 2f6807f3f6 |
| 51ad6af3e2 |
| 1cb6b42e56 |
| 78083f2368 |
| a5f9fe79af |
| f32a59be56 |
| cf15d7ade8 |
| c980d4c1bd |
| 031627d315 |
| 7d103df37a |
| 013cca61e8 |
| f8704432d6 |
app/meson.build

@@ -31,6 +31,7 @@ src = [
'src/version.c',
'src/video_buffer.c',
'src/util/acksync.c',
'src/util/average.c',
'src/util/bytebuf.c',
'src/util/file.c',
'src/util/intmap.c',
app/src/audio_player.c

@@ -4,11 +4,27 @@

#include "util/log.h"

/** Downcast frame_sink to sc_v4l2_sink */
#define SC_AUDIO_PLAYER_NDEBUG // comment to debug

/** Downcast frame_sink to sc_audio_player */
#define DOWNCAST(SINK) container_of(SINK, struct sc_audio_player, frame_sink)

#define SC_AV_SAMPLE_FMT AV_SAMPLE_FMT_S16
#define SC_SDL_SAMPLE_FMT AUDIO_S16
#define SC_AV_SAMPLE_FMT AV_SAMPLE_FMT_FLT
#define SC_SDL_SAMPLE_FMT AUDIO_F32

#define SC_AUDIO_OUTPUT_BUFFER_SAMPLES 480 // 10ms at 48000Hz

// The target number of buffered samples between the producer and the consumer.
// This value is directly used for compensation.
#define SC_TARGET_BUFFERED_SAMPLES (3 * SC_AUDIO_OUTPUT_BUFFER_SAMPLES)

// If the consumer is too late, skip samples to keep at most this value
#define SC_BUFFERED_SAMPLES_THRESHOLD 2400 // 50ms at 48000Hz

// Use a ring-buffer of 1 second (at 48000Hz) between the producer and the
// consumer. It is too big, but it guarantees that the producer and the consumer
// will be able to access it in parallel without locking.
#define SC_BYTEBUF_SIZE_IN_SAMPLES 48000

void
sc_audio_player_sdl_callback(void *userdata, uint8_t *stream, int len_int) {
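As a side note, here is a minimal standalone sketch of the byte sizes these sample counts translate to, assuming stereo output (the usual case) and the 4-byte float samples selected by SC_SDL_SAMPLE_FMT above; it is illustrative only, not part of the patch:

```c
#include <stdio.h>

// Illustrative only: byte sizes implied by the sample counts above,
// for 2 channels of 32-bit float samples at 48000 Hz.
int main(void) {
    const int nb_channels = 2;
    const int out_bytes_per_sample = 4; // AUDIO_F32
    const int sample_rate = 48000;

    const int output_buffer = 480;        // SC_AUDIO_OUTPUT_BUFFER_SAMPLES (10 ms)
    const int target = 3 * output_buffer; // SC_TARGET_BUFFERED_SAMPLES (30 ms)
    const int threshold = 2400;           // SC_BUFFERED_SAMPLES_THRESHOLD (50 ms)
    const int bytebuf = 48000;            // SC_BYTEBUF_SIZE_IN_SAMPLES (1 s)

    printf("SDL output buffer: %d bytes\n", output_buffer * nb_channels * out_bytes_per_sample);
    printf("buffering target:  %d bytes\n", target * nb_channels * out_bytes_per_sample);
    printf("skip threshold:    %d bytes\n", threshold * nb_channels * out_bytes_per_sample);
    printf("ring buffer:       %d bytes (%d ms)\n",
           bytebuf * nb_channels * out_bytes_per_sample,
           1000 * bytebuf / sample_rate);
    return 0;
}
```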
@@ -20,21 +36,49 @@ sc_audio_player_sdl_callback(void *userdata, uint8_t *stream, int len_int) {
assert(len_int > 0);
size_t len = len_int;

#ifndef SC_AUDIO_PLAYER_NDEBUG
LOGD("[Audio] SDL callback requests %" SC_PRIsizet " samples",
len / (ap->nb_channels * ap->out_bytes_per_sample));
#endif

size_t read = sc_bytebuf_read_remaining(&ap->buf);
size_t max_buffered_bytes = SC_BUFFERED_SAMPLES_THRESHOLD
* ap->nb_channels * ap->out_bytes_per_sample;
if (read > max_buffered_bytes + len) {
size_t skip = read - (max_buffered_bytes + len);
#ifndef SC_AUDIO_PLAYER_NDEBUG
LOGD("[Audio] Buffered samples threshold exceeded: %" SC_PRIsizet
" bytes, skipping %" SC_PRIsizet " bytes", read, skip);
#endif
// After this callback, exactly max_buffered_bytes will remain
sc_bytebuf_skip(&ap->buf, skip);
read = max_buffered_bytes + len;
}

// Number of buffered samples (may be negative on underflow)
float buffered_samples = ((float) read - len_int)
/ (ap->nb_channels * ap->out_bytes_per_sample);
sc_average_push(&ap->avg_buffered_samples, buffered_samples);

if (read) {
if (read > len) {
read = len;
}
sc_bytebuf_read(&ap->buf, stream, read);
}

if (read < len) {
// Insert silence
#ifndef SC_AUDIO_PLAYER_NDEBUG
LOGD("[Audio] Buffer underflow, inserting silence: %" SC_PRIsizet
" bytes", len - read);
#endif
memset(stream + read, 0, len - read);
}
}

static size_t
sc_audio_player_get_swr_buf_size(struct sc_audio_player *ap, size_t samples) {
sc_audio_player_get_buf_size(struct sc_audio_player *ap, size_t samples) {
assert(ap->nb_channels);
assert(ap->out_bytes_per_sample);
return samples * ap->nb_channels * ap->out_bytes_per_sample;
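As a sanity check on the clamping above, a small self-contained sketch (plain numbers standing in for the project's byte buffer, which is not shown in this diff) confirms that exactly max_buffered_bytes stay buffered once the callback has consumed its len bytes:

```c
#include <assert.h>
#include <stddef.h>
#include <stdio.h>

// Illustrative only: model the "skip to threshold" logic of the SDL callback.
int main(void) {
    size_t len = 3840;                 // one SDL callback: 480 samples * 2 channels * 4 bytes
    size_t max_buffered_bytes = 19200; // threshold: 2400 samples * 2 channels * 4 bytes
    size_t read = 30000;               // pretend the consumer fell behind

    if (read > max_buffered_bytes + len) {
        size_t skip = read - (max_buffered_bytes + len);
        printf("skipping %zu bytes\n", skip);
        read -= skip; // what sc_bytebuf_skip() would drop
    }

    // The callback then consumes len bytes...
    size_t remaining = read - len;
    // ...so exactly max_buffered_bytes remain buffered
    assert(remaining == max_buffered_bytes);
    printf("remaining after the callback: %zu bytes\n", remaining);
    return 0;
}
```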
@@ -42,7 +86,7 @@ sc_audio_player_get_swr_buf_size(struct sc_audio_player *ap, size_t samples) {

static uint8_t *
sc_audio_player_get_swr_buf(struct sc_audio_player *ap, size_t min_samples) {
size_t min_buf_size = sc_audio_player_get_swr_buf_size(ap, min_samples);
size_t min_buf_size = sc_audio_player_get_buf_size(ap, min_samples);
if (min_buf_size < ap->swr_buf_alloc_size) {
size_t new_size = min_buf_size + 4096;
uint8_t *buf = realloc(ap->swr_buf, new_size);
@@ -63,8 +107,28 @@ sc_audio_player_frame_sink_open(struct sc_frame_sink *sink,
const AVCodecContext *ctx) {
struct sc_audio_player *ap = DOWNCAST(sink);

SwrContext *swr_ctx = ap->swr_ctx;
assert(swr_ctx);
SDL_AudioSpec desired = {
.freq = ctx->sample_rate,
.format = SC_SDL_SAMPLE_FMT,
.channels = ctx->ch_layout.nb_channels,
.samples = SC_AUDIO_OUTPUT_BUFFER_SAMPLES,
.callback = sc_audio_player_sdl_callback,
.userdata = ap,
};
SDL_AudioSpec obtained;

ap->device = SDL_OpenAudioDevice(NULL, 0, &desired, &obtained, 0);
if (!ap->device) {
LOGE("Could not open audio device: %s", SDL_GetError());
return false;
}

SwrContext *swr_ctx = swr_alloc();
if (!swr_ctx) {
LOG_OOM();
goto error_close_audio_device;
}
ap->swr_ctx = swr_ctx;

assert(ctx->sample_rate > 0);
assert(ctx->ch_layout.nb_channels > 0);
@@ -83,39 +147,46 @@ sc_audio_player_frame_sink_open(struct sc_frame_sink *sink,
int ret = swr_init(swr_ctx);
if (ret) {
LOGE("Failed to initialize the resampling context");
return false;
goto error_free_swr_ctx;
}

ap->sample_rate = ctx->sample_rate;
ap->nb_channels = ctx->ch_layout.nb_channels;
ap->out_bytes_per_sample = out_bytes_per_sample;

size_t initial_swr_buf_size = sc_audio_player_get_swr_buf_size(ap, 4096);
size_t bytebuf_size =
sc_audio_player_get_buf_size(ap, SC_BYTEBUF_SIZE_IN_SAMPLES);

bool ok = sc_bytebuf_init(&ap->buf, bytebuf_size);
if (!ok) {
goto error_free_swr_ctx;
}

ap->safe_empty_buffer = sc_bytebuf_write_remaining(&ap->buf);

size_t initial_swr_buf_size = sc_audio_player_get_buf_size(ap, 4096);
ap->swr_buf = malloc(initial_swr_buf_size);
if (!ap->swr_buf) {
LOG_OOM();
return false;
goto error_destroy_bytebuf;
}
ap->swr_buf_alloc_size = initial_swr_buf_size;

SDL_AudioSpec desired = {
.freq = ctx->sample_rate,
.format = SC_SDL_SAMPLE_FMT,
.channels = ctx->ch_layout.nb_channels,
.samples = 512, // ~10ms at 48000Hz
.callback = sc_audio_player_sdl_callback,
.userdata = ap,
};
SDL_AudioSpec obtained;

ap->device = SDL_OpenAudioDevice(NULL, 0, &desired, &obtained, 0);
if (!ap->device) {
LOGE("Could not open audio device: %s", SDL_GetError());
return false;
}
sc_average_init(&ap->avg_buffered_samples, 32);
ap->samples_since_resync = 0;

SDL_PauseAudioDevice(ap->device, 0);

return true;

error_destroy_bytebuf:
sc_bytebuf_destroy(&ap->buf);
error_free_swr_ctx:
swr_free(&ap->swr_ctx);
error_close_audio_device:
SDL_CloseAudioDevice(ap->device);

return false;
}

static void
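For readers less familiar with SDL's pull model used here, a minimal self-contained example of opening an audio device with a callback; this is an illustrative sketch with hard-coded 48000 Hz stereo float and a silence-only callback, not scrcpy code:

```c
#include <SDL2/SDL.h>
#include <string.h>

// Placeholder callback: SDL pulls audio from here on its own audio thread;
// a real player would copy decoded samples instead of silence.
static void
sdl_callback(void *userdata, Uint8 *stream, int len) {
    (void) userdata;
    memset(stream, 0, len);
}

int main(void) {
    if (SDL_Init(SDL_INIT_AUDIO)) {
        SDL_Log("SDL_Init failed: %s", SDL_GetError());
        return 1;
    }

    SDL_AudioSpec desired = {
        .freq = 48000,
        .format = AUDIO_F32,
        .channels = 2,
        .samples = 480,          // ~10 ms at 48000 Hz
        .callback = sdl_callback,
    };
    SDL_AudioSpec obtained;

    SDL_AudioDeviceID dev = SDL_OpenAudioDevice(NULL, 0, &desired, &obtained, 0);
    if (!dev) {
        SDL_Log("Could not open audio device: %s", SDL_GetError());
        SDL_Quit();
        return 1;
    }

    SDL_PauseAudioDevice(dev, 0); // start the callback
    SDL_Delay(1000);              // "play" one second of silence
    SDL_CloseAudioDevice(dev);
    SDL_Quit();
    return 0;
}
```

Since SDL invokes the callback from its own audio thread whenever the device needs more data, the player above has to feed it from a buffer that the decoder thread fills concurrently.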
@@ -125,6 +196,10 @@ sc_audio_player_frame_sink_close(struct sc_frame_sink *sink) {
assert(ap->device);
SDL_PauseAudioDevice(ap->device, 1);
SDL_CloseAudioDevice(ap->device);

free(ap->swr_buf);
sc_bytebuf_destroy(&ap->buf);
swr_free(&ap->swr_ctx);
}

static bool
@@ -148,12 +223,12 @@ sc_audio_player_frame_sink_push(struct sc_frame_sink *sink, const AVFrame *frame
LOGE("Resampling failed: %d", ret);
return false;
}
LOGI("ret=%d dst_nb_samples=%d\n", ret, dst_nb_samples);

size_t swr_buf_size = sc_audio_player_get_swr_buf_size(ap, ret);
LOGI("== swr_buf_size %lu", swr_buf_size);

// TODO clock drift compensation
size_t samples_written = ret;
size_t swr_buf_size = sc_audio_player_get_buf_size(ap, samples_written);
#ifndef SC_AUDIO_PLAYER_NDEBUG
LOGI("[Audio] %" SC_PRIsizet " samples written to buffer", samples_written);
#endif

// It should almost always be possible to write without lock
bool can_write_without_lock = swr_buf_size <= ap->safe_empty_buffer;
@@ -170,36 +245,39 @@ sc_audio_player_frame_sink_push(struct sc_frame_sink *sink, const AVFrame *frame

// The next time, it will remain at least the current empty space
ap->safe_empty_buffer = sc_bytebuf_write_remaining(&ap->buf);

// Read the value written by the SDL thread under lock
float avg;
bool has_avg = sc_average_get(&ap->avg_buffered_samples, &avg);

SDL_UnlockAudioDevice(ap->device);

if (has_avg) {
ap->samples_since_resync += samples_written;
if (ap->samples_since_resync >= ap->sample_rate) {
// Resync every second
ap->samples_since_resync = 0;

int diff = SC_TARGET_BUFFERED_SAMPLES - avg;
#ifndef SC_AUDIO_PLAYER_NDEBUG
LOGI("[Audio] Average buffered samples = %f, compensation %d",
avg, diff);
#endif
// Compensate the diff over 3 seconds (but will be recomputed after
// 1 second)
int ret = swr_set_compensation(swr_ctx, diff, 3 * ap->sample_rate);
if (ret < 0) {
LOGW("Resampling compensation failed: %d", ret);
// not fatal
}
}
}

return true;
}

bool
sc_audio_player_init(struct sc_audio_player *ap,
const struct sc_audio_player_callbacks *cbs,
void *cbs_userdata) {
bool ok = sc_bytebuf_init(&ap->buf, 128 * 1024);
if (!ok) {
return false;
}

ap->swr_ctx = swr_alloc();
if (!ap->swr_ctx) {
sc_bytebuf_destroy(&ap->buf);
LOG_OOM();
return false;
}

ap->safe_empty_buffer = sc_bytebuf_write_remaining(&ap->buf);

ap->swr_buf = NULL;
ap->swr_buf_alloc_size = 0;

assert(cbs && cbs->on_ended);
ap->cbs = cbs;
ap->cbs_userdata = cbs_userdata;

void
sc_audio_player_init(struct sc_audio_player *ap) {
static const struct sc_frame_sink_ops ops = {
.open = sc_audio_player_frame_sink_open,
.close = sc_audio_player_frame_sink_close,

@@ -207,12 +285,4 @@ sc_audio_player_init(struct sc_audio_player *ap,
};

ap->frame_sink.ops = &ops;
return true;
}

void
sc_audio_player_destroy(struct sc_audio_player *ap) {
sc_bytebuf_destroy(&ap->buf);
swr_free(&ap->swr_ctx);
free(ap->swr_buf);
}
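To illustrate the swr_set_compensation() call used above, here is a standalone libswresample sketch; the formats, rates and the 240-sample deficit are made-up example values (it also assumes FFmpeg 5.1+ for the AVChannelLayout API), not values taken from the patch:

```c
#include <libavutil/channel_layout.h>
#include <libavutil/samplefmt.h>
#include <libswresample/swresample.h>

// Illustrative only: configure a resampler and request drift compensation,
// the way the player asks libswresample to absorb a buffering deficit.
int main(void) {
    AVChannelLayout stereo = AV_CHANNEL_LAYOUT_STEREO;

    SwrContext *swr_ctx = NULL;
    int ret = swr_alloc_set_opts2(&swr_ctx,
                                  &stereo, AV_SAMPLE_FMT_FLT, 48000,  // output
                                  &stereo, AV_SAMPLE_FMT_FLTP, 48000, // input
                                  0, NULL);
    if (ret < 0 || swr_init(swr_ctx) < 0)
        return 1;

    // Example values: the smoothed measurement reports 1200 buffered samples
    // while the target is 1440, so ask the resampler to produce 240 extra
    // samples spread over the next 3 seconds of output.
    int target = 1440;
    int avg = 1200;
    int diff = target - avg;
    if (swr_set_compensation(swr_ctx, diff, 3 * 48000) < 0)
        return 1; // the real player only logs a warning here

    swr_free(&swr_ctx);
    return 0;
}
```

swr_set_compensation(ctx, sample_delta, compensation_distance) stretches or shrinks the output by sample_delta samples distributed over the next compensation_distance output samples, so the buffer level is nudged back toward its target with only a small, usually inaudible speed change.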
app/src/audio_player.h

@@ -5,6 +5,7 @@

#include <stdbool.h>
#include "trait/frame_sink.h"
#include <util/average.h>
#include <util/bytebuf.h>
#include <util/thread.h>
@@ -35,6 +36,10 @@ struct sc_audio_player {
uint8_t *swr_buf;
size_t swr_buf_alloc_size;

// Number of buffered samples (may be negative on underflow)
struct sc_average avg_buffered_samples;
unsigned samples_since_resync;

const struct sc_audio_player_callbacks *cbs;
void *cbs_userdata;
};
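The struct gains a struct sc_average for the buffering measurements; util/average.c itself is not part of this diff. As a rough illustration of the kind of smoothing such a helper provides (a generic sketch, not the project's implementation):

```c
#include <stdbool.h>
#include <stdio.h>

// Illustrative only: an iterative average over a fixed range, similar in
// spirit to pushing one measurement per SDL callback and reading a smoothed
// value from the producer thread (this is not the util/average.c code).
struct avg {
    unsigned range; // smoothing window, e.g. 32 callbacks
    unsigned count; // values pushed so far, capped at range
    float value;
};

static void
avg_push(struct avg *a, float v) {
    if (a->count < a->range)
        ++a->count;
    // weight the new sample by 1/count (1/range once the window is full)
    a->value += (v - a->value) / a->count;
}

static bool
avg_get(const struct avg *a, float *out) {
    if (!a->count)
        return false; // nothing pushed yet
    *out = a->value;
    return true;
}

int main(void) {
    struct avg a = { .range = 32 };
    for (int i = 0; i < 100; ++i)
        avg_push(&a, 1440.f + (i % 2 ? 50.f : -50.f)); // noisy measurements
    float v;
    if (avg_get(&a, &v))
        printf("smoothed buffered samples: %f\n", v); // hovers around 1440
    return 0;
}
```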
@@ -43,12 +48,7 @@ struct sc_audio_player_callbacks {
void (*on_ended)(struct sc_audio_player *ap, bool success, void *userdata);
};

bool
sc_audio_player_init(struct sc_audio_player *ap,
const struct sc_audio_player_callbacks *cbs,
void *cbs_userdata);

void
sc_audio_player_destroy(struct sc_audio_player *ap);
sc_audio_player_init(struct sc_audio_player *ap);

#endif
app/src/scrcpy.c

@@ -217,17 +217,6 @@ sc_recorder_on_ended(struct sc_recorder *recorder, bool success,
}
}

static void
sc_audio_player_on_ended(struct sc_audio_player *ap, bool success,
void *userdata) {
(void) ap;
(void) userdata;

if (!success) {
// TODO
}
}

static void
sc_video_demuxer_on_ended(struct sc_demuxer *demuxer, bool eos,
void *userdata) {

@@ -314,7 +303,6 @@ scrcpy(struct scrcpy_options *options) {
bool file_pusher_initialized = false;
bool recorder_initialized = false;
bool recorder_started = false;
bool audio_player_initialized = false;
#ifdef HAVE_V4L2
bool v4l2_sink_initialized = false;
#endif

@@ -686,15 +674,7 @@ aoa_hid_end:
sc_decoder_add_sink(&s->video_decoder, &s->screen.frame_sink);

if (options->audio) {
static const struct sc_audio_player_callbacks audio_player_cbs = {
.on_ended = sc_audio_player_on_ended,
};
if (!sc_audio_player_init(&s->audio_player,
&audio_player_cbs, NULL)) {
goto end;
}
audio_player_initialized = true;

sc_audio_player_init(&s->audio_player);
sc_decoder_add_sink(&s->audio_decoder, &s->audio_player.frame_sink);
}
}

@@ -817,10 +797,6 @@ end:
sc_recorder_destroy(&s->recorder);
}

if (audio_player_initialized) {
sc_audio_player_destroy(&s->audio_player);
}

if (file_pusher_initialized) {
sc_file_pusher_join(&s->file_pusher);
sc_file_pusher_destroy(&s->file_pusher);
app/src/v4l2_sink.c

@@ -338,9 +338,9 @@ sc_v4l2_sink_push(struct sc_v4l2_sink *vs, const AVFrame *frame) {
}

static bool
sc_v4l2_frame_sink_open(struct sc_frame_sink *sink) {
sc_v4l2_frame_sink_open(struct sc_frame_sink *sink, const AVCodecContext *ctx) {
struct sc_v4l2_sink *vs = DOWNCAST(sink);
return sc_v4l2_sink_open(vs);
return sc_v4l2_sink_open(vs, ctx);
}

static void
server/src/main/java/com/genymobile/scrcpy/AudioEncoder.java

@@ -1,7 +1,11 @@
package com.genymobile.scrcpy;

import com.genymobile.scrcpy.wrappers.ServiceManager;

import android.annotation.SuppressLint;
import android.annotation.TargetApi;
import android.content.ComponentName;
import android.content.Intent;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.AudioTimestamp;

@@ -12,6 +16,7 @@ import android.os.Build;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.Looper;
import android.os.SystemClock;

import java.io.IOException;
import java.nio.ByteBuffer;

@@ -40,10 +45,13 @@ public final class AudioEncoder {
}

private static final int SAMPLE_RATE = 48000;
private static final int CHANNEL_CONFIG = AudioFormat.CHANNEL_IN_STEREO;
private static final int CHANNELS = 2;
private static final int FORMAT = AudioFormat.ENCODING_PCM_16BIT;
private static final int BYTES_PER_SAMPLE = 2;

private static final int BUFFER_MS = 10; // milliseconds
private static final int BUFFER_SIZE = SAMPLE_RATE * CHANNELS * BUFFER_MS / 1000;
private static final int BUFFER_MS = 5; // milliseconds
private static final int BUFFER_SIZE = SAMPLE_RATE * CHANNELS * BYTES_PER_SAMPLE * BUFFER_MS / 1000;

private final Streamer streamer;
private final int bitRate;
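Worked out, both BUFFER_SIZE expressions evaluate to 960: the old formula (48000 × 2 × 10 / 1000) did not account for the 2 bytes of each 16-bit sample, so as a byte count it only covered 5 ms, while the new one (48000 × 2 × 2 × 5 / 1000) includes BYTES_PER_SAMPLE and halves BUFFER_MS, so each read explicitly corresponds to 5 ms of stereo 16-bit PCM.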
@@ -72,9 +80,9 @@

private static AudioFormat createAudioFormat() {
AudioFormat.Builder builder = new AudioFormat.Builder();
builder.setEncoding(AudioFormat.ENCODING_PCM_16BIT);
builder.setEncoding(FORMAT);
builder.setSampleRate(SAMPLE_RATE);
builder.setChannelMask(CHANNELS == 2 ? AudioFormat.CHANNEL_IN_STEREO : AudioFormat.CHANNEL_IN_MONO);
builder.setChannelMask(CHANNEL_CONFIG);
return builder.build();
}

@@ -88,7 +96,8 @@
}
builder.setAudioSource(MediaRecorder.AudioSource.REMOTE_SUBMIX);
builder.setAudioFormat(createAudioFormat());
builder.setBufferSizeInBytes(1024 * 1024);
int minBufferSize = AudioRecord.getMinBufferSize(SAMPLE_RATE, CHANNEL_CONFIG, FORMAT);
builder.setBufferSizeInBytes(minBufferSize);
return builder.build();
}
@@ -211,6 +220,32 @@
}
}

private static void startWorkaroundAndroid11() {
if (Build.VERSION.SDK_INT == Build.VERSION_CODES.R) {
// Android 11 requires apps to be in the foreground to record audio.
// Normally, each app has its own user ID, so Android checks whether the requesting app has a user ID that is in the foreground.
// But the scrcpy server is NOT an app: it is a Java application started from the Android shell, so it has the same user ID (2000) as the
// Android shell ("com.android.shell").
// If there is an activity from the Android shell running in the foreground, the permission system will believe that scrcpy is also in
// the foreground.
if (Build.VERSION.SDK_INT == Build.VERSION_CODES.R) {
Intent intent = new Intent(Intent.ACTION_MAIN);
intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
intent.addCategory(Intent.CATEGORY_LAUNCHER);
intent.setComponent(new ComponentName(FakeContext.PACKAGE_NAME, "com.android.shell.HeapDumpActivity"));
ServiceManager.getActivityManager().startActivityAsUserWithFeature(intent);
// Wait for activity to start
SystemClock.sleep(150);
}
}
}

private static void stopWorkaroundAndroid11() {
if (Build.VERSION.SDK_INT == Build.VERSION_CODES.R) {
ServiceManager.getActivityManager().forceStopPackage(FakeContext.PACKAGE_NAME);
}
}

@TargetApi(Build.VERSION_CODES.M)
public void encode() throws IOException {
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.R) {

@@ -228,7 +263,6 @@
try {
Codec codec = streamer.getCodec();
mediaCodec = createMediaCodec(codec, encoderName);
recorder = createAudioRecord();

mediaCodecThread = new HandlerThread("AudioEncoder");
mediaCodecThread.start();

@@ -237,7 +271,19 @@
mediaCodec.setCallback(new EncoderCallback(), new Handler(mediaCodecThread.getLooper()));
mediaCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);

recorder.startRecording();
startWorkaroundAndroid11();
try {
recorder = createAudioRecord();
recorder.startRecording();
} catch (UnsupportedOperationException e) {
if (Build.VERSION.SDK_INT == Build.VERSION_CODES.R) {
Ln.e("Failed to start audio capture");
Ln.e("On Android 11, it is only possible to capture in foreground, make sure that the device is unlocked when starting scrcpy.");
throw new ConfigurationException("Unsupported audio capture");
}
} finally {
stopWorkaroundAndroid11();
}
recorderStarted = true;

final MediaCodec mediaCodecRef = mediaCodec;
server/src/main/java/com/genymobile/scrcpy/wrappers/ActivityManager.java

@@ -1,8 +1,14 @@
package com.genymobile.scrcpy.wrappers;

import com.genymobile.scrcpy.FakeContext;
import com.genymobile.scrcpy.Ln;

import android.annotation.SuppressLint;
import android.annotation.TargetApi;
import android.content.Intent;
import android.os.Binder;
import android.os.Build;
import android.os.Bundle;
import android.os.IBinder;
import android.os.IInterface;
import android.os.Process;

@@ -11,12 +17,15 @@ import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;

@SuppressLint("PrivateApi,DiscouragedPrivateApi")
public class ActivityManager {

private final IInterface manager;
private Method getContentProviderExternalMethod;
private boolean getContentProviderExternalMethodNewVersion = true;
private Method removeContentProviderExternalMethod;
private Method startActivityAsUserWithFeatureMethod;
private Method forceStopPackageMethod;

public ActivityManager(IInterface manager) {
this.manager = manager;

@@ -43,6 +52,7 @@ public class ActivityManager {
return removeContentProviderExternalMethod;
}

@TargetApi(Build.VERSION_CODES.Q)
private ContentProvider getContentProviderExternal(String name, IBinder token) {
try {
Method method = getGetContentProviderExternalMethod();
@@ -85,4 +95,55 @@ public class ActivityManager {
public ContentProvider createSettingsProvider() {
return getContentProviderExternal("settings", new Binder());
}

private Method getStartActivityAsUserWithFeatureMethod() throws NoSuchMethodException, ClassNotFoundException {
if (startActivityAsUserWithFeatureMethod == null) {
Class<?> iApplicationThreadClass = Class.forName("android.app.IApplicationThread");
Class<?> profilerInfo = Class.forName("android.app.ProfilerInfo");
startActivityAsUserWithFeatureMethod = manager.getClass()
.getMethod("startActivityAsUserWithFeature", iApplicationThreadClass, String.class, String.class, Intent.class, String.class,
IBinder.class, String.class, int.class, int.class, profilerInfo, Bundle.class, int.class);
}
return startActivityAsUserWithFeatureMethod;
}

@SuppressWarnings("ConstantConditions")
public int startActivityAsUserWithFeature(Intent intent) {
try {
Method method = getStartActivityAsUserWithFeatureMethod();
return (int) method.invoke(
/* this */ manager,
/* caller */ null,
/* callingPackage */ FakeContext.PACKAGE_NAME,
/* callingFeatureId */ null,
/* intent */ intent,
/* resolvedType */ null,
/* resultTo */ null,
/* resultWho */ null,
/* requestCode */ 0,
/* startFlags */ 0,
/* profilerInfo */ null,
/* bOptions */ null,
/* userId */ /* UserHandle.USER_CURRENT */ -2);
} catch (Throwable e) {
Ln.e("Could not invoke method", e);
return 0;
}
}

private Method getForceStopPackageMethod() throws NoSuchMethodException {
if (forceStopPackageMethod == null) {
forceStopPackageMethod = manager.getClass().getMethod("forceStopPackage", String.class, int.class);
}
return forceStopPackageMethod;
}

public void forceStopPackage(String packageName) {
try {
Method method = getForceStopPackageMethod();
method.invoke(manager, packageName, /* userId */ /* UserHandle.USER_CURRENT */ -2);
} catch (Throwable e) {
Ln.e("Could not invoke method", e);
}
}
}