Compare commits


11 Commits

Author SHA1 Message Date
Romain Vimont
e0d87bd903 AudioEncoder WIP
2023-02-02 10:25:07 +01:00
Simon Chan
97014889c3 Capture device audio WIP
Co-authored-by: Romain Vimont <rom@rom1v.com>
Signed-off-by: Romain Vimont <rom@rom1v.com>
2023-02-01 23:06:01 +01:00
Romain Vimont
05462a5467 Move Workarounds call
Workarounds are not specific to the screen encoder.

Co-authored-by: Simon Chan <1330321+yume-chan@users.noreply.github.com>
2023-02-01 23:05:25 +01:00
Romain Vimont
05fa3a3358 Use FakeContext for Application instance
This will expose the correct package name and UID through the
application context.
2023-02-01 22:54:12 +01:00
Romain Vimont
4e115065a2 Use shell package name for workarounds
For consistency.
2023-02-01 22:54:12 +01:00
Romain Vimont
5174dfaad9 Use PACKAGE_NAME from FakeContext
Remove duplicated constant.
2023-02-01 22:54:12 +01:00
Romain Vimont
6048aa7378 Use AttributionSource from FakeContext
FakeContext already provides an AttributionSource instance.

Co-authored-by: Simon Chan <1330321+yume-chan@users.noreply.github.com>
2023-02-01 22:54:12 +01:00
Simon Chan
2871ebe31a Add a fake Android Context
Since scrcpy-server is not an Android application (it's a Java
executable), it has no Context.

Some features will require a Context instance to get the package name
and the UID. Add a FakeContext for this purpose.

Co-authored-by: Romain Vimont <rom@rom1v.com>
Signed-off-by: Romain Vimont <rom@rom1v.com>
2023-02-01 22:54:12 +01:00
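The FakeContext file itself is not part of this compare view. Below is a minimal sketch of the idea, assuming the shell package name and the Process.ROOT_UID constant mentioned in these commits; the exact fields and overrides are illustrative, not the actual file.

package com.genymobile.scrcpy;

import android.annotation.TargetApi;
import android.content.AttributionSource;
import android.content.ContextWrapper;
import android.os.Build;
import android.os.Process;

// Sketch only: a plain Java process has no real Context, so wrap a null base
// and override just what callers need (package name, UID, attribution).
public final class FakeContext extends ContextWrapper {

    public static final String PACKAGE_NAME = "com.android.shell"; // assumed value

    private static final FakeContext INSTANCE = new FakeContext();

    public static FakeContext get() {
        return INSTANCE;
    }

    private FakeContext() {
        super(null);
    }

    @Override
    public String getPackageName() {
        return PACKAGE_NAME;
    }

    @Override
    public String getOpPackageName() {
        return PACKAGE_NAME;
    }

    @TargetApi(Build.VERSION_CODES.S)
    @Override
    public AttributionSource getAttributionSource() {
        AttributionSource.Builder builder = new AttributionSource.Builder(Process.ROOT_UID);
        builder.setPackageName(PACKAGE_NAME);
        return builder.build();
    }
}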
Romain Vimont
2989566c55 Use Process.ROOT_UID
Replace ServiceManager.USER_ID with the existing constant Process.ROOT_UID.
2023-02-01 22:54:12 +01:00
Romain Vimont
653387acdb Compute relative PTS on the client-side
The PTS values produced by MediaCodec are expressed relative to an arbitrary
clock origin. We consider the PTS of the first frame to be 0, and the
PTS of every other frame is relative to this first PTS (note that the
PTS is only used for recording, it is ignored for mirroring).

For simplicity, this relative PTS was computed on the server-side.

To prepare support for multiple streams (video and audio), send the
packet with its original PTS, and handle the PTS offset on the
client-side (by the recorder).

Since we can't know in advance which stream will produce the first
packet with the lowest PTS (a packet received later on one stream may
have a PTS lower than a packet received earlier on another stream),
computing the relative PTS on the server-side would require unnecessary waiting.
2023-02-01 22:54:12 +01:00
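The client-side rebasing lands in the C recorder shown further below; as a compact illustration, here is a Java sketch of the same logic (the class and method names are hypothetical).

// Hypothetical names; the real implementation is the C recorder below.
final class PtsOrigin {
    private static final long NONE = -1;

    private long origin = NONE;

    // Rebase so that the first PTS received (across all streams) becomes 0.
    long rebase(long pts) {
        if (origin == NONE) {
            origin = pts;
        }
        return pts - origin;
    }
}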
Romain Vimont
d7f3683a1b Do not expose controller threads
The way the controller executes its events asynchronously is an
implementation detail.
2023-02-01 22:54:10 +01:00
8 changed files with 297 additions and 52 deletions

View File

@@ -11,6 +11,8 @@
/** Downcast packet_sink to recorder */
#define DOWNCAST(SINK) container_of(SINK, struct sc_recorder, packet_sink)
#define SC_PTS_ORIGIN_NONE UINT64_C(-1)
static const AVRational SCRCPY_TIME_BASE = {1, 1000000}; // timestamps in us
static const AVOutputFormat *
@@ -128,6 +130,8 @@ sc_recorder_write(struct sc_recorder *recorder, AVPacket *packet) {
return true;
}
LOGI("==== %" PRIu64, packet->pts);
sc_recorder_rescale_packet(recorder, packet);
return av_write_frame(recorder->ctx, packet) >= 0;
}
@@ -169,6 +173,18 @@ run_recorder(void *data) {
sc_mutex_unlock(&recorder->mutex);
if (recorder->pts_origin == SC_PTS_ORIGIN_NONE
&& rec->packet->pts != AV_NOPTS_VALUE) {
// First PTS received
recorder->pts_origin = rec->packet->pts;
}
if (rec->packet->pts != AV_NOPTS_VALUE) {
// Set PTS relative to the origin
rec->packet->pts -= recorder->pts_origin;
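// no B-frames are expected here, so DTS can simply equal PTS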
rec->packet->dts = rec->packet->pts;
}
// recorder->previous is only written from this thread, no need to lock
struct sc_record_packet *previous = recorder->previous;
recorder->previous = rec;
@@ -243,6 +259,7 @@ sc_recorder_open(struct sc_recorder *recorder, const AVCodec *input_codec) {
recorder->failed = false;
recorder->header_written = false;
recorder->previous = NULL;
recorder->pts_origin = SC_PTS_ORIGIN_NONE;
const char *format_name = sc_recorder_get_format_name(recorder->format);
assert(format_name);

View File

@@ -28,6 +28,8 @@ struct sc_recorder {
struct sc_size declared_frame_size;
bool header_written;
uint64_t pts_origin;
sc_thread thread;
sc_mutex mutex;
sc_cond queue_cond;

View File

@@ -0,0 +1,196 @@
package com.genymobile.scrcpy;
import com.genymobile.scrcpy.wrappers.ServiceManager;
import android.annotation.SuppressLint;
import android.annotation.TargetApi;
import android.content.ComponentName;
import android.content.Intent;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.AudioTimestamp;
import android.media.MediaCodec;
import android.media.MediaFormat;
import android.media.MediaRecorder;
import android.os.Build;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.SystemClock;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.concurrent.Semaphore;
import java.util.concurrent.atomic.AtomicBoolean;
public final class AudioEncoder {
private static final String MIMETYPE = MediaFormat.MIMETYPE_AUDIO_OPUS;
private static final int SAMPLE_RATE = 48000;
private static final int CHANNELS = 2;
private static final int BIT_RATE = 128000;
private static final int BUFFER_MS = 15; // milliseconds
private static final int BUFFER_SIZE = SAMPLE_RATE * CHANNELS * 2 * BUFFER_MS / 1000; // bytes (16-bit PCM)
private AudioRecord recorder;
private MediaCodec mediaCodec;
private HandlerThread thread;
private final AtomicBoolean interrupted = new AtomicBoolean();
private final Semaphore endSemaphore = new Semaphore(0); // blocks until encoding is ended
private static AudioFormat createAudioFormat() {
AudioFormat.Builder builder = new AudioFormat.Builder();
builder.setEncoding(AudioFormat.ENCODING_PCM_16BIT);
builder.setSampleRate(SAMPLE_RATE);
builder.setChannelMask(CHANNELS == 2 ? AudioFormat.CHANNEL_IN_STEREO : AudioFormat.CHANNEL_IN_MONO);
return builder.build();
}
@TargetApi(Build.VERSION_CODES.M)
@SuppressLint({"WrongConstant", "MissingPermission"})
private static AudioRecord createAudioRecord() {
AudioRecord.Builder builder = new AudioRecord.Builder();
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.S) {
// On older APIs, Workarounds.fillAppInfo() must be called beforehand
builder.setContext(FakeContext.get());
}
builder.setAudioSource(MediaRecorder.AudioSource.REMOTE_SUBMIX);
builder.setAudioFormat(createAudioFormat());
builder.setBufferSizeInBytes(1024 * 1024);
return builder.build();
}
private static MediaFormat createFormat() {
MediaFormat format = new MediaFormat();
format.setString(MediaFormat.KEY_MIME, MIMETYPE);
format.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE);
format.setInteger(MediaFormat.KEY_CHANNEL_COUNT, CHANNELS);
format.setInteger(MediaFormat.KEY_SAMPLE_RATE, SAMPLE_RATE);
return format;
}
@TargetApi(Build.VERSION_CODES.M)
public void start() throws IOException {
mediaCodec = MediaCodec.createEncoderByType(MIMETYPE); // may throw IOException
recorder = createAudioRecord();
MediaFormat format = createFormat();
mediaCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
recorder.startRecording();
thread = new HandlerThread("AudioEncoder");
thread.start();
class AudioEncoderCallbacks extends MediaCodec.Callback {
private final AudioTimestamp timestamp = new AudioTimestamp();
private long nextPts;
private boolean eofSignaled;
private boolean ended;
private void notifyEnded() {
assert(!ended);
ended = true;
endSemaphore.release();
}
@TargetApi(Build.VERSION_CODES.N)
@Override
public void onInputBufferAvailable(MediaCodec codec, int index) {
if (eofSignaled) {
return;
}
ByteBuffer inputBuffer = codec.getInputBuffer(index);
int r = recorder.read(inputBuffer, BUFFER_SIZE);
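// Use the capture timestamp reported by AudioRecord when available;
// otherwise extrapolate from the previous packet (fallback below)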
long pts;
int ret = recorder.getTimestamp(timestamp, AudioTimestamp.TIMEBASE_MONOTONIC);
if (ret == AudioRecord.SUCCESS) {
pts = timestamp.nanoTime / 1000;
} else {
if (nextPts == 0) {
Ln.w("Could not get any audio timestamp");
}
// compute from previous timestamp and packet size
pts = nextPts;
}
// r is in bytes; 16-bit PCM uses 2 bytes per sample and channel
long durationUs = (r / (2 * CHANNELS)) * 1000000L / SAMPLE_RATE;
nextPts = pts + durationUs;
int flags = 0;
if (interrupted.get()) {
flags = flags | MediaCodec.BUFFER_FLAG_END_OF_STREAM;
eofSignaled = true;
}
codec.queueInputBuffer(index, 0, r, pts, flags);
}
@Override
public void onOutputBufferAvailable(MediaCodec codec, int index, MediaCodec.BufferInfo bufferInfo) {
if (ended) {
return;
}
ByteBuffer codecBuffer = codec.getOutputBuffer(index);
try {
boolean isConfig = (bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0;
long pts = bufferInfo.presentationTimeUs;
Ln.i("Audio packet: pts=" + pts + " " + codecBuffer.remaining() + " bytes");
} finally {
codec.releaseOutputBuffer(index, false);
}
boolean eof = (bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0;
if (eof) {
notifyEnded();
}
}
@Override
public void onError(MediaCodec codec, MediaCodec.CodecException e) {
Ln.e("MediaCodec error", e);
if (!ended) {
notifyEnded();
}
}
@Override
public void onOutputFormatChanged(MediaCodec codec, MediaFormat format) {
// ignore
}
}
mediaCodec.setCallback(new AudioEncoderCallbacks(), new Handler(thread.getLooper()));
mediaCodec.start();
}
private void waitEnded() {
try {
endSemaphore.acquire();
} catch (InterruptedException e) {
// ignore
}
}
public void stop() {
Ln.i("==== STOP");
if (thread != null) {
interrupted.set(true);
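// the EOS flag will be set on the next input buffer; the output callback
// then releases endSemaphore once the encoder has drained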
waitEnded();
thread.interrupt();
thread = null;
mediaCodec.stop();
mediaCodec.release();
recorder.stop();
Ln.i("==== STOPPED");
}
}
}

View File

@@ -24,6 +24,8 @@ public class Controller {
private static final ScheduledExecutorService EXECUTOR = Executors.newSingleThreadScheduledExecutor();
private Thread thread;
private final Device device;
private final DesktopConnection connection;
private final DeviceMessageSender sender;
@@ -62,7 +64,7 @@ public class Controller {
}
}
public void control() throws IOException {
private void control() throws IOException {
// on start, power on the device
if (powerOn && !Device.isScreenOn()) {
device.pressReleaseKeycode(KeyEvent.KEYCODE_POWER, Device.INJECT_MODE_ASYNC);
@@ -82,6 +84,27 @@ public class Controller {
}
}
public void start() {
thread = new Thread(() -> {
try {
control();
} catch (IOException e) {
// this is expected on close
Ln.d("Controller stopped");
}
});
thread.start();
sender.start();
}
public void stop() {
if (thread != null) {
thread.interrupt();
thread = null;
}
sender.stop();
}
public DeviceMessageSender getSender() {
return sender;
}

View File

@@ -6,6 +6,8 @@ public final class DeviceMessageSender {
private final DesktopConnection connection;
private Thread thread;
private String clipboardText;
private long ack;
@@ -24,7 +26,7 @@ public final class DeviceMessageSender {
notify();
}
public void loop() throws IOException, InterruptedException {
private void loop() throws IOException, InterruptedException {
while (!Thread.currentThread().isInterrupted()) {
String text;
long sequence;
@@ -49,4 +51,22 @@ public final class DeviceMessageSender {
}
}
}
public void start() {
thread = new Thread(() -> {
try {
loop();
} catch (IOException | InterruptedException e) {
// this is expected on close
Ln.d("Device message sender stopped");
}
});
thread.start();
}
public void stop() {
if (thread != null) {
thread.interrupt();
thread = null;
}
}
}

View File

@@ -42,7 +42,6 @@ public class ScreenEncoder implements Device.RotationListener {
private final int maxFps;
private final boolean sendFrameMeta;
private final boolean downsizeOnError;
private long ptsOrigin;
private boolean firstFrameSent;
private int consecutiveErrors;
@@ -207,10 +206,7 @@ public class ScreenEncoder implements Device.RotationListener {
if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
pts = PACKET_FLAG_CONFIG; // non-media data packet
} else {
if (ptsOrigin == 0) {
ptsOrigin = bufferInfo.presentationTimeUs;
}
pts = bufferInfo.presentationTimeUs - ptsOrigin;
pts = bufferInfo.presentationTimeUs;
if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_KEY_FRAME) != 0) {
pts |= PACKET_FLAG_KEY_FRAME;
}

View File

@@ -4,6 +4,7 @@ import android.graphics.Rect;
import android.media.MediaCodecInfo;
import android.os.BatteryManager;
import android.os.Build;
import android.provider.MediaStore;
import java.io.IOException;
import java.util.List;
@@ -66,18 +67,27 @@ public final class Server {
Thread initThread = startInitThread(options);
Workarounds.prepareMainLooper();
if (Build.BRAND.equalsIgnoreCase("meizu")) {
// <https://github.com/Genymobile/scrcpy/issues/240>
// <https://github.com/Genymobile/scrcpy/issues/2656>
Workarounds.fillAppInfo();
}
int uid = options.getUid();
boolean tunnelForward = options.isTunnelForward();
boolean control = options.getControl();
boolean audio = Build.VERSION.SDK_INT >= Build.VERSION_CODES.R; // TODO option
boolean sendDummyByte = options.getSendDummyByte();
Workarounds.prepareMainLooper();
// <https://github.com/Genymobile/scrcpy/issues/240>
// <https://github.com/Genymobile/scrcpy/issues/2656>
boolean mustFillAppInfo = Build.BRAND.equalsIgnoreCase("meizu");
// Before Android 11, audio is not supported.
// Since Android 12, we can properly set a context on the AudioRecord.
// Only on Android 11 do we need to fill app info for the AudioRecord to work.
mustFillAppInfo |= audio && Build.VERSION.SDK_INT == Build.VERSION_CODES.R;
if (mustFillAppInfo) {
Workarounds.fillAppInfo();
}
try (DesktopConnection connection = DesktopConnection.open(uid, tunnelForward, control, sendDummyByte)) {
if (options.getSendDeviceMeta()) {
Size videoSize = device.getScreenInfo().getVideoSize();
@@ -86,16 +96,19 @@ public final class Server {
ScreenEncoder screenEncoder = new ScreenEncoder(options.getSendFrameMeta(), options.getBitRate(), options.getMaxFps(), codecOptions,
options.getEncoderName(), options.getDownsizeOnError());
Thread controllerThread = null;
Thread deviceMessageSenderThread = null;
Controller controller = null;
if (control) {
final Controller controller = new Controller(device, connection, options.getClipboardAutosync(), options.getPowerOn());
controller = new Controller(device, connection, options.getClipboardAutosync(), options.getPowerOn());
controller.start();
// asynchronous
controllerThread = startController(controller);
deviceMessageSenderThread = startDeviceMessageSender(controller.getSender());
final Controller controllerRef = controller;
device.setClipboardListener(text -> controllerRef.getSender().pushClipboardText(text));
}
device.setClipboardListener(text -> controller.getSender().pushClipboardText(text));
AudioEncoder audioEncoder = null;
if (audio) {
audioEncoder = new AudioEncoder();
audioEncoder.start();
}
try {
@@ -106,11 +119,11 @@ public final class Server {
Ln.d("Screen streaming stopped");
} finally {
initThread.interrupt();
if (controllerThread != null) {
controllerThread.interrupt();
if (controller != null) {
controller.stop();
}
if (deviceMessageSenderThread != null) {
deviceMessageSenderThread.interrupt();
if (audioEncoder != null) {
audioEncoder.stop();
}
}
}
@@ -122,32 +135,6 @@ public final class Server {
return thread;
}
private static Thread startController(final Controller controller) {
Thread thread = new Thread(() -> {
try {
controller.control();
} catch (IOException e) {
// this is expected on close
Ln.d("Controller stopped");
}
});
thread.start();
return thread;
}
private static Thread startDeviceMessageSender(final DeviceMessageSender sender) {
Thread thread = new Thread(() -> {
try {
sender.loop();
} catch (IOException | InterruptedException e) {
// this is expected on close
Ln.d("Device message sender stopped");
}
});
thread.start();
return thread;
}
private static Options createOptions(String... args) {
if (args.length < 1) {
throw new IllegalArgumentException("Missing client version");

View File

@@ -3,6 +3,7 @@ package com.genymobile.scrcpy;
import android.annotation.SuppressLint;
import android.app.Application;
import android.app.Instrumentation;
import android.content.ContextWrapper;
import android.content.pm.ApplicationInfo;
import android.os.Looper;
@@ -60,7 +61,10 @@ public final class Workarounds {
mBoundApplicationField.setAccessible(true);
mBoundApplicationField.set(activityThread, appBindData);
Application app = Instrumentation.newApplication(Application.class, FakeContext.get());
Application app = Application.class.newInstance();
Field baseField = ContextWrapper.class.getDeclaredField("mBase");
baseField.setAccessible(true);
baseField.set(app, FakeContext.get());
// activityThread.mInitialApplication = app;
Field mInitialApplicationField = activityThreadClass.getDeclaredField("mInitialApplication");