Compare commits


2 Commits

Author SHA1 Message Date
Romain Vimont
b0a4e6df25 Retry on recoverable MediaCodec errors
Refs <https://developer.android.com/reference/android/media/MediaCodec#error-handling>
Fixes #3693 <https://github.com/Genymobile/scrcpy/issues/3693>
2023-01-27 23:15:44 +01:00
Romain Vimont
545a8a8f32 Extract downsize-retry handling
Move the code to downscale and retry on error out of the catch-block.

Refs 26b4104844
2023-01-27 23:15:03 +01:00
40 changed files with 254 additions and 1252 deletions
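
The approach of the first commit, following the linked Android MediaCodec error-handling documentation, is to retry when the codec reports a transient or recoverable error, and to give up (or fall back to downsizing) otherwise. Below is a minimal sketch of that policy, assuming an Android encode loop; it is an illustration, not the scrcpy sources (the corresponding catch block appears in the ScreenEncoder diff further down):

```java
import android.media.MediaCodec;
import android.os.SystemClock;

// Sketch of the retry policy described by the first commit: transient and
// recoverable MediaCodec errors may be retried, anything else is fatal.
final class CodecErrorPolicy {

    private CodecErrorPolicy() {
        // not instantiable
    }

    static boolean shouldRetry(MediaCodec.CodecException e) {
        // For simplicity, handle isTransient() like isRecoverable()
        if (e.isRecoverable() || e.isTransient()) {
            // Wait a bit to avoid a busy-loop if many errors are generated
            SystemClock.sleep(50);
            return true;
        }
        // Fatal error: the caller should give up (or try downsizing first)
        return false;
    }
}
```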

View File

@@ -252,22 +252,10 @@ This affects recording orientation.
The [window may also be rotated](#rotation) independently.
#### Codec
#### Encoder
The video codec can be selected. The possible values are `h264` (default),
`h265` and `av1`:
```bash
scrcpy --codec=h264 # default
scrcpy --codec=h265
scrcpy --codec=av1
```
##### Encoder
Some devices have more than one encoder for a specific codec, and some of them
may cause issues or crash. It is possible to select a different encoder:
Some devices have more than one encoder, and some of them may cause issues or
crash. It is possible to select a different encoder:
```bash
scrcpy --encoder=OMX.qcom.video.encoder.avc
@@ -277,8 +265,7 @@ To list the available encoders, you can pass an invalid encoder name; the
error will give the available encoders:
```bash
scrcpy --encoder=_ # for the default codec
scrcpy --codec=h265 --encoder=_ # for a specific codec
scrcpy --encoder=_
```
### Capture

View File

@@ -26,11 +26,7 @@ Encode the video at the given bit\-rate, expressed in bits/s. Unit suffixes are
Default is 8000000.
.TP
.BI "\-\-codec " name
Select a video codec (h264, h265 or av1).
.TP
.BI "\-\-codec\-options " key\fR[:\fItype\fR]=\fIvalue\fR[,...]
.BI "\-\-codec\-options " key[:type]=value[,...]
Set a list of comma-separated key:type=value options for the device encoder.
The possible values for 'type' are 'int' (default), 'long', 'float' and 'string'.
@@ -121,7 +117,7 @@ Inject computer clipboard text as a sequence of key events on Ctrl+v (like MOD+S
This is a workaround for some devices not behaving as expected when setting the device clipboard programmatically.
.TP
\fB\-\-lock\-video\-orientation\fR[=\fIvalue\fR]
.BI "\-\-lock\-video\-orientation[=value]
Lock video orientation to \fIvalue\fR. Possible values are "unlocked", "initial" (locked to the initial orientation), 0, 1, 2 and 3. Natural device orientation is 0, and each increment adds a 90-degree rotation counterclockwise.
Default is "unlocked".
@@ -203,7 +199,7 @@ It may only work over USB.
See \fB\-\-hid\-keyboard\fR and \fB\-\-hid\-mouse\fR.
.TP
.BI "\-p, \-\-port " port\fR[:\fIport\fR]
.BI "\-p, \-\-port " port[:port]
Set the TCP port (range) used by the client to listen.
Default is 27183:27199.
@@ -264,7 +260,7 @@ Set the initial display rotation. Possible values are 0, 1, 2 and 3. Each incre
The device serial number. Mandatory only if several devices are connected to adb.
.TP
.BI "\-\-shortcut\-mod " key\fR[+...]][,...]
.BI "\-\-shortcut\-mod " key[+...]][,...]
Specify the modifiers to use for scrcpy shortcuts. Possible keys are "lctrl", "rctrl", "lalt", "ralt", "lsuper" and "rsuper".
A shortcut can consist of several keys, separated by '+'. Several shortcuts can be specified, separated by ','.
@@ -274,7 +270,7 @@ For example, to use either LCtrl+LAlt or LSuper for scrcpy shortcuts, pass "lctr
Default is "lalt,lsuper" (left-Alt or left-Super).
.TP
.BI "\-\-tcpip\fR[=\fIip\fR[:\fIport\fR]]
.BI "\-\-tcpip[=ip[:port]]
Configure and reconnect the device over TCP/IP.
If a destination address is provided, then scrcpy connects to this address before starting. The device must listen on the given TCP port (default is 5555).
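
As an aside (not part of the diff), the --codec-options syntax described above (comma-separated key[:type]=value entries, where 'type' defaults to 'int' and may also be 'long', 'float' or 'string') can be illustrated with a small parser sketch. The class name and option values below are hypothetical, for illustration only; this is not the scrcpy CodecOption parser:

```java
import java.util.ArrayList;
import java.util.List;

// Parses a comma-separated list of key[:type]=value codec options,
// with 'int' as the default type, as documented for --codec-options.
final class CodecOptionsExample {

    static List<Object[]> parse(String list) {
        List<Object[]> options = new ArrayList<>();
        for (String item : list.split(",")) {
            String[] kv = item.split("=", 2);
            if (kv.length != 2) {
                throw new IllegalArgumentException("Invalid option: " + item);
            }
            String key = kv[0];
            String type = "int"; // default type
            int colon = key.indexOf(':');
            if (colon != -1) {
                type = key.substring(colon + 1);
                key = key.substring(0, colon);
            }
            Object value;
            if ("int".equals(type)) {
                value = Integer.valueOf(kv[1]);
            } else if ("long".equals(type)) {
                value = Long.valueOf(kv[1]);
            } else if ("float".equals(type)) {
                value = Float.valueOf(kv[1]);
            } else if ("string".equals(type)) {
                value = kv[1];
            } else {
                throw new IllegalArgumentException("Unknown type: " + type);
            }
            options.add(new Object[] {key, value});
        }
        return options;
    }

    public static void main(String[] args) {
        // Hypothetical option values, for illustration only
        for (Object[] option : parse("profile=1,level:int=4096")) {
            System.out.println(option[0] + " = " + option[1]);
        }
    }
}
```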

View File

@@ -57,8 +57,6 @@
#define OPT_NO_CLEANUP 1037
#define OPT_PRINT_FPS 1038
#define OPT_NO_POWER_ON 1039
#define OPT_CODEC 1040
#define OPT_NO_AUDIO 1041
struct sc_option {
char shortopt;
@@ -107,12 +105,6 @@ static const struct sc_option options[] = {
"Unit suffixes are supported: 'K' (x1000) and 'M' (x1000000).\n"
"Default is " STR(DEFAULT_BIT_RATE) ".",
},
{
.longopt_id = OPT_CODEC,
.longopt = "codec",
.argdesc = "name",
.text = "Select a video codec (h264, h265 or av1).",
},
{
.longopt_id = OPT_CODEC_OPTIONS,
.longopt = "codec-options",
@@ -299,11 +291,6 @@ static const struct sc_option options[] = {
.text = "Do not display device (only when screen recording or V4L2 "
"sink is enabled).",
},
{
.longopt_id = OPT_NO_AUDIO,
.longopt = "no-audio",
.text = "Disable audio forwarding.",
},
{
.longopt_id = OPT_NO_KEY_REPEAT,
.longopt = "no-key-repeat",
@@ -1390,24 +1377,6 @@ guess_record_format(const char *filename) {
return 0;
}
static bool
parse_codec(const char *optarg, enum sc_codec *codec) {
if (!strcmp(optarg, "h264")) {
*codec = SC_CODEC_H264;
return true;
}
if (!strcmp(optarg, "h265")) {
*codec = SC_CODEC_H265;
return true;
}
if (!strcmp(optarg, "av1")) {
*codec = SC_CODEC_AV1;
return true;
}
LOGE("Unsupported codec: %s (expected h264, h265 or av1)", optarg);
return false;
}
static bool
parse_args_with_getopt(struct scrcpy_cli_args *args, int argc, char *argv[],
const char *optstring, const struct option *longopts) {
@@ -1632,9 +1601,6 @@ parse_args_with_getopt(struct scrcpy_cli_args *args, int argc, char *argv[],
case OPT_NO_DOWNSIZE_ON_ERROR:
opts->downsize_on_error = false;
break;
case OPT_NO_AUDIO:
opts->audio = false;
break;
case OPT_NO_CLEANUP:
opts->cleanup = false;
break;
@@ -1644,11 +1610,6 @@ parse_args_with_getopt(struct scrcpy_cli_args *args, int argc, char *argv[],
case OPT_PRINT_FPS:
opts->start_fps_counter = true;
break;
case OPT_CODEC:
if (!parse_codec(optarg, &opts->codec)) {
return false;
}
break;
case OPT_OTG:
#ifdef HAVE_USB
opts->otg = true;
@@ -1757,13 +1718,6 @@ parse_args_with_getopt(struct scrcpy_cli_args *args, int argc, char *argv[],
}
}
if (opts->record_format == SC_RECORD_FORMAT_MP4
&& opts->codec == SC_CODEC_AV1) {
LOGE("Could not mux AV1 stream into MP4 container "
"(record to mkv or select another video codec)");
return false;
}
if (!opts->control) {
if (opts->turn_screen_off) {
LOGE("Could not request to turn screen off if control is disabled");

View File

@@ -117,9 +117,8 @@ sc_control_msg_serialize(const struct sc_control_msg *msg, unsigned char *buf) {
uint16_t pressure =
sc_float_to_u16fp(msg->inject_touch_event.pressure);
sc_write16be(&buf[22], pressure);
sc_write32be(&buf[24], msg->inject_touch_event.action_button);
sc_write32be(&buf[28], msg->inject_touch_event.buttons);
return 32;
sc_write32be(&buf[24], msg->inject_touch_event.buttons);
return 28;
case SC_CONTROL_MSG_TYPE_INJECT_SCROLL_EVENT:
write_position(&buf[1], &msg->inject_scroll_event.position);
int16_t hscroll =
@@ -180,25 +179,22 @@ sc_control_msg_log(const struct sc_control_msg *msg) {
if (pointer_name) {
// string pointer id
LOG_CMSG("touch [id=%s] %-4s position=%" PRIi32 ",%" PRIi32
" pressure=%f action_button=%06lx buttons=%06lx",
" pressure=%f buttons=%06lx",
pointer_name,
MOTIONEVENT_ACTION_LABEL(action),
msg->inject_touch_event.position.point.x,
msg->inject_touch_event.position.point.y,
msg->inject_touch_event.pressure,
(long) msg->inject_touch_event.action_button,
(long) msg->inject_touch_event.buttons);
} else {
// numeric pointer id
LOG_CMSG("touch [id=%" PRIu64_ "] %-4s position=%" PRIi32 ",%"
PRIi32 " pressure=%f action_button=%06lx"
" buttons=%06lx",
PRIi32 " pressure=%f buttons=%06lx",
id,
MOTIONEVENT_ACTION_LABEL(action),
msg->inject_touch_event.position.point.x,
msg->inject_touch_event.position.point.y,
msg->inject_touch_event.pressure,
(long) msg->inject_touch_event.action_button,
(long) msg->inject_touch_event.buttons);
}
break;

View File

@@ -65,7 +65,6 @@ struct sc_control_msg {
} inject_text;
struct {
enum android_motionevent_action action;
enum android_motionevent_buttons action_button;
enum android_motionevent_buttons buttons;
uint64_t pointer_id;
struct sc_position position;

View File

@@ -17,34 +17,6 @@
#define SC_PACKET_PTS_MASK (SC_PACKET_FLAG_KEY_FRAME - 1)
static enum AVCodecID
sc_demuxer_recv_codec_id(struct sc_demuxer *demuxer) {
uint8_t data[4];
ssize_t r = net_recv_all(demuxer->socket, data, 4);
if (r < 4) {
return false;
}
#define SC_CODEC_ID_H264 UINT32_C(0x68323634) // "h264" in ASCII
#define SC_CODEC_ID_H265 UINT32_C(0x68323635) // "h265" in ASCII
#define SC_CODEC_ID_AV1 UINT32_C(0x00617631) // "av1" in ASCII
#define SC_CODEC_ID_OPUS UINT32_C(0x6f707573) // "opus" in ASCII
uint32_t codec_id = sc_read32be(data);
switch (codec_id) {
case SC_CODEC_ID_H264:
return AV_CODEC_ID_H264;
case SC_CODEC_ID_H265:
return AV_CODEC_ID_HEVC;
case SC_CODEC_ID_AV1:
return AV_CODEC_ID_AV1;
case SC_CODEC_ID_OPUS:
return AV_CODEC_ID_OPUS;
default:
LOGE("Unknown codec id 0x%08" PRIx32, codec_id);
return AV_CODEC_ID_NONE;
}
}
static bool
sc_demuxer_recv_packet(struct sc_demuxer *demuxer, AVPacket *packet) {
// The video stream contains raw packets, without time information. When we
@@ -199,13 +171,7 @@ static int
run_demuxer(void *data) {
struct sc_demuxer *demuxer = data;
enum AVCodecID codec_id = sc_demuxer_recv_codec_id(demuxer);
if (codec_id == AV_CODEC_ID_NONE) {
// Error already logged
goto end;
}
const AVCodec *codec = avcodec_find_decoder(codec_id);
const AVCodec *codec = avcodec_find_decoder(AV_CODEC_ID_H264);
if (!codec) {
LOGE("H.264 decoder not found");
goto end;
@@ -222,7 +188,7 @@ run_demuxer(void *data) {
goto finally_free_codec_ctx;
}
demuxer->parser = av_parser_init(codec_id);
demuxer->parser = av_parser_init(AV_CODEC_ID_H264);
if (!demuxer->parser) {
LOGE("Could not initialize parser");
goto finally_close_sinks;
@@ -273,12 +239,8 @@ end:
}
void
sc_demuxer_init(struct sc_demuxer *demuxer, enum sc_stream_id stream_id,
sc_socket socket, const struct sc_demuxer_callbacks *cbs,
void *cbs_userdata) {
assert(socket != SC_SOCKET_NONE);
demuxer->stream_id = stream_id;
sc_demuxer_init(struct sc_demuxer *demuxer, sc_socket socket,
const struct sc_demuxer_callbacks *cbs, void *cbs_userdata) {
demuxer->socket = socket;
demuxer->pending = NULL;
demuxer->sink_count = 0;
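
As a side note (not part of the diff), the SC_CODEC_ID_* constants above are just the codec names packed as big-endian ASCII into 32 bits, with short names zero-padded on the left ("av1" becomes 0x00617631). A small sketch with a hypothetical helper; the same ids appear on the server side, e.g. AudioCodec.OPUS is declared with 0x6f_70_75_73 in the Java diff further down:

```java
import java.nio.charset.StandardCharsets;

// Hypothetical helper mirroring the SC_CODEC_ID_* constants: pack a short
// ASCII codec name ("h264", "h265", "av1", "opus") into a 32-bit id.
final class CodecIds {

    static int toId(String name) {
        byte[] ascii = name.getBytes(StandardCharsets.US_ASCII);
        if (ascii.length > 4) {
            throw new IllegalArgumentException("Name too long: " + name);
        }
        int id = 0;
        for (byte b : ascii) {
            id = (id << 8) | (b & 0xff);
        }
        return id;
    }

    public static void main(String[] args) {
        System.out.printf("h264 -> 0x%08x%n", toId("h264")); // 0x68323634
        System.out.printf("av1  -> 0x%08x%n", toId("av1"));  // 0x00617631
        System.out.printf("opus -> 0x%08x%n", toId("opus")); // 0x6f707573
    }
}
```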

View File

@@ -14,13 +14,7 @@
#define SC_DEMUXER_MAX_SINKS 2
enum sc_stream_id {
SC_STREAM_ID_VIDEO,
SC_STREAM_ID_AUDIO,
};
struct sc_demuxer {
enum sc_stream_id stream_id;
sc_socket socket;
sc_thread thread;
@@ -42,9 +36,8 @@ struct sc_demuxer_callbacks {
};
void
sc_demuxer_init(struct sc_demuxer *demuxer, enum sc_stream_id stream_id,
sc_socket socket, const struct sc_demuxer_callbacks *cbs,
void *cbs_userdata);
sc_demuxer_init(struct sc_demuxer *demuxer, sc_socket socket,
const struct sc_demuxer_callbacks *cbs, void *cbs_userdata);
void
sc_demuxer_add_sink(struct sc_demuxer *demuxer, struct sc_packet_sink *sink);

View File

@@ -339,7 +339,6 @@ simulate_virtual_finger(struct sc_input_manager *im,
im->forward_all_clicks ? POINTER_ID_VIRTUAL_MOUSE
: POINTER_ID_VIRTUAL_FINGER;
msg.inject_touch_event.pressure = up ? 0.0f : 1.0f;
msg.inject_touch_event.action_button = 0;
msg.inject_touch_event.buttons = 0;
if (!sc_controller_push_msg(im->controller, &msg)) {

View File

@@ -93,7 +93,6 @@ sc_mouse_processor_process_mouse_click(struct sc_mouse_processor *mp,
.pointer_id = event->pointer_id,
.position = event->position,
.pressure = event->action == SC_ACTION_DOWN ? 1.f : 0.f,
.action_button = convert_mouse_buttons(event->button),
.buttons = convert_mouse_buttons(event->buttons_state),
},
};

View File

@@ -13,7 +13,6 @@ const struct scrcpy_options scrcpy_options_default = {
.v4l2_device = NULL,
#endif
.log_level = SC_LOG_LEVEL_INFO,
.codec = SC_CODEC_H264,
.record_format = SC_RECORD_FORMAT_AUTO,
.keyboard_input_mode = SC_KEYBOARD_INPUT_MODE_INJECT,
.port_range = {
@@ -66,5 +65,4 @@ const struct scrcpy_options scrcpy_options_default = {
.cleanup = true,
.start_fps_counter = false,
.power_on = true,
.audio = true,
};

View File

@@ -23,12 +23,6 @@ enum sc_record_format {
SC_RECORD_FORMAT_MKV,
};
enum sc_codec {
SC_CODEC_H264,
SC_CODEC_H265,
SC_CODEC_AV1,
};
enum sc_lock_video_orientation {
SC_LOCK_VIDEO_ORIENTATION_UNLOCKED = -1,
// lock the current orientation when scrcpy starts
@@ -99,7 +93,6 @@ struct scrcpy_options {
const char *v4l2_device;
#endif
enum sc_log_level log_level;
enum sc_codec codec;
enum sc_record_format record_format;
enum sc_keyboard_input_mode keyboard_input_mode;
enum sc_mouse_input_mode mouse_input_mode;
@@ -147,7 +140,6 @@ struct scrcpy_options {
bool cleanup;
bool start_fps_counter;
bool power_on;
bool audio;
};
extern const struct scrcpy_options scrcpy_options_default;

View File

@@ -11,8 +11,6 @@
/** Downcast packet_sink to recorder */
#define DOWNCAST(SINK) container_of(SINK, struct sc_recorder, packet_sink)
#define SC_PTS_ORIGIN_NONE UINT64_C(-1)
static const AVRational SCRCPY_TIME_BASE = {1, 1000000}; // timestamps in us
static const AVOutputFormat *
@@ -171,18 +169,6 @@ run_recorder(void *data) {
sc_mutex_unlock(&recorder->mutex);
if (recorder->pts_origin == SC_PTS_ORIGIN_NONE
&& rec->packet->pts != AV_NOPTS_VALUE) {
// First PTS received
recorder->pts_origin = rec->packet->pts;
}
if (rec->packet->pts != AV_NOPTS_VALUE) {
// Set PTS relative to the origin
rec->packet->pts -= recorder->pts_origin;
rec->packet->dts = rec->packet->pts;
}
// recorder->previous is only written from this thread, no need to lock
struct sc_record_packet *previous = recorder->previous;
recorder->previous = rec;
@@ -257,7 +243,6 @@ sc_recorder_open(struct sc_recorder *recorder, const AVCodec *input_codec) {
recorder->failed = false;
recorder->header_written = false;
recorder->previous = NULL;
recorder->pts_origin = SC_PTS_ORIGIN_NONE;
const char *format_name = sc_recorder_get_format_name(recorder->format);
assert(format_name);

View File

@@ -28,8 +28,6 @@ struct sc_recorder {
struct sc_size declared_frame_size;
bool header_written;
uint64_t pts_origin;
sc_thread thread;
sc_mutex mutex;
sc_cond queue_cond;

View File

@@ -40,8 +40,7 @@
struct scrcpy {
struct sc_server server;
struct sc_screen screen;
struct sc_demuxer video_demuxer;
struct sc_demuxer audio_demuxer;
struct sc_demuxer demuxer;
struct sc_decoder decoder;
struct sc_recorder recorder;
#ifdef HAVE_V4L2
@@ -234,21 +233,13 @@ av_log_callback(void *avcl, int level, const char *fmt, va_list vl) {
}
static void
sc_video_demuxer_on_eos(struct sc_demuxer *demuxer, void *userdata) {
sc_demuxer_on_eos(struct sc_demuxer *demuxer, void *userdata) {
(void) demuxer;
(void) userdata;
PUSH_EVENT(EVENT_STREAM_STOPPED);
}
static void
sc_audio_demuxer_on_eos(struct sc_demuxer *demuxer, void *userdata) {
(void) demuxer;
(void) userdata;
// TODO
}
static void
sc_server_on_connection_failed(struct sc_server *server, void *userdata) {
(void) server;
@@ -304,8 +295,7 @@ scrcpy(struct scrcpy_options *options) {
#ifdef HAVE_V4L2
bool v4l2_sink_initialized = false;
#endif
bool video_demuxer_started = false;
bool audio_demuxer_started = false;
bool demuxer_started = false;
#ifdef HAVE_USB
bool aoa_hid_initialized = false;
bool hid_keyboard_initialized = false;
@@ -325,7 +315,6 @@ scrcpy(struct scrcpy_options *options) {
.select_usb = options->select_usb,
.select_tcpip = options->select_tcpip,
.log_level = options->log_level,
.codec = options->codec,
.crop = options->crop,
.port_range = options->port_range,
.tunnel_host = options->tunnel_host,
@@ -336,7 +325,6 @@ scrcpy(struct scrcpy_options *options) {
.lock_video_orientation = options->lock_video_orientation,
.control = options->control,
.display_id = options->display_id,
.audio = options->audio,
.show_touches = options->show_touches,
.stay_awake = options->stay_awake,
.codec_options = options->codec_options,
@@ -431,26 +419,17 @@ scrcpy(struct scrcpy_options *options) {
av_log_set_callback(av_log_callback);
static const struct sc_demuxer_callbacks video_demuxer_cbs = {
.on_eos = sc_video_demuxer_on_eos,
static const struct sc_demuxer_callbacks demuxer_cbs = {
.on_eos = sc_demuxer_on_eos,
};
sc_demuxer_init(&s->video_demuxer, SC_STREAM_ID_VIDEO,
s->server.video_socket, &video_demuxer_cbs, NULL);
if (options->audio) {
static const struct sc_demuxer_callbacks audio_demuxer_cbs = {
.on_eos = sc_audio_demuxer_on_eos,
};
sc_demuxer_init(&s->audio_demuxer, SC_STREAM_ID_AUDIO,
s->server.audio_socket, &audio_demuxer_cbs, NULL);
}
sc_demuxer_init(&s->demuxer, s->server.video_socket, &demuxer_cbs, NULL);
if (dec) {
sc_demuxer_add_sink(&s->video_demuxer, &dec->packet_sink);
sc_demuxer_add_sink(&s->demuxer, &dec->packet_sink);
}
if (rec) {
sc_demuxer_add_sink(&s->video_demuxer, &rec->packet_sink);
sc_demuxer_add_sink(&s->demuxer, &rec->packet_sink);
}
struct sc_controller *controller = NULL;
@@ -658,24 +637,17 @@ aoa_hid_end:
#endif
// now we consumed the header values, the socket receives the video stream
// start the video demuxer
if (!sc_demuxer_start(&s->video_demuxer)) {
// start the demuxer
if (!sc_demuxer_start(&s->demuxer)) {
goto end;
}
video_demuxer_started = true;
if (options->audio) {
if (!sc_demuxer_start(&s->audio_demuxer)) {
goto end;
}
audio_demuxer_started = true;
}
demuxer_started = true;
ret = event_loop(s);
LOGD("quit...");
// Close the window immediately on closing, because screen_destroy() may
// only be called once the video demuxer thread is joined (it may take time)
// only be called once the demuxer thread is joined (it may take time)
sc_screen_hide_window(&s->screen);
end:
@@ -713,12 +685,8 @@ end:
// now that the sockets are shutdown, the demuxer and controller are
// interrupted, we can join them
if (video_demuxer_started) {
sc_demuxer_join(&s->video_demuxer);
}
if (audio_demuxer_started) {
sc_demuxer_join(&s->audio_demuxer);
if (demuxer_started) {
sc_demuxer_join(&s->demuxer);
}
#ifdef HAVE_V4L2
@@ -737,9 +705,8 @@ end:
}
#endif
// Destroy the screen only after the video demuxer is guaranteed to be
// finished, because otherwise the screen could receive new frames after
// destruction
// Destroy the screen only after the demuxer is guaranteed to be finished,
// because otherwise the screen could receive new frames after destruction
if (screen_initialized) {
sc_screen_join(&s->screen);
sc_screen_destroy(&s->screen);

View File

@@ -8,7 +8,6 @@
#include <SDL2/SDL_platform.h>
#include "adb/adb.h"
#include "util/binary.h"
#include "util/file.h"
#include "util/log.h"
#include "util/net_intr.h"
@@ -156,20 +155,6 @@ sc_server_sleep(struct sc_server *server, sc_tick deadline) {
return !stopped;
}
static const char *
sc_server_get_codec_name(enum sc_codec codec) {
switch (codec) {
case SC_CODEC_H264:
return "h264";
case SC_CODEC_H265:
return "h265";
case SC_CODEC_AV1:
return "av1";
default:
return NULL;
}
}
static sc_pid
execute_server(struct sc_server *server,
const struct sc_server_params *params) {
@@ -217,12 +202,6 @@ execute_server(struct sc_server *server,
ADD_PARAM("log_level=%s", log_level_to_server_string(params->log_level));
ADD_PARAM("bit_rate=%" PRIu32, params->bit_rate);
if (!params->audio) {
ADD_PARAM("audio=false");
}
if (params->codec != SC_CODEC_H264) {
ADD_PARAM("codec=%s", sc_server_get_codec_name(params->codec));
}
if (params->max_size) {
ADD_PARAM("max_size=%" PRIu16, params->max_size);
}
@@ -391,7 +370,6 @@ sc_server_init(struct sc_server *server, const struct sc_server_params *params,
server->stopped = false;
server->video_socket = SC_SOCKET_NONE;
server->audio_socket = SC_SOCKET_NONE;
server->control_socket = SC_SOCKET_NONE;
sc_adb_tunnel_init(&server->tunnel);
@@ -420,9 +398,10 @@ device_read_info(struct sc_intr *intr, sc_socket device_socket,
buf[SC_DEVICE_NAME_FIELD_LENGTH - 1] = '\0';
memcpy(info->device_name, (char *) buf, sizeof(info->device_name));
unsigned char *fields = &buf[SC_DEVICE_NAME_FIELD_LENGTH];
info->frame_size.width = sc_read16be(fields);
info->frame_size.height = sc_read16be(&fields[2]);
info->frame_size.width = (buf[SC_DEVICE_NAME_FIELD_LENGTH] << 8)
| buf[SC_DEVICE_NAME_FIELD_LENGTH + 1];
info->frame_size.height = (buf[SC_DEVICE_NAME_FIELD_LENGTH + 2] << 8)
| buf[SC_DEVICE_NAME_FIELD_LENGTH + 3];
return true;
}
@@ -435,11 +414,9 @@ sc_server_connect_to(struct sc_server *server, struct sc_server_info *info) {
const char *serial = server->serial;
assert(serial);
bool audio = server->params.audio;
bool control = server->params.control;
sc_socket video_socket = SC_SOCKET_NONE;
sc_socket audio_socket = SC_SOCKET_NONE;
sc_socket control_socket = SC_SOCKET_NONE;
if (!tunnel->forward) {
video_socket = net_accept_intr(&server->intr, tunnel->server_socket);
@@ -447,14 +424,6 @@ sc_server_connect_to(struct sc_server *server, struct sc_server_info *info) {
goto fail;
}
if (audio) {
audio_socket =
net_accept_intr(&server->intr, tunnel->server_socket);
if (audio_socket == SC_SOCKET_NONE) {
goto fail;
}
}
if (control) {
control_socket =
net_accept_intr(&server->intr, tunnel->server_socket);
@@ -481,18 +450,6 @@ sc_server_connect_to(struct sc_server *server, struct sc_server_info *info) {
goto fail;
}
if (audio) {
audio_socket = net_socket();
if (audio_socket == SC_SOCKET_NONE) {
goto fail;
}
bool ok = net_connect_intr(&server->intr, audio_socket, tunnel_host,
tunnel_port);
if (!ok) {
goto fail;
}
}
if (control) {
// we know that the device is listening, we don't need several
// attempts
@@ -519,11 +476,9 @@ sc_server_connect_to(struct sc_server *server, struct sc_server_info *info) {
}
assert(video_socket != SC_SOCKET_NONE);
assert(!audio || audio_socket != SC_SOCKET_NONE);
assert(!control || control_socket != SC_SOCKET_NONE);
server->video_socket = video_socket;
server->audio_socket = audio_socket;
server->control_socket = control_socket;
return true;
@@ -535,12 +490,6 @@ fail:
}
}
if (audio_socket != SC_SOCKET_NONE) {
if (!net_close(audio_socket)) {
LOGW("Could not close audio socket");
}
}
if (control_socket != SC_SOCKET_NONE) {
if (!net_close(control_socket)) {
LOGW("Could not close control socket");
@@ -886,11 +835,6 @@ run_server(void *data) {
assert(server->video_socket != SC_SOCKET_NONE);
net_interrupt(server->video_socket);
if (server->audio_socket != SC_SOCKET_NONE) {
// There is no audio_socket if --no-audio is set
net_interrupt(server->audio_socket);
}
if (server->control_socket != SC_SOCKET_NONE) {
// There is no control_socket if --no-control is set
net_interrupt(server->control_socket);
@@ -952,9 +896,6 @@ sc_server_destroy(struct sc_server *server) {
if (server->video_socket != SC_SOCKET_NONE) {
net_close(server->video_socket);
}
if (server->audio_socket != SC_SOCKET_NONE) {
net_close(server->audio_socket);
}
if (server->control_socket != SC_SOCKET_NONE) {
net_close(server->control_socket);
}

View File

@@ -25,7 +25,6 @@ struct sc_server_params {
uint32_t uid;
const char *req_serial;
enum sc_log_level log_level;
enum sc_codec codec;
const char *crop;
const char *codec_options;
const char *encoder_name;
@@ -38,7 +37,6 @@ struct sc_server_params {
int8_t lock_video_orientation;
bool control;
uint32_t display_id;
bool audio;
bool show_touches;
bool stay_awake;
bool force_adb_forward;
@@ -70,7 +68,6 @@ struct sc_server {
struct sc_adb_tunnel tunnel;
sc_socket video_socket;
sc_socket audio_socket;
sc_socket control_socket;
const struct sc_server_callbacks *cbs;

View File

@@ -90,14 +90,13 @@ static void test_serialize_inject_touch_event(void) {
},
},
.pressure = 1.0f,
.action_button = AMOTION_EVENT_BUTTON_PRIMARY,
.buttons = AMOTION_EVENT_BUTTON_PRIMARY,
},
};
unsigned char buf[SC_CONTROL_MSG_MAX_SIZE];
size_t size = sc_control_msg_serialize(&msg, buf);
assert(size == 32);
assert(size == 28);
const unsigned char expected[] = {
SC_CONTROL_MSG_TYPE_INJECT_TOUCH_EVENT,
@@ -106,8 +105,7 @@ static void test_serialize_inject_touch_event(void) {
0x00, 0x00, 0x00, 0x64, 0x00, 0x00, 0x00, 0xc8, // 100 200
0x04, 0x38, 0x07, 0x80, // 1080 1920
0xff, 0xff, // pressure
0x00, 0x00, 0x00, 0x01, // AMOTION_EVENT_BUTTON_PRIMARY (action button)
0x00, 0x00, 0x00, 0x01, // AMOTION_EVENT_BUTTON_PRIMARY (buttons)
0x00, 0x00, 0x00, 0x01 // AMOTION_EVENT_BUTTON_PRIMARY
};
assert(!memcmp(buf, expected, sizeof(expected)));
}

View File

@@ -1,46 +0,0 @@
package com.genymobile.scrcpy;
import android.media.MediaFormat;
public enum AudioCodec implements Codec {
OPUS(0x6f_70_75_73, "opus", MediaFormat.MIMETYPE_AUDIO_OPUS);
private final int id; // 4-byte ASCII representation of the name
private final String name;
private final String mimeType;
AudioCodec(int id, String name, String mimeType) {
this.id = id;
this.name = name;
this.mimeType = mimeType;
}
@Override
public Type getType() {
return Type.VIDEO;
}
@Override
public int getId() {
return id;
}
@Override
public String getName() {
return name;
}
@Override
public String getMimeType() {
return mimeType;
}
public static AudioCodec findByName(String name) {
for (AudioCodec codec : values()) {
if (codec.name.equals(name)) {
return codec;
}
}
return null;
}
}

View File

@@ -1,308 +0,0 @@
package com.genymobile.scrcpy;
import android.annotation.SuppressLint;
import android.annotation.TargetApi;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.AudioTimestamp;
import android.media.MediaCodec;
import android.media.MediaFormat;
import android.media.MediaRecorder;
import android.os.Build;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.SystemClock;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.atomic.AtomicBoolean;
public final class AudioEncoder {
private static class InputTask {
final int index;
InputTask(int index) {
this.index = index;
}
}
private static class OutputTask {
final int index;
final MediaCodec.BufferInfo bufferInfo;
OutputTask(int index, MediaCodec.BufferInfo bufferInfo) {
this.index = index;
this.bufferInfo = bufferInfo;
}
}
private static final String MIMETYPE = MediaFormat.MIMETYPE_AUDIO_OPUS;
private static final int SAMPLE_RATE = 48000;
private static final int CHANNELS = 2;
private static final int BIT_RATE = 128000;
private static int BUFFER_MS = 15; // milliseconds
private static final int BUFFER_SIZE = SAMPLE_RATE * CHANNELS * BUFFER_MS / 1000;
private final Streamer streamer;
private AudioRecord recorder;
private MediaCodec mediaCodec;
// Capacity of 64 is in practice "infinite" (it is limited by the number of available MediaCodec buffers, typically 4).
// So many pending tasks would lead to an unacceptable delay anyway.
private final BlockingQueue<InputTask> inputTasks = new ArrayBlockingQueue<>(64);
private final BlockingQueue<OutputTask> outputTasks = new ArrayBlockingQueue<>(64);
private Thread thread;
private HandlerThread mediaCodecThread;
private Thread inputThread;
private Thread outputThread;
private boolean ended;
public AudioEncoder(Streamer streamer) {
this.streamer = streamer;
}
private static AudioFormat createAudioFormat() {
AudioFormat.Builder builder = new AudioFormat.Builder();
builder.setEncoding(AudioFormat.ENCODING_PCM_16BIT);
builder.setSampleRate(SAMPLE_RATE);
builder.setChannelMask(CHANNELS == 2 ? AudioFormat.CHANNEL_IN_STEREO : AudioFormat.CHANNEL_IN_MONO);
return builder.build();
}
@TargetApi(Build.VERSION_CODES.M)
@SuppressLint({"WrongConstant", "MissingPermission"})
private static AudioRecord createAudioRecord() {
AudioRecord.Builder builder = new AudioRecord.Builder();
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.S) {
// On older APIs, Workarounds.fillAppInfo() must be called beforehand
builder.setContext(FakeContext.get());
}
builder.setAudioSource(MediaRecorder.AudioSource.REMOTE_SUBMIX);
builder.setAudioFormat(createAudioFormat());
builder.setBufferSizeInBytes(1024 * 1024);
return builder.build();
}
private static MediaFormat createFormat() {
MediaFormat format = new MediaFormat();
format.setString(MediaFormat.KEY_MIME, MIMETYPE);
format.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE);
format.setInteger(MediaFormat.KEY_CHANNEL_COUNT, CHANNELS);
format.setInteger(MediaFormat.KEY_SAMPLE_RATE, SAMPLE_RATE);
return format;
}
@TargetApi(Build.VERSION_CODES.N)
private void inputThread() throws IOException, InterruptedException {
final AudioTimestamp timestamp = new AudioTimestamp();
long previousPts = 0;
long nextPts = 0;
while (!Thread.currentThread().isInterrupted()) {
InputTask task = inputTasks.take();
ByteBuffer buffer = mediaCodec.getInputBuffer(task.index);
int r = recorder.read(buffer, BUFFER_SIZE);
if (r < 0) {
throw new IOException("Could not read audio: " + r);
}
long pts;
int ret = recorder.getTimestamp(timestamp, AudioTimestamp.TIMEBASE_MONOTONIC);
if (ret == AudioRecord.SUCCESS) {
pts = timestamp.nanoTime / 1000;
} else {
if (nextPts == 0) {
Ln.w("Could not get any audio timestamp");
}
// compute from previous timestamp and packet size
pts = nextPts;
}
long durationMs = r * 1000 / CHANNELS / SAMPLE_RATE;
nextPts = pts + durationMs;
if (previousPts != 0 && pts < previousPts) {
// Audio PTS may come from two sources:
// - recorder.getTimestamp() if the call works;
// - an estimation from the previous PTS and the packet size as a fallback.
//
// Therefore, the property that PTS are monotonically increasing is not guaranteed in corner cases, so enforce it.
pts = previousPts + 1;
}
mediaCodec.queueInputBuffer(task.index, 0, r, pts, 0);
previousPts = pts;
}
}
private void outputThread() throws IOException, InterruptedException {
streamer.writeHeader();
while (!Thread.currentThread().isInterrupted()) {
OutputTask task = outputTasks.take();
ByteBuffer buffer = mediaCodec.getOutputBuffer(task.index);
try {
streamer.writePacket(buffer, task.bufferInfo);
} finally {
mediaCodec.releaseOutputBuffer(task.index, false);
}
}
}
public void start() {
thread = new Thread(() -> {
try {
encode();
} catch (IOException e) {
// this is expected on close
} finally {
Ln.d("Audio encoder stopped");
}
});
thread.start();
}
public void stop() {
if (thread != null) {
// Just wake up the blocking wait from the thread, so that it properly releases all its resources and terminates
end();
}
}
public void join() throws InterruptedException {
if (thread != null) {
thread.join();
}
}
private synchronized void end() {
ended = true;
notify();
}
private synchronized void waitEnded() {
try {
while (!ended) {
wait();
}
} catch (InterruptedException e) {
// ignore
}
}
@TargetApi(Build.VERSION_CODES.M)
public void encode() throws IOException {
mediaCodec = MediaCodec.createEncoderByType(MIMETYPE); // may throw IOException
try {
recorder = createAudioRecord();
mediaCodecThread = new HandlerThread("AudioEncoder");
mediaCodecThread.start();
MediaFormat format = createFormat();
mediaCodec.setCallback(new EncoderCallback(), new Handler(mediaCodecThread.getLooper()));
mediaCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
recorder.startRecording();
inputThread = new Thread(() -> {
try {
inputThread();
} catch (IOException | InterruptedException e) {
// this is expected on close
} finally {
end();
}
});
outputThread = new Thread(() -> {
try {
outputThread();
} catch (IOException | InterruptedException e) {
// this is expected on close
} finally {
end();
}
});
mediaCodec.start();
inputThread.start();
outputThread.start();
} catch (Throwable e) {
mediaCodec.release();
if (recorder != null) {
recorder.release();
}
throw e;
}
try {
waitEnded();
} finally {
cleanUp();
}
}
private void cleanUp() {
mediaCodecThread.getLooper().quit();
inputThread.interrupt();
outputThread.interrupt();
try {
mediaCodecThread.join();
inputThread.join();
outputThread.join();
} catch (InterruptedException e) {
// Should never happen
throw new AssertionError(e);
}
mediaCodec.stop();
mediaCodec.release();
recorder.stop();
recorder.release();
}
private class EncoderCallback extends MediaCodec.Callback {
@TargetApi(Build.VERSION_CODES.N)
@Override
public void onInputBufferAvailable(MediaCodec codec, int index) {
try {
inputTasks.put(new InputTask(index));
} catch (InterruptedException e) {
end();
}
}
@Override
public void onOutputBufferAvailable(MediaCodec codec, int index, MediaCodec.BufferInfo bufferInfo) {
try {
outputTasks.put(new OutputTask(index, bufferInfo));
} catch (InterruptedException e) {
end();
}
}
@Override
public void onError(MediaCodec codec, MediaCodec.CodecException e) {
Ln.e("MediaCodec error", e);
end();
}
@Override
public void onOutputFormatChanged(MediaCodec codec, MediaFormat format) {
// ignore
}
}
}

View File

@@ -1,11 +0,0 @@
package com.genymobile.scrcpy;
public interface Codec {
enum Type {VIDEO}
Type getType();
int getId();
String getName();
String getMimeType();
}

View File

@@ -29,7 +29,6 @@ public final class ControlMessage {
private int metaState; // KeyEvent.META_*
private int action; // KeyEvent.ACTION_* or MotionEvent.ACTION_* or POWER_MODE_*
private int keycode; // KeyEvent.KEYCODE_*
private int actionButton; // MotionEvent.BUTTON_*
private int buttons; // MotionEvent.BUTTON_*
private long pointerId;
private float pressure;
@@ -61,15 +60,13 @@ public final class ControlMessage {
return msg;
}
public static ControlMessage createInjectTouchEvent(int action, long pointerId, Position position, float pressure, int actionButton,
int buttons) {
public static ControlMessage createInjectTouchEvent(int action, long pointerId, Position position, float pressure, int buttons) {
ControlMessage msg = new ControlMessage();
msg.type = TYPE_INJECT_TOUCH_EVENT;
msg.action = action;
msg.pointerId = pointerId;
msg.pressure = pressure;
msg.position = position;
msg.actionButton = actionButton;
msg.buttons = buttons;
return msg;
}
@@ -143,10 +140,6 @@ public final class ControlMessage {
return keycode;
}
public int getActionButton() {
return actionButton;
}
public int getButtons() {
return buttons;
}

View File

@@ -9,7 +9,7 @@ import java.nio.charset.StandardCharsets;
public class ControlMessageReader {
static final int INJECT_KEYCODE_PAYLOAD_LENGTH = 13;
static final int INJECT_TOUCH_EVENT_PAYLOAD_LENGTH = 31;
static final int INJECT_TOUCH_EVENT_PAYLOAD_LENGTH = 27;
static final int INJECT_SCROLL_EVENT_PAYLOAD_LENGTH = 20;
static final int BACK_OR_SCREEN_ON_LENGTH = 1;
static final int SET_SCREEN_POWER_MODE_PAYLOAD_LENGTH = 1;
@@ -140,9 +140,8 @@ public class ControlMessageReader {
long pointerId = buffer.getLong();
Position position = readPosition(buffer);
float pressure = Binary.u16FixedPointToFloat(buffer.getShort());
int actionButton = buffer.getInt();
int buttons = buffer.getInt();
return ControlMessage.createInjectTouchEvent(action, pointerId, position, pressure, actionButton, buttons);
return ControlMessage.createInjectTouchEvent(action, pointerId, position, pressure, buttons);
}
private ControlMessage parseInjectScrollEvent() {

View File

@@ -1,7 +1,5 @@
package com.genymobile.scrcpy;
import com.genymobile.scrcpy.wrappers.InputManager;
import android.os.Build;
import android.os.SystemClock;
import android.view.InputDevice;
@@ -24,8 +22,6 @@ public class Controller {
private static final ScheduledExecutorService EXECUTOR = Executors.newSingleThreadScheduledExecutor();
private Thread thread;
private final Device device;
private final DesktopConnection connection;
private final DeviceMessageSender sender;
@@ -64,7 +60,7 @@ public class Controller {
}
}
private void control() throws IOException {
public void control() throws IOException {
// on start, power on the device
if (powerOn && !Device.isScreenOn()) {
device.pressReleaseKeycode(KeyEvent.KEYCODE_POWER, Device.INJECT_MODE_ASYNC);
@@ -84,34 +80,6 @@ public class Controller {
}
}
public void start() {
thread = new Thread(() -> {
try {
control();
} catch (IOException e) {
// this is expected on close
} finally {
Ln.d("Controller stopped");
}
});
thread.start();
sender.start();
}
public void stop() {
if (thread != null) {
thread.interrupt();
}
sender.stop();
}
public void join() throws InterruptedException {
if (thread != null) {
thread.join();
}
sender.join();
}
public DeviceMessageSender getSender() {
return sender;
}
@@ -131,7 +99,7 @@ public class Controller {
break;
case ControlMessage.TYPE_INJECT_TOUCH_EVENT:
if (device.supportsInputEvents()) {
injectTouch(msg.getAction(), msg.getPointerId(), msg.getPosition(), msg.getPressure(), msg.getActionButton(), msg.getButtons());
injectTouch(msg.getAction(), msg.getPointerId(), msg.getPosition(), msg.getPressure(), msg.getButtons());
}
break;
case ControlMessage.TYPE_INJECT_SCROLL_EVENT:
@@ -211,7 +179,7 @@ public class Controller {
return successCount;
}
private boolean injectTouch(int action, long pointerId, Position position, float pressure, int actionButton, int buttons) {
private boolean injectTouch(int action, long pointerId, Position position, float pressure, int buttons) {
long now = SystemClock.uptimeMillis();
Point point = device.getPhysicalPoint(position);
@@ -228,23 +196,22 @@ public class Controller {
Pointer pointer = pointersState.get(pointerIndex);
pointer.setPoint(point);
pointer.setPressure(pressure);
pointer.setUp(action == MotionEvent.ACTION_UP);
int source;
int pointerCount = pointersState.update(pointerProperties, pointerCoords);
if (pointerId == POINTER_ID_MOUSE || pointerId == POINTER_ID_VIRTUAL_MOUSE) {
// real mouse event (forced by the client when --forward-on-click)
pointerProperties[pointerIndex].toolType = MotionEvent.TOOL_TYPE_MOUSE;
source = InputDevice.SOURCE_MOUSE;
pointer.setUp(buttons == 0);
} else {
// POINTER_ID_GENERIC_FINGER, POINTER_ID_VIRTUAL_FINGER or real touch from device
pointerProperties[pointerIndex].toolType = MotionEvent.TOOL_TYPE_FINGER;
source = InputDevice.SOURCE_TOUCHSCREEN;
// Buttons must not be set for touch events
buttons = 0;
pointer.setUp(action == MotionEvent.ACTION_UP);
}
int pointerCount = pointersState.update(pointerProperties, pointerCoords);
if (pointerCount == 1) {
if (action == MotionEvent.ACTION_DOWN) {
lastTouchDown = now;
@@ -258,62 +225,6 @@ public class Controller {
}
}
/* If the input device is a mouse (on API >= 23):
* - the first button pressed must first generate ACTION_DOWN;
* - each button pressed (including the first one) must generate ACTION_BUTTON_PRESS;
* - each button released (including the last one) must generate ACTION_BUTTON_RELEASE;
* - the last button released must in addition generate ACTION_UP.
*
* Otherwise, Chrome does not work properly: <https://github.com/Genymobile/scrcpy/issues/3635>
*/
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M && source == InputDevice.SOURCE_MOUSE) {
if (action == MotionEvent.ACTION_DOWN) {
if (actionButton == buttons) {
// First button pressed: ACTION_DOWN
MotionEvent downEvent = MotionEvent.obtain(lastTouchDown, now, MotionEvent.ACTION_DOWN, pointerCount, pointerProperties,
pointerCoords, 0, buttons, 1f, 1f, DEFAULT_DEVICE_ID, 0, source, 0);
if (!device.injectEvent(downEvent, Device.INJECT_MODE_ASYNC)) {
return false;
}
}
// Any button pressed: ACTION_BUTTON_PRESS
MotionEvent pressEvent = MotionEvent.obtain(lastTouchDown, now, MotionEvent.ACTION_BUTTON_PRESS, pointerCount, pointerProperties,
pointerCoords, 0, buttons, 1f, 1f, DEFAULT_DEVICE_ID, 0, source, 0);
if (!InputManager.setActionButton(pressEvent, actionButton)) {
return false;
}
if (!device.injectEvent(pressEvent, Device.INJECT_MODE_ASYNC)) {
return false;
}
return true;
}
if (action == MotionEvent.ACTION_UP) {
// Any button released: ACTION_BUTTON_RELEASE
MotionEvent releaseEvent = MotionEvent.obtain(lastTouchDown, now, MotionEvent.ACTION_BUTTON_RELEASE, pointerCount, pointerProperties,
pointerCoords, 0, buttons, 1f, 1f, DEFAULT_DEVICE_ID, 0, source, 0);
if (!InputManager.setActionButton(releaseEvent, actionButton)) {
return false;
}
if (!device.injectEvent(releaseEvent, Device.INJECT_MODE_ASYNC)) {
return false;
}
if (buttons == 0) {
// Last button released: ACTION_UP
MotionEvent upEvent = MotionEvent.obtain(lastTouchDown, now, MotionEvent.ACTION_UP, pointerCount, pointerProperties,
pointerCoords, 0, buttons, 1f, 1f, DEFAULT_DEVICE_ID, 0, source, 0);
if (!device.injectEvent(upEvent, Device.INJECT_MODE_ASYNC)) {
return false;
}
}
return true;
}
}
MotionEvent event = MotionEvent
.obtain(lastTouchDown, now, action, pointerCount, pointerProperties, pointerCoords, 0, buttons, 1f, 1f, DEFAULT_DEVICE_ID, 0, source,
0);

View File

@@ -20,9 +20,6 @@ public final class DesktopConnection implements Closeable {
private final LocalSocket videoSocket;
private final FileDescriptor videoFd;
private final LocalSocket audioSocket;
private final FileDescriptor audioFd;
private final LocalSocket controlSocket;
private final InputStream controlInputStream;
private final OutputStream controlOutputStream;
@@ -30,10 +27,9 @@ public final class DesktopConnection implements Closeable {
private final ControlMessageReader reader = new ControlMessageReader();
private final DeviceMessageWriter writer = new DeviceMessageWriter();
private DesktopConnection(LocalSocket videoSocket, LocalSocket audioSocket, LocalSocket controlSocket) throws IOException {
private DesktopConnection(LocalSocket videoSocket, LocalSocket controlSocket) throws IOException {
this.videoSocket = videoSocket;
this.controlSocket = controlSocket;
this.audioSocket = audioSocket;
if (controlSocket != null) {
controlInputStream = controlSocket.getInputStream();
controlOutputStream = controlSocket.getOutputStream();
@@ -42,7 +38,6 @@ public final class DesktopConnection implements Closeable {
controlOutputStream = null;
}
videoFd = videoSocket.getFileDescriptor();
audioFd = audioSocket != null ? audioSocket.getFileDescriptor() : null;
}
private static LocalSocket connect(String abstractName) throws IOException {
@@ -60,50 +55,40 @@ public final class DesktopConnection implements Closeable {
return SOCKET_NAME_PREFIX + String.format("_%08x", uid);
}
public static DesktopConnection open(int uid, boolean tunnelForward, boolean audio, boolean control, boolean sendDummyByte) throws IOException {
public static DesktopConnection open(int uid, boolean tunnelForward, boolean control, boolean sendDummyByte) throws IOException {
String socketName = getSocketName(uid);
LocalSocket videoSocket = null;
LocalSocket audioSocket = null;
LocalSocket videoSocket;
LocalSocket controlSocket = null;
try {
if (tunnelForward) {
try (LocalServerSocket localServerSocket = new LocalServerSocket(socketName)) {
videoSocket = localServerSocket.accept();
if (sendDummyByte) {
// send one byte so the client may read() to detect a connection error
videoSocket.getOutputStream().write(0);
}
if (audio) {
audioSocket = localServerSocket.accept();
}
if (control) {
controlSocket = localServerSocket.accept();
}
}
} else {
videoSocket = connect(socketName);
if (audio) {
audioSocket = connect(socketName);
if (tunnelForward) {
try (LocalServerSocket localServerSocket = new LocalServerSocket(socketName)) {
videoSocket = localServerSocket.accept();
if (sendDummyByte) {
// send one byte so the client may read() to detect a connection error
videoSocket.getOutputStream().write(0);
}
if (control) {
controlSocket = connect(socketName);
try {
controlSocket = localServerSocket.accept();
} catch (IOException | RuntimeException e) {
videoSocket.close();
throw e;
}
}
}
} catch (IOException | RuntimeException e) {
if (videoSocket != null) {
videoSocket.close();
} else {
videoSocket = connect(socketName);
if (control) {
try {
controlSocket = connect(socketName);
} catch (IOException | RuntimeException e) {
videoSocket.close();
throw e;
}
}
if (audioSocket != null) {
audioSocket.close();
}
if (controlSocket != null) {
controlSocket.close();
}
throw e;
}
return new DesktopConnection(videoSocket, audioSocket, controlSocket);
return new DesktopConnection(videoSocket, controlSocket);
}
public void close() throws IOException {
@@ -136,10 +121,6 @@ public final class DesktopConnection implements Closeable {
return videoFd;
}
public FileDescriptor getAudioFd() {
return audioFd;
}
public ControlMessage receiveControlMessage() throws IOException {
ControlMessage msg = reader.next();
while (msg == null) {

View File

@@ -277,26 +277,6 @@ public final class Device {
* @param mode one of the {@code POWER_MODE_*} constants
*/
public static boolean setScreenPowerMode(int mode) {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
// Change the power mode for all physical displays
long[] physicalDisplayIds = SurfaceControl.getPhysicalDisplayIds();
if (physicalDisplayIds == null) {
Ln.e("Could not get physical display ids");
return false;
}
boolean allOk = true;
for (long physicalDisplayId : physicalDisplayIds) {
IBinder binder = SurfaceControl.getPhysicalDisplayToken(physicalDisplayId);
boolean ok = SurfaceControl.setDisplayPowerMode(binder, mode);
if (!ok) {
allOk = false;
}
}
return allOk;
}
// Older Android versions, only 1 display
IBinder d = SurfaceControl.getBuiltInDisplay();
if (d == null) {
Ln.e("Could not get built-in display");

View File

@@ -6,8 +6,6 @@ public final class DeviceMessageSender {
private final DesktopConnection connection;
private Thread thread;
private String clipboardText;
private long ack;
@@ -26,7 +24,7 @@ public final class DeviceMessageSender {
notify();
}
private void loop() throws IOException, InterruptedException {
public void loop() throws IOException, InterruptedException {
while (!Thread.currentThread().isInterrupted()) {
String text;
long sequence;
@@ -51,28 +49,4 @@ public final class DeviceMessageSender {
}
}
}
public void start() {
thread = new Thread(() -> {
try {
loop();
} catch (IOException | InterruptedException e) {
// this is expected on close
} finally {
Ln.d("Device message sender stopped");
}
});
thread.start();
}
public void stop() {
if (thread != null) {
thread.interrupt();
}
}
public void join() throws InterruptedException {
if (thread != null) {
thread.join();
}
}
}

View File

@@ -1,40 +0,0 @@
package com.genymobile.scrcpy;
import android.annotation.TargetApi;
import android.content.AttributionSource;
import android.content.ContextWrapper;
import android.os.Build;
import android.os.Process;
public final class FakeContext extends ContextWrapper {
public static final String PACKAGE_NAME = "com.android.shell";
private static final FakeContext INSTANCE = new FakeContext();
public static FakeContext get() {
return INSTANCE;
}
private FakeContext() {
super(null);
}
@Override
public String getPackageName() {
return PACKAGE_NAME;
}
@Override
public String getOpPackageName() {
return PACKAGE_NAME;
}
@TargetApi(Build.VERSION_CODES.S)
@Override
public AttributionSource getAttributionSource() {
AttributionSource.Builder builder = new AttributionSource.Builder(Process.SHELL_UID);
builder.setPackageName(PACKAGE_NAME);
return builder.build();
}
}

View File

@@ -5,12 +5,9 @@ import android.graphics.Rect;
import java.util.List;
public class Options {
private Ln.Level logLevel = Ln.Level.DEBUG;
private int uid = -1; // 31-bit non-negative value, or -1
private boolean audio = true;
private int maxSize;
private VideoCodec codec = VideoCodec.H264;
private int bitRate = 8000000;
private int maxFps;
private int lockVideoOrientation = -1;
@@ -32,7 +29,6 @@ public class Options {
private boolean sendDeviceMeta = true; // send device name and size
private boolean sendFrameMeta = true; // send PTS so that the client may record properly
private boolean sendDummyByte = true; // write a byte on start to detect connection issues
private boolean sendCodecId = true; // write the codec ID (4 bytes) before the stream
public Ln.Level getLogLevel() {
return logLevel;
@@ -50,14 +46,6 @@ public class Options {
this.uid = uid;
}
public boolean getAudio() {
return audio;
}
public void setAudio(boolean audio) {
this.audio = audio;
}
public int getMaxSize() {
return maxSize;
}
@@ -66,14 +54,6 @@ public class Options {
this.maxSize = maxSize;
}
public VideoCodec getCodec() {
return codec;
}
public void setCodec(VideoCodec codec) {
this.codec = codec;
}
public int getBitRate() {
return bitRate;
}
@@ -225,12 +205,4 @@ public class Options {
public void setSendDummyByte(boolean sendDummyByte) {
this.sendDummyByte = sendDummyByte;
}
public boolean getSendCodecId() {
return sendCodecId;
}
public void setSendCodecId(boolean sendCodecId) {
this.sendCodecId = sendCodecId;
}
}

View File

@@ -12,6 +12,7 @@ import android.os.IBinder;
import android.os.SystemClock;
import android.view.Surface;
import java.io.FileDescriptor;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
@@ -27,25 +28,26 @@ public class ScreenEncoder implements Device.RotationListener {
// Keep the values in descending order
private static final int[] MAX_SIZE_FALLBACK = {2560, 1920, 1600, 1280, 1024, 800};
private static final int MAX_CONSECUTIVE_ERRORS = 3;
private static final long PACKET_FLAG_CONFIG = 1L << 63;
private static final long PACKET_FLAG_KEY_FRAME = 1L << 62;
private final AtomicBoolean rotationChanged = new AtomicBoolean();
private final ByteBuffer headerBuffer = ByteBuffer.allocate(12);
private final Device device;
private final Streamer streamer;
private final String encoderName;
private final List<CodecOption> codecOptions;
private final int bitRate;
private final int maxFps;
private final boolean sendFrameMeta;
private final boolean downsizeOnError;
private long ptsOrigin;
private boolean firstFrameSent;
private int consecutiveErrors;
public ScreenEncoder(Device device, Streamer streamer, int bitRate, int maxFps, List<CodecOption> codecOptions,
String encoderName, boolean downsizeOnError) {
this.device = device;
this.streamer = streamer;
public ScreenEncoder(boolean sendFrameMeta, int bitRate, int maxFps, List<CodecOption> codecOptions, String encoderName,
boolean downsizeOnError) {
this.sendFrameMeta = sendFrameMeta;
this.bitRate = bitRate;
this.maxFps = maxFps;
this.codecOptions = codecOptions;
@@ -62,15 +64,22 @@ public class ScreenEncoder implements Device.RotationListener {
return rotationChanged.getAndSet(false);
}
public void streamScreen() throws IOException {
String videoMimeType = streamer.getCodec().getMimeType();
MediaCodec codec = createCodec(videoMimeType, encoderName);
MediaFormat format = createFormat(videoMimeType, bitRate, maxFps, codecOptions);
public void streamScreen(Device device, FileDescriptor fd) throws IOException {
Workarounds.prepareMainLooper();
if (Build.BRAND.equalsIgnoreCase("meizu")) {
// <https://github.com/Genymobile/scrcpy/issues/240>
// <https://github.com/Genymobile/scrcpy/issues/2656>
Workarounds.fillAppInfo();
}
internalStreamScreen(device, fd);
}
private void internalStreamScreen(Device device, FileDescriptor fd) throws IOException {
MediaCodec codec = createCodec(encoderName);
MediaFormat format = createFormat(bitRate, maxFps, codecOptions);
IBinder display = createDisplay();
device.setRotationListener(this);
streamer.writeHeader();
boolean alive;
try {
do {
@@ -95,15 +104,25 @@ public class ScreenEncoder implements Device.RotationListener {
codec.start();
alive = encode(codec, streamer);
alive = encode(codec, fd);
// do not call stop() on exception, it would trigger an IllegalStateException
codec.stop();
} catch (MediaCodec.CodecException e) {
Ln.e("Codec error: " + e.getMessage());
// <https://developer.android.com/reference/android/media/MediaCodec#error-handling>
// For simplicity, handle isTransient() like isRecoverable()
if (e.isRecoverable() || e.isTransient()) {
// Avoid busy-loop if too many errors are generated
SystemClock.sleep(50);
} else if (!prepareDownsizeRetry(device, screenInfo)) {
throw e;
}
alive = true;
} catch (IllegalStateException | IllegalArgumentException e) {
Ln.e("Encoding error: " + e.getClass().getName() + ": " + e.getMessage());
if (!prepareRetry(device, screenInfo)) {
if (!prepareDownsizeRetry(device, screenInfo)) {
throw e;
}
Ln.i("Retrying...");
alive = true;
} finally {
codec.reset();
@@ -119,26 +138,13 @@ public class ScreenEncoder implements Device.RotationListener {
}
}
private boolean prepareRetry(Device device, ScreenInfo screenInfo) {
if (firstFrameSent) {
++consecutiveErrors;
if (consecutiveErrors >= MAX_CONSECUTIVE_ERRORS) {
// Definitively fail
return false;
}
// Wait a bit to increase the probability that retrying will fix the problem
SystemClock.sleep(50);
return true;
}
if (!downsizeOnError) {
private boolean prepareDownsizeRetry(Device device, ScreenInfo screenInfo) {
if (!downsizeOnError || firstFrameSent) {
Ln.i("#1 " + downsizeOnError + " " + firstFrameSent);
// Must fail immediately
return false;
}
// Downsizing on error is only enabled if an encoding failure occurs before the first frame (downsizing later could be surprising)
int newMaxSize = chooseMaxSizeFallback(screenInfo.getVideoSize());
Ln.i("newMaxSize = " + newMaxSize);
if (newMaxSize == 0) {
@@ -164,30 +170,30 @@ public class ScreenEncoder implements Device.RotationListener {
return 0;
}
private boolean encode(MediaCodec codec, Streamer streamer) throws IOException {
private boolean encode(MediaCodec codec, FileDescriptor fd) throws IOException {
boolean eof = false;
MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
while (!consumeRotationChange() && !eof) {
int outputBufferId = codec.dequeueOutputBuffer(bufferInfo, -1);
eof = (bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0;
try {
if (consumeRotationChange()) {
// must restart encoding with new size
break;
}
eof = (bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0;
if (outputBufferId >= 0) {
ByteBuffer codecBuffer = codec.getOutputBuffer(outputBufferId);
boolean isConfig = (bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0;
if (!isConfig) {
// If this is not a config packet, then it contains a frame
firstFrameSent = true;
consecutiveErrors = 0;
if (sendFrameMeta) {
writeFrameMeta(fd, bufferInfo, codecBuffer.remaining());
}
streamer.writePacket(codecBuffer, bufferInfo);
IO.writeFully(fd, codecBuffer);
if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) == 0) {
// If this is not a config packet, then it contains a frame
firstFrameSent = true;
}
}
} finally {
if (outputBufferId >= 0) {
@@ -199,28 +205,50 @@ public class ScreenEncoder implements Device.RotationListener {
return !eof;
}
private static MediaCodecInfo[] listEncoders(String videoMimeType) {
private void writeFrameMeta(FileDescriptor fd, MediaCodec.BufferInfo bufferInfo, int packetSize) throws IOException {
headerBuffer.clear();
long pts;
if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
pts = PACKET_FLAG_CONFIG; // non-media data packet
} else {
if (ptsOrigin == 0) {
ptsOrigin = bufferInfo.presentationTimeUs;
}
pts = bufferInfo.presentationTimeUs - ptsOrigin;
if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_KEY_FRAME) != 0) {
pts |= PACKET_FLAG_KEY_FRAME;
}
}
headerBuffer.putLong(pts);
headerBuffer.putInt(packetSize);
headerBuffer.flip();
IO.writeFully(fd, headerBuffer);
}
private static MediaCodecInfo[] listEncoders() {
List<MediaCodecInfo> result = new ArrayList<>();
MediaCodecList list = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
for (MediaCodecInfo codecInfo : list.getCodecInfos()) {
if (codecInfo.isEncoder() && Arrays.asList(codecInfo.getSupportedTypes()).contains(videoMimeType)) {
if (codecInfo.isEncoder() && Arrays.asList(codecInfo.getSupportedTypes()).contains(MediaFormat.MIMETYPE_VIDEO_AVC)) {
result.add(codecInfo);
}
}
return result.toArray(new MediaCodecInfo[result.size()]);
}
private static MediaCodec createCodec(String videoMimeType, String encoderName) throws IOException {
private static MediaCodec createCodec(String encoderName) throws IOException {
if (encoderName != null) {
Ln.d("Creating encoder by name: '" + encoderName + "'");
try {
return MediaCodec.createByCodecName(encoderName);
} catch (IllegalArgumentException e) {
MediaCodecInfo[] encoders = listEncoders(videoMimeType);
MediaCodecInfo[] encoders = listEncoders();
throw new InvalidEncoderException(encoderName, encoders);
}
}
MediaCodec codec = MediaCodec.createEncoderByType(videoMimeType);
MediaCodec codec = MediaCodec.createEncoderByType(MediaFormat.MIMETYPE_VIDEO_AVC);
Ln.d("Using encoder: '" + codec.getName() + "'");
return codec;
}
@@ -242,9 +270,9 @@ public class ScreenEncoder implements Device.RotationListener {
Ln.d("Codec option set: " + key + " (" + value.getClass().getSimpleName() + ") = " + value);
}
private static MediaFormat createFormat(String videoMimeType, int bitRate, int maxFps, List<CodecOption> codecOptions) {
private static MediaFormat createFormat(int bitRate, int maxFps, List<CodecOption> codecOptions) {
MediaFormat format = new MediaFormat();
format.setString(MediaFormat.KEY_MIME, videoMimeType);
format.setString(MediaFormat.KEY_MIME, MediaFormat.MIMETYPE_VIDEO_AVC);
format.setInteger(MediaFormat.KEY_BIT_RATE, bitRate);
// must be present to configure the encoder, but does not impact the actual frame rate, which is variable
format.setInteger(MediaFormat.KEY_FRAME_RATE, 60);

View File

@@ -69,81 +69,41 @@ public final class Server {
int uid = options.getUid();
boolean tunnelForward = options.isTunnelForward();
boolean control = options.getControl();
boolean audio = options.getAudio();
boolean sendDummyByte = options.getSendDummyByte();
Workarounds.prepareMainLooper();
// Workarounds must be applied for Meizu phones:
// - <https://github.com/Genymobile/scrcpy/issues/240>
// - <https://github.com/Genymobile/scrcpy/issues/365>
// - <https://github.com/Genymobile/scrcpy/issues/2656>
//
// But only apply when strictly necessary, since workarounds can cause other issues:
// - <https://github.com/Genymobile/scrcpy/issues/940>
// - <https://github.com/Genymobile/scrcpy/issues/994>
boolean mustFillAppInfo = Build.BRAND.equalsIgnoreCase("meizu");
// Before Android 11, audio is not supported.
// Since Android 12, we can properly set a context on the AudioRecord.
// Only on Android 11 must we fill app info for the AudioRecord to work.
mustFillAppInfo |= audio && Build.VERSION.SDK_INT == Build.VERSION_CODES.R;
if (mustFillAppInfo) {
Workarounds.fillAppInfo();
}
try (DesktopConnection connection = DesktopConnection.open(uid, tunnelForward, audio, control, sendDummyByte)) {
VideoCodec codec = options.getCodec();
try (DesktopConnection connection = DesktopConnection.open(uid, tunnelForward, control, sendDummyByte)) {
if (options.getSendDeviceMeta()) {
Size videoSize = device.getScreenInfo().getVideoSize();
connection.sendDeviceMeta(Device.getDeviceName(), videoSize.getWidth(), videoSize.getHeight());
}
ScreenEncoder screenEncoder = new ScreenEncoder(options.getSendFrameMeta(), options.getBitRate(), options.getMaxFps(), codecOptions,
options.getEncoderName(), options.getDownsizeOnError());
Controller controller = null;
Thread controllerThread = null;
Thread deviceMessageSenderThread = null;
if (control) {
controller = new Controller(device, connection, options.getClipboardAutosync(), options.getPowerOn());
controller.start();
final Controller controller = new Controller(device, connection, options.getClipboardAutosync(), options.getPowerOn());
final Controller controllerRef = controller;
device.setClipboardListener(text -> controllerRef.getSender().pushClipboardText(text));
// asynchronous
controllerThread = startController(controller);
deviceMessageSenderThread = startDeviceMessageSender(controller.getSender());
device.setClipboardListener(text -> controller.getSender().pushClipboardText(text));
}
AudioEncoder audioEncoder = null;
if (audio) {
Streamer audioStreamer = new Streamer(connection.getAudioFd(), AudioCodec.OPUS, options.getSendCodecId(), options.getSendFrameMeta());
audioEncoder = new AudioEncoder(audioStreamer);
audioEncoder.start();
}
Streamer videoStreamer = new Streamer(connection.getVideoFd(), codec, options.getSendCodecId(), options.getSendFrameMeta());
ScreenEncoder screenEncoder = new ScreenEncoder(device, videoStreamer, options.getBitRate(), options.getMaxFps(),
codecOptions, options.getEncoderName(), options.getDownsizeOnError());
try {
// synchronous
screenEncoder.streamScreen();
screenEncoder.streamScreen(device, connection.getVideoFd());
} catch (IOException e) {
// this is expected on close
} finally {
Ln.d("Screen streaming stopped");
} finally {
initThread.interrupt();
if (audioEncoder != null) {
audioEncoder.stop();
if (controllerThread != null) {
controllerThread.interrupt();
}
if (controller != null) {
controller.stop();
}
try {
initThread.join();
if (audioEncoder != null) {
audioEncoder.join();
}
if (controller != null) {
controller.join();
}
} catch (InterruptedException e) {
// ignore
if (deviceMessageSenderThread != null) {
deviceMessageSenderThread.interrupt();
}
}
}
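
The flow above starts the controller (and, on one side of the diff, the audio encoder) asynchronously, streams the screen synchronously, then interrupts and joins the helper threads on shutdown. A minimal, self-contained sketch of that start/interrupt/join pattern, with a hypothetical placeholder task standing in for the real controller and sender loops:

```java
public final class ShutdownPatternSketch {
    public static void main(String[] args) throws InterruptedException {
        // Hypothetical stand-in for controller.control() / sender.loop():
        Thread worker = new Thread(() -> {
            try {
                while (true) {
                    Thread.sleep(1000); // blocking work
                }
            } catch (InterruptedException e) {
                // expected on close
            }
        });
        worker.start();

        try {
            Thread.sleep(100); // stand-in for the synchronous streamScreen() call
        } finally {
            worker.interrupt(); // unblock the worker
            worker.join();      // wait for it to finish before exiting
        }
    }
}
```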
@@ -155,6 +115,32 @@ public final class Server {
return thread;
}
private static Thread startController(final Controller controller) {
Thread thread = new Thread(() -> {
try {
controller.control();
} catch (IOException e) {
// this is expected on close
Ln.d("Controller stopped");
}
});
thread.start();
return thread;
}
private static Thread startDeviceMessageSender(final DeviceMessageSender sender) {
Thread thread = new Thread(() -> {
try {
sender.loop();
} catch (IOException | InterruptedException e) {
// this is expected on close
Ln.d("Device message sender stopped");
}
});
thread.start();
return thread;
}
private static Options createOptions(String... args) {
if (args.length < 1) {
throw new IllegalArgumentException("Missing client version");
@@ -188,17 +174,6 @@ public final class Server {
Ln.Level level = Ln.Level.valueOf(value.toUpperCase(Locale.ENGLISH));
options.setLogLevel(level);
break;
case "audio":
boolean audio = Boolean.parseBoolean(value);
options.setAudio(audio);
break;
case "codec":
VideoCodec codec = VideoCodec.findByName(value);
if (codec == null) {
throw new IllegalArgumentException("Video codec " + value + " not supported");
}
options.setCodec(codec);
break;
case "max_size":
int maxSize = Integer.parseInt(value) & ~7; // multiple of 8
options.setMaxSize(maxSize);
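
The `& ~7` mask rounds the requested size down to a multiple of 8, as the inline comment says. A tiny worked example:

```java
public final class MaxSizeDemo {
    public static void main(String[] args) {
        int requested = 1082;         // 0b100_0011_1010
        int maxSize = requested & ~7; // clear the three low bits
        System.out.println(maxSize);  // prints 1080, a multiple of 8
    }
}
```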
@@ -280,17 +255,12 @@ public final class Server {
boolean sendDummyByte = Boolean.parseBoolean(value);
options.setSendDummyByte(sendDummyByte);
break;
case "send_codec_id":
boolean sendCodecId = Boolean.parseBoolean(value);
options.setSendCodecId(sendCodecId);
break;
case "raw_video_stream":
boolean rawVideoStream = Boolean.parseBoolean(value);
if (rawVideoStream) {
options.setSendDeviceMeta(false);
options.setSendFrameMeta(false);
options.setSendDummyByte(false);
options.setSendCodecId(false);
}
break;
default:

View File

@@ -1,67 +0,0 @@
package com.genymobile.scrcpy;
import android.media.MediaCodec;
import java.io.FileDescriptor;
import java.io.IOException;
import java.nio.ByteBuffer;
public final class Streamer {
private static final long PACKET_FLAG_CONFIG = 1L << 63;
private static final long PACKET_FLAG_KEY_FRAME = 1L << 62;
private final FileDescriptor fd;
private final Codec codec;
private final boolean sendCodecId;
private final boolean sendFrameMeta;
private final ByteBuffer headerBuffer = ByteBuffer.allocate(12);
public Streamer(FileDescriptor fd, Codec codec, boolean sendCodecId, boolean sendFrameMeta) {
this.fd = fd;
this.codec = codec;
this.sendCodecId = sendCodecId;
this.sendFrameMeta = sendFrameMeta;
}
public Codec getCodec() {
return codec;
}
public void writeHeader() throws IOException {
if (sendCodecId) {
ByteBuffer buffer = ByteBuffer.allocate(4);
buffer.putInt(codec.getId());
buffer.flip();
IO.writeFully(fd, buffer);
}
}
public void writePacket(ByteBuffer codecBuffer, MediaCodec.BufferInfo bufferInfo) throws IOException {
if (sendFrameMeta) {
writeFrameMeta(fd, bufferInfo, codecBuffer.remaining());
}
IO.writeFully(fd, codecBuffer);
}
private void writeFrameMeta(FileDescriptor fd, MediaCodec.BufferInfo bufferInfo, int packetSize) throws IOException {
headerBuffer.clear();
long ptsAndFlags;
if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
ptsAndFlags = PACKET_FLAG_CONFIG; // non-media data packet
} else {
ptsAndFlags = bufferInfo.presentationTimeUs;
if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_KEY_FRAME) != 0) {
ptsAndFlags |= PACKET_FLAG_KEY_FRAME;
}
}
headerBuffer.putLong(ptsAndFlags);
headerBuffer.putInt(packetSize);
headerBuffer.flip();
IO.writeFully(fd, headerBuffer);
}
}
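
A receiver of this stream reads the optional 4-byte codec id once, then a 12-byte header followed by the packet payload for every packet. A minimal reading-side sketch (illustrative only; the actual scrcpy client is not part of this diff):

```java
import java.io.DataInputStream;
import java.io.EOFException;
import java.io.IOException;
import java.io.InputStream;

public final class StreamReaderSketch {
    public static void read(InputStream rawStream, boolean hasCodecIdHeader) throws IOException {
        DataInputStream in = new DataInputStream(rawStream); // big-endian, matching ByteBuffer above
        if (hasCodecIdHeader) {
            int codecId = in.readInt(); // 4-byte ASCII id, e.g. 0x68323634 for "h264"
            System.out.println("codec id: 0x" + Integer.toHexString(codecId));
        }
        try {
            while (true) {
                long ptsAndFlags = in.readLong(); // decode flags/pts as sketched for writeFrameMeta()
                int packetSize = in.readInt();
                byte[] packet = new byte[packetSize];
                in.readFully(packet);
                System.out.println("packet: " + packetSize + " bytes");
            }
        } catch (EOFException e) {
            // end of stream
        }
    }
}
```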

View File

@@ -1,48 +0,0 @@
package com.genymobile.scrcpy;
import android.media.MediaFormat;
public enum VideoCodec implements Codec {
H264(0x68_32_36_34, "h264", MediaFormat.MIMETYPE_VIDEO_AVC),
H265(0x68_32_36_35, "h265", MediaFormat.MIMETYPE_VIDEO_HEVC),
AV1(0x00_61_76_31, "av1", MediaFormat.MIMETYPE_VIDEO_AV1);
private final int id; // 4-byte ASCII representation of the name
private final String name;
private final String mimeType;
VideoCodec(int id, String name, String mimeType) {
this.id = id;
this.name = name;
this.mimeType = mimeType;
}
@Override
public Type getType() {
return Type.VIDEO;
}
@Override
public int getId() {
return id;
}
@Override
public String getName() {
return name;
}
@Override
public String getMimeType() {
return mimeType;
}
public static VideoCodec findByName(String name) {
for (VideoCodec codec : values()) {
if (codec.name.equals(name)) {
return codec;
}
}
return null;
}
}
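
The comment above states that each id is the 4-byte ASCII representation of the codec name; a quick demonstration:

```java
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

public final class CodecIdDemo {
    public static void main(String[] args) {
        int id = 0x68_32_36_34; // the H264 id above
        byte[] ascii = ByteBuffer.allocate(4).putInt(id).array();
        System.out.println(new String(ascii, StandardCharsets.US_ASCII)); // prints "h264"
    }
}
```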

View File

@@ -3,12 +3,13 @@ package com.genymobile.scrcpy;
import android.annotation.SuppressLint;
import android.app.Application;
import android.app.Instrumentation;
import android.content.ContextWrapper;
import android.content.Context;
import android.content.pm.ApplicationInfo;
import android.os.Looper;
import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
public final class Workarounds {
private Workarounds() {
@@ -49,7 +50,7 @@ public final class Workarounds {
Object appBindData = appBindDataConstructor.newInstance();
ApplicationInfo applicationInfo = new ApplicationInfo();
applicationInfo.packageName = FakeContext.PACKAGE_NAME;
applicationInfo.packageName = "com.genymobile.scrcpy";
// appBindData.appInfo = applicationInfo;
Field appInfoField = appBindDataClass.getDeclaredField("appInfo");
@@ -61,10 +62,11 @@ public final class Workarounds {
mBoundApplicationField.setAccessible(true);
mBoundApplicationField.set(activityThread, appBindData);
Application app = Application.class.newInstance();
Field baseField = ContextWrapper.class.getDeclaredField("mBase");
baseField.setAccessible(true);
baseField.set(app, FakeContext.get());
// Context ctx = activityThread.getSystemContext();
Method getSystemContextMethod = activityThreadClass.getDeclaredMethod("getSystemContext");
Context ctx = (Context) getSystemContextMethod.invoke(activityThread);
Application app = Instrumentation.newApplication(Application.class, ctx);
// activityThread.mInitialApplication = app;
Field mInitialApplicationField = activityThreadClass.getDeclaredField("mInitialApplication");

View File

@@ -5,7 +5,6 @@ import com.genymobile.scrcpy.Ln;
import android.os.Binder;
import android.os.IBinder;
import android.os.IInterface;
import android.os.Process;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
@@ -49,10 +48,10 @@ public class ActivityManager {
Object[] args;
if (getContentProviderExternalMethodNewVersion) {
// new version
args = new Object[]{name, Process.ROOT_UID, token, null};
args = new Object[]{name, ServiceManager.USER_ID, token, null};
} else {
// old version
args = new Object[]{name, Process.ROOT_UID, token};
args = new Object[]{name, ServiceManager.USER_ID, token};
}
// ContentProviderHolder providerHolder = getContentProviderExternal(...);
Object providerHolder = method.invoke(manager, args);

View File

@@ -1,13 +1,11 @@
package com.genymobile.scrcpy.wrappers;
import com.genymobile.scrcpy.FakeContext;
import com.genymobile.scrcpy.Ln;
import android.content.ClipData;
import android.content.IOnPrimaryClipChangedListener;
import android.os.Build;
import android.os.IInterface;
import android.os.Process;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
@@ -60,22 +58,22 @@ public class ClipboardManager {
private static ClipData getPrimaryClip(Method method, boolean alternativeMethod, IInterface manager)
throws InvocationTargetException, IllegalAccessException {
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.Q) {
return (ClipData) method.invoke(manager, FakeContext.PACKAGE_NAME);
return (ClipData) method.invoke(manager, ServiceManager.PACKAGE_NAME);
}
if (alternativeMethod) {
return (ClipData) method.invoke(manager, FakeContext.PACKAGE_NAME, null, Process.ROOT_UID);
return (ClipData) method.invoke(manager, ServiceManager.PACKAGE_NAME, null, ServiceManager.USER_ID);
}
return (ClipData) method.invoke(manager, FakeContext.PACKAGE_NAME, Process.ROOT_UID);
return (ClipData) method.invoke(manager, ServiceManager.PACKAGE_NAME, ServiceManager.USER_ID);
}
private static void setPrimaryClip(Method method, boolean alternativeMethod, IInterface manager, ClipData clipData)
throws InvocationTargetException, IllegalAccessException {
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.Q) {
method.invoke(manager, clipData, FakeContext.PACKAGE_NAME);
method.invoke(manager, clipData, ServiceManager.PACKAGE_NAME);
} else if (alternativeMethod) {
method.invoke(manager, clipData, FakeContext.PACKAGE_NAME, null, Process.ROOT_UID);
method.invoke(manager, clipData, ServiceManager.PACKAGE_NAME, null, ServiceManager.USER_ID);
} else {
method.invoke(manager, clipData, FakeContext.PACKAGE_NAME, Process.ROOT_UID);
method.invoke(manager, clipData, ServiceManager.PACKAGE_NAME, ServiceManager.USER_ID);
}
}
@@ -108,11 +106,11 @@ public class ClipboardManager {
private static void addPrimaryClipChangedListener(Method method, boolean alternativeMethod, IInterface manager,
IOnPrimaryClipChangedListener listener) throws InvocationTargetException, IllegalAccessException {
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.Q) {
method.invoke(manager, listener, FakeContext.PACKAGE_NAME);
method.invoke(manager, listener, ServiceManager.PACKAGE_NAME);
} else if (alternativeMethod) {
method.invoke(manager, listener, FakeContext.PACKAGE_NAME, null, Process.ROOT_UID);
method.invoke(manager, listener, ServiceManager.PACKAGE_NAME, null, ServiceManager.USER_ID);
} else {
method.invoke(manager, listener, FakeContext.PACKAGE_NAME, Process.ROOT_UID);
method.invoke(manager, listener, ServiceManager.PACKAGE_NAME, ServiceManager.USER_ID);
}
}
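
These reflective calls exist because the server runs as a shell process without an application Context. For comparison only, a regular app would reach the same clipboard operations through the public API (a sketch, not code from this diff):

```java
import android.content.ClipData;
import android.content.ClipboardManager;
import android.content.Context;

public final class ClipboardExample {
    public static void copy(Context context, String text) {
        ClipboardManager cm = (ClipboardManager) context.getSystemService(Context.CLIPBOARD_SERVICE);
        cm.setPrimaryClip(ClipData.newPlainText("label", text));
    }

    public static CharSequence paste(Context context) {
        ClipboardManager cm = (ClipboardManager) context.getSystemService(Context.CLIPBOARD_SERVICE);
        ClipData clip = cm.getPrimaryClip();
        return (clip != null && clip.getItemCount() > 0) ? clip.getItemAt(0).getText() : null;
    }
}
```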

View File

@@ -1,15 +1,11 @@
package com.genymobile.scrcpy.wrappers;
import com.genymobile.scrcpy.FakeContext;
import com.genymobile.scrcpy.Ln;
import com.genymobile.scrcpy.SettingsException;
import android.annotation.SuppressLint;
import android.content.AttributionSource;
import android.os.Build;
import android.os.Bundle;
import android.os.IBinder;
import android.os.Process;
import java.io.Closeable;
import java.lang.reflect.InvocationTargetException;
@@ -55,10 +51,11 @@ public class ContentProvider implements Closeable {
@SuppressLint("PrivateApi")
private Method getCallMethod() throws NoSuchMethodException {
if (callMethod == null) {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.S) {
callMethod = provider.getClass().getMethod("call", AttributionSource.class, String.class, String.class, String.class, Bundle.class);
try {
Class<?> attributionSourceClass = Class.forName("android.content.AttributionSource");
callMethod = provider.getClass().getMethod("call", attributionSourceClass, String.class, String.class, String.class, Bundle.class);
callMethodVersion = 0;
} else {
} catch (NoSuchMethodException | ClassNotFoundException e0) {
// old versions
try {
callMethod = provider.getClass()
@@ -78,29 +75,40 @@ public class ContentProvider implements Closeable {
return callMethod;
}
@SuppressLint("PrivateApi")
private Object getAttributionSource()
throws ClassNotFoundException, NoSuchMethodException, IllegalAccessException, InvocationTargetException, InstantiationException {
if (attributionSource == null) {
Class<?> cl = Class.forName("android.content.AttributionSource$Builder");
Object builder = cl.getConstructor(int.class).newInstance(ServiceManager.USER_ID);
cl.getDeclaredMethod("setPackageName", String.class).invoke(builder, ServiceManager.PACKAGE_NAME);
attributionSource = cl.getDeclaredMethod("build").invoke(builder);
}
return attributionSource;
}
private Bundle call(String callMethod, String arg, Bundle extras)
throws NoSuchMethodException, InvocationTargetException, IllegalAccessException {
throws ClassNotFoundException, NoSuchMethodException, InvocationTargetException, InstantiationException, IllegalAccessException {
try {
Method method = getCallMethod();
Object[] args;
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.S && callMethodVersion == 0) {
args = new Object[]{FakeContext.get().getAttributionSource(), "settings", callMethod, arg, extras};
} else {
switch (callMethodVersion) {
case 1:
args = new Object[]{FakeContext.PACKAGE_NAME, null, "settings", callMethod, arg, extras};
break;
case 2:
args = new Object[]{FakeContext.PACKAGE_NAME, "settings", callMethod, arg, extras};
break;
default:
args = new Object[]{FakeContext.PACKAGE_NAME, callMethod, arg, extras};
break;
}
switch (callMethodVersion) {
case 0:
args = new Object[]{getAttributionSource(), "settings", callMethod, arg, extras};
break;
case 1:
args = new Object[]{ServiceManager.PACKAGE_NAME, null, "settings", callMethod, arg, extras};
break;
case 2:
args = new Object[]{ServiceManager.PACKAGE_NAME, "settings", callMethod, arg, extras};
break;
default:
args = new Object[]{ServiceManager.PACKAGE_NAME, callMethod, arg, extras};
break;
}
return (Bundle) method.invoke(provider, args);
} catch (InvocationTargetException | IllegalAccessException | NoSuchMethodException e) {
} catch (InvocationTargetException | IllegalAccessException | NoSuchMethodException | ClassNotFoundException | InstantiationException e) {
Ln.e("Could not invoke method", e);
throw e;
}
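
On Android 12 and later, the reflective getAttributionSource() helper above builds what code with access to the SDK classes could create directly with the public android.content.AttributionSource.Builder API (comparison sketch, requires API 31; the uid below is illustrative):

```java
import android.content.AttributionSource;
import android.os.Process;

public final class AttributionSourceSketch {
    public static AttributionSource build() {
        return new AttributionSource.Builder(Process.myUid()) // illustrative uid
                .setPackageName("com.android.shell")
                .build();
    }
}
```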
@@ -139,7 +147,7 @@ public class ContentProvider implements Closeable {
public String getValue(String table, String key) throws SettingsException {
String method = getGetMethod(table);
Bundle arg = new Bundle();
arg.putInt(CALL_METHOD_USER_KEY, Process.ROOT_UID);
arg.putInt(CALL_METHOD_USER_KEY, ServiceManager.USER_ID);
try {
Bundle bundle = call(method, key, arg);
if (bundle == null) {
@@ -155,7 +163,7 @@ public class ContentProvider implements Closeable {
public void putValue(String table, String key, String value) throws SettingsException {
String method = getPutMethod(table);
Bundle arg = new Bundle();
arg.putInt(CALL_METHOD_USER_KEY, Process.ROOT_UID);
arg.putInt(CALL_METHOD_USER_KEY, ServiceManager.USER_ID);
arg.putString(NAME_VALUE_TABLE_VALUE, value);
try {
call(method, key, arg);
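
For comparison, with a real Context and ContentResolver the same settings read would go through the public Settings API instead of the reflective provider call above (a sketch, not part of this diff):

```java
import android.content.ContentResolver;
import android.provider.Settings;

public final class SettingsExample {
    public static String readAdbEnabled(ContentResolver resolver) {
        return Settings.Global.getString(resolver, Settings.Global.ADB_ENABLED);
    }
}
```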

View File

@@ -3,7 +3,6 @@ package com.genymobile.scrcpy.wrappers;
import com.genymobile.scrcpy.Ln;
import android.view.InputEvent;
import android.view.MotionEvent;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
@@ -18,7 +17,6 @@ public final class InputManager {
private Method injectInputEventMethod;
private static Method setDisplayIdMethod;
private static Method setActionButtonMethod;
public InputManager(android.hardware.input.InputManager manager) {
this.manager = manager;
@@ -58,22 +56,4 @@ public final class InputManager {
return false;
}
}
private static Method getSetActionButtonMethod() throws NoSuchMethodException {
if (setActionButtonMethod == null) {
setActionButtonMethod = MotionEvent.class.getMethod("setActionButton", int.class);
}
return setActionButtonMethod;
}
public static boolean setActionButton(MotionEvent motionEvent, int actionButton) {
try {
Method method = getSetActionButtonMethod();
method.invoke(motionEvent, actionButton);
return true;
} catch (InvocationTargetException | IllegalAccessException | NoSuchMethodException e) {
Ln.e("Cannot set action button on MotionEvent", e);
return false;
}
}
}

View File

@@ -10,6 +10,9 @@ import java.lang.reflect.Method;
@SuppressLint("PrivateApi,DiscouragedPrivateApi")
public final class ServiceManager {
public static final String PACKAGE_NAME = "com.android.shell";
public static final int USER_ID = 0;
private static final Method GET_SERVICE_METHOD;
static {
try {

View File

@@ -30,8 +30,6 @@ public final class SurfaceControl {
private static Method getBuiltInDisplayMethod;
private static Method setDisplayPowerModeMethod;
private static Method getPhysicalDisplayTokenMethod;
private static Method getPhysicalDisplayIdsMethod;
private SurfaceControl() {
// only static methods
@@ -100,6 +98,7 @@ public final class SurfaceControl {
}
public static IBinder getBuiltInDisplay() {
try {
Method method = getGetBuiltInDisplayMethod();
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.Q) {
@@ -115,40 +114,6 @@ public final class SurfaceControl {
}
}
private static Method getGetPhysicalDisplayTokenMethod() throws NoSuchMethodException {
if (getPhysicalDisplayTokenMethod == null) {
getPhysicalDisplayTokenMethod = CLASS.getMethod("getPhysicalDisplayToken", long.class);
}
return getPhysicalDisplayTokenMethod;
}
public static IBinder getPhysicalDisplayToken(long physicalDisplayId) {
try {
Method method = getGetPhysicalDisplayTokenMethod();
return (IBinder) method.invoke(null, physicalDisplayId);
} catch (InvocationTargetException | IllegalAccessException | NoSuchMethodException e) {
Ln.e("Could not invoke method", e);
return null;
}
}
private static Method getGetPhysicalDisplayIdsMethod() throws NoSuchMethodException {
if (getPhysicalDisplayIdsMethod == null) {
getPhysicalDisplayIdsMethod = CLASS.getMethod("getPhysicalDisplayIds");
}
return getPhysicalDisplayIdsMethod;
}
public static long[] getPhysicalDisplayIds() {
try {
Method method = getGetPhysicalDisplayIdsMethod();
return (long[]) method.invoke(null);
} catch (InvocationTargetException | IllegalAccessException | NoSuchMethodException e) {
Ln.e("Could not invoke method", e);
return null;
}
}
private static Method getSetDisplayPowerModeMethod() throws NoSuchMethodException {
if (setDisplayPowerModeMethod == null) {
setDisplayPowerModeMethod = CLASS.getMethod("setDisplayPowerMode", IBinder.class, int.class);

View File

@@ -94,8 +94,7 @@ public class ControlMessageReaderTest {
dos.writeShort(1080);
dos.writeShort(1920);
dos.writeShort(0xffff); // pressure
dos.writeInt(MotionEvent.BUTTON_PRIMARY); // action button
dos.writeInt(MotionEvent.BUTTON_PRIMARY); // buttons
dos.writeInt(MotionEvent.BUTTON_PRIMARY);
byte[] packet = bos.toByteArray();
@@ -113,7 +112,6 @@ public class ControlMessageReaderTest {
Assert.assertEquals(1080, event.getPosition().getScreenSize().getWidth());
Assert.assertEquals(1920, event.getPosition().getScreenSize().getHeight());
Assert.assertEquals(1f, event.getPressure(), 0f); // must be exact
Assert.assertEquals(MotionEvent.BUTTON_PRIMARY, event.getActionButton());
Assert.assertEquals(MotionEvent.BUTTON_PRIMARY, event.getButtons());
}