Compare commits
15 commits
audio.12...packet_mer
| Author | SHA1 | Date |
|---|---|---|
| | 4b4534371c | |
| | f03f32267e | |
| | 45b2e6db5c | |
| | 400a1c69b1 | |
| | 730eb1086a | |
| | 4f9e9c6619 | |
| | 953edfd1df | |
| | 230b8274b9 | |
| | 40866ddc10 | |
| | bd56c0abf7 | |
| | 6524e90c68 | |
| | f2dee20a20 | |
| | d2dce51038 | |
| | 4342c5637d | |
| | 3e517cd40e | |
README.md (31 changed lines)
@@ -194,18 +194,6 @@ The other dimension is computed so that the Android device aspect ratio is
preserved. That way, a device in 1920×1080 will be mirrored at 1024×576.


#### Select codec

The video codec can be selected. The possible values are `h264` (default),
`h265` and `av1`:

```bash
scrcpy --codec=h264 # default
scrcpy --codec=h265
scrcpy --codec=av1
```


#### Change bit-rate

The default bit-rate is 8 Mbps. To change the video bitrate (e.g. to 2 Mbps):
@@ -264,7 +252,19 @@ This affects recording orientation.
The [window may also be rotated](#rotation) independently.


#### Encoder
#### Codec

The video codec can be selected. The possible values are `h264` (default),
`h265` and `av1`:

```bash
scrcpy --codec=h264 # default
scrcpy --codec=h265
scrcpy --codec=av1
```


##### Encoder

Some devices have more than one encoder for a specific codec, and some of them
may cause issues or crash. It is possible to select a different encoder:
@@ -277,11 +277,10 @@ To list the available encoders, you can pass an invalid encoder name; the
error will give the available encoders:

```bash
scrcpy --encoder=_
scrcpy --encoder=_ # for the default codec
scrcpy --codec=h265 --encoder=_ # for a specific codec
```

Note that you can also select a different [codec](#select-codec).

### Capture

#### Recording

@@ -21,6 +21,7 @@ src = [
'src/mouse_inject.c',
'src/opengl.c',
'src/options.c',
'src/packet_merger.c',
'src/receiver.c',
'src/recorder.c',
'src/scrcpy.c',

@@ -58,7 +58,6 @@
#define OPT_PRINT_FPS 1038
#define OPT_NO_POWER_ON 1039
#define OPT_CODEC 1040
#define OPT_NO_AUDIO 1041

struct sc_option {
char shortopt;
@@ -299,11 +298,6 @@ static const struct sc_option options[] = {
.text = "Do not display device (only when screen recording or V4L2 "
"sink is enabled).",
},
{
.longopt_id = OPT_NO_AUDIO,
.longopt = "no-audio",
.text = "Disable audio forwarding.",
},
{
.longopt_id = OPT_NO_KEY_REPEAT,
.longopt = "no-key-repeat",
@@ -1632,9 +1626,6 @@ parse_args_with_getopt(struct scrcpy_cli_args *args, int argc, char *argv[],
case OPT_NO_DOWNSIZE_ON_ERROR:
opts->downsize_on_error = false;
break;
case OPT_NO_AUDIO:
opts->audio = false;
break;
case OPT_NO_CLEANUP:
opts->cleanup = false;
break;

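Taken together, the three cli.c hunks above wire the long-only `--no-audio` option end to end: a numeric option id, an entry in the option table, and a case in the getopt loop. The following is a minimal consolidated sketch of that pattern; the struct definitions are simplified stand-ins, not the real scrcpy declarations.

```c
#include <stdbool.h>

// Simplified stand-ins for the scrcpy CLI structures shown in the hunks above.
#define OPT_NO_AUDIO 1041

struct sc_option {
    char shortopt;        // '\0' for long-only options
    int longopt_id;
    const char *longopt;
    const char *text;
};

struct scrcpy_options {
    bool audio;           // defaults to true (see scrcpy_options_default)
};

static const struct sc_option options[] = {
    {
        .longopt_id = OPT_NO_AUDIO,
        .longopt = "no-audio",
        .text = "Disable audio forwarding.",
    },
};

// Inside the argument-parsing loop, the new case only flips a flag.
static void handle_opt(struct scrcpy_options *opts, int longopt_id) {
    switch (longopt_id) {
    case OPT_NO_AUDIO:
        opts->audio = false;
        break;
    default:
        break;
    }
}
```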
@@ -6,6 +6,7 @@
|
||||
|
||||
#include "decoder.h"
|
||||
#include "events.h"
|
||||
#include "packet_merger.h"
|
||||
#include "recorder.h"
|
||||
#include "util/binary.h"
|
||||
#include "util/log.h"
|
||||
@@ -18,17 +19,10 @@
|
||||
#define SC_PACKET_PTS_MASK (SC_PACKET_FLAG_KEY_FRAME - 1)
|
||||
|
||||
static enum AVCodecID
|
||||
sc_demuxer_recv_codec_id(struct sc_demuxer *demuxer) {
|
||||
uint8_t data[4];
|
||||
ssize_t r = net_recv_all(demuxer->socket, data, 4);
|
||||
if (r < 4) {
|
||||
return false;
|
||||
}
|
||||
|
||||
sc_demuxer_to_avcodec_id(uint32_t codec_id) {
|
||||
#define SC_CODEC_ID_H264 UINT32_C(0x68323634) // "h264" in ASCII
|
||||
#define SC_CODEC_ID_H265 UINT32_C(0x68323635) // "h265" in ASCII
|
||||
#define SC_CODEC_ID_AV1 UINT32_C(0x00617631) // "av1" in ASCII
|
||||
uint32_t codec_id = sc_read32be(data);
|
||||
switch (codec_id) {
|
||||
case SC_CODEC_ID_H264:
|
||||
return AV_CODEC_ID_H264;
|
||||
@@ -42,6 +36,18 @@ sc_demuxer_recv_codec_id(struct sc_demuxer *demuxer) {
|
||||
}
|
||||
}
|
||||
|
||||
static bool
|
||||
sc_demuxer_recv_codec_id(struct sc_demuxer *demuxer, uint32_t *codec_id) {
|
||||
uint8_t data[4];
|
||||
ssize_t r = net_recv_all(demuxer->socket, data, 4);
|
||||
if (r < 4) {
|
||||
return false;
|
||||
}
|
||||
|
||||
*codec_id = sc_read32be(data);
|
||||
return true;
|
||||
}
|
||||
|
||||
static bool
|
||||
sc_demuxer_recv_packet(struct sc_demuxer *demuxer, AVPacket *packet) {
|
||||
// The video stream contains raw packets, without time information. When we
|
||||
@@ -105,7 +111,7 @@ push_packet_to_sinks(struct sc_demuxer *demuxer, const AVPacket *packet) {
|
||||
for (unsigned i = 0; i < demuxer->sink_count; ++i) {
|
||||
struct sc_packet_sink *sink = demuxer->sinks[i];
|
||||
if (!sink->ops->push(sink, packet)) {
|
||||
LOGE("Could not send config packet to sink %d", i);
|
||||
LOGE("Could not send packet to sink %d", i);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
@@ -115,48 +121,7 @@ push_packet_to_sinks(struct sc_demuxer *demuxer, const AVPacket *packet) {
|
||||
|
||||
static bool
|
||||
sc_demuxer_push_packet(struct sc_demuxer *demuxer, AVPacket *packet) {
|
||||
bool is_config = packet->pts == AV_NOPTS_VALUE;
|
||||
|
||||
// A config packet must not be decoded immediately (it contains no
|
||||
// frame); instead, it must be concatenated with the future data packet.
|
||||
if (demuxer->pending || is_config) {
|
||||
if (demuxer->pending) {
|
||||
size_t offset = demuxer->pending->size;
|
||||
if (av_grow_packet(demuxer->pending, packet->size)) {
|
||||
LOG_OOM();
|
||||
return false;
|
||||
}
|
||||
|
||||
memcpy(demuxer->pending->data + offset, packet->data, packet->size);
|
||||
} else {
|
||||
demuxer->pending = av_packet_alloc();
|
||||
if (!demuxer->pending) {
|
||||
LOG_OOM();
|
||||
return false;
|
||||
}
|
||||
if (av_packet_ref(demuxer->pending, packet)) {
|
||||
LOG_OOM();
|
||||
av_packet_free(&demuxer->pending);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
if (!is_config) {
|
||||
// prepare the concat packet to send to the decoder
|
||||
demuxer->pending->pts = packet->pts;
|
||||
demuxer->pending->dts = packet->dts;
|
||||
demuxer->pending->flags = packet->flags;
|
||||
packet = demuxer->pending;
|
||||
}
|
||||
}
|
||||
|
||||
bool ok = push_packet_to_sinks(demuxer, packet);
|
||||
|
||||
if (!is_config && demuxer->pending) {
|
||||
// the pending packet must be discarded (consumed or error)
|
||||
av_packet_free(&demuxer->pending);
|
||||
}
|
||||
|
||||
if (!ok) {
|
||||
LOGE("Could not process packet");
|
||||
return false;
|
||||
@@ -196,7 +161,17 @@ static int
|
||||
run_demuxer(void *data) {
|
||||
struct sc_demuxer *demuxer = data;
|
||||
|
||||
enum AVCodecID codec_id = sc_demuxer_recv_codec_id(demuxer);
|
||||
// Flag to report end-of-stream (i.e. device disconnected)
|
||||
bool eos = false;
|
||||
|
||||
uint32_t raw_codec_id;
|
||||
bool ok = sc_demuxer_recv_codec_id(demuxer, &raw_codec_id);
|
||||
if (!ok) {
|
||||
eos = true;
|
||||
goto end;
|
||||
}
|
||||
|
||||
enum AVCodecID codec_id = sc_demuxer_to_avcodec_id(raw_codec_id);
|
||||
if (codec_id == AV_CODEC_ID_NONE) {
|
||||
// Error already logged
|
||||
goto end;
|
||||
@@ -208,37 +183,31 @@ run_demuxer(void *data) {
|
||||
goto end;
|
||||
}
|
||||
|
||||
demuxer->codec_ctx = avcodec_alloc_context3(codec);
|
||||
if (!demuxer->codec_ctx) {
|
||||
LOG_OOM();
|
||||
if (!sc_demuxer_open_sinks(demuxer, codec)) {
|
||||
LOGE("Could not open demuxer sinks");
|
||||
goto end;
|
||||
}
|
||||
|
||||
if (!sc_demuxer_open_sinks(demuxer, codec)) {
|
||||
LOGE("Could not open demuxer sinks");
|
||||
goto finally_free_codec_ctx;
|
||||
}
|
||||
|
||||
demuxer->parser = av_parser_init(codec_id);
|
||||
if (!demuxer->parser) {
|
||||
LOGE("Could not initialize parser");
|
||||
goto finally_close_sinks;
|
||||
}
|
||||
|
||||
// We must only pass complete frames to av_parser_parse2()!
|
||||
// It's more complicated, but this allows to reduce the latency by 1 frame!
|
||||
demuxer->parser->flags |= PARSER_FLAG_COMPLETE_FRAMES;
|
||||
struct sc_packet_merger merger;
|
||||
sc_packet_merger_init(&merger);
|
||||
|
||||
AVPacket *packet = av_packet_alloc();
|
||||
if (!packet) {
|
||||
LOG_OOM();
|
||||
goto finally_close_parser;
|
||||
goto finally_close_sinks;
|
||||
}
|
||||
|
||||
for (;;) {
|
||||
bool ok = sc_demuxer_recv_packet(demuxer, packet);
|
||||
if (!ok) {
|
||||
// end of stream
|
||||
eos = true;
|
||||
break;
|
||||
}
|
||||
|
||||
// Prepend any config packet to the next media packet
|
||||
ok = sc_packet_merger_merge(&merger, packet);
|
||||
if (!ok) {
|
||||
break;
|
||||
}
|
||||
|
||||
@@ -252,19 +221,13 @@ run_demuxer(void *data) {
|
||||
|
||||
LOGD("End of frames");
|
||||
|
||||
if (demuxer->pending) {
|
||||
av_packet_free(&demuxer->pending);
|
||||
}
|
||||
sc_packet_merger_destroy(&merger);
|
||||
|
||||
av_packet_free(&packet);
|
||||
finally_close_parser:
|
||||
av_parser_close(demuxer->parser);
|
||||
finally_close_sinks:
|
||||
sc_demuxer_close_sinks(demuxer);
|
||||
finally_free_codec_ctx:
|
||||
avcodec_free_context(&demuxer->codec_ctx);
|
||||
end:
|
||||
demuxer->cbs->on_eos(demuxer, demuxer->cbs_userdata);
|
||||
demuxer->cbs->on_ended(demuxer, eos, demuxer->cbs_userdata);
|
||||
|
||||
return 0;
|
||||
}
|
||||
@@ -273,10 +236,9 @@ void
|
||||
sc_demuxer_init(struct sc_demuxer *demuxer, sc_socket socket,
|
||||
const struct sc_demuxer_callbacks *cbs, void *cbs_userdata) {
|
||||
demuxer->socket = socket;
|
||||
demuxer->pending = NULL;
|
||||
demuxer->sink_count = 0;
|
||||
|
||||
assert(cbs && cbs->on_eos);
|
||||
assert(cbs && cbs->on_ended);
|
||||
|
||||
demuxer->cbs = cbs;
|
||||
demuxer->cbs_userdata = cbs_userdata;
|
||||
|
||||
@@ -21,18 +21,12 @@ struct sc_demuxer {
|
||||
struct sc_packet_sink *sinks[SC_DEMUXER_MAX_SINKS];
|
||||
unsigned sink_count;
|
||||
|
||||
AVCodecContext *codec_ctx;
|
||||
AVCodecParserContext *parser;
|
||||
// successive packets may need to be concatenated, until a non-config
|
||||
// packet is available
|
||||
AVPacket *pending;
|
||||
|
||||
const struct sc_demuxer_callbacks *cbs;
|
||||
void *cbs_userdata;
|
||||
};
|
||||
|
||||
struct sc_demuxer_callbacks {
|
||||
void (*on_eos)(struct sc_demuxer *demuxer, void *userdata);
|
||||
void (*on_ended)(struct sc_demuxer *demuxer, bool eos, void *userdata);
|
||||
};
|
||||
|
||||
void
|
||||
|
||||
@@ -1,5 +1,6 @@
#define EVENT_NEW_FRAME SDL_USEREVENT
#define EVENT_STREAM_STOPPED (SDL_USEREVENT + 1)
#define EVENT_SERVER_CONNECTION_FAILED (SDL_USEREVENT + 2)
#define EVENT_SERVER_CONNECTED (SDL_USEREVENT + 3)
#define EVENT_USB_DEVICE_DISCONNECTED (SDL_USEREVENT + 4)
#define SC_EVENT_NEW_FRAME SDL_USEREVENT
#define SC_EVENT_DEVICE_DISCONNECTED (SDL_USEREVENT + 1)
#define SC_EVENT_SERVER_CONNECTION_FAILED (SDL_USEREVENT + 2)
#define SC_EVENT_SERVER_CONNECTED (SDL_USEREVENT + 3)
#define SC_EVENT_USB_DEVICE_DISCONNECTED (SDL_USEREVENT + 4)
#define SC_EVENT_DEMUXER_ERROR (SDL_USEREVENT + 5)

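These constants are plain SDL user-event ids; renaming them with the SC_ prefix also makes room for the new SC_EVENT_DEMUXER_ERROR value. As a minimal sketch of how such events travel between threads (not the actual scrcpy code; the real producer and event loop appear in the app/src/scrcpy.c hunks further down):

```c
#include <SDL2/SDL.h>
#include <stdbool.h>

// Stand-ins for the constants defined in app/src/events.h above.
#define SC_EVENT_DEVICE_DISCONNECTED (SDL_USEREVENT + 1)
#define SC_EVENT_DEMUXER_ERROR       (SDL_USEREVENT + 5)

// Called from a worker thread (e.g. the demuxer) to report how it ended.
static void report_demuxer_ended(bool eos) {
    SDL_Event event;
    event.type = eos ? SC_EVENT_DEVICE_DISCONNECTED : SC_EVENT_DEMUXER_ERROR;
    if (SDL_PushEvent(&event) < 0) {
        SDL_Log("Could not push event: %s", SDL_GetError());
    }
}

// The UI thread reacts in its event loop.
static int event_loop(void) {
    SDL_Event event;
    while (SDL_WaitEvent(&event)) {
        switch (event.type) {
        case SC_EVENT_DEVICE_DISCONNECTED:
            return 1; // device disconnected: normal termination
        case SC_EVENT_DEMUXER_ERROR:
            return 2; // demuxer failed: report an error
        case SDL_QUIT:
            return 0;
        }
    }
    return 0;
}
```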
@@ -66,5 +66,4 @@ const struct scrcpy_options scrcpy_options_default = {
.cleanup = true,
.start_fps_counter = false,
.power_on = true,
.audio = true,
};

@@ -147,7 +147,6 @@ struct scrcpy_options {
bool cleanup;
bool start_fps_counter;
bool power_on;
bool audio;
};

extern const struct scrcpy_options scrcpy_options_default;

app/src/packet_merger.c (new file, 52 lines)
@@ -0,0 +1,52 @@
#include "packet_merger.h"

#include "util/log.h"

void
sc_packet_merger_init(struct sc_packet_merger *merger) {
    merger->pending_config = NULL;
}

void
sc_packet_merger_destroy(struct sc_packet_merger *merger) {
    av_packet_free(&merger->pending_config);
}

bool
sc_packet_merger_merge(struct sc_packet_merger *merger, AVPacket *packet) {
    bool is_config = packet->pts == AV_NOPTS_VALUE;

    if (is_config) {
        av_packet_free(&merger->pending_config);

        merger->pending_config = av_packet_alloc();
        if (!merger->pending_config) {
            LOG_OOM();
            goto error;
        }

        if (av_packet_ref(merger->pending_config, packet)) {
            LOG_OOM();
            av_packet_free(&merger->pending_config);
            goto error;
        }
    } else if (merger->pending_config) {
        size_t config_size = merger->pending_config->size;
        size_t media_size = packet->size;
        if (av_grow_packet(packet, config_size)) {
            LOG_OOM();
            goto error;
        }

        memmove(packet->data + config_size, packet->data, media_size);
        memcpy(packet->data, merger->pending_config->data, config_size);

        av_packet_free(&merger->pending_config);
    }

    return true;

error:
    av_packet_unref(packet);
    return false;
}
app/src/packet_merger.h (new file, 41 lines)
@@ -0,0 +1,41 @@
#ifndef SC_PACKET_MERGER_H
#define SC_PACKET_MERGER_H

#include "common.h"

#include <stdbool.h>
#include <libavcodec/avcodec.h>

/**
 * Config packets (containing the SPS/PPS) are sent in-band. A new config
 * packet is sent whenever a new encoding session is started (on start and on
 * device orientation change).
 *
 * Every time a config packet is received, it must be sent alone (for recorder
 * extradata), then concatenated to the next media packet (for correct decoding
 * and recording).
 *
 * This helper reads every input packet and modifies each media packet which
 * immediately follows a config packet to prepend the config packet payload.
 */

struct sc_packet_merger {
    AVPacket *pending_config;
};

void
sc_packet_merger_init(struct sc_packet_merger *merger);

void
sc_packet_merger_destroy(struct sc_packet_merger *merger);

/**
 * If the packet is a config packet, then reference it for later.
 * Otherwise (if the packet is a media packet), then if a config packet is
 * pending, prepend the config packet to this packet (so the packet is
 * modified!).
 */
bool
sc_packet_merger_merge(struct sc_packet_merger *merger, AVPacket *packet);

#endif
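The header's doc comments describe the intent; the following minimal sketch shows how a consumer would typically drive this API, mirroring the demuxer loop elsewhere in this diff. The get_next_packet() and consume_packet() helpers are hypothetical placeholders for the network-receive and packet-sink steps.

```c
#include <libavcodec/avcodec.h>
#include <stdbool.h>

#include "packet_merger.h"

// Hypothetical source of raw packets (in scrcpy, sc_demuxer_recv_packet()).
bool get_next_packet(AVPacket *packet);
// Hypothetical consumer (in scrcpy, the decoder/recorder packet sinks).
bool consume_packet(const AVPacket *packet);

static bool
drain_stream(void) {
    struct sc_packet_merger merger;
    sc_packet_merger_init(&merger);

    AVPacket *packet = av_packet_alloc();
    if (!packet) {
        sc_packet_merger_destroy(&merger);
        return false;
    }

    bool ok = true;
    while (get_next_packet(packet)) {
        // Config packets are stored inside the merger; the next media packet
        // gets the stored config payload prepended to it.
        if (!sc_packet_merger_merge(&merger, packet)) {
            ok = false;
            break;
        }
        if (!consume_packet(packet)) {
            ok = false;
            break;
        }
        av_packet_unref(packet);
    }

    av_packet_free(&packet);
    sc_packet_merger_destroy(&merger);
    return ok;
}
```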
@@ -155,9 +155,12 @@ event_loop(struct scrcpy *s) {
|
||||
SDL_Event event;
|
||||
while (SDL_WaitEvent(&event)) {
|
||||
switch (event.type) {
|
||||
case EVENT_STREAM_STOPPED:
|
||||
case SC_EVENT_DEVICE_DISCONNECTED:
|
||||
LOGW("Device disconnected");
|
||||
return SCRCPY_EXIT_DISCONNECTED;
|
||||
case SC_EVENT_DEMUXER_ERROR:
|
||||
LOGE("Demuxer error");
|
||||
return SCRCPY_EXIT_FAILURE;
|
||||
case SDL_QUIT:
|
||||
LOGD("User requested to quit");
|
||||
return SCRCPY_EXIT_SUCCESS;
|
||||
@@ -179,10 +182,10 @@ await_for_server(bool *connected) {
|
||||
LOGD("User requested to quit");
|
||||
*connected = false;
|
||||
return true;
|
||||
case EVENT_SERVER_CONNECTION_FAILED:
|
||||
case SC_EVENT_SERVER_CONNECTION_FAILED:
|
||||
LOGE("Server connection failed");
|
||||
return false;
|
||||
case EVENT_SERVER_CONNECTED:
|
||||
case SC_EVENT_SERVER_CONNECTED:
|
||||
LOGD("Server connected");
|
||||
*connected = true;
|
||||
return true;
|
||||
@@ -233,11 +236,15 @@ av_log_callback(void *avcl, int level, const char *fmt, va_list vl) {
|
||||
}
|
||||
|
||||
static void
|
||||
sc_demuxer_on_eos(struct sc_demuxer *demuxer, void *userdata) {
|
||||
sc_demuxer_on_ended(struct sc_demuxer *demuxer, bool eos, void *userdata) {
|
||||
(void) demuxer;
|
||||
(void) userdata;
|
||||
|
||||
PUSH_EVENT(EVENT_STREAM_STOPPED);
|
||||
if (eos) {
|
||||
PUSH_EVENT(SC_EVENT_DEVICE_DISCONNECTED);
|
||||
} else {
|
||||
PUSH_EVENT(SC_EVENT_DEMUXER_ERROR);
|
||||
}
|
||||
}
|
||||
|
||||
static void
|
||||
@@ -245,7 +252,7 @@ sc_server_on_connection_failed(struct sc_server *server, void *userdata) {
|
||||
(void) server;
|
||||
(void) userdata;
|
||||
|
||||
PUSH_EVENT(EVENT_SERVER_CONNECTION_FAILED);
|
||||
PUSH_EVENT(SC_EVENT_SERVER_CONNECTION_FAILED);
|
||||
}
|
||||
|
||||
static void
|
||||
@@ -253,7 +260,7 @@ sc_server_on_connected(struct sc_server *server, void *userdata) {
|
||||
(void) server;
|
||||
(void) userdata;
|
||||
|
||||
PUSH_EVENT(EVENT_SERVER_CONNECTED);
|
||||
PUSH_EVENT(SC_EVENT_SERVER_CONNECTED);
|
||||
}
|
||||
|
||||
static void
|
||||
@@ -326,7 +333,6 @@ scrcpy(struct scrcpy_options *options) {
|
||||
.lock_video_orientation = options->lock_video_orientation,
|
||||
.control = options->control,
|
||||
.display_id = options->display_id,
|
||||
.audio = options->audio,
|
||||
.show_touches = options->show_touches,
|
||||
.stay_awake = options->stay_awake,
|
||||
.codec_options = options->codec_options,
|
||||
@@ -422,7 +428,7 @@ scrcpy(struct scrcpy_options *options) {
|
||||
av_log_set_callback(av_log_callback);
|
||||
|
||||
static const struct sc_demuxer_callbacks demuxer_cbs = {
|
||||
.on_eos = sc_demuxer_on_eos,
|
||||
.on_ended = sc_demuxer_on_ended,
|
||||
};
|
||||
sc_demuxer_init(&s->demuxer, s->server.video_socket, &demuxer_cbs, NULL);
|
||||
|
||||
|
||||
@@ -371,7 +371,7 @@ sc_video_buffer_on_new_frame(struct sc_video_buffer *vb, bool previous_skipped,
|
||||
bool need_new_event;
|
||||
if (previous_skipped) {
|
||||
sc_fps_counter_add_skipped_frame(&screen->fps_counter);
|
||||
// The EVENT_NEW_FRAME triggered for the previous frame will consume
|
||||
// The SC_EVENT_NEW_FRAME triggered for the previous frame will consume
|
||||
// this new frame instead, unless the previous event failed
|
||||
need_new_event = screen->event_failed;
|
||||
} else {
|
||||
@@ -380,7 +380,7 @@ sc_video_buffer_on_new_frame(struct sc_video_buffer *vb, bool previous_skipped,
|
||||
|
||||
if (need_new_event) {
|
||||
static SDL_Event new_frame_event = {
|
||||
.type = EVENT_NEW_FRAME,
|
||||
.type = SC_EVENT_NEW_FRAME,
|
||||
};
|
||||
|
||||
// Post the event on the UI thread
|
||||
@@ -820,7 +820,7 @@ sc_screen_handle_event(struct sc_screen *screen, SDL_Event *event) {
|
||||
bool relative_mode = sc_screen_is_relative_mode(screen);
|
||||
|
||||
switch (event->type) {
|
||||
case EVENT_NEW_FRAME: {
|
||||
case SC_EVENT_NEW_FRAME: {
|
||||
bool ok = sc_screen_update_frame(screen);
|
||||
if (!ok) {
|
||||
LOGW("Frame update failed\n");
|
||||
|
||||
@@ -217,9 +217,6 @@ execute_server(struct sc_server *server,
|
||||
ADD_PARAM("log_level=%s", log_level_to_server_string(params->log_level));
|
||||
ADD_PARAM("bit_rate=%" PRIu32, params->bit_rate);
|
||||
|
||||
if (!params->audio) {
|
||||
ADD_PARAM("audio=false");
|
||||
}
|
||||
if (params->codec != SC_CODEC_H264) {
|
||||
ADD_PARAM("codec=%s", sc_server_get_codec_name(params->codec));
|
||||
}
|
||||
@@ -391,7 +388,6 @@ sc_server_init(struct sc_server *server, const struct sc_server_params *params,
|
||||
server->stopped = false;
|
||||
|
||||
server->video_socket = SC_SOCKET_NONE;
|
||||
server->audio_socket = SC_SOCKET_NONE;
|
||||
server->control_socket = SC_SOCKET_NONE;
|
||||
|
||||
sc_adb_tunnel_init(&server->tunnel);
|
||||
@@ -435,11 +431,9 @@ sc_server_connect_to(struct sc_server *server, struct sc_server_info *info) {
|
||||
const char *serial = server->serial;
|
||||
assert(serial);
|
||||
|
||||
bool audio = server->params.audio;
|
||||
bool control = server->params.control;
|
||||
|
||||
sc_socket video_socket = SC_SOCKET_NONE;
|
||||
sc_socket audio_socket = SC_SOCKET_NONE;
|
||||
sc_socket control_socket = SC_SOCKET_NONE;
|
||||
if (!tunnel->forward) {
|
||||
video_socket = net_accept_intr(&server->intr, tunnel->server_socket);
|
||||
@@ -447,14 +441,6 @@ sc_server_connect_to(struct sc_server *server, struct sc_server_info *info) {
|
||||
goto fail;
|
||||
}
|
||||
|
||||
if (audio) {
|
||||
audio_socket =
|
||||
net_accept_intr(&server->intr, tunnel->server_socket);
|
||||
if (audio_socket == SC_SOCKET_NONE) {
|
||||
goto fail;
|
||||
}
|
||||
}
|
||||
|
||||
if (control) {
|
||||
control_socket =
|
||||
net_accept_intr(&server->intr, tunnel->server_socket);
|
||||
@@ -481,18 +467,6 @@ sc_server_connect_to(struct sc_server *server, struct sc_server_info *info) {
|
||||
goto fail;
|
||||
}
|
||||
|
||||
if (audio) {
|
||||
audio_socket = net_socket();
|
||||
if (audio_socket == SC_SOCKET_NONE) {
|
||||
goto fail;
|
||||
}
|
||||
bool ok = net_connect_intr(&server->intr, audio_socket, tunnel_host,
|
||||
tunnel_port);
|
||||
if (!ok) {
|
||||
goto fail;
|
||||
}
|
||||
}
|
||||
|
||||
if (control) {
|
||||
// we know that the device is listening, we don't need several
|
||||
// attempts
|
||||
@@ -519,11 +493,9 @@ sc_server_connect_to(struct sc_server *server, struct sc_server_info *info) {
|
||||
}
|
||||
|
||||
assert(video_socket != SC_SOCKET_NONE);
|
||||
assert(!audio || audio_socket != SC_SOCKET_NONE);
|
||||
assert(!control || control_socket != SC_SOCKET_NONE);
|
||||
|
||||
server->video_socket = video_socket;
|
||||
server->audio_socket = audio_socket;
|
||||
server->control_socket = control_socket;
|
||||
|
||||
return true;
|
||||
@@ -535,12 +507,6 @@ fail:
|
||||
}
|
||||
}
|
||||
|
||||
if (audio_socket != SC_SOCKET_NONE) {
|
||||
if (!net_close(audio_socket)) {
|
||||
LOGW("Could not close audio socket");
|
||||
}
|
||||
}
|
||||
|
||||
if (control_socket != SC_SOCKET_NONE) {
|
||||
if (!net_close(control_socket)) {
|
||||
LOGW("Could not close control socket");
|
||||
@@ -886,11 +852,6 @@ run_server(void *data) {
|
||||
assert(server->video_socket != SC_SOCKET_NONE);
|
||||
net_interrupt(server->video_socket);
|
||||
|
||||
if (server->audio_socket != SC_SOCKET_NONE) {
|
||||
// There is no audio_socket if --no-audio is set
|
||||
net_interrupt(server->audio_socket);
|
||||
}
|
||||
|
||||
if (server->control_socket != SC_SOCKET_NONE) {
|
||||
// There is no control_socket if --no-control is set
|
||||
net_interrupt(server->control_socket);
|
||||
@@ -952,9 +913,6 @@ sc_server_destroy(struct sc_server *server) {
|
||||
if (server->video_socket != SC_SOCKET_NONE) {
|
||||
net_close(server->video_socket);
|
||||
}
|
||||
if (server->audio_socket != SC_SOCKET_NONE) {
|
||||
net_close(server->audio_socket);
|
||||
}
|
||||
if (server->control_socket != SC_SOCKET_NONE) {
|
||||
net_close(server->control_socket);
|
||||
}
|
||||
|
||||
@@ -38,7 +38,6 @@ struct sc_server_params {
|
||||
int8_t lock_video_orientation;
|
||||
bool control;
|
||||
uint32_t display_id;
|
||||
bool audio;
|
||||
bool show_touches;
|
||||
bool stay_awake;
|
||||
bool force_adb_forward;
|
||||
@@ -70,7 +69,6 @@ struct sc_server {
|
||||
struct sc_adb_tunnel tunnel;
|
||||
|
||||
sc_socket video_socket;
|
||||
sc_socket audio_socket;
|
||||
sc_socket control_socket;
|
||||
|
||||
const struct sc_server_callbacks *cbs;
|
||||
|
||||
@@ -22,7 +22,7 @@ sc_usb_on_disconnected(struct sc_usb *usb, void *userdata) {
|
||||
(void) userdata;
|
||||
|
||||
SDL_Event event;
|
||||
event.type = EVENT_USB_DEVICE_DISCONNECTED;
|
||||
event.type = SC_EVENT_USB_DEVICE_DISCONNECTED;
|
||||
int ret = SDL_PushEvent(&event);
|
||||
if (ret < 0) {
|
||||
LOGE("Could not post USB disconnection event: %s", SDL_GetError());
|
||||
@@ -34,7 +34,7 @@ event_loop(struct scrcpy_otg *s) {
|
||||
SDL_Event event;
|
||||
while (SDL_WaitEvent(&event)) {
|
||||
switch (event.type) {
|
||||
case EVENT_USB_DEVICE_DISCONNECTED:
|
||||
case SC_EVENT_USB_DEVICE_DISCONNECTED:
|
||||
LOGW("Device disconnected");
|
||||
return SCRCPY_EXIT_DISCONNECTED;
|
||||
case SDL_QUIT:
|
||||
|
||||
@@ -1,207 +0,0 @@
|
||||
package com.genymobile.scrcpy;
|
||||
|
||||
import com.genymobile.scrcpy.wrappers.ServiceManager;
|
||||
|
||||
import android.annotation.SuppressLint;
|
||||
import android.annotation.TargetApi;
|
||||
import android.content.ComponentName;
|
||||
import android.content.Intent;
|
||||
import android.media.AudioFormat;
|
||||
import android.media.AudioRecord;
|
||||
import android.media.AudioTimestamp;
|
||||
import android.media.MediaCodec;
|
||||
import android.media.MediaFormat;
|
||||
import android.media.MediaRecorder;
|
||||
import android.os.Build;
|
||||
import android.os.Handler;
|
||||
import android.os.HandlerThread;
|
||||
import android.os.SystemClock;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.nio.ByteBuffer;
|
||||
import java.util.concurrent.Semaphore;
|
||||
import java.util.concurrent.atomic.AtomicBoolean;
|
||||
|
||||
public final class AudioEncoder {
|
||||
|
||||
private static final String MIMETYPE = MediaFormat.MIMETYPE_AUDIO_OPUS;
|
||||
private static final int SAMPLE_RATE = 48000;
|
||||
private static final int CHANNELS = 2;
|
||||
private static final int BIT_RATE = 128000;
|
||||
|
||||
private static int BUFFER_MS = 15; // milliseconds
|
||||
private static final int BUFFER_SIZE = SAMPLE_RATE * CHANNELS * BUFFER_MS / 1000;
|
||||
|
||||
private AudioRecord recorder;
|
||||
private MediaCodec mediaCodec;
|
||||
private HandlerThread thread;
|
||||
private final AtomicBoolean interrupted = new AtomicBoolean();
|
||||
private final Semaphore endSemaphore = new Semaphore(0); // blocks until encoding is ended
|
||||
|
||||
private static AudioFormat createAudioFormat() {
|
||||
AudioFormat.Builder builder = new AudioFormat.Builder();
|
||||
builder.setEncoding(AudioFormat.ENCODING_PCM_16BIT);
|
||||
builder.setSampleRate(SAMPLE_RATE);
|
||||
builder.setChannelMask(CHANNELS == 2 ? AudioFormat.CHANNEL_IN_STEREO : AudioFormat.CHANNEL_IN_MONO);
|
||||
return builder.build();
|
||||
}
|
||||
|
||||
@TargetApi(Build.VERSION_CODES.M)
|
||||
@SuppressLint({"WrongConstant", "MissingPermission"})
|
||||
private static AudioRecord createAudioRecord() {
|
||||
AudioRecord.Builder builder = new AudioRecord.Builder();
|
||||
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.S) {
|
||||
// On older APIs, Workarounds.fillAppInfo() must be called beforehand
|
||||
builder.setContext(FakeContext.get());
|
||||
}
|
||||
builder.setAudioSource(MediaRecorder.AudioSource.REMOTE_SUBMIX);
|
||||
builder.setAudioFormat(createAudioFormat());
|
||||
builder.setBufferSizeInBytes(1024 * 1024);
|
||||
return builder.build();
|
||||
}
|
||||
|
||||
private static MediaFormat createFormat() {
|
||||
MediaFormat format = new MediaFormat();
|
||||
format.setString(MediaFormat.KEY_MIME, MIMETYPE);
|
||||
format.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE);
|
||||
format.setInteger(MediaFormat.KEY_CHANNEL_COUNT, CHANNELS);
|
||||
format.setInteger(MediaFormat.KEY_SAMPLE_RATE, SAMPLE_RATE);
|
||||
return format;
|
||||
}
|
||||
|
||||
|
||||
@TargetApi(Build.VERSION_CODES.M)
|
||||
public void start() throws IOException {
|
||||
mediaCodec = MediaCodec.createEncoderByType(MIMETYPE); // may throw IOException
|
||||
|
||||
recorder = createAudioRecord();
|
||||
|
||||
MediaFormat format = createFormat();
|
||||
mediaCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
|
||||
|
||||
recorder.startRecording();
|
||||
|
||||
thread = new HandlerThread("AudioEncoder");
|
||||
thread.start();
|
||||
|
||||
class AudioEncoderCallbacks extends MediaCodec.Callback {
|
||||
|
||||
private final AudioTimestamp timestamp = new AudioTimestamp();
|
||||
private long previousPts;
|
||||
private long nextPts;
|
||||
private boolean eofSignaled;
|
||||
private boolean ended;
|
||||
|
||||
private void notifyEnded() {
|
||||
assert !ended;
|
||||
ended = true;
|
||||
endSemaphore.release();
|
||||
}
|
||||
|
||||
@TargetApi(Build.VERSION_CODES.N)
|
||||
@Override
|
||||
public void onInputBufferAvailable(MediaCodec codec, int index) {
|
||||
if (eofSignaled) {
|
||||
return;
|
||||
}
|
||||
|
||||
ByteBuffer inputBuffer = codec.getInputBuffer(index);
|
||||
int r = recorder.read(inputBuffer, BUFFER_SIZE);
|
||||
|
||||
long pts;
|
||||
|
||||
int ret = recorder.getTimestamp(timestamp, AudioTimestamp.TIMEBASE_MONOTONIC);
|
||||
if (ret == AudioRecord.SUCCESS) {
|
||||
pts = timestamp.nanoTime / 1000;
|
||||
} else {
|
||||
if (nextPts == 0) {
|
||||
Ln.w("Could not get any audio timestamp");
|
||||
}
|
||||
// compute from previous timestamp and packet size
|
||||
pts = nextPts;
|
||||
}
|
||||
|
||||
long durationMs = r * 1000 / CHANNELS / SAMPLE_RATE;
|
||||
nextPts = pts + durationMs;
|
||||
|
||||
int flags = 0;
|
||||
if (interrupted.get()) {
|
||||
flags = flags | MediaCodec.BUFFER_FLAG_END_OF_STREAM;
|
||||
eofSignaled = true;
|
||||
}
|
||||
|
||||
if (previousPts != 0 && pts < previousPts) {
|
||||
// Audio PTS may come from two sources:
|
||||
// - recorder.getTimestamp() if the call works;
|
||||
// - an estimation from the previous PTS and the packet size as a fallback.
|
||||
//
|
||||
// Therefore, the property that PTS are monotonically increasing is no guaranteed in corner cases, so enforce it.
|
||||
pts = previousPts + 1;
|
||||
}
|
||||
|
||||
codec.queueInputBuffer(index, 0, r, pts, flags);
|
||||
|
||||
previousPts = pts;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onOutputBufferAvailable(MediaCodec codec, int index, MediaCodec.BufferInfo bufferInfo) {
|
||||
if (ended) {
|
||||
return;
|
||||
}
|
||||
|
||||
ByteBuffer codecBuffer = codec.getOutputBuffer(index);
|
||||
try {
|
||||
boolean isConfig = (bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0;
|
||||
long pts = bufferInfo.presentationTimeUs;
|
||||
Ln.i("Audio packet: pts=" + pts + " " + codecBuffer.remaining() + " bytes");
|
||||
} finally {
|
||||
codec.releaseOutputBuffer(index, false);
|
||||
}
|
||||
|
||||
boolean eof = (bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0;
|
||||
if (eof) {
|
||||
notifyEnded();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onError(MediaCodec codec, MediaCodec.CodecException e) {
|
||||
Ln.e("MediaCodec error", e);
|
||||
if (!ended) {
|
||||
notifyEnded();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onOutputFormatChanged(MediaCodec codec, MediaFormat format) {
|
||||
// ignore
|
||||
}
|
||||
}
|
||||
|
||||
mediaCodec.setCallback(new AudioEncoderCallbacks(), new Handler(thread.getLooper()));
|
||||
mediaCodec.start();
|
||||
}
|
||||
|
||||
private void waitEnded() {
|
||||
try {
|
||||
endSemaphore.acquire();
|
||||
} catch (InterruptedException e) {
|
||||
// ignore
|
||||
}
|
||||
}
|
||||
|
||||
public void stop() {
|
||||
Ln.i("==== STOP");
|
||||
if (thread != null) {
|
||||
interrupted.set(true);
|
||||
waitEnded();
|
||||
thread.interrupt();
|
||||
thread = null;
|
||||
mediaCodec.stop();
|
||||
mediaCodec.release();
|
||||
recorder.stop();
|
||||
Ln.i("==== STOPPED");
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -90,6 +90,7 @@ public class Controller {
|
||||
control();
|
||||
} catch (IOException e) {
|
||||
// this is expected on close
|
||||
} finally {
|
||||
Ln.d("Controller stopped");
|
||||
}
|
||||
});
|
||||
@@ -100,11 +101,17 @@ public class Controller {
|
||||
public void stop() {
|
||||
if (thread != null) {
|
||||
thread.interrupt();
|
||||
thread = null;
|
||||
}
|
||||
sender.stop();
|
||||
}
|
||||
|
||||
public void join() throws InterruptedException {
|
||||
if (thread != null) {
|
||||
thread.join();
|
||||
}
|
||||
sender.join();
|
||||
}
|
||||
|
||||
public DeviceMessageSender getSender() {
|
||||
return sender;
|
||||
}
|
||||
|
||||
@@ -20,9 +20,6 @@ public final class DesktopConnection implements Closeable {
|
||||
private final LocalSocket videoSocket;
|
||||
private final FileDescriptor videoFd;
|
||||
|
||||
private final LocalSocket audioSocket;
|
||||
private final FileDescriptor audioFd;
|
||||
|
||||
private final LocalSocket controlSocket;
|
||||
private final InputStream controlInputStream;
|
||||
private final OutputStream controlOutputStream;
|
||||
@@ -30,10 +27,9 @@ public final class DesktopConnection implements Closeable {
|
||||
private final ControlMessageReader reader = new ControlMessageReader();
|
||||
private final DeviceMessageWriter writer = new DeviceMessageWriter();
|
||||
|
||||
private DesktopConnection(LocalSocket videoSocket, LocalSocket audioSocket, LocalSocket controlSocket) throws IOException {
|
||||
private DesktopConnection(LocalSocket videoSocket, LocalSocket controlSocket) throws IOException {
|
||||
this.videoSocket = videoSocket;
|
||||
this.controlSocket = controlSocket;
|
||||
this.audioSocket = audioSocket;
|
||||
if (controlSocket != null) {
|
||||
controlInputStream = controlSocket.getInputStream();
|
||||
controlOutputStream = controlSocket.getOutputStream();
|
||||
@@ -42,7 +38,6 @@ public final class DesktopConnection implements Closeable {
|
||||
controlOutputStream = null;
|
||||
}
|
||||
videoFd = videoSocket.getFileDescriptor();
|
||||
audioFd = audioSocket != null ? audioSocket.getFileDescriptor() : null;
|
||||
}
|
||||
|
||||
private static LocalSocket connect(String abstractName) throws IOException {
|
||||
@@ -60,47 +55,40 @@ public final class DesktopConnection implements Closeable {
|
||||
return SOCKET_NAME_PREFIX + String.format("_%08x", uid);
|
||||
}
|
||||
|
||||
public static DesktopConnection open(int uid, boolean tunnelForward, boolean audio, boolean control, boolean sendDummyByte) throws IOException {
|
||||
public static DesktopConnection open(int uid, boolean tunnelForward, boolean control, boolean sendDummyByte) throws IOException {
|
||||
String socketName = getSocketName(uid);
|
||||
|
||||
LocalSocket videoSocket = null;
|
||||
LocalSocket audioSocket = null;
|
||||
LocalSocket videoSocket;
|
||||
LocalSocket controlSocket = null;
|
||||
try {
|
||||
if (tunnelForward) {
|
||||
try (LocalServerSocket localServerSocket = new LocalServerSocket(socketName)) {
|
||||
videoSocket = localServerSocket.accept();
|
||||
if (sendDummyByte) {
|
||||
// send one byte so the client may read() to detect a connection error
|
||||
videoSocket.getOutputStream().write(0);
|
||||
}
|
||||
if (audio) {
|
||||
audioSocket = localServerSocket.accept();
|
||||
}
|
||||
if (control) {
|
||||
controlSocket = localServerSocket.accept();
|
||||
}
|
||||
if (tunnelForward) {
|
||||
try (LocalServerSocket localServerSocket = new LocalServerSocket(socketName)) {
|
||||
videoSocket = localServerSocket.accept();
|
||||
if (sendDummyByte) {
|
||||
// send one byte so the client may read() to detect a connection error
|
||||
videoSocket.getOutputStream().write(0);
|
||||
}
|
||||
} else {
|
||||
videoSocket = connect(socketName);
|
||||
if (control) {
|
||||
controlSocket = connect(socketName);
|
||||
try {
|
||||
controlSocket = localServerSocket.accept();
|
||||
} catch (IOException | RuntimeException e) {
|
||||
videoSocket.close();
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (IOException | RuntimeException e) {
|
||||
if (videoSocket != null) {
|
||||
videoSocket.close();
|
||||
} else {
|
||||
videoSocket = connect(socketName);
|
||||
if (control) {
|
||||
try {
|
||||
controlSocket = connect(socketName);
|
||||
} catch (IOException | RuntimeException e) {
|
||||
videoSocket.close();
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
if (audioSocket != null) {
|
||||
audioSocket.close();
|
||||
}
|
||||
if (controlSocket != null) {
|
||||
controlSocket.close();
|
||||
}
|
||||
throw e;
|
||||
}
|
||||
|
||||
return new DesktopConnection(videoSocket, audioSocket, controlSocket);
|
||||
return new DesktopConnection(videoSocket, controlSocket);
|
||||
}
|
||||
|
||||
public void close() throws IOException {
|
||||
@@ -133,10 +121,6 @@ public final class DesktopConnection implements Closeable {
|
||||
return videoFd;
|
||||
}
|
||||
|
||||
public FileDescriptor getAudioFd() {
|
||||
return audioFd;
|
||||
}
|
||||
|
||||
public ControlMessage receiveControlMessage() throws IOException {
|
||||
ControlMessage msg = reader.next();
|
||||
while (msg == null) {
|
||||
|
||||
@@ -277,6 +277,26 @@ public final class Device {
|
||||
* @param mode one of the {@code POWER_MODE_*} constants
|
||||
*/
|
||||
public static boolean setScreenPowerMode(int mode) {
|
||||
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
|
||||
// Change the power mode for all physical displays
|
||||
long[] physicalDisplayIds = SurfaceControl.getPhysicalDisplayIds();
|
||||
if (physicalDisplayIds == null) {
|
||||
Ln.e("Could not get physical display ids");
|
||||
return false;
|
||||
}
|
||||
|
||||
boolean allOk = true;
|
||||
for (long physicalDisplayId : physicalDisplayIds) {
|
||||
IBinder binder = SurfaceControl.getPhysicalDisplayToken(physicalDisplayId);
|
||||
boolean ok = SurfaceControl.setDisplayPowerMode(binder, mode);
|
||||
if (!ok) {
|
||||
allOk = false;
|
||||
}
|
||||
}
|
||||
return allOk;
|
||||
}
|
||||
|
||||
// Older Android versions, only 1 display
|
||||
IBinder d = SurfaceControl.getBuiltInDisplay();
|
||||
if (d == null) {
|
||||
Ln.e("Could not get built-in display");
|
||||
|
||||
@@ -57,6 +57,7 @@ public final class DeviceMessageSender {
|
||||
loop();
|
||||
} catch (IOException | InterruptedException e) {
|
||||
// this is expected on close
|
||||
} finally {
|
||||
Ln.d("Device message sender stopped");
|
||||
}
|
||||
});
|
||||
@@ -66,7 +67,12 @@ public final class DeviceMessageSender {
|
||||
public void stop() {
|
||||
if (thread != null) {
|
||||
thread.interrupt();
|
||||
thread = null;
|
||||
}
|
||||
}
|
||||
|
||||
public void join() throws InterruptedException {
|
||||
if (thread != null) {
|
||||
thread.join();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,40 +0,0 @@
|
||||
package com.genymobile.scrcpy;
|
||||
|
||||
import android.annotation.TargetApi;
|
||||
import android.content.AttributionSource;
|
||||
import android.content.ContextWrapper;
|
||||
import android.os.Build;
|
||||
import android.os.Process;
|
||||
|
||||
public final class FakeContext extends ContextWrapper {
|
||||
|
||||
public static final String PACKAGE_NAME = "com.android.shell";
|
||||
|
||||
private static final FakeContext INSTANCE = new FakeContext();
|
||||
|
||||
public static FakeContext get() {
|
||||
return INSTANCE;
|
||||
}
|
||||
|
||||
private FakeContext() {
|
||||
super(null);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getPackageName() {
|
||||
return PACKAGE_NAME;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getOpPackageName() {
|
||||
return PACKAGE_NAME;
|
||||
}
|
||||
|
||||
@TargetApi(Build.VERSION_CODES.S)
|
||||
@Override
|
||||
public AttributionSource getAttributionSource() {
|
||||
AttributionSource.Builder builder = new AttributionSource.Builder(Process.SHELL_UID);
|
||||
builder.setPackageName(PACKAGE_NAME);
|
||||
return builder.build();
|
||||
}
|
||||
}
|
||||
@@ -5,11 +5,9 @@ import android.graphics.Rect;
|
||||
import java.util.List;
|
||||
|
||||
public class Options {
|
||||
private static final String VIDEO_CODEC_H264 = "h264";
|
||||
|
||||
private Ln.Level logLevel = Ln.Level.DEBUG;
|
||||
private int uid = -1; // 31-bit non-negative value, or -1
|
||||
private boolean audio = true;
|
||||
private int maxSize;
|
||||
private VideoCodec codec = VideoCodec.H264;
|
||||
private int bitRate = 8000000;
|
||||
@@ -51,14 +49,6 @@ public class Options {
|
||||
this.uid = uid;
|
||||
}
|
||||
|
||||
public boolean getAudio() {
|
||||
return audio;
|
||||
}
|
||||
|
||||
public void setAudio(boolean audio) {
|
||||
this.audio = audio;
|
||||
}
|
||||
|
||||
public int getMaxSize() {
|
||||
return maxSize;
|
||||
}
|
||||
|
||||
@@ -69,31 +69,22 @@ public final class Server {
|
||||
int uid = options.getUid();
|
||||
boolean tunnelForward = options.isTunnelForward();
|
||||
boolean control = options.getControl();
|
||||
boolean audio = options.getAudio();
|
||||
boolean sendDummyByte = options.getSendDummyByte();
|
||||
|
||||
Workarounds.prepareMainLooper();
|
||||
|
||||
// Workarounds must be applied for Meizu phones:
|
||||
// - <https://github.com/Genymobile/scrcpy/issues/240>
|
||||
// - <https://github.com/Genymobile/scrcpy/issues/365>
|
||||
// - <https://github.com/Genymobile/scrcpy/issues/2656>
|
||||
//
|
||||
// But only apply when strictly necessary, since workarounds can cause other issues:
|
||||
// - <https://github.com/Genymobile/scrcpy/issues/940>
|
||||
// - <https://github.com/Genymobile/scrcpy/issues/994>
|
||||
boolean mustFillAppInfo = Build.BRAND.equalsIgnoreCase("meizu");
|
||||
|
||||
// Before Android 11, audio is not supported.
|
||||
// Since Android 12, we can properly set a context on the AudioRecord.
|
||||
// Only on Android 11 we must fill app info for the AudioRecord to work.
|
||||
mustFillAppInfo |= audio && Build.VERSION.SDK_INT == Build.VERSION_CODES.R;
|
||||
|
||||
if (mustFillAppInfo) {
|
||||
if (Build.BRAND.equalsIgnoreCase("meizu")) {
|
||||
// Workarounds must be applied for Meizu phones:
|
||||
// - <https://github.com/Genymobile/scrcpy/issues/240>
|
||||
// - <https://github.com/Genymobile/scrcpy/issues/365>
|
||||
// - <https://github.com/Genymobile/scrcpy/issues/2656>
|
||||
//
|
||||
// But only apply when strictly necessary, since workarounds can cause other issues:
|
||||
// - <https://github.com/Genymobile/scrcpy/issues/940>
|
||||
// - <https://github.com/Genymobile/scrcpy/issues/994>
|
||||
Workarounds.fillAppInfo();
|
||||
}
|
||||
|
||||
try (DesktopConnection connection = DesktopConnection.open(uid, tunnelForward, audio, control, sendDummyByte)) {
|
||||
try (DesktopConnection connection = DesktopConnection.open(uid, tunnelForward, control, sendDummyByte)) {
|
||||
VideoCodec codec = options.getCodec();
|
||||
if (options.getSendDeviceMeta()) {
|
||||
Size videoSize = device.getScreenInfo().getVideoSize();
|
||||
@@ -111,12 +102,6 @@ public final class Server {
|
||||
device.setClipboardListener(text -> controllerRef.getSender().pushClipboardText(text));
|
||||
}
|
||||
|
||||
AudioEncoder audioEncoder = null;
|
||||
if (audio) {
|
||||
audioEncoder = new AudioEncoder();
|
||||
audioEncoder.start();
|
||||
}
|
||||
|
||||
try {
|
||||
// synchronous
|
||||
VideoStreamer videoStreamer = new VideoStreamer(connection.getVideoFd(), options.getSendFrameMeta());
|
||||
@@ -126,14 +111,20 @@ public final class Server {
|
||||
screenEncoder.streamScreen(device, videoStreamer);
|
||||
} catch (IOException e) {
|
||||
// this is expected on close
|
||||
Ln.d("Screen streaming stopped");
|
||||
} finally {
|
||||
Ln.d("Screen streaming stopped");
|
||||
initThread.interrupt();
|
||||
if (controller != null) {
|
||||
controller.stop();
|
||||
}
|
||||
if (audioEncoder != null) {
|
||||
audioEncoder.stop();
|
||||
|
||||
try {
|
||||
initThread.join();
|
||||
if (controller != null) {
|
||||
controller.join();
|
||||
}
|
||||
} catch (InterruptedException e) {
|
||||
// ignore
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -178,10 +169,6 @@ public final class Server {
|
||||
Ln.Level level = Ln.Level.valueOf(value.toUpperCase(Locale.ENGLISH));
|
||||
options.setLogLevel(level);
|
||||
break;
|
||||
case "audio":
|
||||
boolean audio = Boolean.parseBoolean(value);
|
||||
options.setAudio(audio);
|
||||
break;
|
||||
case "codec":
|
||||
VideoCodec codec = VideoCodec.findByName(value);
|
||||
if (codec == null) {
|
||||
|
||||
@@ -3,12 +3,13 @@ package com.genymobile.scrcpy;
|
||||
import android.annotation.SuppressLint;
|
||||
import android.app.Application;
|
||||
import android.app.Instrumentation;
|
||||
import android.content.ContextWrapper;
|
||||
import android.content.Context;
|
||||
import android.content.pm.ApplicationInfo;
|
||||
import android.os.Looper;
|
||||
|
||||
import java.lang.reflect.Constructor;
|
||||
import java.lang.reflect.Field;
|
||||
import java.lang.reflect.Method;
|
||||
|
||||
public final class Workarounds {
|
||||
private Workarounds() {
|
||||
@@ -49,7 +50,7 @@ public final class Workarounds {
|
||||
Object appBindData = appBindDataConstructor.newInstance();
|
||||
|
||||
ApplicationInfo applicationInfo = new ApplicationInfo();
|
||||
applicationInfo.packageName = FakeContext.PACKAGE_NAME;
|
||||
applicationInfo.packageName = "com.genymobile.scrcpy";
|
||||
|
||||
// appBindData.appInfo = applicationInfo;
|
||||
Field appInfoField = appBindDataClass.getDeclaredField("appInfo");
|
||||
@@ -61,10 +62,11 @@ public final class Workarounds {
|
||||
mBoundApplicationField.setAccessible(true);
|
||||
mBoundApplicationField.set(activityThread, appBindData);
|
||||
|
||||
Application app = Application.class.newInstance();
|
||||
Field baseField = ContextWrapper.class.getDeclaredField("mBase");
|
||||
baseField.setAccessible(true);
|
||||
baseField.set(app, FakeContext.get());
|
||||
// Context ctx = activityThread.getSystemContext();
|
||||
Method getSystemContextMethod = activityThreadClass.getDeclaredMethod("getSystemContext");
|
||||
Context ctx = (Context) getSystemContextMethod.invoke(activityThread);
|
||||
|
||||
Application app = Instrumentation.newApplication(Application.class, ctx);
|
||||
|
||||
// activityThread.mInitialApplication = app;
|
||||
Field mInitialApplicationField = activityThreadClass.getDeclaredField("mInitialApplication");
|
||||
|
||||
@@ -5,7 +5,6 @@ import com.genymobile.scrcpy.Ln;
|
||||
import android.os.Binder;
|
||||
import android.os.IBinder;
|
||||
import android.os.IInterface;
|
||||
import android.os.Process;
|
||||
|
||||
import java.lang.reflect.Field;
|
||||
import java.lang.reflect.InvocationTargetException;
|
||||
@@ -49,10 +48,10 @@ public class ActivityManager {
|
||||
Object[] args;
|
||||
if (getContentProviderExternalMethodNewVersion) {
|
||||
// new version
|
||||
args = new Object[]{name, Process.ROOT_UID, token, null};
|
||||
args = new Object[]{name, ServiceManager.USER_ID, token, null};
|
||||
} else {
|
||||
// old version
|
||||
args = new Object[]{name, Process.ROOT_UID, token};
|
||||
args = new Object[]{name, ServiceManager.USER_ID, token};
|
||||
}
|
||||
// ContentProviderHolder providerHolder = getContentProviderExternal(...);
|
||||
Object providerHolder = method.invoke(manager, args);
|
||||
|
||||
@@ -1,13 +1,11 @@
|
||||
package com.genymobile.scrcpy.wrappers;
|
||||
|
||||
import com.genymobile.scrcpy.FakeContext;
|
||||
import com.genymobile.scrcpy.Ln;
|
||||
|
||||
import android.content.ClipData;
|
||||
import android.content.IOnPrimaryClipChangedListener;
|
||||
import android.os.Build;
|
||||
import android.os.IInterface;
|
||||
import android.os.Process;
|
||||
|
||||
import java.lang.reflect.InvocationTargetException;
|
||||
import java.lang.reflect.Method;
|
||||
@@ -60,22 +58,22 @@ public class ClipboardManager {
|
||||
private static ClipData getPrimaryClip(Method method, boolean alternativeMethod, IInterface manager)
|
||||
throws InvocationTargetException, IllegalAccessException {
|
||||
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.Q) {
|
||||
return (ClipData) method.invoke(manager, FakeContext.PACKAGE_NAME);
|
||||
return (ClipData) method.invoke(manager, ServiceManager.PACKAGE_NAME);
|
||||
}
|
||||
if (alternativeMethod) {
|
||||
return (ClipData) method.invoke(manager, FakeContext.PACKAGE_NAME, null, Process.ROOT_UID);
|
||||
return (ClipData) method.invoke(manager, ServiceManager.PACKAGE_NAME, null, ServiceManager.USER_ID);
|
||||
}
|
||||
return (ClipData) method.invoke(manager, FakeContext.PACKAGE_NAME, Process.ROOT_UID);
|
||||
return (ClipData) method.invoke(manager, ServiceManager.PACKAGE_NAME, ServiceManager.USER_ID);
|
||||
}
|
||||
|
||||
private static void setPrimaryClip(Method method, boolean alternativeMethod, IInterface manager, ClipData clipData)
|
||||
throws InvocationTargetException, IllegalAccessException {
|
||||
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.Q) {
|
||||
method.invoke(manager, clipData, FakeContext.PACKAGE_NAME);
|
||||
method.invoke(manager, clipData, ServiceManager.PACKAGE_NAME);
|
||||
} else if (alternativeMethod) {
|
||||
method.invoke(manager, clipData, FakeContext.PACKAGE_NAME, null, Process.ROOT_UID);
|
||||
method.invoke(manager, clipData, ServiceManager.PACKAGE_NAME, null, ServiceManager.USER_ID);
|
||||
} else {
|
||||
method.invoke(manager, clipData, FakeContext.PACKAGE_NAME, Process.ROOT_UID);
|
||||
method.invoke(manager, clipData, ServiceManager.PACKAGE_NAME, ServiceManager.USER_ID);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -108,11 +106,11 @@ public class ClipboardManager {
|
||||
private static void addPrimaryClipChangedListener(Method method, boolean alternativeMethod, IInterface manager,
|
||||
IOnPrimaryClipChangedListener listener) throws InvocationTargetException, IllegalAccessException {
|
||||
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.Q) {
|
||||
method.invoke(manager, listener, FakeContext.PACKAGE_NAME);
|
||||
method.invoke(manager, listener, ServiceManager.PACKAGE_NAME);
|
||||
} else if (alternativeMethod) {
|
||||
method.invoke(manager, listener, FakeContext.PACKAGE_NAME, null, Process.ROOT_UID);
|
||||
method.invoke(manager, listener, ServiceManager.PACKAGE_NAME, null, ServiceManager.USER_ID);
|
||||
} else {
|
||||
method.invoke(manager, listener, FakeContext.PACKAGE_NAME, Process.ROOT_UID);
|
||||
method.invoke(manager, listener, ServiceManager.PACKAGE_NAME, ServiceManager.USER_ID);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -1,15 +1,11 @@
|
||||
package com.genymobile.scrcpy.wrappers;
|
||||
|
||||
import com.genymobile.scrcpy.FakeContext;
|
||||
import com.genymobile.scrcpy.Ln;
|
||||
import com.genymobile.scrcpy.SettingsException;
|
||||
|
||||
import android.annotation.SuppressLint;
|
||||
import android.content.AttributionSource;
|
||||
import android.os.Build;
|
||||
import android.os.Bundle;
|
||||
import android.os.IBinder;
|
||||
import android.os.Process;
|
||||
|
||||
import java.io.Closeable;
|
||||
import java.lang.reflect.InvocationTargetException;
|
||||
@@ -55,10 +51,11 @@ public class ContentProvider implements Closeable {
|
||||
@SuppressLint("PrivateApi")
|
||||
private Method getCallMethod() throws NoSuchMethodException {
|
||||
if (callMethod == null) {
|
||||
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.S) {
|
||||
callMethod = provider.getClass().getMethod("call", AttributionSource.class, String.class, String.class, String.class, Bundle.class);
|
||||
try {
|
||||
Class<?> attributionSourceClass = Class.forName("android.content.AttributionSource");
|
||||
callMethod = provider.getClass().getMethod("call", attributionSourceClass, String.class, String.class, String.class, Bundle.class);
|
||||
callMethodVersion = 0;
|
||||
} else {
|
||||
} catch (NoSuchMethodException | ClassNotFoundException e0) {
|
||||
// old versions
|
||||
try {
|
||||
callMethod = provider.getClass()
|
||||
@@ -78,29 +75,40 @@ public class ContentProvider implements Closeable {
|
||||
return callMethod;
|
||||
}
|
||||
|
||||
@SuppressLint("PrivateApi")
|
||||
private Object getAttributionSource()
|
||||
throws ClassNotFoundException, NoSuchMethodException, IllegalAccessException, InvocationTargetException, InstantiationException {
|
||||
if (attributionSource == null) {
|
||||
Class<?> cl = Class.forName("android.content.AttributionSource$Builder");
|
||||
Object builder = cl.getConstructor(int.class).newInstance(ServiceManager.USER_ID);
|
||||
cl.getDeclaredMethod("setPackageName", String.class).invoke(builder, ServiceManager.PACKAGE_NAME);
|
||||
attributionSource = cl.getDeclaredMethod("build").invoke(builder);
|
||||
}
|
||||
|
||||
return attributionSource;
|
||||
}
|
||||
|
||||
private Bundle call(String callMethod, String arg, Bundle extras)
|
||||
throws NoSuchMethodException, InvocationTargetException, IllegalAccessException {
|
||||
throws ClassNotFoundException, NoSuchMethodException, InvocationTargetException, InstantiationException, IllegalAccessException {
|
||||
try {
|
||||
Method method = getCallMethod();
|
||||
Object[] args;
|
||||
|
||||
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.S && callMethodVersion == 0) {
|
||||
args = new Object[]{FakeContext.get().getAttributionSource(), "settings", callMethod, arg, extras};
|
||||
} else {
|
||||
switch (callMethodVersion) {
|
||||
case 1:
|
||||
args = new Object[]{FakeContext.PACKAGE_NAME, null, "settings", callMethod, arg, extras};
|
||||
break;
|
||||
case 2:
|
||||
args = new Object[]{FakeContext.PACKAGE_NAME, "settings", callMethod, arg, extras};
|
||||
break;
|
||||
default:
|
||||
args = new Object[]{FakeContext.PACKAGE_NAME, callMethod, arg, extras};
|
||||
break;
|
||||
}
|
||||
switch (callMethodVersion) {
|
||||
case 0:
|
||||
args = new Object[]{getAttributionSource(), "settings", callMethod, arg, extras};
|
||||
break;
|
||||
case 1:
|
||||
args = new Object[]{ServiceManager.PACKAGE_NAME, null, "settings", callMethod, arg, extras};
|
||||
break;
|
||||
case 2:
|
||||
args = new Object[]{ServiceManager.PACKAGE_NAME, "settings", callMethod, arg, extras};
|
||||
break;
|
||||
default:
|
||||
args = new Object[]{ServiceManager.PACKAGE_NAME, callMethod, arg, extras};
|
||||
break;
|
||||
}
|
||||
return (Bundle) method.invoke(provider, args);
|
||||
} catch (InvocationTargetException | IllegalAccessException | NoSuchMethodException e) {
|
||||
} catch (InvocationTargetException | IllegalAccessException | NoSuchMethodException | ClassNotFoundException | InstantiationException e) {
|
||||
Ln.e("Could not invoke method", e);
|
||||
throw e;
|
||||
}
|
||||
@@ -139,7 +147,7 @@ public class ContentProvider implements Closeable {
|
||||
public String getValue(String table, String key) throws SettingsException {
|
||||
String method = getGetMethod(table);
|
||||
Bundle arg = new Bundle();
|
||||
arg.putInt(CALL_METHOD_USER_KEY, Process.ROOT_UID);
|
||||
arg.putInt(CALL_METHOD_USER_KEY, ServiceManager.USER_ID);
|
||||
try {
|
||||
Bundle bundle = call(method, key, arg);
|
||||
if (bundle == null) {
|
||||
@@ -155,7 +163,7 @@ public class ContentProvider implements Closeable {
|
||||
public void putValue(String table, String key, String value) throws SettingsException {
|
||||
String method = getPutMethod(table);
|
||||
Bundle arg = new Bundle();
|
||||
arg.putInt(CALL_METHOD_USER_KEY, Process.ROOT_UID);
|
||||
arg.putInt(CALL_METHOD_USER_KEY, ServiceManager.USER_ID);
|
||||
arg.putString(NAME_VALUE_TABLE_VALUE, value);
|
||||
try {
|
||||
call(method, key, arg);
|
||||
|
||||
@@ -10,6 +10,9 @@ import java.lang.reflect.Method;
|
||||
@SuppressLint("PrivateApi,DiscouragedPrivateApi")
|
||||
public final class ServiceManager {
|
||||
|
||||
public static final String PACKAGE_NAME = "com.android.shell";
|
||||
public static final int USER_ID = 0;
|
||||
|
||||
private static final Method GET_SERVICE_METHOD;
|
||||
static {
|
||||
try {
|
||||
|
||||
@@ -30,6 +30,8 @@ public final class SurfaceControl {
|
||||
|
||||
private static Method getBuiltInDisplayMethod;
|
||||
private static Method setDisplayPowerModeMethod;
|
||||
private static Method getPhysicalDisplayTokenMethod;
|
||||
private static Method getPhysicalDisplayIdsMethod;
|
||||
|
||||
private SurfaceControl() {
|
||||
// only static methods
|
||||
@@ -98,7 +100,6 @@ public final class SurfaceControl {
|
||||
}
|
||||
|
||||
public static IBinder getBuiltInDisplay() {
|
||||
|
||||
try {
|
||||
Method method = getGetBuiltInDisplayMethod();
|
||||
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.Q) {
|
||||
@@ -114,6 +115,40 @@ public final class SurfaceControl {
|
||||
}
|
||||
}
|
||||
|
||||
private static Method getGetPhysicalDisplayTokenMethod() throws NoSuchMethodException {
|
||||
if (getPhysicalDisplayTokenMethod == null) {
|
||||
getPhysicalDisplayTokenMethod = CLASS.getMethod("getPhysicalDisplayToken", long.class);
|
||||
}
|
||||
return getPhysicalDisplayTokenMethod;
|
||||
}
|
||||
|
||||
public static IBinder getPhysicalDisplayToken(long physicalDisplayId) {
|
||||
try {
|
||||
Method method = getGetPhysicalDisplayTokenMethod();
|
||||
return (IBinder) method.invoke(null, physicalDisplayId);
|
||||
} catch (InvocationTargetException | IllegalAccessException | NoSuchMethodException e) {
|
||||
Ln.e("Could not invoke method", e);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
private static Method getGetPhysicalDisplayIdsMethod() throws NoSuchMethodException {
|
||||
if (getPhysicalDisplayIdsMethod == null) {
|
||||
getPhysicalDisplayIdsMethod = CLASS.getMethod("getPhysicalDisplayIds");
|
||||
}
|
||||
return getPhysicalDisplayIdsMethod;
|
||||
}
|
||||
|
||||
public static long[] getPhysicalDisplayIds() {
|
||||
try {
|
||||
Method method = getGetPhysicalDisplayIdsMethod();
|
||||
return (long[]) method.invoke(null);
|
||||
} catch (InvocationTargetException | IllegalAccessException | NoSuchMethodException e) {
|
||||
Ln.e("Could not invoke method", e);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
private static Method getSetDisplayPowerModeMethod() throws NoSuchMethodException {
|
||||
if (setDisplayPowerModeMethod == null) {
|
||||
setDisplayPowerModeMethod = CLASS.getMethod("setDisplayPowerMode", IBinder.class, int.class);
|
||||
|
||||