Compare commits


2 Commits

Author / SHA1 / Message / Date

Avinash Sonawane
afb7b469d3 Replace raw number by its name
PR #4373 <https://github.com/Genymobile/scrcpy/pull/4373>
Signed-off-by: Romain Vimont <rom@rom1v.com>
2023-10-25 14:43:32 +02:00

Avinash Sonawane
ed43d423d9 Replace sprintf() with safer snprintf()
PR #4373 <https://github.com/Genymobile/scrcpy/pull/4373>
Co-authored-by: Romain Vimont <rom@rom1v.com>
Signed-off-by: Romain Vimont <rom@rom1v.com>
2023-10-25 14:42:19 +02:00
25 changed files with 122 additions and 1197 deletions
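Both commits harden formatted writes into fixed-size buffers in the C client: sprintf() becomes snprintf() and the return value is checked, as shown in the adb.c and test hunks further down; the adb.c lines differ only in how the error case is spelled (r >= 0 versus r != -1) and in the log message. Below is a rough standalone sketch of the pattern, not scrcpy code; the helper is hypothetical, and only the buffer sizing comment is copied from the adb.c hunk.

```c
#include <stdbool.h>
#include <stdio.h>

/* Hypothetical helper: format "localabstract:<name>" into buf.
 * snprintf() never overruns the buffer; it returns a negative value on
 * error, or the length it *would* have written, so a result greater than
 * or equal to bufsize means the output was truncated. */
static bool
format_socket_name(char *buf, size_t bufsize, const char *name) {
    int r = snprintf(buf, bufsize, "localabstract:%s", name);
    if (r < 0 || (size_t) r >= bufsize) {
        // error or truncation: the caller must not use buf
        return false;
    }
    return true;
}

int main(void) {
    char remote[108 + 14 + 1]; // localabstract:NAME, as sized in adb.c
    if (!format_socket_name(remote, sizeof(remote), "scrcpy_1234")) {
        fprintf(stderr, "Device socket name too long\n");
        return 1;
    }
    printf("%s\n", remote);
    return 0;
}
```

With plain sprintf() there is no size argument and no way to detect that the destination was too small, which is why the assert()/LOGE checks in adb.c only make sense once snprintf() is used.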

View File

@@ -10,14 +10,10 @@ _scrcpy() {
--audio-source=
--audio-output-buffer=
-b --video-bit-rate=
--camera-ar=
--camera-id=
--camera-facing=
--camera-size=
--crop=
-d --select-usb
--disable-screensaver
--display-id=
--display=
--display-buffer=
-e --select-tcpip
-f --fullscreen
@@ -27,8 +23,6 @@ _scrcpy() {
--kill-adb-on-close
-K --hid-keyboard
--legacy-paste
--list-camera-sizes
--list-cameras
--list-displays
--list-encoders
--lock-video-orientation
@@ -78,7 +72,6 @@ _scrcpy() {
--video-codec=
--video-codec-options=
--video-encoder=
--video-source=
-w --stay-awake
--window-borderless
--window-title=
@@ -98,18 +91,10 @@ _scrcpy() {
COMPREPLY=($(compgen -W 'opus aac raw' -- "$cur"))
return
;;
--video-source)
COMPREPLY=($(compgen -W 'display camera' -- "$cur"))
return
;;
--audio-source)
COMPREPLY=($(compgen -W 'output mic' -- "$cur"))
return
;;
--camera-facing)
COMPREPLY=($(compgen -W 'front back external' -- "$cur"))
return
;;
--lock-video-orientation)
COMPREPLY=($(compgen -W 'unlocked initial 0 1 2 3' -- "$cur"))
return
@@ -154,11 +139,8 @@ _scrcpy() {
|--audio-codec-options \
|--audio-encoder \
|--audio-output-buffer \
|--camera-ar \
|--camera-id \
|--camera-size \
|--crop \
|--display-id \
|--display \
|--display-buffer \
|--max-fps \
|-m|--max-size \

View File

@@ -17,14 +17,10 @@ arguments=(
'--audio-source=[Select the audio source]:source:(output mic)'
'--audio-output-buffer=[Configure the size of the SDL audio output buffer (in milliseconds)]'
{-b,--video-bit-rate=}'[Encode the video at the given bit-rate]'
'--camera-ar=[Select the camera size by its aspect ratio]'
'--camera-id=[Specify the camera id to mirror]'
'--camera-facing=[Select the device camera by its facing direction]:facing:(front back external)'
'--camera-size=[Specify an explicit camera capture size]'
'--crop=[\[width\:height\:x\:y\] Crop the device screen on the server]'
{-d,--select-usb}'[Use USB device]'
'--disable-screensaver[Disable screensaver while scrcpy is running]'
'--display-id=[Specify the display id to mirror]'
'--display=[Specify the display id to mirror]'
'--display-buffer=[Add a buffering delay \(in milliseconds\) before displaying]'
{-e,--select-tcpip}'[Use TCP/IP device]'
{-f,--fullscreen}'[Start in fullscreen]'
@@ -34,8 +30,6 @@ arguments=(
'--kill-adb-on-close[Kill adb when scrcpy terminates]'
{-K,--hid-keyboard}'[Simulate a physical keyboard by using HID over AOAv2]'
'--legacy-paste[Inject computer clipboard text as a sequence of key events on Ctrl+v]'
'--list-camera-sizes[List the valid camera capture sizes]'
'--list-cameras[List cameras available on the device]'
'--list-displays[List displays available on the device]'
'--list-encoders[List video and audio encoders available on the device]'
'--lock-video-orientation=[Lock video orientation]:orientation:(unlocked initial 0 1 2 3)'
@@ -82,7 +76,6 @@ arguments=(
'--video-codec=[Select the video codec]:codec:(h264 h265 av1)'
'--video-codec-options=[Set a list of comma-separated key\:type=value options for the device video encoder]'
'--video-encoder=[Use a specific MediaCodec video encoder]'
'--video-source=[Select the video source]:source:(display camera)'
{-w,--stay-awake}'[Keep the device on while scrcpy is running, when the device is plugged in]'
'--window-borderless[Disable window decorations \(display borderless window\)]'
'--window-title=[Set a custom window title]'

View File

@@ -75,28 +75,6 @@ Encode the video at the given bit rate, expressed in bits/s. Unit suffixes are s
Default is 8M (8000000).
.TP
.BI "\-\-camera\-ar " ar
Select the camera size by its aspect ratio (+/- 10%).
Possible values are "sensor" (use the camera sensor aspect ratio), "<num>:<den>" (e.g. "4:3") and "<value>" (e.g. "1.6").
.TP
.BI "\-\-camera\-id " id
Specify the device camera id to mirror.
The available camera ids can be listed by \-\-list\-cameras.
.TP
.BI "\-\-camera\-facing " facing
Select the device camera by its facing direction.
Possible values are "front", "back" and "external".
.TP
.BI "\-\-camera\-size " width\fRx\fIheight
Specify an explicit camera capture size.
.TP
.BI "\-\-crop " width\fR:\fIheight\fR:\fIx\fR:\fIy
Crop the device screen on the server.
@@ -116,7 +94,7 @@ Also see \fB\-e\fR (\fB\-\-select\-tcpip\fR).
Disable screensaver while scrcpy is running.
.TP
.BI "\-\-display\-id " id
.BI "\-\-display " id
Specify the device display id to mirror.
The available display ids can be listed by \-\-list\-displays.
@@ -177,12 +155,6 @@ Inject computer clipboard text as a sequence of key events on Ctrl+v (like MOD+S
This is a workaround for some devices not behaving as expected when setting the device clipboard programmatically.
.B \-\-list\-camera\-sizes
List the valid camera capture sizes.
.B \-\-list\-cameras
List cameras available on the device.
.TP
.B \-\-list\-encoders
List video and audio encoders available on the device.
@@ -454,14 +426,6 @@ Use a specific MediaCodec video encoder (depending on the codec provided by \fB\
The available encoders can be listed by \-\-list\-encoders.
.TP
.BI "\-\-video\-source " source
Select the video source (display or camera).
Camera mirroring requires Android 12+.
Default is display.
.TP
.B \-w, \-\-stay-awake
Keep the device on while scrcpy is running, when the device is plugged in.

View File

@@ -220,12 +220,12 @@ sc_adb_forward(struct sc_intr *intr, const char *serial, uint16_t local_port,
char remote[108 + 14 + 1]; // localabstract:NAME
int r = snprintf(local, sizeof(local), "tcp:%" PRIu16, local_port);
assert(r >= 0 && (size_t) r < sizeof(local));
assert(r != -1 && (size_t) r < sizeof(local));
r = snprintf(remote, sizeof(remote), "localabstract:%s",
device_socket_name);
if (r < 0 || (size_t) r >= sizeof(remote)) {
LOGE("Could not write socket name");
if (r == -1 || (size_t) r >= sizeof(remote)) {
LOGE("Device socket name too long");
return false;
}
@@ -242,7 +242,7 @@ sc_adb_forward_remove(struct sc_intr *intr, const char *serial,
uint16_t local_port, unsigned flags) {
char local[4 + 5 + 1]; // tcp:PORT
int r = snprintf(local, sizeof(local), "tcp:%" PRIu16, local_port);
assert(r >= 0 && (size_t) r < sizeof(local));
assert(r != -1 && (size_t) r < sizeof(local));
(void) r;
assert(serial);
@@ -260,12 +260,12 @@ sc_adb_reverse(struct sc_intr *intr, const char *serial,
char local[4 + 5 + 1]; // tcp:PORT
char remote[108 + 14 + 1]; // localabstract:NAME
int r = snprintf(local, sizeof(local), "tcp:%" PRIu16, local_port);
assert(r >= 0 && (size_t) r < sizeof(local));
assert(r != -1 && (size_t) r < sizeof(local));
r = snprintf(remote, sizeof(remote), "localabstract:%s",
device_socket_name);
if (r < 0 || (size_t) r >= sizeof(remote)) {
LOGE("Could not write socket name");
if (r == -1 || (size_t) r >= sizeof(remote)) {
LOGE("Device socket name too long");
return false;
}
@@ -283,7 +283,7 @@ sc_adb_reverse_remove(struct sc_intr *intr, const char *serial,
char remote[108 + 14 + 1]; // localabstract:NAME
int r = snprintf(remote, sizeof(remote), "localabstract:%s",
device_socket_name);
if (r < 0 || (size_t) r >= sizeof(remote)) {
if (r == -1 || (size_t) r >= sizeof(remote)) {
LOGE("Device socket name too long");
return false;
}
@@ -357,7 +357,7 @@ sc_adb_tcpip(struct sc_intr *intr, const char *serial, uint16_t port,
unsigned flags) {
char port_string[5 + 1];
int r = snprintf(port_string, sizeof(port_string), "%" PRIu16, port);
assert(r >= 0 && (size_t) r < sizeof(port_string));
assert(r != -1 && (size_t) r < sizeof(port_string));
(void) r;
assert(serial);

View File

@@ -32,7 +32,6 @@ enum {
OPT_WINDOW_BORDERLESS,
OPT_MAX_FPS,
OPT_LOCK_VIDEO_ORIENTATION,
OPT_DISPLAY,
OPT_DISPLAY_ID,
OPT_ROTATION,
OPT_RENDER_DRIVER,
@@ -77,17 +76,10 @@ enum {
OPT_NO_VIDEO,
OPT_NO_AUDIO_PLAYBACK,
OPT_NO_VIDEO_PLAYBACK,
OPT_VIDEO_SOURCE,
OPT_AUDIO_SOURCE,
OPT_KILL_ADB_ON_CLOSE,
OPT_TIME_LIMIT,
OPT_PAUSE_ON_EXIT,
OPT_LIST_CAMERAS,
OPT_LIST_CAMERA_SIZES,
OPT_CAMERA_ID,
OPT_CAMERA_SIZE,
OPT_CAMERA_FACING,
OPT_CAMERA_AR,
};
struct sc_option {
@@ -204,36 +196,6 @@ static const struct sc_option options[] = {
.longopt = "bit-rate",
.argdesc = "value",
},
{
.longopt_id = OPT_CAMERA_AR,
.longopt = "camera-ar",
.argdesc = "ar",
.text = "Select the camera size by its aspect ratio (+/- 10%).\n"
"Possible values are \"sensor\" (use the camera sensor aspect "
"ratio), \"<num>:<den>\" (e.g. \"4:3\") or \"<value>\" (e.g. "
"\"1.6\")."
},
{
.longopt_id = OPT_CAMERA_ID,
.longopt = "camera-id",
.argdesc = "id",
.text = "Specify the device camera id to mirror.\n"
"The available camera ids can be listed by:\n"
" scrcpy --list-cameras",
},
{
.longopt_id = OPT_CAMERA_FACING,
.longopt = "camera-facing",
.argdesc = "facing",
.text = "Select the device camera by its facing direction.\n"
"Possible values are \"front\", \"back\" and \"external\".",
},
{
.longopt_id = OPT_CAMERA_SIZE,
.longopt = "camera-size",
.argdesc = "<width>x<height>",
.text = "Specify an explicit camera capture size.",
},
{
// Not really deprecated (--codec has never been released), but without
// declaring an explicit --codec option, getopt_long() partial matching
@@ -269,15 +231,9 @@ static const struct sc_option options[] = {
.longopt = "disable-screensaver",
.text = "Disable screensaver while scrcpy is running.",
},
{
// deprecated
.longopt_id = OPT_DISPLAY,
.longopt = "display",
.argdesc = "id",
},
{
.longopt_id = OPT_DISPLAY_ID,
.longopt = "display-id",
.longopt = "display",
.argdesc = "id",
.text = "Specify the device display id to mirror.\n"
"The available display ids can be listed by:\n"
@@ -357,16 +313,6 @@ static const struct sc_option options[] = {
"This is a workaround for some devices not behaving as "
"expected when setting the device clipboard programmatically.",
},
{
.longopt_id = OPT_LIST_CAMERAS,
.longopt = "list-cameras",
.text = "List device cameras.",
},
{
.longopt_id = OPT_LIST_CAMERA_SIZES,
.longopt = "list-camera-sizes",
.text = "List the valid camera capture sizes.",
},
{
.longopt_id = OPT_LIST_DISPLAYS,
.longopt = "list-displays",
@@ -738,14 +684,6 @@ static const struct sc_option options[] = {
"codec provided by --video-codec).\n"
"The available encoders can be listed by --list-encoders.",
},
{
.longopt_id = OPT_VIDEO_SOURCE,
.longopt = "video-source",
.argdesc = "source",
.text = "Select the video source (display or camera).\n"
"Camera mirroring requires Android 12+.\n"
"Default is display.",
},
{
.shortopt = 'w',
.longopt = "stay-awake",
@@ -1266,9 +1204,9 @@ parse_integer_arg(const char *s, long *out, bool accept_suffix, long min,
}
static size_t
parse_integers_arg(const char *s, const char sep, size_t max_items, long *out,
long min, long max, const char *name) {
size_t count = sc_str_parse_integers(s, sep, max_items, out);
parse_integers_arg(const char *s, size_t max_items, long *out, long min,
long max, const char *name) {
size_t count = sc_str_parse_integers(s, ':', max_items, out);
if (!count) {
LOGE("Could not parse %s: %s", name, s);
return 0;
@@ -1424,7 +1362,7 @@ parse_window_dimension(const char *s, uint16_t *dimension) {
static bool
parse_port_range(const char *s, struct sc_port_range *port_range) {
long values[2];
size_t count = parse_integers_arg(s, ':', 2, values, 0, 0xFFFF, "port");
size_t count = parse_integers_arg(s, 2, values, 0, 0xFFFF, "port");
if (!count) {
return false;
}
@@ -1686,22 +1624,6 @@ parse_audio_codec(const char *optarg, enum sc_codec *codec) {
return false;
}
static bool
parse_video_source(const char *optarg, enum sc_video_source *source) {
if (!strcmp(optarg, "display")) {
*source = SC_VIDEO_SOURCE_DISPLAY;
return true;
}
if (!strcmp(optarg, "camera")) {
*source = SC_VIDEO_SOURCE_CAMERA;
return true;
}
LOGE("Unsupported video source: %s (expected display or camera)", optarg);
return false;
}
static bool
parse_audio_source(const char *optarg, enum sc_audio_source *source) {
if (!strcmp(optarg, "mic")) {
@@ -1718,34 +1640,6 @@ parse_audio_source(const char *optarg, enum sc_audio_source *source) {
return false;
}
static bool
parse_camera_facing(const char *optarg, enum sc_camera_facing *facing) {
if (!strcmp(optarg, "front")) {
*facing = SC_CAMERA_FACING_FRONT;
return true;
}
if (!strcmp(optarg, "back")) {
*facing = SC_CAMERA_FACING_BACK;
return true;
}
if (!strcmp(optarg, "external")) {
*facing = SC_CAMERA_FACING_EXTERNAL;
return true;
}
if (*optarg == '\0') {
// Empty string is a valid value (equivalent to not passing the option)
*facing = SC_CAMERA_FACING_ANY;
return true;
}
LOGE("Unsupported camera facing: %s (expected front, back or external)",
optarg);
return false;
}
static bool
parse_time_limit(const char *s, sc_tick *tick) {
long value;
@@ -1808,9 +1702,6 @@ parse_args_with_getopt(struct scrcpy_cli_args *args, int argc, char *argv[],
case OPT_CROP:
opts->crop = optarg;
break;
case OPT_DISPLAY:
LOGW("--display is deprecated, use --display-id instead.");
// fall through
case OPT_DISPLAY_ID:
if (!parse_display_id(optarg, &opts->display_id)) {
return false;
@@ -2092,16 +1983,10 @@ parse_args_with_getopt(struct scrcpy_cli_args *args, int argc, char *argv[],
return false;
#endif
case OPT_LIST_ENCODERS:
opts->list |= SC_OPTION_LIST_ENCODERS;
opts->list_encoders = true;
break;
case OPT_LIST_DISPLAYS:
opts->list |= SC_OPTION_LIST_DISPLAYS;
break;
case OPT_LIST_CAMERAS:
opts->list |= SC_OPTION_LIST_CAMERAS;
break;
case OPT_LIST_CAMERA_SIZES:
opts->list |= SC_OPTION_LIST_CAMERA_SIZES;
opts->list_displays = true;
break;
case OPT_REQUIRE_AUDIO:
opts->require_audio = true;
@@ -2117,11 +2002,6 @@ parse_args_with_getopt(struct scrcpy_cli_args *args, int argc, char *argv[],
return false;
}
break;
case OPT_VIDEO_SOURCE:
if (!parse_video_source(optarg, &opts->video_source)) {
return false;
}
break;
case OPT_AUDIO_SOURCE:
if (!parse_audio_source(optarg, &opts->audio_source)) {
return false;
@@ -2140,20 +2020,6 @@ parse_args_with_getopt(struct scrcpy_cli_args *args, int argc, char *argv[],
return false;
}
break;
case OPT_CAMERA_AR:
opts->camera_ar = optarg;
break;
case OPT_CAMERA_ID:
opts->camera_id = optarg;
break;
case OPT_CAMERA_SIZE:
opts->camera_size = optarg;
break;
case OPT_CAMERA_FACING:
if (!parse_camera_facing(optarg, &opts->camera_facing)) {
return false;
}
break;
default:
// getopt prints the error message on stderr
return false;
@@ -2247,57 +2113,6 @@ parse_args_with_getopt(struct scrcpy_cli_args *args, int argc, char *argv[],
opts->force_adb_forward = true;
}
if (opts->video_source == SC_VIDEO_SOURCE_CAMERA) {
if (opts->display_id) {
LOGE("--display-id is only available with --video-source=display");
return false;
}
if (opts->lock_video_orientation !=
SC_LOCK_VIDEO_ORIENTATION_UNLOCKED) {
LOGE("--lock-video-orientation is not supported for camera");
return false;
}
if (opts->camera_id && opts->camera_facing != SC_CAMERA_FACING_ANY) {
LOGE("Could not specify both --camera-id and --camera-facing");
return false;
}
if (opts->camera_size) {
if (opts->camera_ar) {
LOGE("Could not specify both --camera-size and -m/--max-size");
return false;
}
if (opts->camera_ar) {
LOGE("Could not specify both --camera-size and --camera-ar");
return false;
}
}
if (opts->control) {
LOGI("Camera video source: control disabled");
opts->control = false;
}
} else if (opts->camera_id
|| opts->camera_ar
|| opts->camera_facing != SC_CAMERA_FACING_ANY
|| opts->camera_size) {
LOGE("Camera options are only available with --video-source=camera");
return false;
}
if (opts->audio && opts->audio_source == SC_AUDIO_SOURCE_AUTO) {
// Select the audio source according to the video source
if (opts->video_source == SC_VIDEO_SOURCE_DISPLAY) {
opts->audio_source = SC_AUDIO_SOURCE_OUTPUT;
} else {
opts->audio_source = SC_AUDIO_SOURCE_MIC;
LOGI("Camera video source: microphone audio source selected");
}
}
if (opts->record_format && !opts->record_filename) {
LOGE("Record format specified without recording");
return false;
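The parse_integers_arg / parse_port_range hunk earlier in this file splits a string such as "27183:27199" on ':' and range-checks each value against 0..0xFFFF; the change only moves the separator between a function parameter and a hardcoded ':'. A minimal standalone sketch of that kind of parsing follows; it uses strtol and a simplified struct, and is not scrcpy's sc_str_parse_integers.

```c
#include <errno.h>
#include <inttypes.h>
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

struct port_range {
    uint16_t first;
    uint16_t last;
};

/* Parse "PORT" or "FIRST:LAST" into a port range; returns true on success. */
static bool
parse_port_range(const char *s, struct port_range *out) {
    char *end;
    errno = 0;
    long first = strtol(s, &end, 10);
    if (errno || end == s || first < 0 || first > 0xFFFF) {
        return false;
    }

    long last = first; // a single port means first == last
    if (*end == ':') {
        const char *rest = end + 1;
        errno = 0;
        last = strtol(rest, &end, 10);
        if (errno || end == rest || last < 0 || last > 0xFFFF) {
            return false;
        }
    }

    if (*end != '\0' || last < first) {
        return false;
    }

    out->first = (uint16_t) first;
    out->last = (uint16_t) last;
    return true;
}

int main(void) {
    struct port_range range;
    if (parse_port_range("27183:27199", &range)) {
        printf("ports %" PRIu16 " to %" PRIu16 "\n", range.first, range.last);
    }
    return 0;
}
```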

View File

@@ -11,18 +11,13 @@ const struct scrcpy_options scrcpy_options_default = {
.audio_codec_options = NULL,
.video_encoder = NULL,
.audio_encoder = NULL,
.camera_id = NULL,
.camera_size = NULL,
.camera_ar = NULL,
.log_level = SC_LOG_LEVEL_INFO,
.video_codec = SC_CODEC_H264,
.audio_codec = SC_CODEC_OPUS,
.video_source = SC_VIDEO_SOURCE_DISPLAY,
.audio_source = SC_AUDIO_SOURCE_AUTO,
.audio_source = SC_AUDIO_SOURCE_OUTPUT,
.record_format = SC_RECORD_FORMAT_AUTO,
.keyboard_input_mode = SC_KEYBOARD_INPUT_MODE_INJECT,
.mouse_input_mode = SC_MOUSE_INPUT_MODE_INJECT,
.camera_facing = SC_CAMERA_FACING_ANY,
.port_range = {
.first = DEFAULT_LOCAL_PORT_RANGE_FIRST,
.last = DEFAULT_LOCAL_PORT_RANGE_LAST,
@@ -84,6 +79,7 @@ const struct scrcpy_options scrcpy_options_default = {
.video = true,
.audio = true,
.require_audio = false,
.list_encoders = false,
.list_displays = false,
.kill_adb_on_close = false,
.list = 0,
};

View File

@@ -44,24 +44,11 @@ enum sc_codec {
SC_CODEC_RAW,
};
enum sc_video_source {
SC_VIDEO_SOURCE_DISPLAY,
SC_VIDEO_SOURCE_CAMERA,
};
enum sc_audio_source {
SC_AUDIO_SOURCE_AUTO, // OUTPUT for video DISPLAY, MIC for video CAMERA
SC_AUDIO_SOURCE_OUTPUT,
SC_AUDIO_SOURCE_MIC,
};
enum sc_camera_facing {
SC_CAMERA_FACING_ANY,
SC_CAMERA_FACING_FRONT,
SC_CAMERA_FACING_BACK,
SC_CAMERA_FACING_EXTERNAL,
};
enum sc_lock_video_orientation {
SC_LOCK_VIDEO_ORIENTATION_UNLOCKED = -1,
// lock the current orientation when scrcpy starts
@@ -130,18 +117,13 @@ struct scrcpy_options {
const char *audio_codec_options;
const char *video_encoder;
const char *audio_encoder;
const char *camera_id;
const char *camera_size;
const char *camera_ar;
enum sc_log_level log_level;
enum sc_codec video_codec;
enum sc_codec audio_codec;
enum sc_video_source video_source;
enum sc_audio_source audio_source;
enum sc_record_format record_format;
enum sc_keyboard_input_mode keyboard_input_mode;
enum sc_mouse_input_mode mouse_input_mode;
enum sc_camera_facing camera_facing;
struct sc_port_range port_range;
uint32_t tunnel_host;
uint16_t tunnel_port;
@@ -197,12 +179,9 @@ struct scrcpy_options {
bool video;
bool audio;
bool require_audio;
bool list_encoders;
bool list_displays;
bool kill_adb_on_close;
#define SC_OPTION_LIST_ENCODERS 0x1
#define SC_OPTION_LIST_DISPLAYS 0x2
#define SC_OPTION_LIST_CAMERAS 0x4
#define SC_OPTION_LIST_CAMERA_SIZES 0x8
uint8_t list;
};
extern const struct scrcpy_options scrcpy_options_default;
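One side of this options.h diff tracks the requested listings as separate booleans (list_encoders, list_displays); the other packs them into a single uint8_t bit set using the SC_OPTION_LIST_* flags, so "was any --list-* option given?" becomes one non-zero test (the if (options->list) check in scrcpy.c and if (params->list) in server.c). A minimal sketch of the bit-set variant; the flag names and values are copied from the hunk, everything else is simplified.

```c
#include <stdint.h>
#include <stdio.h>

#define SC_OPTION_LIST_ENCODERS     0x1
#define SC_OPTION_LIST_DISPLAYS     0x2
#define SC_OPTION_LIST_CAMERAS      0x4
#define SC_OPTION_LIST_CAMERA_SIZES 0x8

struct opts {
    uint8_t list; // bitwise OR of the SC_OPTION_LIST_* flags
};

int main(void) {
    struct opts opts = {0};

    // each --list-* option just ORs its flag in
    opts.list |= SC_OPTION_LIST_ENCODERS;
    opts.list |= SC_OPTION_LIST_CAMERAS;

    // a single test covers "any listing requested?"
    if (opts.list) {
        if (opts.list & SC_OPTION_LIST_ENCODERS) {
            printf("list encoders\n");
        }
        if (opts.list & SC_OPTION_LIST_DISPLAYS) {
            printf("list displays\n");
        }
        if (opts.list & SC_OPTION_LIST_CAMERAS) {
            printf("list cameras\n");
        }
        if (opts.list & SC_OPTION_LIST_CAMERA_SIZES) {
            printf("list camera sizes\n");
        }
    }
    return 0;
}
```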

View File

@@ -351,9 +351,7 @@ scrcpy(struct scrcpy_options *options) {
.log_level = options->log_level,
.video_codec = options->video_codec,
.audio_codec = options->audio_codec,
.video_source = options->video_source,
.audio_source = options->audio_source,
.camera_facing = options->camera_facing,
.crop = options->crop,
.port_range = options->port_range,
.tunnel_host = options->tunnel_host,
@@ -373,9 +371,6 @@ scrcpy(struct scrcpy_options *options) {
.audio_codec_options = options->audio_codec_options,
.video_encoder = options->video_encoder,
.audio_encoder = options->audio_encoder,
.camera_id = options->camera_id,
.camera_size = options->camera_size,
.camera_ar = options->camera_ar,
.force_adb_forward = options->force_adb_forward,
.power_off_on_close = options->power_off_on_close,
.clipboard_autosync = options->clipboard_autosync,
@@ -384,8 +379,9 @@ scrcpy(struct scrcpy_options *options) {
.tcpip_dst = options->tcpip_dst,
.cleanup = options->cleanup,
.power_on = options->power_on,
.list_encoders = options->list_encoders,
.list_displays = options->list_displays,
.kill_adb_on_close = options->kill_adb_on_close,
.list = options->list,
};
static const struct sc_server_callbacks cbs = {
@@ -403,7 +399,7 @@ scrcpy(struct scrcpy_options *options) {
server_started = true;
if (options->list) {
if (options->list_encoders || options->list_displays) {
bool ok = await_for_server(NULL);
ret = ok ? SCRCPY_EXIT_SUCCESS : SCRCPY_EXIT_FAILURE;
goto end;

View File

@@ -76,8 +76,6 @@ sc_server_params_destroy(struct sc_server_params *params) {
free((char *) params->video_encoder);
free((char *) params->audio_encoder);
free((char *) params->tcpip_dst);
free((char *) params->camera_id);
free((char *) params->camera_ar);
}
static bool
@@ -105,8 +103,6 @@ sc_server_params_copy(struct sc_server_params *dst,
COPY(video_encoder);
COPY(audio_encoder);
COPY(tcpip_dst);
COPY(camera_id);
COPY(camera_ar);
#undef COPY
return true;
@@ -185,20 +181,6 @@ sc_server_get_codec_name(enum sc_codec codec) {
}
}
static const char *
sc_server_get_camera_facing_name(enum sc_camera_facing camera_facing) {
switch (camera_facing) {
case SC_CAMERA_FACING_FRONT:
return "front";
case SC_CAMERA_FACING_BACK:
return "back";
case SC_CAMERA_FACING_EXTERNAL:
return "external";
default:
return NULL;
}
}
static sc_pid
execute_server(struct sc_server *server,
const struct sc_server_params *params) {
@@ -265,11 +247,8 @@ execute_server(struct sc_server *server,
ADD_PARAM("audio_codec=%s",
sc_server_get_codec_name(params->audio_codec));
}
if (params->video_source != SC_VIDEO_SOURCE_DISPLAY) {
assert(params->video_source == SC_VIDEO_SOURCE_CAMERA);
ADD_PARAM("video_source=camera");
}
if (params->audio_source == SC_AUDIO_SOURCE_MIC) {
if (params->audio_source != SC_AUDIO_SOURCE_OUTPUT) {
assert(params->audio_source == SC_AUDIO_SOURCE_MIC);
ADD_PARAM("audio_source=mic");
}
if (params->max_size) {
@@ -295,19 +274,6 @@ execute_server(struct sc_server *server,
if (params->display_id) {
ADD_PARAM("display_id=%" PRIu32, params->display_id);
}
if (params->camera_id) {
ADD_PARAM("camera_id=%s", params->camera_id);
}
if (params->camera_size) {
ADD_PARAM("camera_size=%s", params->camera_size);
}
if (params->camera_facing != SC_CAMERA_FACING_ANY) {
ADD_PARAM("camera_facing=%s",
sc_server_get_camera_facing_name(params->camera_facing));
}
if (params->camera_ar) {
ADD_PARAM("camera_ar=%s", params->camera_ar);
}
if (params->show_touches) {
ADD_PARAM("show_touches=true");
}
@@ -345,18 +311,12 @@ execute_server(struct sc_server *server,
// By default, power_on is true
ADD_PARAM("power_on=false");
}
if (params->list & SC_OPTION_LIST_ENCODERS) {
if (params->list_encoders) {
ADD_PARAM("list_encoders=true");
}
if (params->list & SC_OPTION_LIST_DISPLAYS) {
if (params->list_displays) {
ADD_PARAM("list_displays=true");
}
if (params->list & SC_OPTION_LIST_CAMERAS) {
ADD_PARAM("list_cameras=true");
}
if (params->list & SC_OPTION_LIST_CAMERA_SIZES) {
ADD_PARAM("list_camera_sizes=true");
}
#undef ADD_PARAM
@@ -936,7 +896,7 @@ run_server(void *data) {
// If --list-* is passed, then the server just prints the requested data
// then exits.
if (params->list) {
if (params->list_encoders || params->list_displays) {
sc_pid pid = execute_server(server, params);
if (pid == SC_PROCESS_NONE) {
goto error_connection_failed;

View File

@@ -26,17 +26,12 @@ struct sc_server_params {
enum sc_log_level log_level;
enum sc_codec video_codec;
enum sc_codec audio_codec;
enum sc_video_source video_source;
enum sc_audio_source audio_source;
enum sc_camera_facing camera_facing;
const char *crop;
const char *video_codec_options;
const char *audio_codec_options;
const char *video_encoder;
const char *audio_encoder;
const char *camera_id;
const char *camera_size;
const char *camera_ar;
struct sc_port_range port_range;
uint32_t tunnel_host;
uint16_t tunnel_port;
@@ -61,8 +56,9 @@ struct sc_server_params {
bool select_tcpip;
bool cleanup;
bool power_on;
bool list_encoders;
bool list_displays;
bool kill_adb_on_close;
uint8_t list;
};
struct sc_server {

View File

@@ -269,25 +269,21 @@ static void test_parse_integer_with_suffix(void) {
char buf[32];
int r = snprintf(buf, sizeof(buf), "%ldk", LONG_MAX / 2000);
assert(r >= 0 && (size_t) r < sizeof(buf));
sprintf(buf, "%ldk", LONG_MAX / 2000);
ok = sc_str_parse_integer_with_suffix(buf, &value);
assert(ok);
assert(value == LONG_MAX / 2000 * 1000);
r = snprintf(buf, sizeof(buf), "%ldm", LONG_MAX / 2000);
assert(r >= 0 && (size_t) r < sizeof(buf));
sprintf(buf, "%ldm", LONG_MAX / 2000);
ok = sc_str_parse_integer_with_suffix(buf, &value);
assert(!ok);
r = snprintf(buf, sizeof(buf), "%ldk", LONG_MIN / 2000);
assert(r >= 0 && (size_t) r < sizeof(buf));
sprintf(buf, "%ldk", LONG_MIN / 2000);
ok = sc_str_parse_integer_with_suffix(buf, &value);
assert(ok);
assert(value == LONG_MIN / 2000 * 1000);
r = snprintf(buf, sizeof(buf), "%ldm", LONG_MIN / 2000);
assert(r >= 0 && (size_t) r < sizeof(buf));
sprintf(buf, "%ldm", LONG_MIN / 2000);
ok = sc_str_parse_integer_with_suffix(buf, &value);
assert(!ok);
}
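This test hunk formats values like "<n>k" and "<n>m" into a buffer and expects sc_str_parse_integer_with_suffix() to multiply by 1000 or 1000000, accepting LONG_MAX / 2000 with a "k" suffix but rejecting it with "m" because the result would not fit in a long. Below is a standalone sketch of a parser with that behaviour, offered as an illustration only, not the actual sc_str implementation.

```c
#include <errno.h>
#include <limits.h>
#include <stdbool.h>
#include <stdio.h>
#include <stdlib.h>

/* Parse an integer with an optional 'k' (x1000) or 'm' (x1000000) suffix;
 * returns false on syntax error or if the multiplied value would not fit
 * in a long. */
static bool
parse_integer_with_suffix(const char *s, long *out) {
    char *end;
    errno = 0;
    long value = strtol(s, &end, 10);
    if (errno || end == s) {
        return false;
    }

    long mul = 1;
    if (*end == 'k') {
        mul = 1000;
        ++end;
    } else if (*end == 'm') {
        mul = 1000000;
        ++end;
    }
    if (*end != '\0') {
        return false;
    }

    // check for overflow before multiplying
    if (value > 0 && value > LONG_MAX / mul) {
        return false;
    }
    if (value < 0 && value < LONG_MIN / mul) {
        return false;
    }

    *out = value * mul;
    return true;
}

int main(void) {
    long value;
    bool ok = parse_integer_with_suffix("8000k", &value);
    printf("ok=%d value=%ld\n", ok, value); // ok=1 value=8000000
    ok = parse_integer_with_suffix("9223372036854775807m", &value);
    printf("ok=%d\n", ok);                  // ok=0 (would overflow)
    return 0;
}
```

The divisions against LONG_MAX / mul and LONG_MIN / mul detect overflow before multiplying, which is what makes the "m" cases in the test fail cleanly.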

View File

@@ -143,7 +143,7 @@ If several displays are available on the Android device, it is possible to
select the display to mirror:
```bash
scrcpy --display-id=1
scrcpy --display=1
```
The list of display ids can be retrieved by:

View File

@@ -1,37 +0,0 @@
package com.genymobile.scrcpy;
public final class CameraAspectRatio {
private static final float SENSOR = -1;
private float ar;
private CameraAspectRatio(float ar) {
this.ar = ar;
}
public static CameraAspectRatio fromFloat(float ar) {
if (ar < 0) {
throw new IllegalArgumentException("Invalid aspect ratio: " + ar);
}
return new CameraAspectRatio(ar);
}
public static CameraAspectRatio fromFraction(int w, int h) {
if (w <= 0 || h <= 0) {
throw new IllegalArgumentException("Invalid aspect ratio: " + w + ":" + h);
}
return new CameraAspectRatio((float) w / h);
}
public static CameraAspectRatio sensorAspectRatio() {
return new CameraAspectRatio(SENSOR);
}
public boolean isSensor() {
return ar == SENSOR;
}
public float getAspectRatio() {
return ar;
}
}

View File

@@ -1,330 +0,0 @@
package com.genymobile.scrcpy;
import com.genymobile.scrcpy.wrappers.ServiceManager;
import android.annotation.SuppressLint;
import android.annotation.TargetApi;
import android.graphics.Rect;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CaptureFailure;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.params.OutputConfiguration;
import android.hardware.camera2.params.SessionConfiguration;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.MediaCodec;
import android.os.Build;
import android.os.Handler;
import android.os.HandlerThread;
import android.view.Surface;
import java.io.IOException;
import java.util.Arrays;
import java.util.Comparator;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executor;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.stream.Stream;
public class CameraCapture extends SurfaceCapture {
private final String explicitCameraId;
private final CameraFacing cameraFacing;
private final Size explicitSize;
private int maxSize;
private final CameraAspectRatio aspectRatio;
private String cameraId;
private Size size;
private HandlerThread cameraThread;
private Handler cameraHandler;
private CameraDevice cameraDevice;
private Executor cameraExecutor;
private final AtomicBoolean disconnected = new AtomicBoolean();
public CameraCapture(String explicitCameraId, CameraFacing cameraFacing, Size explicitSize, int maxSize, CameraAspectRatio aspectRatio) {
this.explicitCameraId = explicitCameraId;
this.cameraFacing = cameraFacing;
this.explicitSize = explicitSize;
this.maxSize = maxSize;
this.aspectRatio = aspectRatio;
}
@Override
public void init() throws IOException {
cameraThread = new HandlerThread("camera");
cameraThread.start();
cameraHandler = new Handler(cameraThread.getLooper());
cameraExecutor = new HandlerExecutor(cameraHandler);
try {
cameraId = selectCamera(explicitCameraId, cameraFacing);
if (cameraId == null) {
throw new IOException("No matching camera found");
}
size = selectSize(cameraId, explicitSize, maxSize, aspectRatio);
if (size == null) {
throw new IOException("Could not select camera size");
}
Ln.i("Using camera '" + cameraId + "'");
cameraDevice = openCamera(cameraId);
} catch (CameraAccessException | InterruptedException e) {
throw new IOException(e);
}
}
private static String selectCamera(String explicitCameraId, CameraFacing cameraFacing) throws CameraAccessException {
if (explicitCameraId != null) {
return explicitCameraId;
}
CameraManager cameraManager = ServiceManager.getCameraManager();
String[] cameraIds = cameraManager.getCameraIdList();
if (cameraFacing == null) {
// Use the first one
return cameraIds.length > 0 ? cameraIds[0] : null;
}
for (String cameraId : cameraIds) {
CameraCharacteristics characteristics = cameraManager.getCameraCharacteristics(cameraId);
int facing = characteristics.get(CameraCharacteristics.LENS_FACING);
if (cameraFacing.value() == facing) {
return cameraId;
}
}
// Not found
return null;
}
@TargetApi(Build.VERSION_CODES.N)
private static Size selectSize(String cameraId, Size explicitSize, int maxSize, CameraAspectRatio aspectRatio) throws CameraAccessException {
if (explicitSize != null) {
return explicitSize;
}
CameraManager cameraManager = ServiceManager.getCameraManager();
CameraCharacteristics characteristics = cameraManager.getCameraCharacteristics(cameraId);
StreamConfigurationMap configs = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
android.util.Size[] sizes = configs.getOutputSizes(MediaCodec.class);
Stream<android.util.Size> stream = Arrays.stream(sizes);
if (maxSize > 0) {
stream = stream.filter(it -> it.getWidth() <= maxSize && it.getHeight() <= maxSize);
}
Float targetAspectRatio = resolveAspectRatio(aspectRatio, characteristics);
if (targetAspectRatio != null) {
stream = stream.filter(it -> {
float ar = ((float) it.getWidth() / it.getHeight());
float arRatio = ar / targetAspectRatio;
// Accept if the aspect ratio is the target aspect ratio + or - 10%
return arRatio >= 0.9f && arRatio <= 1.1f;
});
}
Optional<android.util.Size> selected = stream.max((s1, s2) -> {
// Greater width is better
int cmp = Integer.compare(s1.getWidth(), s2.getWidth());
if (cmp != 0) {
return cmp;
}
if (targetAspectRatio != null) {
// Closer to the target aspect ratio is better
float ar1 = ((float) s1.getWidth() / s1.getHeight());
float arRatio1 = ar1 / targetAspectRatio;
float distance1 = Math.abs(1 - arRatio1);
float ar2 = ((float) s2.getWidth() / s2.getHeight());
float arRatio2 = ar2 / targetAspectRatio;
float distance2 = Math.abs(1 - arRatio2);
// Reverse the order because lower distance is better
return Float.compare(distance2, distance1);
}
// Greater height is better
return Integer.compare(s1.getHeight(), s2.getHeight());
});
if (selected.isPresent()) {
android.util.Size size = selected.get();
return new Size(size.getWidth(), size.getHeight());
}
// Not found
return null;
}
private static Float resolveAspectRatio(CameraAspectRatio ratio, CameraCharacteristics characteristics) {
if (ratio == null) {
return null;
}
if (ratio.isSensor()) {
Rect activeSize = characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
return (float) activeSize.width() / activeSize.height();
}
return ratio.getAspectRatio();
}
@Override
public void start(Surface surface) throws IOException {
try {
CameraCaptureSession session = createCaptureSession(cameraDevice, surface);
CaptureRequest.Builder requestBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
requestBuilder.addTarget(surface);
CaptureRequest request = requestBuilder.build();
setRepeatingRequest(session, request);
} catch (CameraAccessException | InterruptedException e) {
throw new IOException(e);
}
}
@Override
public void release() {
if (cameraDevice != null) {
cameraDevice.close();
}
if (cameraThread != null) {
cameraThread.quitSafely();
}
}
@Override
public Size getSize() {
return size;
}
@Override
public boolean setMaxSize(int maxSize) {
if (explicitSize != null) {
return false;
}
this.maxSize = maxSize;
try {
size = selectSize(cameraId, null, maxSize, aspectRatio);
return true;
} catch (CameraAccessException e) {
Ln.w("Could not select camera size", e);
return false;
}
}
@SuppressLint("MissingPermission")
@TargetApi(Build.VERSION_CODES.S)
private CameraDevice openCamera(String id) throws CameraAccessException, InterruptedException {
CompletableFuture<CameraDevice> future = new CompletableFuture<>();
ServiceManager.getCameraManager().openCamera(id, new CameraDevice.StateCallback() {
@Override
public void onOpened(CameraDevice camera) {
Ln.d("Camera opened successfully");
future.complete(camera);
}
@Override
public void onDisconnected(CameraDevice camera) {
Ln.w("Camera disconnected");
disconnected.set(true);
requestReset();
}
@Override
public void onError(CameraDevice camera, int error) {
int cameraAccessExceptionErrorCode;
switch (error) {
case CameraDevice.StateCallback.ERROR_CAMERA_IN_USE:
cameraAccessExceptionErrorCode = CameraAccessException.CAMERA_IN_USE;
break;
case CameraDevice.StateCallback.ERROR_MAX_CAMERAS_IN_USE:
cameraAccessExceptionErrorCode = CameraAccessException.MAX_CAMERAS_IN_USE;
break;
case CameraDevice.StateCallback.ERROR_CAMERA_DISABLED:
cameraAccessExceptionErrorCode = CameraAccessException.CAMERA_DISABLED;
break;
case CameraDevice.StateCallback.ERROR_CAMERA_DEVICE:
case CameraDevice.StateCallback.ERROR_CAMERA_SERVICE:
default:
cameraAccessExceptionErrorCode = CameraAccessException.CAMERA_ERROR;
break;
}
future.completeExceptionally(new CameraAccessException(cameraAccessExceptionErrorCode));
}
}, cameraHandler);
try {
return future.get();
} catch (ExecutionException e) {
throw (CameraAccessException) e.getCause();
}
}
@TargetApi(Build.VERSION_CODES.S)
private CameraCaptureSession createCaptureSession(CameraDevice camera, Surface surface) throws CameraAccessException, InterruptedException {
CompletableFuture<CameraCaptureSession> future = new CompletableFuture<>();
OutputConfiguration outputConfig = new OutputConfiguration(surface);
List<OutputConfiguration> outputs = Arrays.asList(outputConfig);
SessionConfiguration sessionConfig = new SessionConfiguration(SessionConfiguration.SESSION_REGULAR, outputs, cameraExecutor,
new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(CameraCaptureSession session) {
future.complete(session);
}
@Override
public void onConfigureFailed(CameraCaptureSession session) {
future.completeExceptionally(new CameraAccessException(CameraAccessException.CAMERA_ERROR));
}
});
camera.createCaptureSession(sessionConfig);
try {
return future.get();
} catch (ExecutionException e) {
throw (CameraAccessException) e.getCause();
}
}
@TargetApi(Build.VERSION_CODES.S)
private void setRepeatingRequest(CameraCaptureSession session, CaptureRequest request) throws CameraAccessException, InterruptedException {
CompletableFuture<Void> future = new CompletableFuture<>();
session.setRepeatingRequest(request, new CameraCaptureSession.CaptureCallback() {
@Override
public void onCaptureStarted(CameraCaptureSession session, CaptureRequest request, long timestamp, long frameNumber) {
future.complete(null);
}
@Override
public void onCaptureFailed(CameraCaptureSession session, CaptureRequest request, CaptureFailure failure) {
future.completeExceptionally(new CameraAccessException(CameraAccessException.CAMERA_ERROR));
}
}, cameraHandler);
try {
future.get();
} catch (ExecutionException e) {
throw (CameraAccessException) e.getCause();
}
}
@Override
public boolean isClosed() {
return disconnected.get();
}
}

View File

@@ -1,31 +0,0 @@
package com.genymobile.scrcpy;
import android.hardware.camera2.CameraCharacteristics;
public enum CameraFacing {
FRONT("front", CameraCharacteristics.LENS_FACING_FRONT),
BACK("back", CameraCharacteristics.LENS_FACING_BACK),
EXTERNAL("external", CameraCharacteristics.LENS_FACING_EXTERNAL);
private final String name;
private final int value;
CameraFacing(String name, int value) {
this.name = name;
this.value = value;
}
int value() {
return value;
}
static CameraFacing findByName(String name) {
for (CameraFacing facing : CameraFacing.values()) {
if (name.equals(facing.name)) {
return facing;
}
}
return null;
}
}

View File

@@ -1,23 +0,0 @@
package com.genymobile.scrcpy;
import android.os.Handler;
import java.util.concurrent.Executor;
import java.util.concurrent.RejectedExecutionException;
// Inspired from hidden android.os.HandlerExecutor
public class HandlerExecutor implements Executor {
private final Handler handler;
public HandlerExecutor(Handler handler) {
this.handler = handler;
}
@Override
public void execute(Runnable command) {
if (!handler.post(command)) {
throw new RejectedExecutionException(handler + " is shutting down");
}
}
}

View File

@@ -3,13 +3,6 @@ package com.genymobile.scrcpy;
import com.genymobile.scrcpy.wrappers.DisplayManager;
import com.genymobile.scrcpy.wrappers.ServiceManager;
import android.graphics.Rect;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.MediaCodec;
import java.util.List;
public final class LogUtils {
@@ -54,7 +47,7 @@ public final class LogUtils {
builder.append("\n (none)");
} else {
for (int id : displayIds) {
builder.append("\n --display-id=").append(id).append(" (");
builder.append("\n --display=").append(id).append(" (");
DisplayInfo displayInfo = displayManager.getDisplayInfo(id);
if (displayInfo != null) {
Size size = displayInfo.getSize();
@@ -67,50 +60,4 @@ public final class LogUtils {
}
return builder.toString();
}
private static String getCameraFacingName(int facing) {
switch (facing) {
case CameraCharacteristics.LENS_FACING_FRONT:
return "front";
case CameraCharacteristics.LENS_FACING_BACK:
return "back";
case CameraCharacteristics.LENS_FACING_EXTERNAL:
return "external";
default:
return "unknown";
}
}
public static String buildCameraListMessage(boolean includeSizes) {
StringBuilder builder = new StringBuilder("List of cameras:");
CameraManager cameraManager = ServiceManager.getCameraManager();
try {
String[] cameraIds = cameraManager.getCameraIdList();
if (cameraIds == null || cameraIds.length == 0) {
builder.append("\n (none)");
} else {
for (String id : cameraIds) {
builder.append("\n --video-source=camera --camera-id=").append(id);
CameraCharacteristics characteristics = cameraManager.getCameraCharacteristics(id);
int facing = characteristics.get(CameraCharacteristics.LENS_FACING);
builder.append(" (").append(getCameraFacingName(facing)).append(", ");
Rect activeSize = characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
builder.append(activeSize.width()).append("x").append(activeSize.height());
builder.append(')');
if (includeSizes) {
StreamConfigurationMap configs = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
android.util.Size[] sizes = configs.getOutputSizes(MediaCodec.class);
for (android.util.Size size : sizes) {
builder.append("\n - ").append(size.getWidth()).append('x').append(size.getHeight());
}
}
}
}
} catch (CameraAccessException e) {
builder.append("\n (access denied)");
}
return builder.toString();
}
}

View File

@@ -14,7 +14,6 @@ public class Options {
private int maxSize;
private VideoCodec videoCodec = VideoCodec.H264;
private AudioCodec audioCodec = AudioCodec.OPUS;
private VideoSource videoSource = VideoSource.DISPLAY;
private AudioSource audioSource = AudioSource.OUTPUT;
private int videoBitRate = 8000000;
private int audioBitRate = 128000;
@@ -24,10 +23,6 @@ public class Options {
private Rect crop;
private boolean control = true;
private int displayId;
private String cameraId;
private Size cameraSize;
private CameraFacing cameraFacing;
private CameraAspectRatio cameraAspectRatio;
private boolean showTouches;
private boolean stayAwake;
private List<CodecOption> videoCodecOptions;
@@ -43,8 +38,6 @@ public class Options {
private boolean listEncoders;
private boolean listDisplays;
private boolean listCameras;
private boolean listCameraSizes;
// Options not used by the scrcpy client, but useful to use scrcpy-server directly
private boolean sendDeviceMeta = true; // send device name and size
@@ -80,10 +73,6 @@ public class Options {
return audioCodec;
}
public VideoSource getVideoSource() {
return videoSource;
}
public AudioSource getAudioSource() {
return audioSource;
}
@@ -120,22 +109,6 @@ public class Options {
return displayId;
}
public String getCameraId() {
return cameraId;
}
public Size getCameraSize() {
return cameraSize;
}
public CameraFacing getCameraFacing() {
return cameraFacing;
}
public CameraAspectRatio getCameraAspectRatio() {
return cameraAspectRatio;
}
public boolean getShowTouches() {
return showTouches;
}
@@ -180,10 +153,6 @@ public class Options {
return powerOn;
}
public boolean getList() {
return listEncoders || listDisplays || listCameras || listCameraSizes;
}
public boolean getListEncoders() {
return listEncoders;
}
@@ -192,14 +161,6 @@ public class Options {
return listDisplays;
}
public boolean getListCameras() {
return listCameras;
}
public boolean getListCameraSizes() {
return listCameraSizes;
}
public boolean getSendDeviceMeta() {
return sendDeviceMeta;
}
@@ -269,13 +230,6 @@ public class Options {
}
options.audioCodec = audioCodec;
break;
case "video_source":
VideoSource videoSource = VideoSource.findByName(value);
if (videoSource == null) {
throw new IllegalArgumentException("Video source " + value + " not supported");
}
options.videoSource = videoSource;
break;
case "audio_source":
AudioSource audioSource = AudioSource.findByName(value);
if (audioSource == null) {
@@ -302,9 +256,7 @@ public class Options {
options.tunnelForward = Boolean.parseBoolean(value);
break;
case "crop":
if (!value.isEmpty()) {
options.crop = parseCrop(value);
}
options.crop = parseCrop(value);
break;
case "control":
options.control = Boolean.parseBoolean(value);
@@ -354,36 +306,6 @@ public class Options {
case "list_displays":
options.listDisplays = Boolean.parseBoolean(value);
break;
case "list_cameras":
options.listCameras = Boolean.parseBoolean(value);
break;
case "list_camera_sizes":
options.listCameraSizes = Boolean.parseBoolean(value);
break;
case "camera_id":
if (!value.isEmpty()) {
options.cameraId = value;
}
break;
case "camera_size":
if (!value.isEmpty()) {
options.cameraSize = parseSize(value);
}
break;
case "camera_facing":
if (!value.isEmpty()) {
CameraFacing facing = CameraFacing.findByName(value);
if (facing == null) {
throw new IllegalArgumentException("Camera facing " + value + " not supported");
}
options.cameraFacing = facing;
}
break;
case "camera_ar":
if (!value.isEmpty()) {
options.cameraAspectRatio = parseCameraAspectRatio(value);
}
break;
case "send_device_meta":
options.sendDeviceMeta = Boolean.parseBoolean(value);
break;
@@ -415,6 +337,9 @@ public class Options {
}
private static Rect parseCrop(String crop) {
if (crop.isEmpty()) {
return null;
}
// input format: "width:height:x:y"
String[] tokens = crop.split(":");
if (tokens.length != 4) {
@@ -426,31 +351,4 @@ public class Options {
int y = Integer.parseInt(tokens[3]);
return new Rect(x, y, x + width, y + height);
}
private static Size parseSize(String size) {
// input format: "<width>x<height>"
String[] tokens = size.split("x");
if (tokens.length != 2) {
throw new IllegalArgumentException("Invalid size format (expected <width>x<height>): \"" + size + "\"");
}
int width = Integer.parseInt(tokens[0]);
int height = Integer.parseInt(tokens[1]);
return new Size(width, height);
}
private static CameraAspectRatio parseCameraAspectRatio(String ar) {
if ("sensor".equals(ar)) {
return CameraAspectRatio.sensorAspectRatio();
}
String[] tokens = ar.split(":");
if (tokens.length == 2) {
int w = Integer.parseInt(tokens[0]);
int h = Integer.parseInt(tokens[1]);
return CameraAspectRatio.fromFraction(w, h);
}
float floatAr = Float.parseFloat(tokens[0]);
return CameraAspectRatio.fromFloat(floatAr);
}
}

View File

@@ -1,84 +0,0 @@
package com.genymobile.scrcpy;
import com.genymobile.scrcpy.wrappers.SurfaceControl;
import android.graphics.Rect;
import android.os.Build;
import android.os.IBinder;
import android.view.Surface;
public class ScreenCapture extends SurfaceCapture implements Device.RotationListener, Device.FoldListener {
private final Device device;
private IBinder display;
public ScreenCapture(Device device) {
this.device = device;
}
@Override
public void init() {
display = createDisplay();
device.setRotationListener(this);
device.setFoldListener(this);
}
@Override
public void start(Surface surface) {
ScreenInfo screenInfo = device.getScreenInfo();
Rect contentRect = screenInfo.getContentRect();
// does not include the locked video orientation
Rect unlockedVideoRect = screenInfo.getUnlockedVideoSize().toRect();
int videoRotation = screenInfo.getVideoRotation();
int layerStack = device.getLayerStack();
setDisplaySurface(display, surface, videoRotation, contentRect, unlockedVideoRect, layerStack);
}
@Override
public void release() {
device.setRotationListener(null);
device.setFoldListener(null);
SurfaceControl.destroyDisplay(display);
}
@Override
public Size getSize() {
return device.getScreenInfo().getVideoSize();
}
@Override
public boolean setMaxSize(int size) {
device.setMaxSize(size);
return true;
}
@Override
public void onFoldChanged(int displayId, boolean folded) {
requestReset();
}
@Override
public void onRotationChanged(int rotation) {
requestReset();
}
private static IBinder createDisplay() {
// Since Android 12 (preview), secure displays could not be created with shell permissions anymore.
// On Android 12 preview, SDK_INT is still R (not S), but CODENAME is "S".
boolean secure = Build.VERSION.SDK_INT < Build.VERSION_CODES.R || (Build.VERSION.SDK_INT == Build.VERSION_CODES.R && !"S".equals(
Build.VERSION.CODENAME));
return SurfaceControl.createDisplay("scrcpy", secure);
}
private static void setDisplaySurface(IBinder display, Surface surface, int orientation, Rect deviceRect, Rect displayRect, int layerStack) {
SurfaceControl.openTransaction();
try {
SurfaceControl.setDisplaySurface(display, surface);
SurfaceControl.setDisplayProjection(display, orientation, deviceRect, displayRect);
SurfaceControl.setDisplayLayerStack(display, layerStack);
} finally {
SurfaceControl.closeTransaction();
}
}
}

View File

@@ -1,8 +1,13 @@
package com.genymobile.scrcpy;
import com.genymobile.scrcpy.wrappers.SurfaceControl;
import android.graphics.Rect;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.os.Build;
import android.os.IBinder;
import android.os.Looper;
import android.os.SystemClock;
import android.view.Surface;
@@ -12,7 +17,7 @@ import java.nio.ByteBuffer;
import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;
public class SurfaceEncoder implements AsyncProcessor {
public class ScreenEncoder implements Device.RotationListener, Device.FoldListener, AsyncProcessor {
private static final int DEFAULT_I_FRAME_INTERVAL = 10; // seconds
private static final int REPEAT_FRAME_DELAY_US = 100_000; // repeat after 100ms
@@ -22,7 +27,9 @@ public class SurfaceEncoder implements AsyncProcessor {
private static final int[] MAX_SIZE_FALLBACK = {2560, 1920, 1600, 1280, 1024, 800};
private static final int MAX_CONSECUTIVE_ERRORS = 3;
private final SurfaceCapture capture;
private final AtomicBoolean resetCapture = new AtomicBoolean();
private final Device device;
private final Streamer streamer;
private final String encoderName;
private final List<CodecOption> codecOptions;
@@ -36,9 +43,9 @@ public class SurfaceEncoder implements AsyncProcessor {
private Thread thread;
private final AtomicBoolean stopped = new AtomicBoolean();
public SurfaceEncoder(SurfaceCapture capture, Streamer streamer, int videoBitRate, int maxFps, List<CodecOption> codecOptions, String encoderName,
public ScreenEncoder(Device device, Streamer streamer, int videoBitRate, int maxFps, List<CodecOption> codecOptions, String encoderName,
boolean downsizeOnError) {
this.capture = capture;
this.device = device;
this.streamer = streamer;
this.videoBitRate = videoBitRate;
this.maxFps = maxFps;
@@ -47,29 +54,51 @@ public class SurfaceEncoder implements AsyncProcessor {
this.downsizeOnError = downsizeOnError;
}
@Override
public void onFoldChanged(int displayId, boolean folded) {
resetCapture.set(true);
}
@Override
public void onRotationChanged(int rotation) {
resetCapture.set(true);
}
private boolean consumeResetCapture() {
return resetCapture.getAndSet(false);
}
private void streamScreen() throws IOException, ConfigurationException {
Codec codec = streamer.getCodec();
MediaCodec mediaCodec = createMediaCodec(codec, encoderName);
MediaFormat format = createFormat(codec.getMimeType(), videoBitRate, maxFps, codecOptions);
IBinder display = createDisplay();
device.setRotationListener(this);
device.setFoldListener(this);
capture.init();
streamer.writeVideoHeader(device.getScreenInfo().getVideoSize());
boolean alive;
try {
streamer.writeVideoHeader(capture.getSize());
boolean alive;
do {
Size size = capture.getSize();
format.setInteger(MediaFormat.KEY_WIDTH, size.getWidth());
format.setInteger(MediaFormat.KEY_HEIGHT, size.getHeight());
ScreenInfo screenInfo = device.getScreenInfo();
Rect contentRect = screenInfo.getContentRect();
// include the locked video orientation
Rect videoRect = screenInfo.getVideoSize().toRect();
format.setInteger(MediaFormat.KEY_WIDTH, videoRect.width());
format.setInteger(MediaFormat.KEY_HEIGHT, videoRect.height());
Surface surface = null;
try {
mediaCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
surface = mediaCodec.createInputSurface();
capture.start(surface);
// does not include the locked video orientation
Rect unlockedVideoRect = screenInfo.getUnlockedVideoSize().toRect();
int videoRotation = screenInfo.getVideoRotation();
int layerStack = device.getLayerStack();
setDisplaySurface(display, surface, videoRotation, contentRect, unlockedVideoRect, layerStack);
mediaCodec.start();
@@ -78,7 +107,7 @@ public class SurfaceEncoder implements AsyncProcessor {
mediaCodec.stop();
} catch (IllegalStateException | IllegalArgumentException e) {
Ln.e("Encoding error: " + e.getClass().getName() + ": " + e.getMessage());
if (!prepareRetry(size)) {
if (!prepareRetry(device, screenInfo)) {
throw e;
}
Ln.i("Retrying...");
@@ -92,11 +121,13 @@ public class SurfaceEncoder implements AsyncProcessor {
} while (alive);
} finally {
mediaCodec.release();
capture.release();
device.setRotationListener(null);
device.setFoldListener(null);
SurfaceControl.destroyDisplay(display);
}
}
private boolean prepareRetry(Size currentSize) {
private boolean prepareRetry(Device device, ScreenInfo screenInfo) {
if (firstFrameSent) {
++consecutiveErrors;
if (consecutiveErrors >= MAX_CONSECUTIVE_ERRORS) {
@@ -116,19 +147,15 @@ public class SurfaceEncoder implements AsyncProcessor {
// Downsizing on error is only enabled if an encoding failure occurs before the first frame (downsizing later could be surprising)
int newMaxSize = chooseMaxSizeFallback(currentSize);
int newMaxSize = chooseMaxSizeFallback(screenInfo.getVideoSize());
if (newMaxSize == 0) {
// Must definitively fail
return false;
}
boolean accepted = capture.setMaxSize(newMaxSize);
if (!accepted) {
return false;
}
// Retry with a smaller size
// Retry with a smaller device size
Ln.i("Retrying with -m" + newMaxSize + "...");
device.setMaxSize(newMaxSize);
return true;
}
@@ -149,14 +176,14 @@ public class SurfaceEncoder implements AsyncProcessor {
boolean alive = true;
MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
while (!capture.consumeReset() && !eof) {
while (!consumeResetCapture() && !eof) {
if (stopped.get()) {
alive = false;
break;
}
int outputBufferId = codec.dequeueOutputBuffer(bufferInfo, -1);
try {
if (capture.consumeReset()) {
if (consumeResetCapture()) {
// must restart encoding with new size
break;
}
@@ -181,11 +208,6 @@ public class SurfaceEncoder implements AsyncProcessor {
}
}
if (capture.isClosed()) {
// The capture might have been closed internally (for example if the camera is disconnected)
alive = false;
}
return !eof && alive;
}
@@ -242,6 +264,25 @@ public class SurfaceEncoder implements AsyncProcessor {
return format;
}
private static IBinder createDisplay() {
// Since Android 12 (preview), secure displays could not be created with shell permissions anymore.
// On Android 12 preview, SDK_INT is still R (not S), but CODENAME is "S".
boolean secure = Build.VERSION.SDK_INT < Build.VERSION_CODES.R || (Build.VERSION.SDK_INT == Build.VERSION_CODES.R && !"S"
.equals(Build.VERSION.CODENAME));
return SurfaceControl.createDisplay("scrcpy", secure);
}
private static void setDisplaySurface(IBinder display, Surface surface, int orientation, Rect deviceRect, Rect displayRect, int layerStack) {
SurfaceControl.openTransaction();
try {
SurfaceControl.setDisplaySurface(display, surface);
SurfaceControl.setDisplayProjection(display, orientation, deviceRect, displayRect);
SurfaceControl.setDisplayLayerStack(display, layerStack);
} finally {
SurfaceControl.closeTransaction();
}
}
@Override
public void start(TerminationListener listener) {
thread = new Thread(() -> {

View File

@@ -88,12 +88,6 @@ public final class Server {
private static void scrcpy(Options options) throws IOException, ConfigurationException {
Ln.i("Device: [" + Build.MANUFACTURER + "] " + Build.BRAND + " " + Build.MODEL + " (Android " + Build.VERSION.RELEASE + ")");
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.S && options.getVideoSource() == VideoSource.CAMERA) {
Ln.e("Camera mirroring is not supported before Android 12");
throw new ConfigurationException("Camera mirroring is not supported");
}
final Device device = new Device(options);
Thread initThread = startInitThread(options);
@@ -105,8 +99,7 @@ public final class Server {
boolean audio = options.getAudio();
boolean sendDummyByte = options.getSendDummyByte();
boolean camera = true;
Workarounds.apply(audio, camera);
Workarounds.apply(audio);
List<AsyncProcessor> asyncProcessors = new ArrayList<>();
@@ -139,16 +132,9 @@ public final class Server {
if (video) {
Streamer videoStreamer = new Streamer(connection.getVideoFd(), options.getVideoCodec(), options.getSendCodecMeta(),
options.getSendFrameMeta());
SurfaceCapture surfaceCapture;
if (options.getVideoSource() == VideoSource.DISPLAY) {
surfaceCapture = new ScreenCapture(device);
} else {
surfaceCapture = new CameraCapture(options.getCameraId(), options.getCameraFacing(), options.getCameraSize(),
options.getMaxSize(), options.getCameraAspectRatio());
}
SurfaceEncoder surfaceEncoder = new SurfaceEncoder(surfaceCapture, videoStreamer, options.getVideoBitRate(), options.getMaxFps(),
ScreenEncoder screenEncoder = new ScreenEncoder(device, videoStreamer, options.getVideoBitRate(), options.getMaxFps(),
options.getVideoCodecOptions(), options.getVideoEncoder(), options.getDownsizeOnError());
asyncProcessors.add(surfaceEncoder);
asyncProcessors.add(screenEncoder);
}
Completion completion = new Completion(asyncProcessors.size());
@@ -193,7 +179,7 @@ public final class Server {
Ln.initLogLevel(options.getLogLevel());
if (options.getList()) {
if (options.getListEncoders() || options.getListDisplays()) {
if (options.getCleanup()) {
CleanUp.unlinkSelf();
}
@@ -205,10 +191,6 @@ public final class Server {
if (options.getListDisplays()) {
Ln.i(LogUtils.buildDisplayListMessage());
}
if (options.getListCameras() || options.getListCameraSizes()) {
Workarounds.apply(false, true);
Ln.i(LogUtils.buildCameraListMessage(options.getListCameraSizes()));
}
// Just print the requested data, do not mirror
return;
}

View File

@@ -1,71 +0,0 @@
package com.genymobile.scrcpy;
import android.view.Surface;
import java.io.IOException;
import java.util.concurrent.atomic.AtomicBoolean;
/**
* A video source which can be rendered on a Surface for encoding.
*/
public abstract class SurfaceCapture {
private final AtomicBoolean resetCapture = new AtomicBoolean();
/**
* Request the encoding session to be restarted, for example if the capture implementation detects that the video source size has changed (on
* device rotation for example).
*/
protected void requestReset() {
resetCapture.set(true);
}
/**
* Consume the reset request (intended to be called by the encoder).
*
* @return {@code true} if a reset request was pending, {@code false} otherwise.
*/
public boolean consumeReset() {
return resetCapture.getAndSet(false);
}
/**
* Called once before the capture starts.
*/
public abstract void init() throws IOException;
/**
* Called after the capture ends (if and only if {@link #init()} has been called).
*/
public abstract void release();
/**
* Start the capture to the target surface.
*
* @param surface the surface which will be encoded
*/
public abstract void start(Surface surface) throws IOException;
/**
* Return the video size
*
* @return the video size
*/
public abstract Size getSize();
/**
* Set the maximum capture size (set by the encoder if it does not support the current size).
*
* @param size Maximum size
*/
public abstract boolean setMaxSize(int size);
/**
* Indicate if the capture has been closed internally.
*
* @return {@code true} if the capture is closed, {@code false} otherwise.
*/
public boolean isClosed() {
return false;
}
}

View File

@@ -1,22 +0,0 @@
package com.genymobile.scrcpy;
public enum VideoSource {
DISPLAY("display"),
CAMERA("camera");
private final String name;
VideoSource(String name) {
this.name = name;
}
static VideoSource findByName(String name) {
for (VideoSource videoSource : VideoSource.values()) {
if (name.equals(videoSource.name)) {
return videoSource;
}
}
return null;
}
}

View File

@@ -28,13 +28,14 @@ public final class Workarounds {
// not instantiable
}
public static void apply(boolean audio, boolean camera) {
public static void apply(boolean audio) {
Workarounds.prepareMainLooper();
boolean mustFillAppInfo = false;
boolean mustFillBaseContext = false;
boolean mustFillAppContext = false;
if (Build.BRAND.equalsIgnoreCase("meizu")) {
// Workarounds must be applied for Meizu phones:
// - <https://github.com/Genymobile/scrcpy/issues/240>
@@ -64,11 +65,6 @@ public final class Workarounds {
mustFillAppContext = true;
}
if (camera) {
mustFillAppInfo = true;
mustFillBaseContext = true;
}
if (mustFillAppInfo) {
Workarounds.fillAppInfo();
}

View File

@@ -1,14 +1,9 @@
package com.genymobile.scrcpy.wrappers;
import com.genymobile.scrcpy.FakeContext;
import android.annotation.SuppressLint;
import android.content.Context;
import android.hardware.camera2.CameraManager;
import android.os.IBinder;
import android.os.IInterface;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
@@ -31,7 +26,6 @@ public final class ServiceManager {
private static StatusBarManager statusBarManager;
private static ClipboardManager clipboardManager;
private static ActivityManager activityManager;
private static CameraManager cameraManager;
private ServiceManager() {
/* not instantiable */
@@ -135,16 +129,4 @@ public final class ServiceManager {
return activityManager;
}
public static CameraManager getCameraManager() {
if (cameraManager == null) {
try {
Constructor<CameraManager> ctor = CameraManager.class.getDeclaredConstructor(Context.class);
cameraManager = ctor.newInstance(FakeContext.get());
} catch (Exception e) {
throw new AssertionError(e);
}
}
return cameraManager;
}
}