Compare commits


12 Commits

Author SHA1 Message Date
Simon Chan
d4577ba631 Add --camera-facing
Add an option to select the camera by its lens facing (any, front, back
or external).

Co-authored-by: Romain Vimont <rom@rom1v.com>
Signed-off-by: Romain Vimont <rom@rom1v.com>
2023-10-26 23:41:14 +02:00
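For illustration, a minimal usage sketch of the new option, using the facing values listed in the commit message (`--video-source=camera` comes from the camera mirroring commit further down):

```bash
scrcpy --video-source=camera --camera-facing=front     # first front-facing camera
scrcpy --video-source=camera --camera-facing=back      # first back-facing camera
scrcpy --video-source=camera --camera-facing=external  # first external camera
```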
Romain Vimont
cb7fea31cb Make camera id optional
If no camera id is provided, use the first camera available.
2023-10-26 23:39:53 +02:00
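A usage sketch of the new default behavior (camera id `0` is an illustrative value; real ids are listed by `--list-cameras`):

```bash
scrcpy --video-source=camera                # no id given: use the first available camera
scrcpy --video-source=camera --camera-id=0  # explicit id, as before
```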
Romain Vimont
05aa988946 Handle camera disconnection 2023-10-26 23:39:53 +02:00
Romain Vimont
39ed0d7cbc Automatically select audio source
If --audio-source is not specified, select the default value
according to the video source:
 - for display mirroring, use device audio by default;
 - for camera mirroring, use microphone by default.
2023-10-26 23:39:53 +02:00
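The resulting defaults and overrides, as also documented in the doc/camera.md diff below:

```bash
scrcpy --video-source=display                       # defaults to --audio-source=output
scrcpy --video-source=camera                        # defaults to --audio-source=mic
scrcpy --video-source=display --audio-source=mic    # force display AND microphone
scrcpy --video-source=camera --audio-source=output  # force camera AND device audio output
```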
Romain Vimont
f92c22e331 DONOTMERGE workaround for Android 11 testing 2023-10-26 23:39:53 +02:00
Simon Chan
89049db1dd Add camera mirroring
Add --video-source=camera, and related options:
 - --camera-id=ID: select the camera (ids are listed by --list-cameras);
 - --camera-size=WIDTHxHEIGHT: select the capture size.

Signed-off-by: Romain Vimont <rom@rom1v.com>
2023-10-26 23:39:52 +02:00
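A usage sketch combining the new options (the id and size are illustrative values):

```bash
scrcpy --list-cameras                                               # find a camera id
scrcpy --video-source=camera --camera-id=0 --camera-size=1920x1080  # mirror that camera
```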
Romain Vimont
88083483b0 Add --list-camera-sizes 2023-10-26 23:06:34 +02:00
Simon Chan
3d2f39fc8a Add --list-cameras
Add an option to list the device cameras.

Co-authored-by: Romain Vimont <rom@rom1v.com>
2023-10-26 23:06:34 +02:00
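Usage of the two listing options (this commit and --list-camera-sizes just above):

```bash
scrcpy --list-cameras       # list the device cameras and their ids
scrcpy --list-camera-sizes  # list the declared valid capture sizes
```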
Romain Vimont
986bceb269 Factorize --list- options handling
This will limit code duplication as more --list- options are added.
2023-10-26 23:06:34 +02:00
Romain Vimont
9d01140826 Make separator configurable for parsing integers
The separator was hardcoded to ':'. This will allow reusing the
function to parse sizes as WIDTHxHEIGHT.
2023-10-26 23:06:34 +02:00
Simon Chan
616544d995 Extract SurfaceCapture from ScreenEncoder
Extract an interface SurfaceCapture from ScreenEncoder, representing a
video source which can be rendered to a Surface for encoding.

Split ScreenEncoder into:
 - ScreenCapture, implementing SurfaceCapture to capture the device
   screen,
 - SurfaceEncoder, to encode any SurfaceCapture.

This separation prepares the introduction of another SurfaceCapture
implementation to capture the camera instead of the device screen.

Co-authored-by: Romain Vimont <rom@rom1v.com>
2023-10-26 23:06:34 +02:00
Romain Vimont
3c1a2eb87a Rename --display to --display-id
The option is named "display id" everywhere.

This will be consistent with --camera-id (there will be many camera
options, so an option --camera would be confusing).
2023-10-26 23:06:33 +02:00
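A before/after sketch of the renamed option (display id 1 is an illustrative value):

```bash
scrcpy --display=1     # previous spelling
scrcpy --display-id=1  # new spelling
```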
48 changed files with 244 additions and 888 deletions

.gitignore

@@ -7,4 +7,3 @@ build/
.gradle/ .gradle/
/x/ /x/
local.properties local.properties
/scrcpy-server


@@ -1,4 +1,4 @@
# scrcpy (v2.2) # scrcpy (v2.1.1)
<img src="app/data/icon.svg" width="128" height="128" alt="scrcpy" align="right" /> <img src="app/data/icon.svg" width="128" height="128" alt="scrcpy" align="right" />
@@ -25,13 +25,12 @@ It focuses on:
[lowlatency]: https://github.com/Genymobile/scrcpy/pull/646 [lowlatency]: https://github.com/Genymobile/scrcpy/pull/646
Its features include: Its features include:
- [audio forwarding](doc/audio.md) (Android 11+) - [audio forwarding](doc/audio.md) (Android >= 11)
- [recording](doc/recording.md) - [recording](doc/recording.md)
- mirroring with [Android device screen off](doc/device.md#turn-screen-off) - mirroring with [Android device screen off](doc/device.md#turn-screen-off)
- [copy-paste](doc/control.md#copy-paste) in both directions - [copy-paste](doc/control.md#copy-paste) in both directions
- [configurable quality](doc/video.md) - [configurable quality](doc/video.md)
- [camera mirroring](doc/camera.md) (Android 12+) - Android device screen [as a webcam (V4L2)](doc/v4l2.md) (Linux-only)
- [mirroring as a webcam (V4L2)](doc/v4l2.md) (Linux-only)
- [physical keyboard/mouse simulation (HID)](doc/hid-otg.md) - [physical keyboard/mouse simulation (HID)](doc/hid-otg.md)
- [OTG mode](doc/hid-otg.md#otg) - [OTG mode](doc/hid-otg.md#otg)
- and more… - and more…
@@ -78,7 +77,6 @@ documented in the following pages:
- [Recording](doc/recording.md) - [Recording](doc/recording.md)
- [Tunnels](doc/tunnels.md) - [Tunnels](doc/tunnels.md)
- [HID/OTG](doc/hid-otg.md) - [HID/OTG](doc/hid-otg.md)
- [Camera](doc/camera.md)
- [Video4Linux](doc/v4l2.md) - [Video4Linux](doc/v4l2.md)
- [Shortcuts](doc/shortcuts.md) - [Shortcuts](doc/shortcuts.md)


@@ -10,11 +10,8 @@ _scrcpy() {
--audio-source= --audio-source=
--audio-output-buffer= --audio-output-buffer=
-b --video-bit-rate= -b --video-bit-rate=
--camera-ar=
--camera-id= --camera-id=
--camera-facing= --camera-facing=
--camera-fps=
--camera-high-speed
--camera-size= --camera-size=
--crop= --crop=
-d --select-usb -d --select-usb
@@ -97,7 +94,7 @@ _scrcpy() {
return return
;; ;;
--audio-codec) --audio-codec)
COMPREPLY=($(compgen -W 'opus aac flac raw' -- "$cur")) COMPREPLY=($(compgen -W 'opus aac raw' -- "$cur"))
return return
;; ;;
--video-source) --video-source)
@@ -156,9 +153,7 @@ _scrcpy() {
|--audio-codec-options \ |--audio-codec-options \
|--audio-encoder \ |--audio-encoder \
|--audio-output-buffer \ |--audio-output-buffer \
|--camera-ar \
|--camera-id \ |--camera-id \
|--camera-fps \
|--camera-size \ |--camera-size \
|--crop \ |--crop \
|--display-id \ |--display-id \


@@ -11,17 +11,14 @@ arguments=(
'--always-on-top[Make scrcpy window always on top \(above other windows\)]' '--always-on-top[Make scrcpy window always on top \(above other windows\)]'
'--audio-bit-rate=[Encode the audio at the given bit-rate]' '--audio-bit-rate=[Encode the audio at the given bit-rate]'
'--audio-buffer=[Configure the audio buffering delay (in milliseconds)]' '--audio-buffer=[Configure the audio buffering delay (in milliseconds)]'
'--audio-codec=[Select the audio codec]:codec:(opus aac flac raw)' '--audio-codec=[Select the audio codec]:codec:(opus aac raw)'
'--audio-codec-options=[Set a list of comma-separated key\:type=value options for the device audio encoder]' '--audio-codec-options=[Set a list of comma-separated key\:type=value options for the device audio encoder]'
'--audio-encoder=[Use a specific MediaCodec audio encoder]' '--audio-encoder=[Use a specific MediaCodec audio encoder]'
'--audio-source=[Select the audio source]:source:(output mic)' '--audio-source=[Select the audio source]:source:(output mic)'
'--audio-output-buffer=[Configure the size of the SDL audio output buffer (in milliseconds)]' '--audio-output-buffer=[Configure the size of the SDL audio output buffer (in milliseconds)]'
{-b,--video-bit-rate=}'[Encode the video at the given bit-rate]' {-b,--video-bit-rate=}'[Encode the video at the given bit-rate]'
'--camera-ar=[Select the camera size by its aspect ratio]'
'--camera-high-speed=[Enable high-speed camera capture mode]'
'--camera-id=[Specify the camera id to mirror]' '--camera-id=[Specify the camera id to mirror]'
'--camera-facing=[Select the device camera by its facing direction]:facing:(front back external)' '--camera-facing=[Select the device camera by its facing direction]:facing:(front back external)'
'--camera-fps=[Specify the camera capture frame rate]'
'--camera-size=[Specify an explicit camera capture size]' '--camera-size=[Specify an explicit camera capture size]'
'--crop=[\[width\:height\:x\:y\] Crop the device screen on the server]' '--crop=[\[width\:height\:x\:y\] Crop the device screen on the server]'
{-d,--select-usb}'[Use USB device]' {-d,--select-usb}'[Use USB device]'


@@ -6,10 +6,10 @@ cd "$DIR"
mkdir -p "$PREBUILT_DATA_DIR" mkdir -p "$PREBUILT_DATA_DIR"
cd "$PREBUILT_DATA_DIR" cd "$PREBUILT_DATA_DIR"
DEP_DIR=platform-tools-34.0.5 DEP_DIR=platform-tools-34.0.3
FILENAME=platform-tools_r34.0.5-windows.zip FILENAME=platform-tools_r34.0.3-windows.zip
SHA256SUM=3f8320152704377de150418a3c4c9d07d16d80a6c0d0d8f7289c22c499e33571 SHA256SUM=fce992e93eb786fc9f47df93d83a7b912c46742d45c39d712c02e06d05b72e2b
if [[ -d "$DEP_DIR" ]] if [[ -d "$DEP_DIR" ]]
then then


@@ -6,11 +6,11 @@ cd "$DIR"
mkdir -p "$PREBUILT_DATA_DIR" mkdir -p "$PREBUILT_DATA_DIR"
cd "$PREBUILT_DATA_DIR" cd "$PREBUILT_DATA_DIR"
VERSION=6.1-scrcpy VERSION=6.0-scrcpy-4
DEP_DIR="ffmpeg-$VERSION" DEP_DIR="ffmpeg-$VERSION"
FILENAME="$DEP_DIR".7z FILENAME="$DEP_DIR".7z
SHA256SUM=b41726e603f4624bb9ed7d2836e3e59d9d20b000e22a9ebd27055f4e99e48219 SHA256SUM=39274b321491ce83e76cab5d24e7cbe3f402d3ccf382f739b13be5651c146b60
if [[ -d "$DEP_DIR" ]] if [[ -d "$DEP_DIR" ]]
then then


@@ -6,10 +6,10 @@ cd "$DIR"
mkdir -p "$PREBUILT_DATA_DIR" mkdir -p "$PREBUILT_DATA_DIR"
cd "$PREBUILT_DATA_DIR" cd "$PREBUILT_DATA_DIR"
DEP_DIR=SDL2-2.28.4 DEP_DIR=SDL2-2.28.0
FILENAME=SDL2-devel-2.28.4-mingw.tar.gz FILENAME=SDL2-devel-2.28.0-mingw.tar.gz
SHA256SUM=779d091072cf97291f80030f5232d97aa3d48ab0f2c14fe0b9d9a33c593cdc35 SHA256SUM=b91ce59eeacd4a9db403f976fd2337d9360b21ada374124417d716065c380e20
if [[ -d "$DEP_DIR" ]] if [[ -d "$DEP_DIR" ]]
then then


@@ -13,7 +13,7 @@ BEGIN
VALUE "LegalCopyright", "Romain Vimont, Genymobile" VALUE "LegalCopyright", "Romain Vimont, Genymobile"
VALUE "OriginalFilename", "scrcpy.exe" VALUE "OriginalFilename", "scrcpy.exe"
VALUE "ProductName", "scrcpy" VALUE "ProductName", "scrcpy"
VALUE "ProductVersion", "v2.2" VALUE "ProductVersion", "2.1.1"
END END
END END
BLOCK "VarFileInfo" BLOCK "VarFileInfo"


@@ -35,7 +35,7 @@ Default is 50.
.TP .TP
.BI "\-\-audio\-codec " name .BI "\-\-audio\-codec " name
Select an audio codec (opus, aac, flac or raw). Select an audio codec (opus, aac or raw).
Default is opus. Default is opus.
@@ -45,15 +45,15 @@ Set a list of comma-separated key:type=value options for the device audio encode
The possible values for 'type' are 'int' (default), 'long', 'float' and 'string'. The possible values for 'type' are 'int' (default), 'long', 'float' and 'string'.
The list of possible codec options is available in the Android documentation: The list of possible codec options is available in the Android documentation
.UR https://d.android.com/reference/android/media/MediaFormat
<https://d.android.com/reference/android/media/MediaFormat> .UE .
.TP .TP
.BI "\-\-audio\-encoder " name .BI "\-\-audio\-encoder " name
Use a specific MediaCodec audio encoder (depending on the codec provided by \fB\-\-audio\-codec\fR). Use a specific MediaCodec audio encoder (depending on the codec provided by \fB\-\-audio\-codec\fR).
The available encoders can be listed by \fB\-\-list\-encoders\fR. The available encoders can be listed by \-\-list\-encoders.
.TP .TP
.BI "\-\-audio\-source " source .BI "\-\-audio\-source " source
@@ -75,23 +75,11 @@ Encode the video at the given bit rate, expressed in bits/s. Unit suffixes are s
Default is 8M (8000000). Default is 8M (8000000).
.TP
.BI "\-\-camera\-ar " ar
Select the camera size by its aspect ratio (+/- 10%).
Possible values are "sensor" (use the camera sensor aspect ratio), "\fInum\fR:\fIden\fR" (e.g. "4:3") and "\fIvalue\fR" (e.g. "1.6").
.TP
.B \-\-camera\-high\-speed
Enable high-speed camera capture mode.
This mode is restricted to specific resolutions and frame rates, listed by \fB\-\-list\-camera\-sizes\fR.
.TP .TP
.BI "\-\-camera\-id " id .BI "\-\-camera\-id " id
Specify the device camera id to mirror. Specify the device camera id to mirror.
The available camera ids can be listed by \fB\-\-list\-cameras\fR. The available camera ids can be listed by \-\-list\-cameras.
.TP .TP
.BI "\-\-camera\-facing " facing .BI "\-\-camera\-facing " facing
@@ -99,12 +87,6 @@ Select the device camera by its facing direction.
Possible values are "front", "back" and "external". Possible values are "front", "back" and "external".
.TP
.BI "\-\-camera\-fps " fps
Specify the camera capture frame rate.
If not specified, Android's default frame rate (30 fps) is used.
.TP .TP
.BI "\-\-camera\-size " width\fRx\fIheight .BI "\-\-camera\-size " width\fRx\fIheight
Specify an explicit camera capture size. Specify an explicit camera capture size.
@@ -131,7 +113,7 @@ Disable screensaver while scrcpy is running.
.BI "\-\-display\-id " id .BI "\-\-display\-id " id
Specify the device display id to mirror. Specify the device display id to mirror.
The available display ids can be listed by \fB\-\-list\-displays\fR. The available display ids can be listed by \-\-list\-displays.
Default is 0. Default is 0.
@@ -189,11 +171,9 @@ Inject computer clipboard text as a sequence of key events on Ctrl+v (like MOD+S
This is a workaround for some devices not behaving as expected when setting the device clipboard programmatically. This is a workaround for some devices not behaving as expected when setting the device clipboard programmatically.
.TP
.B \-\-list\-camera\-sizes .B \-\-list\-camera\-sizes
List the valid camera capture sizes. List the valid camera capture sizes.
.TP
.B \-\-list\-cameras .B \-\-list\-cameras
List cameras available on the device. List cameras available on the device.
@@ -241,7 +221,7 @@ Disable device control (mirror the device in read\-only).
.TP .TP
.B \-N, \-\-no\-playback .B \-N, \-\-no\-playback
Disable video and audio playback on the computer (equivalent to \fB\-\-no\-video\-playback \-\-no\-audio\-playback\fR). Disable video and audio playback on the computer (equivalent to --no-video-playback --no-audio-playback).
.TP .TP
.B \-\-no\-audio .B \-\-no\-audio
@@ -363,7 +343,8 @@ Request SDL to use the given render driver (this is just a hint).
Supported names are currently "direct3d", "opengl", "opengles2", "opengles", "metal" and "software". Supported names are currently "direct3d", "opengl", "opengles2", "opengles", "metal" and "software".
<https://wiki.libsdl.org/SDL_HINT_RENDER_DRIVER> .UR https://wiki.libsdl.org/SDL_HINT_RENDER_DRIVER
.UE
.TP .TP
.B \-\-require\-audio .B \-\-require\-audio
@@ -411,13 +392,13 @@ Set the maximum mirroring time, in seconds.
.TP .TP
.BI "\-\-tunnel\-host " ip .BI "\-\-tunnel\-host " ip
Set the IP address of the adb tunnel to reach the scrcpy server. This option automatically enables \fB\-\-force\-adb\-forward\fR. Set the IP address of the adb tunnel to reach the scrcpy server. This option automatically enables --force-adb-forward.
Default is localhost. Default is localhost.
.TP .TP
.BI "\-\-tunnel\-port " port .BI "\-\-tunnel\-port " port
Set the TCP port of the adb tunnel to reach the scrcpy server. This option automatically enables \fB\-\-force\-adb\-forward\fR. Set the TCP port of the adb tunnel to reach the scrcpy server. This option automatically enables --force-adb-forward.
Default is 0 (not forced): the local port used for establishing the tunnel will be used. Default is 0 (not forced): the local port used for establishing the tunnel will be used.
@@ -457,22 +438,20 @@ Set a list of comma-separated key:type=value options for the device video encode
The possible values for 'type' are 'int' (default), 'long', 'float' and 'string'. The possible values for 'type' are 'int' (default), 'long', 'float' and 'string'.
The list of possible codec options is available in the Android documentation: The list of possible codec options is available in the Android documentation
.UR https://d.android.com/reference/android/media/MediaFormat
<https://d.android.com/reference/android/media/MediaFormat> .UE .
.TP .TP
.BI "\-\-video\-encoder " name .BI "\-\-video\-encoder " name
Use a specific MediaCodec video encoder (depending on the codec provided by \fB\-\-video\-codec\fR). Use a specific MediaCodec video encoder (depending on the codec provided by \fB\-\-video\-codec\fR).
The available encoders can be listed by \fB\-\-list\-encoders\fR. The available encoders can be listed by \-\-list\-encoders.
.TP .TP
.BI "\-\-video\-source " source .BI "\-\-video\-source " source
Select the video source (display or camera). Select the video source (display or camera).
Camera mirroring requires Android 12+.
Default is display. Default is display.
.TP .TP
@@ -635,7 +614,7 @@ Path to adb.
.TP .TP
.B ANDROID_SERIAL .B ANDROID_SERIAL
Device serial to use if no selector (\fB-s\fR, \fB-d\fR, \fB-e\fR or \fB\-\-tcpip=\fIaddr\fR) is specified. Device serial to use if no selector (-s, -d, -e or --tcpip=<addr>) is specified.
.TP .TP
.B SCRCPY_ICON_PATH .B SCRCPY_ICON_PATH
@@ -658,14 +637,23 @@ for the Debian Project (and may be used by others).
.SH "REPORTING BUGS" .SH "REPORTING BUGS"
Report bugs to <https://github.com/Genymobile/scrcpy/issues>. Report bugs to
.UR https://github.com/Genymobile/scrcpy/issues
.UE .
.SH COPYRIGHT .SH COPYRIGHT
Copyright \(co 2018 Genymobile <https://www.genymobile.com> Copyright \(co 2018 Genymobile
.UR https://www.genymobile.com
Genymobile
.UE
Copyright \(co 2018\-2023 Romain Vimont <rom@rom1v.com> Copyright \(co 2018\-2023
.MT rom@rom1v.com
Romain Vimont
.ME
Licensed under the Apache License, Version 2.0. Licensed under the Apache License, Version 2.0.
.SH WWW .SH WWW
<https://github.com/Genymobile/scrcpy> .UR https://github.com/Genymobile/scrcpy
.UE


@@ -87,9 +87,6 @@ enum {
OPT_CAMERA_ID, OPT_CAMERA_ID,
OPT_CAMERA_SIZE, OPT_CAMERA_SIZE,
OPT_CAMERA_FACING, OPT_CAMERA_FACING,
OPT_CAMERA_AR,
OPT_CAMERA_FPS,
OPT_CAMERA_HIGH_SPEED,
}; };
struct sc_option { struct sc_option {
@@ -152,7 +149,7 @@ static const struct sc_option options[] = {
.longopt_id = OPT_AUDIO_CODEC, .longopt_id = OPT_AUDIO_CODEC,
.longopt = "audio-codec", .longopt = "audio-codec",
.argdesc = "name", .argdesc = "name",
.text = "Select an audio codec (opus, aac, flac or raw).\n" .text = "Select an audio codec (opus, aac or raw).\n"
"Default is opus.", "Default is opus.",
}, },
{ {
@@ -206,15 +203,6 @@ static const struct sc_option options[] = {
.longopt = "bit-rate", .longopt = "bit-rate",
.argdesc = "value", .argdesc = "value",
}, },
{
.longopt_id = OPT_CAMERA_AR,
.longopt = "camera-ar",
.argdesc = "ar",
.text = "Select the camera size by its aspect ratio (+/- 10%).\n"
"Possible values are \"sensor\" (use the camera sensor aspect "
"ratio), \"<num>:<den>\" (e.g. \"4:3\") or \"<value>\" (e.g. "
"\"1.6\")."
},
{ {
.longopt_id = OPT_CAMERA_ID, .longopt_id = OPT_CAMERA_ID,
.longopt = "camera-id", .longopt = "camera-id",
@@ -230,27 +218,12 @@ static const struct sc_option options[] = {
.text = "Select the device camera by its facing direction.\n" .text = "Select the device camera by its facing direction.\n"
"Possible values are \"front\", \"back\" and \"external\".", "Possible values are \"front\", \"back\" and \"external\".",
}, },
{
.longopt_id = OPT_CAMERA_HIGH_SPEED,
.longopt = "camera-high-speed",
.text = "Enable high-speed camera capture mode.\n"
"This mode is restricted to specific resolutions and frame "
"rates, listed by --list-camera-sizes.",
},
{ {
.longopt_id = OPT_CAMERA_SIZE, .longopt_id = OPT_CAMERA_SIZE,
.longopt = "camera-size", .longopt = "camera-size",
.argdesc = "<width>x<height>", .argdesc = "<width>x<height>",
.text = "Specify an explicit camera capture size.", .text = "Specify an explicit camera capture size.",
}, },
{
.longopt_id = OPT_CAMERA_FPS,
.longopt = "camera-fps",
.argdesc = "value",
.text = "Specify the camera capture frame rate.\n"
"If not specified, Android's default frame rate (30 fps) is "
"used.",
},
{ {
// Not really deprecated (--codec has never been released), but without // Not really deprecated (--codec has never been released), but without
// declaring an explicit --codec option, getopt_long() partial matching // declaring an explicit --codec option, getopt_long() partial matching
@@ -760,7 +733,6 @@ static const struct sc_option options[] = {
.longopt = "video-source", .longopt = "video-source",
.argdesc = "source", .argdesc = "source",
.text = "Select the video source (display or camera).\n" .text = "Select the video source (display or camera).\n"
"Camera mirroring requires Android 12+.\n"
"Default is display.", "Default is display.",
}, },
{ {
@@ -1332,7 +1304,7 @@ parse_max_size(const char *s, uint16_t *max_size) {
static bool static bool
parse_max_fps(const char *s, uint16_t *max_fps) { parse_max_fps(const char *s, uint16_t *max_fps) {
long value; long value;
bool ok = parse_integer_arg(s, &value, false, 0, 0xFFFF, "max fps"); bool ok = parse_integer_arg(s, &value, false, 0, 1000, "max fps");
if (!ok) { if (!ok) {
return false; return false;
} }
@@ -1626,9 +1598,6 @@ get_record_format(const char *name) {
if (!strcmp(name, "aac")) { if (!strcmp(name, "aac")) {
return SC_RECORD_FORMAT_AAC; return SC_RECORD_FORMAT_AAC;
} }
if (!strcmp(name, "flac")) {
return SC_RECORD_FORMAT_FLAC;
}
return 0; return 0;
} }
@@ -1698,15 +1667,11 @@ parse_audio_codec(const char *optarg, enum sc_codec *codec) {
*codec = SC_CODEC_AAC; *codec = SC_CODEC_AAC;
return true; return true;
} }
if (!strcmp(optarg, "flac")) {
*codec = SC_CODEC_FLAC;
return true;
}
if (!strcmp(optarg, "raw")) { if (!strcmp(optarg, "raw")) {
*codec = SC_CODEC_RAW; *codec = SC_CODEC_RAW;
return true; return true;
} }
LOGE("Unsupported audio codec: %s (expected opus, aac, flac or raw)", optarg); LOGE("Unsupported audio codec: %s (expected opus, aac or raw)", optarg);
return false; return false;
} }
@@ -1770,18 +1735,6 @@ parse_camera_facing(const char *optarg, enum sc_camera_facing *facing) {
return false; return false;
} }
static bool
parse_camera_fps(const char *s, uint16_t *camera_fps) {
long value;
bool ok = parse_integer_arg(s, &value, false, 0, 0xFFFF, "camera fps");
if (!ok) {
return false;
}
*camera_fps = (uint16_t) value;
return true;
}
static bool static bool
parse_time_limit(const char *s, sc_tick *tick) { parse_time_limit(const char *s, sc_tick *tick) {
long value; long value;
@@ -2176,9 +2129,6 @@ parse_args_with_getopt(struct scrcpy_cli_args *args, int argc, char *argv[],
return false; return false;
} }
break; break;
case OPT_CAMERA_AR:
opts->camera_ar = optarg;
break;
case OPT_CAMERA_ID: case OPT_CAMERA_ID:
opts->camera_id = optarg; opts->camera_id = optarg;
break; break;
@@ -2190,14 +2140,6 @@ parse_args_with_getopt(struct scrcpy_cli_args *args, int argc, char *argv[],
return false; return false;
} }
break; break;
case OPT_CAMERA_FPS:
if (!parse_camera_fps(optarg, &opts->camera_fps)) {
return false;
}
break;
case OPT_CAMERA_HIGH_SPEED:
opts->camera_high_speed = true;
break;
default: default:
// getopt prints the error message on stderr // getopt prints the error message on stderr
return false; return false;
@@ -2264,19 +2206,6 @@ parse_args_with_getopt(struct scrcpy_cli_args *args, int argc, char *argv[],
opts->require_audio = true; opts->require_audio = true;
} }
if (opts->audio_playback && opts->audio_buffer == -1) {
if (opts->audio_codec == SC_CODEC_FLAC) {
// Use 50 ms audio buffer by default, but use a higher value for FLAC,
// which is not low latency (the default encoder produces blocks of
// 4096 samples, which represent ~85.333ms).
LOGI("FLAC audio: audio buffer increased to 120 ms (use "
"--audio-buffer to set a custom value)");
opts->audio_buffer = SC_TICK_FROM_MS(120);
} else {
opts->audio_buffer = SC_TICK_FROM_MS(50);
}
}
#ifdef HAVE_V4L2 #ifdef HAVE_V4L2
if (v4l2) { if (v4l2) {
if (opts->lock_video_orientation == if (opts->lock_video_orientation ==
@@ -2310,25 +2239,19 @@ parse_args_with_getopt(struct scrcpy_cli_args *args, int argc, char *argv[],
return false; return false;
} }
if (opts->lock_video_orientation !=
SC_LOCK_VIDEO_ORIENTATION_UNLOCKED) {
LOGE("--lock-video-orientation is not supported for camera");
return false;
}
if (opts->camera_id && opts->camera_facing != SC_CAMERA_FACING_ANY) { if (opts->camera_id && opts->camera_facing != SC_CAMERA_FACING_ANY) {
LOGE("Could not specify both --camera-id and --camera-facing"); LOGE("Could not specify both --camera-id and --camera-facing");
return false; return false;
} }
if (opts->camera_size) { if (!opts->camera_size) {
if (opts->max_size) { LOGE("Camera size must be specified by --camera-size=WIDTHxHEIGHT");
LOGE("Could not specify both --camera-size and -m/--max-size");
return false;
}
if (opts->camera_ar) {
LOGE("Could not specify both --camera-size and --camera-ar");
return false;
}
}
if (opts->camera_high_speed && !opts->camera_fps) {
LOGE("--camera-high-speed requires an explicit --camera-fps value");
return false; return false;
} }
@@ -2337,10 +2260,7 @@ parse_args_with_getopt(struct scrcpy_cli_args *args, int argc, char *argv[],
opts->control = false; opts->control = false;
} }
} else if (opts->camera_id } else if (opts->camera_id
|| opts->camera_ar
|| opts->camera_facing != SC_CAMERA_FACING_ANY || opts->camera_facing != SC_CAMERA_FACING_ANY
|| opts->camera_fps
|| opts->camera_high_speed
|| opts->camera_size) { || opts->camera_size) {
LOGE("Camera options are only available with --video-source=camera"); LOGE("Camera options are only available with --video-source=camera");
return false; return false;
@@ -2373,7 +2293,7 @@ parse_args_with_getopt(struct scrcpy_cli_args *args, int argc, char *argv[],
} }
if (opts->audio_codec == SC_CODEC_RAW) { if (opts->audio_codec == SC_CODEC_RAW) {
LOGE("Recording does not support RAW audio codec"); LOGW("Recording does not support RAW audio codec");
return false; return false;
} }
@@ -2396,16 +2316,6 @@ parse_args_with_getopt(struct scrcpy_cli_args *args, int argc, char *argv[],
"(try with --audio-codec=aac)"); "(try with --audio-codec=aac)");
return false; return false;
} }
if (opts->record_format == SC_RECORD_FORMAT_FLAC
&& opts->audio_codec != SC_CODEC_FLAC) {
LOGE("Recording to FLAC file requires a FLAC audio stream "
"(try with --audio-codec=flac)");
return false;
}
}
if (opts->audio_codec == SC_CODEC_FLAC && opts->audio_bit_rate) {
LOGW("--audio-bit-rate is ignored for FLAC audio codec");
} }
if (opts->audio_codec == SC_CODEC_RAW) { if (opts->audio_codec == SC_CODEC_RAW) {


@@ -25,8 +25,7 @@ sc_demuxer_to_avcodec_id(uint32_t codec_id) {
#define SC_CODEC_ID_H265 UINT32_C(0x68323635) // "h265" in ASCII #define SC_CODEC_ID_H265 UINT32_C(0x68323635) // "h265" in ASCII
#define SC_CODEC_ID_AV1 UINT32_C(0x00617631) // "av1" in ASCII #define SC_CODEC_ID_AV1 UINT32_C(0x00617631) // "av1" in ASCII
#define SC_CODEC_ID_OPUS UINT32_C(0x6f707573) // "opus" in ASCII #define SC_CODEC_ID_OPUS UINT32_C(0x6f707573) // "opus" in ASCII
#define SC_CODEC_ID_AAC UINT32_C(0x00616163) // "aac" in ASCII #define SC_CODEC_ID_AAC UINT32_C(0x00616163) // "aac in ASCII"
#define SC_CODEC_ID_FLAC UINT32_C(0x666c6163) // "flac" in ASCII
#define SC_CODEC_ID_RAW UINT32_C(0x00726177) // "raw" in ASCII #define SC_CODEC_ID_RAW UINT32_C(0x00726177) // "raw" in ASCII
switch (codec_id) { switch (codec_id) {
case SC_CODEC_ID_H264: case SC_CODEC_ID_H264:
@@ -44,8 +43,6 @@ sc_demuxer_to_avcodec_id(uint32_t codec_id) {
return AV_CODEC_ID_OPUS; return AV_CODEC_ID_OPUS;
case SC_CODEC_ID_AAC: case SC_CODEC_ID_AAC:
return AV_CODEC_ID_AAC; return AV_CODEC_ID_AAC;
case SC_CODEC_ID_FLAC:
return AV_CODEC_ID_FLAC;
case SC_CODEC_ID_RAW: case SC_CODEC_ID_RAW:
return AV_CODEC_ID_PCM_S16LE; return AV_CODEC_ID_PCM_S16LE;
default: default:
@@ -210,11 +207,6 @@ run_demuxer(void *data) {
codec_ctx->channels = 2; codec_ctx->channels = 2;
#endif #endif
codec_ctx->sample_rate = 48000; codec_ctx->sample_rate = 48000;
if (raw_codec_id == SC_CODEC_ID_FLAC) {
// The sample_fmt is not set by the FLAC decoder
codec_ctx->sample_fmt = AV_SAMPLE_FMT_S16;
}
} }
if (avcodec_open2(codec_ctx, codec, NULL) < 0) { if (avcodec_open2(codec_ctx, codec, NULL) < 0) {


@@ -13,8 +13,6 @@ const struct scrcpy_options scrcpy_options_default = {
.audio_encoder = NULL, .audio_encoder = NULL,
.camera_id = NULL, .camera_id = NULL,
.camera_size = NULL, .camera_size = NULL,
.camera_ar = NULL,
.camera_fps = 0,
.log_level = SC_LOG_LEVEL_INFO, .log_level = SC_LOG_LEVEL_INFO,
.video_codec = SC_CODEC_H264, .video_codec = SC_CODEC_H264,
.audio_codec = SC_CODEC_OPUS, .audio_codec = SC_CODEC_OPUS,
@@ -46,7 +44,7 @@ const struct scrcpy_options scrcpy_options_default = {
.window_height = 0, .window_height = 0,
.display_id = 0, .display_id = 0,
.display_buffer = 0, .display_buffer = 0,
.audio_buffer = -1, // depends on the audio format, .audio_buffer = SC_TICK_FROM_MS(50),
.audio_output_buffer = SC_TICK_FROM_MS(5), .audio_output_buffer = SC_TICK_FROM_MS(5),
.time_limit = 0, .time_limit = 0,
#ifdef HAVE_V4L2 #ifdef HAVE_V4L2
@@ -86,6 +84,5 @@ const struct scrcpy_options scrcpy_options_default = {
.audio = true, .audio = true,
.require_audio = false, .require_audio = false,
.kill_adb_on_close = false, .kill_adb_on_close = false,
.camera_high_speed = false,
.list = 0, .list = 0,
}; };


@@ -25,7 +25,6 @@ enum sc_record_format {
SC_RECORD_FORMAT_MKA, SC_RECORD_FORMAT_MKA,
SC_RECORD_FORMAT_OPUS, SC_RECORD_FORMAT_OPUS,
SC_RECORD_FORMAT_AAC, SC_RECORD_FORMAT_AAC,
SC_RECORD_FORMAT_FLAC,
}; };
static inline bool static inline bool
@@ -33,8 +32,7 @@ sc_record_format_is_audio_only(enum sc_record_format fmt) {
return fmt == SC_RECORD_FORMAT_M4A return fmt == SC_RECORD_FORMAT_M4A
|| fmt == SC_RECORD_FORMAT_MKA || fmt == SC_RECORD_FORMAT_MKA
|| fmt == SC_RECORD_FORMAT_OPUS || fmt == SC_RECORD_FORMAT_OPUS
|| fmt == SC_RECORD_FORMAT_AAC || fmt == SC_RECORD_FORMAT_AAC;
|| fmt == SC_RECORD_FORMAT_FLAC;
} }
enum sc_codec { enum sc_codec {
@@ -43,7 +41,6 @@ enum sc_codec {
SC_CODEC_AV1, SC_CODEC_AV1,
SC_CODEC_OPUS, SC_CODEC_OPUS,
SC_CODEC_AAC, SC_CODEC_AAC,
SC_CODEC_FLAC,
SC_CODEC_RAW, SC_CODEC_RAW,
}; };
@@ -135,8 +132,6 @@ struct scrcpy_options {
const char *audio_encoder; const char *audio_encoder;
const char *camera_id; const char *camera_id;
const char *camera_size; const char *camera_size;
const char *camera_ar;
uint16_t camera_fps;
enum sc_log_level log_level; enum sc_log_level log_level;
enum sc_codec video_codec; enum sc_codec video_codec;
enum sc_codec audio_codec; enum sc_codec audio_codec;
@@ -202,7 +197,6 @@ struct scrcpy_options {
bool audio; bool audio;
bool require_audio; bool require_audio;
bool kill_adb_on_close; bool kill_adb_on_close;
bool camera_high_speed;
#define SC_OPTION_LIST_ENCODERS 0x1 #define SC_OPTION_LIST_ENCODERS 0x1
#define SC_OPTION_LIST_DISPLAYS 0x2 #define SC_OPTION_LIST_DISPLAYS 0x2
#define SC_OPTION_LIST_CAMERAS 0x4 #define SC_OPTION_LIST_CAMERAS 0x4


@@ -69,8 +69,6 @@ sc_recorder_get_format_name(enum sc_record_format format) {
return "matroska"; return "matroska";
case SC_RECORD_FORMAT_OPUS: case SC_RECORD_FORMAT_OPUS:
return "opus"; return "opus";
case SC_RECORD_FORMAT_FLAC:
return "flac";
default: default:
return NULL; return NULL;
} }


@@ -375,8 +375,6 @@ scrcpy(struct scrcpy_options *options) {
.audio_encoder = options->audio_encoder, .audio_encoder = options->audio_encoder,
.camera_id = options->camera_id, .camera_id = options->camera_id,
.camera_size = options->camera_size, .camera_size = options->camera_size,
.camera_ar = options->camera_ar,
.camera_fps = options->camera_fps,
.force_adb_forward = options->force_adb_forward, .force_adb_forward = options->force_adb_forward,
.power_off_on_close = options->power_off_on_close, .power_off_on_close = options->power_off_on_close,
.clipboard_autosync = options->clipboard_autosync, .clipboard_autosync = options->clipboard_autosync,
@@ -386,7 +384,6 @@ scrcpy(struct scrcpy_options *options) {
.cleanup = options->cleanup, .cleanup = options->cleanup,
.power_on = options->power_on, .power_on = options->power_on,
.kill_adb_on_close = options->kill_adb_on_close, .kill_adb_on_close = options->kill_adb_on_close,
.camera_high_speed = options->camera_high_speed,
.list = options->list, .list = options->list,
}; };
@@ -417,14 +414,10 @@ scrcpy(struct scrcpy_options *options) {
if (options->video_playback) { if (options->video_playback) {
sdl_set_hints(options->render_driver); sdl_set_hints(options->render_driver);
} if (SDL_Init(SDL_INIT_VIDEO)) {
LOGE("Could not initialize SDL video: %s", SDL_GetError());
// Initialize the video subsystem even if --no-video or --no-video-playback goto end;
// is passed so that clipboard synchronization still works. }
// <https://github.com/Genymobile/scrcpy/issues/4418>
if (SDL_Init(SDL_INIT_VIDEO)) {
LOGE("Could not initialize SDL video: %s", SDL_GetError());
goto end;
} }
if (options->audio_playback) { if (options->audio_playback) {


@@ -77,7 +77,6 @@ sc_server_params_destroy(struct sc_server_params *params) {
free((char *) params->audio_encoder); free((char *) params->audio_encoder);
free((char *) params->tcpip_dst); free((char *) params->tcpip_dst);
free((char *) params->camera_id); free((char *) params->camera_id);
free((char *) params->camera_ar);
} }
static bool static bool
@@ -106,7 +105,6 @@ sc_server_params_copy(struct sc_server_params *dst,
COPY(audio_encoder); COPY(audio_encoder);
COPY(tcpip_dst); COPY(tcpip_dst);
COPY(camera_id); COPY(camera_id);
COPY(camera_ar);
#undef COPY #undef COPY
return true; return true;
@@ -178,8 +176,6 @@ sc_server_get_codec_name(enum sc_codec codec) {
return "opus"; return "opus";
case SC_CODEC_AAC: case SC_CODEC_AAC:
return "aac"; return "aac";
case SC_CODEC_FLAC:
return "flac";
case SC_CODEC_RAW: case SC_CODEC_RAW:
return "raw"; return "raw";
default: default:
@@ -307,15 +303,6 @@ execute_server(struct sc_server *server,
ADD_PARAM("camera_facing=%s", ADD_PARAM("camera_facing=%s",
sc_server_get_camera_facing_name(params->camera_facing)); sc_server_get_camera_facing_name(params->camera_facing));
} }
if (params->camera_ar) {
ADD_PARAM("camera_ar=%s", params->camera_ar);
}
if (params->camera_fps) {
ADD_PARAM("camera_fps=%" PRIu16, params->camera_fps);
}
if (params->camera_high_speed) {
ADD_PARAM("camera_high_speed=true");
}
if (params->show_touches) { if (params->show_touches) {
ADD_PARAM("show_touches=true"); ADD_PARAM("show_touches=true");
} }


@@ -36,8 +36,6 @@ struct sc_server_params {
const char *audio_encoder; const char *audio_encoder;
const char *camera_id; const char *camera_id;
const char *camera_size; const char *camera_size;
const char *camera_ar;
uint16_t camera_fps;
struct sc_port_range port_range; struct sc_port_range port_range;
uint32_t tunnel_host; uint32_t tunnel_host;
uint16_t tunnel_port; uint16_t tunnel_port;
@@ -63,7 +61,6 @@ struct sc_server_params {
bool cleanup; bool cleanup;
bool power_on; bool power_on;
bool kill_adb_on_close; bool kill_adb_on_close;
bool camera_high_speed;
uint8_t list; uint8_t list;
}; };


@@ -16,6 +16,6 @@ cpu = 'i686'
endian = 'little' endian = 'little'
[properties] [properties]
prebuilt_ffmpeg = 'ffmpeg-6.1-scrcpy/win32' prebuilt_ffmpeg = 'ffmpeg-6.0-scrcpy-4/win32'
prebuilt_sdl2 = 'SDL2-2.28.4/i686-w64-mingw32' prebuilt_sdl2 = 'SDL2-2.28.0/i686-w64-mingw32'
prebuilt_libusb = 'libusb-1.0.26/libusb-MinGW-Win32' prebuilt_libusb = 'libusb-1.0.26/libusb-MinGW-Win32'


@@ -16,6 +16,6 @@ cpu = 'x86_64'
endian = 'little' endian = 'little'
[properties] [properties]
prebuilt_ffmpeg = 'ffmpeg-6.1-scrcpy/win64' prebuilt_ffmpeg = 'ffmpeg-6.0-scrcpy-4/win64'
prebuilt_sdl2 = 'SDL2-2.28.4/x86_64-w64-mingw32' prebuilt_sdl2 = 'SDL2-2.28.0/x86_64-w64-mingw32'
prebuilt_libusb = 'libusb-1.0.26/libusb-MinGW-x64' prebuilt_libusb = 'libusb-1.0.26/libusb-MinGW-x64'


@@ -62,13 +62,12 @@ scrcpy --audio-source=mic --no-video --no-playback --record=file.opus
## Codec ## Codec
The audio codec can be selected. The possible values are `opus` (default), The audio codec can be selected. The possible values are `opus` (default), `aac`
`aac`, `flac` and `raw` (uncompressed PCM 16-bit LE): and `raw` (uncompressed PCM 16-bit LE):
```bash ```bash
scrcpy --audio-codec=opus # default scrcpy --audio-codec=opus # default
scrcpy --audio-codec=aac scrcpy --audio-codec=aac
scrcpy --audio-codec=flac
scrcpy --audio-codec=raw scrcpy --audio-codec=raw
``` ```
@@ -81,14 +80,7 @@ then your device has no Opus encoder: try `scrcpy --audio-codec=aac`.
For advanced usage, to pass arbitrary parameters to the [`MediaFormat`], For advanced usage, to pass arbitrary parameters to the [`MediaFormat`],
check `--audio-codec-options` in the manpage or in `scrcpy --help`. check `--audio-codec-options` in the manpage or in `scrcpy --help`.
For example, to change the [FLAC compression level]:
```bash
scrcpy --audio-codec=flac --audio-codec-options=flac-compression-level=8
```
[`MediaFormat`]: https://developer.android.com/reference/android/media/MediaFormat [`MediaFormat`]: https://developer.android.com/reference/android/media/MediaFormat
[FLAC compression level]: https://developer.android.com/reference/android/media/MediaFormat#KEY_FLAC_COMPRESSION_LEVEL
## Encoder ## Encoder


@@ -233,10 +233,10 @@ install` must be run as root)._
#### Option 2: Use prebuilt server #### Option 2: Use prebuilt server
- [`scrcpy-server-v2.2`][direct-scrcpy-server] - [`scrcpy-server-v2.1.1`][direct-scrcpy-server]
<sub>SHA-256: `c85c4aa84305efb69115cd497a120ebdd10258993b4cf123a8245b3d99d49874`</sub> <sub>SHA-256: `9558db6c56743a1dc03b38f59801fb40e91cc891f8fc0c89e5b0b067761f148e`</sub>
[direct-scrcpy-server]: https://github.com/Genymobile/scrcpy/releases/download/v2.2/scrcpy-server-v2.2 [direct-scrcpy-server]: https://github.com/Genymobile/scrcpy/releases/download/v2.1.1/scrcpy-server-v2.1.1
Download the prebuilt server somewhere, and specify its path during the Meson Download the prebuilt server somewhere, and specify its path during the Meson
configuration: configuration:


@@ -1,150 +0,0 @@
# Camera
Camera mirroring is supported for devices with Android 12 or higher.
To capture the camera instead of the device screen:
```
scrcpy --video-source=camera
```
By default, it automatically switches [audio source](audio.md#source) to
microphone (as if `--audio-source=mic` were also passed).
```bash
scrcpy --video-source=display # default is --audio-source=output
scrcpy --video-source=camera # default is --audio-source=mic
scrcpy --video-source=display --audio-source=mic # force display AND microphone
scrcpy --video-source=camera --audio-source=output # force camera AND device audio output
```
## List
To list the cameras available (with their declared valid sizes and frame rates):
```
scrcpy --list-cameras
scrcpy --list-camera-sizes
```
_Note that the sizes and frame rates are declarative. They are not accurate on
all devices: some of them are declared but not supported, while some others are
not declared but supported._
## Selection
It is possible to pass an explicit camera id (as listed by `--list-cameras`):
```
scrcpy --video-source=camera --camera-id=0
```
Alternatively, the camera may be selected automatically:
```bash
scrcpy --video-source=camera # use the first camera
scrcpy --video-source=camera --camera-facing=front # use the first front camera
scrcpy --video-source=camera --camera-facing=back # use the first back camera
scrcpy --video-source=camera --camera-facing=external # use the first external camera
```
If `--camera-id` is specified, then `--camera-facing` is forbidden (the id
already determines the camera):
```bash
scrcpy --video-source=camera --camera-id=0 --camera-facing=front # error
```
### Size selection
It is possible to pass an explicit camera size:
```
scrcpy --video-source=camera --camera-size=1920x1080
```
The given size may be listed among the declared valid sizes
(`--list-camera-sizes`), but may also be anything else (some devices support
arbitrary sizes):
```
scrcpy --video-source=camera --camera-size=1840x444
```
Alternatively, a declared valid size (among the ones listed by
`--list-camera-sizes`) may be selected automatically.
Two constraints are supported:
- `-m`/`--max-size` (already used for display mirroring), for example `-m1920`;
- `--camera-ar` to specify an aspect ratio (`<num>:<den>`, `<value>` or
`sensor`).
Some examples:
```bash
scrcpy --video-source=camera # use the greatest width and the greatest associated height
scrcpy --video-source=camera -m1920 # use the greatest width not above 1920 and the greatest associated height
scrcpy --video-source=camera --camera-ar=4:3 # use the greatest size with an aspect ratio of 4:3 (+/- 10%)
scrcpy --video-source=camera --camera-ar=1.6 # use the greatest size with an aspect ratio of 1.6 (+/- 10%)
scrcpy --video-source=camera --camera-ar=sensor # use the greatest size with the aspect ratio of the camera sensor (+/- 10%)
scrcpy --video-source=camera -m1920 --camera-ar=16:9 # use the greatest width not above 1920 and the closest to 16:9 aspect ratio
```
If `--camera-size` is specified, then `-m`/`--max-size` and `--camera-ar` are
forbidden (the size is determined by the value given explicitly):
```bash
scrcpy --video-source=camera --camera-size=1920x1080 -m3000 # error
```
## Frame rate
By default, camera is captured at Android's default frame rate (30 fps).
To configure a different frame rate:
```
scrcpy --video-source=camera --camera-fps=60
```
## High speed capture
The Android camera API also supports a [high speed capture mode][high speed].
This mode is restricted to specific resolutions and frame rates, listed by
`--list-camera-sizes`.
```
scrcpy --video-source=camera --camera-size=1920x1080 --camera-fps=240
```
[high speed]: https://developer.android.com/reference/android/hardware/camera2/CameraConstrainedHighSpeedCaptureSession
## Brace expansion tip
All camera options start with `--camera-`, so if your shell supports it, you can
benefit from [brace expansion] (for example, it is supported by _bash_ and _zsh_):
```bash
scrcpy --video-source=camera --camera-{facing=back,ar=16:9,high-speed,fps=120}
```
This will be expanded as:
```bash
scrcpy --video-source=camera --camera-facing=back --camera-ar=16:9 --camera-high-speed --camera-fps=120
```
[brace expansion]: https://www.gnu.org/software/bash/manual/html_node/Brace-Expansion.html
## Webcam
Combined with the [V4L2](v4l2.md) feature on Linux, the Android device camera
may be used as a webcam on the computer.


@@ -18,8 +18,7 @@ To record only the audio:
```bash ```bash
scrcpy --no-video --record=file.opus scrcpy --no-video --record=file.opus
scrcpy --no-video --audio-codec=aac --record=file.aac scrcpy --no-video --audio-codec=aac --record=file.aac
scrcpy --no-video --audio-codec=flac --record=file.flac # .m4a/.mp4 and .mka/.mkv are also supported for both opus and aac
# .m4a/.mp4 and .mka/.mkv are also supported for opus, aac and flac
``` ```
Timestamps are captured on the device, so [packet delay variation] does not Timestamps are captured on the device, so [packet delay variation] does not


@@ -1,14 +1,5 @@
# Video # Video
## Source
By default, scrcpy mirrors the device screen.
It is possible to capture the device camera instead.
See the dedicated [camera](camera.md) page.
## Size ## Size
By default, scrcpy attempts to mirror at the Android device resolution. By default, scrcpy attempts to mirror at the Android device resolution.


@@ -4,14 +4,14 @@
Download the [latest release]: Download the [latest release]:
- [`scrcpy-win64-v2.2.zip`][direct-win64] (64-bit) - [`scrcpy-win64-v2.1.1.zip`][direct-win64] (64-bit)
<sub>SHA-256: `9f9da88ac4c8319dcb9bf852f2d9bba942bac663413383419cddf64eaa5685bd`</sub> <sub>SHA-256: `f77281e1bce2f9934617699c581f063d5b327f012eff602ee98fb2ef550c25c2`</sub>
- [`scrcpy-win32-v2.2.zip`][direct-win32] (32-bit) - [`scrcpy-win32-v2.1.1.zip`][direct-win32] (32-bit)
<sub>SHA-256: `cb84269fc847b8b880e320879492a1ae6c017b42175f03e199530f7a53be9d74`</sub> <sub>SHA-256: `ef7ae7fbe9449f2643febdc2244fb186d1a746a3c736394150cfd14f06d3c943`</sub>
[latest release]: https://github.com/Genymobile/scrcpy/releases/latest [latest release]: https://github.com/Genymobile/scrcpy/releases/latest
[direct-win64]: https://github.com/Genymobile/scrcpy/releases/download/v2.2/scrcpy-win64-v2.2.zip [direct-win64]: https://github.com/Genymobile/scrcpy/releases/download/v2.1.1/scrcpy-win64-v2.1.1.zip
[direct-win32]: https://github.com/Genymobile/scrcpy/releases/download/v2.2/scrcpy-win32-v2.2.zip [direct-win32]: https://github.com/Genymobile/scrcpy/releases/download/v2.1.1/scrcpy-win32-v2.1.1.zip
and extract it. and extract it.


@@ -2,8 +2,8 @@
set -e set -e
BUILDDIR=build-auto BUILDDIR=build-auto
PREBUILT_SERVER_URL=https://github.com/Genymobile/scrcpy/releases/download/v2.2/scrcpy-server-v2.2 PREBUILT_SERVER_URL=https://github.com/Genymobile/scrcpy/releases/download/v2.1.1/scrcpy-server-v2.1.1
PREBUILT_SERVER_SHA256=c85c4aa84305efb69115cd497a120ebdd10258993b4cf123a8245b3d99d49874 PREBUILT_SERVER_SHA256=9558db6c56743a1dc03b38f59801fb40e91cc891f8fc0c89e5b0b067761f148e
echo "[scrcpy] Downloading prebuilt server..." echo "[scrcpy] Downloading prebuilt server..."
wget "$PREBUILT_SERVER_URL" -O scrcpy-server wget "$PREBUILT_SERVER_URL" -O scrcpy-server


@@ -1,5 +1,5 @@
project('scrcpy', 'c', project('scrcpy', 'c',
version: 'v2.2', version: '2.1.1',
meson_version: '>= 0.48', meson_version: '>= 0.48',
default_options: [ default_options: [
'c_std=c11', 'c_std=c11',


@@ -94,14 +94,14 @@ dist-win32: build-server build-win32
cp app/data/scrcpy-noconsole.vbs "$(DIST)/$(WIN32_TARGET_DIR)" cp app/data/scrcpy-noconsole.vbs "$(DIST)/$(WIN32_TARGET_DIR)"
cp app/data/icon.png "$(DIST)/$(WIN32_TARGET_DIR)" cp app/data/icon.png "$(DIST)/$(WIN32_TARGET_DIR)"
cp app/data/open_a_terminal_here.bat "$(DIST)/$(WIN32_TARGET_DIR)" cp app/data/open_a_terminal_here.bat "$(DIST)/$(WIN32_TARGET_DIR)"
cp app/prebuilt-deps/data/ffmpeg-6.1-scrcpy/win32/bin/avutil-58.dll "$(DIST)/$(WIN32_TARGET_DIR)/" cp app/prebuilt-deps/data/ffmpeg-6.0-scrcpy-4/win32/bin/avutil-58.dll "$(DIST)/$(WIN32_TARGET_DIR)/"
cp app/prebuilt-deps/data/ffmpeg-6.1-scrcpy/win32/bin/avcodec-60.dll "$(DIST)/$(WIN32_TARGET_DIR)/" cp app/prebuilt-deps/data/ffmpeg-6.0-scrcpy-4/win32/bin/avcodec-60.dll "$(DIST)/$(WIN32_TARGET_DIR)/"
cp app/prebuilt-deps/data/ffmpeg-6.1-scrcpy/win32/bin/avformat-60.dll "$(DIST)/$(WIN32_TARGET_DIR)/" cp app/prebuilt-deps/data/ffmpeg-6.0-scrcpy-4/win32/bin/avformat-60.dll "$(DIST)/$(WIN32_TARGET_DIR)/"
cp app/prebuilt-deps/data/ffmpeg-6.1-scrcpy/win32/bin/swresample-4.dll "$(DIST)/$(WIN32_TARGET_DIR)/" cp app/prebuilt-deps/data/ffmpeg-6.0-scrcpy-4/win32/bin/swresample-4.dll "$(DIST)/$(WIN32_TARGET_DIR)/"
cp app/prebuilt-deps/data/platform-tools-34.0.5/adb.exe "$(DIST)/$(WIN32_TARGET_DIR)/" cp app/prebuilt-deps/data/platform-tools-34.0.3/adb.exe "$(DIST)/$(WIN32_TARGET_DIR)/"
cp app/prebuilt-deps/data/platform-tools-34.0.5/AdbWinApi.dll "$(DIST)/$(WIN32_TARGET_DIR)/" cp app/prebuilt-deps/data/platform-tools-34.0.3/AdbWinApi.dll "$(DIST)/$(WIN32_TARGET_DIR)/"
cp app/prebuilt-deps/data/platform-tools-34.0.5/AdbWinUsbApi.dll "$(DIST)/$(WIN32_TARGET_DIR)/" cp app/prebuilt-deps/data/platform-tools-34.0.3/AdbWinUsbApi.dll "$(DIST)/$(WIN32_TARGET_DIR)/"
cp app/prebuilt-deps/data/SDL2-2.28.4/i686-w64-mingw32/bin/SDL2.dll "$(DIST)/$(WIN32_TARGET_DIR)/" cp app/prebuilt-deps/data/SDL2-2.28.0/i686-w64-mingw32/bin/SDL2.dll "$(DIST)/$(WIN32_TARGET_DIR)/"
cp app/prebuilt-deps/data/libusb-1.0.26/libusb-MinGW-Win32/bin/msys-usb-1.0.dll "$(DIST)/$(WIN32_TARGET_DIR)/" cp app/prebuilt-deps/data/libusb-1.0.26/libusb-MinGW-Win32/bin/msys-usb-1.0.dll "$(DIST)/$(WIN32_TARGET_DIR)/"
dist-win64: build-server build-win64 dist-win64: build-server build-win64
@@ -112,14 +112,14 @@ dist-win64: build-server build-win64
cp app/data/scrcpy-noconsole.vbs "$(DIST)/$(WIN64_TARGET_DIR)" cp app/data/scrcpy-noconsole.vbs "$(DIST)/$(WIN64_TARGET_DIR)"
cp app/data/icon.png "$(DIST)/$(WIN64_TARGET_DIR)" cp app/data/icon.png "$(DIST)/$(WIN64_TARGET_DIR)"
cp app/data/open_a_terminal_here.bat "$(DIST)/$(WIN64_TARGET_DIR)" cp app/data/open_a_terminal_here.bat "$(DIST)/$(WIN64_TARGET_DIR)"
cp app/prebuilt-deps/data/ffmpeg-6.1-scrcpy/win64/bin/avutil-58.dll "$(DIST)/$(WIN64_TARGET_DIR)/" cp app/prebuilt-deps/data/ffmpeg-6.0-scrcpy-4/win64/bin/avutil-58.dll "$(DIST)/$(WIN64_TARGET_DIR)/"
cp app/prebuilt-deps/data/ffmpeg-6.1-scrcpy/win64/bin/avcodec-60.dll "$(DIST)/$(WIN64_TARGET_DIR)/" cp app/prebuilt-deps/data/ffmpeg-6.0-scrcpy-4/win64/bin/avcodec-60.dll "$(DIST)/$(WIN64_TARGET_DIR)/"
cp app/prebuilt-deps/data/ffmpeg-6.1-scrcpy/win64/bin/avformat-60.dll "$(DIST)/$(WIN64_TARGET_DIR)/" cp app/prebuilt-deps/data/ffmpeg-6.0-scrcpy-4/win64/bin/avformat-60.dll "$(DIST)/$(WIN64_TARGET_DIR)/"
cp app/prebuilt-deps/data/ffmpeg-6.1-scrcpy/win64/bin/swresample-4.dll "$(DIST)/$(WIN64_TARGET_DIR)/" cp app/prebuilt-deps/data/ffmpeg-6.0-scrcpy-4/win64/bin/swresample-4.dll "$(DIST)/$(WIN64_TARGET_DIR)/"
cp app/prebuilt-deps/data/platform-tools-34.0.5/adb.exe "$(DIST)/$(WIN64_TARGET_DIR)/" cp app/prebuilt-deps/data/platform-tools-34.0.3/adb.exe "$(DIST)/$(WIN64_TARGET_DIR)/"
cp app/prebuilt-deps/data/platform-tools-34.0.5/AdbWinApi.dll "$(DIST)/$(WIN64_TARGET_DIR)/" cp app/prebuilt-deps/data/platform-tools-34.0.3/AdbWinApi.dll "$(DIST)/$(WIN64_TARGET_DIR)/"
cp app/prebuilt-deps/data/platform-tools-34.0.5/AdbWinUsbApi.dll "$(DIST)/$(WIN64_TARGET_DIR)/" cp app/prebuilt-deps/data/platform-tools-34.0.3/AdbWinUsbApi.dll "$(DIST)/$(WIN64_TARGET_DIR)/"
cp app/prebuilt-deps/data/SDL2-2.28.4/x86_64-w64-mingw32/bin/SDL2.dll "$(DIST)/$(WIN64_TARGET_DIR)/" cp app/prebuilt-deps/data/SDL2-2.28.0/x86_64-w64-mingw32/bin/SDL2.dll "$(DIST)/$(WIN64_TARGET_DIR)/"
cp app/prebuilt-deps/data/libusb-1.0.26/libusb-MinGW-x64/bin/msys-usb-1.0.dll "$(DIST)/$(WIN64_TARGET_DIR)/" cp app/prebuilt-deps/data/libusb-1.0.26/libusb-MinGW-x64/bin/msys-usb-1.0.dll "$(DIST)/$(WIN64_TARGET_DIR)/"
zip-win32: dist-win32 zip-win32: dist-win32


@@ -7,8 +7,8 @@ android {
applicationId "com.genymobile.scrcpy" applicationId "com.genymobile.scrcpy"
minSdkVersion 21 minSdkVersion 21
targetSdkVersion 33 targetSdkVersion 33
versionCode 200 versionCode 20101
versionName "v2.2" versionName "2.1.1"
testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner" testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner"
} }
buildTypes { buildTypes {


@@ -12,7 +12,7 @@
set -e set -e
SCRCPY_DEBUG=false SCRCPY_DEBUG=false
SCRCPY_VERSION_NAME=v2.2 SCRCPY_VERSION_NAME=2.1.1
PLATFORM=${ANDROID_PLATFORM:-33} PLATFORM=${ANDROID_PLATFORM:-33}
BUILD_TOOLS=${ANDROID_BUILD_TOOLS:-33.0.0} BUILD_TOOLS=${ANDROID_BUILD_TOOLS:-33.0.0}


@@ -5,7 +5,6 @@ import android.media.MediaFormat;
public enum AudioCodec implements Codec { public enum AudioCodec implements Codec {
OPUS(0x6f_70_75_73, "opus", MediaFormat.MIMETYPE_AUDIO_OPUS), OPUS(0x6f_70_75_73, "opus", MediaFormat.MIMETYPE_AUDIO_OPUS),
AAC(0x00_61_61_63, "aac", MediaFormat.MIMETYPE_AUDIO_AAC), AAC(0x00_61_61_63, "aac", MediaFormat.MIMETYPE_AUDIO_AAC),
FLAC(0x66_6c_61_63, "flac", MediaFormat.MIMETYPE_AUDIO_FLAC),
RAW(0x00_72_61_77, "raw", MediaFormat.MIMETYPE_AUDIO_RAW); RAW(0x00_72_61_77, "raw", MediaFormat.MIMETYPE_AUDIO_RAW);
private final int id; // 4-byte ASCII representation of the name private final int id; // 4-byte ASCII representation of the name


@@ -32,13 +32,7 @@ public final class AudioRawRecorder implements AsyncProcessor {
final MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo(); final MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
try { try {
try { capture.start();
capture.start();
} catch (Throwable t) {
// Notify the client that the audio could not be captured
streamer.writeDisableStream(false);
throw t;
}
streamer.writeAudioHeader(); streamer.writeAudioHeader();
while (!Thread.currentThread().isInterrupted()) { while (!Thread.currentThread().isInterrupted()) {
@@ -51,11 +45,10 @@ public final class AudioRawRecorder implements AsyncProcessor {
streamer.writePacket(buffer, bufferInfo); streamer.writePacket(buffer, bufferInfo);
} }
} catch (IOException e) { } catch (Throwable e) {
// Broken pipe is expected on close, because the socket is closed by the client // Notify the client that the audio could not be captured
if (!IO.isBrokenPipe(e)) { streamer.writeDisableStream(false);
Ln.e("Audio capture error", e); throw e;
}
} finally { } finally {
capture.stop(); capture.stop();
} }
@@ -69,8 +62,8 @@ public final class AudioRawRecorder implements AsyncProcessor {
record(); record();
} catch (AudioCaptureForegroundException e) { } catch (AudioCaptureForegroundException e) {
// Do not print stack trace, a user-friendly error-message has already been logged // Do not print stack trace, a user-friendly error-message has already been logged
} catch (Throwable t) { } catch (IOException e) {
Ln.e("Audio recording error", t); Ln.e("Audio recording error", e);
fatalError = true; fatalError = true;
} finally { } finally {
Ln.d("Audio recorder stopped"); Ln.d("Audio recorder stopped");


@@ -1,37 +0,0 @@
package com.genymobile.scrcpy;
public final class CameraAspectRatio {
private static final float SENSOR = -1;
private float ar;
private CameraAspectRatio(float ar) {
this.ar = ar;
}
public static CameraAspectRatio fromFloat(float ar) {
if (ar < 0) {
throw new IllegalArgumentException("Invalid aspect ratio: " + ar);
}
return new CameraAspectRatio(ar);
}
public static CameraAspectRatio fromFraction(int w, int h) {
if (w <= 0 || h <= 0) {
throw new IllegalArgumentException("Invalid aspect ratio: " + w + ":" + h);
}
return new CameraAspectRatio((float) w / h);
}
public static CameraAspectRatio sensorAspectRatio() {
return new CameraAspectRatio(SENSOR);
}
public boolean isSensor() {
return ar == SENSOR;
}
public float getAspectRatio() {
return ar;
}
}


@@ -4,47 +4,50 @@ import com.genymobile.scrcpy.wrappers.ServiceManager;
import android.annotation.SuppressLint; import android.annotation.SuppressLint;
import android.annotation.TargetApi; import android.annotation.TargetApi;
import android.graphics.Rect;
import android.hardware.camera2.CameraAccessException; import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession; import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics; import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession;
import android.hardware.camera2.CameraDevice; import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager; import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CaptureFailure; import android.hardware.camera2.CaptureFailure;
import android.hardware.camera2.CaptureRequest; import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.params.OutputConfiguration; import android.hardware.camera2.params.OutputConfiguration;
import android.hardware.camera2.params.SessionConfiguration; import android.hardware.camera2.params.SessionConfiguration;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.MediaCodec;
import android.os.Build; import android.os.Build;
import android.os.Handler; import android.os.Handler;
import android.os.HandlerThread; import android.os.HandlerThread;
import android.util.Range;
import android.view.Surface; import android.view.Surface;
import java.io.IOException; import java.io.IOException;
import java.util.Arrays; import java.util.Arrays;
import java.util.List; import java.util.List;
import java.util.Optional;
import java.util.concurrent.CompletableFuture; import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executor; import java.util.concurrent.Executor;
import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicBoolean;
import java.util.stream.Stream;
public class CameraCapture extends SurfaceCapture { public class CameraCapture extends SurfaceCapture {
private final String explicitCameraId; public static class CameraSelection {
private final CameraFacing cameraFacing; private String explicitCameraId;
private final Size explicitSize; private CameraFacing cameraFacing;
private int maxSize;
private final CameraAspectRatio aspectRatio;
private final int fps;
private final boolean highSpeed;
private String cameraId; public CameraSelection(String explicitCameraId, CameraFacing cameraFacing) {
private Size size; this.explicitCameraId = explicitCameraId;
this.cameraFacing = cameraFacing;
}
boolean hasId() {
return explicitCameraId != null;
}
boolean hasProperties() {
return cameraFacing != null;
}
}
private final CameraSelection cameraSelection;
private final Size explicitSize;
private HandlerThread cameraThread; private HandlerThread cameraThread;
private Handler cameraHandler; private Handler cameraHandler;
@@ -53,15 +56,9 @@ public class CameraCapture extends SurfaceCapture {
private final AtomicBoolean disconnected = new AtomicBoolean(); private final AtomicBoolean disconnected = new AtomicBoolean();
public CameraCapture(String explicitCameraId, CameraFacing cameraFacing, Size explicitSize, int maxSize, CameraAspectRatio aspectRatio, int fps, public CameraCapture(CameraSelection cameraSelection, Size explicitSize) {
boolean highSpeed) { this.cameraSelection = cameraSelection;
this.explicitCameraId = explicitCameraId;
this.cameraFacing = cameraFacing;
this.explicitSize = explicitSize; this.explicitSize = explicitSize;
this.maxSize = maxSize;
this.aspectRatio = aspectRatio;
this.fps = fps;
this.highSpeed = highSpeed;
} }
@Override @Override
@@ -72,16 +69,11 @@ public class CameraCapture extends SurfaceCapture {
cameraExecutor = new HandlerExecutor(cameraHandler); cameraExecutor = new HandlerExecutor(cameraHandler);
try { try {
cameraId = selectCamera(explicitCameraId, cameraFacing); String cameraId = selectCamera(cameraSelection);
if (cameraId == null) { if (cameraId == null) {
throw new IOException("No matching camera found"); throw new IOException("No matching camera found");
} }
size = selectSize(cameraId, explicitSize, maxSize, aspectRatio, highSpeed);
if (size == null) {
throw new IOException("Could not select camera size");
}
Ln.i("Using camera '" + cameraId + "'"); Ln.i("Using camera '" + cameraId + "'");
cameraDevice = openCamera(cameraId); cameraDevice = openCamera(cameraId);
} catch (CameraAccessException | InterruptedException e) { } catch (CameraAccessException | InterruptedException e) {
@@ -89,15 +81,15 @@ public class CameraCapture extends SurfaceCapture {
} }
} }
private static String selectCamera(String explicitCameraId, CameraFacing cameraFacing) throws CameraAccessException { private String selectCamera(CameraSelection cameraSelection) throws CameraAccessException {
if (explicitCameraId != null) { if (cameraSelection.hasId()) {
return explicitCameraId; return cameraSelection.explicitCameraId;
} }
CameraManager cameraManager = ServiceManager.getCameraManager(); CameraManager cameraManager = ServiceManager.getCameraManager();
String[] cameraIds = cameraManager.getCameraIdList(); String[] cameraIds = cameraManager.getCameraIdList();
if (cameraFacing == null) { if (!cameraSelection.hasProperties()) {
// Use the first one // Use the first one
return cameraIds.length > 0 ? cameraIds[0] : null; return cameraIds.length > 0 ? cameraIds[0] : null;
} }
@@ -105,98 +97,28 @@ public class CameraCapture extends SurfaceCapture {
for (String cameraId : cameraIds) { for (String cameraId : cameraIds) {
CameraCharacteristics characteristics = cameraManager.getCameraCharacteristics(cameraId); CameraCharacteristics characteristics = cameraManager.getCameraCharacteristics(cameraId);
int facing = characteristics.get(CameraCharacteristics.LENS_FACING); if (cameraSelection.cameraFacing != null) {
if (cameraFacing.value() == facing) { int facing = characteristics.get(CameraCharacteristics.LENS_FACING);
return cameraId; if (cameraSelection.cameraFacing.value() != facing) {
} // Does not match
} continue;
// Not found
return null;
}
@TargetApi(Build.VERSION_CODES.N)
private static Size selectSize(String cameraId, Size explicitSize, int maxSize, CameraAspectRatio aspectRatio, boolean highSpeed)
throws CameraAccessException {
if (explicitSize != null) {
return explicitSize;
}
CameraManager cameraManager = ServiceManager.getCameraManager();
CameraCharacteristics characteristics = cameraManager.getCameraCharacteristics(cameraId);
StreamConfigurationMap configs = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
android.util.Size[] sizes = highSpeed ? configs.getHighSpeedVideoSizes() : configs.getOutputSizes(MediaCodec.class);
Stream<android.util.Size> stream = Arrays.stream(sizes);
if (maxSize > 0) {
stream = stream.filter(it -> it.getWidth() <= maxSize && it.getHeight() <= maxSize);
}
Float targetAspectRatio = resolveAspectRatio(aspectRatio, characteristics);
if (targetAspectRatio != null) {
stream = stream.filter(it -> {
float ar = ((float) it.getWidth() / it.getHeight());
float arRatio = ar / targetAspectRatio;
// Accept if the aspect ratio is the target aspect ratio + or - 10%
return arRatio >= 0.9f && arRatio <= 1.1f;
});
}
Optional<android.util.Size> selected = stream.max((s1, s2) -> {
// Greater width is better
int cmp = Integer.compare(s1.getWidth(), s2.getWidth());
if (cmp != 0) {
return cmp;
}
if (targetAspectRatio != null) {
// Closer to the target aspect ratio is better
float ar1 = ((float) s1.getWidth() / s1.getHeight());
float arRatio1 = ar1 / targetAspectRatio;
float distance1 = Math.abs(1 - arRatio1);
float ar2 = ((float) s2.getWidth() / s2.getHeight());
float arRatio2 = ar2 / targetAspectRatio;
float distance2 = Math.abs(1 - arRatio2);
// Reverse the order because lower distance is better
cmp = Float.compare(distance2, distance1);
if (cmp != 0) {
return cmp;
} }
} }
// Greater height is better return cameraId;
return Integer.compare(s1.getHeight(), s2.getHeight());
});
if (selected.isPresent()) {
android.util.Size size = selected.get();
return new Size(size.getWidth(), size.getHeight());
} }
// Not found // Not found
return null; return null;
} }
private static Float resolveAspectRatio(CameraAspectRatio ratio, CameraCharacteristics characteristics) {
if (ratio == null) {
return null;
}
if (ratio.isSensor()) {
Rect activeSize = characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
return (float) activeSize.width() / activeSize.height();
}
return ratio.getAspectRatio();
}
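
Note: the selectCamera()/selectSize() logic above is hard to follow in diff form. The sketch below condenses the same idea: pick the first camera whose LENS_FACING matches, then the largest MediaCodec-compatible size within --max-size and close to the requested aspect ratio. The class and method names (CameraSelectionSketch, pickCameraId, pickSize) are illustrative only, and high-speed sizes are ignored for brevity.

```java
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.MediaCodec;
import android.util.Size;

import java.util.Arrays;
import java.util.Optional;

final class CameraSelectionSketch {

    // Return the first camera whose lens facing matches, or null if none does.
    static String pickCameraId(CameraManager manager, int lensFacing) throws CameraAccessException {
        for (String id : manager.getCameraIdList()) {
            Integer facing = manager.getCameraCharacteristics(id).get(CameraCharacteristics.LENS_FACING);
            if (facing != null && facing == lensFacing) {
                return id;
            }
        }
        return null;
    }

    // Pick the largest MediaCodec-compatible size within maxSize, close to targetAr (+/- 10%).
    static Size pickSize(CameraCharacteristics characteristics, int maxSize, float targetAr) {
        StreamConfigurationMap configs = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
        Optional<Size> best = Arrays.stream(configs.getOutputSizes(MediaCodec.class))
                .filter(s -> maxSize <= 0 || (s.getWidth() <= maxSize && s.getHeight() <= maxSize))
                .filter(s -> {
                    float ratio = ((float) s.getWidth() / s.getHeight()) / targetAr;
                    return ratio >= 0.9f && ratio <= 1.1f; // accept +/- 10% around the target
                })
                .max((a, b) -> {
                    int cmp = Integer.compare(a.getWidth(), b.getWidth());
                    return cmp != 0 ? cmp : Integer.compare(a.getHeight(), b.getHeight());
                });
        return best.orElse(null);
    }
}
```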
@Override @Override
public void start(Surface surface) throws IOException { public void start(Surface surface) throws IOException {
try { try {
CameraCaptureSession session = createCaptureSession(cameraDevice, surface); CameraCaptureSession session = createCaptureSession(cameraDevice, surface);
CaptureRequest request = createCaptureRequest(surface); CaptureRequest.Builder requestBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
requestBuilder.addTarget(surface);
CaptureRequest request = requestBuilder.build();
setRepeatingRequest(session, request); setRepeatingRequest(session, request);
} catch (CameraAccessException | InterruptedException e) { } catch (CameraAccessException | InterruptedException e) {
throw new IOException(e); throw new IOException(e);
@@ -215,33 +137,24 @@ public class CameraCapture extends SurfaceCapture {
@Override @Override
public Size getSize() { public Size getSize() {
return size; return explicitSize;
} }
@Override @Override
public boolean setMaxSize(int maxSize) { public boolean setMaxSize(int size) {
if (explicitSize != null) { return false;
return false;
}
this.maxSize = maxSize;
try {
size = selectSize(cameraId, null, maxSize, aspectRatio, highSpeed);
return size != null;
} catch (CameraAccessException e) {
Ln.w("Could not select camera size", e);
return false;
}
} }
@SuppressLint("MissingPermission") @SuppressLint("MissingPermission")
@TargetApi(Build.VERSION_CODES.S) @TargetApi(Build.VERSION_CODES.S)
private CameraDevice openCamera(String id) throws CameraAccessException, InterruptedException { private CameraDevice openCamera(String id) throws CameraAccessException, InterruptedException {
Ln.v("Open Camera: " + id);
CompletableFuture<CameraDevice> future = new CompletableFuture<>(); CompletableFuture<CameraDevice> future = new CompletableFuture<>();
ServiceManager.getCameraManager().openCamera(id, new CameraDevice.StateCallback() { ServiceManager.getCameraManager().openCamera(id, new CameraDevice.StateCallback() {
@Override @Override
public void onOpened(CameraDevice camera) { public void onOpened(CameraDevice camera) {
Ln.d("Camera opened successfully"); Ln.v("Open Camera Success");
future.complete(camera); future.complete(camera);
} }
@@ -284,15 +197,17 @@ public class CameraCapture extends SurfaceCapture {
@TargetApi(Build.VERSION_CODES.S) @TargetApi(Build.VERSION_CODES.S)
private CameraCaptureSession createCaptureSession(CameraDevice camera, Surface surface) throws CameraAccessException, InterruptedException { private CameraCaptureSession createCaptureSession(CameraDevice camera, Surface surface) throws CameraAccessException, InterruptedException {
Ln.d("Create Capture Session");
CompletableFuture<CameraCaptureSession> future = new CompletableFuture<>(); CompletableFuture<CameraCaptureSession> future = new CompletableFuture<>();
// replace by createCaptureSession(SessionConfiguration)
OutputConfiguration outputConfig = new OutputConfiguration(surface); OutputConfiguration outputConfig = new OutputConfiguration(surface);
List<OutputConfiguration> outputs = Arrays.asList(outputConfig); List<OutputConfiguration> outputs = Arrays.asList(outputConfig);
SessionConfiguration sessionConfig = new SessionConfiguration(SessionConfiguration.SESSION_REGULAR, outputs, cameraExecutor,
int sessionType = highSpeed ? SessionConfiguration.SESSION_HIGH_SPEED : SessionConfiguration.SESSION_REGULAR;
SessionConfiguration sessionConfig = new SessionConfiguration(sessionType, outputs, cameraExecutor,
new CameraCaptureSession.StateCallback() { new CameraCaptureSession.StateCallback() {
@Override @Override
public void onConfigured(CameraCaptureSession session) { public void onConfigured(CameraCaptureSession session) {
Ln.d("Create Capture Session Success");
future.complete(session); future.complete(session);
} }
@@ -311,37 +226,25 @@ public class CameraCapture extends SurfaceCapture {
} }
} }
private CaptureRequest createCaptureRequest(Surface surface) throws CameraAccessException {
CaptureRequest.Builder requestBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
requestBuilder.addTarget(surface);
if (fps > 0) {
requestBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, new Range<>(fps, fps));
}
return requestBuilder.build();
}
@TargetApi(Build.VERSION_CODES.S) @TargetApi(Build.VERSION_CODES.S)
private void setRepeatingRequest(CameraCaptureSession session, CaptureRequest request) throws CameraAccessException, InterruptedException { private void setRepeatingRequest(CameraCaptureSession session, CaptureRequest request) throws CameraAccessException, InterruptedException {
CameraCaptureSession.CaptureCallback callback = new CameraCaptureSession.CaptureCallback() { CompletableFuture<Void> future = new CompletableFuture<>();
session.setRepeatingRequest(request, new CameraCaptureSession.CaptureCallback() {
@Override @Override
public void onCaptureStarted(CameraCaptureSession session, CaptureRequest request, long timestamp, long frameNumber) { public void onCaptureStarted(CameraCaptureSession session, CaptureRequest request, long timestamp, long frameNumber) {
// Called for each frame captured, do nothing future.complete(null);
} }
@Override @Override
public void onCaptureFailed(CameraCaptureSession session, CaptureRequest request, CaptureFailure failure) { public void onCaptureFailed(CameraCaptureSession session, CaptureRequest request, CaptureFailure failure) {
Ln.w("Camera capture failed: frame " + failure.getFrameNumber()); future.completeExceptionally(new CameraAccessException(CameraAccessException.CAMERA_ERROR));
} }
}; }, cameraHandler);
if (highSpeed) { try {
CameraConstrainedHighSpeedCaptureSession highSpeedSession = (CameraConstrainedHighSpeedCaptureSession) session; future.get();
List<CaptureRequest> requests = highSpeedSession.createHighSpeedRequestList(request); } catch (ExecutionException e) {
highSpeedSession.setRepeatingBurst(requests, callback, cameraHandler); throw (CameraAccessException) e.getCause();
} else {
session.setRepeatingRequest(request, callback, cameraHandler);
} }
} }
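
Note: for --camera-high-speed the capture session is created with SESSION_HIGH_SPEED, and the single CaptureRequest must be expanded into a burst, as the hunk above does. A minimal sketch of that branch, assuming the request's CONTROL_AE_TARGET_FPS_RANGE was already set to one of the ranges reported by StreamConfigurationMap.getHighSpeedVideoFpsRangesFor(); the helper class name is invented.

```java
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession;
import android.hardware.camera2.CaptureRequest;
import android.os.Handler;

import java.util.List;

final class HighSpeedSketch {
    // Start repeating capture, using a burst list when the session is a constrained high-speed session.
    static void startRepeating(CameraCaptureSession session, CaptureRequest request, Handler handler)
            throws CameraAccessException {
        if (session instanceof CameraConstrainedHighSpeedCaptureSession) {
            CameraConstrainedHighSpeedCaptureSession hs = (CameraConstrainedHighSpeedCaptureSession) session;
            // High-speed sessions only accept bursts derived from the original request
            List<CaptureRequest> burst = hs.createHighSpeedRequestList(request);
            hs.setRepeatingBurst(burst, null, handler);
        } else {
            session.setRepeatingRequest(request, null, handler);
        }
    }
}
```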
@@ -349,4 +252,4 @@ public class CameraCapture extends SurfaceCapture {
public boolean isClosed() { public boolean isClosed() {
return disconnected.get(); return disconnected.get();
} }
} }
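
Note: camera disconnection (commit 05aa988946) is reported through CameraDevice.StateCallback; the capture only records it in an AtomicBoolean that isClosed() exposes to the encoder loop. A stripped-down sketch of that pattern, with an invented class name:

```java
import android.hardware.camera2.CameraDevice;

import java.util.concurrent.atomic.AtomicBoolean;

final class DisconnectionSketch {
    private final AtomicBoolean disconnected = new AtomicBoolean();

    // StateCallback that records when the camera goes away.
    final CameraDevice.StateCallback callback = new CameraDevice.StateCallback() {
        @Override
        public void onOpened(CameraDevice camera) {
            // nothing to do in this sketch
        }

        @Override
        public void onDisconnected(CameraDevice camera) {
            disconnected.set(true); // polled by the encoder loop, e.g. via isClosed()
        }

        @Override
        public void onError(CameraDevice camera, int error) {
            disconnected.set(true);
        }
    };

    boolean isClosed() {
        return disconnected.get();
    }
}
```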


@@ -1,12 +1,10 @@
package com.genymobile.scrcpy; package com.genymobile.scrcpy;
import android.annotation.SuppressLint;
import android.hardware.camera2.CameraCharacteristics; import android.hardware.camera2.CameraCharacteristics;
public enum CameraFacing { public enum CameraFacing {
FRONT("front", CameraCharacteristics.LENS_FACING_FRONT), FRONT("front", CameraCharacteristics.LENS_FACING_FRONT),
BACK("back", CameraCharacteristics.LENS_FACING_BACK), BACK("back", CameraCharacteristics.LENS_FACING_BACK),
@SuppressLint("InlinedApi") // introduced in API 23
EXTERNAL("external", CameraCharacteristics.LENS_FACING_EXTERNAL); EXTERNAL("external", CameraCharacteristics.LENS_FACING_EXTERNAL);
private final String name; private final String name;



@@ -132,29 +132,20 @@ public final class DesktopConnection implements Closeable {
return controlSocket; return controlSocket;
} }
public void shutdown() throws IOException { public void close() throws IOException {
if (videoSocket != null) { if (videoSocket != null) {
videoSocket.shutdownInput(); videoSocket.shutdownInput();
videoSocket.shutdownOutput(); videoSocket.shutdownOutput();
videoSocket.close();
} }
if (audioSocket != null) { if (audioSocket != null) {
audioSocket.shutdownInput(); audioSocket.shutdownInput();
audioSocket.shutdownOutput(); audioSocket.shutdownOutput();
audioSocket.close();
} }
if (controlSocket != null) { if (controlSocket != null) {
controlSocket.shutdownInput(); controlSocket.shutdownInput();
controlSocket.shutdownOutput(); controlSocket.shutdownOutput();
}
}
public void close() throws IOException {
if (videoSocket != null) {
videoSocket.close();
}
if (audioSocket != null) {
audioSocket.close();
}
if (controlSocket != null) {
controlSocket.close(); controlSocket.close();
} }
} }
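
Note: one version of DesktopConnection separates shutdown() from close(): shutting down both socket directions first wakes up any thread blocked on I/O, and closing afterwards releases the descriptors. The same two-phase idea is shown below with plain java.net.Socket, as an illustration rather than the actual DesktopConnection sockets.

```java
import java.io.IOException;
import java.net.Socket;

final class TwoPhaseCloseSketch {
    // Phase 1: wake up blocked readers/writers without releasing the socket yet.
    static void shutdown(Socket socket) throws IOException {
        socket.shutdownInput();
        socket.shutdownOutput();
    }

    // Phase 2: actually release the underlying descriptor.
    static void close(Socket socket) throws IOException {
        socket.close();
    }
}
```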


@@ -2,12 +2,11 @@ package com.genymobile.scrcpy;
import android.annotation.TargetApi; import android.annotation.TargetApi;
import android.content.AttributionSource; import android.content.AttributionSource;
import android.content.Context; import android.content.MutableContextWrapper;
import android.content.ContextWrapper;
import android.os.Build; import android.os.Build;
import android.os.Process; import android.os.Process;
public final class FakeContext extends ContextWrapper { public final class FakeContext extends MutableContextWrapper {
public static final String PACKAGE_NAME = "com.android.shell"; public static final String PACKAGE_NAME = "com.android.shell";
public static final int ROOT_UID = 0; // Like android.os.Process.ROOT_UID, but before API 29 public static final int ROOT_UID = 0; // Like android.os.Process.ROOT_UID, but before API 29
@@ -19,7 +18,7 @@ public final class FakeContext extends ContextWrapper {
} }
private FakeContext() { private FakeContext() {
super(Workarounds.getSystemContext()); super(null);
} }
@Override @Override
@@ -45,9 +44,4 @@ public final class FakeContext extends ContextWrapper {
public int getDeviceId() { public int getDeviceId() {
return 0; return 0;
} }
@Override
public Context getApplicationContext() {
return this;
}
} }
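
Note: moving FakeContext from ContextWrapper to MutableContextWrapper matters because the base context is not known at construction time; MutableContextWrapper allows it to be injected later (fillBaseContext, further below, does exactly that). A tiny sketch of the API, with an invented holder class:

```java
import android.content.Context;
import android.content.MutableContextWrapper;

final class MutableBaseSketch {
    // Created before any Context is available; the base is filled in later.
    static final MutableContextWrapper WRAPPER = new MutableContextWrapper(null);

    static void attachBase(Context systemContext) {
        // MutableContextWrapper allows replacing the base after construction,
        // which a plain ContextWrapper does not.
        WRAPPER.setBaseContext(systemContext);
    }
}
```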


@@ -2,11 +2,6 @@ package com.genymobile.scrcpy;
import android.util.Log; import android.util.Log;
import java.io.FileDescriptor;
import java.io.FileOutputStream;
import java.io.OutputStream;
import java.io.PrintStream;
/** /**
* Log both to Android logger (so that logs are visible in "adb logcat") and standard output/error (so that they are visible in the terminal * Log both to Android logger (so that logs are visible in "adb logcat") and standard output/error (so that they are visible in the terminal
* directly). * directly).
@@ -16,9 +11,6 @@ public final class Ln {
private static final String TAG = "scrcpy"; private static final String TAG = "scrcpy";
private static final String PREFIX = "[server] "; private static final String PREFIX = "[server] ";
private static final PrintStream CONSOLE_OUT = new PrintStream(new FileOutputStream(FileDescriptor.out));
private static final PrintStream CONSOLE_ERR = new PrintStream(new FileOutputStream(FileDescriptor.err));
enum Level { enum Level {
VERBOSE, DEBUG, INFO, WARN, ERROR VERBOSE, DEBUG, INFO, WARN, ERROR
} }
@@ -29,12 +21,6 @@ public final class Ln {
// not instantiable // not instantiable
} }
public static void disableSystemStreams() {
PrintStream nullStream = new PrintStream(new NullOutputStream());
System.setOut(nullStream);
System.setErr(nullStream);
}
/** /**
* Initialize the log level. * Initialize the log level.
* <p> * <p>
@@ -53,30 +39,30 @@ public final class Ln {
public static void v(String message) { public static void v(String message) {
if (isEnabled(Level.VERBOSE)) { if (isEnabled(Level.VERBOSE)) {
Log.v(TAG, message); Log.v(TAG, message);
CONSOLE_OUT.print(PREFIX + "VERBOSE: " + message + '\n'); System.out.print(PREFIX + "VERBOSE: " + message + '\n');
} }
} }
public static void d(String message) { public static void d(String message) {
if (isEnabled(Level.DEBUG)) { if (isEnabled(Level.DEBUG)) {
Log.d(TAG, message); Log.d(TAG, message);
CONSOLE_OUT.print(PREFIX + "DEBUG: " + message + '\n'); System.out.print(PREFIX + "DEBUG: " + message + '\n');
} }
} }
public static void i(String message) { public static void i(String message) {
if (isEnabled(Level.INFO)) { if (isEnabled(Level.INFO)) {
Log.i(TAG, message); Log.i(TAG, message);
CONSOLE_OUT.print(PREFIX + "INFO: " + message + '\n'); System.out.print(PREFIX + "INFO: " + message + '\n');
} }
} }
public static void w(String message, Throwable throwable) { public static void w(String message, Throwable throwable) {
if (isEnabled(Level.WARN)) { if (isEnabled(Level.WARN)) {
Log.w(TAG, message, throwable); Log.w(TAG, message, throwable);
CONSOLE_ERR.print(PREFIX + "WARN: " + message + '\n'); System.err.print(PREFIX + "WARN: " + message + '\n');
if (throwable != null) { if (throwable != null) {
throwable.printStackTrace(CONSOLE_ERR); throwable.printStackTrace();
} }
} }
} }
@@ -88,9 +74,9 @@ public final class Ln {
public static void e(String message, Throwable throwable) { public static void e(String message, Throwable throwable) {
if (isEnabled(Level.ERROR)) { if (isEnabled(Level.ERROR)) {
Log.e(TAG, message, throwable); Log.e(TAG, message, throwable);
CONSOLE_ERR.print(PREFIX + "ERROR: " + message + '\n'); System.err.print(PREFIX + "ERROR: " + message + "\n");
if (throwable != null) { if (throwable != null) {
throwable.printStackTrace(CONSOLE_ERR); throwable.printStackTrace();
} }
} }
} }
@@ -98,21 +84,4 @@ public final class Ln {
public static void e(String message) { public static void e(String message) {
e(message, null); e(message, null);
} }
static class NullOutputStream extends OutputStream {
@Override
public void write(byte[] b) {
// ignore
}
@Override
public void write(byte[] b, int off, int len) {
// ignore
}
@Override
public void write(int b) {
// ignore
}
}
} }
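
Note: one version of Ln binds its own PrintStreams to the original stdout/stderr file descriptors, and disableSystemStreams() then replaces System.out/System.err with a null stream, so other code running in the server process cannot write to the real descriptors while scrcpy's own log lines still reach the terminal. A compact sketch of that trick; the class and field names are made up.

```java
import java.io.FileDescriptor;
import java.io.FileOutputStream;
import java.io.OutputStream;
import java.io.PrintStream;

final class ConsoleSketch {
    // Keep direct handles on the real stdout/stderr before anything redirects them.
    static final PrintStream OUT = new PrintStream(new FileOutputStream(FileDescriptor.out));
    static final PrintStream ERR = new PrintStream(new FileOutputStream(FileDescriptor.err));

    // Silence System.out/System.err so third-party code cannot write to the real descriptors.
    static void disableSystemStreams() {
        PrintStream devNull = new PrintStream(new OutputStream() {
            @Override
            public void write(int b) {
                // discard
            }
        });
        System.setOut(devNull);
        System.setErr(devNull);
    }
}
```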


@@ -9,11 +9,8 @@ import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraManager; import android.hardware.camera2.CameraManager;
import android.hardware.camera2.params.StreamConfigurationMap; import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.MediaCodec; import android.media.MediaCodec;
import android.util.Range;
import java.util.List; import java.util.List;
import java.util.SortedSet;
import java.util.TreeSet;
public final class LogUtils { public final class LogUtils {
@@ -93,45 +90,21 @@ public final class LogUtils {
builder.append("\n (none)"); builder.append("\n (none)");
} else { } else {
for (String id : cameraIds) { for (String id : cameraIds) {
builder.append("\n --camera-id=").append(id); builder.append("\n --video-source=camera --camera-id=").append(id);
CameraCharacteristics characteristics = cameraManager.getCameraCharacteristics(id); CameraCharacteristics characteristics = cameraManager.getCameraCharacteristics(id);
int facing = characteristics.get(CameraCharacteristics.LENS_FACING); int facing = characteristics.get(CameraCharacteristics.LENS_FACING);
builder.append(" (").append(getCameraFacingName(facing)).append(", "); builder.append(" (").append(getCameraFacingName(facing)).append(", ");
Rect activeSize = characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE); Rect activeSize = characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
builder.append(activeSize.width()).append("x").append(activeSize.height()); builder.append(activeSize.width()).append("x").append(activeSize.height());
try {
// Capture frame rates for low-FPS mode are the same for every resolution
Range<Integer>[] lowFpsRanges = characteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
SortedSet<Integer> uniqueLowFps = getUniqueSet(lowFpsRanges);
builder.append(", fps=").append(uniqueLowFps);
} catch (Exception e) {
// Some devices may provide invalid ranges, causing an IllegalArgumentException "lower must be less than or equal to upper"
Ln.w("Could not get available frame rates for camera " + id, e);
}
builder.append(')'); builder.append(')');
if (includeSizes) { if (includeSizes) {
StreamConfigurationMap configs = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP); StreamConfigurationMap configs = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
android.util.Size[] sizes = configs.getOutputSizes(MediaCodec.class); android.util.Size[] sizes = configs.getOutputSizes(MediaCodec.class);
for (android.util.Size size : sizes) { for (android.util.Size size : sizes) {
builder.append("\n - ").append(size.getWidth()).append('x').append(size.getHeight()); builder.append("\n - ").append(size.getWidth()).append('x').append(size.getHeight());
} }
android.util.Size[] highSpeedSizes = configs.getHighSpeedVideoSizes();
if (highSpeedSizes.length > 0) {
builder.append("\n High speed capture (--camera-high-speed):");
for (android.util.Size size : highSpeedSizes) {
Range<Integer>[] highFpsRanges = configs.getHighSpeedVideoFpsRanges();
SortedSet<Integer> uniqueHighFps = getUniqueSet(highFpsRanges);
builder.append("\n - ").append(size.getWidth()).append("x").append(size.getHeight());
builder.append(" (fps=").append(uniqueHighFps).append(')');
}
}
} }
} }
} }
@@ -140,12 +113,4 @@ public final class LogUtils {
} }
return builder.toString(); return builder.toString();
} }
private static SortedSet<Integer> getUniqueSet(Range<Integer>[] ranges) {
SortedSet<Integer> set = new TreeSet<>();
for (Range<Integer> range : ranges) {
set.add(range.getUpper());
}
return set;
}
} }
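
Note: one version of --list-cameras also prints the supported frame rates and, per size, the high-speed capture ranges. The sketch below shows the underlying Camera2 calls for a single camera; it uses getHighSpeedVideoFpsRangesFor() per size, a slight variation on the hunk above, and the class name is invented.

```java
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.MediaCodec;
import android.util.Range;
import android.util.Size;

final class ListSizesSketch {
    // List the recordable sizes of one camera, plus its high-speed sizes with their fps ranges.
    static String listSizes(CameraManager manager, String cameraId) throws CameraAccessException {
        CameraCharacteristics chars = manager.getCameraCharacteristics(cameraId);
        StreamConfigurationMap configs = chars.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
        StringBuilder sb = new StringBuilder();
        for (Size size : configs.getOutputSizes(MediaCodec.class)) {
            sb.append("  - ").append(size.getWidth()).append('x').append(size.getHeight()).append('\n');
        }
        for (Size size : configs.getHighSpeedVideoSizes()) {
            sb.append("  - high speed ").append(size).append(" fps ranges:");
            for (Range<Integer> range : configs.getHighSpeedVideoFpsRangesFor(size)) {
                sb.append(' ').append(range);
            }
            sb.append('\n');
        }
        return sb.toString();
    }
}
```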


@@ -27,9 +27,6 @@ public class Options {
private String cameraId; private String cameraId;
private Size cameraSize; private Size cameraSize;
private CameraFacing cameraFacing; private CameraFacing cameraFacing;
private CameraAspectRatio cameraAspectRatio;
private int cameraFps;
private boolean cameraHighSpeed;
private boolean showTouches; private boolean showTouches;
private boolean stayAwake; private boolean stayAwake;
private List<CodecOption> videoCodecOptions; private List<CodecOption> videoCodecOptions;
@@ -134,18 +131,6 @@ public class Options {
return cameraFacing; return cameraFacing;
} }
public CameraAspectRatio getCameraAspectRatio() {
return cameraAspectRatio;
}
public int getCameraFps() {
return cameraFps;
}
public boolean getCameraHighSpeed() {
return cameraHighSpeed;
}
public boolean getShowTouches() { public boolean getShowTouches() {
return showTouches; return showTouches;
} }
@@ -376,9 +361,7 @@ public class Options {
} }
break; break;
case "camera_size": case "camera_size":
if (!value.isEmpty()) { options.cameraSize = parseSize(value);
options.cameraSize = parseSize(value);
}
break; break;
case "camera_facing": case "camera_facing":
if (!value.isEmpty()) { if (!value.isEmpty()) {
@@ -389,17 +372,6 @@ public class Options {
options.cameraFacing = facing; options.cameraFacing = facing;
} }
break; break;
case "camera_ar":
if (!value.isEmpty()) {
options.cameraAspectRatio = parseCameraAspectRatio(value);
}
break;
case "camera_fps":
options.cameraFps = Integer.parseInt(value);
break;
case "camera_high_speed":
options.cameraHighSpeed = Boolean.parseBoolean(value);
break;
case "send_device_meta": case "send_device_meta":
options.sendDeviceMeta = Boolean.parseBoolean(value); options.sendDeviceMeta = Boolean.parseBoolean(value);
break; break;
@@ -444,6 +416,9 @@ public class Options {
} }
private static Size parseSize(String size) { private static Size parseSize(String size) {
if (size.isEmpty()) {
return null;
}
// input format: "<width>x<height>" // input format: "<width>x<height>"
String[] tokens = size.split("x"); String[] tokens = size.split("x");
if (tokens.length != 2) { if (tokens.length != 2) {
@@ -453,20 +428,4 @@ public class Options {
int height = Integer.parseInt(tokens[1]); int height = Integer.parseInt(tokens[1]);
return new Size(width, height); return new Size(width, height);
} }
private static CameraAspectRatio parseCameraAspectRatio(String ar) {
if ("sensor".equals(ar)) {
return CameraAspectRatio.sensorAspectRatio();
}
String[] tokens = ar.split(":");
if (tokens.length == 2) {
int w = Integer.parseInt(tokens[0]);
int h = Integer.parseInt(tokens[1]);
return CameraAspectRatio.fromFraction(w, h);
}
float floatAr = Float.parseFloat(tokens[0]);
return CameraAspectRatio.fromFloat(floatAr);
}
} }
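
Note: parseCameraAspectRatio() accepts three forms for --camera-ar: the literal "sensor", a "W:H" fraction, or a bare float. Below is a simplified sketch that resolves each form directly to a float (unlike the real code, which returns a CameraAspectRatio object), with worked values in main(); the class name is invented.

```java
final class AspectRatioSketch {
    // Parse --camera-ar values: "sensor", "W:H" (e.g. "4:3"), or a bare float (e.g. "1.6").
    static float parse(String ar, float sensorAr) {
        if ("sensor".equals(ar)) {
            return sensorAr;
        }
        String[] tokens = ar.split(":");
        if (tokens.length == 2) {
            return (float) Integer.parseInt(tokens[0]) / Integer.parseInt(tokens[1]);
        }
        return Float.parseFloat(ar);
    }

    public static void main(String[] args) {
        System.out.println(parse("4:3", 1.33f));    // 1.3333334
        System.out.println(parse("1.6", 1.33f));    // 1.6
        System.out.println(parse("sensor", 1.33f)); // whatever the sensor reports (here 1.33)
    }
}
```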


@@ -48,8 +48,8 @@ public class ScreenCapture extends SurfaceCapture implements Device.RotationList
} }
@Override @Override
public boolean setMaxSize(int maxSize) { public boolean setMaxSize(int size) {
device.setMaxSize(maxSize); device.setMaxSize(size);
return true; return true;
} }


@@ -2,6 +2,7 @@ package com.genymobile.scrcpy;
import android.os.BatteryManager; import android.os.BatteryManager;
import android.os.Build; import android.os.Build;
import android.os.Looper;
import java.io.IOException; import java.io.IOException;
import java.util.ArrayList; import java.util.ArrayList;
@@ -87,11 +88,7 @@ public final class Server {
} }
private static void scrcpy(Options options) throws IOException, ConfigurationException { private static void scrcpy(Options options) throws IOException, ConfigurationException {
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.S && options.getVideoSource() == VideoSource.CAMERA) { Ln.i("Device: [" + Build.MANUFACTURER + "] " + Build.BRAND + " " + Build.MODEL + " (Android " + Build.VERSION.RELEASE + ")");
Ln.e("Camera mirroring is not supported before Android 12");
throw new ConfigurationException("Camera mirroring is not supported");
}
final Device device = new Device(options); final Device device = new Device(options);
Thread initThread = startInitThread(options); Thread initThread = startInitThread(options);
@@ -102,8 +99,8 @@ public final class Server {
boolean video = options.getVideo(); boolean video = options.getVideo();
boolean audio = options.getAudio(); boolean audio = options.getAudio();
boolean sendDummyByte = options.getSendDummyByte(); boolean sendDummyByte = options.getSendDummyByte();
boolean camera = options.getVideoSource() == VideoSource.CAMERA;
boolean camera = true;
Workarounds.apply(audio, camera); Workarounds.apply(audio, camera);
List<AsyncProcessor> asyncProcessors = new ArrayList<>(); List<AsyncProcessor> asyncProcessors = new ArrayList<>();
@@ -141,8 +138,9 @@ public final class Server {
if (options.getVideoSource() == VideoSource.DISPLAY) { if (options.getVideoSource() == VideoSource.DISPLAY) {
surfaceCapture = new ScreenCapture(device); surfaceCapture = new ScreenCapture(device);
} else { } else {
surfaceCapture = new CameraCapture(options.getCameraId(), options.getCameraFacing(), options.getCameraSize(), CameraCapture.CameraSelection cameraSelection = new CameraCapture.CameraSelection(options.getCameraId(),
options.getMaxSize(), options.getCameraAspectRatio(), options.getCameraFps(), options.getCameraHighSpeed()); options.getCameraFacing());
surfaceCapture = new CameraCapture(cameraSelection, options.getCameraSize());
} }
SurfaceEncoder surfaceEncoder = new SurfaceEncoder(surfaceCapture, videoStreamer, options.getVideoBitRate(), options.getMaxFps(), SurfaceEncoder surfaceEncoder = new SurfaceEncoder(surfaceCapture, videoStreamer, options.getVideoBitRate(), options.getMaxFps(),
options.getVideoCodecOptions(), options.getVideoEncoder(), options.getDownsizeOnError()); options.getVideoCodecOptions(), options.getVideoEncoder(), options.getDownsizeOnError());
@@ -163,8 +161,6 @@ public final class Server {
asyncProcessor.stop(); asyncProcessor.stop();
} }
connection.shutdown();
try { try {
initThread.join(); initThread.join();
for (AsyncProcessor asyncProcessor : asyncProcessors) { for (AsyncProcessor asyncProcessor : asyncProcessors) {
@@ -184,33 +180,15 @@ public final class Server {
return thread; return thread;
} }
public static void main(String... args) { public static void main(String... args) throws Exception {
int status = 0;
try {
internalMain(args);
} catch (Throwable t) {
Ln.e(t.getMessage(), t);
status = 1;
} finally {
// By default, the Java process exits when all non-daemon threads are terminated.
// The Android SDK might start some non-daemon threads internally, preventing the scrcpy server from exiting.
// So force the process to exit explicitly.
System.exit(status);
}
}
private static void internalMain(String... args) throws Exception {
Thread.setDefaultUncaughtExceptionHandler((t, e) -> { Thread.setDefaultUncaughtExceptionHandler((t, e) -> {
Ln.e("Exception on thread " + t, e); Ln.e("Exception on thread " + t, e);
}); });
Options options = Options.parse(args); Options options = Options.parse(args);
Ln.disableSystemStreams();
Ln.initLogLevel(options.getLogLevel()); Ln.initLogLevel(options.getLogLevel());
Ln.i("Device: [" + Build.MANUFACTURER + "] " + Build.BRAND + " " + Build.MODEL + " (Android " + Build.VERSION.RELEASE + ")");
if (options.getList()) { if (options.getList()) {
if (options.getCleanup()) { if (options.getCleanup()) {
CleanUp.unlinkSelf(); CleanUp.unlinkSelf();
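
Note: one version of main() wraps the real entry point so that any Throwable is logged and the process always terminates via System.exit(); as the inline comments explain, the Android SDK may start non-daemon threads that would otherwise keep the process alive. A minimal version of that wrapper pattern, with invented names:

```java
final class MainWrapperSketch {
    public static void main(String... args) {
        int status = 0;
        try {
            run(args);
        } catch (Throwable t) {
            System.err.println("fatal: " + t);
            status = 1;
        } finally {
            // Non-daemon threads started by the framework must not keep the process alive.
            System.exit(status);
        }
    }

    private static void run(String... args) throws Exception {
        // real work goes here
    }
}
```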


@@ -5,14 +5,14 @@ import android.media.MediaCodec;
import java.io.FileDescriptor; import java.io.FileDescriptor;
import java.io.IOException; import java.io.IOException;
import java.nio.ByteBuffer; import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.Arrays;
public final class Streamer { public final class Streamer {
private static final long PACKET_FLAG_CONFIG = 1L << 63; private static final long PACKET_FLAG_CONFIG = 1L << 63;
private static final long PACKET_FLAG_KEY_FRAME = 1L << 62; private static final long PACKET_FLAG_KEY_FRAME = 1L << 62;
private static final long AOPUSHDR = 0x5244485355504F41L; // "AOPUSHDR" in ASCII (little-endian)
private final FileDescriptor fd; private final FileDescriptor fd;
private final Codec codec; private final Codec codec;
private final boolean sendCodecMeta; private final boolean sendCodecMeta;
@@ -30,7 +30,6 @@ public final class Streamer {
public Codec getCodec() { public Codec getCodec() {
return codec; return codec;
} }
public void writeAudioHeader() throws IOException { public void writeAudioHeader() throws IOException {
if (sendCodecMeta) { if (sendCodecMeta) {
ByteBuffer buffer = ByteBuffer.allocate(4); ByteBuffer buffer = ByteBuffer.allocate(4);
@@ -63,12 +62,8 @@ public final class Streamer {
} }
public void writePacket(ByteBuffer buffer, long pts, boolean config, boolean keyFrame) throws IOException { public void writePacket(ByteBuffer buffer, long pts, boolean config, boolean keyFrame) throws IOException {
if (config) { if (config && codec == AudioCodec.OPUS) {
if (codec == AudioCodec.OPUS) { fixOpusConfigPacket(buffer);
fixOpusConfigPacket(buffer);
} else if (codec == AudioCodec.FLAC) {
fixFlacConfigPacket(buffer);
}
} }
if (sendFrameMeta) { if (sendFrameMeta) {
@@ -125,14 +120,11 @@ public final class Streamer {
throw new IOException("Not enough data in OPUS config packet"); throw new IOException("Not enough data in OPUS config packet");
} }
final byte[] opusHeaderId = {'A', 'O', 'P', 'U', 'S', 'H', 'D', 'R'}; long id = buffer.getLong();
byte[] idBuffer = new byte[8]; if (id != AOPUSHDR) {
buffer.get(idBuffer);
if (!Arrays.equals(idBuffer, opusHeaderId)) {
throw new IOException("OPUS header not found"); throw new IOException("OPUS header not found");
} }
// The size is in native byte-order
long sizeLong = buffer.getLong(); long sizeLong = buffer.getLong();
if (sizeLong < 0 || sizeLong >= 0x7FFFFFFF) { if (sizeLong < 0 || sizeLong >= 0x7FFFFFFF) {
throw new IOException("Invalid block size in OPUS header: " + sizeLong); throw new IOException("Invalid block size in OPUS header: " + sizeLong);
@@ -146,41 +138,4 @@ public final class Streamer {
// Set the buffer to point to the OPUS header slice // Set the buffer to point to the OPUS header slice
buffer.limit(buffer.position() + size); buffer.limit(buffer.position() + size);
} }
private static void fixFlacConfigPacket(ByteBuffer buffer) throws IOException {
// 00000000 66 4c 61 43 00 00 00 22 |fLaC..." |
// -------------- BELOW IS THE PART WE MUST PUT AS EXTRADATA -------------------
// 00000000 10 00 10 00 00 00 00 00 | ........|
// 00000010 00 00 0b b8 02 f0 00 00 00 00 00 00 00 00 00 00 |................|
// 00000020 00 00 00 00 00 00 00 00 00 00 |.......... |
// ------------------------------------------------------------------------------
// 00000020 84 00 00 28 20 00 | ...( .|
// 00000030 00 00 72 65 66 65 72 65 6e 63 65 20 6c 69 62 46 |..reference libF|
// 00000040 4c 41 43 20 31 2e 33 2e 32 20 32 30 32 32 31 30 |LAC 1.3.2 202210|
// 00000050 32 32 00 00 00 00 |22....|
//
// <https://developer.android.com/reference/android/media/MediaCodec#CSD>
if (buffer.remaining() < 8) {
throw new IOException("Not enough data in FLAC config packet");
}
final byte[] flacHeaderId = {'f', 'L', 'a', 'C'};
byte[] idBuffer = new byte[4];
buffer.get(idBuffer);
if (!Arrays.equals(idBuffer, flacHeaderId)) {
throw new IOException("FLAC header not found");
}
// The size is in big-endian
buffer.order(ByteOrder.BIG_ENDIAN);
int size = buffer.getInt();
if (buffer.remaining() < size) {
throw new IOException("Not enough data in FLAC header (invalid size: " + size + ")");
}
// Set the buffer to point to the FLAC header slice
buffer.limit(buffer.position() + size);
}
} }
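
Note: one version of fixOpusConfigPacket() compares the 8-byte header id as a byte array, the other reads it as a single long; the constant 0x5244485355504F41L is simply the ASCII bytes "AOPUSHDR" interpreted in little-endian order, so the two checks are equivalent assuming the codec config buffer uses little-endian (native) byte order. A standalone check of that equivalence:

```java
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.charset.StandardCharsets;

final class OpusHeaderIdSketch {
    static final long AOPUSHDR = 0x5244485355504F41L; // "AOPUSHDR" read as a little-endian long

    public static void main(String[] args) {
        ByteBuffer buffer = ByteBuffer.wrap("AOPUSHDR".getBytes(StandardCharsets.US_ASCII))
                .order(ByteOrder.LITTLE_ENDIAN);
        long id = buffer.getLong();
        System.out.println(Long.toHexString(id)); // 5244485355504f41
        System.out.println(id == AOPUSHDR);       // true
    }
}
```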


@@ -56,9 +56,9 @@ public abstract class SurfaceCapture {
/** /**
* Set the maximum capture size (set by the encoder if it does not support the current size). * Set the maximum capture size (set by the encoder if it does not support the current size).
* *
* @param maxSize Maximum size * @param size Maximum size
*/ */
public abstract boolean setMaxSize(int maxSize); public abstract boolean setMaxSize(int size);
/** /**
* Indicate if the capture has been closed internally. * Indicate if the capture has been closed internally.


@@ -6,7 +6,7 @@ import android.media.MediaFormat;
public enum VideoCodec implements Codec { public enum VideoCodec implements Codec {
H264(0x68_32_36_34, "h264", MediaFormat.MIMETYPE_VIDEO_AVC), H264(0x68_32_36_34, "h264", MediaFormat.MIMETYPE_VIDEO_AVC),
H265(0x68_32_36_35, "h265", MediaFormat.MIMETYPE_VIDEO_HEVC), H265(0x68_32_36_35, "h265", MediaFormat.MIMETYPE_VIDEO_HEVC),
@SuppressLint("InlinedApi") // introduced in API 29 @SuppressLint("InlinedApi") // introduced in API 21
AV1(0x00_61_76_31, "av1", MediaFormat.MIMETYPE_VIDEO_AV1); AV1(0x00_61_76_31, "av1", MediaFormat.MIMETYPE_VIDEO_AV1);
private final int id; // 4-byte ASCII representation of the name private final int id; // 4-byte ASCII representation of the name


@@ -1,5 +1,7 @@
package com.genymobile.scrcpy; package com.genymobile.scrcpy;
import android.media.MediaRecorder;
public enum VideoSource { public enum VideoSource {
DISPLAY("display"), DISPLAY("display"),
CAMERA("camera"); CAMERA("camera");


@@ -21,34 +21,18 @@ import java.lang.reflect.Method;
public final class Workarounds { public final class Workarounds {
private static final Class<?> ACTIVITY_THREAD_CLASS; private static Class<?> activityThreadClass;
private static final Object ACTIVITY_THREAD; private static Object activityThread;
static {
prepareMainLooper();
try {
// ActivityThread activityThread = new ActivityThread();
ACTIVITY_THREAD_CLASS = Class.forName("android.app.ActivityThread");
Constructor<?> activityThreadConstructor = ACTIVITY_THREAD_CLASS.getDeclaredConstructor();
activityThreadConstructor.setAccessible(true);
ACTIVITY_THREAD = activityThreadConstructor.newInstance();
// ActivityThread.sCurrentActivityThread = activityThread;
Field sCurrentActivityThreadField = ACTIVITY_THREAD_CLASS.getDeclaredField("sCurrentActivityThread");
sCurrentActivityThreadField.setAccessible(true);
sCurrentActivityThreadField.set(null, ACTIVITY_THREAD);
} catch (Exception e) {
throw new AssertionError(e);
}
}
private Workarounds() { private Workarounds() {
// not instantiable // not instantiable
} }
public static void apply(boolean audio, boolean camera) { public static void apply(boolean audio, boolean camera) {
Workarounds.prepareMainLooper();
boolean mustFillAppInfo = false; boolean mustFillAppInfo = false;
boolean mustFillBaseContext = true;
boolean mustFillAppContext = false; boolean mustFillAppContext = false;
if (Build.BRAND.equalsIgnoreCase("meizu")) { if (Build.BRAND.equalsIgnoreCase("meizu")) {
@@ -69,6 +53,7 @@ public final class Workarounds {
// - <https://github.com/Genymobile/scrcpy/issues/4015#issuecomment-1595382142> // - <https://github.com/Genymobile/scrcpy/issues/4015#issuecomment-1595382142>
// - <https://github.com/Genymobile/scrcpy/issues/3805#issuecomment-1596148031> // - <https://github.com/Genymobile/scrcpy/issues/3805#issuecomment-1596148031>
mustFillAppInfo = true; mustFillAppInfo = true;
mustFillBaseContext = true;
mustFillAppContext = true; mustFillAppContext = true;
} }
@@ -81,12 +66,14 @@ public final class Workarounds {
if (camera) { if (camera) {
mustFillAppInfo = true; mustFillAppInfo = true;
mustFillAppContext = true;
} }
if (mustFillAppInfo) { if (mustFillAppInfo) {
Workarounds.fillAppInfo(); Workarounds.fillAppInfo();
} }
if (mustFillBaseContext) {
Workarounds.fillBaseContext();
}
if (mustFillAppContext) { if (mustFillAppContext) {
Workarounds.fillAppContext(); Workarounds.fillAppContext();
} }
@@ -105,9 +92,27 @@ public final class Workarounds {
Looper.prepareMainLooper(); Looper.prepareMainLooper();
} }
@SuppressLint("PrivateApi,DiscouragedPrivateApi")
private static void fillActivityThread() throws Exception {
if (activityThread == null) {
// ActivityThread activityThread = new ActivityThread();
activityThreadClass = Class.forName("android.app.ActivityThread");
Constructor<?> activityThreadConstructor = activityThreadClass.getDeclaredConstructor();
activityThreadConstructor.setAccessible(true);
activityThread = activityThreadConstructor.newInstance();
// ActivityThread.sCurrentActivityThread = activityThread;
Field sCurrentActivityThreadField = activityThreadClass.getDeclaredField("sCurrentActivityThread");
sCurrentActivityThreadField.setAccessible(true);
sCurrentActivityThreadField.set(null, activityThread);
}
}
@SuppressLint("PrivateApi,DiscouragedPrivateApi") @SuppressLint("PrivateApi,DiscouragedPrivateApi")
private static void fillAppInfo() { private static void fillAppInfo() {
try { try {
fillActivityThread();
// ActivityThread.AppBindData appBindData = new ActivityThread.AppBindData(); // ActivityThread.AppBindData appBindData = new ActivityThread.AppBindData();
Class<?> appBindDataClass = Class.forName("android.app.ActivityThread$AppBindData"); Class<?> appBindDataClass = Class.forName("android.app.ActivityThread$AppBindData");
Constructor<?> appBindDataConstructor = appBindDataClass.getDeclaredConstructor(); Constructor<?> appBindDataConstructor = appBindDataClass.getDeclaredConstructor();
@@ -123,9 +128,9 @@ public final class Workarounds {
appInfoField.set(appBindData, applicationInfo); appInfoField.set(appBindData, applicationInfo);
// activityThread.mBoundApplication = appBindData; // activityThread.mBoundApplication = appBindData;
Field mBoundApplicationField = ACTIVITY_THREAD_CLASS.getDeclaredField("mBoundApplication"); Field mBoundApplicationField = activityThreadClass.getDeclaredField("mBoundApplication");
mBoundApplicationField.setAccessible(true); mBoundApplicationField.setAccessible(true);
mBoundApplicationField.set(ACTIVITY_THREAD, appBindData); mBoundApplicationField.set(activityThread, appBindData);
} catch (Throwable throwable) { } catch (Throwable throwable) {
// this is a workaround, so failing is not an error // this is a workaround, so failing is not an error
Ln.d("Could not fill app info: " + throwable.getMessage()); Ln.d("Could not fill app info: " + throwable.getMessage());
@@ -135,29 +140,33 @@ public final class Workarounds {
@SuppressLint("PrivateApi,DiscouragedPrivateApi") @SuppressLint("PrivateApi,DiscouragedPrivateApi")
private static void fillAppContext() { private static void fillAppContext() {
try { try {
Application app = new Application(); fillActivityThread();
Application app = Application.class.newInstance();
Field baseField = ContextWrapper.class.getDeclaredField("mBase"); Field baseField = ContextWrapper.class.getDeclaredField("mBase");
baseField.setAccessible(true); baseField.setAccessible(true);
baseField.set(app, FakeContext.get()); baseField.set(app, FakeContext.get());
// activityThread.mInitialApplication = app; // activityThread.mInitialApplication = app;
Field mInitialApplicationField = ACTIVITY_THREAD_CLASS.getDeclaredField("mInitialApplication"); Field mInitialApplicationField = activityThreadClass.getDeclaredField("mInitialApplication");
mInitialApplicationField.setAccessible(true); mInitialApplicationField.setAccessible(true);
mInitialApplicationField.set(ACTIVITY_THREAD, app); mInitialApplicationField.set(activityThread, app);
} catch (Throwable throwable) { } catch (Throwable throwable) {
// this is a workaround, so failing is not an error // this is a workaround, so failing is not an error
Ln.d("Could not fill app context: " + throwable.getMessage()); Ln.d("Could not fill app context: " + throwable.getMessage());
} }
} }
static Context getSystemContext() { private static void fillBaseContext() {
try { try {
Method getSystemContextMethod = ACTIVITY_THREAD_CLASS.getDeclaredMethod("getSystemContext"); fillActivityThread();
return (Context) getSystemContextMethod.invoke(ACTIVITY_THREAD);
Method getSystemContextMethod = activityThreadClass.getDeclaredMethod("getSystemContext");
Context context = (Context) getSystemContextMethod.invoke(activityThread);
FakeContext.get().setBaseContext(context);
} catch (Throwable throwable) { } catch (Throwable throwable) {
// this is a workaround, so failing is not an error // this is a workaround, so failing is not an error
Ln.d("Could not get system context: " + throwable.getMessage()); Ln.d("Could not fill base context: " + throwable.getMessage());
return null;
} }
} }
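
Note: fillBaseContext() reflectively creates the hidden android.app.ActivityThread, registers it as the current thread, and asks it for the system Context, which is then injected into FakeContext via setBaseContext(). A condensed sketch of that reflection chain is shown below (the real code also prepares the main Looper first); this relies on hidden APIs and may break on future Android releases, and the helper name is invented.

```java
import android.content.Context;

import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.lang.reflect.Method;

final class SystemContextSketch {
    // Create an ActivityThread instance by reflection and return its system Context, or null on failure.
    static Context getSystemContext() {
        try {
            Class<?> activityThreadClass = Class.forName("android.app.ActivityThread");

            // ActivityThread activityThread = new ActivityThread();
            Constructor<?> ctor = activityThreadClass.getDeclaredConstructor();
            ctor.setAccessible(true);
            Object activityThread = ctor.newInstance();

            // ActivityThread.sCurrentActivityThread = activityThread;
            Field current = activityThreadClass.getDeclaredField("sCurrentActivityThread");
            current.setAccessible(true);
            current.set(null, activityThread);

            // return activityThread.getSystemContext();
            Method getSystemContext = activityThreadClass.getDeclaredMethod("getSystemContext");
            return (Context) getSystemContext.invoke(activityThread);
        } catch (ReflectiveOperationException e) {
            return null; // workaround only; failure is not fatal
        }
    }
}
```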


@@ -1,6 +1,7 @@
package com.genymobile.scrcpy.wrappers; package com.genymobile.scrcpy.wrappers;
import com.genymobile.scrcpy.FakeContext; import com.genymobile.scrcpy.FakeContext;
import com.genymobile.scrcpy.Workarounds;
import android.annotation.SuppressLint; import android.annotation.SuppressLint;
import android.content.Context; import android.content.Context;