Compare commits

...

10 Commits

Author SHA1 Message Date
Andrew Gunnerson
d1bac2c469 Add support for high frame rate camera capture
Add --camera-high-speed to enable high frame rate camera capture. If
the option is enabled, then --camera-fps is mandatory.
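
For example (assuming the device exposes a 120 fps high-speed mode, as
listed by --list-camera-sizes):

    scrcpy --video-source=camera --camera-high-speed --camera-fps=120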

Co-authored-by: Romain Vimont <rom@rom1v.com>
Signed-off-by: Andrew Gunnerson <accounts+github@chiller3.com>
Signed-off-by: Romain Vimont <rom@rom1v.com>
2023-10-30 13:08:54 +01:00
Andrew Gunnerson
e574da7d84 Add --camera-fps
Add a new option for specifying the camera frame rate.

By default, Android's default frame rate (30 fps) is used.
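
For example (60 is only an illustrative value; the rates actually
supported depend on the device camera):

    scrcpy --video-source=camera --camera-fps=60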

Signed-off-by: Andrew Gunnerson <accounts+github@chiller3.com>
2023-10-30 13:05:41 +01:00
Romain Vimont
bdfed6ec8f Do not arbitrarily limit --max-fps to 1000
Limit to the variable type size, for consistency.
2023-10-30 13:05:41 +01:00
Simon Chan
c526e0708c Fail-fast camera mirroring on Android 11 and older
Co-authored-by: Romain Vimont <rom@rom1v.com>
Signed-off-by: Romain Vimont <rom@rom1v.com>
2023-10-30 13:05:41 +01:00
Romain Vimont
76e3b0a068 Support camera size selection using -m/--camera-ar
In addition to --camera-size to specify an explicit size, make it
possible to select the camera size automatically, respecting the maximum
size (already used for display mirroring) and an aspect ratio.

For example, "scrcpy --video-source=camera" followed by:
 - (no additional arguments)
    : mirrors at the maximum size, with any aspect ratio
 - -m1920
    : only consider valid sizes having both dimensions not above 1920
 - --camera-ar=4:3
    : only consider valid sizes having an aspect ratio of 4:3 (+/- 10%)
 - -m2048 --camera-ar=1.6
    : only consider valid sizes having both dimensions not above 2048
      and an aspect ratio of 1.6 (+/- 10%)

Co-authored-by: Simon Chan <1330321+yume-chan@users.noreply.github.com>
2023-10-30 13:05:41 +01:00
Simon Chan
e886db7137 Add --camera-facing
Add an option to select the camera by its lens facing (any, front, back
or external).
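
For example, to mirror the front camera:

    scrcpy --video-source=camera --camera-facing=front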

Co-authored-by: Romain Vimont <rom@rom1v.com>
Signed-off-by: Romain Vimont <rom@rom1v.com>
2023-10-30 13:05:40 +01:00
Romain Vimont
05d6be550a Make camera id optional
If no camera id is provided, use the first camera available.
2023-10-30 13:05:19 +01:00
Romain Vimont
1c3ad15fd9 Handle camera disconnection
Stop mirroring on camera disconnection.
2023-10-30 13:04:54 +01:00
Romain Vimont
db7c10f631 Automatically select audio source
If --audio-source is not specified, select the default value
according to the video source:
 - for display mirroring, use device audio by default;
 - for camera mirroring, use microphone by default.
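
For example:
 - "scrcpy --video-source=camera" captures microphone audio;
 - "scrcpy --video-source=camera --audio-source=output" forces the
   device audio output instead.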
2023-10-30 13:04:54 +01:00
Simon Chan
ae599faa77 Add camera mirroring
Add --video-source=camera, and related options:
 - --camera-id=ID: select the camera (ids are listed by --list-cameras);
 - --camera-size=WIDTHxHEIGHT: select the capture size.
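
For example (the id "0" is only illustrative; actual ids are
device-specific and listed by --list-cameras):

    scrcpy --video-source=camera --camera-id=0 --camera-size=1920x1080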

Co-authored-by: Romain Vimont <rom@rom1v.com>
Signed-off-by: Romain Vimont <rom@rom1v.com>
2023-10-30 13:04:52 +01:00
20 changed files with 984 additions and 14 deletions

View File

@@ -10,6 +10,12 @@ _scrcpy() {
--audio-source=
--audio-output-buffer=
-b --video-bit-rate=
--camera-ar=
--camera-id=
--camera-facing=
--camera-fps=
--camera-high-speed
--camera-size=
--crop=
-d --select-usb
--disable-screensaver
@@ -74,6 +80,7 @@ _scrcpy() {
--video-codec=
--video-codec-options=
--video-encoder=
--video-source=
-w --stay-awake
--window-borderless
--window-title=
@@ -93,10 +100,18 @@ _scrcpy() {
COMPREPLY=($(compgen -W 'opus aac raw' -- "$cur"))
return
;;
--video-source)
COMPREPLY=($(compgen -W 'display camera' -- "$cur"))
return
;;
--audio-source)
COMPREPLY=($(compgen -W 'output mic' -- "$cur"))
return
;;
--camera-facing)
COMPREPLY=($(compgen -W 'front back external' -- "$cur"))
return
;;
--lock-video-orientation)
COMPREPLY=($(compgen -W 'unlocked initial 0 1 2 3' -- "$cur"))
return
@@ -141,6 +156,10 @@ _scrcpy() {
|--audio-codec-options \
|--audio-encoder \
|--audio-output-buffer \
|--camera-ar \
|--camera-id \
|--camera-fps \
|--camera-size \
|--crop \
|--display-id \
|--display-buffer \

View File

@@ -17,6 +17,12 @@ arguments=(
'--audio-source=[Select the audio source]:source:(output mic)'
'--audio-output-buffer=[Configure the size of the SDL audio output buffer (in milliseconds)]'
{-b,--video-bit-rate=}'[Encode the video at the given bit-rate]'
'--camera-ar=[Select the camera size by its aspect ratio]'
'--camera-high-speed[Enable high-speed camera capture mode]'
'--camera-id=[Specify the camera id to mirror]'
'--camera-facing=[Select the device camera by its facing direction]:facing:(front back external)'
'--camera-fps=[Specify the camera capture frame rate]'
'--camera-size=[Specify an explicit camera capture size]'
'--crop=[\[width\:height\:x\:y\] Crop the device screen on the server]'
{-d,--select-usb}'[Use USB device]'
'--disable-screensaver[Disable screensaver while scrcpy is running]'
@@ -78,6 +84,7 @@ arguments=(
'--video-codec=[Select the video codec]:codec:(h264 h265 av1)'
'--video-codec-options=[Set a list of comma-separated key\:type=value options for the device video encoder]'
'--video-encoder=[Use a specific MediaCodec video encoder]'
'--video-source=[Select the video source]:source:(display camera)'
{-w,--stay-awake}'[Keep the device on while scrcpy is running, when the device is plugged in]'
'--window-borderless[Disable window decorations \(display borderless window\)]'
'--window-title=[Set a custom window title]'

View File

@@ -75,6 +75,40 @@ Encode the video at the given bit rate, expressed in bits/s. Unit suffixes are s
Default is 8M (8000000).
.TP
.BI "\-\-camera\-ar " ar
Select the camera size by its aspect ratio (+/- 10%).
Possible values are "sensor" (use the camera sensor aspect ratio), "<num>:<den>" (e.g. "4:3") and "<value>" (e.g. "1.6").
.TP
.B \-\-camera\-high\-speed
Enable high-speed camera capture mode.
This mode is restricted to specific resolutions and frame rates, listed by \-\-list\-camera\-sizes.
.TP
.BI "\-\-camera\-id " id
Specify the device camera id to mirror.
The available camera ids can be listed by \-\-list\-cameras.
.TP
.BI "\-\-camera\-facing " facing
Select the device camera by its facing direction.
Possible values are "front", "back" and "external".
.TP
.BI "\-\-camera\-fps " fps
Specify the camera capture frame rate.
If not specified, Android's default frame rate (30 fps) is used.
.TP
.BI "\-\-camera\-size " width\fRx\fIheight
Specify an explicit camera capture size.
.TP
.BI "\-\-crop " width\fR:\fIheight\fR:\fIx\fR:\fIy
Crop the device screen on the server.
@@ -432,6 +466,14 @@ Use a specific MediaCodec video encoder (depending on the codec provided by \fB\
The available encoders can be listed by \-\-list\-encoders.
.TP
.BI "\-\-video\-source " source
Select the video source (display or camera).
Camera mirroring requires Android 12+.
Default is display.
.TP
.B \-w, \-\-stay-awake
Keep the device on while scrcpy is running, when the device is plugged in.

View File

@@ -77,12 +77,19 @@ enum {
OPT_NO_VIDEO,
OPT_NO_AUDIO_PLAYBACK,
OPT_NO_VIDEO_PLAYBACK,
OPT_VIDEO_SOURCE,
OPT_AUDIO_SOURCE,
OPT_KILL_ADB_ON_CLOSE,
OPT_TIME_LIMIT,
OPT_PAUSE_ON_EXIT,
OPT_LIST_CAMERAS,
OPT_LIST_CAMERA_SIZES,
OPT_CAMERA_ID,
OPT_CAMERA_SIZE,
OPT_CAMERA_FACING,
OPT_CAMERA_AR,
OPT_CAMERA_FPS,
OPT_CAMERA_HIGH_SPEED,
};
struct sc_option {
@@ -199,6 +206,51 @@ static const struct sc_option options[] = {
.longopt = "bit-rate",
.argdesc = "value",
},
{
.longopt_id = OPT_CAMERA_AR,
.longopt = "camera-ar",
.argdesc = "ar",
.text = "Select the camera size by its aspect ratio (+/- 10%).\n"
"Possible values are \"sensor\" (use the camera sensor aspect "
"ratio), \"<num>:<den>\" (e.g. \"4:3\") or \"<value>\" (e.g. "
"\"1.6\")."
},
{
.longopt_id = OPT_CAMERA_ID,
.longopt = "camera-id",
.argdesc = "id",
.text = "Specify the device camera id to mirror.\n"
"The available camera ids can be listed by:\n"
" scrcpy --list-cameras",
},
{
.longopt_id = OPT_CAMERA_FACING,
.longopt = "camera-facing",
.argdesc = "facing",
.text = "Select the device camera by its facing direction.\n"
"Possible values are \"front\", \"back\" and \"external\".",
},
{
.longopt_id = OPT_CAMERA_HIGH_SPEED,
.longopt = "camera-high-speed",
.text = "Enable high-speed camera capture mode.\n"
"This mode is restricted to specific resolutions and frame "
"rates, listed by --list-camera-sizes.",
},
{
.longopt_id = OPT_CAMERA_SIZE,
.longopt = "camera-size",
.argdesc = "<width>x<height>",
.text = "Specify an explicit camera capture size.",
},
{
.longopt_id = OPT_CAMERA_FPS,
.longopt = "camera-fps",
.argdesc = "value",
.text = "Specify the camera capture frame rate.\n"
"If not specified, Android's default frame rate (30 fps) is "
"used.",
},
{
// Not really deprecated (--codec has never been released), but without
// declaring an explicit --codec option, getopt_long() partial matching
@@ -703,6 +755,14 @@ static const struct sc_option options[] = {
"codec provided by --video-codec).\n"
"The available encoders can be listed by --list-encoders.",
},
{
.longopt_id = OPT_VIDEO_SOURCE,
.longopt = "video-source",
.argdesc = "source",
.text = "Select the video source (display or camera).\n"
"Camera mirroring requires Android 12+.\n"
"Default is display.",
},
{
.shortopt = 'w',
.longopt = "stay-awake",
@@ -1272,7 +1332,7 @@ parse_max_size(const char *s, uint16_t *max_size) {
static bool
parse_max_fps(const char *s, uint16_t *max_fps) {
long value;
bool ok = parse_integer_arg(s, &value, false, 0, 1000, "max fps");
bool ok = parse_integer_arg(s, &value, false, 0, 0xFFFF, "max fps");
if (!ok) {
return false;
}
@@ -1643,6 +1703,22 @@ parse_audio_codec(const char *optarg, enum sc_codec *codec) {
return false;
}
static bool
parse_video_source(const char *optarg, enum sc_video_source *source) {
if (!strcmp(optarg, "display")) {
*source = SC_VIDEO_SOURCE_DISPLAY;
return true;
}
if (!strcmp(optarg, "camera")) {
*source = SC_VIDEO_SOURCE_CAMERA;
return true;
}
LOGE("Unsupported video source: %s (expected display or camera)", optarg);
return false;
}
static bool
parse_audio_source(const char *optarg, enum sc_audio_source *source) {
if (!strcmp(optarg, "mic")) {
@@ -1659,6 +1735,46 @@ parse_audio_source(const char *optarg, enum sc_audio_source *source) {
return false;
}
static bool
parse_camera_facing(const char *optarg, enum sc_camera_facing *facing) {
if (!strcmp(optarg, "front")) {
*facing = SC_CAMERA_FACING_FRONT;
return true;
}
if (!strcmp(optarg, "back")) {
*facing = SC_CAMERA_FACING_BACK;
return true;
}
if (!strcmp(optarg, "external")) {
*facing = SC_CAMERA_FACING_EXTERNAL;
return true;
}
if (*optarg == '\0') {
// Empty string is a valid value (equivalent to not passing the option)
*facing = SC_CAMERA_FACING_ANY;
return true;
}
LOGE("Unsupported camera facing: %s (expected front, back or external)",
optarg);
return false;
}
static bool
parse_camera_fps(const char *s, uint16_t *camera_fps) {
long value;
bool ok = parse_integer_arg(s, &value, false, 0, 0xFFFF, "camera fps");
if (!ok) {
return false;
}
*camera_fps = (uint16_t) value;
return true;
}
static bool
parse_time_limit(const char *s, sc_tick *tick) {
long value;
@@ -2030,6 +2146,11 @@ parse_args_with_getopt(struct scrcpy_cli_args *args, int argc, char *argv[],
return false;
}
break;
case OPT_VIDEO_SOURCE:
if (!parse_video_source(optarg, &opts->video_source)) {
return false;
}
break;
case OPT_AUDIO_SOURCE:
if (!parse_audio_source(optarg, &opts->audio_source)) {
return false;
@@ -2048,6 +2169,28 @@ parse_args_with_getopt(struct scrcpy_cli_args *args, int argc, char *argv[],
return false;
}
break;
case OPT_CAMERA_AR:
opts->camera_ar = optarg;
break;
case OPT_CAMERA_ID:
opts->camera_id = optarg;
break;
case OPT_CAMERA_SIZE:
opts->camera_size = optarg;
break;
case OPT_CAMERA_FACING:
if (!parse_camera_facing(optarg, &opts->camera_facing)) {
return false;
}
break;
case OPT_CAMERA_FPS:
if (!parse_camera_fps(optarg, &opts->camera_fps)) {
return false;
}
break;
case OPT_CAMERA_HIGH_SPEED:
opts->camera_high_speed = true;
break;
default:
// getopt prints the error message on stderr
return false;
@@ -2141,6 +2284,58 @@ parse_args_with_getopt(struct scrcpy_cli_args *args, int argc, char *argv[],
opts->force_adb_forward = true;
}
if (opts->video_source == SC_VIDEO_SOURCE_CAMERA) {
if (opts->display_id) {
LOGE("--display-id is only available with --video-source=display");
return false;
}
if (opts->camera_id && opts->camera_facing != SC_CAMERA_FACING_ANY) {
LOGE("Could not specify both --camera-id and --camera-facing");
return false;
}
if (opts->camera_size) {
if (opts->max_size) {
LOGE("Could not specify both --camera-size and -m/--max-size");
return false;
}
if (opts->camera_ar) {
LOGE("Could not specify both --camera-size and --camera-ar");
return false;
}
}
if (opts->camera_high_speed && !opts->camera_fps) {
LOGE("--camera-high-speed requires an explicit --camera-fps value");
return false;
}
if (opts->control) {
LOGI("Camera video source: control disabled");
opts->control = false;
}
} else if (opts->camera_id
|| opts->camera_ar
|| opts->camera_facing != SC_CAMERA_FACING_ANY
|| opts->camera_fps
|| opts->camera_high_speed
|| opts->camera_size) {
LOGE("Camera options are only available with --video-source=camera");
return false;
}
if (opts->audio && opts->audio_source == SC_AUDIO_SOURCE_AUTO) {
// Select the audio source according to the video source
if (opts->video_source == SC_VIDEO_SOURCE_DISPLAY) {
opts->audio_source = SC_AUDIO_SOURCE_OUTPUT;
} else {
opts->audio_source = SC_AUDIO_SOURCE_MIC;
LOGI("Camera video source: microphone audio source selected");
}
}
if (opts->record_format && !opts->record_filename) {
LOGE("Record format specified without recording");
return false;

View File

@@ -11,13 +11,19 @@ const struct scrcpy_options scrcpy_options_default = {
.audio_codec_options = NULL,
.video_encoder = NULL,
.audio_encoder = NULL,
.camera_id = NULL,
.camera_size = NULL,
.camera_ar = NULL,
.camera_fps = 0,
.log_level = SC_LOG_LEVEL_INFO,
.video_codec = SC_CODEC_H264,
.audio_codec = SC_CODEC_OPUS,
.audio_source = SC_AUDIO_SOURCE_OUTPUT,
.video_source = SC_VIDEO_SOURCE_DISPLAY,
.audio_source = SC_AUDIO_SOURCE_AUTO,
.record_format = SC_RECORD_FORMAT_AUTO,
.keyboard_input_mode = SC_KEYBOARD_INPUT_MODE_INJECT,
.mouse_input_mode = SC_MOUSE_INPUT_MODE_INJECT,
.camera_facing = SC_CAMERA_FACING_ANY,
.port_range = {
.first = DEFAULT_LOCAL_PORT_RANGE_FIRST,
.last = DEFAULT_LOCAL_PORT_RANGE_LAST,
@@ -80,5 +86,6 @@ const struct scrcpy_options scrcpy_options_default = {
.audio = true,
.require_audio = false,
.kill_adb_on_close = false,
.camera_high_speed = false,
.list = 0,
};

View File

@@ -44,11 +44,24 @@ enum sc_codec {
SC_CODEC_RAW,
};
enum sc_video_source {
SC_VIDEO_SOURCE_DISPLAY,
SC_VIDEO_SOURCE_CAMERA,
};
enum sc_audio_source {
SC_AUDIO_SOURCE_AUTO, // OUTPUT for video DISPLAY, MIC for video CAMERA
SC_AUDIO_SOURCE_OUTPUT,
SC_AUDIO_SOURCE_MIC,
};
enum sc_camera_facing {
SC_CAMERA_FACING_ANY,
SC_CAMERA_FACING_FRONT,
SC_CAMERA_FACING_BACK,
SC_CAMERA_FACING_EXTERNAL,
};
enum sc_lock_video_orientation {
SC_LOCK_VIDEO_ORIENTATION_UNLOCKED = -1,
// lock the current orientation when scrcpy starts
@@ -117,13 +130,19 @@ struct scrcpy_options {
const char *audio_codec_options;
const char *video_encoder;
const char *audio_encoder;
const char *camera_id;
const char *camera_size;
const char *camera_ar;
uint16_t camera_fps;
enum sc_log_level log_level;
enum sc_codec video_codec;
enum sc_codec audio_codec;
enum sc_video_source video_source;
enum sc_audio_source audio_source;
enum sc_record_format record_format;
enum sc_keyboard_input_mode keyboard_input_mode;
enum sc_mouse_input_mode mouse_input_mode;
enum sc_camera_facing camera_facing;
struct sc_port_range port_range;
uint32_t tunnel_host;
uint16_t tunnel_port;
@@ -180,6 +199,7 @@ struct scrcpy_options {
bool audio;
bool require_audio;
bool kill_adb_on_close;
bool camera_high_speed;
#define SC_OPTION_LIST_ENCODERS 0x1
#define SC_OPTION_LIST_DISPLAYS 0x2
#define SC_OPTION_LIST_CAMERAS 0x4

View File

@@ -351,7 +351,9 @@ scrcpy(struct scrcpy_options *options) {
.log_level = options->log_level,
.video_codec = options->video_codec,
.audio_codec = options->audio_codec,
.video_source = options->video_source,
.audio_source = options->audio_source,
.camera_facing = options->camera_facing,
.crop = options->crop,
.port_range = options->port_range,
.tunnel_host = options->tunnel_host,
@@ -371,6 +373,10 @@ scrcpy(struct scrcpy_options *options) {
.audio_codec_options = options->audio_codec_options,
.video_encoder = options->video_encoder,
.audio_encoder = options->audio_encoder,
.camera_id = options->camera_id,
.camera_size = options->camera_size,
.camera_ar = options->camera_ar,
.camera_fps = options->camera_fps,
.force_adb_forward = options->force_adb_forward,
.power_off_on_close = options->power_off_on_close,
.clipboard_autosync = options->clipboard_autosync,
@@ -380,6 +386,7 @@ scrcpy(struct scrcpy_options *options) {
.cleanup = options->cleanup,
.power_on = options->power_on,
.kill_adb_on_close = options->kill_adb_on_close,
.camera_high_speed = options->camera_high_speed,
.list = options->list,
};

View File

@@ -76,6 +76,8 @@ sc_server_params_destroy(struct sc_server_params *params) {
free((char *) params->video_encoder);
free((char *) params->audio_encoder);
free((char *) params->tcpip_dst);
free((char *) params->camera_id);
free((char *) params->camera_ar);
}
static bool
@@ -103,6 +105,8 @@ sc_server_params_copy(struct sc_server_params *dst,
COPY(video_encoder);
COPY(audio_encoder);
COPY(tcpip_dst);
COPY(camera_id);
COPY(camera_ar);
#undef COPY
return true;
@@ -181,6 +185,20 @@ sc_server_get_codec_name(enum sc_codec codec) {
}
}
static const char *
sc_server_get_camera_facing_name(enum sc_camera_facing camera_facing) {
switch (camera_facing) {
case SC_CAMERA_FACING_FRONT:
return "front";
case SC_CAMERA_FACING_BACK:
return "back";
case SC_CAMERA_FACING_EXTERNAL:
return "external";
default:
return NULL;
}
}
static sc_pid
execute_server(struct sc_server *server,
const struct sc_server_params *params) {
@@ -247,8 +265,11 @@ execute_server(struct sc_server *server,
ADD_PARAM("audio_codec=%s",
sc_server_get_codec_name(params->audio_codec));
}
if (params->audio_source != SC_AUDIO_SOURCE_OUTPUT) {
assert(params->audio_source == SC_AUDIO_SOURCE_MIC);
if (params->video_source != SC_VIDEO_SOURCE_DISPLAY) {
assert(params->video_source == SC_VIDEO_SOURCE_CAMERA);
ADD_PARAM("video_source=camera");
}
if (params->audio_source == SC_AUDIO_SOURCE_MIC) {
ADD_PARAM("audio_source=mic");
}
if (params->max_size) {
@@ -274,6 +295,25 @@ execute_server(struct sc_server *server,
if (params->display_id) {
ADD_PARAM("display_id=%" PRIu32, params->display_id);
}
if (params->camera_id) {
ADD_PARAM("camera_id=%s", params->camera_id);
}
if (params->camera_size) {
ADD_PARAM("camera_size=%s", params->camera_size);
}
if (params->camera_facing != SC_CAMERA_FACING_ANY) {
ADD_PARAM("camera_facing=%s",
sc_server_get_camera_facing_name(params->camera_facing));
}
if (params->camera_ar) {
ADD_PARAM("camera_ar=%s", params->camera_ar);
}
if (params->camera_fps) {
ADD_PARAM("camera_fps=%" PRIu16, params->camera_fps);
}
if (params->camera_high_speed) {
ADD_PARAM("camera_high_speed=true");
}
if (params->show_touches) {
ADD_PARAM("show_touches=true");
}

View File

@@ -26,12 +26,18 @@ struct sc_server_params {
enum sc_log_level log_level;
enum sc_codec video_codec;
enum sc_codec audio_codec;
enum sc_video_source video_source;
enum sc_audio_source audio_source;
enum sc_camera_facing camera_facing;
const char *crop;
const char *video_codec_options;
const char *audio_codec_options;
const char *video_encoder;
const char *audio_encoder;
const char *camera_id;
const char *camera_size;
const char *camera_ar;
uint16_t camera_fps;
struct sc_port_range port_range;
uint32_t tunnel_host;
uint16_t tunnel_port;
@@ -57,6 +63,7 @@ struct sc_server_params {
bool cleanup;
bool power_on;
bool kill_adb_on_close;
bool camera_high_speed;
uint8_t list;
};

View File

@@ -0,0 +1,37 @@
package com.genymobile.scrcpy;
public final class CameraAspectRatio {
private static final float SENSOR = -1;
private float ar;
private CameraAspectRatio(float ar) {
this.ar = ar;
}
public static CameraAspectRatio fromFloat(float ar) {
if (ar < 0) {
throw new IllegalArgumentException("Invalid aspect ratio: " + ar);
}
return new CameraAspectRatio(ar);
}
public static CameraAspectRatio fromFraction(int w, int h) {
if (w <= 0 || h <= 0) {
throw new IllegalArgumentException("Invalid aspect ratio: " + w + ":" + h);
}
return new CameraAspectRatio((float) w / h);
}
public static CameraAspectRatio sensorAspectRatio() {
return new CameraAspectRatio(SENSOR);
}
public boolean isSensor() {
return ar == SENSOR;
}
public float getAspectRatio() {
return ar;
}
}

View File

@@ -0,0 +1,352 @@
package com.genymobile.scrcpy;
import com.genymobile.scrcpy.wrappers.ServiceManager;
import android.annotation.SuppressLint;
import android.annotation.TargetApi;
import android.graphics.Rect;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CaptureFailure;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.params.OutputConfiguration;
import android.hardware.camera2.params.SessionConfiguration;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.MediaCodec;
import android.os.Build;
import android.os.Handler;
import android.os.HandlerThread;
import android.util.Range;
import android.view.Surface;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executor;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.stream.Stream;
public class CameraCapture extends SurfaceCapture {
private final String explicitCameraId;
private final CameraFacing cameraFacing;
private final Size explicitSize;
private int maxSize;
private final CameraAspectRatio aspectRatio;
private final int fps;
private final boolean highSpeed;
private String cameraId;
private Size size;
private HandlerThread cameraThread;
private Handler cameraHandler;
private CameraDevice cameraDevice;
private Executor cameraExecutor;
private final AtomicBoolean disconnected = new AtomicBoolean();
public CameraCapture(String explicitCameraId, CameraFacing cameraFacing, Size explicitSize, int maxSize, CameraAspectRatio aspectRatio, int fps,
boolean highSpeed) {
this.explicitCameraId = explicitCameraId;
this.cameraFacing = cameraFacing;
this.explicitSize = explicitSize;
this.maxSize = maxSize;
this.aspectRatio = aspectRatio;
this.fps = fps;
this.highSpeed = highSpeed;
}
@Override
public void init() throws IOException {
cameraThread = new HandlerThread("camera");
cameraThread.start();
cameraHandler = new Handler(cameraThread.getLooper());
cameraExecutor = new HandlerExecutor(cameraHandler);
try {
cameraId = selectCamera(explicitCameraId, cameraFacing);
if (cameraId == null) {
throw new IOException("No matching camera found");
}
size = selectSize(cameraId, explicitSize, maxSize, aspectRatio, highSpeed);
if (size == null) {
throw new IOException("Could not select camera size");
}
Ln.i("Using camera '" + cameraId + "'");
cameraDevice = openCamera(cameraId);
} catch (CameraAccessException | InterruptedException e) {
throw new IOException(e);
}
}
private static String selectCamera(String explicitCameraId, CameraFacing cameraFacing) throws CameraAccessException {
if (explicitCameraId != null) {
return explicitCameraId;
}
CameraManager cameraManager = ServiceManager.getCameraManager();
String[] cameraIds = cameraManager.getCameraIdList();
if (cameraFacing == null) {
// Use the first one
return cameraIds.length > 0 ? cameraIds[0] : null;
}
for (String cameraId : cameraIds) {
CameraCharacteristics characteristics = cameraManager.getCameraCharacteristics(cameraId);
int facing = characteristics.get(CameraCharacteristics.LENS_FACING);
if (cameraFacing.value() == facing) {
return cameraId;
}
}
// Not found
return null;
}
@TargetApi(Build.VERSION_CODES.N)
private static Size selectSize(String cameraId, Size explicitSize, int maxSize, CameraAspectRatio aspectRatio, boolean highSpeed)
throws CameraAccessException {
if (explicitSize != null) {
return explicitSize;
}
CameraManager cameraManager = ServiceManager.getCameraManager();
CameraCharacteristics characteristics = cameraManager.getCameraCharacteristics(cameraId);
StreamConfigurationMap configs = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
android.util.Size[] sizes = highSpeed ? configs.getHighSpeedVideoSizes() : configs.getOutputSizes(MediaCodec.class);
Stream<android.util.Size> stream = Arrays.stream(sizes);
if (maxSize > 0) {
stream = stream.filter(it -> it.getWidth() <= maxSize && it.getHeight() <= maxSize);
}
Float targetAspectRatio = resolveAspectRatio(aspectRatio, characteristics);
if (targetAspectRatio != null) {
stream = stream.filter(it -> {
float ar = ((float) it.getWidth() / it.getHeight());
float arRatio = ar / targetAspectRatio;
// Accept if the aspect ratio is the target aspect ratio + or - 10%
return arRatio >= 0.9f && arRatio <= 1.1f;
});
}
Optional<android.util.Size> selected = stream.max((s1, s2) -> {
// Greater width is better
int cmp = Integer.compare(s1.getWidth(), s2.getWidth());
if (cmp != 0) {
return cmp;
}
if (targetAspectRatio != null) {
// Closer to the target aspect ratio is better
float ar1 = ((float) s1.getWidth() / s1.getHeight());
float arRatio1 = ar1 / targetAspectRatio;
float distance1 = Math.abs(1 - arRatio1);
float ar2 = ((float) s2.getWidth() / s2.getHeight());
float arRatio2 = ar2 / targetAspectRatio;
float distance2 = Math.abs(1 - arRatio2);
// Reverse the order because lower distance is better
cmp = Float.compare(distance2, distance1);
if (cmp != 0) {
return cmp;
}
}
// Greater height is better
return Integer.compare(s1.getHeight(), s2.getHeight());
});
if (selected.isPresent()) {
android.util.Size size = selected.get();
return new Size(size.getWidth(), size.getHeight());
}
// Not found
return null;
}
private static Float resolveAspectRatio(CameraAspectRatio ratio, CameraCharacteristics characteristics) {
if (ratio == null) {
return null;
}
if (ratio.isSensor()) {
Rect activeSize = characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
return (float) activeSize.width() / activeSize.height();
}
return ratio.getAspectRatio();
}
@Override
public void start(Surface surface) throws IOException {
try {
CameraCaptureSession session = createCaptureSession(cameraDevice, surface);
CaptureRequest request = createCaptureRequest(surface);
setRepeatingRequest(session, request);
} catch (CameraAccessException | InterruptedException e) {
throw new IOException(e);
}
}
@Override
public void release() {
if (cameraDevice != null) {
cameraDevice.close();
}
if (cameraThread != null) {
cameraThread.quitSafely();
}
}
@Override
public Size getSize() {
return size;
}
@Override
public boolean setMaxSize(int maxSize) {
if (explicitSize != null) {
return false;
}
this.maxSize = maxSize;
try {
size = selectSize(cameraId, null, maxSize, aspectRatio, highSpeed);
return true;
} catch (CameraAccessException e) {
Ln.w("Could not select camera size", e);
return false;
}
}
@SuppressLint("MissingPermission")
@TargetApi(Build.VERSION_CODES.S)
private CameraDevice openCamera(String id) throws CameraAccessException, InterruptedException {
CompletableFuture<CameraDevice> future = new CompletableFuture<>();
ServiceManager.getCameraManager().openCamera(id, new CameraDevice.StateCallback() {
@Override
public void onOpened(CameraDevice camera) {
Ln.d("Camera opened successfully");
future.complete(camera);
}
@Override
public void onDisconnected(CameraDevice camera) {
Ln.w("Camera disconnected");
disconnected.set(true);
requestReset();
}
@Override
public void onError(CameraDevice camera, int error) {
int cameraAccessExceptionErrorCode;
switch (error) {
case CameraDevice.StateCallback.ERROR_CAMERA_IN_USE:
cameraAccessExceptionErrorCode = CameraAccessException.CAMERA_IN_USE;
break;
case CameraDevice.StateCallback.ERROR_MAX_CAMERAS_IN_USE:
cameraAccessExceptionErrorCode = CameraAccessException.MAX_CAMERAS_IN_USE;
break;
case CameraDevice.StateCallback.ERROR_CAMERA_DISABLED:
cameraAccessExceptionErrorCode = CameraAccessException.CAMERA_DISABLED;
break;
case CameraDevice.StateCallback.ERROR_CAMERA_DEVICE:
case CameraDevice.StateCallback.ERROR_CAMERA_SERVICE:
default:
cameraAccessExceptionErrorCode = CameraAccessException.CAMERA_ERROR;
break;
}
future.completeExceptionally(new CameraAccessException(cameraAccessExceptionErrorCode));
}
}, cameraHandler);
try {
return future.get();
} catch (ExecutionException e) {
throw (CameraAccessException) e.getCause();
}
}
@TargetApi(Build.VERSION_CODES.S)
private CameraCaptureSession createCaptureSession(CameraDevice camera, Surface surface) throws CameraAccessException, InterruptedException {
CompletableFuture<CameraCaptureSession> future = new CompletableFuture<>();
OutputConfiguration outputConfig = new OutputConfiguration(surface);
List<OutputConfiguration> outputs = Arrays.asList(outputConfig);
int sessionType = highSpeed ? SessionConfiguration.SESSION_HIGH_SPEED : SessionConfiguration.SESSION_REGULAR;
SessionConfiguration sessionConfig = new SessionConfiguration(sessionType, outputs, cameraExecutor,
new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(CameraCaptureSession session) {
future.complete(session);
}
@Override
public void onConfigureFailed(CameraCaptureSession session) {
future.completeExceptionally(new CameraAccessException(CameraAccessException.CAMERA_ERROR));
}
});
camera.createCaptureSession(sessionConfig);
try {
return future.get();
} catch (ExecutionException e) {
throw (CameraAccessException) e.getCause();
}
}
private CaptureRequest createCaptureRequest(Surface surface) throws CameraAccessException {
CaptureRequest.Builder requestBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
requestBuilder.addTarget(surface);
if (fps > 0) {
requestBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, new Range<>(fps, fps));
}
return requestBuilder.build();
}
@TargetApi(Build.VERSION_CODES.S)
private void setRepeatingRequest(CameraCaptureSession session, CaptureRequest request) throws CameraAccessException, InterruptedException {
CameraCaptureSession.CaptureCallback callback = new CameraCaptureSession.CaptureCallback() {
@Override
public void onCaptureStarted(CameraCaptureSession session, CaptureRequest request, long timestamp, long frameNumber) {
// Called for each frame captured, do nothing
}
@Override
public void onCaptureFailed(CameraCaptureSession session, CaptureRequest request, CaptureFailure failure) {
Ln.w("Camera capture failed: frame " + failure.getFrameNumber());
}
};
if (highSpeed) {
CameraConstrainedHighSpeedCaptureSession highSpeedSession = (CameraConstrainedHighSpeedCaptureSession) session;
List<CaptureRequest> requests = highSpeedSession.createHighSpeedRequestList(request);
highSpeedSession.setRepeatingBurst(requests, callback, cameraHandler);
} else {
session.setRepeatingRequest(request, callback, cameraHandler);
}
}
@Override
public boolean isClosed() {
return disconnected.get();
}
}

View File

@@ -0,0 +1,31 @@
package com.genymobile.scrcpy;
import android.hardware.camera2.CameraCharacteristics;
public enum CameraFacing {
FRONT("front", CameraCharacteristics.LENS_FACING_FRONT),
BACK("back", CameraCharacteristics.LENS_FACING_BACK),
EXTERNAL("external", CameraCharacteristics.LENS_FACING_EXTERNAL);
private final String name;
private final int value;
CameraFacing(String name, int value) {
this.name = name;
this.value = value;
}
int value() {
return value;
}
static CameraFacing findByName(String name) {
for (CameraFacing facing : CameraFacing.values()) {
if (name.equals(facing.name)) {
return facing;
}
}
return null;
}
}

View File

@@ -0,0 +1,23 @@
package com.genymobile.scrcpy;
import android.os.Handler;
import java.util.concurrent.Executor;
import java.util.concurrent.RejectedExecutionException;
// Inspired by the hidden android.os.HandlerExecutor
public class HandlerExecutor implements Executor {
private final Handler handler;
public HandlerExecutor(Handler handler) {
this.handler = handler;
}
@Override
public void execute(Runnable command) {
if (!handler.post(command)) {
throw new RejectedExecutionException(handler + " is shutting down");
}
}
}

View File

@@ -9,8 +9,13 @@ import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.MediaCodec;
import android.util.Range;
import java.util.Arrays;
import java.util.List;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeSet;
public final class LogUtils {
@@ -97,14 +102,31 @@ public final class LogUtils {
builder.append(" (").append(getCameraFacingName(facing)).append(", ");
Rect activeSize = characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
builder.append(activeSize.width()).append("x").append(activeSize.height()).append(')');
builder.append(activeSize.width()).append("x").append(activeSize.height()).append(", ");
// Capture frame rates for low-FPS mode are the same for every resolution
Range<Integer>[] lowFpsRanges = characteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
SortedSet<Integer> uniqueLowFps = getUniqueSet(lowFpsRanges);
builder.append("fps=").append(uniqueLowFps).append(')');
if (includeSizes) {
StreamConfigurationMap configs = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
android.util.Size[] sizes = configs.getOutputSizes(MediaCodec.class);
for (android.util.Size size : sizes) {
builder.append("\n - ").append(size.getWidth()).append('x').append(size.getHeight());
}
android.util.Size[] highSpeedSizes = configs.getHighSpeedVideoSizes();
if (highSpeedSizes.length > 0) {
builder.append("\n High speed capture (--camera-high-speed):");
for (android.util.Size size : highSpeedSizes) {
Range<Integer>[] highFpsRanges = configs.getHighSpeedVideoFpsRanges();
SortedSet<Integer> uniqueHighFps = getUniqueSet(highFpsRanges);
builder.append("\n - ").append(size.getWidth()).append("x").append(size.getHeight());
builder.append(" (fps=").append(uniqueHighFps).append(')');
}
}
}
}
}
@@ -113,4 +135,12 @@ public final class LogUtils {
}
return builder.toString();
}
private static SortedSet<Integer> getUniqueSet(Range<Integer>[] ranges) {
SortedSet<Integer> set = new TreeSet<>();
for (Range<Integer> range : ranges) {
set.add(range.getUpper());
}
return set;
}
}

View File

@@ -14,6 +14,7 @@ public class Options {
private int maxSize;
private VideoCodec videoCodec = VideoCodec.H264;
private AudioCodec audioCodec = AudioCodec.OPUS;
private VideoSource videoSource = VideoSource.DISPLAY;
private AudioSource audioSource = AudioSource.OUTPUT;
private int videoBitRate = 8000000;
private int audioBitRate = 128000;
@@ -23,6 +24,12 @@ public class Options {
private Rect crop;
private boolean control = true;
private int displayId;
private String cameraId;
private Size cameraSize;
private CameraFacing cameraFacing;
private CameraAspectRatio cameraAspectRatio;
private int cameraFps;
private boolean cameraHighSpeed;
private boolean showTouches;
private boolean stayAwake;
private List<CodecOption> videoCodecOptions;
@@ -75,6 +82,10 @@ public class Options {
return audioCodec;
}
public VideoSource getVideoSource() {
return videoSource;
}
public AudioSource getAudioSource() {
return audioSource;
}
@@ -111,6 +122,30 @@ public class Options {
return displayId;
}
public String getCameraId() {
return cameraId;
}
public Size getCameraSize() {
return cameraSize;
}
public CameraFacing getCameraFacing() {
return cameraFacing;
}
public CameraAspectRatio getCameraAspectRatio() {
return cameraAspectRatio;
}
public int getCameraFps() {
return cameraFps;
}
public boolean getCameraHighSpeed() {
return cameraHighSpeed;
}
public boolean getShowTouches() {
return showTouches;
}
@@ -244,6 +279,13 @@ public class Options {
}
options.audioCodec = audioCodec;
break;
case "video_source":
VideoSource videoSource = VideoSource.findByName(value);
if (videoSource == null) {
throw new IllegalArgumentException("Video source " + value + " not supported");
}
options.videoSource = videoSource;
break;
case "audio_source":
AudioSource audioSource = AudioSource.findByName(value);
if (audioSource == null) {
@@ -328,6 +370,36 @@ public class Options {
case "list_camera_sizes":
options.listCameraSizes = Boolean.parseBoolean(value);
break;
case "camera_id":
if (!value.isEmpty()) {
options.cameraId = value;
}
break;
case "camera_size":
if (!value.isEmpty()) {
options.cameraSize = parseSize(value);
}
break;
case "camera_facing":
if (!value.isEmpty()) {
CameraFacing facing = CameraFacing.findByName(value);
if (facing == null) {
throw new IllegalArgumentException("Camera facing " + value + " not supported");
}
options.cameraFacing = facing;
}
break;
case "camera_ar":
if (!value.isEmpty()) {
options.cameraAspectRatio = parseCameraAspectRatio(value);
}
break;
case "camera_fps":
options.cameraFps = Integer.parseInt(value);
break;
case "camera_high_speed":
options.cameraHighSpeed = Boolean.parseBoolean(value);
break;
case "send_device_meta":
options.sendDeviceMeta = Boolean.parseBoolean(value);
break;
@@ -370,4 +442,31 @@ public class Options {
int y = Integer.parseInt(tokens[3]);
return new Rect(x, y, x + width, y + height);
}
private static Size parseSize(String size) {
// input format: "<width>x<height>"
String[] tokens = size.split("x");
if (tokens.length != 2) {
throw new IllegalArgumentException("Invalid size format (expected <width>x<height>): \"" + size + "\"");
}
int width = Integer.parseInt(tokens[0]);
int height = Integer.parseInt(tokens[1]);
return new Size(width, height);
}
private static CameraAspectRatio parseCameraAspectRatio(String ar) {
if ("sensor".equals(ar)) {
return CameraAspectRatio.sensorAspectRatio();
}
String[] tokens = ar.split(":");
if (tokens.length == 2) {
int w = Integer.parseInt(tokens[0]);
int h = Integer.parseInt(tokens[1]);
return CameraAspectRatio.fromFraction(w, h);
}
float floatAr = Float.parseFloat(tokens[0]);
return CameraAspectRatio.fromFloat(floatAr);
}
}

View File

@@ -48,8 +48,9 @@ public class ScreenCapture extends SurfaceCapture implements Device.RotationList
}
@Override
public void setMaxSize(int size) {
public boolean setMaxSize(int size) {
device.setMaxSize(size);
return true;
}
@Override

View File

@@ -88,6 +88,12 @@ public final class Server {
private static void scrcpy(Options options) throws IOException, ConfigurationException {
Ln.i("Device: [" + Build.MANUFACTURER + "] " + Build.BRAND + " " + Build.MODEL + " (Android " + Build.VERSION.RELEASE + ")");
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.S && options.getVideoSource() == VideoSource.CAMERA) {
Ln.e("Camera mirroring is not supported before Android 12");
throw new ConfigurationException("Camera mirroring is not supported");
}
final Device device = new Device(options);
Thread initThread = startInitThread(options);
@@ -133,10 +139,16 @@ public final class Server {
if (video) {
Streamer videoStreamer = new Streamer(connection.getVideoFd(), options.getVideoCodec(), options.getSendCodecMeta(),
options.getSendFrameMeta());
ScreenCapture screenCapture = new ScreenCapture(device);
SurfaceEncoder screenEncoder = new SurfaceEncoder(screenCapture, videoStreamer, options.getVideoBitRate(), options.getMaxFps(),
SurfaceCapture surfaceCapture;
if (options.getVideoSource() == VideoSource.DISPLAY) {
surfaceCapture = new ScreenCapture(device);
} else {
surfaceCapture = new CameraCapture(options.getCameraId(), options.getCameraFacing(), options.getCameraSize(),
options.getMaxSize(), options.getCameraAspectRatio(), options.getCameraFps(), options.getCameraHighSpeed());
}
SurfaceEncoder surfaceEncoder = new SurfaceEncoder(surfaceCapture, videoStreamer, options.getVideoBitRate(), options.getMaxFps(),
options.getVideoCodecOptions(), options.getVideoEncoder(), options.getDownsizeOnError());
asyncProcessors.add(screenEncoder);
asyncProcessors.add(surfaceEncoder);
}
Completion completion = new Completion(asyncProcessors.size());

View File

@@ -2,6 +2,7 @@ package com.genymobile.scrcpy;
import android.view.Surface;
import java.io.IOException;
import java.util.concurrent.atomic.AtomicBoolean;
/**
@@ -31,7 +32,7 @@ public abstract class SurfaceCapture {
/**
* Called once before the capture starts.
*/
public abstract void init();
public abstract void init() throws IOException;
/**
* Called after the capture ends (if and only if {@link #init()} has been called).
@@ -43,7 +44,7 @@ public abstract class SurfaceCapture {
*
* @param surface the surface which will be encoded
*/
public abstract void start(Surface surface);
public abstract void start(Surface surface) throws IOException;
/**
* Return the video size
@@ -57,5 +58,14 @@ public abstract class SurfaceCapture {
*
* @param size Maximum size
*/
public abstract void setMaxSize(int size);
public abstract boolean setMaxSize(int size);
/**
* Indicate if the capture has been closed internally.
*
* @return {@code true} if the capture is closed, {@code false} otherwise.
*/
public boolean isClosed() {
return false;
}
}

View File

@@ -122,9 +122,13 @@ public class SurfaceEncoder implements AsyncProcessor {
return false;
}
// Retry with a smaller device size
boolean accepted = capture.setMaxSize(newMaxSize);
if (!accepted) {
return false;
}
// Retry with a smaller size
Ln.i("Retrying with -m" + newMaxSize + "...");
capture.setMaxSize(newMaxSize);
return true;
}
@@ -177,6 +181,11 @@ public class SurfaceEncoder implements AsyncProcessor {
}
}
if (capture.isClosed()) {
// The capture might have been closed internally (for example if the camera is disconnected)
alive = false;
}
return !eof && alive;
}

View File

@@ -0,0 +1,22 @@
package com.genymobile.scrcpy;
public enum VideoSource {
DISPLAY("display"),
CAMERA("camera");
private final String name;
VideoSource(String name) {
this.name = name;
}
static VideoSource findByName(String name) {
for (VideoSource videoSource : VideoSource.values()) {
if (name.equals(videoSource.name)) {
return videoSource;
}
}
return null;
}
}