Compare commits


12 Commits

Author SHA1 Message Date
Simon Chan
d4577ba631 Add --camera-facing
Add an option to select the camera by its lens facing (any, front, back
or external).

Co-authored-by: Romain Vimont <rom@rom1v.com>
Signed-off-by: Romain Vimont <rom@rom1v.com>
2023-10-26 23:41:14 +02:00
Romain Vimont
cb7fea31cb Make camera id optional
If no camera id is provided, use the first camera available.
2023-10-26 23:39:53 +02:00
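Taken together with the --camera-facing commit above, the selection order is: an explicit --camera-id wins; otherwise the first camera whose LENS_FACING matches the requested facing is used; otherwise the first camera reported by the system. A self-contained sketch of that rule against the camera2 API (the real logic is CameraCapture.selectCamera(), visible in the hunks further down):

import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraManager;

final class CameraSelector {
    private CameraSelector() {
    }

    // Sketch only; the real logic lives in the scrcpy server (CameraCapture).
    // Returns the selected camera id, or null if no camera matches.
    // 'facing' is one of CameraCharacteristics.LENS_FACING_FRONT/BACK/EXTERNAL,
    // or null for "any" (then the first available camera is used).
    static String select(CameraManager manager, String explicitId, Integer facing)
            throws CameraAccessException {
        if (explicitId != null) {
            return explicitId; // --camera-id always wins
        }
        String[] ids = manager.getCameraIdList();
        if (facing == null) {
            // No constraint: use the first camera available
            return ids.length > 0 ? ids[0] : null;
        }
        for (String id : ids) {
            CameraCharacteristics characteristics = manager.getCameraCharacteristics(id);
            Integer lensFacing = characteristics.get(CameraCharacteristics.LENS_FACING);
            if (lensFacing != null && lensFacing == facing.intValue()) {
                return id; // first camera facing the requested direction
            }
        }
        return null; // not found
    }
}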
Romain Vimont
05aa988946 Handle camera disconnection 2023-10-26 23:39:53 +02:00
Romain Vimont
39ed0d7cbc Automatically select audio source
If --audio-source is not specified, select the default value
according to the video source:
 - for display mirroring, use device audio by default;
 - for camera mirroring, use microphone by default.
2023-10-26 23:39:53 +02:00
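A minimal sketch of that default rule (the enums are simplified stand-ins for illustration; in scrcpy itself the fallback is resolved in the client's option handling, which is C):

// Simplified stand-ins for illustration only; not the actual scrcpy types.
enum VideoSource { DISPLAY, CAMERA }
enum AudioSource { OUTPUT, MIC }

final class AudioSourceDefaults {
    private AudioSourceDefaults() {
    }

    // If the user gave no --audio-source, derive it from the video source:
    // display mirroring forwards the device audio output, camera mirroring
    // records from the microphone.
    static AudioSource resolve(AudioSource requested, VideoSource videoSource) {
        if (requested != null) {
            return requested; // explicit --audio-source wins
        }
        return videoSource == VideoSource.DISPLAY ? AudioSource.OUTPUT : AudioSource.MIC;
    }
}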
Romain Vimont
f92c22e331 DONOTMERGE workaround for Android 11 testing 2023-10-26 23:39:53 +02:00
Simon Chan
89049db1dd Add camera mirroring
Add --video-source=camera, and related options:
 - --camera-id=ID: select the camera (ids are listed by --list-cameras);
 - --camera-size=WIDTHxHEIGHT: select the capture size.

Signed-off-by: Romain Vimont <rom@rom1v.com>
2023-10-26 23:39:52 +02:00
Romain Vimont
88083483b0 Add --list-camera-sizes 2023-10-26 23:06:34 +02:00
Simon Chan
3d2f39fc8a Add --list-cameras
Add an option to list the device cameras.

Co-authored-by: Romain Vimont <rom@rom1v.com>
2023-10-26 23:06:34 +02:00
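The listing queries the camera2 service for each camera id together with its lens facing and sensor active size, as in the LogUtils hunk further down. A self-contained sketch of the same query; the output format is illustrative:

import android.graphics.Rect;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraManager;

final class CameraLister {
    private CameraLister() {
    }

    // Build a human-readable camera list: id, lens facing and sensor active size.
    static String buildCameraList(CameraManager manager) throws CameraAccessException {
        StringBuilder builder = new StringBuilder("List of cameras:");
        for (String id : manager.getCameraIdList()) {
            CameraCharacteristics characteristics = manager.getCameraCharacteristics(id);
            Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
            Rect activeSize = characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
            builder.append("\n --video-source=camera --camera-id=").append(id)
                   .append(" (").append(facingName(facing)).append(", ")
                   .append(activeSize.width()).append('x').append(activeSize.height()).append(')');
        }
        return builder.toString();
    }

    private static String facingName(Integer facing) {
        if (facing == null) {
            return "unknown";
        }
        switch (facing) {
            case CameraCharacteristics.LENS_FACING_FRONT:
                return "front";
            case CameraCharacteristics.LENS_FACING_BACK:
                return "back";
            case CameraCharacteristics.LENS_FACING_EXTERNAL:
                return "external";
            default:
                return "unknown";
        }
    }
}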
Romain Vimont
986bceb269 Factorize --list- options handling
This will limit code duplication as more list options are added.
2023-10-26 23:06:34 +02:00
Romain Vimont
9d01140826 Make separator configurable for parsing integers
The separator was hardcoded to ':'. This will allow reusing the
function to parse sizes as WIDTHxHEIGHT.
2023-10-26 23:06:34 +02:00
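The helper in question is a C function in the client, but the idea carries over directly: once the separator is a parameter, one split-and-parse routine can handle both ':'-separated values such as --crop and 'x'-separated sizes such as WIDTHxHEIGHT. A hedged Java rendering of the idea:

import java.util.regex.Pattern;

// Illustrative only: the real helper is a C function in the scrcpy client;
// this just shows parsing a fixed number of integers with a configurable separator.
final class IntegerListParser {
    private IntegerListParser() {
    }

    static long[] parse(String s, char separator, int expectedCount) {
        String[] tokens = s.split(Pattern.quote(String.valueOf(separator)), -1);
        if (tokens.length != expectedCount) {
            throw new IllegalArgumentException(
                    "Expected " + expectedCount + " integers separated by '" + separator + "': " + s);
        }
        long[] values = new long[expectedCount];
        for (int i = 0; i < expectedCount; i++) {
            values[i] = Long.parseLong(tokens[i]);
        }
        return values;
    }
}

// Usage: the same routine parses both forms.
//   long[] crop = IntegerListParser.parse("1224:1440:0:0", ':', 4);
//   long[] size = IntegerListParser.parse("1920x1080", 'x', 2);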
Simon Chan
616544d995 Extract SurfaceCapture from ScreenEncoder
Extract an interface SurfaceCapture from ScreenEncoder, representing a
video source which can be rendered to a Surface for encoding.

Split ScreenEncoder into:
 - ScreenCapture, implementing SurfaceCapture to capture the device
   screen,
 - SurfaceEncoder, to encode any SurfaceCapture.

This separation prepares the introduction of another SurfaceCapture
implementation to capture the camera instead of the device screen.

Co-authored-by: Romain Vimont <rom@rom1v.com>
2023-10-26 23:06:34 +02:00
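Judging from the overrides visible in the CameraCapture hunks below, the extracted contract roughly amounts to: prepare the source, start rendering into the encoder Surface, report the capture size, optionally accept a max-size hint, release resources, and signal disconnection. A hedged sketch of such a contract (deliberately named differently; the real SurfaceCapture is richer and, given "extends SurfaceCapture", an abstract class rather than a plain interface):

import android.view.Surface;
import java.io.IOException;

// Hedged sketch of the capture contract extracted from ScreenEncoder; the method
// names follow the overrides visible in the CameraCapture hunks, the rest is assumed.
interface VideoSourceCapture {
    void init() throws IOException;                  // prepare the video source
    void start(Surface surface) throws IOException;  // render frames into the encoder surface
    void release();                                  // free resources
    Size getSize();                                  // current capture size
    boolean setMaxSize(int maxSize);                 // true if the size limit could be applied
    boolean isClosed();                              // true once the source is gone (e.g. camera disconnected)
}

// Minimal stand-in for the scrcpy Size type used above.
final class Size {
    final int width;
    final int height;

    Size(int width, int height) {
        this.width = width;
        this.height = height;
    }
}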
Romain Vimont
3c1a2eb87a Rename --display to --display-id
The option is named "display id" everywhere.

This will be consistent with --camera-id (there will be many camera
options, so an option --camera would be confusing).
2023-10-26 23:06:33 +02:00
17 changed files with 80 additions and 413 deletions

View File

@@ -10,11 +10,8 @@ _scrcpy() {
--audio-source=
--audio-output-buffer=
-b --video-bit-rate=
--camera-ar=
--camera-id=
--camera-facing=
--camera-fps=
--camera-high-speed
--camera-size=
--crop=
-d --select-usb
@@ -156,9 +153,7 @@ _scrcpy() {
|--audio-codec-options \
|--audio-encoder \
|--audio-output-buffer \
|--camera-ar \
|--camera-id \
|--camera-fps \
|--camera-size \
|--crop \
|--display-id \

View File

@@ -17,11 +17,8 @@ arguments=(
'--audio-source=[Select the audio source]:source:(output mic)'
'--audio-output-buffer=[Configure the size of the SDL audio output buffer (in milliseconds)]'
{-b,--video-bit-rate=}'[Encode the video at the given bit-rate]'
'--camera-ar=[Select the camera size by its aspect ratio]'
'--camera-high-speed=[Enable high-speed camera capture mode]'
'--camera-id=[Specify the camera id to mirror]'
'--camera-facing=[Select the device camera by its facing direction]:facing:(front back external)'
'--camera-fps=[Specify the camera capture frame rate]'
'--camera-size=[Specify an explicit camera capture size]'
'--crop=[\[width\:height\:x\:y\] Crop the device screen on the server]'
{-d,--select-usb}'[Use USB device]'

View File

@@ -75,18 +75,6 @@ Encode the video at the given bit rate, expressed in bits/s. Unit suffixes are s
Default is 8M (8000000).
.TP
.BI "\-\-camera\-ar " ar
Select the camera size by its aspect ratio (+/- 10%).
Possible values are "sensor" (use the camera sensor aspect ratio), "<num>:<den>" (e.g. "4:3") and "<value>" (e.g. "1.6").
.TP
.B \-\-camera\-high\-speed
Enable high-speed camera capture mode.
This mode is restricted to specific resolutions and frame rates, listed by --list-camera-sizes.
.TP
.BI "\-\-camera\-id " id
Specify the device camera id to mirror.
@@ -99,12 +87,6 @@ Select the device camera by its facing direction.
Possible values are "front", "back" and "external".
.TP
.BI "\-\-camera\-fps " fps
Specify the camera capture frame rate.
If not specified, Android's default frame rate (30 fps) is used.
.TP
.BI "\-\-camera\-size " width\fRx\fIheight
Specify an explicit camera capture size.
@@ -470,8 +452,6 @@ The available encoders can be listed by \-\-list\-encoders.
.BI "\-\-video\-source " source
Select the video source (display or camera).
Camera mirroring requires Android 12+.
Default is display.
.TP

View File

@@ -87,9 +87,6 @@ enum {
OPT_CAMERA_ID,
OPT_CAMERA_SIZE,
OPT_CAMERA_FACING,
OPT_CAMERA_AR,
OPT_CAMERA_FPS,
OPT_CAMERA_HIGH_SPEED,
};
struct sc_option {
@@ -206,15 +203,6 @@ static const struct sc_option options[] = {
.longopt = "bit-rate",
.argdesc = "value",
},
{
.longopt_id = OPT_CAMERA_AR,
.longopt = "camera-ar",
.argdesc = "ar",
.text = "Select the camera size by its aspect ratio (+/- 10%).\n"
"Possible values are \"sensor\" (use the camera sensor aspect "
"ratio), \"<num>:<den>\" (e.g. \"4:3\") or \"<value>\" (e.g. "
"\"1.6\")."
},
{
.longopt_id = OPT_CAMERA_ID,
.longopt = "camera-id",
@@ -230,27 +218,12 @@ static const struct sc_option options[] = {
.text = "Select the device camera by its facing direction.\n"
"Possible values are \"front\", \"back\" and \"external\".",
},
{
.longopt_id = OPT_CAMERA_HIGH_SPEED,
.longopt = "camera-high-speed",
.text = "Enable high-speed camera capture mode.\n"
"This mode is restricted to specific resolutions and frame "
"rates, listed by --list-camera-sizes.",
},
{
.longopt_id = OPT_CAMERA_SIZE,
.longopt = "camera-size",
.argdesc = "<width>x<height>",
.text = "Specify an explicit camera capture size.",
},
{
.longopt_id = OPT_CAMERA_FPS,
.longopt = "camera-fps",
.argdesc = "value",
.text = "Specify the camera capture frame rate.\n"
"If not specified, Android's default frame rate (30 fps) is "
"used.",
},
{
// Not really deprecated (--codec has never been released), but without
// declaring an explicit --codec option, getopt_long() partial matching
@@ -760,7 +733,6 @@ static const struct sc_option options[] = {
.longopt = "video-source",
.argdesc = "source",
.text = "Select the video source (display or camera).\n"
"Camera mirroring requires Android 12+.\n"
"Default is display.",
},
{
@@ -1332,7 +1304,7 @@ parse_max_size(const char *s, uint16_t *max_size) {
static bool
parse_max_fps(const char *s, uint16_t *max_fps) {
long value;
bool ok = parse_integer_arg(s, &value, false, 0, 0xFFFF, "max fps");
bool ok = parse_integer_arg(s, &value, false, 0, 1000, "max fps");
if (!ok) {
return false;
}
@@ -1763,18 +1735,6 @@ parse_camera_facing(const char *optarg, enum sc_camera_facing *facing) {
return false;
}
static bool
parse_camera_fps(const char *s, uint16_t *camera_fps) {
long value;
bool ok = parse_integer_arg(s, &value, false, 0, 0xFFFF, "camera fps");
if (!ok) {
return false;
}
*camera_fps = (uint16_t) value;
return true;
}
static bool
parse_time_limit(const char *s, sc_tick *tick) {
long value;
@@ -2169,9 +2129,6 @@ parse_args_with_getopt(struct scrcpy_cli_args *args, int argc, char *argv[],
return false;
}
break;
case OPT_CAMERA_AR:
opts->camera_ar = optarg;
break;
case OPT_CAMERA_ID:
opts->camera_id = optarg;
break;
@@ -2183,14 +2140,6 @@ parse_args_with_getopt(struct scrcpy_cli_args *args, int argc, char *argv[],
return false;
}
break;
case OPT_CAMERA_FPS:
if (!parse_camera_fps(optarg, &opts->camera_fps)) {
return false;
}
break;
case OPT_CAMERA_HIGH_SPEED:
opts->camera_high_speed = true;
break;
default:
// getopt prints the error message on stderr
return false;
@@ -2290,25 +2239,19 @@ parse_args_with_getopt(struct scrcpy_cli_args *args, int argc, char *argv[],
return false;
}
if (opts->lock_video_orientation !=
SC_LOCK_VIDEO_ORIENTATION_UNLOCKED) {
LOGE("--lock-video-orientation is not supported for camera");
return false;
}
if (opts->camera_id && opts->camera_facing != SC_CAMERA_FACING_ANY) {
LOGE("Could not specify both --camera-id and --camera-facing");
return false;
}
if (opts->camera_size) {
if (opts->camera_ar) {
LOGE("Could not specify both --camera-size and -m/--max-size");
return false;
}
if (opts->camera_ar) {
LOGE("Could not specify both --camera-size and --camera-ar");
return false;
}
}
if (opts->camera_high_speed && !opts->camera_fps) {
LOGE("--camera-high-speed requires an explicit --camera-fps value");
if (!opts->camera_size) {
LOGE("Camera size must be specified by --camera-size=WIDTHxHEIGHT");
return false;
}
@@ -2317,10 +2260,7 @@ parse_args_with_getopt(struct scrcpy_cli_args *args, int argc, char *argv[],
opts->control = false;
}
} else if (opts->camera_id
|| opts->camera_ar
|| opts->camera_facing != SC_CAMERA_FACING_ANY
|| opts->camera_fps
|| opts->camera_high_speed
|| opts->camera_size) {
LOGE("Camera options are only available with --video-source=camera");
return false;

View File

@@ -13,8 +13,6 @@ const struct scrcpy_options scrcpy_options_default = {
.audio_encoder = NULL,
.camera_id = NULL,
.camera_size = NULL,
.camera_ar = NULL,
.camera_fps = 0,
.log_level = SC_LOG_LEVEL_INFO,
.video_codec = SC_CODEC_H264,
.audio_codec = SC_CODEC_OPUS,
@@ -86,6 +84,5 @@ const struct scrcpy_options scrcpy_options_default = {
.audio = true,
.require_audio = false,
.kill_adb_on_close = false,
.camera_high_speed = false,
.list = 0,
};

View File

@@ -132,8 +132,6 @@ struct scrcpy_options {
const char *audio_encoder;
const char *camera_id;
const char *camera_size;
const char *camera_ar;
uint16_t camera_fps;
enum sc_log_level log_level;
enum sc_codec video_codec;
enum sc_codec audio_codec;
@@ -199,7 +197,6 @@ struct scrcpy_options {
bool audio;
bool require_audio;
bool kill_adb_on_close;
bool camera_high_speed;
#define SC_OPTION_LIST_ENCODERS 0x1
#define SC_OPTION_LIST_DISPLAYS 0x2
#define SC_OPTION_LIST_CAMERAS 0x4

View File

@@ -375,8 +375,6 @@ scrcpy(struct scrcpy_options *options) {
.audio_encoder = options->audio_encoder,
.camera_id = options->camera_id,
.camera_size = options->camera_size,
.camera_ar = options->camera_ar,
.camera_fps = options->camera_fps,
.force_adb_forward = options->force_adb_forward,
.power_off_on_close = options->power_off_on_close,
.clipboard_autosync = options->clipboard_autosync,
@@ -386,7 +384,6 @@ scrcpy(struct scrcpy_options *options) {
.cleanup = options->cleanup,
.power_on = options->power_on,
.kill_adb_on_close = options->kill_adb_on_close,
.camera_high_speed = options->camera_high_speed,
.list = options->list,
};

View File

@@ -77,7 +77,6 @@ sc_server_params_destroy(struct sc_server_params *params) {
free((char *) params->audio_encoder);
free((char *) params->tcpip_dst);
free((char *) params->camera_id);
free((char *) params->camera_ar);
}
static bool
@@ -106,7 +105,6 @@ sc_server_params_copy(struct sc_server_params *dst,
COPY(audio_encoder);
COPY(tcpip_dst);
COPY(camera_id);
COPY(camera_ar);
#undef COPY
return true;
@@ -305,15 +303,6 @@ execute_server(struct sc_server *server,
ADD_PARAM("camera_facing=%s",
sc_server_get_camera_facing_name(params->camera_facing));
}
if (params->camera_ar) {
ADD_PARAM("camera_ar=%s", params->camera_ar);
}
if (params->camera_fps) {
ADD_PARAM("camera_fps=%" PRIu16, params->camera_fps);
}
if (params->camera_high_speed) {
ADD_PARAM("camera_high_speed=true");
}
if (params->show_touches) {
ADD_PARAM("show_touches=true");
}

View File

@@ -36,8 +36,6 @@ struct sc_server_params {
const char *audio_encoder;
const char *camera_id;
const char *camera_size;
const char *camera_ar;
uint16_t camera_fps;
struct sc_port_range port_range;
uint32_t tunnel_host;
uint16_t tunnel_port;
@@ -63,7 +61,6 @@ struct sc_server_params {
bool cleanup;
bool power_on;
bool kill_adb_on_close;
bool camera_high_speed;
uint8_t list;
};

View File

@@ -1,37 +0,0 @@
package com.genymobile.scrcpy;
public final class CameraAspectRatio {
private static final float SENSOR = -1;
private float ar;
private CameraAspectRatio(float ar) {
this.ar = ar;
}
public static CameraAspectRatio fromFloat(float ar) {
if (ar < 0) {
throw new IllegalArgumentException("Invalid aspect ratio: " + ar);
}
return new CameraAspectRatio(ar);
}
public static CameraAspectRatio fromFraction(int w, int h) {
if (w <= 0 || h <= 0) {
throw new IllegalArgumentException("Invalid aspect ratio: " + w + ":" + h);
}
return new CameraAspectRatio((float) w / h);
}
public static CameraAspectRatio sensorAspectRatio() {
return new CameraAspectRatio(SENSOR);
}
public boolean isSensor() {
return ar == SENSOR;
}
public float getAspectRatio() {
return ar;
}
}

View File

@@ -4,47 +4,50 @@ import com.genymobile.scrcpy.wrappers.ServiceManager;
import android.annotation.SuppressLint;
import android.annotation.TargetApi;
import android.graphics.Rect;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CaptureFailure;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.params.OutputConfiguration;
import android.hardware.camera2.params.SessionConfiguration;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.MediaCodec;
import android.os.Build;
import android.os.Handler;
import android.os.HandlerThread;
import android.util.Range;
import android.view.Surface;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executor;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.stream.Stream;
public class CameraCapture extends SurfaceCapture {
private final String explicitCameraId;
private final CameraFacing cameraFacing;
private final Size explicitSize;
private int maxSize;
private final CameraAspectRatio aspectRatio;
private final int fps;
private final boolean highSpeed;
public static class CameraSelection {
private String explicitCameraId;
private CameraFacing cameraFacing;
private String cameraId;
private Size size;
public CameraSelection(String explicitCameraId, CameraFacing cameraFacing) {
this.explicitCameraId = explicitCameraId;
this.cameraFacing = cameraFacing;
}
boolean hasId() {
return explicitCameraId != null;
}
boolean hasProperties() {
return cameraFacing != null;
}
}
private final CameraSelection cameraSelection;
private final Size explicitSize;
private HandlerThread cameraThread;
private Handler cameraHandler;
@@ -53,15 +56,9 @@ public class CameraCapture extends SurfaceCapture {
private final AtomicBoolean disconnected = new AtomicBoolean();
public CameraCapture(String explicitCameraId, CameraFacing cameraFacing, Size explicitSize, int maxSize, CameraAspectRatio aspectRatio, int fps,
boolean highSpeed) {
this.explicitCameraId = explicitCameraId;
this.cameraFacing = cameraFacing;
public CameraCapture(CameraSelection cameraSelection, Size explicitSize) {
this.cameraSelection = cameraSelection;
this.explicitSize = explicitSize;
this.maxSize = maxSize;
this.aspectRatio = aspectRatio;
this.fps = fps;
this.highSpeed = highSpeed;
}
@Override
@@ -72,16 +69,11 @@ public class CameraCapture extends SurfaceCapture {
cameraExecutor = new HandlerExecutor(cameraHandler);
try {
cameraId = selectCamera(explicitCameraId, cameraFacing);
String cameraId = selectCamera(cameraSelection);
if (cameraId == null) {
throw new IOException("No matching camera found");
}
size = selectSize(cameraId, explicitSize, maxSize, aspectRatio, highSpeed);
if (size == null) {
throw new IOException("Could not select camera size");
}
Ln.i("Using camera '" + cameraId + "'");
cameraDevice = openCamera(cameraId);
} catch (CameraAccessException | InterruptedException e) {
@@ -89,15 +81,15 @@ public class CameraCapture extends SurfaceCapture {
}
}
private static String selectCamera(String explicitCameraId, CameraFacing cameraFacing) throws CameraAccessException {
if (explicitCameraId != null) {
return explicitCameraId;
private String selectCamera(CameraSelection cameraSelection) throws CameraAccessException {
if (cameraSelection.hasId()) {
return cameraSelection.explicitCameraId;
}
CameraManager cameraManager = ServiceManager.getCameraManager();
String[] cameraIds = cameraManager.getCameraIdList();
if (cameraFacing == null) {
if (!cameraSelection.hasProperties()) {
// Use the first one
return cameraIds.length > 0 ? cameraIds[0] : null;
}
@@ -105,98 +97,28 @@ public class CameraCapture extends SurfaceCapture {
for (String cameraId : cameraIds) {
CameraCharacteristics characteristics = cameraManager.getCameraCharacteristics(cameraId);
int facing = characteristics.get(CameraCharacteristics.LENS_FACING);
if (cameraFacing.value() == facing) {
return cameraId;
}
}
// Not found
return null;
}
@TargetApi(Build.VERSION_CODES.N)
private static Size selectSize(String cameraId, Size explicitSize, int maxSize, CameraAspectRatio aspectRatio, boolean highSpeed)
throws CameraAccessException {
if (explicitSize != null) {
return explicitSize;
}
CameraManager cameraManager = ServiceManager.getCameraManager();
CameraCharacteristics characteristics = cameraManager.getCameraCharacteristics(cameraId);
StreamConfigurationMap configs = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
android.util.Size[] sizes = highSpeed ? configs.getHighSpeedVideoSizes() : configs.getOutputSizes(MediaCodec.class);
Stream<android.util.Size> stream = Arrays.stream(sizes);
if (maxSize > 0) {
stream = stream.filter(it -> it.getWidth() <= maxSize && it.getHeight() <= maxSize);
}
Float targetAspectRatio = resolveAspectRatio(aspectRatio, characteristics);
if (targetAspectRatio != null) {
stream = stream.filter(it -> {
float ar = ((float) it.getWidth() / it.getHeight());
float arRatio = ar / targetAspectRatio;
// Accept if the aspect ratio is the target aspect ratio + or - 10%
return arRatio >= 0.9f && arRatio <= 1.1f;
});
}
Optional<android.util.Size> selected = stream.max((s1, s2) -> {
// Greater width is better
int cmp = Integer.compare(s1.getWidth(), s2.getWidth());
if (cmp != 0) {
return cmp;
}
if (targetAspectRatio != null) {
// Closer to the target aspect ratio is better
float ar1 = ((float) s1.getWidth() / s1.getHeight());
float arRatio1 = ar1 / targetAspectRatio;
float distance1 = Math.abs(1 - arRatio1);
float ar2 = ((float) s2.getWidth() / s2.getHeight());
float arRatio2 = ar2 / targetAspectRatio;
float distance2 = Math.abs(1 - arRatio2);
// Reverse the order because lower distance is better
cmp = Float.compare(distance2, distance1);
if (cmp != 0) {
return cmp;
if (cameraSelection.cameraFacing != null) {
int facing = characteristics.get(CameraCharacteristics.LENS_FACING);
if (cameraSelection.cameraFacing.value() != facing) {
// Does not match
continue;
}
}
// Greater height is better
return Integer.compare(s1.getHeight(), s2.getHeight());
});
if (selected.isPresent()) {
android.util.Size size = selected.get();
return new Size(size.getWidth(), size.getHeight());
return cameraId;
}
// Not found
return null;
}
private static Float resolveAspectRatio(CameraAspectRatio ratio, CameraCharacteristics characteristics) {
if (ratio == null) {
return null;
}
if (ratio.isSensor()) {
Rect activeSize = characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
return (float) activeSize.width() / activeSize.height();
}
return ratio.getAspectRatio();
}
@Override
public void start(Surface surface) throws IOException {
try {
CameraCaptureSession session = createCaptureSession(cameraDevice, surface);
CaptureRequest request = createCaptureRequest(surface);
CaptureRequest.Builder requestBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
requestBuilder.addTarget(surface);
CaptureRequest request = requestBuilder.build();
setRepeatingRequest(session, request);
} catch (CameraAccessException | InterruptedException e) {
throw new IOException(e);
@@ -215,33 +137,24 @@ public class CameraCapture extends SurfaceCapture {
@Override
public Size getSize() {
return size;
return explicitSize;
}
@Override
public boolean setMaxSize(int maxSize) {
if (explicitSize != null) {
return false;
}
this.maxSize = maxSize;
try {
size = selectSize(cameraId, null, maxSize, aspectRatio, highSpeed);
return true;
} catch (CameraAccessException e) {
Ln.w("Could not select camera size", e);
return false;
}
public boolean setMaxSize(int size) {
return false;
}
@SuppressLint("MissingPermission")
@TargetApi(Build.VERSION_CODES.S)
private CameraDevice openCamera(String id) throws CameraAccessException, InterruptedException {
Ln.v("Open Camera: " + id);
CompletableFuture<CameraDevice> future = new CompletableFuture<>();
ServiceManager.getCameraManager().openCamera(id, new CameraDevice.StateCallback() {
@Override
public void onOpened(CameraDevice camera) {
Ln.d("Camera opened successfully");
Ln.v("Open Camera Success");
future.complete(camera);
}
@@ -284,15 +197,17 @@ public class CameraCapture extends SurfaceCapture {
@TargetApi(Build.VERSION_CODES.S)
private CameraCaptureSession createCaptureSession(CameraDevice camera, Surface surface) throws CameraAccessException, InterruptedException {
Ln.d("Create Capture Session");
CompletableFuture<CameraCaptureSession> future = new CompletableFuture<>();
// replace by createCaptureSession(SessionConfiguration)
OutputConfiguration outputConfig = new OutputConfiguration(surface);
List<OutputConfiguration> outputs = Arrays.asList(outputConfig);
int sessionType = highSpeed ? SessionConfiguration.SESSION_HIGH_SPEED : SessionConfiguration.SESSION_REGULAR;
SessionConfiguration sessionConfig = new SessionConfiguration(sessionType, outputs, cameraExecutor,
SessionConfiguration sessionConfig = new SessionConfiguration(SessionConfiguration.SESSION_REGULAR, outputs, cameraExecutor,
new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(CameraCaptureSession session) {
Ln.d("Create Capture Session Success");
future.complete(session);
}
@@ -311,37 +226,25 @@ public class CameraCapture extends SurfaceCapture {
}
}
private CaptureRequest createCaptureRequest(Surface surface) throws CameraAccessException {
CaptureRequest.Builder requestBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
requestBuilder.addTarget(surface);
if (fps > 0) {
requestBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, new Range<>(fps, fps));
}
return requestBuilder.build();
}
@TargetApi(Build.VERSION_CODES.S)
private void setRepeatingRequest(CameraCaptureSession session, CaptureRequest request) throws CameraAccessException, InterruptedException {
CameraCaptureSession.CaptureCallback callback = new CameraCaptureSession.CaptureCallback() {
CompletableFuture<Void> future = new CompletableFuture<>();
session.setRepeatingRequest(request, new CameraCaptureSession.CaptureCallback() {
@Override
public void onCaptureStarted(CameraCaptureSession session, CaptureRequest request, long timestamp, long frameNumber) {
// Called for each frame captured, do nothing
future.complete(null);
}
@Override
public void onCaptureFailed(CameraCaptureSession session, CaptureRequest request, CaptureFailure failure) {
Ln.w("Camera capture failed: frame " + failure.getFrameNumber());
future.completeExceptionally(new CameraAccessException(CameraAccessException.CAMERA_ERROR));
}
};
}, cameraHandler);
if (highSpeed) {
CameraConstrainedHighSpeedCaptureSession highSpeedSession = (CameraConstrainedHighSpeedCaptureSession) session;
List<CaptureRequest> requests = highSpeedSession.createHighSpeedRequestList(request);
highSpeedSession.setRepeatingBurst(requests, callback, cameraHandler);
} else {
session.setRepeatingRequest(request, callback, cameraHandler);
try {
future.get();
} catch (ExecutionException e) {
throw (CameraAccessException) e.getCause();
}
}
@@ -349,4 +252,4 @@ public class CameraCapture extends SurfaceCapture {
public boolean isClosed() {
return disconnected.get();
}
}
}

View File

@@ -9,13 +9,8 @@ import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.MediaCodec;
import android.util.Range;
import java.util.Arrays;
import java.util.List;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeSet;
public final class LogUtils {
@@ -97,36 +92,19 @@ public final class LogUtils {
for (String id : cameraIds) {
builder.append("\n --video-source=camera --camera-id=").append(id);
CameraCharacteristics characteristics = cameraManager.getCameraCharacteristics(id);
int facing = characteristics.get(CameraCharacteristics.LENS_FACING);
builder.append(" (").append(getCameraFacingName(facing)).append(", ");
Rect activeSize = characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
builder.append(activeSize.width()).append("x").append(activeSize.height()).append(", ");
// Capture frame rates for low-FPS mode are the same for every resolution
Range<Integer>[] lowFpsRanges = characteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
SortedSet<Integer> uniqueLowFps = getUniqueSet(lowFpsRanges);
builder.append("fps=").append(uniqueLowFps).append(')');
builder.append(activeSize.width()).append("x").append(activeSize.height());
builder.append(')');
if (includeSizes) {
StreamConfigurationMap configs = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
android.util.Size[] sizes = configs.getOutputSizes(MediaCodec.class);
for (android.util.Size size : sizes) {
builder.append("\n - ").append(size.getWidth()).append('x').append(size.getHeight());
}
android.util.Size[] highSpeedSizes = configs.getHighSpeedVideoSizes();
if (highSpeedSizes.length > 0) {
builder.append("\n High speed capture (--camera-high-speed):");
for (android.util.Size size : highSpeedSizes) {
Range<Integer>[] highFpsRanges = configs.getHighSpeedVideoFpsRanges();
SortedSet<Integer> uniqueHighFps = getUniqueSet(highFpsRanges);
builder.append("\n - ").append(size.getWidth()).append("x").append(size.getHeight());
builder.append(" (fps=").append(uniqueHighFps).append(')');
}
}
}
}
}
@@ -135,12 +113,4 @@ public final class LogUtils {
}
return builder.toString();
}
private static SortedSet<Integer> getUniqueSet(Range<Integer>[] ranges) {
SortedSet<Integer> set = new TreeSet<>();
for (Range<Integer> range : ranges) {
set.add(range.getUpper());
}
return set;
}
}

View File

@@ -27,9 +27,6 @@ public class Options {
private String cameraId;
private Size cameraSize;
private CameraFacing cameraFacing;
private CameraAspectRatio cameraAspectRatio;
private int cameraFps;
private boolean cameraHighSpeed;
private boolean showTouches;
private boolean stayAwake;
private List<CodecOption> videoCodecOptions;
@@ -134,18 +131,6 @@ public class Options {
return cameraFacing;
}
public CameraAspectRatio getCameraAspectRatio() {
return cameraAspectRatio;
}
public int getCameraFps() {
return cameraFps;
}
public boolean getCameraHighSpeed() {
return cameraHighSpeed;
}
public boolean getShowTouches() {
return showTouches;
}
@@ -376,9 +361,7 @@ public class Options {
}
break;
case "camera_size":
if (!value.isEmpty()) {
options.cameraSize = parseSize(value);
}
options.cameraSize = parseSize(value);
break;
case "camera_facing":
if (!value.isEmpty()) {
@@ -389,17 +372,6 @@ public class Options {
options.cameraFacing = facing;
}
break;
case "camera_ar":
if (!value.isEmpty()) {
options.cameraAspectRatio = parseCameraAspectRatio(value);
}
break;
case "camera_fps":
options.cameraFps = Integer.parseInt(value);
break;
case "camera_high_speed":
options.cameraHighSpeed = Boolean.parseBoolean(value);
break;
case "send_device_meta":
options.sendDeviceMeta = Boolean.parseBoolean(value);
break;
@@ -444,6 +416,9 @@ public class Options {
}
private static Size parseSize(String size) {
if (size.isEmpty()) {
return null;
}
// input format: "<width>x<height>"
String[] tokens = size.split("x");
if (tokens.length != 2) {
@@ -453,20 +428,4 @@ public class Options {
int height = Integer.parseInt(tokens[1]);
return new Size(width, height);
}
private static CameraAspectRatio parseCameraAspectRatio(String ar) {
if ("sensor".equals(ar)) {
return CameraAspectRatio.sensorAspectRatio();
}
String[] tokens = ar.split(":");
if (tokens.length == 2) {
int w = Integer.parseInt(tokens[0]);
int h = Integer.parseInt(tokens[1]);
return CameraAspectRatio.fromFraction(w, h);
}
float floatAr = Float.parseFloat(tokens[0]);
return CameraAspectRatio.fromFloat(floatAr);
}
}

View File

@@ -2,6 +2,7 @@ package com.genymobile.scrcpy;
import android.os.BatteryManager;
import android.os.Build;
import android.os.Looper;
import java.io.IOException;
import java.util.ArrayList;
@@ -88,12 +89,6 @@ public final class Server {
private static void scrcpy(Options options) throws IOException, ConfigurationException {
Ln.i("Device: [" + Build.MANUFACTURER + "] " + Build.BRAND + " " + Build.MODEL + " (Android " + Build.VERSION.RELEASE + ")");
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.S && options.getVideoSource() == VideoSource.CAMERA) {
Ln.e("Camera mirroring is not supported before Android 12");
throw new ConfigurationException("Camera mirroring is not supported");
}
final Device device = new Device(options);
Thread initThread = startInitThread(options);
@@ -143,8 +138,9 @@ public final class Server {
if (options.getVideoSource() == VideoSource.DISPLAY) {
surfaceCapture = new ScreenCapture(device);
} else {
surfaceCapture = new CameraCapture(options.getCameraId(), options.getCameraFacing(), options.getCameraSize(),
options.getMaxSize(), options.getCameraAspectRatio(), options.getCameraFps(), options.getCameraHighSpeed());
CameraCapture.CameraSelection cameraSelection = new CameraCapture.CameraSelection(options.getCameraId(),
options.getCameraFacing());
surfaceCapture = new CameraCapture(cameraSelection, options.getCameraSize());
}
SurfaceEncoder surfaceEncoder = new SurfaceEncoder(surfaceCapture, videoStreamer, options.getVideoBitRate(), options.getMaxFps(),
options.getVideoCodecOptions(), options.getVideoEncoder(), options.getDownsizeOnError());
@@ -184,22 +180,7 @@ public final class Server {
return thread;
}
public static void main(String... args) {
int status = 0;
try {
internalMain(args);
} catch (Throwable t) {
t.printStackTrace();
status = 1;
} finally {
// By default, the Java process exits when all non-daemon threads are terminated.
// The Android SDK might start some non-daemon threads internally, preventing the scrcpy server to exit.
// So force the process to exit explicitly.
System.exit(status);
}
}
private static void internalMain(String... args) throws Exception {
public static void main(String... args) throws Exception {
Thread.setDefaultUncaughtExceptionHandler((t, e) -> {
Ln.e("Exception on thread " + t, e);
});

View File

@@ -1,5 +1,7 @@
package com.genymobile.scrcpy;
import android.media.MediaRecorder;
public enum VideoSource {
DISPLAY("display"),
CAMERA("camera");

View File

@@ -32,7 +32,7 @@ public final class Workarounds {
Workarounds.prepareMainLooper();
boolean mustFillAppInfo = false;
boolean mustFillBaseContext = false;
boolean mustFillBaseContext = true;
boolean mustFillAppContext = false;
if (Build.BRAND.equalsIgnoreCase("meizu")) {
@@ -66,7 +66,6 @@ public final class Workarounds {
if (camera) {
mustFillAppInfo = true;
mustFillBaseContext = true;
}
if (mustFillAppInfo) {

View File

@@ -1,6 +1,7 @@
package com.genymobile.scrcpy.wrappers;
import com.genymobile.scrcpy.FakeContext;
import com.genymobile.scrcpy.Workarounds;
import android.annotation.SuppressLint;
import android.content.Context;