Compare commits

..

15 Commits

Author SHA1 Message Date
Romain Vimont
ba09254b38 refilter_on_resize 2021-02-12 23:26:39 +01:00
Romain Vimont
03cc0cf543 swscalewip 2021-02-12 23:26:39 +01:00
Romain Vimont
f6570a7a41 scale_filter 2021-02-12 23:20:26 +01:00
Romain Vimont
87e3ad7756 Handle window events only once visible
This will avoid corner cases where we need to resize while no frame has
been received yet.
2021-02-12 23:20:26 +01:00
Romain Vimont
c6de5b0a15 Log mipmaps error only if mipmaps were requested 2021-02-12 22:04:50 +01:00
Romain Vimont
0b4b4609dc Extract texture management to a separate file
Move texture management out of screen.c, which already handles the
window and the rendering.

This paves the way to implement optional software filtering (swscale)
properly, as an alternative to mipmaps (which are not available
everywhere).
2021-02-12 21:58:38 +01:00
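In code terms, the split this commit prepares can be sketched as a single predicate (the helper name below is illustrative; it mirrors is_swscale_enabled() in the new app/src/frame_texture.c shown later in this diff):

```c
// Sketch only: "trilinear" keeps the GPU mipmap path (YUV texture +
// GenerateMipmap), while every other filter except "none" goes through
// libswscale on the CPU and uploads an RGB texture instead.
#include <stdbool.h>
#include "scrcpy.h" // enum sc_scale_filter

static inline bool
scale_filter_uses_swscale(enum sc_scale_filter filter) {
    return filter != SC_SCALE_FILTER_NONE
        && filter != SC_SCALE_FILTER_TRILINEAR;
}
```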
Romain Vimont
edf710af3c Make use_opengl local
The flag is only used locally, so there is no need to store it in the
screen structure.
2021-02-07 19:12:14 +01:00
Romain Vimont
46bf4d55a7 Add intermediate frame in video buffer
There were only two frames simultaneously:
 - one used by the decoder;
 - one used by the renderer.

When the decoder finished decoding a frame, it swapped it with the
rendering frame.

Adding a third frame provides several benefits:
 - the decoder does not have to wait for the renderer to release the
   mutex;
 - it simplifies the video_buffer API;
 - it makes the rendering frame valid until the next call to
   video_buffer_take_rendering_frame(), which will be useful for
   swscaling on window resize.
2021-02-05 17:28:26 +01:00
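A minimal sketch of the three-frame scheme described above; the two function names appear in the diffs below, but the struct fields and locking details are assumptions, not necessarily the exact scrcpy implementation:

```c
// Three frames: one written by the decoder, one pending, one owned by the
// renderer. Producer and consumer only ever swap pointers under the mutex.
#include <stdbool.h>
#include <libavutil/frame.h>

#include "util/thread.h" // scrcpy wrapper around SDL mutexes (sc_mutex)

struct video_buffer {
    sc_mutex mutex;
    AVFrame *decoding_frame;   // written by the decoder
    AVFrame *pending_frame;    // last complete frame, waiting to be rendered
    AVFrame *rendering_frame;  // owned by the renderer until the next take
    bool pending_frame_consumed;
};

// Decoder side: publish the decoded frame without waiting for the renderer
// to release its own frame (only a short mutex-protected pointer swap).
void
video_buffer_offer_decoded_frame(struct video_buffer *vb,
                                 bool *previous_frame_skipped) {
    sc_mutex_lock(&vb->mutex);
    AVFrame *tmp = vb->decoding_frame;
    vb->decoding_frame = vb->pending_frame;
    vb->pending_frame = tmp;
    *previous_frame_skipped = !vb->pending_frame_consumed;
    vb->pending_frame_consumed = false;
    sc_mutex_unlock(&vb->mutex);
}

// Renderer side: take the latest pending frame; the returned frame remains
// valid until the next call, which allows re-swscaling it on window resize.
const AVFrame *
video_buffer_take_rendering_frame(struct video_buffer *vb) {
    sc_mutex_lock(&vb->mutex);
    vb->pending_frame_consumed = true;
    AVFrame *tmp = vb->rendering_frame;
    vb->rendering_frame = vb->pending_frame;
    vb->pending_frame = tmp;
    sc_mutex_unlock(&vb->mutex);
    return vb->rendering_frame;
}
```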
Romain Vimont
017036ddab Assert non-recursive usage of mutexes 2021-02-05 17:27:26 +01:00
Romain Vimont
a9582b1d43 Add mutex assertions 2021-02-04 08:12:22 +01:00
Romain Vimont
fa3e84b700 Expose mutex assertions
Add a function to assert that the mutex is held (or not).
2021-02-04 08:12:22 +01:00
Romain Vimont
1521de9051 Expose thread id 2021-02-04 08:12:22 +01:00
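Together with "Assert non-recursive usage of mutexes" above, these two commits suggest a sketch along these lines (the sc_mutex layout and helper names are assumptions; the checks only matter in debug builds, cf. the NDEBUG line in the meson diff below):

```c
// Sketch: track the owning thread id in debug builds so that code can assert
// that a mutex is (or is not) held by the current thread.
#include <assert.h>
#include <SDL2/SDL_mutex.h>
#include <SDL2/SDL_thread.h>

typedef SDL_threadID sc_thread_id;

typedef struct sc_mutex {
    SDL_mutex *mutex;
#ifndef NDEBUG
    sc_thread_id locker; // 0 when the mutex is not held
#endif
} sc_mutex;

static inline sc_thread_id
sc_thread_get_id(void) {
    return SDL_ThreadID();
}

static inline void
sc_mutex_lock(sc_mutex *mutex) {
#ifndef NDEBUG
    // best-effort check for non-recursive usage
    assert(mutex->locker != sc_thread_get_id());
#endif
    int r = SDL_LockMutex(mutex->mutex);
    assert(!r);
    (void) r;
#ifndef NDEBUG
    mutex->locker = sc_thread_get_id();
#endif
}

static inline void
sc_mutex_unlock(sc_mutex *mutex) {
#ifndef NDEBUG
    mutex->locker = 0;
#endif
    int r = SDL_UnlockMutex(mutex->mutex);
    assert(!r);
    (void) r;
}

static inline void
sc_mutex_assert_locked_by_current_thread(sc_mutex *mutex) {
#ifndef NDEBUG
    assert(mutex->locker == sc_thread_get_id());
#else
    (void) mutex;
#endif
}
```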
Romain Vimont
eabaabdb78 Wrap SDL thread functions into scrcpy-specific API
The goal is to expose a consistent API for system tools, and to pave the
way to making the "core" independent of SDL in the future.
2021-02-04 08:12:22 +01:00
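A minimal sketch of such a wrapper, consistent with the sc_thread_create()/sc_thread_join() call sites in the recorder diff below (the exact struct layout is an assumption):

```c
// Thin scrcpy-specific layer over SDL threads.
#include <stdbool.h>
#include <SDL2/SDL_thread.h>

typedef struct sc_thread {
    SDL_Thread *thread;
} sc_thread;

typedef int sc_thread_fn(void *userdata);

static inline bool
sc_thread_create(sc_thread *thread, sc_thread_fn fn, const char *name,
                 void *userdata) {
    SDL_Thread *sdl_thread = SDL_CreateThread(fn, name, userdata);
    if (!sdl_thread) {
        return false;
    }
    thread->thread = sdl_thread;
    return true;
}

static inline void
sc_thread_join(sc_thread *thread, int *status) {
    SDL_WaitThread(thread->thread, status);
}
```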
Romain Vimont
8b48003074 Replace SDL_strdup() by strdup()
The functions SDL_malloc(), SDL_free() and SDL_strdup() were used only
because strdup() was not available everywhere.

Now that it is available on all platforms, use the native versions of these functions.
2021-02-04 08:12:22 +01:00
Romain Vimont
74bd25a0ed Provide strdup() compat
Make strdup() available on all platforms.
2021-02-04 08:12:22 +01:00
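A typical fallback implementation, guarded by the HAVE_STRDUP result of the meson check_functions probe shown in the build diff (the exact file layout is an assumption):

```c
// Provide strdup() only where the platform does not already have it.
#include <stdlib.h>
#include <string.h>

#ifndef HAVE_STRDUP
char *
strdup(const char *s) {
    size_t size = strlen(s) + 1;
    char *dup = malloc(size);
    if (dup) {
        memcpy(dup, s, size);
    }
    return dup;
}
#endif
```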
50 changed files with 1260 additions and 1556 deletions

View File

@@ -491,6 +491,18 @@ scrcpy -Sw
```
#### Render expired frames
By default, to minimize latency, _scrcpy_ always renders the last decoded frame
available, and drops any previous one.
To force the rendering of all frames (at a cost of a possible increased
latency), use:
```bash
scrcpy --render-expired-frames
```
#### Show touches
For presentations, it may be useful to show physical touches (on the physical

View File

@@ -11,6 +11,7 @@ src = [
'src/event_converter.c',
'src/file_handler.c',
'src/fps_counter.c',
'src/frame_texture.c',
'src/input_manager.c',
'src/opengl.c',
'src/receiver.c',
@@ -33,11 +34,6 @@ else
src += [ 'src/sys/unix/process.c' ]
endif
v4l2_support = host_machine.system() == 'linux'
if v4l2_support
src += [ 'src/v4l2_sink.c' ]
endif
check_functions = [
'strdup'
]
@@ -51,13 +47,10 @@ if not get_option('crossbuild_windows')
dependency('libavformat'),
dependency('libavcodec'),
dependency('libavutil'),
dependency('libswscale'),
dependency('sdl2'),
]
if v4l2_support
dependencies += dependency('libavdevice')
endif
else
# cross-compile mingw32 build (from Linux to Windows)
@@ -108,6 +101,9 @@ foreach f : check_functions
endif
endforeach
# expose the build type
conf.set('NDEBUG', get_option('buildtype') != 'debug')
# the version, updated on release
conf.set_quoted('SCRCPY_VERSION', meson.project_version())
@@ -123,19 +119,29 @@ conf.set('PORTABLE', get_option('portable'))
conf.set('DEFAULT_LOCAL_PORT_RANGE_FIRST', '27183')
conf.set('DEFAULT_LOCAL_PORT_RANGE_LAST', '27199')
# the default max video size for both dimensions, in pixels
# overridden by option --max-size
conf.set('DEFAULT_MAX_SIZE', '0') # 0: unlimited
# the default video orientation
# natural device orientation is 0 and each increment adds 90 degrees
# counterclockwise
# overridden by option --lock-video-orientation
conf.set('DEFAULT_LOCK_VIDEO_ORIENTATION', '-1') # -1: unlocked
# the default video bitrate, in bits/second
# overridden by option --bit-rate
conf.set('DEFAULT_BIT_RATE', '8000000') # 8Mbps
# enable High DPI support
conf.set('HIDPI_SUPPORT', get_option('hidpi_support'))
# run a server debugger and wait for a client to be attached
conf.set('SERVER_DEBUGGER', get_option('server_debugger'))
# select the debugger method ('old' for Android < 9, 'new' for Android >= 9)
conf.set('SERVER_DEBUGGER_METHOD_NEW', get_option('server_debugger_method') == 'new')
# enable V4L2 support (linux only)
conf.set('HAVE_V4L2', v4l2_support)
configure_file(configuration: conf, output: 'config.h')
src_dir = include_directories('src')

View File

@@ -155,6 +155,10 @@ Supported names are currently "direct3d", "opengl", "opengles2", "opengles", "me
.UR https://wiki.libsdl.org/SDL_HINT_RENDER_DRIVER
.UE
.TP
.B \-\-render\-expired\-frames
By default, to minimize latency, scrcpy always renders the last available decoded frame, and drops any previous ones. This flag forces to render all frames, at a cost of a possible increased latency.
.TP
.BI "\-\-rotation " value
Set the initial display rotation. Possibles values are 0, 1, 2 and 3. Each increment adds a 90 degrees rotation counterclockwise.
@@ -163,6 +167,13 @@ Set the initial display rotation. Possibles values are 0, 1, 2 and 3. Each incre
.BI "\-s, \-\-serial " number
The device serial number. Mandatory only if several devices are connected to adb.
.TP
.BI "\-\-scale\-filter filter
Supported filters are "none" and "trilinear".
Trilinear filtering is only available if the renderer is OpenGL 3.0+ or OpenGL
ES 2.0+.
.TP
.BI "\-\-shortcut\-mod " key[+...]][,...]
Specify the modifiers to use for scrcpy shortcuts. Possible keys are "lctrl", "rctrl", "lalt", "ralt", "lsuper" and "rsuper".
@@ -183,16 +194,16 @@ Enable "show touches" on start, restore the initial value on exit.
It only shows physical touches (not clicks from scrcpy).
.TP
.B \-v, \-\-version
Print the version of scrcpy.
.TP
.BI "\-V, \-\-verbosity " value
Set the log level ("debug", "info", "warn" or "error").
Default is "info" for release builds, "debug" for debug builds.
.TP
.B \-v, \-\-version
Print the version of scrcpy.
.TP
.B \-w, \-\-stay-awake
Keep the device on while scrcpy is running, when the device is plugged in.

View File

@@ -10,9 +10,6 @@
#include "util/log.h"
#include "util/str_util.h"
#define STR_IMPL_(x) #x
#define STR(x) STR_IMPL_(x)
void
scrcpy_print_usage(const char *arg0) {
fprintf(stderr,
@@ -26,7 +23,7 @@ scrcpy_print_usage(const char *arg0) {
" -b, --bit-rate value\n"
" Encode the video at the given bit-rate, expressed in bits/s.\n"
" Unit suffixes are supported: 'K' (x1000) and 'M' (x1000000).\n"
" Default is " STR(DEFAULT_BIT_RATE) ".\n"
" Default is %d.\n"
"\n"
" --codec-options key[:type]=value[,...]\n"
" Set a list of comma-separated key:type=value options for the\n"
@@ -84,7 +81,7 @@ scrcpy_print_usage(const char *arg0) {
" Possible values are -1 (unlocked), 0, 1, 2 and 3.\n"
" Natural device orientation is 0, and each increment adds a\n"
" 90 degrees rotation counterclockwise.\n"
" Default is -1 (unlocked).\n"
" Default is %d%s.\n"
"\n"
" --max-fps value\n"
" Limit the frame rate of screen capture (officially supported\n"
@@ -94,7 +91,7 @@ scrcpy_print_usage(const char *arg0) {
" Limit both the width and height of the video to value. The\n"
" other dimension is computed so that the device aspect-ratio\n"
" is preserved.\n"
" Default is 0 (unlimited).\n"
" Default is %d%s.\n"
"\n"
" -n, --no-control\n"
" Disable device control (mirror the device in read-only).\n"
@@ -106,15 +103,9 @@ scrcpy_print_usage(const char *arg0) {
" --no-key-repeat\n"
" Do not forward repeated key events when a key is held down.\n"
"\n"
" --no-mipmaps\n"
" If the renderer is OpenGL 3.0+ or OpenGL ES 2.0+, then\n"
" mipmaps are automatically generated to improve downscaling\n"
" quality. This option disables the generation of mipmaps.\n"
"\n"
" -p, --port port[:port]\n"
" Set the TCP port (range) used by the client to listen.\n"
" Default is " STR(DEFAULT_LOCAL_PORT_RANGE_FIRST) ":"
STR(DEFAULT_LOCAL_PORT_RANGE_LAST) ".\n"
" Default is %d:%d.\n"
"\n"
" --prefer-text\n"
" Inject alpha characters and space as text events instead of\n"
@@ -143,6 +134,12 @@ scrcpy_print_usage(const char *arg0) {
" \"opengles2\", \"opengles\", \"metal\" and \"software\".\n"
" <https://wiki.libsdl.org/SDL_HINT_RENDER_DRIVER>\n"
"\n"
" --render-expired-frames\n"
" By default, to minimize latency, scrcpy always renders the\n"
" last available decoded frame, and drops any previous ones.\n"
" This flag forces to render all frames, at a cost of a\n"
" possible increased latency.\n"
"\n"
" --rotation value\n"
" Set the initial display rotation.\n"
" Possibles values are 0, 1, 2 and 3. Each increment adds a 90\n"
@@ -152,6 +149,11 @@ scrcpy_print_usage(const char *arg0) {
" The device serial number. Mandatory only if several devices\n"
" are connected to adb.\n"
"\n"
" --scale-filter filter\n"
" Supported filters are \"none\" and \"trilinear\".\n"
" Trilinear filtering is only available if the renderer is\n"
" OpenGL 3.0+ or OpenGL ES 2.0+.\n"
"\n"
" --shortcut-mod key[+...]][,...]\n"
" Specify the modifiers to use for scrcpy shortcuts.\n"
" Possible keys are \"lctrl\", \"rctrl\", \"lalt\", \"ralt\",\n"
@@ -173,10 +175,8 @@ scrcpy_print_usage(const char *arg0) {
" on exit.\n"
" It only shows physical touches (not clicks from scrcpy).\n"
"\n"
" --v4l2_sink /dev/videoN\n"
" Output to v4l2loopback device.\n"
" It requires to lock the video orientation (see\n"
" --lock-video-orientation).\n"
" -v, --version\n"
" Print the version of scrcpy.\n"
"\n"
" -V, --verbosity value\n"
" Set the log level (debug, info, warn or error).\n"
@@ -185,9 +185,6 @@ scrcpy_print_usage(const char *arg0) {
#else
" Default is info.\n"
#endif
"\n"
" -v, --version\n"
" Print the version of scrcpy.\n"
"\n"
" -w, --stay-awake\n"
" Keep the device on while scrcpy is running, when the device\n"
@@ -300,7 +297,12 @@ scrcpy_print_usage(const char *arg0) {
"\n"
" Drag & drop APK file\n"
" Install APK from computer\n"
"\n", arg0);
"\n",
arg0,
DEFAULT_BIT_RATE,
DEFAULT_LOCK_VIDEO_ORIENTATION, DEFAULT_LOCK_VIDEO_ORIENTATION >= 0 ? "" : " (unlocked)",
DEFAULT_MAX_SIZE, DEFAULT_MAX_SIZE ? "" : " (unlimited)",
DEFAULT_LOCAL_PORT_RANGE_FIRST, DEFAULT_LOCAL_PORT_RANGE_LAST);
}
static bool
@@ -639,6 +641,62 @@ guess_record_format(const char *filename) {
return 0;
}
static bool
parse_scale_filter(const char *optarg, enum sc_scale_filter *filter) {
// TODO store in a map and loop over the entries instead
if (!strcmp(optarg, "none")) {
*filter = SC_SCALE_FILTER_NONE;
return true;
}
if (!strcmp(optarg, "trilinear")) {
*filter = SC_SCALE_FILTER_TRILINEAR;
return true;
}
if (!strcmp(optarg, "bilinear")) {
*filter = SC_SCALE_FILTER_BILINEAR;
return true;
}
if (!strcmp(optarg, "bicubic")) {
*filter = SC_SCALE_FILTER_BICUBIC;
return true;
}
if (!strcmp(optarg, "x")) {
*filter = SC_SCALE_FILTER_X;
return true;
}
if (!strcmp(optarg, "point")) {
*filter = SC_SCALE_FILTER_POINT;
return true;
}
if (!strcmp(optarg, "area")) {
*filter = SC_SCALE_FILTER_AREA;
return true;
}
if (!strcmp(optarg, "bicublin")) {
*filter = SC_SCALE_FILTER_BICUBLIN;
return true;
}
if (!strcmp(optarg, "gauss")) {
*filter = SC_SCALE_FILTER_GAUSS;
return true;
}
if (!strcmp(optarg, "sinc")) {
*filter = SC_SCALE_FILTER_SINC;
return true;
}
if (!strcmp(optarg, "lanczos")) {
*filter = SC_SCALE_FILTER_LANCZOS;
return true;
}
if (!strcmp(optarg, "spline")) {
*filter = SC_SCALE_FILTER_SPLINE;
return true;
}
LOGE("Unsupported scale filter: %s "
"(expected one of [TODO])", optarg);
return false;
}
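The TODO above could be addressed with a name table; a possible sketch, assuming the sc_scale_filter_names[] array from scrcpy.c (later in this diff) were shared with the CLI parser:

```c
// Hypothetical table-driven variant of parse_scale_filter(); assumes
// "util/log.h", <string.h> and the enum from scrcpy.h are already included,
// as in cli.c, and that sc_scale_filter_names[] is exposed to this file.
static bool
parse_scale_filter_from_table(const char *optarg,
                              enum sc_scale_filter *filter) {
    for (int i = 0; i < SC_SCALE_FILTER__COUNT; ++i) {
        if (!strcmp(optarg, sc_scale_filter_names[i])) {
            *filter = (enum sc_scale_filter) i;
            return true;
        }
    }
    LOGE("Unsupported scale filter: %s", optarg);
    return false;
}
```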
#define OPT_RENDER_EXPIRED_FRAMES 1000
#define OPT_WINDOW_TITLE 1001
#define OPT_PUSH_TARGET 1002
@@ -665,8 +723,7 @@ guess_record_format(const char *filename) {
#define OPT_FORWARD_ALL_CLICKS 1023
#define OPT_LEGACY_PASTE 1024
#define OPT_ENCODER_NAME 1025
#define OPT_POWER_OFF_ON_CLOSE 1026
#define OPT_V4L2_SINK 1027
#define OPT_SCALE_FILTER 1026
bool
scrcpy_parse_args(struct scrcpy_cli_args *args, int argc, char *argv[]) {
@@ -703,14 +760,12 @@ scrcpy_parse_args(struct scrcpy_cli_args *args, int argc, char *argv[]) {
{"render-expired-frames", no_argument, NULL,
OPT_RENDER_EXPIRED_FRAMES},
{"rotation", required_argument, NULL, OPT_ROTATION},
{"scale-filter", required_argument, NULL, OPT_SCALE_FILTER},
{"serial", required_argument, NULL, 's'},
{"shortcut-mod", required_argument, NULL, OPT_SHORTCUT_MOD},
{"show-touches", no_argument, NULL, 't'},
{"stay-awake", no_argument, NULL, 'w'},
{"turn-screen-off", no_argument, NULL, 'S'},
#ifdef HAVE_V4L2
{"v4l2_sink", required_argument, NULL, OPT_V4L2_SINK},
#endif
{"verbosity", required_argument, NULL, 'V'},
{"version", no_argument, NULL, 'v'},
{"window-title", required_argument, NULL, OPT_WINDOW_TITLE},
@@ -720,8 +775,6 @@ scrcpy_parse_args(struct scrcpy_cli_args *args, int argc, char *argv[]) {
{"window-height", required_argument, NULL, OPT_WINDOW_HEIGHT},
{"window-borderless", no_argument, NULL,
OPT_WINDOW_BORDERLESS},
{"power-off-on-close", no_argument, NULL,
OPT_POWER_OFF_ON_CLOSE},
{NULL, 0, NULL, 0 },
};
@@ -819,8 +872,7 @@ scrcpy_parse_args(struct scrcpy_cli_args *args, int argc, char *argv[]) {
opts->stay_awake = true;
break;
case OPT_RENDER_EXPIRED_FRAMES:
LOGW("Option --render-expired-frames has been removed. This "
"flag has been ignored.");
opts->render_expired_frames = true;
break;
case OPT_WINDOW_TITLE:
opts->window_title = optarg;
@@ -863,7 +915,9 @@ scrcpy_parse_args(struct scrcpy_cli_args *args, int argc, char *argv[]) {
opts->render_driver = optarg;
break;
case OPT_NO_MIPMAPS:
opts->mipmaps = false;
LOGW("Deprecated option --no-mipmaps. "
"Use --scale-filter=none instead.");
opts->scale_filter = SC_SCALE_FILTER_NONE;
break;
case OPT_NO_KEY_REPEAT:
opts->forward_key_repeat = false;
@@ -891,11 +945,10 @@ scrcpy_parse_args(struct scrcpy_cli_args *args, int argc, char *argv[]) {
case OPT_LEGACY_PASTE:
opts->legacy_paste = true;
break;
case OPT_POWER_OFF_ON_CLOSE:
opts->power_off_on_close = true;
break;
case OPT_V4L2_SINK:
opts->v4l2_device = optarg;
case OPT_SCALE_FILTER:
if (!parse_scale_filter(optarg, &opts->scale_filter)) {
return false;
}
break;
default:
// getopt prints the error message on stderr
@@ -903,23 +956,11 @@ scrcpy_parse_args(struct scrcpy_cli_args *args, int argc, char *argv[]) {
}
}
if (!opts->display && !opts->record_filename && !opts->v4l2_device) {
LOGE("-N/--no-display requires either screen recording (-r/--record)"
#ifdef HAVE_V4L2
" or sink to v4l2loopback device (--v4l2_sink)"
#endif
);
if (!opts->display && !opts->record_filename) {
LOGE("-N/--no-display requires screen recording (-r/--record)");
return false;
}
#ifdef HAVE_V4L2
if (opts->v4l2_device && opts->lock_video_orientation == -1) {
LOGI("Video orientation is locked for v4l2 sink. "
"See --lock-video-orientation.");
opts->lock_video_orientation = 0;
}
#endif
int index = optind;
if (index < argc) {
LOGE("Unexpected additional argument: %s", argv[index]);

View File

@@ -8,7 +8,4 @@
#define MIN(X,Y) (X) < (Y) ? (X) : (Y)
#define MAX(X,Y) (X) > (Y) ? (X) : (Y)
#define container_of(ptr, type, member) \
((type *) (((char *) (ptr)) - offsetof(type, member)))
#endif

View File

@@ -8,9 +8,20 @@
# define _DARWIN_C_SOURCE
#endif
#include <libavcodec/version.h>
#include <libavformat/version.h>
#include <SDL2/SDL_version.h>
// In ffmpeg/doc/APIchanges:
// 2016-04-11 - 6f69f7a / 9200514 - lavf 57.33.100 / 57.5.0 - avformat.h
// Add AVStream.codecpar, deprecate AVStream.codec.
#if (LIBAVFORMAT_VERSION_MICRO >= 100 /* FFmpeg */ && \
LIBAVFORMAT_VERSION_INT >= AV_VERSION_INT(57, 33, 100)) \
|| (LIBAVFORMAT_VERSION_MICRO < 100 && /* Libav */ \
LIBAVFORMAT_VERSION_INT >= AV_VERSION_INT(57, 5, 0))
# define SCRCPY_LAVF_HAS_NEW_CODEC_PARAMS_API
#endif
// In ffmpeg/doc/APIchanges:
// 2018-02-06 - 0694d87024 - lavf 58.9.100 - avformat.h
// Deprecate use of av_register_input_format(), av_register_output_format(),
@@ -22,6 +33,15 @@
# define SCRCPY_LAVF_REQUIRES_REGISTER_ALL
#endif
// In ffmpeg/doc/APIchanges:
// 2016-04-21 - 7fc329e - lavc 57.37.100 - avcodec.h
// Add a new audio/video encoding and decoding API with decoupled input
// and output -- avcodec_send_packet(), avcodec_receive_frame(),
// avcodec_send_frame() and avcodec_receive_packet().
#if LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(57, 37, 100)
# define SCRCPY_LAVF_HAS_NEW_ENCODING_DECODING_API
#endif
#if SDL_VERSION_ATLEAST(2, 0, 5)
// <https://wiki.libsdl.org/SDL_HINT_MOUSE_FOCUS_CLICKTHROUGH>
# define SCRCPY_SDL_HAS_HINT_MOUSE_FOCUS_CLICKTHROUGH
@@ -36,7 +56,7 @@
# define SCRCPY_SDL_HAS_HINT_VIDEO_X11_NET_WM_BYPASS_COMPOSITOR
#endif
#ifndef HAVE_STRDUP
#ifdef HAVE_STRDUP
char *strdup(const char *s);
#endif

View File

@@ -67,9 +67,6 @@ control_msg_serialize(const struct control_msg *msg, unsigned char *buf) {
buffer_write32be(&buf[17],
(uint32_t) msg->inject_scroll_event.vscroll);
return 21;
case CONTROL_MSG_TYPE_BACK_OR_SCREEN_ON:
buf[1] = msg->inject_keycode.action;
return 2;
case CONTROL_MSG_TYPE_SET_CLIPBOARD: {
buf[1] = !!msg->set_clipboard.paste;
size_t len = write_string(msg->set_clipboard.text,
@@ -80,6 +77,7 @@ control_msg_serialize(const struct control_msg *msg, unsigned char *buf) {
case CONTROL_MSG_TYPE_SET_SCREEN_POWER_MODE:
buf[1] = msg->set_screen_power_mode.mode;
return 2;
case CONTROL_MSG_TYPE_BACK_OR_SCREEN_ON:
case CONTROL_MSG_TYPE_EXPAND_NOTIFICATION_PANEL:
case CONTROL_MSG_TYPE_COLLAPSE_NOTIFICATION_PANEL:
case CONTROL_MSG_TYPE_GET_CLIPBOARD:

View File

@@ -64,10 +64,6 @@ struct control_msg {
int32_t hscroll;
int32_t vscroll;
} inject_scroll_event;
struct {
enum android_keyevent_action action; // action for the BACK key
// screen may only be turned on on ACTION_DOWN
} back_or_screen_on;
struct {
char *text; // owned, to be freed by free()
bool paste;

View File

@@ -1,43 +1,38 @@
#include "decoder.h"
#include <libavformat/avformat.h>
#include <libavutil/time.h>
#include <SDL2/SDL_events.h>
#include <unistd.h>
#include "events.h"
#include "recorder.h"
#include "video_buffer.h"
#include "trait/frame_sink.h"
#include "util/buffer_util.h"
#include "util/log.h"
/** Downcast packet_sink to decoder */
#define DOWNCAST(SINK) container_of(SINK, struct decoder, packet_sink)
// set the decoded frame as ready for rendering, and notify
static void
decoder_close_first_sinks(struct decoder *decoder, unsigned count) {
while (count) {
struct sc_frame_sink *sink = decoder->sinks[--count];
sink->ops->close(sink);
push_frame(struct decoder *decoder) {
bool previous_frame_skipped;
video_buffer_offer_decoded_frame(decoder->video_buffer,
&previous_frame_skipped);
if (previous_frame_skipped) {
// the previous EVENT_NEW_FRAME will consume this frame
return;
}
static SDL_Event new_frame_event = {
.type = EVENT_NEW_FRAME,
};
SDL_PushEvent(&new_frame_event);
}
static inline void
decoder_close_sinks(struct decoder *decoder) {
decoder_close_first_sinks(decoder, decoder->sink_count);
void
decoder_init(struct decoder *decoder, struct video_buffer *vb) {
decoder->video_buffer = vb;
}
static bool
decoder_open_sinks(struct decoder *decoder) {
for (unsigned i = 0; i < decoder->sink_count; ++i) {
struct sc_frame_sink *sink = decoder->sinks[i];
if (!sink->ops->open(sink)) {
LOGE("Could not open frame sink %d", i);
decoder_close_first_sinks(decoder, i);
return false;
}
}
return true;
}
static bool
bool
decoder_open(struct decoder *decoder, const AVCodec *codec) {
decoder->codec_ctx = avcodec_alloc_context3(codec);
if (!decoder->codec_ctx) {
@@ -51,106 +46,52 @@ decoder_open(struct decoder *decoder, const AVCodec *codec) {
return false;
}
decoder->frame = av_frame_alloc();
if (!decoder->frame) {
LOGE("Could not create decoder frame");
avcodec_close(decoder->codec_ctx);
avcodec_free_context(&decoder->codec_ctx);
return false;
}
if (!decoder_open_sinks(decoder)) {
LOGE("Could not open decoder sinks");
av_frame_free(&decoder->frame);
avcodec_close(decoder->codec_ctx);
avcodec_free_context(&decoder->codec_ctx);
return false;
}
return true;
}
static void
void
decoder_close(struct decoder *decoder) {
decoder_close_sinks(decoder);
av_frame_free(&decoder->frame);
avcodec_close(decoder->codec_ctx);
avcodec_free_context(&decoder->codec_ctx);
}
static bool
push_frame_to_sinks(struct decoder *decoder, const AVFrame *frame) {
for (unsigned i = 0; i < decoder->sink_count; ++i) {
struct sc_frame_sink *sink = decoder->sinks[i];
if (!sink->ops->push(sink, frame)) {
LOGE("Could not send frame to sink %d", i);
return false;
}
}
return true;
}
static bool
bool
decoder_push(struct decoder *decoder, const AVPacket *packet) {
bool is_config = packet->pts == AV_NOPTS_VALUE;
if (is_config) {
// nothing to do
return true;
}
int ret = avcodec_send_packet(decoder->codec_ctx, packet);
if (ret < 0 && ret != AVERROR(EAGAIN)) {
// the new decoding/encoding API has been introduced by:
// <http://git.videolan.org/?p=ffmpeg.git;a=commitdiff;h=7fc329e2dd6226dfecaa4a1d7adf353bf2773726>
#ifdef SCRCPY_LAVF_HAS_NEW_ENCODING_DECODING_API
int ret;
if ((ret = avcodec_send_packet(decoder->codec_ctx, packet)) < 0) {
LOGE("Could not send video packet: %d", ret);
return false;
}
ret = avcodec_receive_frame(decoder->codec_ctx, decoder->frame);
ret = avcodec_receive_frame(decoder->codec_ctx,
decoder->video_buffer->decoding_frame);
if (!ret) {
// a frame was received
bool ok = push_frame_to_sinks(decoder, decoder->frame);
// A frame lost should not make the whole pipeline fail. The error, if
// any, is already logged.
(void) ok;
push_frame(decoder);
} else if (ret != AVERROR(EAGAIN)) {
LOGE("Could not receive video frame: %d", ret);
return false;
}
#else
int got_picture;
int len = avcodec_decode_video2(decoder->codec_ctx,
decoder->video_buffer->decoding_frame,
&got_picture,
packet);
if (len < 0) {
LOGE("Could not decode video packet: %d", len);
return false;
}
if (got_picture) {
push_frame(decoder);
}
#endif
return true;
}
static bool
decoder_packet_sink_open(struct sc_packet_sink *sink, const AVCodec *codec) {
struct decoder *decoder = DOWNCAST(sink);
return decoder_open(decoder, codec);
}
static void
decoder_packet_sink_close(struct sc_packet_sink *sink) {
struct decoder *decoder = DOWNCAST(sink);
decoder_close(decoder);
}
static bool
decoder_packet_sink_push(struct sc_packet_sink *sink, const AVPacket *packet) {
struct decoder *decoder = DOWNCAST(sink);
return decoder_push(decoder, packet);
}
void
decoder_init(struct decoder *decoder) {
static const struct sc_packet_sink_ops ops = {
.open = decoder_packet_sink_open,
.close = decoder_packet_sink_close,
.push = decoder_packet_sink_push,
};
decoder->packet_sink.ops = &ops;
}
void
decoder_add_sink(struct decoder *decoder, struct sc_frame_sink *sink) {
assert(decoder->sink_count < DECODER_MAX_SINKS);
assert(sink);
assert(sink->ops);
decoder->sinks[decoder->sink_count++] = sink;
decoder_interrupt(struct decoder *decoder) {
video_buffer_interrupt(decoder->video_buffer);
}

View File

@@ -3,27 +3,29 @@
#include "common.h"
#include "trait/packet_sink.h"
#include <stdbool.h>
#include <libavformat/avformat.h>
#define DECODER_MAX_SINKS 2
struct video_buffer;
struct decoder {
struct sc_packet_sink packet_sink; // packet sink trait
struct sc_frame_sink *sinks[DECODER_MAX_SINKS];
unsigned sink_count;
struct video_buffer *video_buffer;
AVCodecContext *codec_ctx;
AVFrame *frame;
};
void
decoder_init(struct decoder *decoder);
decoder_init(struct decoder *decoder, struct video_buffer *vb);
bool
decoder_open(struct decoder *decoder, const AVCodec *codec);
void
decoder_add_sink(struct decoder *decoder, struct sc_frame_sink *sink);
decoder_close(struct decoder *decoder);
bool
decoder_push(struct decoder *decoder, const AVPacket *packet);
void
decoder_interrupt(struct decoder *decoder);
#endif

View File

@@ -1,2 +1,3 @@
#define EVENT_NEW_FRAME SDL_USEREVENT
#define EVENT_STREAM_STOPPED (SDL_USEREVENT + 1)
#define EVENT_NEW_SESSION SDL_USEREVENT
#define EVENT_NEW_FRAME (SDL_USEREVENT + 1)
#define EVENT_STREAM_STOPPED (SDL_USEREVENT + 2)

app/src/frame_texture.c (new file, 247 lines)
View File

@@ -0,0 +1,247 @@
#include "frame_texture.h"
#include <assert.h>
#include <libavutil/imgutils.h>
#include <libswscale/swscale.h>
#include "util/log.h"
static inline bool
is_swscale_enabled(enum sc_scale_filter scale_filter) {
return scale_filter != SC_SCALE_FILTER_NONE
&& scale_filter != SC_SCALE_FILTER_TRILINEAR;
}
static SDL_Texture *
create_texture(struct sc_frame_texture *ftex, struct size size) {
SDL_Renderer *renderer = ftex->renderer;
SDL_PixelFormatEnum fmt = is_swscale_enabled(ftex->scale_filter)
? SDL_PIXELFORMAT_RGB24
: SDL_PIXELFORMAT_YV12;
SDL_Texture *texture = SDL_CreateTexture(renderer, fmt,
SDL_TEXTUREACCESS_STREAMING,
size.width, size.height);
if (!texture) {
return NULL;
}
if (ftex->scale_filter == SC_SCALE_FILTER_TRILINEAR) {
struct sc_opengl *gl = &ftex->gl;
SDL_GL_BindTexture(texture, NULL, NULL);
// Enable trilinear filtering for downscaling
gl->TexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER,
GL_LINEAR_MIPMAP_LINEAR);
gl->TexParameterf(GL_TEXTURE_2D, GL_TEXTURE_LOD_BIAS, -1.f);
SDL_GL_UnbindTexture(texture);
}
return texture;
}
bool
sc_frame_texture_init(struct sc_frame_texture *ftex, SDL_Renderer *renderer,
enum sc_scale_filter scale_filter,
struct size initial_size) {
SDL_RendererInfo renderer_info;
int r = SDL_GetRendererInfo(renderer, &renderer_info);
const char *renderer_name = r ? NULL : renderer_info.name;
LOGI("Renderer: %s", renderer_name ? renderer_name : "(unknown)");
ftex->scale_filter = scale_filter;
// starts with "opengl"
bool use_opengl = renderer_name && !strncmp(renderer_name, "opengl", 6);
if (use_opengl) {
struct sc_opengl *gl = &ftex->gl;
sc_opengl_init(gl);
LOGI("OpenGL version: %s", gl->version);
if (scale_filter == SC_SCALE_FILTER_TRILINEAR) {
bool supports_mipmaps =
sc_opengl_version_at_least(gl, 3, 0, /* OpenGL 3.0+ */
2, 0 /* OpenGL ES 2.0+ */);
if (!supports_mipmaps) {
LOGW("Trilinear filtering disabled "
"(OpenGL 3.0+ or ES 2.0+ required)");
ftex->scale_filter = SC_SCALE_FILTER_NONE;
}
}
} else if (scale_filter == SC_SCALE_FILTER_TRILINEAR) {
LOGD("Trilinear filtering disabled (not an OpenGL renderer)");
}
LOGD("Scale filter: %s\n", sc_scale_filter_name(ftex->scale_filter));
LOGI("Initial texture: %" PRIu16 "x%" PRIu16, initial_size.width,
initial_size.height);
ftex->renderer = renderer;
ftex->texture = create_texture(ftex, initial_size);
if (!ftex->texture) {
LOGC("Could not create texture: %s", SDL_GetError());
SDL_DestroyRenderer(ftex->renderer);
return false;
}
ftex->texture_size = initial_size;
ftex->decoded_frame = NULL;
memset(ftex->data, 0, sizeof(ftex->data));
memset(ftex->linesize, 0, sizeof(ftex->linesize));
return true;
}
void
sc_frame_texture_destroy(struct sc_frame_texture *ftex) {
if (ftex->texture) {
SDL_DestroyTexture(ftex->texture);
}
}
static int
to_sws_filter(enum sc_scale_filter scale_filter) {
switch (scale_filter) {
case SC_SCALE_FILTER_BILINEAR: return SWS_BILINEAR;
case SC_SCALE_FILTER_BICUBIC: return SWS_BICUBIC;
case SC_SCALE_FILTER_X: return SWS_X;
case SC_SCALE_FILTER_POINT: return SWS_POINT;
case SC_SCALE_FILTER_AREA: return SWS_AREA;
case SC_SCALE_FILTER_BICUBLIN: return SWS_BICUBLIN;
case SC_SCALE_FILTER_GAUSS: return SWS_GAUSS;
case SC_SCALE_FILTER_SINC: return SWS_SINC;
case SC_SCALE_FILTER_LANCZOS: return SWS_LANCZOS;
case SC_SCALE_FILTER_SPLINE: return SWS_SPLINE;
default: assert(!"unsupported filter");
}
}
static bool
screen_generate_resized_frame(struct sc_frame_texture *ftex,
struct size target_size) {
assert(is_swscale_enabled(ftex->scale_filter));
// TODO
if (ftex->data[0]) {
av_freep(&ftex->data[0]);
}
int ret = av_image_alloc(ftex->data, ftex->linesize, target_size.width,
target_size.height, AV_PIX_FMT_RGB24, 16);
if (ret < 0) {
return false;
}
const AVFrame *input = ftex->decoded_frame;
assert(input);
int flags = to_sws_filter(ftex->scale_filter);
struct SwsContext *ctx =
sws_getContext(input->width, input->height, AV_PIX_FMT_YUV420P,
target_size.width, target_size.height, AV_PIX_FMT_RGB24,
flags, NULL, NULL, NULL);
if (!ctx) {
return false;
}
sws_scale(ctx, (const uint8_t *const *) input->data, input->linesize, 0,
input->height, ftex->data, ftex->linesize);
sws_freeContext(ctx);
return true;
}
static bool
sc_frame_texture_update_direct(struct sc_frame_texture *ftex,
const AVFrame *frame) {
struct size frame_size = {frame->width, frame->height};
if (ftex->texture_size.width != frame_size.width
|| ftex->texture_size.height != frame_size.height) {
// Frame dimensions changed, destroy texture
SDL_DestroyTexture(ftex->texture);
LOGI("New texture: %" PRIu16 "x%" PRIu16, frame_size.width,
frame_size.height);
ftex->texture = create_texture(ftex, frame_size);
if (!ftex->texture) {
LOGC("Could not create texture: %s", SDL_GetError());
return false;
}
ftex->texture_size = frame_size;
}
SDL_UpdateYUVTexture(ftex->texture, NULL,
frame->data[0], frame->linesize[0],
frame->data[1], frame->linesize[1],
frame->data[2], frame->linesize[2]);
if (ftex->scale_filter == SC_SCALE_FILTER_TRILINEAR) {
SDL_GL_BindTexture(ftex->texture, NULL, NULL);
ftex->gl.GenerateMipmap(GL_TEXTURE_2D);
SDL_GL_UnbindTexture(ftex->texture);
}
return true;
}
static bool
sc_frame_texture_update_swscale(struct sc_frame_texture *ftex,
const AVFrame *frame, struct size target_size) {
if (ftex->texture_size.width != target_size.width
|| ftex->texture_size.height != target_size.height) {
// Frame dimensions changed, destroy texture
SDL_DestroyTexture(ftex->texture);
ftex->texture = create_texture(ftex, target_size);
if (!ftex->texture) {
LOGC("Could not create texture: %s", SDL_GetError());
return false;
}
ftex->texture_size = target_size;
}
// The frame is valid until the next call to
// video_buffer_take_rendering_frame()
ftex->decoded_frame = frame;
bool ok = screen_generate_resized_frame(ftex, target_size);
if (!ok) {
LOGE("Failed to resize frame\n");
return false;
}
SDL_UpdateTexture(ftex->texture, NULL, ftex->data[0], ftex->linesize[0]);
if (ftex->scale_filter == SC_SCALE_FILTER_TRILINEAR) {
SDL_GL_BindTexture(ftex->texture, NULL, NULL);
ftex->gl.GenerateMipmap(GL_TEXTURE_2D);
SDL_GL_UnbindTexture(ftex->texture);
}
return true;
}
bool
sc_frame_texture_update(struct sc_frame_texture *ftex, const AVFrame *frame,
struct size target_size) {
if (is_swscale_enabled(ftex->scale_filter)) {
return sc_frame_texture_update_swscale(ftex, frame, target_size);
}
return sc_frame_texture_update_direct(ftex, frame);
}
bool
sc_frame_texture_resize(struct sc_frame_texture *ftex,
struct size target_size) {
if (is_swscale_enabled(ftex->scale_filter)) {
return sc_frame_texture_update_swscale(ftex, ftex->decoded_frame,
target_size);
}
// Nothing to do
return true;
}

app/src/frame_texture.h (new file, 44 lines)
View File

@@ -0,0 +1,44 @@
#ifndef SC_FRAME_TEXTURE_H
#define SC_FRAME_TEXTURE_H
#include "common.h"
#include <stdbool.h>
#include <SDL2/SDL.h>
#include <libavformat/avformat.h>
#include "coords.h"
#include "opengl.h"
#include "scrcpy.h"
struct sc_frame_texture {
SDL_Renderer *renderer; // owned by struct screen
enum sc_scale_filter scale_filter;
struct sc_opengl gl;
SDL_Texture *texture;
struct size texture_size;
// For swscaling
const AVFrame *decoded_frame; // owned by the video_buffer
uint8_t *data[4];
int linesize[4];
};
bool
sc_frame_texture_init(struct sc_frame_texture *ftex, SDL_Renderer *renderer,
enum sc_scale_filter scale_filter,
struct size initial_size);
void
sc_frame_texture_destroy(struct sc_frame_texture *ftex);
bool
sc_frame_texture_update(struct sc_frame_texture *ftex, const AVFrame *frame,
struct size target_size);
bool
sc_frame_texture_resize(struct sc_frame_texture *ftex, struct size target_size);
#endif

View File

@@ -146,25 +146,13 @@ action_cut(struct controller *controller, int actions) {
}
// turn the screen on if it was off, press BACK otherwise
// If the screen is off, it is turned on only on ACTION_DOWN
static void
press_back_or_turn_screen_on(struct controller *controller, int actions) {
press_back_or_turn_screen_on(struct controller *controller) {
struct control_msg msg;
msg.type = CONTROL_MSG_TYPE_BACK_OR_SCREEN_ON;
if (actions & ACTION_DOWN) {
msg.back_or_screen_on.action = AKEY_EVENT_ACTION_DOWN;
if (!controller_push_msg(controller, &msg)) {
LOGW("Could not request 'press back or turn screen on'");
return;
}
}
if (actions & ACTION_UP) {
msg.back_or_screen_on.action = AKEY_EVENT_ACTION_UP;
if (!controller_push_msg(controller, &msg)) {
LOGW("Could not request 'press back or turn screen on'");
}
if (!controller_push_msg(controller, &msg)) {
LOGW("Could not request 'press back or turn screen on'");
}
}
@@ -298,7 +286,7 @@ rotate_client_right(struct screen *screen) {
screen_set_rotation(screen, new_rotation);
}
static void
void
input_manager_process_text_input(struct input_manager *im,
const SDL_TextInputEvent *event) {
if (is_shortcut_mod(im, SDL_GetModState())) {
@@ -378,7 +366,7 @@ convert_input_key(const SDL_KeyboardEvent *from, struct control_msg *to,
return true;
}
static void
void
input_manager_process_key(struct input_manager *im,
const SDL_KeyboardEvent *event) {
// control: indicates the state of the command-line option --no-control
@@ -492,7 +480,9 @@ input_manager_process_key(struct input_manager *im,
return;
case SDLK_i:
if (!shift && !repeat && down) {
switch_fps_counter_state(im->fps_counter);
struct fps_counter *fps_counter =
im->video_buffer->fps_counter;
switch_fps_counter_state(fps_counter);
}
return;
case SDLK_n:
@@ -561,7 +551,7 @@ convert_mouse_motion(const SDL_MouseMotionEvent *from, struct screen *screen,
return true;
}
static void
void
input_manager_process_mouse_motion(struct input_manager *im,
const SDL_MouseMotionEvent *event) {
if (!event->state) {
@@ -615,7 +605,7 @@ convert_touch(const SDL_TouchFingerEvent *from, struct screen *screen,
return true;
}
static void
void
input_manager_process_touch(struct input_manager *im,
const SDL_TouchFingerEvent *event) {
struct control_msg msg;
@@ -647,7 +637,7 @@ convert_mouse_button(const SDL_MouseButtonEvent *from, struct screen *screen,
return true;
}
static void
void
input_manager_process_mouse_button(struct input_manager *im,
const SDL_MouseButtonEvent *event) {
bool control = im->control;
@@ -658,23 +648,13 @@ input_manager_process_mouse_button(struct input_manager *im,
}
bool down = event->type == SDL_MOUSEBUTTONDOWN;
if (!im->forward_all_clicks) {
int action = down ? ACTION_DOWN : ACTION_UP;
if (control && event->button == SDL_BUTTON_X1) {
action_app_switch(im->controller, action);
return;
}
if (control && event->button == SDL_BUTTON_X2 && down) {
expand_notification_panel(im->controller);
return;
}
if (!im->forward_all_clicks && down) {
if (control && event->button == SDL_BUTTON_RIGHT) {
press_back_or_turn_screen_on(im->controller, action);
press_back_or_turn_screen_on(im->controller);
return;
}
if (control && event->button == SDL_BUTTON_MIDDLE) {
action_home(im->controller, action);
action_home(im->controller, ACTION_DOWN | ACTION_UP);
return;
}
@@ -687,9 +667,7 @@ input_manager_process_mouse_button(struct input_manager *im,
bool outside = x < r->x || x >= r->x + r->w
|| y < r->y || y >= r->y + r->h;
if (outside) {
if (down) {
screen_resize_to_fit(im->screen);
}
screen_resize_to_fit(im->screen);
return;
}
}
@@ -758,7 +736,7 @@ convert_mouse_wheel(const SDL_MouseWheelEvent *from, struct screen *screen,
return true;
}
static void
void
input_manager_process_mouse_wheel(struct input_manager *im,
const SDL_MouseWheelEvent *event) {
struct control_msg msg;
@@ -768,46 +746,3 @@ input_manager_process_mouse_wheel(struct input_manager *im,
}
}
}
bool
input_manager_handle_event(struct input_manager *im, SDL_Event *event) {
switch (event->type) {
case SDL_TEXTINPUT:
if (!im->control) {
return true;
}
input_manager_process_text_input(im, &event->text);
return true;
case SDL_KEYDOWN:
case SDL_KEYUP:
// some key events do not interact with the device, so process the
// event even if control is disabled
input_manager_process_key(im, &event->key);
return true;
case SDL_MOUSEMOTION:
if (!im->control) {
break;
}
input_manager_process_mouse_motion(im, &event->motion);
return true;
case SDL_MOUSEWHEEL:
if (!im->control) {
break;
}
input_manager_process_mouse_wheel(im, &event->wheel);
return true;
case SDL_MOUSEBUTTONDOWN:
case SDL_MOUSEBUTTONUP:
// some mouse events do not interact with the device, so process
// the event even if control is disabled
input_manager_process_mouse_button(im, &event->button);
return true;
case SDL_FINGERMOTION:
case SDL_FINGERDOWN:
case SDL_FINGERUP:
input_manager_process_touch(im, &event->tfinger);
return true;
}
return false;
}

View File

@@ -11,10 +11,11 @@
#include "fps_counter.h"
#include "scrcpy.h"
#include "screen.h"
#include "video_buffer.h"
struct input_manager {
struct controller *controller;
struct fps_counter *fps_counter;
struct video_buffer *video_buffer;
struct screen *screen;
// SDL reports repeated events as a boolean, but Android expects the actual
@@ -39,7 +40,28 @@ void
input_manager_init(struct input_manager *im,
const struct scrcpy_options *options);
bool
input_manager_handle_event(struct input_manager *im, SDL_Event *event);
void
input_manager_process_text_input(struct input_manager *im,
const SDL_TextInputEvent *event);
void
input_manager_process_key(struct input_manager *im,
const SDL_KeyboardEvent *event);
void
input_manager_process_mouse_motion(struct input_manager *im,
const SDL_MouseMotionEvent *event);
void
input_manager_process_touch(struct input_manager *im,
const SDL_TouchFingerEvent *event);
void
input_manager_process_mouse_button(struct input_manager *im,
const SDL_MouseButtonEvent *event);
void
input_manager_process_mouse_wheel(struct input_manager *im,
const SDL_MouseWheelEvent *event);
#endif

View File

@@ -6,9 +6,6 @@
#include <stdbool.h>
#include <unistd.h>
#include <libavformat/avformat.h>
#ifdef HAVE_V4L2
# include <libavdevice/avdevice.h>
#endif
#define SDL_MAIN_HANDLED // avoid link error on Linux Windows Subsystem
#include <SDL2/SDL.h>
@@ -31,11 +28,6 @@ print_version(void) {
fprintf(stderr, " - libavutil %d.%d.%d\n", LIBAVUTIL_VERSION_MAJOR,
LIBAVUTIL_VERSION_MINOR,
LIBAVUTIL_VERSION_MICRO);
#ifdef HAVE_V4L2
fprintf(stderr, " - libavdevice %d.%d.%d\n", LIBAVDEVICE_VERSION_MAJOR,
LIBAVDEVICE_VERSION_MINOR,
LIBAVDEVICE_VERSION_MICRO);
#endif
}
static SDL_LogPriority
@@ -98,12 +90,6 @@ main(int argc, char *argv[]) {
av_register_all();
#endif
#ifdef HAVE_V4L2
if (args.opts.v4l2_device) {
avdevice_register_all();
}
#endif
if (avformat_network_init()) {
return 1;
}

View File

@@ -4,10 +4,6 @@
#include <libavutil/time.h>
#include "util/log.h"
#include "util/str_util.h"
/** Downcast packet_sink to recorder */
#define DOWNCAST(SINK) container_of(SINK, struct recorder, packet_sink)
static const AVRational SCRCPY_TIME_BASE = {1, 1000000}; // timestamps in us
@@ -23,8 +19,8 @@ find_muxer(const char *name) {
#else
oformat = av_oformat_next(oformat);
#endif
// until null or containing the requested name
} while (oformat && !strlist_contains(oformat->name, ',', name));
// until null or with name "mp4"
} while (oformat && strcmp(oformat->name, name));
return oformat;
}
@@ -61,6 +57,50 @@ recorder_queue_clear(struct recorder_queue *queue) {
}
}
bool
recorder_init(struct recorder *recorder,
const char *filename,
enum sc_record_format format,
struct size declared_frame_size) {
recorder->filename = strdup(filename);
if (!recorder->filename) {
LOGE("Could not strdup filename");
return false;
}
bool ok = sc_mutex_init(&recorder->mutex);
if (!ok) {
LOGC("Could not create mutex");
free(recorder->filename);
return false;
}
ok = sc_cond_init(&recorder->queue_cond);
if (!ok) {
LOGC("Could not create cond");
sc_mutex_destroy(&recorder->mutex);
free(recorder->filename);
return false;
}
queue_init(&recorder->queue);
recorder->stopped = false;
recorder->failed = false;
recorder->format = format;
recorder->declared_frame_size = declared_frame_size;
recorder->header_written = false;
recorder->previous = NULL;
return true;
}
void
recorder_destroy(struct recorder *recorder) {
sc_cond_destroy(&recorder->queue_cond);
sc_mutex_destroy(&recorder->mutex);
free(recorder->filename);
}
static const char *
recorder_get_format_name(enum sc_record_format format) {
switch (format) {
@@ -70,6 +110,88 @@ recorder_get_format_name(enum sc_record_format format) {
}
}
bool
recorder_open(struct recorder *recorder, const AVCodec *input_codec) {
const char *format_name = recorder_get_format_name(recorder->format);
assert(format_name);
const AVOutputFormat *format = find_muxer(format_name);
if (!format) {
LOGE("Could not find muxer");
return false;
}
recorder->ctx = avformat_alloc_context();
if (!recorder->ctx) {
LOGE("Could not allocate output context");
return false;
}
// contrary to the deprecated API (av_oformat_next()), av_muxer_iterate()
// returns (on purpose) a pointer-to-const, but AVFormatContext.oformat
// still expects a pointer-to-non-const (it has not be updated accordingly)
// <https://github.com/FFmpeg/FFmpeg/commit/0694d8702421e7aff1340038559c438b61bb30dd>
recorder->ctx->oformat = (AVOutputFormat *) format;
av_dict_set(&recorder->ctx->metadata, "comment",
"Recorded by scrcpy " SCRCPY_VERSION, 0);
AVStream *ostream = avformat_new_stream(recorder->ctx, input_codec);
if (!ostream) {
avformat_free_context(recorder->ctx);
return false;
}
#ifdef SCRCPY_LAVF_HAS_NEW_CODEC_PARAMS_API
ostream->codecpar->codec_type = AVMEDIA_TYPE_VIDEO;
ostream->codecpar->codec_id = input_codec->id;
ostream->codecpar->format = AV_PIX_FMT_YUV420P;
ostream->codecpar->width = recorder->declared_frame_size.width;
ostream->codecpar->height = recorder->declared_frame_size.height;
#else
ostream->codec->codec_type = AVMEDIA_TYPE_VIDEO;
ostream->codec->codec_id = input_codec->id;
ostream->codec->pix_fmt = AV_PIX_FMT_YUV420P;
ostream->codec->width = recorder->declared_frame_size.width;
ostream->codec->height = recorder->declared_frame_size.height;
#endif
int ret = avio_open(&recorder->ctx->pb, recorder->filename,
AVIO_FLAG_WRITE);
if (ret < 0) {
LOGE("Failed to open output file: %s", recorder->filename);
// ostream will be cleaned up during context cleaning
avformat_free_context(recorder->ctx);
return false;
}
LOGI("Recording started to %s file: %s", format_name, recorder->filename);
return true;
}
void
recorder_close(struct recorder *recorder) {
if (recorder->header_written) {
int ret = av_write_trailer(recorder->ctx);
if (ret < 0) {
LOGE("Failed to write trailer to %s", recorder->filename);
recorder->failed = true;
}
} else {
// the recorded file is empty
recorder->failed = true;
}
avio_close(recorder->ctx->pb);
avformat_free_context(recorder->ctx);
if (recorder->failed) {
LOGE("Recording failed to %s", recorder->filename);
} else {
const char *format_name = recorder_get_format_name(recorder->format);
LOGI("Recording complete to %s file: %s", format_name, recorder->filename);
}
}
static bool
recorder_write_header(struct recorder *recorder, const AVPacket *packet) {
AVStream *ostream = recorder->ctx->streams[0];
@@ -83,8 +205,13 @@ recorder_write_header(struct recorder *recorder, const AVPacket *packet) {
// copy the first packet to the extra data
memcpy(extradata, packet->data, packet->size);
#ifdef SCRCPY_LAVF_HAS_NEW_CODEC_PARAMS_API
ostream->codecpar->extradata = extradata;
ostream->codecpar->extradata_size = packet->size;
#else
ostream->codec->extradata = extradata;
ostream->codec->extradata_size = packet->size;
#endif
int ret = avformat_write_header(recorder->ctx, NULL);
if (ret < 0) {
@@ -101,7 +228,7 @@ recorder_rescale_packet(struct recorder *recorder, AVPacket *packet) {
av_packet_rescale_ts(packet, SCRCPY_TIME_BASE, ostream->time_base);
}
static bool
bool
recorder_write(struct recorder *recorder, AVPacket *packet) {
if (!recorder->header_written) {
if (packet->pts != AV_NOPTS_VALUE) {
@@ -190,26 +317,7 @@ run_recorder(void *data) {
sc_mutex_unlock(&recorder->mutex);
break;
}
}
if (!recorder->failed) {
if (recorder->header_written) {
int ret = av_write_trailer(recorder->ctx);
if (ret < 0) {
LOGE("Failed to write trailer to %s", recorder->filename);
recorder->failed = true;
}
} else {
// the recorded file is empty
recorder->failed = true;
}
}
if (recorder->failed) {
LOGE("Recording failed to %s", recorder->filename);
} else {
const char *format_name = recorder_get_format_name(recorder->format);
LOGI("Recording complete to %s file: %s", format_name, recorder->filename);
}
LOGD("Recorder thread ended");
@@ -217,108 +325,34 @@ run_recorder(void *data) {
return 0;
}
static bool
recorder_open(struct recorder *recorder, const AVCodec *input_codec) {
bool ok = sc_mutex_init(&recorder->mutex);
if (!ok) {
LOGC("Could not create mutex");
return false;
}
ok = sc_cond_init(&recorder->queue_cond);
if (!ok) {
LOGC("Could not create cond");
goto error_mutex_destroy;
}
queue_init(&recorder->queue);
recorder->stopped = false;
recorder->failed = false;
recorder->header_written = false;
recorder->previous = NULL;
const char *format_name = recorder_get_format_name(recorder->format);
assert(format_name);
const AVOutputFormat *format = find_muxer(format_name);
if (!format) {
LOGE("Could not find muxer");
goto error_cond_destroy;
}
recorder->ctx = avformat_alloc_context();
if (!recorder->ctx) {
LOGE("Could not allocate output context");
goto error_cond_destroy;
}
// contrary to the deprecated API (av_oformat_next()), av_muxer_iterate()
// returns (on purpose) a pointer-to-const, but AVFormatContext.oformat
// still expects a pointer-to-non-const (it has not be updated accordingly)
// <https://github.com/FFmpeg/FFmpeg/commit/0694d8702421e7aff1340038559c438b61bb30dd>
recorder->ctx->oformat = (AVOutputFormat *) format;
av_dict_set(&recorder->ctx->metadata, "comment",
"Recorded by scrcpy " SCRCPY_VERSION, 0);
AVStream *ostream = avformat_new_stream(recorder->ctx, input_codec);
if (!ostream) {
goto error_avformat_free_context;
}
ostream->codecpar->codec_type = AVMEDIA_TYPE_VIDEO;
ostream->codecpar->codec_id = input_codec->id;
ostream->codecpar->format = AV_PIX_FMT_YUV420P;
ostream->codecpar->width = recorder->declared_frame_size.width;
ostream->codecpar->height = recorder->declared_frame_size.height;
int ret = avio_open(&recorder->ctx->pb, recorder->filename,
AVIO_FLAG_WRITE);
if (ret < 0) {
LOGE("Failed to open output file: %s", recorder->filename);
// ostream will be cleaned up during context cleaning
goto error_avformat_free_context;
}
bool
recorder_start(struct recorder *recorder) {
LOGD("Starting recorder thread");
ok = sc_thread_create(&recorder->thread, run_recorder, "recorder",
bool ok = sc_thread_create(&recorder->thread, run_recorder, "recorder",
recorder);
if (!ok) {
LOGC("Could not start recorder thread");
goto error_avio_close;
return false;
}
LOGI("Recording started to %s file: %s", format_name, recorder->filename);
return true;
error_avio_close:
avio_close(recorder->ctx->pb);
error_avformat_free_context:
avformat_free_context(recorder->ctx);
error_cond_destroy:
sc_cond_destroy(&recorder->queue_cond);
error_mutex_destroy:
sc_mutex_destroy(&recorder->mutex);
return false;
}
static void
recorder_close(struct recorder *recorder) {
void
recorder_stop(struct recorder *recorder) {
sc_mutex_lock(&recorder->mutex);
recorder->stopped = true;
sc_cond_signal(&recorder->queue_cond);
sc_mutex_unlock(&recorder->mutex);
sc_thread_join(&recorder->thread, NULL);
avio_close(recorder->ctx->pb);
avformat_free_context(recorder->ctx);
sc_cond_destroy(&recorder->queue_cond);
sc_mutex_destroy(&recorder->mutex);
}
static bool
void
recorder_join(struct recorder *recorder) {
sc_thread_join(&recorder->thread, NULL);
}
bool
recorder_push(struct recorder *recorder, const AVPacket *packet) {
sc_mutex_lock(&recorder->mutex);
assert(!recorder->stopped);
@@ -342,51 +376,3 @@ recorder_push(struct recorder *recorder, const AVPacket *packet) {
sc_mutex_unlock(&recorder->mutex);
return true;
}
static bool
recorder_packet_sink_open(struct sc_packet_sink *sink, const AVCodec *codec) {
struct recorder *recorder = DOWNCAST(sink);
return recorder_open(recorder, codec);
}
static void
recorder_packet_sink_close(struct sc_packet_sink *sink) {
struct recorder *recorder = DOWNCAST(sink);
recorder_close(recorder);
}
static bool
recorder_packet_sink_push(struct sc_packet_sink *sink, const AVPacket *packet) {
struct recorder *recorder = DOWNCAST(sink);
return recorder_push(recorder, packet);
}
bool
recorder_init(struct recorder *recorder,
const char *filename,
enum sc_record_format format,
struct size declared_frame_size) {
recorder->filename = strdup(filename);
if (!recorder->filename) {
LOGE("Could not strdup filename");
return false;
}
recorder->format = format;
recorder->declared_frame_size = declared_frame_size;
static const struct sc_packet_sink_ops ops = {
.open = recorder_packet_sink_open,
.close = recorder_packet_sink_close,
.push = recorder_packet_sink_push,
};
recorder->packet_sink.ops = &ops;
return true;
}
void
recorder_destroy(struct recorder *recorder) {
free(recorder->filename);
}

View File

@@ -8,7 +8,6 @@
#include "coords.h"
#include "scrcpy.h"
#include "trait/packet_sink.h"
#include "util/queue.h"
#include "util/thread.h"
@@ -20,8 +19,6 @@ struct record_packet {
struct recorder_queue QUEUE(struct record_packet);
struct recorder {
struct sc_packet_sink packet_sink; // packet sink trait
char *filename;
enum sc_record_format format;
AVFormatContext *ctx;
@@ -31,7 +28,7 @@ struct recorder {
sc_thread thread;
sc_mutex mutex;
sc_cond queue_cond;
bool stopped; // set on recorder_close()
bool stopped; // set on recorder_stop() by the stream reader
bool failed; // set on packet write failure
struct recorder_queue queue;
@@ -49,4 +46,22 @@ recorder_init(struct recorder *recorder, const char *filename,
void
recorder_destroy(struct recorder *recorder);
bool
recorder_open(struct recorder *recorder, const AVCodec *input_codec);
void
recorder_close(struct recorder *recorder);
bool
recorder_start(struct recorder *recorder);
void
recorder_stop(struct recorder *recorder);
void
recorder_join(struct recorder *recorder);
bool
recorder_push(struct recorder *recorder, const AVPacket *packet);
#endif

View File

@@ -25,27 +25,43 @@
#include "server.h"
#include "stream.h"
#include "tiny_xpm.h"
#include "video_buffer.h"
#include "util/log.h"
#include "util/net.h"
#ifdef HAVE_V4L2
# include "v4l2_sink.h"
#endif
static const char *sc_scale_filter_names[SC_SCALE_FILTER__COUNT] = {
[SC_SCALE_FILTER_NONE] = "none",
[SC_SCALE_FILTER_TRILINEAR] = "trilinear",
[SC_SCALE_FILTER_BILINEAR] = "bilinear",
[SC_SCALE_FILTER_BICUBIC] = "bicubic",
[SC_SCALE_FILTER_X] = "x",
[SC_SCALE_FILTER_POINT] = "point",
[SC_SCALE_FILTER_AREA] = "area",
[SC_SCALE_FILTER_BICUBLIN] = "bicublin",
[SC_SCALE_FILTER_GAUSS] = "gauss",
[SC_SCALE_FILTER_SINC] = "sinc",
[SC_SCALE_FILTER_LANCZOS] = "lanczos",
[SC_SCALE_FILTER_SPLINE] = "spline",
};
const char *
sc_scale_filter_name(enum sc_scale_filter scale_filter) {
return sc_scale_filter_names[scale_filter];
}
static struct server server;
static struct screen screen;
static struct screen screen = SCREEN_INITIALIZER;
static struct fps_counter fps_counter;
static struct video_buffer video_buffer;
static struct stream stream;
static struct decoder decoder;
static struct recorder recorder;
#ifdef HAVE_V4L2
static struct sc_v4l2_sink v4l2_sink;
#endif
static struct controller controller;
static struct file_handler file_handler;
static struct input_manager input_manager = {
.controller = &controller,
.fps_counter = &fps_counter,
.video_buffer = &video_buffer,
.screen = &screen,
.repeat = 0,
@@ -132,6 +148,30 @@ sdl_init_and_configure(bool display, const char *render_driver,
return true;
}
#if defined(__APPLE__) || defined(__WINDOWS__)
# define CONTINUOUS_RESIZING_WORKAROUND
#endif
#ifdef CONTINUOUS_RESIZING_WORKAROUND
// On Windows and MacOS, resizing blocks the event loop, so resizing events are
// not triggered. As a workaround, handle them in an event handler.
//
// <https://bugzilla.libsdl.org/show_bug.cgi?id=2077>
// <https://stackoverflow.com/a/40693139/1987178>
static int
event_watcher(void *data, SDL_Event *event) {
(void) data;
if (event->type == SDL_WINDOWEVENT
&& event->window.event == SDL_WINDOWEVENT_RESIZED) {
// In practice, it seems to always be called from the same thread in
// that specific case. Anyway, it's just a workaround.
screen_render(&screen, true);
}
return 0;
}
#endif
static bool
is_apk(const char *file) {
const char *ext = strrchr(file, '.');
@@ -153,6 +193,56 @@ handle_event(SDL_Event *event, const struct scrcpy_options *options) {
case SDL_QUIT:
LOGD("User requested to quit");
return EVENT_RESULT_STOPPED_BY_USER;
case EVENT_NEW_FRAME:
if (!screen.has_frame) {
screen.has_frame = true;
// this is the very first frame, show the window
screen_show_window(&screen);
}
if (!screen_update_frame(&screen, &video_buffer)) {
return EVENT_RESULT_CONTINUE;
}
break;
case SDL_WINDOWEVENT:
if (screen.has_frame) {
screen_handle_window_event(&screen, &event->window);
}
break;
case SDL_TEXTINPUT:
if (!options->control) {
break;
}
input_manager_process_text_input(&input_manager, &event->text);
break;
case SDL_KEYDOWN:
case SDL_KEYUP:
// some key events do not interact with the device, so process the
// event even if control is disabled
input_manager_process_key(&input_manager, &event->key);
break;
case SDL_MOUSEMOTION:
if (!options->control) {
break;
}
input_manager_process_mouse_motion(&input_manager, &event->motion);
break;
case SDL_MOUSEWHEEL:
if (!options->control) {
break;
}
input_manager_process_mouse_wheel(&input_manager, &event->wheel);
break;
case SDL_MOUSEBUTTONDOWN:
case SDL_MOUSEBUTTONUP:
// some mouse events do not interact with the device, so process
// the event even if control is disabled
input_manager_process_mouse_button(&input_manager, &event->button);
break;
case SDL_FINGERMOTION:
case SDL_FINGERDOWN:
case SDL_FINGERUP:
input_manager_process_touch(&input_manager, &event->tfinger);
break;
case SDL_DROPFILE: {
if (!options->control) {
break;
@@ -171,24 +261,19 @@ handle_event(SDL_Event *event, const struct scrcpy_options *options) {
action = ACTION_PUSH_FILE;
}
file_handler_request(&file_handler, action, file);
goto end;
break;
}
}
bool consumed = screen_handle_event(&screen, event);
if (consumed) {
goto end;
}
consumed = input_manager_handle_event(&input_manager, event);
(void) consumed;
end:
return EVENT_RESULT_CONTINUE;
}
static bool
event_loop(const struct scrcpy_options *options) {
#ifdef CONTINUOUS_RESIZING_WORKAROUND
if (options->display) {
SDL_AddEventWatch(event_watcher, NULL);
}
#endif
SDL_Event event;
while (SDL_WaitEvent(&event)) {
enum event_result result = handle_event(&event, options);
@@ -251,15 +336,12 @@ scrcpy(const struct scrcpy_options *options) {
bool server_started = false;
bool fps_counter_initialized = false;
bool video_buffer_initialized = false;
bool file_handler_initialized = false;
bool recorder_initialized = false;
#ifdef HAVE_V4L2
bool v4l2_sink_initialized = false;
#endif
bool stream_started = false;
bool controller_initialized = false;
bool controller_started = false;
bool screen_initialized = false;
bool record = !!options->record_filename;
struct server_params params = {
@@ -277,7 +359,6 @@ scrcpy(const struct scrcpy_options *options) {
.codec_options = options->codec_options,
.encoder_name = options->encoder_name,
.force_adb_forward = options->force_adb_forward,
.power_off_on_close = options->power_off_on_close,
};
if (!server_start(&server, options->serial, &params)) {
goto end;
@@ -311,6 +392,12 @@ scrcpy(const struct scrcpy_options *options) {
}
fps_counter_initialized = true;
if (!video_buffer_init(&video_buffer, &fps_counter,
options->render_expired_frames)) {
goto end;
}
video_buffer_initialized = true;
if (options->control) {
if (!file_handler_init(&file_handler, server.serial,
options->push_target)) {
@@ -319,7 +406,7 @@ scrcpy(const struct scrcpy_options *options) {
file_handler_initialized = true;
}
decoder_init(&decoder);
decoder_init(&decoder, &video_buffer);
dec = &decoder;
}
@@ -337,15 +424,14 @@ scrcpy(const struct scrcpy_options *options) {
av_log_set_callback(av_log_callback);
stream_init(&stream, server.video_socket);
stream_init(&stream, server.video_socket, dec, rec);
if (dec) {
stream_add_sink(&stream, &dec->packet_sink);
}
if (rec) {
stream_add_sink(&stream, &rec->packet_sink);
// now we consumed the header values, the socket receives the video stream
// start the stream
if (!stream_start(&stream)) {
goto end;
}
stream_started = true;
if (options->display) {
if (options->control) {
@@ -363,26 +449,14 @@ scrcpy(const struct scrcpy_options *options) {
const char *window_title =
options->window_title ? options->window_title : device_name;
struct screen_params screen_params = {
.window_title = window_title,
.frame_size = frame_size,
.always_on_top = options->always_on_top,
.window_x = options->window_x,
.window_y = options->window_y,
.window_width = options->window_width,
.window_height = options->window_height,
.window_borderless = options->window_borderless,
.rotation = options->rotation,
.mipmaps = options->mipmaps,
.fullscreen = options->fullscreen,
};
if (!screen_init(&screen, &fps_counter, &screen_params)) {
if (!screen_init_rendering(&screen, window_title, frame_size,
options->always_on_top, options->window_x,
options->window_y, options->window_width,
options->window_height,
options->window_borderless,
options->rotation, options->scale_filter)) {
goto end;
}
screen_initialized = true;
decoder_add_sink(&decoder, &screen.frame_sink);
if (options->turn_screen_off) {
struct control_msg msg;
@@ -393,39 +467,25 @@ scrcpy(const struct scrcpy_options *options) {
LOGW("Could not request 'set screen power mode'");
}
}
}
#ifdef HAVE_V4L2
if (options->v4l2_device) {
if (!sc_v4l2_sink_init(&v4l2_sink, options->v4l2_device, frame_size)) {
goto end;
if (options->fullscreen) {
screen_switch_fullscreen(&screen);
}
decoder_add_sink(&decoder, &v4l2_sink.frame_sink);
v4l2_sink_initialized = true;
}
#endif
// now we consumed the header values, the socket receives the video stream
// start the stream
if (!stream_start(&stream)) {
goto end;
}
stream_started = true;
input_manager_init(&input_manager, options);
ret = event_loop(options);
LOGD("quit...");
// Close the window immediately on closing, because screen_destroy() may
// only be called once the stream thread is joined (it may take time)
screen_hide_window(&screen);
screen_destroy(&screen);
end:
// The stream is not stopped explicitly, because it will stop by itself on
// end-of-stream
// stop stream and controller so that they don't continue once their socket
// is shutdown
if (stream_started) {
stream_stop(&stream);
}
if (controller_started) {
controller_stop(&controller);
}
@@ -446,19 +506,6 @@ end:
if (stream_started) {
stream_join(&stream);
}
#ifdef HAVE_V4L2
if (v4l2_sink_initialized) {
sc_v4l2_sink_destroy(&v4l2_sink);
}
#endif
// Destroy the screen only after the stream is guaranteed to be finished,
// because otherwise the screen could receive new frames after destruction
if (screen_initialized) {
screen_destroy(&screen);
}
if (controller_started) {
controller_join(&controller);
}
@@ -475,6 +522,10 @@ end:
file_handler_destroy(&file_handler);
}
if (video_buffer_initialized) {
video_buffer_destroy(&video_buffer);
}
if (fps_counter_initialized) {
fps_counter_join(&fps_counter);
fps_counter_destroy(&fps_counter);

View File

@@ -41,6 +41,25 @@ struct sc_port_range {
uint16_t last;
};
enum sc_scale_filter {
SC_SCALE_FILTER_NONE,
SC_SCALE_FILTER_TRILINEAR, // mipmaps
SC_SCALE_FILTER_BILINEAR,
SC_SCALE_FILTER_BICUBIC,
SC_SCALE_FILTER_X,
SC_SCALE_FILTER_POINT,
SC_SCALE_FILTER_AREA,
SC_SCALE_FILTER_BICUBLIN,
SC_SCALE_FILTER_GAUSS,
SC_SCALE_FILTER_SINC,
SC_SCALE_FILTER_LANCZOS,
SC_SCALE_FILTER_SPLINE,
SC_SCALE_FILTER__COUNT,
};
const char *
sc_scale_filter_name(enum sc_scale_filter scale_filter);
#define SC_WINDOW_POSITION_UNDEFINED (-0x8000)
struct scrcpy_options {
@@ -52,11 +71,11 @@ struct scrcpy_options {
const char *render_driver;
const char *codec_options;
const char *encoder_name;
const char *v4l2_device;
enum sc_log_level log_level;
enum sc_record_format record_format;
struct sc_port_range port_range;
struct sc_shortcut_mods shortcut_mods;
enum sc_scale_filter scale_filter;
uint16_t max_size;
uint32_t bit_rate;
uint16_t max_fps;
@@ -73,16 +92,15 @@ struct scrcpy_options {
bool control;
bool display;
bool turn_screen_off;
bool render_expired_frames;
bool prefer_text;
bool window_borderless;
bool mipmaps;
bool stay_awake;
bool force_adb_forward;
bool disable_screensaver;
bool forward_key_repeat;
bool forward_all_clicks;
bool legacy_paste;
bool power_off_on_close;
};
#define SCRCPY_OPTIONS_DEFAULT { \
@@ -94,7 +112,6 @@ struct scrcpy_options {
.render_driver = NULL, \
.codec_options = NULL, \
.encoder_name = NULL, \
.v4l2_device = NULL, \
.log_level = SC_LOG_LEVEL_INFO, \
.record_format = SC_RECORD_FORMAT_AUTO, \
.port_range = { \
@@ -105,10 +122,11 @@ struct scrcpy_options {
.data = {SC_MOD_LALT, SC_MOD_LSUPER}, \
.count = 2, \
}, \
.max_size = 0, \
.scale_filter = SC_SCALE_FILTER_TRILINEAR, \
.max_size = DEFAULT_MAX_SIZE, \
.bit_rate = DEFAULT_BIT_RATE, \
.max_fps = 0, \
.lock_video_orientation = -1, \
.lock_video_orientation = DEFAULT_LOCK_VIDEO_ORIENTATION, \
.rotation = 0, \
.window_x = SC_WINDOW_POSITION_UNDEFINED, \
.window_y = SC_WINDOW_POSITION_UNDEFINED, \
@@ -121,16 +139,15 @@ struct scrcpy_options {
.control = true, \
.display = true, \
.turn_screen_off = false, \
.render_expired_frames = false, \
.prefer_text = false, \
.window_borderless = false, \
.mipmaps = true, \
.stay_awake = false, \
.force_adb_forward = false, \
.disable_screensaver = false, \
.forward_key_repeat = true, \
.forward_all_clicks = false, \
.legacy_paste = false, \
.power_off_on_close = false, \
}
bool

View File

@@ -4,17 +4,13 @@
#include <string.h>
#include <SDL2/SDL.h>
#include "events.h"
#include "icon.xpm"
#include "scrcpy.h"
#include "tiny_xpm.h"
#include "video_buffer.h"
#include "util/log.h"
#define DISPLAY_MARGINS 96
#define DOWNCAST(SINK) container_of(SINK, struct screen, frame_sink)
static inline struct size
get_rotated_size(struct size size, int rotation) {
struct size rotated_size;
@@ -193,137 +189,32 @@ screen_update_content_rect(struct screen *screen) {
}
}
static inline SDL_Texture *
create_texture(struct screen *screen) {
SDL_Renderer *renderer = screen->renderer;
struct size size = screen->frame_size;
SDL_Texture *texture = SDL_CreateTexture(renderer, SDL_PIXELFORMAT_YV12,
SDL_TEXTUREACCESS_STREAMING,
size.width, size.height);
if (!texture) {
return NULL;
}
if (screen->mipmaps) {
struct sc_opengl *gl = &screen->gl;
SDL_GL_BindTexture(texture, NULL, NULL);
// Enable trilinear filtering for downscaling
gl->TexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER,
GL_LINEAR_MIPMAP_LINEAR);
gl->TexParameterf(GL_TEXTURE_2D, GL_TEXTURE_LOD_BIAS, -1.f);
SDL_GL_UnbindTexture(texture);
}
return texture;
}
#if defined(__APPLE__) || defined(__WINDOWS__)
# define CONTINUOUS_RESIZING_WORKAROUND
#endif
#ifdef CONTINUOUS_RESIZING_WORKAROUND
// On Windows and MacOS, resizing blocks the event loop, so resizing events are
// not triggered. As a workaround, handle them in an event handler.
//
// <https://bugzilla.libsdl.org/show_bug.cgi?id=2077>
// <https://stackoverflow.com/a/40693139/1987178>
static int
event_watcher(void *data, SDL_Event *event) {
struct screen *screen = data;
if (event->type == SDL_WINDOWEVENT
&& event->window.event == SDL_WINDOWEVENT_RESIZED) {
// In practice, it seems to always be called from the same thread in
// that specific case. Anyway, it's just a workaround.
screen_render(screen, true);
}
return 0;
}
#endif
static bool
screen_frame_sink_open(struct sc_frame_sink *sink) {
struct screen *screen = DOWNCAST(sink);
(void) screen;
#ifndef NDEBUG
screen->open = true;
#endif
// nothing to do, the screen is already open on the main thread
return true;
}
static void
screen_frame_sink_close(struct sc_frame_sink *sink) {
struct screen *screen = DOWNCAST(sink);
(void) screen;
#ifndef NDEBUG
screen->open = false;
#endif
// nothing to do, the screen lifecycle is not managed by the frame producer
}
static bool
screen_frame_sink_push(struct sc_frame_sink *sink, const AVFrame *frame) {
struct screen *screen = DOWNCAST(sink);
bool previous_frame_skipped;
bool ok = video_buffer_push(&screen->vb, frame, &previous_frame_skipped);
if (!ok) {
return false;
}
if (previous_frame_skipped) {
fps_counter_add_skipped_frame(screen->fps_counter);
// The EVENT_NEW_FRAME triggered for the previous frame will consume
// this new frame instead
} else {
static SDL_Event new_frame_event = {
.type = EVENT_NEW_FRAME,
};
// Post the event on the UI thread
SDL_PushEvent(&new_frame_event);
}
return true;
void
screen_init(struct screen *screen) {
*screen = (struct screen) SCREEN_INITIALIZER;
}
bool
screen_init(struct screen *screen, struct fps_counter *fps_counter,
const struct screen_params *params) {
screen->fps_counter = fps_counter;
screen->resize_pending = false;
screen->has_frame = false;
screen->fullscreen = false;
screen->maximized = false;
bool ok = video_buffer_init(&screen->vb);
if (!ok) {
LOGE("Could not initialize video buffer");
return false;
screen_init_rendering(struct screen *screen, const char *window_title,
struct size frame_size, bool always_on_top,
int16_t window_x, int16_t window_y, uint16_t window_width,
uint16_t window_height, bool window_borderless,
uint8_t rotation, enum sc_scale_filter scale_filter) {
screen->frame_size = frame_size;
screen->rotation = rotation;
if (rotation) {
LOGI("Initial display rotation set to %u", rotation);
}
screen->frame_size = params->frame_size;
screen->rotation = params->rotation;
if (screen->rotation) {
LOGI("Initial display rotation set to %u", screen->rotation);
}
struct size content_size =
get_rotated_size(screen->frame_size, screen->rotation);
struct size content_size = get_rotated_size(frame_size, screen->rotation);
screen->content_size = content_size;
struct size window_size = get_initial_optimal_size(content_size,
params->window_width,
params->window_height);
uint32_t window_flags = SDL_WINDOW_HIDDEN
| SDL_WINDOW_RESIZABLE
| SDL_WINDOW_ALLOW_HIGHDPI;
if (params->always_on_top) {
struct size window_size =
get_initial_optimal_size(content_size, window_width, window_height);
uint32_t window_flags = SDL_WINDOW_HIDDEN | SDL_WINDOW_RESIZABLE;
#ifdef HIDPI_SUPPORT
window_flags |= SDL_WINDOW_ALLOW_HIGHDPI;
#endif
if (always_on_top) {
#ifdef SCRCPY_SDL_HAS_WINDOW_ALWAYS_ON_TOP
window_flags |= SDL_WINDOW_ALWAYS_ON_TOP;
#else
@@ -331,15 +222,15 @@ screen_init(struct screen *screen, struct fps_counter *fps_counter,
"(compile with SDL >= 2.0.5 to enable it)");
#endif
}
if (params->window_borderless) {
if (window_borderless) {
window_flags |= SDL_WINDOW_BORDERLESS;
}
int x = params->window_x != SC_WINDOW_POSITION_UNDEFINED
? params->window_x : (int) SDL_WINDOWPOS_UNDEFINED;
int y = params->window_y != SC_WINDOW_POSITION_UNDEFINED
? params->window_y : (int) SDL_WINDOWPOS_UNDEFINED;
screen->window = SDL_CreateWindow(params->window_title, x, y,
int x = window_x != SC_WINDOW_POSITION_UNDEFINED
? window_x : (int) SDL_WINDOWPOS_UNDEFINED;
int y = window_y != SC_WINDOW_POSITION_UNDEFINED
? window_y : (int) SDL_WINDOWPOS_UNDEFINED;
screen->window = SDL_CreateWindow(window_title, x, y,
window_size.width, window_size.height,
window_flags);
if (!screen->window) {
@@ -352,41 +243,16 @@ screen_init(struct screen *screen, struct fps_counter *fps_counter,
if (!screen->renderer) {
LOGC("Could not create renderer: %s", SDL_GetError());
SDL_DestroyWindow(screen->window);
video_buffer_destroy(&screen->vb);
return false;
}
SDL_RendererInfo renderer_info;
int r = SDL_GetRendererInfo(screen->renderer, &renderer_info);
const char *renderer_name = r ? NULL : renderer_info.name;
LOGI("Renderer: %s", renderer_name ? renderer_name : "(unknown)");
screen->mipmaps = false;
// starts with "opengl"
bool use_opengl = renderer_name && !strncmp(renderer_name, "opengl", 6);
if (use_opengl) {
struct sc_opengl *gl = &screen->gl;
sc_opengl_init(gl);
LOGI("OpenGL version: %s", gl->version);
if (params->mipmaps) {
bool supports_mipmaps =
sc_opengl_version_at_least(gl, 3, 0, /* OpenGL 3.0+ */
2, 0 /* OpenGL ES 2.0+ */);
if (supports_mipmaps) {
LOGI("Trilinear filtering enabled");
screen->mipmaps = true;
} else {
LOGW("Trilinear filtering disabled "
"(OpenGL 3.0+ or ES 2.0+ required)");
}
} else {
LOGI("Trilinear filtering disabled");
}
} else if (params->mipmaps) {
LOGD("Trilinear filtering disabled (not an OpenGL renderer)");
bool ok = sc_frame_texture_init(&screen->ftex, screen->renderer,
scale_filter, frame_size);
if (!ok) {
LOGC("Could not init frame texture");
SDL_DestroyRenderer(screen->renderer);
SDL_DestroyWindow(screen->window);
return false;
}
SDL_Surface *icon = read_xpm(icon_xpm);
@@ -397,27 +263,6 @@ screen_init(struct screen *screen, struct fps_counter *fps_counter,
LOGW("Could not load icon");
}
LOGI("Initial texture: %" PRIu16 "x%" PRIu16, params->frame_size.width,
params->frame_size.height);
screen->texture = create_texture(screen);
if (!screen->texture) {
LOGC("Could not create texture: %s", SDL_GetError());
SDL_DestroyRenderer(screen->renderer);
SDL_DestroyWindow(screen->window);
video_buffer_destroy(&screen->vb);
return false;
}
screen->frame = av_frame_alloc();
if (!screen->frame) {
LOGC("Could not create screen frame");
SDL_DestroyTexture(screen->texture);
SDL_DestroyRenderer(screen->renderer);
SDL_DestroyWindow(screen->window);
video_buffer_destroy(&screen->vb);
return false;
}
// Reset the window size to trigger a SIZE_CHANGED event, to work around
// HiDPI issues with some SDL renderers when several displays with
// different HiDPI scaling are connected
@@ -425,49 +270,19 @@ screen_init(struct screen *screen, struct fps_counter *fps_counter,
screen_update_content_rect(screen);
if (params->fullscreen) {
screen_switch_fullscreen(screen);
}
#ifdef CONTINUOUS_RESIZING_WORKAROUND
SDL_AddEventWatch(event_watcher, screen);
#endif
static const struct sc_frame_sink_ops ops = {
.open = screen_frame_sink_open,
.close = screen_frame_sink_close,
.push = screen_frame_sink_push,
};
screen->frame_sink.ops = &ops;
#ifndef NDEBUG
screen->open = false;
#endif
return true;
}
static void
void
screen_show_window(struct screen *screen) {
SDL_ShowWindow(screen->window);
}
void
screen_hide_window(struct screen *screen) {
SDL_HideWindow(screen->window);
}
void
screen_destroy(struct screen *screen) {
#ifndef NDEBUG
assert(!screen->open);
#endif
av_frame_free(&screen->frame);
SDL_DestroyTexture(screen->texture);
sc_frame_texture_destroy(&screen->ftex);
SDL_DestroyRenderer(screen->renderer);
SDL_DestroyWindow(screen->window);
video_buffer_destroy(&screen->vb);
}
static void
@@ -528,13 +343,10 @@ screen_set_rotation(struct screen *screen, unsigned rotation) {
}
// recreate the texture and resize the window if the frame size has changed
static bool
static void
prepare_for_frame(struct screen *screen, struct size new_frame_size) {
if (screen->frame_size.width != new_frame_size.width
|| screen->frame_size.height != new_frame_size.height) {
// frame dimension changed, destroy texture
SDL_DestroyTexture(screen->texture);
screen->frame_size = new_frame_size;
struct size new_content_size =
@@ -542,47 +354,20 @@ prepare_for_frame(struct screen *screen, struct size new_frame_size) {
set_content_size(screen, new_content_size);
screen_update_content_rect(screen);
LOGI("New texture: %" PRIu16 "x%" PRIu16,
screen->frame_size.width, screen->frame_size.height);
screen->texture = create_texture(screen);
if (!screen->texture) {
LOGC("Could not create texture: %s", SDL_GetError());
return false;
}
}
return true;
}
// write the frame into the texture
static void
update_texture(struct screen *screen, const AVFrame *frame) {
SDL_UpdateYUVTexture(screen->texture, NULL,
frame->data[0], frame->linesize[0],
frame->data[1], frame->linesize[1],
frame->data[2], frame->linesize[2]);
if (screen->mipmaps) {
SDL_GL_BindTexture(screen->texture, NULL, NULL);
screen->gl.GenerateMipmap(GL_TEXTURE_2D);
SDL_GL_UnbindTexture(screen->texture);
}
}
static bool
screen_update_frame(struct screen *screen) {
av_frame_unref(screen->frame);
video_buffer_consume(&screen->vb, screen->frame);
AVFrame *frame = screen->frame;
fps_counter_add_rendered_frame(screen->fps_counter);
bool
screen_update_frame(struct screen *screen, struct video_buffer *vb) {
const AVFrame *frame = video_buffer_take_rendering_frame(vb);
struct size new_frame_size = {frame->width, frame->height};
if (!prepare_for_frame(screen, new_frame_size)) {
prepare_for_frame(screen, new_frame_size);
struct size rect_size = {screen->rect.w, screen->rect.h};
bool ok = sc_frame_texture_update(&screen->ftex, frame, rect_size);
if (!ok) {
return false;
}
update_texture(screen, frame);
screen_render(screen, false);
return true;
@@ -592,11 +377,18 @@ void
screen_render(struct screen *screen, bool update_content_rect) {
if (update_content_rect) {
screen_update_content_rect(screen);
struct size rect_size = {screen->rect.w, screen->rect.h};
if (!sc_frame_texture_resize(&screen->ftex, rect_size)) {
// FIXME return error
LOGC("oops");
}
}
SDL_RenderClear(screen->renderer);
struct sc_frame_texture *ftex = &screen->ftex;
SDL_RenderClear(ftex->renderer);
if (screen->rotation == 0) {
SDL_RenderCopy(screen->renderer, screen->texture, NULL, &screen->rect);
SDL_RenderCopy(screen->renderer, ftex->texture, NULL,
&screen->rect);
} else {
// rotation in RenderCopyEx() is clockwise, while screen->rotation is
// counterclockwise (to be consistent with --lock-video-orientation)
@@ -616,10 +408,10 @@ screen_render(struct screen *screen, bool update_content_rect) {
dstrect = &screen->rect;
}
SDL_RenderCopyEx(screen->renderer, screen->texture, NULL, dstrect,
SDL_RenderCopyEx(screen->renderer, ftex->texture, NULL, dstrect,
angle, NULL, 0);
}
SDL_RenderPresent(screen->renderer);
SDL_RenderPresent(ftex->renderer);
}
void
@@ -669,52 +461,31 @@ screen_resize_to_pixel_perfect(struct screen *screen) {
content_size.height);
}
bool
screen_handle_event(struct screen *screen, SDL_Event *event) {
switch (event->type) {
case EVENT_NEW_FRAME:
if (!screen->has_frame) {
screen->has_frame = true;
// this is the very first frame, show the window
screen_show_window(screen);
void
screen_handle_window_event(struct screen *screen,
const SDL_WindowEvent *event) {
switch (event->event) {
case SDL_WINDOWEVENT_EXPOSED:
screen_render(screen, true);
break;
case SDL_WINDOWEVENT_SIZE_CHANGED:
screen_render(screen, true);
break;
case SDL_WINDOWEVENT_MAXIMIZED:
screen->maximized = true;
break;
case SDL_WINDOWEVENT_RESTORED:
if (screen->fullscreen) {
// On Windows, in maximized+fullscreen, disabling fullscreen
// mode unexpectedly triggers the "restored" then "maximized"
// events, leaving the window in a weird state (maximized
// according to the events, but not maximized visually).
break;
}
bool ok = screen_update_frame(screen);
if (!ok) {
LOGW("Frame update failed\n");
}
return true;
case SDL_WINDOWEVENT:
if (!screen->has_frame) {
// Do nothing
return true;
}
switch (event->window.event) {
case SDL_WINDOWEVENT_EXPOSED:
screen_render(screen, true);
break;
case SDL_WINDOWEVENT_SIZE_CHANGED:
screen_render(screen, true);
break;
case SDL_WINDOWEVENT_MAXIMIZED:
screen->maximized = true;
break;
case SDL_WINDOWEVENT_RESTORED:
if (screen->fullscreen) {
// On Windows, in maximized+fullscreen, disabling
// fullscreen mode unexpectedly triggers the "restored"
// then "maximized" events, leaving the window in a
// weird state (maximized according to the events, but
// not maximized visually).
break;
}
screen->maximized = false;
apply_pending_resize(screen);
break;
}
return true;
screen->maximized = false;
apply_pending_resize(screen);
break;
}
return false;
}
struct point

View File

@@ -8,24 +8,17 @@
#include <libavformat/avformat.h>
#include "coords.h"
#include "frame_texture.h"
#include "opengl.h"
#include "trait/frame_sink.h"
#include "video_buffer.h"
#include "scrcpy.h"
struct video_buffer;
struct screen {
struct sc_frame_sink frame_sink; // frame sink trait
#ifndef NDEBUG
bool open; // track the open/close state to assert correct behavior
#endif
struct video_buffer vb;
struct fps_counter *fps_counter;
SDL_Window *window;
SDL_Renderer *renderer;
SDL_Texture *texture;
struct sc_opengl gl;
struct sc_frame_texture ftex;
struct size frame_size;
struct size content_size; // rotated frame_size
@@ -41,44 +34,63 @@ struct screen {
bool has_frame;
bool fullscreen;
bool maximized;
bool mipmaps;
AVFrame *frame;
bool no_window;
};
struct screen_params {
const char *window_title;
struct size frame_size;
bool always_on_top;
#define SCREEN_INITIALIZER { \
.window = NULL, \
.renderer = NULL, \
.ftex = {0}, \
.frame_size = { \
.width = 0, \
.height = 0, \
}, \
.content_size = { \
.width = 0, \
.height = 0, \
}, \
.resize_pending = false, \
.windowed_content_size = { \
.width = 0, \
.height = 0, \
}, \
.rotation = 0, \
.rect = { \
.x = 0, \
.y = 0, \
.w = 0, \
.h = 0, \
}, \
.has_frame = false, \
.fullscreen = false, \
.maximized = false, \
.no_window = false, \
}
int16_t window_x; // accepts SC_WINDOW_POSITION_UNDEFINED
int16_t window_y; // accepts SC_WINDOW_POSITION_UNDEFINED
uint16_t window_width;
uint16_t window_height;
bool window_borderless;
uint8_t rotation;
bool mipmaps;
bool fullscreen;
};
// initialize default values
void
screen_init(struct screen *screen);
// initialize screen, create window, renderer and texture (window is hidden)
// window_x and window_y accept SC_WINDOW_POSITION_UNDEFINED
bool
screen_init(struct screen *screen, struct fps_counter *fps_counter,
const struct screen_params *params);
screen_init_rendering(struct screen *screen, const char *window_title,
struct size frame_size, bool always_on_top,
int16_t window_x, int16_t window_y, uint16_t window_width,
uint16_t window_height, bool window_borderless,
uint8_t rotation, enum sc_scale_filter scale_filter);
// show the window
void
screen_show_window(struct screen *screen);
// destroy window, renderer and texture (if any)
void
screen_destroy(struct screen *screen);
// hide the window
//
// It is used to hide the window immediately on closing without waiting for
// screen_destroy()
void
screen_hide_window(struct screen *screen);
// resize if necessary and write the rendered frame into the texture
bool
screen_update_frame(struct screen *screen, struct video_buffer *vb);
// render the texture to the renderer
//
@@ -103,9 +115,9 @@ screen_resize_to_pixel_perfect(struct screen *screen);
void
screen_set_rotation(struct screen *screen, unsigned rotation);
// react to SDL events
bool
screen_handle_event(struct screen *screen, SDL_Event *event);
// react to window events
void
screen_handle_window_event(struct screen *screen, const SDL_WindowEvent *event);
// convert point from window coordinates to frame coordinates
// x and y are expressed in pixels

View File

@@ -293,7 +293,6 @@ execute_server(struct server *server, const struct server_params *params) {
params->stay_awake ? "true" : "false",
params->codec_options ? params->codec_options : "-",
params->encoder_name ? params->encoder_name : "-",
params->power_off_on_close ? "true" : "false",
};
#ifdef SERVER_DEBUGGER
LOGI("Server debugger waiting for a client on device port "
@@ -376,6 +375,8 @@ server_init(struct server *server) {
server->video_socket = INVALID_SOCKET;
server->control_socket = INVALID_SOCKET;
server->port_range.first = 0;
server->port_range.last = 0;
server->local_port = 0;
server->tunnel_enabled = false;
@@ -409,6 +410,8 @@ run_wait_server(void *data) {
bool
server_start(struct server *server, const char *serial,
const struct server_params *params) {
server->port_range = params->port_range;
if (serial) {
server->serial = strdup(serial);
if (!server->serial) {

View File

@@ -26,6 +26,7 @@ struct server {
socket_t server_socket; // only used if !tunnel_forward
socket_t video_socket;
socket_t control_socket;
struct sc_port_range port_range;
uint16_t local_port; // selected from port_range
bool tunnel_enabled;
bool tunnel_forward; // use "adb forward" instead of "adb reverse"
@@ -46,7 +47,6 @@ struct server_params {
bool show_touches;
bool stay_awake;
bool force_adb_forward;
bool power_off_on_close;
};
// init default values

View File

@@ -66,11 +66,25 @@ notify_stopped(void) {
}
static bool
push_packet_to_sinks(struct stream *stream, const AVPacket *packet) {
for (unsigned i = 0; i < stream->sink_count; ++i) {
struct sc_packet_sink *sink = stream->sinks[i];
if (!sink->ops->push(sink, packet)) {
LOGE("Could not send config packet to sink %d", i);
process_config_packet(struct stream *stream, AVPacket *packet) {
if (stream->recorder && !recorder_push(stream->recorder, packet)) {
LOGE("Could not send config packet to recorder");
return false;
}
return true;
}
static bool
process_frame(struct stream *stream, AVPacket *packet) {
if (stream->decoder && !decoder_push(stream->decoder, packet)) {
return false;
}
if (stream->recorder) {
packet->dts = packet->pts;
if (!recorder_push(stream->recorder, packet)) {
LOGE("Could not send packet to recorder");
return false;
}
}
@@ -97,11 +111,9 @@ stream_parse(struct stream *stream, AVPacket *packet) {
packet->flags |= AV_PKT_FLAG_KEY;
}
packet->dts = packet->pts;
bool ok = push_packet_to_sinks(stream, packet);
bool ok = process_frame(stream, packet);
if (!ok) {
LOGE("Could not process packet");
LOGE("Could not process frame");
return false;
}
@@ -144,7 +156,7 @@ stream_push_packet(struct stream *stream, AVPacket *packet) {
if (is_config) {
// config packet
bool ok = push_packet_to_sinks(stream, packet);
bool ok = process_config_packet(stream, packet);
if (!ok) {
return false;
}
@@ -165,33 +177,6 @@ stream_push_packet(struct stream *stream, AVPacket *packet) {
return true;
}
static void
stream_close_first_sinks(struct stream *stream, unsigned count) {
while (count) {
struct sc_packet_sink *sink = stream->sinks[--count];
sink->ops->close(sink);
}
}
static inline void
stream_close_sinks(struct stream *stream) {
stream_close_first_sinks(stream, stream->sink_count);
}
static bool
stream_open_sinks(struct stream *stream, const AVCodec *codec) {
for (unsigned i = 0; i < stream->sink_count; ++i) {
struct sc_packet_sink *sink = stream->sinks[i];
if (!sink->ops->open(sink, codec)) {
LOGE("Could not open packet sink %d", i);
stream_close_first_sinks(stream, i);
return false;
}
}
return true;
}
static int
run_stream(void *data) {
struct stream *stream = data;
@@ -208,15 +193,27 @@ run_stream(void *data) {
goto end;
}
if (!stream_open_sinks(stream, codec)) {
LOGE("Could not open stream sinks");
if (stream->decoder && !decoder_open(stream->decoder, codec)) {
LOGE("Could not open decoder");
goto finally_free_codec_ctx;
}
if (stream->recorder) {
if (!recorder_open(stream->recorder, codec)) {
LOGE("Could not open recorder");
goto finally_close_decoder;
}
if (!recorder_start(stream->recorder)) {
LOGE("Could not start recorder");
goto finally_close_recorder;
}
}
stream->parser = av_parser_init(AV_CODEC_ID_H264);
if (!stream->parser) {
LOGE("Could not initialize parser");
goto finally_close_sinks;
goto finally_stop_and_join_recorder;
}
// We must only pass complete frames to av_parser_parse2()!
@@ -246,8 +243,20 @@ run_stream(void *data) {
}
av_parser_close(stream->parser);
finally_close_sinks:
stream_close_sinks(stream);
finally_stop_and_join_recorder:
if (stream->recorder) {
recorder_stop(stream->recorder);
LOGI("Finishing recording...");
recorder_join(stream->recorder);
}
finally_close_recorder:
if (stream->recorder) {
recorder_close(stream->recorder);
}
finally_close_decoder:
if (stream->decoder) {
decoder_close(stream->decoder);
}
finally_free_codec_ctx:
avcodec_free_context(&stream->codec_ctx);
end:
@@ -256,18 +265,12 @@ end:
}
void
stream_init(struct stream *stream, socket_t socket) {
stream_init(struct stream *stream, socket_t socket,
struct decoder *decoder, struct recorder *recorder) {
stream->socket = socket;
stream->decoder = decoder,
stream->recorder = recorder;
stream->has_pending = false;
stream->sink_count = 0;
}
void
stream_add_sink(struct stream *stream, struct sc_packet_sink *sink) {
assert(stream->sink_count < STREAM_MAX_SINKS);
assert(sink);
assert(sink->ops);
stream->sinks[stream->sink_count++] = sink;
}
bool
@@ -282,6 +285,13 @@ stream_start(struct stream *stream) {
return true;
}
void
stream_stop(struct stream *stream) {
if (stream->decoder) {
decoder_interrupt(stream->decoder);
}
}
void
stream_join(struct stream *stream) {
sc_thread_join(&stream->thread, NULL);

View File

@@ -8,19 +8,16 @@
#include <libavformat/avformat.h>
#include <SDL2/SDL_atomic.h>
#include "trait/packet_sink.h"
#include "util/net.h"
#include "util/thread.h"
#define STREAM_MAX_SINKS 2
struct video_buffer;
struct stream {
socket_t socket;
sc_thread thread;
struct sc_packet_sink *sinks[STREAM_MAX_SINKS];
unsigned sink_count;
struct decoder *decoder;
struct recorder *recorder;
AVCodecContext *codec_ctx;
AVCodecParserContext *parser;
// successive packets may need to be concatenated, until a non-config
@@ -30,14 +27,15 @@ struct stream {
};
void
stream_init(struct stream *stream, socket_t socket);
void
stream_add_sink(struct stream *stream, struct sc_packet_sink *sink);
stream_init(struct stream *stream, socket_t socket,
struct decoder *decoder, struct recorder *recorder);
bool
stream_start(struct stream *stream);
void
stream_stop(struct stream *stream);
void
stream_join(struct stream *stream);

View File

@@ -1,26 +0,0 @@
#ifndef SC_FRAME_SINK
#define SC_FRAME_SINK
#include "common.h"
#include <assert.h>
#include <stdbool.h>
typedef struct AVFrame AVFrame;
/**
* Frame sink trait.
*
* Component able to receive AVFrames should implement this trait.
*/
struct sc_frame_sink {
const struct sc_frame_sink_ops *ops;
};
struct sc_frame_sink_ops {
bool (*open)(struct sc_frame_sink *sink);
void (*close)(struct sc_frame_sink *sink);
bool (*push)(struct sc_frame_sink *sink, const AVFrame *frame);
};
#endif
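
For context, a sink implementing this trait follows the pattern already used by screen.c and the v4l2 sink elsewhere in this compare: embed the trait, downcast with container_of(), and register a static ops table. This is only an illustrative sketch; `my_sink` and its functions are hypothetical and not part of the codebase (container_of() comes from common.h, which the trait header includes).

```c
#include "trait/frame_sink.h"

struct my_sink {
    struct sc_frame_sink frame_sink; // frame sink trait
    // ...sink-specific state
};

// recover the concrete type from the trait pointer
#define DOWNCAST(SINK) container_of(SINK, struct my_sink, frame_sink)

static bool
my_sink_open(struct sc_frame_sink *sink) {
    struct my_sink *s = DOWNCAST(sink);
    (void) s;
    return true; // acquire resources here
}

static void
my_sink_close(struct sc_frame_sink *sink) {
    struct my_sink *s = DOWNCAST(sink);
    (void) s; // release resources here
}

static bool
my_sink_push(struct sc_frame_sink *sink, const AVFrame *frame) {
    struct my_sink *s = DOWNCAST(sink);
    (void) s;
    (void) frame;
    return true; // consume the frame here
}

void
my_sink_init(struct my_sink *s) {
    static const struct sc_frame_sink_ops ops = {
        .open = my_sink_open,
        .close = my_sink_close,
        .push = my_sink_push,
    };
    s->frame_sink.ops = &ops;
}
```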

View File

@@ -1,27 +0,0 @@
#ifndef SC_PACKET_SINK
#define SC_PACKET_SINK
#include "common.h"
#include <assert.h>
#include <stdbool.h>
typedef struct AVCodec AVCodec;
typedef struct AVPacket AVPacket;
/**
* Packet sink trait.
*
* Component able to receive AVPackets should implement this trait.
*/
struct sc_packet_sink {
const struct sc_packet_sink_ops *ops;
};
struct sc_packet_sink_ops {
bool (*open)(struct sc_packet_sink *sink, const AVCodec *codec);
void (*close)(struct sc_packet_sink *sink);
bool (*push)(struct sc_packet_sink *sink, const AVPacket *packet);
};
#endif

View File

@@ -140,24 +140,6 @@ parse_integer_with_suffix(const char *s, long *out) {
return true;
}
bool
strlist_contains(const char *list, char sep, const char *s) {
char *p;
do {
p = strchr(list, sep);
size_t token_len = p ? (size_t) (p - list) : strlen(list);
if (!strncmp(list, s, token_len)) {
return true;
}
if (p) {
list = p + 1;
}
} while (p);
return false;
}
size_t
utf8_truncation_index(const char *utf8, size_t max_len) {
size_t len = strlen(utf8);

View File

@@ -43,11 +43,6 @@ parse_integers(const char *s, const char sep, size_t max_items, long *out);
bool
parse_integer_with_suffix(const char *s, long *out);
// search s in the list separated by sep
// for example, strlist_contains("a,bc,def", ',', "bc") returns true
bool
strlist_contains(const char *list, char sep, const char *s);
// return the index to truncate a UTF-8 string at a valid position
size_t
utf8_truncation_index(const char *utf8, size_t max_len);

View File

@@ -1,265 +0,0 @@
#include "v4l2_sink.h"
#include "util/log.h"
#include "util/str_util.h"
/** Downcast frame_sink to sc_v4l2_sink */
#define DOWNCAST(SINK) container_of(SINK, struct sc_v4l2_sink, frame_sink)
static const AVOutputFormat *
find_muxer(const char *name) {
#ifdef SCRCPY_LAVF_HAS_NEW_MUXER_ITERATOR_API
void *opaque = NULL;
#endif
const AVOutputFormat *oformat = NULL;
do {
#ifdef SCRCPY_LAVF_HAS_NEW_MUXER_ITERATOR_API
oformat = av_muxer_iterate(&opaque);
#else
oformat = av_oformat_next(oformat);
#endif
// until null or containing the requested name
} while (oformat && !strlist_contains(oformat->name, ',', name));
return oformat;
}
static bool
encode_and_send_frame(struct sc_v4l2_sink *vs, const AVFrame *frame) {
int ret = avcodec_send_frame(vs->encoder_ctx, frame);
if (ret < 0 && ret != AVERROR(EAGAIN)) {
LOGE("Could not send v4l2 video frame: %d", ret);
return false;
}
AVPacket *packet = &vs->packet;
ret = avcodec_receive_packet(vs->encoder_ctx, packet);
if (ret == 0) {
// A packet was received
av_write_frame(vs->format_ctx, packet);
av_packet_unref(packet);
} else if (ret != AVERROR(EAGAIN)) {
LOGE("Could not receive v4l2 video packet: %d", ret);
return false;
}
return true;
}
static int
run_v4l2_sink(void *data) {
struct sc_v4l2_sink *vs = data;
for (;;) {
sc_mutex_lock(&vs->mutex);
while (!vs->stopped && vs->vb.pending_frame_consumed) {
sc_cond_wait(&vs->cond, &vs->mutex);
}
if (vs->stopped) {
sc_mutex_unlock(&vs->mutex);
break;
}
sc_mutex_unlock(&vs->mutex);
video_buffer_consume(&vs->vb, vs->frame);
bool ok = encode_and_send_frame(vs, vs->frame);
if (!ok) {
LOGE("Could not send frame to v4l2 sink");
break;
}
}
LOGD("V4l2 thread ended");
return 0;
}
static bool
sc_v4l2_sink_open(struct sc_v4l2_sink *vs) {
bool ok = video_buffer_init(&vs->vb);
if (!ok) {
return false;
}
ok = sc_mutex_init(&vs->mutex);
if (!ok) {
LOGC("Could not create mutex");
goto error_video_buffer_destroy;
}
ok = sc_cond_init(&vs->cond);
if (!ok) {
LOGC("Could not create cond");
goto error_mutex_destroy;
}
// FIXME
const AVOutputFormat *format = find_muxer("video4linux2,v4l2");
if (!format) {
LOGE("Could not find v4l2 muxer");
goto error_cond_destroy;
}
const AVCodec *encoder = avcodec_find_encoder(AV_CODEC_ID_RAWVIDEO);
if (!encoder) {
LOGE("Raw video encoder not found");
return false;
}
vs->format_ctx = avformat_alloc_context();
if (!vs->format_ctx) {
LOGE("Could not allocate v4l2 output context");
return false;
}
// contrary to the deprecated API (av_oformat_next()), av_muxer_iterate()
// returns (on purpose) a pointer-to-const, but AVFormatContext.oformat
// still expects a pointer-to-non-const (it has not been updated accordingly)
// <https://github.com/FFmpeg/FFmpeg/commit/0694d8702421e7aff1340038559c438b61bb30dd>
vs->format_ctx->oformat = (AVOutputFormat *) format;
AVStream *ostream = avformat_new_stream(vs->format_ctx, encoder);
if (!ostream) {
LOGE("Could not allocate new v4l2 stream");
goto error_avformat_free_context;
}
ostream->codecpar->codec_type = AVMEDIA_TYPE_VIDEO;
ostream->codecpar->codec_id = encoder->id;
ostream->codecpar->format = AV_PIX_FMT_YUV420P;
ostream->codecpar->width = vs->frame_size.width;
ostream->codecpar->height = vs->frame_size.height;
int ret = avio_open(&vs->format_ctx->pb, vs->device_name, AVIO_FLAG_WRITE);
if (ret < 0) {
LOGE("Failed to open output device: %s", vs->device_name);
// ostream will be cleaned up during context cleaning
goto error_avformat_free_context;
}
vs->encoder_ctx = avcodec_alloc_context3(encoder);
if (!vs->encoder_ctx) {
LOGC("Could not allocate codec context for v4l2");
goto error_avio_close;
}
vs->encoder_ctx->width = vs->frame_size.width;
vs->encoder_ctx->height = vs->frame_size.height;
vs->encoder_ctx->pix_fmt = AV_PIX_FMT_YUV420P;
vs->encoder_ctx->time_base.num = 1;
vs->encoder_ctx->time_base.den = 1;
if (avcodec_open2(vs->encoder_ctx, encoder, NULL) < 0) {
LOGE("Could not open codec for v4l2");
goto error_avcodec_free_context;
}
vs->frame = av_frame_alloc();
if (!vs->frame) {
LOGE("Could not create v4l2 frame");
goto error_avcodec_close;
}
LOGD("Starting v4l2 thread");
ok = sc_thread_create(&vs->thread, run_v4l2_sink, "v4l2", vs);
if (!ok) {
LOGC("Could not start v4l2 thread");
goto error_av_frame_free;
}
LOGI("v4l2 sink started to device: %s", vs->device_name);
return true;
error_av_frame_free:
av_frame_free(&vs->frame);
error_avcodec_close:
avcodec_close(vs->encoder_ctx);
error_avcodec_free_context:
avcodec_free_context(&vs->encoder_ctx);
error_avio_close:
avio_close(vs->format_ctx->pb);
error_avformat_free_context:
avformat_free_context(vs->format_ctx);
error_cond_destroy:
sc_cond_destroy(&vs->cond);
error_mutex_destroy:
sc_mutex_destroy(&vs->mutex);
error_video_buffer_destroy:
video_buffer_destroy(&vs->vb);
return false;
}
static void
sc_v4l2_sink_close(struct sc_v4l2_sink *vs) {
av_frame_free(&vs->frame);
avcodec_close(vs->encoder_ctx);
avcodec_free_context(&vs->encoder_ctx);
avio_close(vs->format_ctx->pb);
avformat_free_context(vs->format_ctx);
sc_cond_destroy(&vs->cond);
sc_mutex_destroy(&vs->mutex);
video_buffer_destroy(&vs->vb);
}
static bool
sc_v4l2_sink_push(struct sc_v4l2_sink *vs, const AVFrame *frame) {
bool ok = video_buffer_push(&vs->vb, frame, NULL);
if (!ok) {
return false;
}
// signal possible change of vs->vb.pending_frame_consumed
sc_cond_signal(&vs->cond);
return true;
}
static bool
sc_v4l2_frame_sink_open(struct sc_frame_sink *sink) {
struct sc_v4l2_sink *vs = DOWNCAST(sink);
return sc_v4l2_sink_open(vs);
}
static void
sc_v4l2_frame_sink_close(struct sc_frame_sink *sink) {
struct sc_v4l2_sink *vs = DOWNCAST(sink);
sc_v4l2_sink_close(vs);
}
static bool
sc_v4l2_frame_sink_push(struct sc_frame_sink *sink, const AVFrame *frame) {
struct sc_v4l2_sink *vs = DOWNCAST(sink);
return sc_v4l2_sink_push(vs, frame);
}
bool
sc_v4l2_sink_init(struct sc_v4l2_sink *vs, const char *device_name,
struct size frame_size) {
vs->device_name = strdup(device_name);
if (!vs->device_name) {
LOGE("Could not strdup v4l2 device name");
return false;
}
vs->frame_size = frame_size;
static const struct sc_frame_sink_ops ops = {
.open = sc_v4l2_frame_sink_open,
.close = sc_v4l2_frame_sink_close,
.push = sc_v4l2_frame_sink_push,
};
vs->frame_sink.ops = &ops;
return true;
}
void
sc_v4l2_sink_destroy(struct sc_v4l2_sink *vs) {
free(vs->device_name);
}

View File

@@ -1,38 +0,0 @@
#ifndef SC_V4L2_SINK_H
#define SC_V4L2_SINK_H
#include "common.h"
#include "coords.h"
#include "trait/frame_sink.h"
#include "video_buffer.h"
#include <libavformat/avformat.h>
struct sc_v4l2_sink {
struct sc_frame_sink frame_sink; // frame sink trait
struct video_buffer vb;
AVFormatContext *format_ctx;
AVCodecContext *encoder_ctx;
char *device_name;
struct size frame_size;
sc_thread thread;
sc_mutex mutex;
sc_cond cond;
bool stopped;
AVFrame *frame;
AVPacket packet;
};
bool
sc_v4l2_sink_init(struct sc_v4l2_sink *vs, const char *device_name,
struct size frame_size);
void
sc_v4l2_sink_destroy(struct sc_v4l2_sink *vs);
#endif

View File

@@ -7,82 +7,133 @@
#include "util/log.h"
bool
video_buffer_init(struct video_buffer *vb) {
vb->pending_frame = av_frame_alloc();
if (!vb->pending_frame) {
return false;
video_buffer_init(struct video_buffer *vb, struct fps_counter *fps_counter,
bool render_expired_frames) {
vb->fps_counter = fps_counter;
vb->decoding_frame = av_frame_alloc();
if (!vb->decoding_frame) {
goto error_0;
}
vb->tmp_frame = av_frame_alloc();
if (!vb->tmp_frame) {
av_frame_free(&vb->pending_frame);
return false;
vb->pending_frame = av_frame_alloc();
if (!vb->pending_frame) {
goto error_1;
}
vb->rendering_frame = av_frame_alloc();
if (!vb->rendering_frame) {
goto error_2;
}
bool ok = sc_mutex_init(&vb->mutex);
if (!ok) {
av_frame_free(&vb->pending_frame);
av_frame_free(&vb->tmp_frame);
return false;
goto error_3;
}
// there is initially no frame, so consider it has already been consumed
vb->render_expired_frames = render_expired_frames;
if (render_expired_frames) {
ok = sc_cond_init(&vb->pending_frame_consumed_cond);
if (!ok) {
sc_mutex_destroy(&vb->mutex);
goto error_3;
}
// interrupted is not used if expired frames are not rendered
// since offering a frame will never block
vb->interrupted = false;
}
// there is initially no rendering frame, so consider it has already been
// consumed
vb->pending_frame_consumed = true;
return true;
error_3:
av_frame_free(&vb->rendering_frame);
error_2:
av_frame_free(&vb->pending_frame);
error_1:
av_frame_free(&vb->decoding_frame);
error_0:
return false;
}
void
video_buffer_destroy(struct video_buffer *vb) {
if (vb->render_expired_frames) {
sc_cond_destroy(&vb->pending_frame_consumed_cond);
}
sc_mutex_destroy(&vb->mutex);
av_frame_free(&vb->rendering_frame);
av_frame_free(&vb->pending_frame);
av_frame_free(&vb->tmp_frame);
av_frame_free(&vb->decoding_frame);
}
static inline void
swap_frames(AVFrame **lhs, AVFrame **rhs) {
AVFrame *tmp = *lhs;
*lhs = *rhs;
*rhs = tmp;
static void
video_buffer_swap_decoding_frame(struct video_buffer *vb) {
sc_mutex_assert(&vb->mutex);
AVFrame *tmp = vb->decoding_frame;
vb->decoding_frame = vb->pending_frame;
vb->pending_frame = tmp;
}
bool
video_buffer_push(struct video_buffer *vb, const AVFrame *frame,
bool *previous_frame_skipped) {
sc_mutex_lock(&vb->mutex);
// Use a temporary frame to preserve pending_frame in case of error.
// tmp_frame is an empty frame, no need to call av_frame_unref() beforehand.
int r = av_frame_ref(vb->tmp_frame, frame);
if (r) {
LOGE("Could not ref frame: %d", r);
return false;
}
// Now that av_frame_ref() succeeded, we can replace the previous
// pending_frame
swap_frames(&vb->pending_frame, &vb->tmp_frame);
av_frame_unref(vb->tmp_frame);
if (previous_frame_skipped) {
*previous_frame_skipped = !vb->pending_frame_consumed;
}
vb->pending_frame_consumed = false;
sc_mutex_unlock(&vb->mutex);
return true;
static void
video_buffer_swap_rendering_frame(struct video_buffer *vb) {
sc_mutex_assert(&vb->mutex);
AVFrame *tmp = vb->rendering_frame;
vb->rendering_frame = vb->pending_frame;
vb->pending_frame = tmp;
}
void
video_buffer_consume(struct video_buffer *vb, AVFrame *dst) {
video_buffer_offer_decoded_frame(struct video_buffer *vb,
bool *previous_frame_skipped) {
sc_mutex_lock(&vb->mutex);
if (vb->render_expired_frames) {
// wait for the current (expired) frame to be consumed
while (!vb->pending_frame_consumed && !vb->interrupted) {
sc_cond_wait(&vb->pending_frame_consumed_cond, &vb->mutex);
}
} else if (!vb->pending_frame_consumed) {
fps_counter_add_skipped_frame(vb->fps_counter);
}
video_buffer_swap_decoding_frame(vb);
*previous_frame_skipped = !vb->pending_frame_consumed;
vb->pending_frame_consumed = false;
sc_mutex_unlock(&vb->mutex);
}
const AVFrame *
video_buffer_take_rendering_frame(struct video_buffer *vb) {
sc_mutex_lock(&vb->mutex);
assert(!vb->pending_frame_consumed);
vb->pending_frame_consumed = true;
av_frame_move_ref(dst, vb->pending_frame);
// av_frame_move_ref() resets its source frame, so no need to call
// av_frame_unref()
fps_counter_add_rendered_frame(vb->fps_counter);
video_buffer_swap_rendering_frame(vb);
if (vb->render_expired_frames) {
// unblock video_buffer_offer_decoded_frame()
sc_cond_signal(&vb->pending_frame_consumed_cond);
}
sc_mutex_unlock(&vb->mutex);
// rendering_frame is only written from this thread, no need to lock
return vb->rendering_frame;
}
void
video_buffer_interrupt(struct video_buffer *vb) {
if (vb->render_expired_frames) {
sc_mutex_lock(&vb->mutex);
vb->interrupted = true;
sc_mutex_unlock(&vb->mutex);
// wake up blocking wait
sc_cond_signal(&vb->pending_frame_consumed_cond);
}
}

View File

@@ -12,39 +12,57 @@
typedef struct AVFrame AVFrame;
/**
* A video buffer holds 1 pending frame, which is the last frame received from
* the producer (typically, the decoder).
* There are 3 frames in memory:
* - one frame is held by the decoder (decoding_frame)
* - one frame is held by the renderer (rendering_frame)
* - one frame is shared between the decoder and the renderer (pending_frame)
*
* If a pending frame has not been consumed when the producer pushes a new
* frame, then it is lost. The intent is to always provide access to the very
* last frame to minimize latency.
* The decoder decodes a packet into the decoding_frame (it may take time).
*
* The producer and the consumer typically do not live in the same thread.
* That's the reason why the callback on_frame_available() does not provide the
* frame as parameter: the consumer might post an event to its own thread to
* retrieve the pending frame from there, and that frame may have changed since
* the callback if producer pushed a new one in between.
* Once the frame is decoded, the decoder calls video_buffer_offer_decoded_frame(),
* which swaps the decoding and pending frames.
*
* When the renderer is notified that a new frame is available, it calls
* video_buffer_take_rendering_frame() to retrieve it, which swaps the pending
* and rendering frames. The frame is valid until the next call, without
* blocking the decoder.
*/
struct video_buffer {
AVFrame *decoding_frame;
AVFrame *pending_frame;
AVFrame *tmp_frame; // To preserve the pending frame on error
AVFrame *rendering_frame;
sc_mutex mutex;
bool render_expired_frames;
bool interrupted;
sc_cond pending_frame_consumed_cond;
bool pending_frame_consumed;
struct fps_counter *fps_counter;
};
bool
video_buffer_init(struct video_buffer *vb);
video_buffer_init(struct video_buffer *vb, struct fps_counter *fps_counter,
bool render_expired_frames);
void
video_buffer_destroy(struct video_buffer *vb);
bool
video_buffer_push(struct video_buffer *vb, const AVFrame *frame, bool *skipped);
// set the decoded frame as ready for rendering
// the output flag is set to report whether the previous frame has been skipped
void
video_buffer_consume(struct video_buffer *vb, AVFrame *dst);
video_buffer_offer_decoded_frame(struct video_buffer *vb,
bool *previous_frame_skipped);
// mark the rendering frame as consumed and return it
// the frame is valid until the next call to this function
const AVFrame *
video_buffer_take_rendering_frame(struct video_buffer *vb);
// wake up and avoid any blocking call
void
video_buffer_interrupt(struct video_buffer *vb);
#endif
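
For reference, a minimal sketch of how the two sides of this API are expected to cooperate, assuming (as elsewhere in this compare) that the renderer is notified on the main thread via the EVENT_NEW_FRAME SDL event; the helper names below are illustrative only, not functions from the codebase:

```c
#include <SDL2/SDL.h>
#include "events.h"
#include "screen.h"
#include "video_buffer.h"
#include "util/log.h"

// Producer side (decoder thread): the decoder has just written into
// vb->decoding_frame and now publishes it as the pending frame.
static void
publish_decoded_frame(struct video_buffer *vb) {
    bool previous_frame_skipped;
    video_buffer_offer_decoded_frame(vb, &previous_frame_skipped);
    if (previous_frame_skipped) {
        // an EVENT_NEW_FRAME is already queued; it will pick up this frame
        return;
    }
    static SDL_Event new_frame_event = {
        .type = EVENT_NEW_FRAME,
    };
    SDL_PushEvent(&new_frame_event);
}

// Consumer side (main thread): on EVENT_NEW_FRAME, screen_update_frame()
// calls video_buffer_take_rendering_frame() internally; the returned frame
// stays valid until the next call, so the decoder is never blocked.
static void
handle_new_frame(struct screen *screen, struct video_buffer *vb) {
    if (!screen_update_frame(screen, vb)) {
        LOGW("Frame update failed");
    }
}
```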

View File

@@ -58,6 +58,7 @@ static void test_options(void) {
"--push-target", "/sdcard/Movies",
"--record", "file",
"--record-format", "mkv",
"--render-expired-frames",
"--serial", "0123456789abcdef",
"--show-touches",
"--turn-screen-off",
@@ -86,6 +87,7 @@ static void test_options(void) {
assert(!strcmp(opts->push_target, "/sdcard/Movies"));
assert(!strcmp(opts->record_filename, "file"));
assert(opts->record_format == SC_RECORD_FORMAT_MKV);
assert(opts->render_expired_frames);
assert(!strcmp(opts->serial, "0123456789abcdef"));
assert(opts->show_touches);
assert(opts->turn_screen_off);

View File

@@ -146,18 +146,14 @@ static void test_serialize_inject_scroll_event(void) {
static void test_serialize_back_or_screen_on(void) {
struct control_msg msg = {
.type = CONTROL_MSG_TYPE_BACK_OR_SCREEN_ON,
.back_or_screen_on = {
.action = AKEY_EVENT_ACTION_UP,
},
};
unsigned char buf[CONTROL_MSG_MAX_SIZE];
size_t size = control_msg_serialize(&msg, buf);
assert(size == 2);
assert(size == 1);
const unsigned char expected[] = {
CONTROL_MSG_TYPE_BACK_OR_SCREEN_ON,
0x01, // AKEY_EVENT_ACTION_UP
};
assert(!memcmp(buf, expected, sizeof(expected)));
}

View File

@@ -287,18 +287,6 @@ static void test_parse_integer_with_suffix(void) {
assert(!ok);
}
static void test_strlist_contains(void) {
assert(strlist_contains("a,bc,def", ',', "bc"));
assert(!strlist_contains("a,bc,def", ',', "b"));
assert(strlist_contains("", ',', ""));
assert(strlist_contains("abc,", ',', ""));
assert(strlist_contains(",abc", ',', ""));
assert(strlist_contains("abc,,def", ',', ""));
assert(!strlist_contains("abc", ',', ""));
assert(strlist_contains(",,|x", '|', ",,"));
assert(strlist_contains("xyz", '\0', "xyz"));
}
int main(int argc, char *argv[]) {
(void) argc;
(void) argv;
@@ -316,6 +304,5 @@ int main(int argc, char *argv[]) {
test_parse_integer();
test_parse_integers();
test_parse_integer_with_suffix();
test_strlist_contains();
return 0;
}

View File

@@ -2,11 +2,11 @@
[binaries]
name = 'mingw'
c = 'i686-w64-mingw32-gcc'
cpp = 'i686-w64-mingw32-g++'
ar = 'i686-w64-mingw32-ar'
strip = 'i686-w64-mingw32-strip'
pkgconfig = 'i686-w64-mingw32-pkg-config'
c = '/usr/bin/i686-w64-mingw32-gcc'
cpp = '/usr/bin/i686-w64-mingw32-g++'
ar = '/usr/bin/i686-w64-mingw32-ar'
strip = '/usr/bin/i686-w64-mingw32-strip'
pkgconfig = '/usr/bin/i686-w64-mingw32-pkg-config'
[host_machine]
system = 'windows'

View File

@@ -2,11 +2,11 @@
[binaries]
name = 'mingw'
c = 'x86_64-w64-mingw32-gcc'
cpp = 'x86_64-w64-mingw32-g++'
ar = 'x86_64-w64-mingw32-ar'
strip = 'x86_64-w64-mingw32-strip'
pkgconfig = 'x86_64-w64-mingw32-pkg-config'
c = '/usr/bin/x86_64-w64-mingw32-gcc'
cpp = '/usr/bin/x86_64-w64-mingw32-g++'
ar = '/usr/bin/x86_64-w64-mingw32-ar'
strip = '/usr/bin/x86_64-w64-mingw32-strip'
pkgconfig = '/usr/bin/x86_64-w64-mingw32-pkg-config'
[host_machine]
system = 'windows'

View File

@@ -1,7 +1 @@
strCommand = "cmd /c scrcpy.exe"
For Each Arg In WScript.Arguments
strCommand = strCommand & " """ & replace(Arg, """", """""""""") & """"
Next
CreateObject("Wscript.Shell").Run strCommand, 0, false
CreateObject("Wscript.Shell").Run "cmd /c scrcpy.exe", 0, false

View File

@@ -4,7 +4,6 @@ project('scrcpy', 'c',
default_options: [
'c_std=c11',
'warning_level=2',
'b_ndebug=if-release',
])
if get_option('compile_app')

View File

@@ -3,5 +3,6 @@ option('compile_server', type: 'boolean', value: true, description: 'Build the s
option('crossbuild_windows', type: 'boolean', value: false, description: 'Build for Windows from Linux')
option('prebuilt_server', type: 'string', description: 'Path of the prebuilt server')
option('portable', type: 'boolean', value: false, description: 'Use scrcpy-server from the same directory as the scrcpy executable')
option('hidpi_support', type: 'boolean', value: true, description: 'Enable High DPI support')
option('server_debugger', type: 'boolean', value: false, description: 'Run a server debugger and wait for a client to be attached')
option('server_debugger_method', type: 'combo', choices: ['old', 'new'], value: 'new', description: 'Select the debugger method (Android < 9: "old", Android >= 9: "new")')

View File

@@ -19,21 +19,19 @@ public final class CleanUp {
// not instantiable
}
public static void configure(boolean disableShowTouches, int restoreStayOn, boolean restoreNormalPowerMode, boolean powerOffScreen, int displayId)
throws IOException {
boolean needProcess = disableShowTouches || restoreStayOn != -1 || restoreNormalPowerMode || powerOffScreen;
public static void configure(boolean disableShowTouches, int restoreStayOn, boolean restoreNormalPowerMode) throws IOException {
boolean needProcess = disableShowTouches || restoreStayOn != -1 || restoreNormalPowerMode;
if (needProcess) {
startProcess(disableShowTouches, restoreStayOn, restoreNormalPowerMode, powerOffScreen, displayId);
startProcess(disableShowTouches, restoreStayOn, restoreNormalPowerMode);
} else {
// There is no additional clean up to do when scrcpy dies
unlinkSelf();
}
}
private static void startProcess(boolean disableShowTouches, int restoreStayOn, boolean restoreNormalPowerMode, boolean powerOffScreen,
int displayId) throws IOException {
private static void startProcess(boolean disableShowTouches, int restoreStayOn, boolean restoreNormalPowerMode) throws IOException {
String[] cmd = {"app_process", "/", CleanUp.class.getName(), String.valueOf(disableShowTouches), String.valueOf(
restoreStayOn), String.valueOf(restoreNormalPowerMode), String.valueOf(powerOffScreen), String.valueOf(displayId)};
restoreStayOn), String.valueOf(restoreNormalPowerMode)};
ProcessBuilder builder = new ProcessBuilder(cmd);
builder.environment().put("CLASSPATH", SERVER_PATH);
@@ -63,8 +61,6 @@ public final class CleanUp {
boolean disableShowTouches = Boolean.parseBoolean(args[0]);
int restoreStayOn = Integer.parseInt(args[1]);
boolean restoreNormalPowerMode = Boolean.parseBoolean(args[2]);
boolean powerOffScreen = Boolean.parseBoolean(args[3]);
int displayId = Integer.parseInt(args[4]);
if (disableShowTouches || restoreStayOn != -1) {
ServiceManager serviceManager = new ServiceManager();
@@ -80,12 +76,9 @@ public final class CleanUp {
}
}
if (Device.isScreenOn()) {
if (powerOffScreen) {
Ln.i("Power off screen");
Device.powerOffScreen(displayId);
} else if (restoreNormalPowerMode) {
Ln.i("Restoring normal power mode");
if (restoreNormalPowerMode) {
Ln.i("Restoring normal power mode");
if (Device.isScreenOn()) {
Device.setScreenPowerMode(Device.POWER_MODE_NORMAL);
}
}

View File

@@ -71,13 +71,6 @@ public final class ControlMessage {
return msg;
}
public static ControlMessage createBackOrScreenOn(int action) {
ControlMessage msg = new ControlMessage();
msg.type = TYPE_BACK_OR_SCREEN_ON;
msg.action = action;
return msg;
}
public static ControlMessage createSetClipboard(String text, boolean paste) {
ControlMessage msg = new ControlMessage();
msg.type = TYPE_SET_CLIPBOARD;

View File

@@ -11,7 +11,6 @@ public class ControlMessageReader {
static final int INJECT_KEYCODE_PAYLOAD_LENGTH = 13;
static final int INJECT_TOUCH_EVENT_PAYLOAD_LENGTH = 27;
static final int INJECT_SCROLL_EVENT_PAYLOAD_LENGTH = 20;
static final int BACK_OR_SCREEN_ON_LENGTH = 1;
static final int SET_SCREEN_POWER_MODE_PAYLOAD_LENGTH = 1;
static final int SET_CLIPBOARD_FIXED_PAYLOAD_LENGTH = 1;
@@ -67,15 +66,13 @@ public class ControlMessageReader {
case ControlMessage.TYPE_INJECT_SCROLL_EVENT:
msg = parseInjectScrollEvent();
break;
case ControlMessage.TYPE_BACK_OR_SCREEN_ON:
msg = parseBackOrScreenOnEvent();
break;
case ControlMessage.TYPE_SET_CLIPBOARD:
msg = parseSetClipboard();
break;
case ControlMessage.TYPE_SET_SCREEN_POWER_MODE:
msg = parseSetScreenPowerMode();
break;
case ControlMessage.TYPE_BACK_OR_SCREEN_ON:
case ControlMessage.TYPE_EXPAND_NOTIFICATION_PANEL:
case ControlMessage.TYPE_COLLAPSE_NOTIFICATION_PANEL:
case ControlMessage.TYPE_GET_CLIPBOARD:
@@ -153,14 +150,6 @@ public class ControlMessageReader {
return ControlMessage.createInjectScrollEvent(position, hScroll, vScroll);
}
private ControlMessage parseBackOrScreenOnEvent() {
if (buffer.remaining() < BACK_OR_SCREEN_ON_LENGTH) {
return null;
}
int action = toUnsigned(buffer.get());
return ControlMessage.createBackOrScreenOn(action);
}
private ControlMessage parseSetClipboard() {
if (buffer.remaining() < SET_CLIPBOARD_FIXED_PAYLOAD_LENGTH) {
return null;

View File

@@ -14,7 +14,7 @@ import java.util.concurrent.TimeUnit;
public class Controller {
private static final int DEFAULT_DEVICE_ID = 0;
private static final int DEVICE_ID_VIRTUAL = -1;
private static final ScheduledExecutorService EXECUTOR = Executors.newSingleThreadScheduledExecutor();
@@ -45,7 +45,7 @@ public class Controller {
MotionEvent.PointerCoords coords = new MotionEvent.PointerCoords();
coords.orientation = 0;
coords.size = 0;
coords.size = 1;
pointerProperties[i] = props;
pointerCoords[i] = coords;
@@ -101,7 +101,7 @@ public class Controller {
break;
case ControlMessage.TYPE_BACK_OR_SCREEN_ON:
if (device.supportsInputEvents()) {
pressBackOrTurnScreenOn(msg.getAction());
pressBackOrTurnScreenOn();
}
break;
case ControlMessage.TYPE_EXPAND_NOTIFICATION_PANEL:
@@ -208,13 +208,9 @@ public class Controller {
// Right-click and middle-click only work if the source is a mouse
boolean nonPrimaryButtonPressed = (buttons & ~MotionEvent.BUTTON_PRIMARY) != 0;
int source = nonPrimaryButtonPressed ? InputDevice.SOURCE_MOUSE : InputDevice.SOURCE_TOUCHSCREEN;
if (source != InputDevice.SOURCE_MOUSE) {
// Buttons must not be set for touch events
buttons = 0;
}
MotionEvent event = MotionEvent
.obtain(lastTouchDown, now, action, pointerCount, pointerProperties, pointerCoords, 0, buttons, 1f, 1f, DEFAULT_DEVICE_ID, 0, source,
.obtain(lastTouchDown, now, action, pointerCount, pointerProperties, pointerCoords, 0, buttons, 1f, 1f, DEVICE_ID_VIRTUAL, 0, source,
0);
return device.injectEvent(event);
}
@@ -237,7 +233,7 @@ public class Controller {
coords.setAxisValue(MotionEvent.AXIS_VSCROLL, vScroll);
MotionEvent event = MotionEvent
.obtain(lastTouchDown, now, MotionEvent.ACTION_SCROLL, 1, pointerProperties, pointerCoords, 0, 0, 1f, 1f, DEFAULT_DEVICE_ID, 0,
.obtain(lastTouchDown, now, MotionEvent.ACTION_SCROLL, 1, pointerProperties, pointerCoords, 0, 0, 1f, 1f, DEVICE_ID_VIRTUAL, 0,
InputDevice.SOURCE_TOUCHSCREEN, 0);
return device.injectEvent(event);
}
@@ -255,22 +251,12 @@ public class Controller {
}, 200, TimeUnit.MILLISECONDS);
}
private boolean pressBackOrTurnScreenOn(int action) {
if (Device.isScreenOn()) {
return device.injectKeyEvent(action, KeyEvent.KEYCODE_BACK, 0, 0);
}
// Screen is off
// Only press POWER on ACTION_DOWN
if (action != KeyEvent.ACTION_DOWN) {
// do nothing,
return true;
}
if (keepPowerModeOff) {
private boolean pressBackOrTurnScreenOn() {
int keycode = Device.isScreenOn() ? KeyEvent.KEYCODE_BACK : KeyEvent.KEYCODE_POWER;
if (keepPowerModeOff && keycode == KeyEvent.KEYCODE_POWER) {
schedulePowerModeOff();
}
return device.injectKeycode(KeyEvent.KEYCODE_POWER);
return device.injectKeycode(keycode);
}
private boolean setClipboard(String text, boolean paste) {

View File

@@ -153,17 +153,13 @@ public final class Device {
return Build.MODEL;
}
public static boolean supportsInputEvents(int displayId) {
return displayId == 0 || Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q;
}
public boolean supportsInputEvents() {
return supportsInputEvents;
}
public static boolean injectEvent(InputEvent inputEvent, int mode, int displayId) {
if (!supportsInputEvents(displayId)) {
return false;
public boolean injectEvent(InputEvent inputEvent, int mode) {
if (!supportsInputEvents()) {
throw new AssertionError("Could not inject input event if !supportsInputEvents()");
}
if (displayId != 0 && !InputManager.setDisplayId(inputEvent, displayId)) {
@@ -173,29 +169,10 @@ public final class Device {
return SERVICE_MANAGER.getInputManager().injectInputEvent(inputEvent, mode);
}
public boolean injectEvent(InputEvent inputEvent, int mode) {
if (!supportsInputEvents()) {
throw new AssertionError("Could not inject input event if !supportsInputEvents()");
}
return injectEvent(inputEvent, mode, displayId);
}
public static boolean injectEventOnDisplay(InputEvent event, int displayId) {
return injectEvent(event, InputManager.INJECT_INPUT_EVENT_MODE_ASYNC, displayId);
}
public boolean injectEvent(InputEvent event) {
return injectEvent(event, InputManager.INJECT_INPUT_EVENT_MODE_ASYNC);
}
public static boolean injectKeyEvent(int action, int keyCode, int repeat, int metaState, int displayId) {
long now = SystemClock.uptimeMillis();
KeyEvent event = new KeyEvent(now, now, action, keyCode, repeat, metaState, KeyCharacterMap.VIRTUAL_KEYBOARD, 0, 0,
InputDevice.SOURCE_KEYBOARD);
return injectEventOnDisplay(event, displayId);
}
public boolean injectKeyEvent(int action, int keyCode, int repeat, int metaState) {
long now = SystemClock.uptimeMillis();
KeyEvent event = new KeyEvent(now, now, action, keyCode, repeat, metaState, KeyCharacterMap.VIRTUAL_KEYBOARD, 0, 0,
@@ -203,10 +180,6 @@ public final class Device {
return injectEvent(event);
}
public static boolean injectKeycode(int keyCode, int displayId) {
return injectKeyEvent(KeyEvent.ACTION_DOWN, keyCode, 0, 0, displayId) && injectKeyEvent(KeyEvent.ACTION_UP, keyCode, 0, 0, displayId);
}
public boolean injectKeycode(int keyCode) {
return injectKeyEvent(KeyEvent.ACTION_DOWN, keyCode, 0, 0) && injectKeyEvent(KeyEvent.ACTION_UP, keyCode, 0, 0);
}
@@ -276,13 +249,6 @@ public final class Device {
return SurfaceControl.setDisplayPowerMode(d, mode);
}
public static boolean powerOffScreen(int displayId) {
if (!isScreenOn()) {
return true;
}
return injectKeycode(KeyEvent.KEYCODE_POWER, displayId);
}
/**
* Disable auto-rotation (if enabled), set the screen rotation and re-enable auto-rotation (if it was enabled).
*/
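
Both sides of the Device diff funnel key presses through the same two-step pattern: build a `KeyEvent` bound to the virtual keyboard character map and `SOURCE_KEYBOARD`, then inject the DOWN and UP actions back to back, as the `injectKeycode()` helpers do. The hunks only disagree on whether this is exposed as static methods taking a `displayId` or as instance methods bound to the device. A hedged sketch of that pattern follows; the `EventSink` interface stands in for the `injectInputEvent()` call that scrcpy reaches through its own InputManager wrapper:

```java
import android.os.SystemClock;
import android.view.InputDevice;
import android.view.KeyCharacterMap;
import android.view.KeyEvent;

public final class KeycodeInjectionSketch {

    /** Placeholder for the injectInputEvent() call the server performs via its service wrapper. */
    interface EventSink {
        boolean inject(KeyEvent event);
    }

    /** Press and release a keycode, as the injectKeycode() helpers in the diff do. */
    static boolean pressReleaseKeycode(EventSink sink, int keyCode) {
        return injectKeyEvent(sink, KeyEvent.ACTION_DOWN, keyCode)
                && injectKeyEvent(sink, KeyEvent.ACTION_UP, keyCode);
    }

    private static boolean injectKeyEvent(EventSink sink, int action, int keyCode) {
        long now = SystemClock.uptimeMillis();
        KeyEvent event = new KeyEvent(now, now, action, keyCode, 0 /* repeat */, 0 /* metaState */,
                KeyCharacterMap.VIRTUAL_KEYBOARD, 0 /* scancode */, 0 /* flags */,
                InputDevice.SOURCE_KEYBOARD);
        return sink.inject(event);
    }
}
```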

View File

@@ -17,7 +17,6 @@ public class Options {
private boolean stayAwake;
private String codecOptions;
private String encoderName;
private boolean powerOffScreenOnClose;
public Ln.Level getLogLevel() {
return logLevel;
@@ -130,12 +129,4 @@ public class Options {
public void setEncoderName(String encoderName) {
this.encoderName = encoderName;
}
public void setPowerOffScreenOnClose(boolean powerOffScreenOnClose) {
this.powerOffScreenOnClose = powerOffScreenOnClose;
}
public boolean getPowerOffScreenOnClose() {
return this.powerOffScreenOnClose;
}
}

View File

@@ -50,7 +50,7 @@ public final class Server {
}
}
CleanUp.configure(mustDisableShowTouchesOnCleanUp, restoreStayOn, true, options.getPowerOffScreenOnClose(), options.getDisplayId());
CleanUp.configure(mustDisableShowTouchesOnCleanUp, restoreStayOn, true);
boolean tunnelForward = options.isTunnelForward();
@@ -135,7 +135,7 @@ public final class Server {
"The server version (" + BuildConfig.VERSION_NAME + ") does not match the client " + "(" + clientVersion + ")");
}
final int expectedParameters = 16;
final int expectedParameters = 15;
if (args.length != expectedParameters) {
throw new IllegalArgumentException("Expecting " + expectedParameters + " parameters");
}
@@ -185,9 +185,6 @@ public final class Server {
String encoderName = "-".equals(args[14]) ? null : args[14];
options.setEncoderName(encoderName);
boolean powerOffScreenOnClose = Boolean.parseBoolean(args[15]);
options.setPowerOffScreenOnClose(powerOffScreenOnClose);
return options;
}
@@ -233,7 +230,7 @@ public final class Server {
if (encoders != null && encoders.length > 0) {
Ln.e("Try to use one of the available encoders:");
for (MediaCodecInfo encoder : encoders) {
Ln.e(" scrcpy --encoder '" + encoder.getName() + "'");
Ln.e(" scrcpy --encoder-name '" + encoder.getName() + "'");
}
}
}
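
Most of the Options and Server hunks revolve around a single `powerOffScreenOnClose` option: its presence adds a field with getter/setter, one more positional parameter (16 instead of 15), and an extra argument to `CleanUp.configure()`; the remaining difference is the flag name printed in the encoder error hint (`--encoder` vs `--encoder-name`). Client and server must agree on the exact parameter count, since options travel as positional strings where `"-"` means "not set". A hedged, trimmed-down sketch of that parsing convention (field names and count are illustrative, not the real scrcpy list):

```java
// Illustrative only: a two-parameter version of the positional-argument convention
// visible in the hunks above (real scrcpy parses many more parameters).
final class OptionsParsingSketch {
    String encoderName;
    boolean powerOffScreenOnClose;

    static OptionsParsingSketch parse(String... args) {
        // Both peers must agree on the exact count, hence the strict check in the diff.
        final int expectedParameters = 2;
        if (args.length != expectedParameters) {
            throw new IllegalArgumentException("Expecting " + expectedParameters + " parameters");
        }

        OptionsParsingSketch options = new OptionsParsingSketch();
        // "-" is the conventional placeholder for "not set".
        options.encoderName = "-".equals(args[0]) ? null : args[0];
        options.powerOffScreenOnClose = Boolean.parseBoolean(args[1]);
        return options;
    }
}
```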

View File

@@ -154,7 +154,6 @@ public class ControlMessageReaderTest {
ByteArrayOutputStream bos = new ByteArrayOutputStream();
DataOutputStream dos = new DataOutputStream(bos);
dos.writeByte(ControlMessage.TYPE_BACK_OR_SCREEN_ON);
dos.writeByte(KeyEvent.ACTION_UP);
byte[] packet = bos.toByteArray();
@@ -162,7 +161,6 @@ public class ControlMessageReaderTest {
ControlMessage event = reader.next();
Assert.assertEquals(ControlMessage.TYPE_BACK_OR_SCREEN_ON, event.getType());
Assert.assertEquals(KeyEvent.ACTION_DOWN, event.getAction());
}
@Test
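
The test change mirrors the Controller change above: with the action parameter, a serialized TYPE_BACK_OR_SCREEN_ON message carries one extra byte (the key action) after the type byte; without it, the type byte alone is the whole message. A hedged sketch of the two wire formats, using the same stream plumbing as the test; the constant values below are assumptions for illustration, the real ones live in ControlMessage and KeyEvent:

```java
import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;

final class BackOrScreenOnWireSketch {
    // Assumed values for illustration only.
    static final int TYPE_BACK_OR_SCREEN_ON = 4;
    static final int ACTION_UP = 1;

    /** One-byte form: just the message type. */
    static byte[] serializeWithoutAction() throws IOException {
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        new DataOutputStream(bos).writeByte(TYPE_BACK_OR_SCREEN_ON);
        return bos.toByteArray(); // length 1
    }

    /** Two-byte form: the type followed by the key action to forward. */
    static byte[] serializeWithAction(int action) throws IOException {
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        DataOutputStream dos = new DataOutputStream(bos);
        dos.writeByte(TYPE_BACK_OR_SCREEN_ON);
        dos.writeByte(action);
        return bos.toByteArray(); // length 2
    }
}
```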