From dea9189239af022002f5e2659f606926001e05a1 Mon Sep 17 00:00:00 2001
From: "F. Duncanh"
Date: Thu, 26 Jun 2025 10:02:58 -0400
Subject: [PATCH 1/3] Add uxplay rendering of audio cover art: uxplay -ca (w/o
filename)
---
README.html | 30 +++++++++----
README.md | 32 ++++++++++----
README.txt | 37 ++++++++++------
renderers/video_renderer.c | 86 +++++++++++++++++++++++++-------------
renderers/video_renderer.h | 3 +-
uxplay.1 | 2 +
uxplay.cpp | 18 ++++----
7 files changed, 141 insertions(+), 67 deletions(-)
diff --git a/README.html b/README.html
index 1bdc8098f..30523708b 100644
--- a/README.html
+++ b/README.html
@@ -8,6 +8,10 @@
class="uri">https://github.com/FDH2/UxPlay (where ALL user issues
should be posted, and latest versions can be found).
+NEW on github: option -ca (with no filename
+given) will now render Apple Music cover art (in audio-only mode) inside
+UxPlay. (-ca <filename> will continue to export cover
+art for display by an external viewer).
NEW in v1.72: Improved Support for (YouTube) HLS
(HTTP Live Streaming) video with the new “-hls” option (introduced in
1.71).* Only streaming from the YouTube iOS app (in "m3u8"
@@ -107,7 +111,8 @@ After installation:
also install.
For Audio-only mode (Apple Music, etc.) best quality is obtained
with the option “uxplay -async”, but there is then a 2 second latency
-imposed by iOS.
+imposed by iOS. Use option “uxplay -ca” to display any “Cover Art” that
+accompanies the audio.
If you are using UxPlay just to mirror the client’s screen
(without showing videos that need audio synchronized with video), it is
best to use the option “uxplay -vsync no”.
@@ -621,12 +626,14 @@ Starting and running UxPlay
framerate is actually streaming by using -vs fpsdisplaysink, and/or
-FPSdata.) When using this, you should use the default timestamp-based
synchronization option -vsync.
-Since UxPlay-1.54, you can display the accompanying “Cover Art”
-from sources like Apple Music in Audio-Only (ALAC) mode: run
+
+You can now display (inside UxPlay) the accompanying “Cover Art”
+from sources like Apple Music in Audio-Only (ALAC) mode with the option
+uxplay -ca. The older method of exporting cover art to
+an external viewer remains available: run
“uxplay -ca <name> &” in the background, then run
a image viewer with an autoreload feature: an example is “feh”: run
“feh -R 1 <name>” in the foreground; terminate feh
-and then Uxplay with “ctrl-C fg ctrl-C”.
+and then Uxplay with “ctrl-C fg ctrl-C”.
By default, GStreamer uses an algorithm to search for the best
“videosink” (GStreamer’s term for a graphics driver to display images)
@@ -1067,11 +1074,11 @@
Usage
cleartext.) All users must then know this password. This uses HTTP md5
Digest authentication, which is now regarded as providing weak security,
but it is only used to validate the uxplay password, and no user
-credentials are exposed. _Note: -pin and -pw are alternatives: if both
-are specified at startup, the earlier of these two options is discarded.
-If pwd is not specified, a random 4-digit pin code is
-displayed, and must be entered on the client at each
-new conenction.
+credentials are exposed. If pwd is not
+specified, a random 4-digit pin code is displayed, and must be entered
+on the client at each new connection. Note: -pin
+and -pw are alternatives: if both are specified at startup, the earlier
+of these two options is discarded.
-vsync [x] (In Mirror mode:) this option
(now the default) uses timestamps to synchronize audio
with video on the server, with an optional audio delay in (decimal)
@@ -1236,6 +1243,9 @@
Usage
converted to a whole number of microseconds. Default is 0.25 sec (250000
usec). (However, the client appears to ignore this reported latency,
so this option seems non-functional.)
+-ca (without specifying a filename) now displays
+“cover art” that accompanies Apple Music when played in “Audio-only”
+(ALAC) mode.
-ca filename provides a file (where
filename can include a full path) used for output of “cover
art” (from Apple Music, etc.,) in audio-only ALAC mode. This
@@ -1700,6 +1710,8 @@
5. Mirror screen
introduced 2017, running tvOS 12.2.1), so it does not seem to matter
what version UxPlay claims to be.
Changelog
+
+xxxx 2025-06-18 Render Audio cover-art inside UxPlay with -ca option
+(no file specified). Update llhttp to 9.3.0
1.72.1 2025-06-06 minor update: fix regression in -reg option; add
option -rc to specify initialization file; add “-nc no” to
unset “-nc” option (for macOS users, where -nc is default); add
diff --git a/README.md b/README.md
index 76c17fecd..a02ad71a5 100644
--- a/README.md
+++ b/README.md
@@ -2,6 +2,11 @@
### **Now developed at the GitHub site (where ALL user issues should be posted, and latest versions can be found).**
+- **NEW on github**: option -ca (with no filename given) will now render
+ Apple Music cover art (in audio-only mode) inside
+ UxPlay. (-ca `<filename>` will continue to export cover art for
+ display by an external viewer).
+
- **NEW in v1.72**: Improved Support for (YouTube) HLS (HTTP Live Streaming)
video with the new "-hls" option (introduced in 1.71).* **Only streaming from the YouTube iOS app
(in \"m3u8\" protocol) is currently supported**: (streaming using the AirPlay icon in a browser window
@@ -93,7 +98,8 @@ status](https://repology.org/badge/vertical-allrepos/uxplay.svg)](https://repolo
- For Audio-only mode (Apple Music, etc.) best quality is obtained
with the option "uxplay -async", but there is then a 2 second
- latency imposed by iOS.
+ latency imposed by iOS. Use option "uxplay -ca" to display any "Cover Art" that
+ accompanies the audio.
- If you are using UxPlay just to mirror the client's screen (without
showing videos that need audio synchronized with video), it is best to
@@ -606,12 +612,14 @@ value advances it.)
-FPSdata.) When using this, you should use the default
timestamp-based synchronization option `-vsync`.
-- Since UxPlay-1.54, you can display the accompanying "Cover Art" from
- sources like Apple Music in Audio-Only (ALAC) mode: run
+- You can now display (inside UxPlay) the accompanying "Cover Art" from
+ sources like Apple Music in Audio-Only (ALAC) mode with the option
+ `uxplay -ca`. _The older method of exporting cover art to an external
+ viewer remains available: run
"`uxplay -ca &`" in the background, then run a image viewer
with an autoreload feature: an example is "feh": run
"`feh -R 1 `" in the foreground; terminate feh and then Uxplay
- with "`ctrl-C fg ctrl-C`".
+ with "`ctrl-C fg ctrl-C`"_.
By default, GStreamer uses an algorithm to search for the best
"videosink" (GStreamer's term for a graphics driver to display images)
@@ -1060,11 +1068,11 @@ can be controlled with a password set when uxplay starts (set it in
the .uxplay startup file, where it is stored as cleartext.) All users must
then know this password. This uses HTTP md5 Digest authentication,
which is now regarded as providing weak security, but it is only used to
-validate the uxplay password, and no user credentials are exposed. _Note:
--pin and -pw are alternatives: if both are specified at startup, the
-earlier of these two options is discarded. If *pwd* is not specified,
-a random 4-digit pin code is displayed, and must be entered on the client
-at **each** new conenction.
+validate the uxplay password, and no user credentials are exposed.
+If *pwd* is **not** specified, a random 4-digit pin code is displayed, and must
+be entered on the client at **each** new connection.
+_Note: -pin and -pw are alternatives: if both are specified at startup, the
+earlier of these two options is discarded._
**-vsync \[x\]** (In Mirror mode:) this option (**now the default**)
uses timestamps to synchronize audio with video on the server, with an
@@ -1245,6 +1253,9 @@ number of microseconds. Default is 0.25 sec (250000 usec). *(However,
the client appears to ignore this reported latency, so this option seems
non-functional.)*
+**-ca** (without specifying a filename) now displays "cover art"
+ that accompanies Apple Music when played in "Audio-only" (ALAC) mode.
+
**-ca *filename*** provides a file (where *filename* can include a full
path) used for output of "cover art" (from Apple Music, *etc.*,) in
audio-only ALAC mode. This file is overwritten with the latest cover art
@@ -1731,6 +1742,9 @@ introduced 2017, running tvOS 12.2.1), so it does not seem to matter
what version UxPlay claims to be.
# Changelog
+xxxx 2025-06-18 Render Audio cover-art inside UxPlay with -ca option (no file
+specified). Update llhttp to 9.3.0
+
1.72.1 2025-06-06 minor update: fix regression in -reg option; add option
-rc to specify initialization file; add "-nc no" to unset "-nc"
option (for macOS users, where -nc is default); add user-installable
diff --git a/README.txt b/README.txt
index aca484c56..36e4f4518 100644
--- a/README.txt
+++ b/README.txt
@@ -2,6 +2,11 @@
### **Now developed at the GitHub site (where ALL user issues should be posted, and latest versions can be found).**
+- **NEW on github**: option -ca (with no filename given) will now
+ render Apple Music cover art (in audio-only mode) inside UxPlay.
+ (-ca `<filename>` will continue to export cover art for display by
+ an external viewer).
+
- **NEW in v1.72**: Improved Support for (YouTube) HLS (HTTP Live
Streaming) video with the new "-hls" option (introduced in 1.71).\*
**Only streaming from the YouTube iOS app (in \"m3u8\" protocol) is
@@ -100,7 +105,8 @@ status](https://repology.org/badge/vertical-allrepos/uxplay.svg)](https://repolo
- For Audio-only mode (Apple Music, etc.) best quality is obtained
with the option "uxplay -async", but there is then a 2 second
- latency imposed by iOS.
+ latency imposed by iOS. Use option "uxplay -ca" to display any
+ "Cover Art" that accompanies the audio.
- If you are using UxPlay just to mirror the client's screen (without
showing videos that need audio synchronized with video), it is best
@@ -620,12 +626,13 @@ value advances it.)
-FPSdata.) When using this, you should use the default
timestamp-based synchronization option `-vsync`.
-- Since UxPlay-1.54, you can display the accompanying "Cover Art" from
- sources like Apple Music in Audio-Only (ALAC) mode: run
- "`uxplay -ca &`" in the background, then run a image viewer
- with an autoreload feature: an example is "feh": run
- "`feh -R 1 `" in the foreground; terminate feh and then Uxplay
- with "`ctrl-C fg ctrl-C`".
+- You can now display (inside UxPlay) the accompanying "Cover Art"
+ from sources like Apple Music in Audio-Only (ALAC) mode with the
+ option `uxplay -ca`. *The older method of exporting cover art to an
+ external viewer remains available: run "`uxplay -ca &`" in
+ the background, then run an image viewer with an autoreload feature:
+ an example is "feh": run "`feh -R 1 `" in the foreground;
+ terminate feh and then Uxplay with "`ctrl-C fg ctrl-C`"*.
By default, GStreamer uses an algorithm to search for the best
"videosink" (GStreamer's term for a graphics driver to display images)
@@ -1080,11 +1087,11 @@ access can be controlled with a password set when uxplay starts (set it
in the .uxplay startup file, where it is stored as cleartext.) All users
must then know this password. This uses HTTP md5 Digest authentication,
which is now regarded as providing weak security, but it is only used to
-validate the uxplay password, and no user credentials are exposed.
-\_Note: -pin and -pw are alternatives: if both are specified at startup,
-the earlier of these two options is discarded. If *pwd* is not
-specified, a random 4-digit pin code is displayed, and must be entered
-on the client at **each** new conenction.
+validate the uxplay password, and no user credentials are exposed. If
+*pwd* is **not** specified, a random 4-digit pin code is displayed, and
+must be entered on the client at **each** new connection. *Note: -pin
+and -pw are alternatives: if both are specified at startup, the earlier
+of these two options is discarded.*
**-vsync \[x\]** (In Mirror mode:) this option (**now the default**)
uses timestamps to synchronize audio with video on the server, with an
@@ -1265,6 +1272,9 @@ number of microseconds. Default is 0.25 sec (250000 usec). *(However,
the client appears to ignore this reported latency, so this option seems
non-functional.)*
+**-ca** (without specifying a filename) now displays "cover art" that
+accompanies Apple Music when played in "Audio-only" (ALAC) mode.
+
**-ca *filename*** provides a file (where *filename* can include a full
path) used for output of "cover art" (from Apple Music, *etc.*,) in
audio-only ALAC mode. This file is overwritten with the latest cover art
@@ -1751,6 +1761,9 @@ what version UxPlay claims to be.
# Changelog
+xxxx 2025-06-18 Render Audio cover-art inside UxPlay with -ca option (no
+file specified). Update llhttp to 9.3.0
+
1.72.1 2025-06-06 minor update: fix regression in -reg option; add
option -rc ``{=html} to specify initialization file; add "-nc
no" to unset "-nc" option (for macOS users, where -nc is default); add
diff --git a/renderers/video_renderer.c b/renderers/video_renderer.c
index f336d6d11..c701c8867 100644
--- a/renderers/video_renderer.c
+++ b/renderers/video_renderer.c
@@ -72,7 +72,7 @@ typedef enum {
//GST_PLAY_FLAG_FORCE_SW_DECODERS = (1 << 12),
} GstPlayFlags;
-#define NCODECS 2 /* renderers for h264 and h265 */
+#define NCODECS 3 /* renderers for h264,h265, and jpeg images */
struct video_renderer_s {
GstElement *appsrc, *pipeline;
@@ -95,7 +95,8 @@ static video_renderer_t *renderer_type[NCODECS] = {0};
static int n_renderers = NCODECS;
static char h264[] = "h264";
static char h265[] = "h265";
-static char hls[] = "hls";
+static char hls[] = "hls";
+static char jpeg[] = "jpeg";
static void append_videoflip (GString *launch, const videoflip_t *flip, const videoflip_t *rot) {
/* videoflip image transform */
@@ -164,6 +165,7 @@ static void append_videoflip (GString *launch, const videoflip_t *flip, const vi
* closest used by GStreamer < 1.20.4 is BT709, 2:3:5:1 with * // now use sRGB = 1:1:7:1
* range = 2 -> GST_VIDEO_COLOR_RANGE_16_235 ("limited RGB") */
+static const char jpeg_caps[]="image/jpeg";
static const char h264_caps[]="video/x-h264,stream-format=(string)byte-stream,alignment=(string)au";
static const char h265_caps[]="video/x-h265,stream-format=(string)byte-stream,alignment=(string)au";
@@ -246,18 +248,20 @@ void video_renderer_init(logger_t *render_logger, const char *server_name, vide
appname = NULL;
/* the renderer for hls video will only be built if a HLS uri is provided in
- * the call to video_renderer_init, in which case the h264 and 265 mirror-mode
- * renderers will not be built. This is because it appears that we cannot
+ * the call to video_renderer_init, in which case the h264/h265 mirror-mode and jpeg
+ * audio-mode renderers will not be built. This is because it appears that we cannot
* put playbin into GST_STATE_READY before knowing the uri (?), so cannot use a
- * unified renderer structure with h264, h265 and hls */
+ * unified renderer structure with h264, h265, jpeg and hls */
if (hls_video) {
n_renderers = 1;
+ /* renderer[0]: playbin (hls) */
} else {
- n_renderers = h265_support ? 2 : 1;
+ n_renderers = h265_support ? 3 : 2;
+ /* renderer[0]: jpeg; [1]: h264; [2]: h265 */
}
g_assert (n_renderers <= NCODECS);
for (int i = 0; i < n_renderers; i++) {
- g_assert (i < 2);
+ g_assert (i < 3);
renderer_type[i] = (video_renderer_t *) calloc(1, sizeof(video_renderer_t));
g_assert(renderer_type[i]);
renderer_type[i]->autovideo = auto_videosink;
@@ -280,8 +284,8 @@ void video_renderer_init(logger_t *render_logger, const char *server_name, vide
g_assert(renderer_type[i]->pipeline);
renderer_type[i]->appsrc = NULL;
renderer_type[i]->codec = hls;
- /* if we are not using autovideosink, build a videossink based on the string "videosink" */
- if(strcmp(videosink, "autovideosink")) {
+ /* if we are not using an autovideosink, build a videosink based on the string "videosink" */
+ if (!auto_videosink) {
GstElement *playbin_videosink = make_video_sink(videosink, videosink_options);
if (!playbin_videosink) {
logger_log(logger, LOGGER_ERR, "video_renderer_init: failed to create playbin_videosink");
@@ -297,12 +301,18 @@ void video_renderer_init(logger_t *render_logger, const char *server_name, vide
g_object_set(renderer_type[i]->pipeline, "flags", flags, NULL);
g_object_set (G_OBJECT (renderer_type[i]->pipeline), "uri", uri, NULL);
} else {
+ bool jpeg_pipeline = false;
switch (i) {
case 0:
+ jpeg_pipeline = true;
+ renderer_type[i]->codec = jpeg;
+ caps = gst_caps_from_string(jpeg_caps);
+ break;
+ case 1:
renderer_type[i]->codec = h264;
caps = gst_caps_from_string(h264_caps);
break;
- case 1:
+ case 2:
renderer_type[i]->codec = h265;
caps = gst_caps_from_string(h265_caps);
break;
@@ -310,22 +320,29 @@ void video_renderer_init(logger_t *render_logger, const char *server_name, vide
g_assert(0);
}
GString *launch = g_string_new("appsrc name=video_source ! ");
- g_string_append(launch, "queue ! ");
- g_string_append(launch, parser);
- g_string_append(launch, " ! ");
- g_string_append(launch, decoder);
+ if (jpeg_pipeline) {
+ g_string_append(launch, "jpegdec ");
+ } else {
+ g_string_append(launch, "queue ! ");
+ g_string_append(launch, parser);
+ g_string_append(launch, " ! ");
+ g_string_append(launch, decoder);
+ }
g_string_append(launch, " ! ");
append_videoflip(launch, &videoflip[0], &videoflip[1]);
g_string_append(launch, converter);
g_string_append(launch, " ! ");
g_string_append(launch, "videoscale ! ");
+ if (jpeg_pipeline) {
+ g_string_append(launch, " imagefreeze allow-replace=TRUE ! ");
+ }
g_string_append(launch, videosink);
g_string_append(launch, " name=");
g_string_append(launch, videosink);
g_string_append(launch, "_");
g_string_append(launch, renderer_type[i]->codec);
g_string_append(launch, videosink_options);
- if (video_sync) {
+ if (video_sync && !jpeg_pipeline) {
g_string_append(launch, " sync=true");
sync = true;
} else {
@@ -366,7 +383,6 @@ void video_renderer_init(logger_t *render_logger, const char *server_name, vide
gst_pipeline_use_clock(GST_PIPELINE_CAST(renderer_type[i]->pipeline), clock);
renderer_type[i]->appsrc = gst_bin_get_by_name (GST_BIN (renderer_type[i]->pipeline), "video_source");
g_assert(renderer_type[i]->appsrc);
-
g_object_set(renderer_type[i]->appsrc, "caps", caps, "stream-type", 0, "is-live", TRUE, "format", GST_FORMAT_TIME, NULL);
g_string_free(launch, TRUE);
gst_caps_unref(caps);
@@ -388,7 +404,7 @@ void video_renderer_init(logger_t *render_logger, const char *server_name, vide
g_assert(renderer_type[0]->gst_window);
get_X11_Display(renderer_type[0]->gst_window, x11_display_name);
if (renderer_type[0]->gst_window->display) {
- renderer_type[i]->use_x11 = true;
+ renderer_type[0]->use_x11 = true;
} else {
free(renderer_type[0]->gst_window);
renderer_type[0]->gst_window = NULL;
@@ -487,6 +503,16 @@ bool waiting_for_x11_window() {
return false;
}
+void video_renderer_display_jpeg(const void *data, int *data_len) {
+ GstBuffer *buffer;
+ if (renderer && !strcmp(renderer->codec, jpeg)) {
+ buffer = gst_buffer_new_allocate(NULL, *data_len, NULL);
+ g_assert(buffer != NULL);
+ gst_buffer_fill(buffer, 0, data, *data_len);
+ gst_app_src_push_buffer (GST_APP_SRC(renderer->appsrc), buffer);
+ }
+}
+
uint64_t video_renderer_render_buffer(unsigned char* data, int *data_len, int *nal_count, uint64_t *ntp_time) {
GstBuffer *buffer;
GstClockTime pts = (GstClockTime) *ntp_time; /*now in nsecs */
@@ -849,19 +875,19 @@ gboolean gstreamer_pipeline_bus_callback(GstBus *bus, GstMessage *message, void
return TRUE;
}
-int video_renderer_choose_codec (bool video_is_h265) {
+int video_renderer_choose_codec (bool video_is_jpeg, bool video_is_h265) {
video_renderer_t *renderer_used = NULL;
- video_renderer_t *renderer_unused = NULL;
g_assert(!hls_video);
- if (n_renderers == 1) {
+ if (video_is_jpeg) {
+ renderer_used = renderer_type[0];
+ } else if (n_renderers == 2) {
if (video_is_h265) {
logger_log(logger, LOGGER_ERR, "video is h265 but the -h265 option was not used");
return -1;
}
- renderer_used = renderer_type[0];
+ renderer_used = renderer_type[1];
} else {
- renderer_used = video_is_h265 ? renderer_type[1] : renderer_type[0];
- renderer_unused = video_is_h265 ? renderer_type[0] : renderer_type[1];
+ renderer_used = video_is_h265 ? renderer_type[2] : renderer_type[1];
}
if (renderer_used == NULL) {
return -1;
@@ -880,14 +906,16 @@ int video_renderer_choose_codec (bool video_is_h265) {
logger_log(logger, LOGGER_DEBUG, "video_pipeline state change from %s to %s\n",
gst_element_state_get_name (old_state),gst_element_state_get_name (new_state));
gst_video_pipeline_base_time = gst_element_get_base_time(renderer->appsrc);
- if (renderer == renderer_type[1]) {
+ if (n_renderers > 2 && renderer == renderer_type[2]) {
logger_log(logger, LOGGER_INFO, "*** video format is h265 high definition (HD/4K) video %dx%d", width, height);
}
- if (renderer_unused) {
- for (int i = 0; i < n_renderers; i++) {
- if (renderer_type[i] != renderer_unused) {
- continue;
- }
+ /* destroy unused renderers */
+ for (int i = 1; i < n_renderers; i++) {
+ if (renderer_type[i] == renderer) {
+ continue;
+ }
+ if (renderer_type[i]) {
+ video_renderer_t *renderer_unused = renderer_type[i];
renderer_type[i] = NULL;
video_renderer_destroy_instance(renderer_unused);
}
diff --git a/renderers/video_renderer.h b/renderers/video_renderer.h
index 1fbdbf4d0..9c307e8b5 100644
--- a/renderers/video_renderer.h
+++ b/renderers/video_renderer.h
@@ -58,13 +58,14 @@ void video_renderer_set_start(float position);
void video_renderer_resume ();
bool video_renderer_is_paused();
uint64_t video_renderer_render_buffer (unsigned char* data, int *data_len, int *nal_count, uint64_t *ntp_time);
+void video_renderer_display_jpeg(const void *data, int *data_len);
void video_renderer_flush ();
unsigned int video_renderer_listen(void *loop, int id);
void video_renderer_destroy ();
void video_renderer_size(float *width_source, float *height_source, float *width, float *height);
bool waiting_for_x11_window();
bool video_get_playback_info(double *duration, double *position, float *rate, bool *buffer_empty, bool *buffer_full);
-int video_renderer_choose_codec(bool is_h265);
+int video_renderer_choose_codec (bool video_is_jpeg, bool video_is_h265);
unsigned int video_renderer_listen(void *loop, int id);
unsigned int video_reset_callback(void *loop);
#ifdef __cplusplus
diff --git a/uxplay.1 b/uxplay.1
index f4554e333..3cda48dfc 100644
--- a/uxplay.1
+++ b/uxplay.1
@@ -115,6 +115,8 @@ UxPlay 1.72: An open\-source AirPlay mirroring (+ audio streaming) server:
.TP
\fB\-al\fR x Audio latency in seconds (default 0.25) reported to client.
.TP
+\fB\-ca\fR Display cover-art in AirPlay Audio (ALAC) mode.
+.TP
\fB\-ca\fI fn \fR In Airplay Audio (ALAC) mode, write cover-art to file fn.
.TP
\fB\-md\fI fn \fR In Airplay Audio (ALAC) mode, write metadata text to file fn.
diff --git a/uxplay.cpp b/uxplay.cpp
index b869806f5..8e53f2650 100644
--- a/uxplay.cpp
+++ b/uxplay.cpp
@@ -129,6 +129,7 @@ static bool dump_audio = false;
static unsigned char audio_type = 0x00;
static unsigned char previous_audio_type = 0x00;
static bool fullscreen = false;
+static bool render_coverart = false;
static std::string coverart_filename = "";
static std::string metadata_filename = "";
static bool do_append_hostname = true;
@@ -714,7 +715,7 @@ static void print_info (char *name) {
printf(" osssink,oss4sink,osxaudiosink,wasapisink,directsoundsink.\n");
printf("-as 0 (or -a) Turn audio off, streamed video only\n");
printf("-al x Audio latency in seconds (default 0.25) reported to client.\n");
- printf("-ca In Airplay Audio (ALAC) mode, write cover-art to file \n");
+ printf("-ca [<fn>] In Audio (ALAC) mode, render cover-art [or write to file <fn>]\n");
printf("-md In Airplay Audio (ALAC) mode, write metadata text to file \n");
printf("-reset n Reset after n seconds of client silence (default n=%d, 0=never)\n", MISSED_FEEDBACK_LIMIT);
printf("-nofreeze Do NOT leave frozen screen in place after reset\n");
@@ -1154,20 +1155,20 @@ static void parse_arguments (int argc, char *argv[]) {
if (!file_has_write_access(fn)) {
fprintf(stderr, "%s cannot be written to:\noption \"-admp \" must be to a file with write access\n", fn);
exit(1);
- }
+ }
}
} else if (arg == "-ca" ) {
- if (option_has_value(i, argc, arg, argv[i+1])) {
+ if (i < argc - 1 && *argv[i+1] != '-') {
coverart_filename.erase();
coverart_filename.append(argv[++i]);
const char *fn = coverart_filename.c_str();
+ render_coverart = false;
if (!file_has_write_access(fn)) {
fprintf(stderr, "%s cannot be written to:\noption \"-ca \" must be to a file with write access\n", fn);
exit(1);
}
} else {
- fprintf(stderr,"option -ca must be followed by a filename for cover-art output\n");
- exit(1);
+ render_coverart = true;
}
} else if (arg == "-md" ) {
if (option_has_value(i, argc, arg, argv[i+1])) {
@@ -1175,7 +1176,7 @@ static void parse_arguments (int argc, char *argv[]) {
metadata_filename.append(argv[++i]);
const char *fn = metadata_filename.c_str();
if (!file_has_write_access(fn)) {
- fprintf(stderr, "%s cannot be written to:\noption \"-ca \" must be to a file with write access\n", fn);
+ fprintf(stderr, "%s cannot be written to:\noption \"-md \" must be to a file with write access\n", fn);
exit(1);
}
} else {
@@ -1689,7 +1690,7 @@ extern "C" void video_reset(void *cls) {
extern "C" int video_set_codec(void *cls, video_codec_t codec) {
bool video_is_h265 = (codec == VIDEO_CODEC_H265);
- return video_renderer_choose_codec(video_is_h265);
+ return video_renderer_choose_codec(false, video_is_h265);
}
extern "C" void display_pin(void *cls, char *pin) {
@@ -1963,6 +1964,9 @@ extern "C" void audio_set_coverart(void *cls, const void *buffer, int buflen) {
if (buffer && coverart_filename.length()) {
write_coverart(coverart_filename.c_str(), buffer, buflen);
LOGI("coverart size %d written to %s", buflen, coverart_filename.c_str());
+ } else if (buffer && render_coverart) {
+ video_renderer_choose_codec(true, false); /* video_is_jpeg = true */
+ video_renderer_display_jpeg(buffer, &buflen);
}
}
From b527630b06e587a53e126c89d12975fb8df4f34f Mon Sep 17 00:00:00 2001
From: "F. Duncanh"
Date: Thu, 26 Jun 2025 12:16:09 -0400
Subject: [PATCH 2/3] reset video when coverart rendering terminates
---
lib/raop.h | 3 ++-
lib/raop_handlers.h | 4 ++++
uxplay.cpp | 7 +++++++
3 files changed, 13 insertions(+), 1 deletion(-)
diff --git a/lib/raop.h b/lib/raop.h
index fe81818b2..6cbe3d699 100644
--- a/lib/raop.h
+++ b/lib/raop.h
@@ -82,6 +82,7 @@ struct raop_callbacks_s {
void (*audio_set_volume)(void *cls, float volume);
void (*audio_set_metadata)(void *cls, const void *buffer, int buflen);
void (*audio_set_coverart)(void *cls, const void *buffer, int buflen);
+ void (*audio_stop_coverart_rendering) (void* cls);
void (*audio_remote_control_id)(void *cls, const char *dacp_id, const char *active_remote_header);
void (*audio_set_progress)(void *cls, unsigned int start, unsigned int curr, unsigned int end);
void (*audio_get_format)(void *cls, unsigned char *ct, unsigned short *spf, bool *usingScreen, bool *isMedia, uint64_t *audioFormat);
@@ -99,8 +100,8 @@ struct raop_callbacks_s {
void (*on_video_rate) (void *cls, const float rate);
void (*on_video_stop) (void *cls);
void (*on_video_acquire_playback_info) (void *cls, playback_info_t *playback_video);
-
};
+
typedef struct raop_callbacks_s raop_callbacks_t;
raop_ntp_t *raop_ntp_init(logger_t *logger, raop_callbacks_t *callbacks, const char *remote,
int remote_addr_len, unsigned short timing_rport,
diff --git a/lib/raop_handlers.h b/lib/raop_handlers.h
index 37d3db9ab..51ce62cc3 100644
--- a/lib/raop_handlers.h
+++ b/lib/raop_handlers.h
@@ -1191,6 +1191,10 @@ raop_handler_teardown(raop_conn_t *conn,
if (conn->raop_rtp) {
/* Stop our audio RTP session */
raop_rtp_stop(conn->raop_rtp);
+ /* stop any coverart rendering */
+ if (conn->raop->callbacks.audio_stop_coverart_rendering) {
+ conn->raop->callbacks.audio_stop_coverart_rendering(conn->raop->callbacks.cls);
+ }
}
} else if (teardown_110) {
conn->raop->callbacks.video_reset(conn->raop->callbacks.cls);
diff --git a/uxplay.cpp b/uxplay.cpp
index 8e53f2650..d5f975863 100644
--- a/uxplay.cpp
+++ b/uxplay.cpp
@@ -1970,6 +1970,12 @@ extern "C" void audio_set_coverart(void *cls, const void *buffer, int buflen) {
}
}
+extern "C" void audio_stop_coverart_rendering(void *cls) {
+ if (render_coverart) {
+ video_reset(cls);
+ }
+}
+
extern "C" void audio_set_progress(void *cls, unsigned int start, unsigned int curr, unsigned int end) {
int duration = (int) (end - start)/44100;
int position = (int) (curr - start)/44100;
@@ -2149,6 +2155,7 @@ static int start_raop_server (unsigned short display[5], unsigned short tcp[3],
raop_cbs.video_report_size = video_report_size;
raop_cbs.audio_set_metadata = audio_set_metadata;
raop_cbs.audio_set_coverart = audio_set_coverart;
+ raop_cbs.audio_stop_coverart_rendering = audio_stop_coverart_rendering;
raop_cbs.audio_set_progress = audio_set_progress;
raop_cbs.report_client_request = report_client_request;
raop_cbs.display_pin = display_pin;
From 6381af221b612012ed0b211039efdb3e9ffede28 Mon Sep 17 00:00:00 2001
From: Integral
Date: Sat, 5 Jul 2025 11:17:59 +0800
Subject: [PATCH 3/3] Use waylandsink in Wayland sessions for GStreamer 1.16+
Use waylandsink instead of xvimagesink/ximagesink in Wayland sessions
for GStreamer 1.16+.
---
uxplay.cpp | 11 +++++++++++
1 file changed, 11 insertions(+)
diff --git a/uxplay.cpp b/uxplay.cpp
index d5f975863..e4b190e1c 100644
--- a/uxplay.cpp
+++ b/uxplay.cpp
@@ -35,6 +35,7 @@
#include
#include
#include
+#include <gst/gst.h>
#ifdef _WIN32 /*modifications for Windows compilation */
#include
@@ -2362,6 +2363,16 @@ int main (int argc, char *argv[]) {
if (config_file.length()) {
read_config_file(config_file.c_str(), argv[0]);
}
+
+ guint major, minor, micro, nano;
+ gst_version (&major, &minor, &micro, &nano);
+ if (major > 1 || (major == 1 && minor >= 16)) {
+ const char *xdg_session_type = getenv("XDG_SESSION_TYPE");
+ if (xdg_session_type && strcmp(xdg_session_type, "wayland") == 0) {
+ videosink = "waylandsink";
+ }
+ }
+
parse_arguments (argc, argv);
log_level = (debug_log ? LOGGER_DEBUG_DATA : LOGGER_INFO);