diff --git a/.gitignore b/.gitignore index c4a9d2c9..c84156cd 100644 --- a/.gitignore +++ b/.gitignore @@ -99,3 +99,6 @@ Icon Network Trash Folder Temporary Items .apdisk + +# Used by zed to store clangd cache +.cache diff --git a/CMakeLists.txt b/CMakeLists.txt index 734aba51..8fa47818 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -305,73 +305,65 @@ endif() if (BUILD_TEST_PLUGIN) target_sources(flutterpi_module PRIVATE src/plugins/testplugin.c) endif() -if (BUILD_GSTREAMER_VIDEO_PLAYER_PLUGIN) - if (NOT HAVE_EGL_GLES2) - message(NOTICE "EGL and OpenGL ES2 are required for gstreamer video player. Gstreamer video player plugin won't be build.") - else() + +set(HAVE_GSTREAMER_VIDEO_PLAYER OFF) +if (BUILD_GSTREAMER_VIDEO_PLAYER_PLUGIN OR BUILD_GSTREAMER_AUDIO_PLAYER_PLUGIN)# + pkg_check_modules(LIBGSTREAMER IMPORTED_TARGET + gstreamer-1.0 + gstreamer-plugins-base-1.0 + gstreamer-app-1.0 + gstreamer-allocators-1.0 + gstreamer-video-1.0 + gstreamer-audio-1.0 + ) + + if (LIBGSTREAMER_FOUND) + string(REPLACE "." 
";" LIBGSTREAMER_VERSION_AS_LIST ${LIBGSTREAMER_gstreamer-1.0_VERSION}) + list(GET LIBGSTREAMER_VERSION_AS_LIST 0 LIBGSTREAMER_VERSION_MAJOR) + list(GET LIBGSTREAMER_VERSION_AS_LIST 1 LIBGSTREAMER_VERSION_MINOR) + list(GET LIBGSTREAMER_VERSION_AS_LIST 2 LIBGSTREAMER_VERSION_PATCH) + + target_sources(flutterpi_module PRIVATE src/plugins/gstplayer.c) + target_link_libraries(flutterpi_module PUBLIC PkgConfig::LIBGSTREAMER) + endif() + + if (BUILD_GSTREAMER_VIDEO_PLAYER_PLUGIN AND NOT LIBGSTREAMER_FOUND) if (TRY_BUILD_GSTREAMER_VIDEO_PLAYER_PLUGIN) - pkg_check_modules(LIBGSTREAMER IMPORTED_TARGET gstreamer-1.0) - pkg_check_modules(LIBGSTREAMER_PLUGINS_BASE IMPORTED_TARGET gstreamer-plugins-base-1.0) - pkg_check_modules(LIBGSTREAMER_APP IMPORTED_TARGET gstreamer-app-1.0) - pkg_check_modules(LIBGSTREAMER_ALLOCATORS IMPORTED_TARGET gstreamer-allocators-1.0) - pkg_check_modules(LIBGSTREAMER_VIDEO IMPORTED_TARGET gstreamer-video-1.0) + message(NOTICE "Some required gstreamer dependencies were not found. Gstreamer video player plugin won't be built.") else() - pkg_check_modules(LIBGSTREAMER REQUIRED IMPORTED_TARGET gstreamer-1.0) - pkg_check_modules(LIBGSTREAMER_PLUGINS_BASE REQUIRED IMPORTED_TARGET gstreamer-plugins-base-1.0) - pkg_check_modules(LIBGSTREAMER_APP REQUIRED IMPORTED_TARGET gstreamer-app-1.0) - pkg_check_modules(LIBGSTREAMER_ALLOCATORS REQUIRED IMPORTED_TARGET gstreamer-allocators-1.0) - pkg_check_modules(LIBGSTREAMER_VIDEO REQUIRED IMPORTED_TARGET gstreamer-video-1.0) + message(ERROR "Some required gstreamer dependencies were not found. Can't build gstreamer video player plugin.") endif() + endif() - if (LIBGSTREAMER_FOUND AND LIBGSTREAMER_PLUGINS_BASE_FOUND AND LIBGSTREAMER_APP_FOUND AND LIBGSTREAMER_ALLOCATORS_FOUND AND LIBGSTREAMER_VIDEO_FOUND) - # There's no other way to query the libinput version (in code) somehow. - # So we need to roll our own libinput version macro - string(REPLACE "." 
";" LIBGSTREAMER_VERSION_AS_LIST ${LIBGSTREAMER_VERSION}) - list(GET LIBGSTREAMER_VERSION_AS_LIST 0 LIBGSTREAMER_VERSION_MAJOR) - list(GET LIBGSTREAMER_VERSION_AS_LIST 1 LIBGSTREAMER_VERSION_MINOR) - list(GET LIBGSTREAMER_VERSION_AS_LIST 2 LIBGSTREAMER_VERSION_PATCH) - - target_sources(flutterpi_module PRIVATE - src/plugins/gstreamer_video_player/plugin.c - src/plugins/gstreamer_video_player/player.c - src/plugins/gstreamer_video_player/frame.c - ) - target_link_libraries(flutterpi_module PUBLIC - PkgConfig::LIBGSTREAMER - PkgConfig::LIBGSTREAMER_PLUGINS_BASE - PkgConfig::LIBGSTREAMER_APP - PkgConfig::LIBGSTREAMER_ALLOCATORS - PkgConfig::LIBGSTREAMER_VIDEO - ) + if (BUILD_GSTREAMER_VIDEO_PLAYER_PLUGIN AND NOT HAVE_EGL_GLES2) + if (TRY_BUILD_GSTREAMER_VIDEO_PLAYER_PLUGIN) + message(NOTICE "EGL and OpenGL ES2 are required for gstreamer video player. Gstreamer video player plugin won't be built.") else() - message(NOTICE "Couldn't find gstreamer libraries. Gstreamer video player plugin won't be build.") + message(ERROR "EGL and OpenGL ES2 are required for gstreamer video player. 
Can't build gstreamer video player plugin.") endif() endif() -endif() -if (BUILD_GSTREAMER_AUDIO_PLAYER_PLUGIN) - if (TRY_BUILD_GSTREAMER_AUDIO_PLAYER_PLUGIN) - pkg_check_modules(LIBGSTREAMER IMPORTED_TARGET gstreamer-1.0) - pkg_check_modules(LIBGSTREAMER_APP IMPORTED_TARGET gstreamer-app-1.0) - pkg_check_modules(LIBGSTREAMER_AUDIO IMPORTED_TARGET gstreamer-audio-1.0) - else() - pkg_check_modules(LIBGSTREAMER REQUIRED IMPORTED_TARGET gstreamer-1.0) - pkg_check_modules(LIBGSTREAMER_APP REQUIRED IMPORTED_TARGET gstreamer-app-1.0) - pkg_check_modules(LIBGSTREAMER_AUDIO REQUIRED IMPORTED_TARGET gstreamer-audio-1.0) + if (BUILD_GSTREAMER_VIDEO_PLAYER_PLUGIN AND LIBGSTREAMER_FOUND AND HAVE_EGL_GLES2) + set(HAVE_GSTREAMER_VIDEO_PLAYER ON) + target_sources(flutterpi_module PRIVATE + src/plugins/gstreamer_video_player/frame.c + src/plugins/gstreamer_video_player/flutter_texture_sink.c + src/plugins/gstreamer_video_player/plugin.c + ) endif() - if (LIBGSTREAMER_FOUND AND LIBGSTREAMER_APP_FOUND AND LIBGSTREAMER_AUDIO_FOUND) + if (BUILD_GSTREAMER_VIDEO_PLAYER_PLUGIN AND NOT LIBGSTREAMER_FOUND) + if (TRY_BUILD_GSTREAMER_VIDEO_PLAYER_PLUGIN) + message(NOTICE "Some required gstreamer dependencies were not found. Gstreamer audio player plugin won't be built.") + else() + message(ERROR "Some required gstreamer dependencies were not found. Can't build gstreamer audio player plugin.") + endif() + endif() + + if (BUILD_GSTREAMER_AUDIO_PLAYER_PLUGIN AND LIBGSTREAMER_FOUND) target_sources(flutterpi_module PRIVATE src/plugins/audioplayers/plugin.c - src/plugins/audioplayers/player.c ) - target_link_libraries(flutterpi_module PUBLIC - PkgConfig::LIBGSTREAMER - PkgConfig::LIBGSTREAMER_APP - PkgConfig::LIBGSTREAMER_AUDIO - ) - else() - message(NOTICE "Couldn't find gstreamer libraries. 
Gstreamer audio player plugin won't be build.") endif() endif() @@ -390,10 +382,10 @@ if (BUILD_SENTRY_PLUGIN) if (SENTRY_BACKEND STREQUAL "crashpad" AND SENTRY_PLUGIN_BUNDLE_CRASHPAD_HANDLER) set(HAVE_BUNDLED_CRASHPAD_HANDLER ON) - + target_sources(flutter-pi PRIVATE src/crashpad_handler_trampoline.cc) # link against the same libraries the crashpad_handler uses - + get_target_property(handler_deps crashpad_handler INTERFACE_LINK_LIBRARIES) target_link_libraries(flutter-pi PUBLIC ${handler_deps}) endif() diff --git a/config.h.in b/config.h.in index 8a8d8a88..fe28bd89 100644 --- a/config.h.in +++ b/config.h.in @@ -26,5 +26,6 @@ #cmakedefine ENABLE_MTRACE #cmakedefine ENABLE_ASAN #cmakedefine HAVE_BUNDLED_CRASHPAD_HANDLER +#cmakedefine HAVE_GSTREAMER_VIDEO_PLAYER #endif diff --git a/src/flutter-pi.c b/src/flutter-pi.c index 10219914..eacd80e9 100644 --- a/src/flutter-pi.c +++ b/src/flutter-pi.c @@ -1039,6 +1039,11 @@ struct gl_renderer *flutterpi_get_gl_renderer(struct flutterpi *flutterpi) { return flutterpi->gl_renderer; } +struct tracer *flutterpi_get_tracer(struct flutterpi *flutterpi) { + ASSERT_NOT_NULL(flutterpi); + return flutterpi->tracer; +} + void flutterpi_set_pointer_kind(struct flutterpi *flutterpi, enum pointer_kind kind) { return compositor_set_cursor(flutterpi->compositor, false, false, true, kind, false, VEC2F(0, 0)); } diff --git a/src/flutter-pi.h b/src/flutter-pi.h index d2e831d9..00f9eb65 100644 --- a/src/flutter-pi.h +++ b/src/flutter-pi.h @@ -92,6 +92,7 @@ struct drmdev; struct locales; struct vk_renderer; struct flutterpi; +struct tracer; /// TODO: Remove this extern struct flutterpi *flutterpi; @@ -188,6 +189,8 @@ bool flutterpi_has_gl_renderer(struct flutterpi *flutterpi); struct gl_renderer *flutterpi_get_gl_renderer(struct flutterpi *flutterpi); +struct tracer *flutterpi_get_tracer(struct flutterpi *flutterpi); + void flutterpi_set_pointer_kind(struct flutterpi *flutterpi, enum pointer_kind kind); void flutterpi_trace_event_instant(struct 
flutterpi *flutterpi, const char *name); diff --git a/src/modesetting.c b/src/modesetting.c index 722a71ea..892c2973 100644 --- a/src/modesetting.c +++ b/src/modesetting.c @@ -2448,9 +2448,6 @@ int kms_req_builder_push_fb_layer( /* id_range */ false, 0 // clang-format on ); - if (plane == NULL) { - LOG_DEBUG("Couldn't find a fitting cursor plane.\n"); - } } /// TODO: Not sure we can use crtc_x, crtc_y, etc with primary planes diff --git a/src/notifier_listener.c b/src/notifier_listener.c index d8156538..278b9749 100644 --- a/src/notifier_listener.c +++ b/src/notifier_listener.c @@ -112,10 +112,12 @@ struct listener *notifier_listen(struct notifier *notifier, listener_cb_t notify return NULL; } - r = listener_notify(l, notifier->state); - if (r == kUnlisten) { - listener_destroy(l); - return NULL; + if (notifier->is_value_notifier) { + r = listener_notify(l, notifier->state); + if (r == kUnlisten) { + listener_destroy(l); + return NULL; + } } notifier_lock(notifier); diff --git a/src/platformchannel.c b/src/platformchannel.c index 4bc13be1..5ed3cbca 100644 --- a/src/platformchannel.c +++ b/src/platformchannel.c @@ -1320,6 +1320,10 @@ int platch_respond_native_error_std(const FlutterPlatformMessageResponseHandle * return platch_respond_error_std(handle, "nativeerror", strerror(_errno), &STDINT32(_errno)); } +int platch_respond_malformed_message_std(const FlutterPlatformMessage *message) { + return platch_respond_error_std(message->response_handle, "malformed-message", "The platform message received was malformed.", &STDNULL); +} + /************************ * JSON METHOD CHANNELS * ************************/ @@ -2483,3 +2487,13 @@ MALLOCLIKE MUST_CHECK char *raw_std_method_call_get_method_dup(const struct raw_ ATTR_PURE const struct raw_std_value *raw_std_method_call_get_arg(const struct raw_std_value *value) { return raw_std_value_after(value); } + +ATTR_PURE const struct raw_std_value *raw_std_method_call_from_buffer(const void *buffer, size_t buffer_size) { + const 
struct raw_std_value *envelope = (const struct raw_std_value *) buffer; + + if (!raw_std_method_call_check(envelope, buffer_size)) { + return NULL; + } else { + return envelope; + } +} diff --git a/src/platformchannel.h b/src/platformchannel.h index 76c03315..14ea7287 100644 --- a/src/platformchannel.h +++ b/src/platformchannel.h @@ -3,7 +3,7 @@ * Platform Channels * * Encoding/Decoding of flutter platform messages, with different - * + * * Supported codecs: * - standard message & method codec, * - json message & method codec @@ -1491,6 +1491,8 @@ int platch_respond_illegal_arg_ext_std(const FlutterPlatformMessageResponseHandl int platch_respond_native_error_std(const FlutterPlatformMessageResponseHandle *handle, int _errno); +int platch_respond_malformed_message_std(const FlutterPlatformMessage *message); + int platch_respond_success_json(const FlutterPlatformMessageResponseHandle *handle, struct json_value *return_value); int platch_respond_error_json( @@ -1614,6 +1616,7 @@ ATTR_PURE bool raw_std_method_call_check(const struct raw_std_value *value, size ATTR_PURE bool raw_std_method_call_response_check(const struct raw_std_value *value, size_t buffer_size); ATTR_PURE bool raw_std_event_check(const struct raw_std_value *value, size_t buffer_size); +ATTR_PURE const struct raw_std_value *raw_std_method_call_from_buffer(const void *buffer, size_t buffer_size); ATTR_PURE const struct raw_std_value *raw_std_method_call_get_method(const struct raw_std_value *value); ATTR_PURE bool raw_std_method_call_is_method(const struct raw_std_value *value, const char *method_name); MALLOCLIKE MUST_CHECK char *raw_std_method_call_get_method_dup(const struct raw_std_value *value); diff --git a/src/plugins/audioplayers/player.c b/src/plugins/audioplayers/player.c deleted file mode 100644 index 1e948e62..00000000 --- a/src/plugins/audioplayers/player.c +++ /dev/null @@ -1,605 +0,0 @@ -#define _GNU_SOURCE - -#include -#include - -#include -#include -#include -#include - -#include 
"flutter-pi.h" -#include "platformchannel.h" -#include "plugins/audioplayers.h" -#include "util/asserts.h" -#include "util/logging.h" - -struct audio_player { - GstElement *source; - GstElement *playbin; - GstBus *bus; - - GstElement *panorama; - GstElement *audiobin; - GstElement *audiosink; - GstPad *panoramaSinkPad; - - bool is_initialized; - bool is_playing; - bool is_looping; - bool is_seek_completed; - double playback_rate; - - char *url; - char *player_id; - char *event_channel_name; - - _Atomic bool event_subscribed; -}; - -// Private Class functions -static gboolean audio_player_on_bus_message(GstBus *bus, GstMessage *message, struct audio_player *data); -static gboolean audio_player_on_refresh(struct audio_player *data); -static void audio_player_set_playback(struct audio_player *self, int64_t seekTo, double rate); -static void audio_player_on_media_error(struct audio_player *self, GError *error, gchar *debug); -static void audio_player_on_media_state_change(struct audio_player *self, GstObject *src, GstState *old_state, GstState *new_state); -static void audio_player_on_prepared(struct audio_player *self, bool value); -static void audio_player_on_position_update(struct audio_player *self); -static void audio_player_on_duration_update(struct audio_player *self); -static void audio_player_on_seek_completed(struct audio_player *self); -static void audio_player_on_playback_ended(struct audio_player *self); - -static int on_bus_fd_ready(sd_event_source *src, int fd, uint32_t revents, void *userdata) { - struct audio_player *player = userdata; - GstMessage *msg; - - (void) src; - (void) fd; - (void) revents; - - /* DEBUG_TRACE_BEGIN(player, "on_bus_fd_ready"); */ - - msg = gst_bus_pop(player->bus); - if (msg != NULL) { - audio_player_on_bus_message(player->bus, msg, player); - gst_message_unref(msg); - } - - /* DEBUG_TRACE_END(player, "on_bus_fd_ready"); */ - - return 0; -} - -static void audio_player_source_setup(GstElement *playbin, GstElement *source, 
GstElement **p_src) { - (void)(playbin); - (void)(p_src); - - if (g_object_class_find_property(G_OBJECT_GET_CLASS(source), "ssl-strict") != 0) { - g_object_set(G_OBJECT(source), "ssl-strict", FALSE, NULL); - } -} - -struct audio_player *audio_player_new(char *player_id, char *channel) { - GPollFD fd; - sd_event_source *busfd_event_source; - int ok; - - struct audio_player *self = malloc(sizeof(struct audio_player)); - if (self == NULL) { - return NULL; - } - - self->url = NULL; - self->source = NULL; - self->is_initialized = false; - self->is_playing = false; - self->is_looping = false; - self->is_seek_completed = false; - self->playback_rate = 1.0; - self->event_subscribed = false; - - gst_init(NULL, NULL); - self->playbin = gst_element_factory_make("playbin", NULL); - if (!self->playbin) { - LOG_ERROR("Could not create gstreamer playbin.\n"); - goto deinit_self; - } - - // Setup stereo balance controller - self->panorama = gst_element_factory_make("audiopanorama", NULL); - if (self->panorama) { - self->audiobin = gst_bin_new(NULL); - self->audiosink = gst_element_factory_make("autoaudiosink", NULL); - - gst_bin_add_many(GST_BIN(self->audiobin), self->panorama, self->audiosink, NULL); - gst_element_link(self->panorama, self->audiosink); - - GstPad *sinkpad = gst_element_get_static_pad(self->panorama, "sink"); - self->panoramaSinkPad = gst_ghost_pad_new("sink", sinkpad); - gst_element_add_pad(self->audiobin, self->panoramaSinkPad); - gst_object_unref(GST_OBJECT(sinkpad)); - - g_object_set(G_OBJECT(self->playbin), "audio-sink", self->audiobin, NULL); - g_object_set(G_OBJECT(self->panorama), "method", 1, NULL); - } else { - self->audiobin = NULL; - self->audiosink = NULL; - self->panoramaSinkPad = NULL; - } - - g_signal_connect(self->playbin, "source-setup", G_CALLBACK(audio_player_source_setup), &self->source); - - self->bus = gst_element_get_bus(self->playbin); - - gst_bus_get_pollfd(self->bus, &fd); - - flutterpi_sd_event_add_io(&busfd_event_source, fd.fd, 
EPOLLIN, on_bus_fd_ready, self); - - // Refresh continuously to emit recurring events - g_timeout_add(1000, (GSourceFunc) audio_player_on_refresh, self); - - self->player_id = strdup(player_id); - if (self->player_id == NULL) { - goto deinit_player; - } - - // audioplayers player event channel clang: - // /events/ - ok = asprintf(&self->event_channel_name, "%s/events/%s", channel, player_id); - ASSERT_MSG(ok, "event channel name OEM"); - - if (ok < 0) { - goto deinit_player_id; - } - - return self; - - //Deinit doesn't require to NULL, as we just delete player. -deinit_player_id: - free(self->player_id); - -deinit_player: - gst_object_unref(self->bus); - - if (self->panorama != NULL) { - gst_element_set_state(self->audiobin, GST_STATE_NULL); - - gst_element_remove_pad(self->audiobin, self->panoramaSinkPad); - gst_bin_remove(GST_BIN(self->audiobin), self->audiosink); - gst_bin_remove(GST_BIN(self->audiobin), self->panorama); - - self->panorama = NULL; - self->audiosink = NULL; - self->panoramaSinkPad = NULL; - self->audiobin = NULL; - } - - gst_element_set_state(self->playbin, GST_STATE_NULL); - gst_object_unref(self->playbin); - -deinit_self: - free(self); - return NULL; -} - -gboolean audio_player_on_bus_message(GstBus *bus, GstMessage *message, struct audio_player *data) { - (void) bus; - switch (GST_MESSAGE_TYPE(message)) { - case GST_MESSAGE_ERROR: { - GError *err; - gchar *debug; - - gst_message_parse_error(message, &err, &debug); - audio_player_on_media_error(data, err, debug); - g_error_free(err); - g_free(debug); - break; - } - case GST_MESSAGE_STATE_CHANGED: { - GstState old_state, new_state; - - gst_message_parse_state_changed(message, &old_state, &new_state, NULL); - audio_player_on_media_state_change(data, message->src, &old_state, &new_state); - break; - } - case GST_MESSAGE_EOS: - audio_player_on_playback_ended(data); - break; - case GST_MESSAGE_DURATION_CHANGED: - audio_player_on_duration_update(data); - break; - case GST_MESSAGE_ASYNC_DONE: - if 
(!data->is_seek_completed) { - audio_player_on_seek_completed(data); - data->is_seek_completed = true; - } - break; - default: - // For more GstMessage types see: - // https://gstreamer.freedesktop.org/documentation/gstreamer/gstmessage.html?gi-language=c#enumerations - break; - } - - // Continue watching for messages - return TRUE; -} - -gboolean audio_player_on_refresh(struct audio_player *self) { - if (self == NULL) { - return FALSE; - } - - GstState playbinState; - gst_element_get_state(self->playbin, &playbinState, NULL, GST_CLOCK_TIME_NONE); - if (playbinState == GST_STATE_PLAYING) { - audio_player_on_position_update(self); - } - return TRUE; -} - -void audio_player_set_playback(struct audio_player *self, int64_t seekTo, double rate) { - const GstSeekFlags seek_flags = GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE; - - if (!self->is_initialized) { - return; - } - // See: - // https://gstreamer.freedesktop.org/documentation/tutorials/basic/playback-speed.html?gi-language=c - if (!self->is_seek_completed) { - return; - } - if (rate == 0) { - // Do not set rate if it's 0, rather pause. - audio_player_pause(self); - return; - } - self->playback_rate = rate; - self->is_seek_completed = false; - - GstEvent *seek_event; - if (rate > 0) { - seek_event = gst_event_new_seek(rate, GST_FORMAT_TIME, seek_flags, GST_SEEK_TYPE_SET, seekTo * GST_MSECOND, GST_SEEK_TYPE_NONE, -1); - } else { - seek_event = gst_event_new_seek(rate, GST_FORMAT_TIME, seek_flags, GST_SEEK_TYPE_SET, 0, GST_SEEK_TYPE_SET, seekTo * GST_MSECOND); - } - - if (!gst_element_send_event(self->playbin, seek_event)) { - // Not clear how to treat this error? 
- const int64_t seekMs = seekTo * GST_MSECOND; - LOG_ERROR("Could not set playback to position " GST_STIME_FORMAT " and rate %f.\n", GST_TIME_ARGS(seekMs), rate); - self->is_seek_completed = true; - } -} - -void audio_player_on_media_error(struct audio_player *self, GError *error, gchar *debug) { - if (!self->event_subscribed) { - return; - } - - char error_code[16] = {0}; - snprintf(error_code, sizeof(error_code), "%d", error->code); - // clang-format off - platch_send_error_event_std( - self->event_channel_name, - error_code, - error->message, - debug ? &STDSTRING(debug) : NULL - ); - // clang-format on -} - -void audio_player_on_media_state_change(struct audio_player *self, GstObject *src, GstState *old_state, GstState *new_state) { - (void) old_state; - if (src == GST_OBJECT(self->playbin)) { - LOG_DEBUG("%s: on_media_state_change(old_state=%d, new_state=%d)\n", self->player_id, *old_state, *new_state); - if (*new_state == GST_STATE_READY) { - // Need to set to pause state, in order to make player functional - GstStateChangeReturn ret = gst_element_set_state(self->playbin, GST_STATE_PAUSED); - if (ret == GST_STATE_CHANGE_FAILURE) { - LOG_ERROR("Unable to set the pipeline to the paused state.\n"); - } - - self->is_initialized = false; - } else if (*old_state == GST_STATE_PAUSED && *new_state == GST_STATE_PLAYING) { - audio_player_on_position_update(self); - audio_player_on_duration_update(self); - } else if (*new_state >= GST_STATE_PAUSED) { - if (!self->is_initialized) { - self->is_initialized = true; - audio_player_on_prepared(self, true); - if (self->is_playing) { - audio_player_resume(self); - } - } - } else if (self->is_initialized) { - self->is_initialized = false; - } - } -} - -void audio_player_on_prepared(struct audio_player *self, bool value) { - if (!self->event_subscribed) { - return; - } - - // clang-format off - platch_send_success_event_std( - self->event_channel_name, - &STDMAP2( - STDSTRING("event"), STDSTRING("audio.onPrepared"), - 
STDSTRING("value"), STDBOOL(value) - ) - ); - // clang-format on -} - -void audio_player_on_position_update(struct audio_player *self) { - if (!self->event_subscribed) { - return; - } - - // clang-format off - platch_send_success_event_std( - self->event_channel_name, - &STDMAP2( - STDSTRING("event"), STDSTRING("audio.onCurrentPosition"), - STDSTRING("value"), STDINT64(audio_player_get_position(self)) - ) - ); - // clang-format on -} - -void audio_player_on_duration_update(struct audio_player *self) { - if (!self->event_subscribed) { - return; - } - // clang-format off - platch_send_success_event_std( - self->event_channel_name, - &STDMAP2( - STDSTRING("event"), STDSTRING("audio.onDuration"), - STDSTRING("value"), STDINT64(audio_player_get_duration(self)) - ) - ); - // clang-format on -} -void audio_player_on_seek_completed(struct audio_player *self) { - audio_player_on_position_update(self); - - if (self->event_subscribed) { - // clang-format off - platch_send_success_event_std( - self->event_channel_name, - &STDMAP2( - STDSTRING("event"), STDSTRING("audio.onSeekComplete"), - STDSTRING("value"), STDBOOL(true) - ) - ); - // clang-format on - } - self->is_seek_completed = true; -} -void audio_player_on_playback_ended(struct audio_player *self) { - if (self->event_subscribed) { - // clang-format off - platch_send_success_event_std( - self->event_channel_name, - &STDMAP2( - STDSTRING("event"), STDSTRING("audio.onComplete"), - STDSTRING("value"), STDBOOL(true) - ) - ); - // clang-format on - } - - if (audio_player_get_looping(self)) { - audio_player_play(self); - } else { - audio_player_pause(self); - audio_player_set_position(self, 0); - } -} - -void audio_player_set_looping(struct audio_player *self, bool is_looping) { - self->is_looping = is_looping; -} - -bool audio_player_get_looping(struct audio_player *self) { - return self->is_looping; -} - -void audio_player_play(struct audio_player *self) { - audio_player_set_position(self, 0); - audio_player_resume(self); -} 
- -void audio_player_pause(struct audio_player *self) { - self->is_playing = false; - - if (!self->is_initialized) { - return; - } - - GstStateChangeReturn ret = gst_element_set_state(self->playbin, GST_STATE_PAUSED); - if (ret == GST_STATE_CHANGE_FAILURE) { - LOG_ERROR("Unable to set the pipeline to the paused state.\n"); - return; - } - audio_player_on_position_update(self); // Update to exact position when pausing -} - -void audio_player_resume(struct audio_player *self) { - self->is_playing = true; - if (!self->is_initialized) { - return; - } - - GstStateChangeReturn ret = gst_element_set_state(self->playbin, GST_STATE_PLAYING); - if (ret == GST_STATE_CHANGE_FAILURE) { - LOG_ERROR("Unable to set the pipeline to the playing state.\n"); - return; - } - audio_player_on_position_update(self); - audio_player_on_duration_update(self); -} - -void audio_player_destroy(struct audio_player *self) { - if (self->is_initialized) { - audio_player_pause(self); - } - - if (self->source) { - gst_object_unref(GST_OBJECT(self->source)); - self->source = NULL; - } - - gst_object_unref(self->bus); - self->bus = NULL; - - if (self->panorama != NULL) { - gst_element_set_state(self->audiobin, GST_STATE_NULL); - - gst_element_remove_pad(self->audiobin, self->panoramaSinkPad); - gst_bin_remove(GST_BIN(self->audiobin), self->audiosink); - gst_bin_remove(GST_BIN(self->audiobin), self->panorama); - - self->panorama = NULL; - self->audiosink = NULL; - self->panoramaSinkPad = NULL; - self->audiobin = NULL; - } - - gst_element_set_state(self->playbin, GST_STATE_NULL); - gst_object_unref(self->playbin); - self->playbin = NULL; - - self->is_initialized = false; - - if (self->url != NULL) { - free(self->url); - self->url = NULL; - } - - if (self->player_id != NULL) { - free(self->player_id); - self->player_id = NULL; - } - - if (self->event_channel_name != NULL) { - free(self->event_channel_name); - self->event_channel_name = NULL;; - } - - free(self); -} - -int64_t 
audio_player_get_position(struct audio_player *self) { - gint64 current = 0; - if (!gst_element_query_position(self->playbin, GST_FORMAT_TIME, ¤t)) { - LOG_ERROR("Could not query current position.\n"); - return 0; - } - return current / 1000000; -} - -int64_t audio_player_get_duration(struct audio_player *self) { - gint64 duration = 0; - if (!gst_element_query_duration(self->playbin, GST_FORMAT_TIME, &duration)) { - LOG_ERROR("Could not query current duration.\n"); - return 0; - } - return duration / 1000000; -} - -void audio_player_set_volume(struct audio_player *self, double volume) { - if (volume > 1) { - volume = 1; - } else if (volume < 0) { - volume = 0; - } - g_object_set(G_OBJECT(self->playbin), "volume", volume, NULL); -} - -void audio_player_set_balance(struct audio_player *self, double balance) { - if (!self->panorama) { - return; - } - - if (balance > 1.0l) { - balance = 1.0l; - } else if (balance < -1.0l) { - balance = -1.0l; - } - g_object_set(G_OBJECT(self->panorama), "panorama", balance, NULL); -} - -void audio_player_set_playback_rate(struct audio_player *self, double rate) { - audio_player_set_playback(self, audio_player_get_position(self), rate); -} - -void audio_player_set_position(struct audio_player *self, int64_t position) { - if (!self->is_initialized) { - return; - } - audio_player_set_playback(self, position, self->playback_rate); -} - -void audio_player_set_source_url(struct audio_player *self, char *url) { - ASSERT_NOT_NULL(url); - if (self->url == NULL || !streq(self->url, url)) { - LOG_DEBUG("%s: set source=%s\n", self->player_id, url); - if (self->url != NULL) { - free(self->url); - self->url = NULL; - } - self->url = strdup(url); - gst_element_set_state(self->playbin, GST_STATE_NULL); - self->is_initialized = false; - self->is_playing = false; - - if (strlen(self->url) != 0) { - g_object_set(self->playbin, "uri", self->url, NULL); - if (self->playbin->current_state != GST_STATE_READY) { - if (gst_element_set_state(self->playbin, 
GST_STATE_READY) == GST_STATE_CHANGE_FAILURE) { - //This should not happen generally - LOG_ERROR("Could not set player into ready state.\n"); - } - } - } - } else { - audio_player_on_prepared(self, true); - } -} - -bool audio_player_is_id(struct audio_player *self, char *player_id) { - return streq(self->player_id, player_id); -} - -const char* audio_player_subscribe_channel_name(const struct audio_player *self) { - return self->event_channel_name; -} - -bool audio_player_set_subscription_status(struct audio_player *self, const char *channel, bool value) { - if (strcmp(self->event_channel_name, channel) == 0) { - self->event_subscribed = value; - return true; - } else { - return false; - } -} - -void audio_player_release(struct audio_player *self) { - self->is_initialized = false; - self->is_playing = false; - if (self->url != NULL) { - free(self->url); - self->url = NULL; - } - - GstState playbinState; - gst_element_get_state(self->playbin, &playbinState, NULL, GST_CLOCK_TIME_NONE); - - if (playbinState > GST_STATE_NULL) { - gst_element_set_state(self->playbin, GST_STATE_NULL); - } -} diff --git a/src/plugins/audioplayers/plugin.c b/src/plugins/audioplayers/plugin.c index 2f136f1a..16c3d7fe 100644 --- a/src/plugins/audioplayers/plugin.c +++ b/src/plugins/audioplayers/plugin.c @@ -1,333 +1,1064 @@ #define _GNU_SOURCE +#include +#include +#include + +#include +#include + +#include "flutter_embedder.h" +#include "util/asserts.h" +#include "util/macros.h" + #include "flutter-pi.h" #include "platformchannel.h" #include "pluginregistry.h" -#include "plugins/audioplayers.h" +#include "notifier_listener.h" + #include "util/collection.h" #include "util/list.h" #include "util/logging.h" +#include "util/khash.h" +#include "plugins/gstplayer.h" #define AUDIOPLAYERS_LOCAL_CHANNEL "xyz.luan/audioplayers" #define AUDIOPLAYERS_GLOBAL_CHANNEL "xyz.luan/audioplayers.global" -static struct audio_player *audioplayers_linux_plugin_get_player(char *player_id, char *mode); -static void 
audioplayers_linux_plugin_dispose_player(struct audio_player *player); +#define STR_LINK_TROUBLESHOOTING \ + "https://github.com/bluefireteam/audioplayers/blob/main/troubleshooting.md" + +KHASH_MAP_INIT_STR(audioplayers, struct gstplayer *) + +struct audioplayer_meta { + char *id; + char *event_channel; + bool subscribed; + bool release_on_stop; -struct audio_player_entry { - struct list_head entry; - struct audio_player *player; + struct listener *duration_listener; + struct listener *eos_listener; + struct listener *error_listener; }; -static struct plugin { +struct plugin { struct flutterpi *flutterpi; bool initialized; - struct list_head players; -} plugin; + khash_t(audioplayers) players; +}; -static int on_local_method_call(char *channel, struct platch_obj *object, FlutterPlatformMessageResponseHandle *responsehandle) { - struct audio_player *player; - struct std_value *args, *tmp; - const char *method; - char *player_id, *mode; - struct std_value result = STDNULL; - int ok; +static const char *player_get_id(struct gstplayer *player) { + struct audioplayer_meta *meta = gstplayer_get_userdata(player); + + return meta->id; +} + +#define LOG_AUDIOPLAYER_DEBUG(player, fmtstring, ...) LOG_DEBUG("audio player \"%s\": " fmtstring, player_get_id(player), ##__VA_ARGS__) +#define LOG_AUDIOPLAYER_ERROR(player, fmtstring, ...) 
LOG_ERROR("audio player \"%s\": " fmtstring, player_get_id(player), ##__VA_ARGS__) + +static void on_receive_event_ch(void *userdata, const FlutterPlatformMessage *message); + +static void respond_plugin_error_ext(const FlutterPlatformMessageResponseHandle *response_handle, const char *message, struct std_value *details) { + platch_respond_error_std(response_handle, "LinuxAudioError", (char*) message, details); +} + +static void respond_plugin_error(const FlutterPlatformMessageResponseHandle *response_handle, const char *message) { + respond_plugin_error_ext(response_handle, message, NULL); +} + +static bool ensure_gstreamer_initialized(struct plugin *plugin, const FlutterPlatformMessageResponseHandle *responsehandle) { + if (plugin->initialized) { + return true; + } + + GError *error; + gboolean success = gst_init_check(NULL, NULL, &error); + if (success) { + plugin->initialized = true; + return true; + } + + char *details = NULL; + int status = asprintf(&details, "%s (Domain: %s, Code: %d)", error->message, g_quark_to_string(error->domain), error->code); + if (status == -1) { + // ENOMEM; + return false; + } + + // clang-format off + respond_plugin_error_ext( + responsehandle, + "Failed to initialize gstreamer.", + &STDSTRING(details) + ); + // clang-format on + + free(details); + + return false; +} + +static struct gstplayer *get_player_by_id(struct plugin *plugin, const char *id) { + khint_t index = kh_get_audioplayers(&plugin->players, id); + if (index == kh_end(&plugin->players)) { + return NULL; + } - (void) responsehandle; - (void) channel; - method = object->method; - args = &object->std_arg; + return kh_value(&plugin->players, index); +} - LOG_DEBUG("call(method=%s)\n", method); +static const struct raw_std_value *get_player_id_from_arg(const struct raw_std_value *arg, const FlutterPlatformMessageResponseHandle *responsehandle) { + if (!raw_std_value_is_map(arg)) { + platch_respond_illegal_arg_std(responsehandle, "Expected `arg` to be a map."); + return 
NULL; + } - if (args == NULL || !STDVALUE_IS_MAP(*args)) { - return platch_respond_illegal_arg_std(responsehandle, "Expected `arg` to be a map."); + const struct raw_std_value *player_id = raw_std_map_find_str(arg, "playerId"); + if (player_id == NULL || !raw_std_value_is_string(player_id)) { + platch_respond_illegal_arg_std(responsehandle, "Expected `arg['playerId']` to be a string."); + return NULL; } - tmp = stdmap_get_str(&object->std_arg, "playerId"); - if (tmp == NULL || !STDVALUE_IS_STRING(*tmp)) { - LOG_ERROR("Call missing mandatory parameter player_id.\n"); - return platch_respond_illegal_arg_std(responsehandle, "Expected `arg['playerId'] to be a string."); + return player_id; +} + +static struct gstplayer *get_player_from_arg(struct plugin *plugin, const struct raw_std_value *arg, const FlutterPlatformMessageResponseHandle *responsehandle) { + const struct raw_std_value *id = get_player_id_from_arg(arg, responsehandle); + if (id == NULL) { + return NULL; + } + + char *id_duped = raw_std_string_dup(id); + if (id_duped == NULL) { + return NULL; + } + + struct gstplayer *player = get_player_by_id(plugin, id_duped); + + free(id_duped); + + if (player == NULL) { + platch_respond_illegal_arg_std(responsehandle, "Expected `arg['playerId']` to be a valid player id."); + return NULL; } - player_id = STDVALUE_AS_STRING(*tmp); - tmp = stdmap_get_str(args, "mode"); - if (tmp == NULL) { - mode = ""; - } else if (STDVALUE_IS_STRING(*tmp)) { - mode = STDVALUE_AS_STRING(*tmp); + + return player; +} + +static void send_error_event(struct audioplayer_meta *meta, GError *error) { + if (!meta->subscribed) { + return; + } + + gchar* message; + if (error->domain == GST_STREAM_ERROR || + error->domain == GST_RESOURCE_ERROR) { + message = + "Failed to set source. For troubleshooting, " + "see: " STR_LINK_TROUBLESHOOTING; } else { - return platch_respond_illegal_arg_std(responsehandle, "Expected `arg['mode']` to be a string or null."); + message = "Unknown GstGError. 
See details."; + } + + char *details = NULL; + int status = asprintf(&details, "%s (Domain: %s, Code: %d)", error->message, g_quark_to_string(error->domain), error->code); + if (status == -1) { + // ENOMEM; + return; + } + + // clang-format off + platch_send_error_event_std( + meta->event_channel, + "LinuxAudioError", + message, + &STDSTRING(details) + ); + // clang-format on + + free(details); +} + +static void send_prepared_event(struct audioplayer_meta *meta, bool prepared) { + if (!meta->subscribed) { + return; + } + + // clang-format off + platch_send_success_event_std( + meta->event_channel, + &STDMAP2( + STDSTRING("event"), STDSTRING("audio.onPrepared"), + STDSTRING("value"), STDBOOL(prepared) + ) + ); + // clang-format on +} + +static void send_duration_update(struct audioplayer_meta *meta, bool has_duration, int64_t duration_ms) { + if (!meta->subscribed) { + return; + } + + if (!has_duration) { + // TODO: Check the behaviour in upstream audioplayers + return; + } + + // clang-format off + platch_send_success_event_std( + meta->event_channel, + &STDMAP2( + STDSTRING("event"), STDSTRING("audio.onDuration"), + STDSTRING("value"), STDINT64(duration_ms) + ) + ); + // clang-format on +} + +static void send_seek_completed(struct audioplayer_meta *meta) { + if (!meta->subscribed) { + return; + } + + // clang-format off + platch_send_success_event_std( + meta->event_channel, + &STDMAP1( + STDSTRING("event"), STDSTRING("audio.onSeekComplete") + ) + ); + // clang-format on +} + +static void send_playback_complete(struct audioplayer_meta *meta) { + if (!meta->subscribed) { + return; + } + + // clang-format off + platch_send_success_event_std( + meta->event_channel, + &STDMAP1( + STDSTRING("event"), STDSTRING("audio.onComplete") + ) + ); + // clang-format on +} + +UNUSED static void send_player_log(struct audioplayer_meta *meta, const char *message) { + if (!meta->subscribed) { + return; + } + + // clang-format off + platch_send_success_event_std( + 
meta->event_channel, + &STDMAP2( + STDSTRING("event"), STDSTRING("audio.onLog"), + STDSTRING("value"), STDSTRING((char*) message) + ) + ); + // clang-format on +} + +static void on_create(struct plugin *p, const struct raw_std_value *arg, const FlutterPlatformMessageResponseHandle *responsehandle) { + const struct raw_std_value *player_id = get_player_id_from_arg(arg, responsehandle); + if (!player_id) { + return; + } + + if (!ensure_gstreamer_initialized(p, responsehandle)) { + return; + } + + struct audioplayer_meta *meta = calloc(1, sizeof(struct audioplayer_meta)); + if (meta == NULL) { + platch_respond_native_error_std(responsehandle, ENOMEM); + return; } - player = audioplayers_linux_plugin_get_player(player_id, mode); + meta->id = raw_std_string_dup(player_id); + if (meta->id == NULL) { + platch_respond_native_error_std(responsehandle, ENOMEM); + return; + } + + LOG_DEBUG("create(id: \"%s\")\n", meta->id); + + int status = 0; + khint_t index = kh_put(audioplayers, &p->players, meta->id, &status); + if (status == -1) { + free(meta->id); + free(meta); + platch_respond_native_error_std(responsehandle, ENOMEM); + return; + } else if (status == 0) { + free(meta->id); + free(meta); + + platch_respond_illegal_arg_std(responsehandle, "Player with given id already exists."); + return; + } + + status = asprintf(&meta->event_channel, "xyz.luan/audioplayers/events/%s", meta->id); + if (status == -1) { + kh_del(audioplayers, &p->players, index); + free(meta->id); + free(meta); + + platch_respond_native_error_std(responsehandle, ENOMEM); + return; + } + + struct gstplayer *player = gstplayer_new( + p->flutterpi, + NULL, + meta, + /* play_video */ false, /* play_audio */ true, + NULL + ); if (player == NULL) { - return platch_respond_native_error_std(responsehandle, ENOMEM); - } - - if (streq(method, "create")) { - //audioplayers_linux_plugin_get_player() creates player if it doesn't exist - } else if (streq(method, "pause")) { - audio_player_pause(player); - } else if 
(streq(method, "resume")) { - audio_player_resume(player); - } else if (streq(method, "stop")) { - audio_player_pause(player); - audio_player_set_position(player, 0); - } else if (streq(method, "release")) { - audio_player_release(player); - } else if (streq(method, "seek")) { - tmp = stdmap_get_str(args, "position"); - if (tmp == NULL || !STDVALUE_IS_INT(*tmp)) { - return platch_respond_illegal_arg_std(responsehandle, "Expected `arg['position']` to be an int."); - } + free(meta->event_channel); + kh_del(audioplayers, &p->players, index); + free(meta->id); + free(meta); - int64_t position = STDVALUE_AS_INT(*tmp); - audio_player_set_position(player, position); - } else if (streq(method, "setSourceUrl")) { - tmp = stdmap_get_str(args, "url"); - if (tmp == NULL || !STDVALUE_IS_STRING(*tmp)) { - return platch_respond_illegal_arg_std(responsehandle, "Expected `arg['url']` to be a string."); - } - char *url = STDVALUE_AS_STRING(*tmp); + platch_respond_error_std(responsehandle, "not-initialized", "Could not initialize gstplayer.", NULL); + return; + } - tmp = stdmap_get_str(args, "isLocal"); - if (tmp == NULL || !STDVALUE_IS_BOOL(*tmp)) { - return platch_respond_illegal_arg_std(responsehandle, "Expected `arg['isLocal']` to be a bool."); - } + gstplayer_set_userdata(player, meta); - bool is_local = STDVALUE_AS_BOOL(*tmp); - if (is_local) { - char *local_url = NULL; - ok = asprintf(&local_url, "file://%s", url); - if (ok < 0) { - return platch_respond_native_error_std(responsehandle, ENOMEM); - } - url = local_url; - } + plugin_registry_set_receiver_v2( + flutterpi_get_plugin_registry(flutterpi), + meta->event_channel, + on_receive_event_ch, + player + ); - audio_player_set_source_url(player, url); - } else if (streq(method, "getDuration")) { - result = STDINT64(audio_player_get_duration(player)); - } else if (streq(method, "setVolume")) { - tmp = stdmap_get_str(args, "volume"); - if (tmp != NULL && STDVALUE_IS_FLOAT(*tmp)) { - audio_player_set_volume(player, 
STDVALUE_AS_FLOAT(*tmp)); - } else { - return platch_respond_illegal_arg_std(responsehandle, "Expected `arg['volume']` to be a float."); - } - } else if (streq(method, "getCurrentPosition")) { - result = STDINT64(audio_player_get_position(player)); - } else if (streq(method, "setPlaybackRate")) { - tmp = stdmap_get_str(args, "playbackRate"); - if (tmp != NULL && STDVALUE_IS_FLOAT(*tmp)) { - audio_player_set_playback_rate(player, STDVALUE_AS_FLOAT(*tmp)); - } else { - return platch_respond_illegal_arg_std(responsehandle, "Expected `arg['playbackRate']` to be a float."); - } - } else if (streq(method, "setReleaseMode")) { - tmp = stdmap_get_str(args, "releaseMode"); - if (tmp != NULL && STDVALUE_IS_STRING(*tmp)) { - char *release_mode = STDVALUE_AS_STRING(*tmp); - bool looping = strstr(release_mode, "loop") != NULL; - audio_player_set_looping(player, looping); - } else { - return platch_respond_illegal_arg_std(responsehandle, "Expected `arg['releaseMode']` to be a string."); - } - } else if (streq(method, "setPlayerMode")) { - // TODO check support for low latency mode: - // https://gstreamer.freedesktop.org/documentation/additional/design/latency.html?gi-language=c - } else if (strcmp(method, "setBalance") == 0) { - tmp = stdmap_get_str(args, "balance"); - if (tmp != NULL && STDVALUE_IS_FLOAT(*tmp)) { - audio_player_set_balance(player, STDVALUE_AS_FLOAT(*tmp)); - } else { - return platch_respond_illegal_arg_std(responsehandle, "Expected `arg['balance']` to be a float."); + kh_value(&p->players, index) = player; + + platch_respond_success_std(responsehandle, NULL); +} + +static void on_pause(struct plugin *p, const struct raw_std_value *arg, const FlutterPlatformMessageResponseHandle *responsehandle) { + struct gstplayer *player = get_player_from_arg(p, arg, responsehandle); + if (player == NULL) { + return; + } + + LOG_AUDIOPLAYER_DEBUG(player, "pause()\n"); + + gstplayer_pause(player); + + platch_respond_success_std(responsehandle, NULL); +} + +static void 
on_resume(struct plugin *p, const struct raw_std_value *arg, const FlutterPlatformMessageResponseHandle *responsehandle) { + struct gstplayer *player = get_player_from_arg(p, arg, responsehandle); + if (player == NULL) { + return; + } + + LOG_AUDIOPLAYER_DEBUG(player, "resume()\n"); + + /// TODO: Should resume behave different to play? + gstplayer_play(player); + + platch_respond_success_std(responsehandle, NULL); +} + +static void on_stop(struct plugin *p, const struct raw_std_value *arg, const FlutterPlatformMessageResponseHandle *responsehandle) { + struct gstplayer *player = get_player_from_arg(p, arg, responsehandle); + if (player == NULL) { + return; + } + + LOG_AUDIOPLAYER_DEBUG(player, "stop()\n"); + + /// TODO: Maybe provide gstplayer_stop + int err = gstplayer_pause(player); + if (err != 0) { + platch_respond_success_std(responsehandle, NULL); + return; + } + + err = gstplayer_seek_to(player, 0, /* nearest_keyframe */ false); + if (err != 0) { + platch_respond_success_std(responsehandle, NULL); + return; + } + + platch_respond_success_std(responsehandle, NULL); +} + +static void on_release(struct plugin *p, const struct raw_std_value *arg, const FlutterPlatformMessageResponseHandle *responsehandle) { + struct gstplayer *player = get_player_from_arg(p, arg, responsehandle); + if (player == NULL) { + return; + } + + LOG_AUDIOPLAYER_DEBUG(player, "release()\n"); + + gstplayer_set_source(player, NULL); + + platch_respond_success_std(responsehandle, NULL); +} + +static void on_seek(struct plugin *p, const struct raw_std_value *arg, const FlutterPlatformMessageResponseHandle *responsehandle) { + struct gstplayer *player = get_player_from_arg(p, arg, responsehandle); + if (player == NULL) { + return; + } + + const struct raw_std_value *position = raw_std_map_find_str(arg, "position"); + if (position == NULL || !raw_std_value_is_int(position)) { + platch_respond_illegal_arg_std(responsehandle, "Expected `arg['position'] to be an int."); + return; + } + + int64_t 
position_int = raw_std_value_as_int(position); + + LOG_AUDIOPLAYER_DEBUG(player, "seek(position_ms: %"PRIi64")\n", position_int); + + gstplayer_seek_with_completer( + player, + position_int, + /* nearest_keyframe */ false, + (struct async_completer) { + .on_done = (void_callback_t) send_seek_completed, + .on_error = NULL, + .userdata = gstplayer_get_userdata(player) } - } else if (strcmp(method, "emitLog") == 0) { - tmp = stdmap_get_str(args, "message"); - char *message; - - if (tmp == NULL) { - message = ""; - } else if (STDVALUE_IS_STRING(*tmp)) { - message = STDVALUE_AS_STRING(*tmp); - } else { - return platch_respond_illegal_arg_std(responsehandle, "Expected `arg['message']` to be a string."); + ); + + platch_respond_success_std(responsehandle, NULL); +} + +static void on_set_source_url_complete(void *userdata) { + struct audioplayer_meta *meta = userdata; + + send_prepared_event(meta, true); +} + +static void on_set_source_url(struct plugin *p, const struct raw_std_value *arg, const FlutterPlatformMessageResponseHandle *responsehandle) { + struct gstplayer *player = get_player_from_arg(p, arg, responsehandle); + if (player == NULL) { + return; + } + + const struct raw_std_value *src_url = raw_std_map_find_str(arg, "url"); + if (src_url == NULL || !raw_std_value_is_string(src_url)) { + platch_respond_illegal_arg_std(responsehandle, "Expected `arg['url']` to be a string."); + return; + } + + const struct raw_std_value *is_local = raw_std_map_find_str(arg, "isLocal"); + if (src_url != NULL && !raw_std_value_is_null(is_local) && !raw_std_value_is_bool(is_local)) { + platch_respond_illegal_arg_std(responsehandle, "Expected `arg['isLocal']` to be a bool or null."); + return; + } + + const struct raw_std_value *mime_type = raw_std_map_find_str(arg, "mimeType"); + if (mime_type != NULL && !raw_std_value_is_null(mime_type) && !raw_std_value_is_string(mime_type)) { + platch_respond_illegal_arg_std(responsehandle, "Expected `arg['mimeType']` to be a bool or null."); + 
return; + } + + char *src_url_duped = raw_std_string_dup(src_url); + if (!src_url_duped) return; + + LOG_AUDIOPLAYER_DEBUG(player, "set_source_url(url: \"%s\")\n", src_url_duped); + + // audioplayers attempts to use file paths (e.g. /tmp/abcd) as source URIs. + // detect that and constrcut a proper url from it. + if (src_url_duped[0] == '/') { + free(src_url_duped); + + int result = asprintf( + &src_url_duped, + "file://%.*s", + (int) raw_std_string_get_length(src_url), + raw_std_string_get_nonzero_terminated(src_url) + ); + if (result < 0) { + return; } + } - // Avoid unused variable compile message if debugging is disabled. - (void) message; - - LOG_DEBUG("%s\n", message); - //TODO: https://github.com/bluefireteam/audioplayers/blob/main/packages/audioplayers_linux/linux/audio_player.cc#L247 - } else if (strcmp(method, "emitError") == 0) { - tmp = stdmap_get_str(args, "code"); - char *code; - - if (tmp == NULL) { - code = ""; - } else if (STDVALUE_IS_STRING(*tmp)) { - code = STDVALUE_AS_STRING(*tmp); - } else { - return platch_respond_illegal_arg_std(responsehandle, "Expected `arg['code']` to be a string."); + bool ok = gstplayer_set_source_with_completer( + player, + src_url_duped, + (struct async_completer) { + .on_done = on_set_source_url_complete, + .userdata = gstplayer_get_userdata(player) } + ); - tmp = stdmap_get_str(args, "message"); - char *message; + free(src_url_duped); - if (tmp == NULL) { - message = ""; - } else if (STDVALUE_IS_STRING(*tmp)) { - message = STDVALUE_AS_STRING(*tmp); - } else { - return platch_respond_illegal_arg_std(responsehandle, "Expected `arg['message']` to be a string."); - } + if (!ok) { + respond_plugin_error(responsehandle, "Could not preroll pipeline."); + return; + } - LOG_ERROR("Error: %s; message=%s\n", code, message); - //TODO: https://github.com/bluefireteam/audioplayers/blob/main/packages/audioplayers_linux/linux/audio_player.cc#L144 - } else if (strcmp(method, "dispose") == 0) { - 
audioplayers_linux_plugin_dispose_player(player); - player = NULL; - } else { - return platch_respond_not_implemented(responsehandle); + platch_respond_success_std(responsehandle, NULL); +} + +static void on_get_duration(struct plugin *p, const struct raw_std_value *arg, const FlutterPlatformMessageResponseHandle *responsehandle) { + struct gstplayer *player = get_player_from_arg(p, arg, responsehandle); + if (player == NULL) { + return; + } + + LOG_AUDIOPLAYER_DEBUG(player, "get_duration()\n"); + + int64_t duration_ms = gstplayer_get_duration(player); + if (duration_ms == -1) { + platch_respond_success_std(responsehandle, NULL); + return; } - return platch_respond_success_std(responsehandle, &result); + platch_respond_success_std(responsehandle, &STDINT64(duration_ms)); } -static int on_global_method_call(char *channel, struct platch_obj *object, FlutterPlatformMessageResponseHandle *responsehandle) { - (void) responsehandle; - (void) channel; - (void) object; +static void on_set_volume(struct plugin *p, const struct raw_std_value *arg, const FlutterPlatformMessageResponseHandle *responsehandle) { + struct gstplayer *player = get_player_from_arg(p, arg, responsehandle); + if (player == NULL) { + return; + } + + const struct raw_std_value *volume = raw_std_map_find_str(arg, "volume"); + if (volume == NULL || !raw_std_value_is_float64(volume)) { + platch_respond_illegal_arg_std(responsehandle, "Expected `arg['volume'] to be a double."); + return; + } + + double volume_float = raw_std_value_as_float64(volume); + + LOG_AUDIOPLAYER_DEBUG(player, "set_volume(volume: %f)\n", volume_float); - return platch_respond_success_std(responsehandle, &STDBOOL(true)); + gstplayer_set_volume(player, volume_float); + + platch_respond_success_std(responsehandle, NULL); } -static int on_receive_event_ch(char *channel, struct platch_obj *object, FlutterPlatformMessageResponseHandle *responsehandle) { - if (strcmp(object->method, "listen") == 0) { - LOG_DEBUG("%s: listen()\n", channel); 
+static void on_get_position(struct plugin *p, const struct raw_std_value *arg, const FlutterPlatformMessageResponseHandle *responsehandle) { + struct gstplayer *player = get_player_from_arg(p, arg, responsehandle); + if (player == NULL) { + return; + } - list_for_each_entry_safe(struct audio_player_entry, entry, &plugin.players, entry) { - if (audio_player_set_subscription_status(entry->player, channel, true)) { - return platch_respond_success_std(responsehandle, NULL); - } - } + int64_t position = gstplayer_get_position(player); + if (position < 0) { + platch_respond_success_std(responsehandle, &STDNULL); + return; + } - LOG_ERROR("%s: player not found\n", channel); - return platch_respond_not_implemented(responsehandle); - } else if (strcmp(object->method, "cancel") == 0) { - LOG_DEBUG("%s: cancel()\n", channel); + platch_respond_success_std(responsehandle, &STDINT64(position)); +} - list_for_each_entry_safe(struct audio_player_entry, entry, &plugin.players, entry) { - if (audio_player_set_subscription_status(entry->player, channel, false)) { - return platch_respond_success_std(responsehandle, NULL); - } - } +static void on_set_playback_rate(struct plugin *p, const struct raw_std_value *arg, const FlutterPlatformMessageResponseHandle *responsehandle) { + struct gstplayer *player = get_player_from_arg(p, arg, responsehandle); + if (player == NULL) { + return; + } + + const struct raw_std_value *rate = raw_std_map_find_str(arg, "playbackRate"); + if (rate == NULL || !raw_std_value_is_float64(rate)) { + platch_respond_illegal_arg_std(responsehandle, "Expected `arg['playbackRate'] to be a double."); + return; + } + + double rate_float = raw_std_value_as_float64(rate); - LOG_ERROR("%s: player not found\n", channel); - return platch_respond_not_implemented(responsehandle); + LOG_AUDIOPLAYER_DEBUG(player, "set_playback_rate(rate: %f)\n", rate_float); + + if (rate_float < 0.0) { + respond_plugin_error(responsehandle, "Backward playback is not supported.\n"); + return; + 
} else if (rate_float == 0.0) { + gstplayer_pause(player); } else { - return platch_respond_not_implemented(responsehandle); + gstplayer_set_playback_speed(player, rate_float); } - return 0; + platch_respond_success_std(responsehandle, NULL); } -enum plugin_init_result audioplayers_plugin_init(struct flutterpi *flutterpi, void **userdata_out) { - int ok; +static void on_set_release_mode(struct plugin *p, const struct raw_std_value *arg, const FlutterPlatformMessageResponseHandle *responsehandle) { + struct gstplayer *player = get_player_from_arg(p, arg, responsehandle); + if (player == NULL) { + return; + } - (void) userdata_out; + const struct raw_std_value *mode = raw_std_map_find_str(arg, "releaseMode"); + if (mode == NULL || !raw_std_value_is_string(mode)) { + platch_respond_illegal_arg_std(responsehandle, "Expected `arg['releaseMode'] to be a string."); + return; + } + + LOG_AUDIOPLAYER_DEBUG(player, "set_release_mode(mode: %.*s)\n", + (int) raw_std_string_get_length(mode), + raw_std_string_get_nonzero_terminated(mode) + ); - plugin.flutterpi = flutterpi; - plugin.initialized = false; - list_inithead(&plugin.players); + bool is_release = false; + bool is_loop = false; + bool is_stop = false; - ok = plugin_registry_set_receiver_locked(AUDIOPLAYERS_GLOBAL_CHANNEL, kStandardMethodCall, on_global_method_call); - if (ok != 0) { - return PLUGIN_INIT_RESULT_ERROR; + if (raw_std_string_equals(mode, "ReleaseMode.release")) { + is_release = true; + } else if (raw_std_string_equals(mode, "ReleaseMode.loop")) { + is_loop = true; + } else if (raw_std_string_equals(mode, "ReleaseMode.stop")) { + is_stop = true; + } else { + platch_respond_illegal_arg_std(responsehandle, "Expected `arg['releaseMode']` to be a string-ification of a ReleaseMode enum value."); + return; } - ok = plugin_registry_set_receiver_locked(AUDIOPLAYERS_LOCAL_CHANNEL, kStandardMethodCall, on_local_method_call); - if (ok != 0) { - goto fail_remove_global_receiver; + // TODO: Handle ReleaseMode.release & 
ReleaseMode.stop + (void) is_release; + (void) is_stop; + + int err = gstplayer_set_looping(player, is_loop, false); + if (err != 0) { + platch_respond_success_std(responsehandle, NULL); + return; } - return PLUGIN_INIT_RESULT_INITIALIZED; + platch_respond_success_std(responsehandle, NULL); +} -fail_remove_global_receiver: - plugin_registry_remove_receiver_locked(AUDIOPLAYERS_GLOBAL_CHANNEL); +static void on_set_player_mode(struct plugin *p, const struct raw_std_value *arg, const FlutterPlatformMessageResponseHandle *responsehandle) { + struct gstplayer *player = get_player_from_arg(p, arg, responsehandle); + if (player == NULL) { + return; + } - return PLUGIN_INIT_RESULT_ERROR; + const struct raw_std_value *mode = raw_std_map_find_str(arg, "playerMode"); + if (mode == NULL || !raw_std_value_is_string(mode)) { + platch_respond_illegal_arg_std(responsehandle, "Expected `arg['playerMode'] to be a string."); + return; + } + + LOG_AUDIOPLAYER_DEBUG(player, "set_player_mode(mode: %.*s)\n", + (int) raw_std_string_get_length(mode), + raw_std_string_get_nonzero_terminated(mode) + ); + + bool is_media_player = false; + bool is_low_latency = false; + + if (raw_std_string_equals(mode, "PlayerMode.mediaPlayer")) { + is_media_player = true; + } else if (raw_std_string_equals(mode, "PlayerMode.lowLatency")) { + is_low_latency = true; + } else { + platch_respond_illegal_arg_std(responsehandle, "Expected `arg['playerMode']` to be a string-ification of a PlayerMode enum value."); + return; + } + + // TODO: Handle player mode + // TODO check support for low latency mode: + // https://gstreamer.freedesktop.org/documentation/additional/design/latency.html?gi-language=c + (void) is_media_player; + (void) is_low_latency; + + platch_respond_success_std(responsehandle, NULL); } -void audioplayers_plugin_deinit(struct flutterpi *flutterpi, void *userdata) { - (void) flutterpi; - (void) userdata; +static void on_set_balance(struct plugin *p, const struct raw_std_value *arg, const 
FlutterPlatformMessageResponseHandle *responsehandle) { + struct gstplayer *player = get_player_from_arg(p, arg, responsehandle); + if (player == NULL) { + return; + } - plugin_registry_remove_receiver_locked(AUDIOPLAYERS_GLOBAL_CHANNEL); - plugin_registry_remove_receiver_locked(AUDIOPLAYERS_LOCAL_CHANNEL); + const struct raw_std_value *balance = raw_std_map_find_str(arg, "balance"); + if (balance == NULL || !raw_std_value_is_float64(balance)) { + platch_respond_illegal_arg_std(responsehandle, "Expected `arg['balance'] to be a double."); + return; + } + + double balance_float = raw_std_value_as_float64(balance); + + LOG_AUDIOPLAYER_DEBUG(player, "set_balance(balance: %f)\n", balance_float); - list_for_each_entry_safe(struct audio_player_entry, entry, &plugin.players, entry) { - audio_player_destroy(entry->player); - list_del(&entry->entry); - free(entry); + if (balance_float < -1.0) { + balance_float = -1.0; + } else if (balance_float > 1.0) { + balance_float = 1.0; } + + gstplayer_set_audio_balance(player, balance_float); + + platch_respond_success_std(responsehandle, NULL); } -static struct audio_player *audioplayers_linux_plugin_get_player(char *player_id, char *mode) { - struct audio_player_entry *entry; - struct audio_player *player; +static void on_player_emit_log(struct plugin *p, const struct raw_std_value *arg, const FlutterPlatformMessageResponseHandle *responsehandle) { + struct gstplayer *player = get_player_from_arg(p, arg, responsehandle); + if (player == NULL) { + return; + } - (void) mode; + const struct raw_std_value *message = raw_std_map_find_str(arg, "message"); + if (message == NULL || !raw_std_value_is_string(message)) { + platch_respond_illegal_arg_std(responsehandle, "Expected `arg['message'] to be a string."); + return; + } - list_for_each_entry_safe(struct audio_player_entry, entry, &plugin.players, entry) { - if (audio_player_is_id(entry->player, player_id)) { - return entry->player; - } + LOG_DEBUG("%.*s", (int) 
raw_std_string_get_length(message), raw_std_string_get_nonzero_terminated(message)); + + platch_respond_success_std(responsehandle, NULL); +} + +static void on_player_emit_error(struct plugin *p, const struct raw_std_value *arg, const FlutterPlatformMessageResponseHandle *responsehandle) { + struct gstplayer *player = get_player_from_arg(p, arg, responsehandle); + if (player == NULL) { + return; } - entry = malloc(sizeof *entry); - ASSUME(entry != NULL); + const struct raw_std_value *code = raw_std_map_find_str(arg, "code"); + if (code == NULL || !raw_std_value_is_string(code)) { + platch_respond_illegal_arg_std(responsehandle, "Expected `arg['code'] to be a string."); + return; + } - LOG_DEBUG("Create player(id=%s)\n", player_id); - player = audio_player_new(player_id, AUDIOPLAYERS_LOCAL_CHANNEL); + const struct raw_std_value *message = raw_std_map_find_str(arg, "message"); + if (message == NULL || !raw_std_value_is_string(message)) { + platch_respond_illegal_arg_std(responsehandle, "Expected `arg['message'] to be a string."); + return; + } + + LOG_ERROR( + "%.*s, %.*s", + (int) raw_std_string_get_length(code), raw_std_string_get_nonzero_terminated(code), + (int) raw_std_string_get_length(message), raw_std_string_get_nonzero_terminated(message) + ); + platch_respond_success_std(responsehandle, NULL); +} + +static void on_dispose(struct plugin *p, const struct raw_std_value *arg, const FlutterPlatformMessageResponseHandle *responsehandle) { + const struct raw_std_value *id = get_player_id_from_arg(arg, responsehandle); + if (id == NULL) { + return; + } + + struct gstplayer *player = get_player_from_arg(p, arg, responsehandle); if (player == NULL) { - LOG_ERROR("player(id=%s) cannot be created", player_id); - free(entry); - return NULL; + return; + } + + LOG_AUDIOPLAYER_DEBUG(player, "dispose()\n"); + + char *id_duped = raw_std_string_dup(id); + + khint_t index = kh_get(audioplayers, &p->players, id_duped); + + // Should be valid since we already know the player 
exists from above + assert(index <= kh_end(&p->players)); + + free(id_duped); + + // Remove the entry from the hashmap + kh_del(audioplayers, &p->players, index); + + struct audioplayer_meta *meta = gstplayer_get_userdata(player); + + plugin_registry_remove_receiver_v2(flutterpi_get_plugin_registry(p->flutterpi), meta->event_channel); + free(meta->event_channel); + free(meta->id); + free(meta); + + // Destroy the player + gstplayer_destroy(player); + + platch_respond_success_std(responsehandle, NULL); +} + +static void on_player_method_call(void *userdata, const FlutterPlatformMessage *message) { + struct plugin *plugin = userdata; + + const struct raw_std_value *envelope = raw_std_method_call_from_buffer(message->message, message->message_size); + if (!envelope) { + platch_respond_malformed_message_std(message); + return; } - const char* event_channel = audio_player_subscribe_channel_name(player); - // set a receiver on the videoEvents event channel - int ok = plugin_registry_set_receiver( - event_channel, - kStandardMethodCall, - on_receive_event_ch + const struct raw_std_value *arg = raw_std_method_call_get_arg(envelope); + ASSERT_NOT_NULL(arg); + + if (raw_std_method_call_is_method(envelope, "create")) { + on_create(plugin, arg, message->response_handle); + } else if (raw_std_method_call_is_method(envelope, "pause")) { + on_pause(plugin, arg, message->response_handle); + } else if (raw_std_method_call_is_method(envelope, "resume")) { + on_resume(plugin, arg, message->response_handle); + } else if (raw_std_method_call_is_method(envelope, "stop")) { + on_stop(plugin, arg, message->response_handle); + } else if (raw_std_method_call_is_method(envelope, "release")) { + on_release(plugin, arg, message->response_handle); + } else if (raw_std_method_call_is_method(envelope, "seek")) { + on_seek(plugin, arg, message->response_handle); + } else if (raw_std_method_call_is_method(envelope, "setSourceUrl")) { + on_set_source_url(plugin, arg, message->response_handle); + } 
else if (raw_std_method_call_is_method(envelope, "getDuration")) { + on_get_duration(plugin, arg, message->response_handle); + } else if (raw_std_method_call_is_method(envelope, "setVolume")) { + on_set_volume(plugin, arg, message->response_handle); + } else if (raw_std_method_call_is_method(envelope, "getCurrentPosition")) { + on_get_position(plugin, arg, message->response_handle); + } else if (raw_std_method_call_is_method(envelope, "setPlaybackRate")) { + on_set_playback_rate(plugin, arg, message->response_handle); + } else if (raw_std_method_call_is_method(envelope, "setReleaseMode")) { + on_set_release_mode(plugin, arg, message->response_handle); + } else if (raw_std_method_call_is_method(envelope, "setPlayerMode")) { + on_set_player_mode(plugin, arg, message->response_handle); + } else if (raw_std_method_call_is_method(envelope, "setBalance") == 0) { + on_set_balance(plugin, arg, message->response_handle); + } else if (raw_std_method_call_is_method(envelope, "emitLog") == 0) { + on_player_emit_log(plugin, arg, message->response_handle); + } else if (raw_std_method_call_is_method(envelope, "emitError") == 0) { + on_player_emit_error(plugin, arg, message->response_handle); + } else if (raw_std_method_call_is_method(envelope, "dispose") == 0) { + on_dispose(plugin, arg, message->response_handle); + } else { + platch_respond_not_implemented(message->response_handle); + } +} + +static void on_init(struct plugin *p, const struct raw_std_value *arg, const FlutterPlatformMessageResponseHandle *responsehandle) { + (void) p; + (void) arg; + platch_respond_success_std(responsehandle, NULL); +} + +static void on_set_audio_context(struct plugin *p, const struct raw_std_value *arg, const FlutterPlatformMessageResponseHandle *responsehandle) { + (void) p; + (void) arg; + platch_respond_success_std(responsehandle, NULL); +} + +static void on_emit_log( + struct plugin *p, + const struct raw_std_value *arg, + const FlutterPlatformMessageResponseHandle *responsehandle +) { + 
(void) p; + + const struct raw_std_value *message = raw_std_map_find_str(arg, "message"); + if (message == NULL || !raw_std_value_is_string(message)) { + platch_respond_illegal_arg_std(responsehandle, "Expected `arg['message'] to be a string."); + return; + } + + LOG_DEBUG("%.*s", (int) raw_std_string_get_length(message), raw_std_string_get_nonzero_terminated(message)); + + platch_respond_success_std(responsehandle, NULL); +} + +static void on_emit_error( + struct plugin *p, + const struct raw_std_value *arg, + const FlutterPlatformMessageResponseHandle *responsehandle +) { + (void) p; + + const struct raw_std_value *code = raw_std_map_find_str(arg, "code"); + if (code == NULL || !raw_std_value_is_string(code)) { + platch_respond_illegal_arg_std(responsehandle, "Expected `arg['code'] to be a string."); + return; + } + + const struct raw_std_value *message = raw_std_map_find_str(arg, "message"); + if (message == NULL || !raw_std_value_is_string(message)) { + platch_respond_illegal_arg_std(responsehandle, "Expected `arg['message'] to be a string."); + return; + } + + LOG_ERROR( + "%.*s, %.*s", + (int) raw_std_string_get_length(code), raw_std_string_get_nonzero_terminated(code), + (int) raw_std_string_get_length(message), raw_std_string_get_nonzero_terminated(message) ); - if (ok != 0) { - LOG_ERROR("Cannot set player receiver for event channel: %s\n", event_channel); - audio_player_destroy(player); - free(entry); - return NULL; + + platch_respond_success_std(responsehandle, NULL); +} + +static void on_global_method_call(void *userdata, const FlutterPlatformMessage *message) { + struct plugin *plugin = userdata; + + const struct raw_std_value *envelope = raw_std_method_call_from_buffer(message->message, message->message_size); + if (!envelope) { + platch_respond_malformed_message_std(message); + return; } - entry->entry = (struct list_head){ NULL, NULL }; - entry->player = player; + const struct raw_std_value *arg = raw_std_method_call_get_arg(envelope); + 
ASSERT_NOT_NULL(arg); - list_add(&entry->entry, &plugin.players); - return player; + if (raw_std_method_call_is_method(envelope, "init")) { + on_init(plugin, arg, message->response_handle); + } else if (raw_std_method_call_is_method(envelope, "setAudioContext")) { + on_set_audio_context(plugin, arg, message->response_handle); + } else if (raw_std_method_call_is_method(envelope, "emitLog")) { + on_emit_log(plugin, arg, message->response_handle); + } else if (raw_std_method_call_is_method(envelope, "emitError")) { + on_emit_error(plugin, arg, message->response_handle); + } else { + platch_respond_not_implemented(message->response_handle); + } +} + +static enum listener_return on_duration_notify(void *arg, void *userdata) { + ASSERT_NOT_NULL(userdata); + struct gstplayer *player = userdata; + + struct audioplayer_meta *meta = gstplayer_get_userdata(player); + ASSERT_NOT_NULL(meta); + + if (arg != NULL) { + int64_t *duration_ms = arg; + send_duration_update(meta, true, *duration_ms); + } else { + send_duration_update(meta, false, -1); + } + + return kNoAction; +} + +static enum listener_return on_eos_notify(void *arg, void *userdata) { + (void) arg; + + ASSERT_NOT_NULL(userdata); + struct gstplayer *player = userdata; + + struct audioplayer_meta *meta = gstplayer_get_userdata(player); + ASSERT_NOT_NULL(meta); + + send_playback_complete(meta); + + return kNoAction; +} + +static enum listener_return on_error_notify(void *arg, void *userdata) { + ASSERT_NOT_NULL(arg); + GError *error = arg; + + ASSERT_NOT_NULL(userdata); + struct gstplayer *player = userdata; + + struct audioplayer_meta *meta = gstplayer_get_userdata(player); + ASSERT_NOT_NULL(meta); + + send_error_event(meta, error); + + return kNoAction; } -static void audioplayers_linux_plugin_dispose_player(struct audio_player *player) { - list_for_each_entry_safe(struct audio_player_entry, entry, &plugin.players, entry) { - if (entry->player == player) { - list_del(&entry->entry); - 
plugin_registry_remove_receiver(audio_player_subscribe_channel_name(player));
- audio_player_destroy(player);
+static void on_receive_event_ch(void *userdata, const FlutterPlatformMessage *message) {
+ ASSERT_NOT_NULL(userdata);
+
+ struct gstplayer *player = userdata;
+
+ struct audioplayer_meta *meta = gstplayer_get_userdata(player);
+ ASSERT_NOT_NULL(meta);
+
+ const struct raw_std_value *envelope = raw_std_method_call_from_buffer(message->message, message->message_size);
+ if (envelope == NULL) {
+ platch_respond_malformed_message_std(message);
+ return;
+ }
+
+ /// TODO: Implement
+ if (raw_std_method_call_is_method(envelope, "listen")) {
+ platch_respond_success_std(message->response_handle, NULL);
+
+ if (!meta->subscribed) {
+ meta->subscribed = true;
+
+ meta->duration_listener = notifier_listen(gstplayer_get_duration_notifier(player), on_duration_notify, NULL, player);
+ meta->eos_listener = notifier_listen(gstplayer_get_eos_notifier(player), on_eos_notify, NULL, player);
+ meta->error_listener = notifier_listen(gstplayer_get_error_notifier(player), on_error_notify, NULL, player);
+ }
+ } else if (raw_std_method_call_is_method(envelope, "cancel")) {
+ platch_respond_success_std(message->response_handle, NULL);
+
+ if (meta->subscribed) {
+ meta->subscribed = false;
+
+ notifier_unlisten(gstplayer_get_error_notifier(player), meta->error_listener);
+ notifier_unlisten(gstplayer_get_eos_notifier(player), meta->eos_listener);
+ notifier_unlisten(gstplayer_get_duration_notifier(player), meta->duration_listener);
 }
+ } else {
+ platch_respond_not_implemented(message->response_handle);
+ }
 }
 }
+enum plugin_init_result audioplayers_plugin_init(struct flutterpi *flutterpi, void **userdata_out) {
+ int ok;
+
+ (void) userdata_out;
+
+ struct plugin *plugin = calloc(1, sizeof(struct plugin));
+ if (plugin == NULL) {
+ return PLUGIN_INIT_RESULT_ERROR;
+ }
+
+ plugin->flutterpi = flutterpi;
+ plugin->initialized = false;
+
+ ok = plugin_registry_set_receiver_v2_locked(
+ 
flutterpi_get_plugin_registry(flutterpi), + AUDIOPLAYERS_GLOBAL_CHANNEL, + on_global_method_call, + plugin + ); + if (ok != 0) { + return PLUGIN_INIT_RESULT_ERROR; + } + + ok = plugin_registry_set_receiver_v2_locked( + flutterpi_get_plugin_registry(flutterpi), + AUDIOPLAYERS_LOCAL_CHANNEL, + on_player_method_call, + plugin + ); + if (ok != 0) { + goto fail_remove_global_receiver; + } + + return PLUGIN_INIT_RESULT_INITIALIZED; + +fail_remove_global_receiver: + plugin_registry_remove_receiver_v2_locked( + flutterpi_get_plugin_registry(flutterpi), + AUDIOPLAYERS_GLOBAL_CHANNEL + ); + + return PLUGIN_INIT_RESULT_ERROR; +} + +void audioplayers_plugin_deinit(struct flutterpi *flutterpi, void *userdata) { + (void) flutterpi; + + ASSERT_NOT_NULL(userdata); + struct plugin *plugin = userdata; + + plugin_registry_remove_receiver_v2_locked(flutterpi_get_plugin_registry(flutterpi), AUDIOPLAYERS_GLOBAL_CHANNEL); + plugin_registry_remove_receiver_v2_locked(flutterpi_get_plugin_registry(flutterpi), AUDIOPLAYERS_LOCAL_CHANNEL); + + const char *id; + struct gstplayer *player; + kh_foreach(&plugin->players, id, player, { + gstplayer_destroy(player); + free((char*) id); + }) +} + FLUTTERPI_PLUGIN("audioplayers", audioplayers, audioplayers_plugin_init, audioplayers_plugin_deinit) diff --git a/src/plugins/gstplayer.c b/src/plugins/gstplayer.c new file mode 100644 index 00000000..ae792922 --- /dev/null +++ b/src/plugins/gstplayer.c @@ -0,0 +1,1637 @@ +#define _GNU_SOURCE + +#include +#include +#include +#include + +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include "flutter-pi.h" +#include "notifier_listener.h" +#include "platformchannel.h" +#include "pluginregistry.h" +#include "plugins/gstplayer.h" +#include "texture_registry.h" +#include "tracer.h" +#include "util/logging.h" +#include "util/macros.h" +#include "util/collection.h" +#include "util/asserts.h" + +#include "config.h" + 
+#ifdef HAVE_GSTREAMER_VIDEO_PLAYER + #include "gstreamer_video_player.h" +#endif + +#define LOG_PLAYER_DEBUG(player, fmtstring, ...) LOG_DEBUG("gstplayer-%"PRIi64": " fmtstring, player->debug_id, ##__VA_ARGS__) +#ifdef DEBUG + #define LOG_PLAYER_ERROR(player, fmtstring, ...) LOG_ERROR("gstplayer-%"PRIi64": " fmtstring, player->debug_id, ##__VA_ARGS__) +#else + #define LOG_PLAYER_ERROR(player, fmtstring, ...) LOG_ERROR(fmtstring, ##__VA_ARGS__) +#endif + +#define LOG_GST_SET_STATE_ERROR(player, _element) \ + LOG_PLAYER_ERROR( \ + player, \ + "setting gstreamer playback state failed. gst_element_set_state(element name: %s): GST_STATE_CHANGE_FAILURE\n", \ + GST_ELEMENT_NAME(_element) \ + ) + +#define LOG_GST_GET_STATE_ERROR(player, _element) \ + LOG_PLAYER_ERROR( \ + player, \ + "last gstreamer state change failed. gst_element_get_state(element name: %s): GST_STATE_CHANGE_FAILURE\n", \ + GST_ELEMENT_NAME(_element) \ + ) + +struct incomplete_video_info { + bool has_resolution; + bool has_fps; + bool has_duration; + bool has_seeking_info; + struct video_info info; +}; + +enum playpause_state { kPaused, kPlaying, kStepping }; + +enum playback_direction { kForward, kBackward }; + +#define PLAYPAUSE_STATE_AS_STRING(playpause_state) \ + ((playpause_state) == kPaused ? "paused" : \ + (playpause_state) == kPlaying ? "playing" : \ + (playpause_state) == kStepping ? "stepping" : \ + "?") + + +#ifdef DEBUG +static int64_t allocate_id() { + static atomic_int_fast64_t next_id = 1; + + return atomic_fetch_add_explicit(&next_id, 1, memory_order_relaxed); +} +#endif +struct gstplayer { +#ifdef DEBUG + int64_t debug_id; +#endif + + struct flutterpi *flutterpi; + + void *userdata; + + /** + * @brief The desired playback rate that should be used when @ref playpause_state is kPlayingForward. (should be > 0) + * + */ + double playback_rate_forward; + + /** + * @brief The desired playback rate that should be used when @ref playpause_state is kPlayingBackward. 
(should be < 0) + * + */ + double playback_rate_backward; + + /** + * @brief True if the video should seemlessly start from the beginning once the end is reached. + * + */ + bool looping; + + /** + * @brief True if the looping should use gapless looping using either the about-to-finish callback + * from playbin or segments. + * + * Configured in gstplayer_set_looping + */ + bool gapless_looping; + + /** + * @brief The desired playback state. Either paused, playing, or single-frame stepping. + * + */ + enum playpause_state playpause_state; + + /** + * @brief The desired playback direction. + * + */ + enum playback_direction direction; + + /** + * @brief The actual, currently used playback rate. + * + */ + double current_playback_rate; + + /** + * @brief The position reported if gstreamer position queries fail (for example, because gstreamer is currently + * seeking to a new position. In that case, fallback_position_ms will be the seeking target position, so we report the + * new position while we're seeking to it) + */ + int64_t fallback_position_ms; + + /** + * @brief True if there's a position that apply_playback_state should seek to. + * + */ + bool has_desired_position; + + /** + * @brief True if gstplayer should seek to the nearest keyframe instead, which is a bit faster. + * + */ + bool do_fast_seeking; + + /** + * @brief The position, if any, that apply_playback_state should seek to. + * + */ + int64_t desired_position_ms; + + struct notifier video_info_notifier, buffering_state_notifier, error_notifier; + struct notifier duration_notifier, seeking_info_notifier; + struct notifier eos_notifier; + + bool has_sent_info; + struct incomplete_video_info info; + + bool has_duration; + int64_t duration; + + bool has_seeking_info; + struct seeking_info seeking_info; + + /** + * The flutter texture that this video player is pushing frames to. 
+ */ + struct texture *texture; + + sd_event_source *busfd_events; + + /** + * The gstreamer playbin, which gstplayer is just + * a wrapper around, basically. + */ + GstElement *playbin; + + /** + * The gstreamer audiopanorama element, used as the "audio-filter" + * if audio playback is enabled, and used to change the audio + * left/right balance. + */ + GstElement *audiopanorama; + + /** + * True if we're playing back a live source, + * e.g. a live stream + */ + bool is_live; + + /** + * Callbacks to be called on ASYNC_DONE gstreamer messages. + * + * ASYNC_DONE messages indicate completion of an async state + * change or a flushing seek. + */ + size_t n_async_completers; + struct async_completer completers[8]; + + /** + * @brief Use the playbin "uri" property and "about-to-finish" signal + * to achieve gapless looping, if looping is desired. + * + * It's a bit unclear whether this is worse or equally as good as + * using segments; so segment looping is preferred for now. + * + * However, segments are not always super reliable (e.g. playbin3 + * segment looping is broken in gstreamer < 1.22.9), so the playbin + * method is kept intact still as a backup. + */ + bool playbin_gapless; + + /** + * @brief Use segments to do gapless looping, if looping is desired. + * + * (Instead of e.g. seeking back to start on EOS, or setting the + * playbin uri property in about-to-finish) + */ + bool segment_gapless; + + /** + * The source uri this gstplayer should play back. + * + * Mostly used to as the argument to `g_object_set(p->playbin, "uri", ...)` + * in on_about_to_finish, as querying the current source uri from the playbin + * is not always reliable. + */ + char *uri; + + /** + * True if we did already issue a flushing seek + * with GST_SEEK_FLAG_SEGMENT. + * + * A flushing seek with GST_SEEK_FLAG_SEGMENT has to be + * issued to start gapless looping. 
+ */ + bool did_configure_segment; +}; + +static struct async_completer pop_completer(struct gstplayer *player) { + ASSERT(player->n_async_completers > 0); + + struct async_completer completer = player->completers[0]; + + player->n_async_completers--; + if (player->n_async_completers > 0) { + memmove(player->completers + 0, player->completers + 1, player->n_async_completers * sizeof(struct async_completer)); + } + + return completer; +} + +static void on_async_done_message(struct gstplayer *player) { + if (player->n_async_completers > 0) { + struct async_completer completer = pop_completer(player); + + if (completer.on_done) { + completer.on_done(completer.userdata); + } + } +} + +static void on_async_error(struct gstplayer *player, GError *error) { + if (player->n_async_completers > 0) { + struct async_completer completer = pop_completer(player); + + if (completer.on_error) { + completer.on_error(completer.userdata, error); + } + } +} + +static int maybe_send_video_info(struct gstplayer *player) { + struct video_info *duped; + + if (player->info.has_resolution && player->info.has_fps && player->info.has_duration && player->info.has_seeking_info) { + // we didn't send the info yet but we have complete video info now. + // send it! + duped = memdup(&(player->info.info), sizeof(player->info.info)); + if (duped == NULL) { + return ENOMEM; + } + + notifier_notify(&player->video_info_notifier, duped); + } + + return 0; +} + +static void fetch_duration(struct gstplayer *player) { + gboolean ok; + int64_t duration; + + ok = gst_element_query_duration(player->playbin, GST_FORMAT_TIME, &duration); + if (ok == FALSE) { + if (player->is_live) { + player->info.info.duration_ms = INT64_MAX; + player->info.has_duration = true; + + player->has_duration = true; + player->duration = INT64_MAX; + return; + } else { + LOG_PLAYER_ERROR(player, "Could not fetch duration. 
(gst_element_query_duration)\n"); + return; + } + } + + player->info.info.duration_ms = GST_TIME_AS_MSECONDS(duration); + player->info.has_duration = true; + + player->duration = GST_TIME_AS_MSECONDS(duration); + player->has_duration = true; +} + +static void fetch_seeking(struct gstplayer *player) { + GstQuery *seeking_query; + gboolean ok, seekable; + int64_t seek_begin, seek_end; + + seeking_query = gst_query_new_seeking(GST_FORMAT_TIME); + ok = gst_element_query(player->playbin, seeking_query); + if (ok == FALSE) { + if (player->is_live) { + player->info.info.can_seek = false; + player->info.info.seek_begin_ms = 0; + player->info.info.seek_end_ms = 0; + player->info.has_seeking_info = true; + + player->seeking_info.can_seek = false; + player->seeking_info.seek_begin_ms = 0; + player->seeking_info.seek_end_ms = 0; + player->has_seeking_info = true; + return; + } else { + LOG_PLAYER_DEBUG(player, "Could not query seeking info. (gst_element_query)\n"); + return; + } + } + + gst_query_parse_seeking(seeking_query, NULL, &seekable, &seek_begin, &seek_end); + + gst_query_unref(seeking_query); + + player->info.info.can_seek = seekable; + player->info.info.seek_begin_ms = GST_TIME_AS_MSECONDS(seek_begin); + player->info.info.seek_end_ms = GST_TIME_AS_MSECONDS(seek_end); + player->info.has_seeking_info = true; + + player->seeking_info.can_seek = seekable; + player->seeking_info.seek_begin_ms = GST_TIME_AS_MSECONDS(seek_begin); + player->seeking_info.seek_end_ms = GST_TIME_AS_MSECONDS(seek_end); + player->has_seeking_info = true; +} + +static void update_buffering_state(struct gstplayer *player, GstObject *element) { + struct buffering_state *state; + GstBufferingMode mode; + GstQuery *query; + gboolean ok, busy; + int64_t start, stop, buffering_left; + int n_ranges, percent, avg_in, avg_out; + + query = gst_query_new_buffering(GST_FORMAT_TIME); + ok = gst_element_query(GST_ELEMENT(element), query); + if (ok == FALSE) { + LOG_PLAYER_DEBUG(player, "Could not query precise 
buffering state.\n"); + goto fail_unref_query; + } + + gst_query_parse_buffering_percent(query, &busy, &percent); + gst_query_parse_buffering_stats(query, &mode, &avg_in, &avg_out, &buffering_left); + + n_ranges = (int) gst_query_get_n_buffering_ranges(query); + + state = malloc(sizeof(*state) + n_ranges * sizeof(struct buffering_range)); + if (state == NULL) { + goto fail_unref_query; + } + + for (int i = 0; i < n_ranges; i++) { + ok = gst_query_parse_nth_buffering_range(query, (unsigned int) i, &start, &stop); + if (ok == FALSE) { + LOG_ERROR("Could not parse %dth buffering range from buffering state. (gst_query_parse_nth_buffering_range)\n", i); + goto fail_free_state; + } + + state->ranges[i].start_ms = GST_TIME_AS_MSECONDS(start); + state->ranges[i].stop_ms = GST_TIME_AS_MSECONDS(stop); + } + + gst_query_unref(query); + + state->percent = percent; + state->mode = + (mode == GST_BUFFERING_STREAM ? BUFFERING_MODE_STREAM : + mode == GST_BUFFERING_DOWNLOAD ? BUFFERING_MODE_DOWNLOAD : + mode == GST_BUFFERING_TIMESHIFT ? BUFFERING_MODE_TIMESHIFT : + mode == GST_BUFFERING_LIVE ? BUFFERING_MODE_LIVE : + (assert(0), BUFFERING_MODE_STREAM)); + state->avg_in = avg_in; + state->avg_out = avg_out; + state->time_left_ms = buffering_left; + state->n_ranges = n_ranges; + + notifier_notify(&player->buffering_state_notifier, state); + return; + +fail_free_state: + free(state); + +fail_unref_query: + gst_query_unref(query); +} + +static int apply_playback_state(struct gstplayer *player) { + GstStateChangeReturn ok; + GstState desired_state, current_state, pending_state; + double desired_rate; + int64_t position; + + ok = gst_element_get_state(player->playbin, ¤t_state, &pending_state, 0); + if (ok == GST_STATE_CHANGE_FAILURE) { + LOG_PLAYER_DEBUG( + player, + "last gstreamer pipeline state change failed. 
gst_element_get_state(element name: %s): GST_STATE_CHANGE_FAILURE\n", + GST_ELEMENT_NAME(player->playbin) + ); + return EIO; + } + + if (current_state == GST_STATE_NULL) { + // We don't have a playback source right now. + // Don't do anything. + return 0; + } + + desired_state = player->playpause_state == kPlaying ? GST_STATE_PLAYING : GST_STATE_PAUSED; /* use GST_STATE_PAUSED if we're stepping */ + + /// Use 1.0 if we're stepping, otherwise use the stored playback rate for the current direction. + if (player->playpause_state == kStepping) { + desired_rate = player->direction == kForward ? 1.0 : -1.0; + } else { + desired_rate = player->direction == kForward ? player->playback_rate_forward : player->playback_rate_backward; + } + + bool is_segment_looping = player->looping && player->gapless_looping && player->segment_gapless; + if (player->current_playback_rate != desired_rate || player->has_desired_position || (player->did_configure_segment != is_segment_looping)) { + if (player->has_desired_position) { + position = player->desired_position_ms * GST_MSECOND; + } else { + ok = gst_element_query_position(GST_ELEMENT(player->playbin), GST_FORMAT_TIME, &position); + if (ok == FALSE) { + LOG_PLAYER_ERROR(player, "Could not get the current playback position to apply the playback speed.\n"); + return EIO; + } + } + + GstSeekFlags seek_flags = GST_SEEK_FLAG_FLUSH; + + // Only configure segment looping if we actually + // are segment looping, because it will + // swallow the end-of-stream events apparently. 
+ if (is_segment_looping) { + seek_flags |= GST_SEEK_FLAG_SEGMENT; + } + + if (player->do_fast_seeking) { + seek_flags |= GST_SEEK_FLAG_KEY_UNIT | GST_SEEK_FLAG_SNAP_NEAREST; + } else { + seek_flags |= GST_SEEK_FLAG_ACCURATE; + } + + if (player->direction == kForward) { + LOG_PLAYER_DEBUG( + player, + "gst_element_seek(..., rate: %f, start: %" GST_TIME_FORMAT ", end: %" GST_TIME_FORMAT ", ...)\n", + desired_rate, + GST_TIME_ARGS(position), + GST_TIME_ARGS(GST_CLOCK_TIME_NONE) + ); + + ok = gst_element_seek( + GST_ELEMENT(player->playbin), + desired_rate, + GST_FORMAT_TIME, + seek_flags, + GST_SEEK_TYPE_SET, position, + GST_SEEK_TYPE_SET, GST_CLOCK_TIME_NONE + ); + if (ok == FALSE) { + LOG_PLAYER_ERROR( + player, + "Could not set the new playback speed / playback position (speed: %f, pos: %" GST_TIME_FORMAT ").\n", + desired_rate, + GST_TIME_ARGS(position) + ); + return EIO; + } + } else { + LOG_PLAYER_DEBUG( + player, + "gst_element_seek(..., rate: %f, start: %" GST_TIME_FORMAT ", end: %" GST_TIME_FORMAT ", ...)\n", + desired_rate, + GST_TIME_ARGS(0), + GST_TIME_ARGS(position) + ); + ok = gst_element_seek( + GST_ELEMENT(player->playbin), + desired_rate, + GST_FORMAT_TIME, + seek_flags, + GST_SEEK_TYPE_SET, 0, + GST_SEEK_TYPE_SET, position + ); + + if (ok == FALSE) { + LOG_PLAYER_ERROR( + player, + "Could not set the new playback speed / playback position (speed: %f, pos: %" GST_TIME_FORMAT ").\n", + desired_rate, + GST_TIME_ARGS(position) + ); + return EIO; + } + } + + player->current_playback_rate = desired_rate; + player->fallback_position_ms = GST_TIME_AS_MSECONDS(position); + player->has_desired_position = false; + player->did_configure_segment = is_segment_looping; + } + + if (pending_state == GST_STATE_VOID_PENDING) { + if (current_state == desired_state) { + // we're already in the desired state, and we're also not changing it + // no need to do anything. 
+ LOG_PLAYER_DEBUG( + player, + "apply_playback_state(playing: %s): already in desired state and none pending\n", + PLAYPAUSE_STATE_AS_STRING(player->playpause_state) + ); + return 0; + } + + LOG_PLAYER_DEBUG( + player, + "apply_playback_state(playing: %s): setting state to %s\n", + PLAYPAUSE_STATE_AS_STRING(player->playpause_state), + gst_element_state_get_name(desired_state) + ); + + ok = gst_element_set_state(player->playbin, desired_state); + + if (ok == GST_STATE_CHANGE_FAILURE) { + LOG_GST_SET_STATE_ERROR(player, player->playbin); + return EIO; + } + } else if (pending_state != desired_state) { + // queue to be executed when pending async state change completes + /// TODO: Implement properly + + LOG_PLAYER_DEBUG( + player, + "apply_playback_state(playing: %s): async state change in progress, setting state to %s\n", + PLAYPAUSE_STATE_AS_STRING(player->playpause_state), + gst_element_state_get_name(desired_state) + ); + + ok = gst_element_set_state(player->playbin, desired_state); + if (ok == GST_STATE_CHANGE_FAILURE) { + LOG_GST_SET_STATE_ERROR(player, player->playbin); + return EIO; + } + } + return 0; +} + +static void on_eos_message(struct gstplayer *player, GstMessage *msg) { + if (GST_MESSAGE_SRC(msg) == GST_OBJECT(player->playbin)) { + if (player->looping) { + LOG_PLAYER_DEBUG(player, "playbin end of stream, seeking back to start (flushing)\n"); + player->desired_position_ms = 0; + player->has_desired_position = true; + apply_playback_state(player); + } else { + LOG_PLAYER_DEBUG(player, "playbin end of stream\n"); + notifier_notify(&player->eos_notifier, NULL); + } + } else { + LOG_PLAYER_DEBUG(player, "end of stream for element: %s\n", GST_MESSAGE_SRC_NAME(msg)); + } +} + +static void on_gstreamer_error_message(struct gstplayer *player, GstMessage *msg) { + (void) player; + + GError *error; + gchar *debug_info; + + gst_message_parse_error(msg, &error, &debug_info); + + LOG_PLAYER_ERROR( + player, + "gstreamer error: code: %d, domain: %s, msg: %s (debug 
info: %s)\n", + error->code, + g_quark_to_string(error->domain), + error->message, + debug_info + ); + + on_async_error(player, error); + + notifier_notify(&player->error_notifier, error); + + g_clear_error(&error); + g_free(debug_info); +} + +static void on_gstreamer_warning_message(struct gstplayer *player, GstMessage *msg) { + (void) player; + + GError *error; + gchar *debug_info; + + gst_message_parse_warning(msg, &error, &debug_info); + + LOG_PLAYER_ERROR( + player, + "gstreamer warning: code: %d, domain: %s, msg: %s (debug info: %s)\n", + error->code, + g_quark_to_string(error->domain), + error->message, + debug_info + ); + g_clear_error(&error); + g_free(debug_info); +} + +static void on_gstreamer_info_message(struct gstplayer *player, GstMessage *msg) { + GError *error; + gchar *debug_info; + + gst_message_parse_info(msg, &error, &debug_info); + + LOG_PLAYER_DEBUG(player, "gstreamer info: %s (debug info: %s)\n", error->message, debug_info); + g_clear_error(&error); + g_free(debug_info); +} + +static void on_buffering_message(struct gstplayer *player, GstMessage *msg) { + GstBufferingMode mode; + int64_t buffering_left; + int percent, avg_in, avg_out; + + gst_message_parse_buffering(msg, &percent); + gst_message_parse_buffering_stats(msg, &mode, &avg_in, &avg_out, &buffering_left); + + if (percent == 0 || percent == 100) { + LOG_PLAYER_DEBUG( + player, + "buffering, src: %s, percent: %d, mode: %s, avg in: %d B/s, avg out: %d B/s, %" GST_TIME_FORMAT "\n", + GST_MESSAGE_SRC_NAME(msg), + percent, + mode == GST_BUFFERING_STREAM ? "stream" : + mode == GST_BUFFERING_DOWNLOAD ? "download" : + mode == GST_BUFFERING_TIMESHIFT ? "timeshift" : + mode == GST_BUFFERING_LIVE ? "live" : + "?", + avg_in, + avg_out, + GST_TIME_ARGS(buffering_left * GST_MSECOND) + ); + } + + /// TODO: GST_MESSAGE_BUFFERING is only emitted when we actually need to wait on some buffering till we can resume the playback. 
+ /// However, the info we send to the callback also contains information on the buffered video ranges.
+ /// That information is constantly changing, but we only notify the player about it when we actively wait for the buffer to be filled.
+ update_buffering_state(player, GST_MESSAGE_SRC(msg));
+}
+
+static void on_state_changed_message(struct gstplayer *player, GstMessage *msg) {
+ GstState old, current, pending;
+
+ gst_message_parse_state_changed(msg, &old, &current, &pending);
+
+ if (GST_MESSAGE_SRC(msg) == GST_OBJECT(player->playbin)) {
+ LOG_PLAYER_DEBUG(
+ player,
+ "playbin state changed: old: %s, current: %s, pending: %s\n",
+ gst_element_state_get_name(old),
+ gst_element_state_get_name(current),
+ gst_element_state_get_name(pending)
+ );
+
+ if (current == GST_STATE_READY || current == GST_STATE_NULL) {
+ if (player->has_duration) {
+ player->has_duration = false;
+ notifier_notify(&player->duration_notifier, NULL);
+ }
+
+ player->info.has_duration = false;
+
+ player->has_seeking_info = false;
+ player->info.has_seeking_info = false;
+
+ player->did_configure_segment = false;
+ } else if ((current == GST_STATE_PAUSED || current == GST_STATE_PLAYING) && (old == GST_STATE_READY || old == GST_STATE_NULL)) {
+ // it's our pipeline that changed to either playing / paused, and we don't have info about our video duration yet.
+ // get that info now.
+ // technically we can already fetch the duration when the decodebin changed to PAUSED state. 
+ + if (!player->has_duration) { + fetch_duration(player); + + if (player->has_duration) { + int64_t *duped = memdup(&player->duration, sizeof(int64_t)); + + notifier_notify(&player->duration_notifier, duped); + } + } + + if (!player->has_seeking_info) { + fetch_seeking(player); + + if (player->has_seeking_info) { + struct seeking_info *duped = memdup(&player->seeking_info, sizeof(struct seeking_info)); + + notifier_notify(&player->seeking_info_notifier, duped); + } + } + + maybe_send_video_info(player); + } + } +} + +static void on_segment_start_message(struct gstplayer *player, GstMessage *msg) { + GstFormat format; + gint64 position; + gst_message_parse_segment_start(msg, &format, &position); + + if (format == GST_FORMAT_TIME) { + LOG_PLAYER_DEBUG( + player, + "segment start. src: %s, position: %" GST_TIME_FORMAT "\n", + GST_MESSAGE_SRC_NAME(msg), + GST_TIME_ARGS(position) + ); + } else { + LOG_PLAYER_DEBUG( + player, + "segment start. src: %s, position: %" PRId64 " (%s)\n", + GST_MESSAGE_SRC_NAME(msg), + position, + gst_format_get_name(format) + ); + } +} + +static void on_segment_done_message(struct gstplayer *player, GstMessage *msg) { + (void) msg; + + if (player->looping && player->gapless_looping && player->segment_gapless) { + LOG_PLAYER_DEBUG(player, "Segment done. 
Seeking back to segment start (segment, non-flushing)\n"); + gboolean ok = gst_element_seek( + player->playbin, + player->current_playback_rate, + GST_FORMAT_TIME, + GST_SEEK_FLAG_SEGMENT, + GST_SEEK_TYPE_SET, 0, + GST_SEEK_TYPE_SET, GST_CLOCK_TIME_NONE + ); + if (!ok) { + LOG_PLAYER_DEBUG(player, "Could not seek back to segment start.\n"); + } + } +} + +static void on_duration_changed_message(struct gstplayer *player, GstMessage *msg) { + (void) msg; + + if (!player->has_duration) { + fetch_duration(player); + + if (player->has_duration) { + int64_t *duped = memdup(&player->duration, sizeof(int64_t)); + + notifier_notify(&player->duration_notifier, duped); + } + } + + if (!player->has_seeking_info) { + fetch_seeking(player); + + if (player->has_seeking_info) { + struct seeking_info *duped = memdup(&player->seeking_info, sizeof(struct seeking_info)); + + notifier_notify(&player->seeking_info_notifier, duped); + } + } + + maybe_send_video_info(player); +} + +static void on_about_to_finish_message(struct gstplayer *player) { + if (player->looping && player->uri && player->playbin_gapless) { + LOG_PLAYER_DEBUG(player, "Got about-to-finish signal, configuring next playback item\n"); + g_object_set(player->playbin, "uri", player->uri, NULL); + } else { + LOG_PLAYER_DEBUG(player, "Got about-to-finish signal\n"); + } +} + +static void on_application_message(struct gstplayer *player, GstMessage *msg) { + if (gst_message_has_name(msg, "appsink-eos")) { + // unhandled + } else if (gst_message_has_name(msg, "video-info")) { + const GstStructure *structure = gst_message_get_structure(msg); + + const GValue *value = gst_structure_get_value(structure, "info"); + assert(G_VALUE_HOLDS_POINTER(value)); + + GstVideoInfo *info = g_value_get_pointer(value); + + player->info.info.width = GST_VIDEO_INFO_WIDTH(info); + player->info.info.height = GST_VIDEO_INFO_HEIGHT(info); + player->info.info.fps = (double) GST_VIDEO_INFO_FPS_N(info) / GST_VIDEO_INFO_FPS_D(info); + 
player->info.has_resolution = true; + player->info.has_fps = true; + + gst_video_info_free(info); + + LOG_PLAYER_DEBUG(player, "Determined resolution: %d x %d and framerate: %f\n", player->info.info.width, player->info.info.height, player->info.info.fps); + } else if (gst_message_has_name(msg, "about-to-finish")) { + on_about_to_finish_message(player); + } +} + +static void start_async(struct gstplayer *player, struct async_completer completer) { + ASSERT(player->n_async_completers < ARRAY_SIZE(player->completers)); + + player->completers[player->n_async_completers++] = completer; +} + +static void on_bus_message(struct gstplayer *player, GstMessage *msg) { + switch (GST_MESSAGE_TYPE(msg)) { + case GST_MESSAGE_EOS: + on_eos_message(player, msg); + break; + + case GST_MESSAGE_ERROR: + on_gstreamer_error_message(player, msg); + break; + + case GST_MESSAGE_WARNING: + on_gstreamer_warning_message(player, msg); + break; + + case GST_MESSAGE_INFO: + on_gstreamer_info_message(player, msg); + break; + + case GST_MESSAGE_TAG: { + if (0) { + GstTagList *tags; + gst_message_parse_tag(msg, &tags); + + char *str = gst_tag_list_to_string(tags); + + LOG_PLAYER_DEBUG(player, "%s found tags: %s\n", GST_MESSAGE_SRC_NAME(msg), str); + + free(str); + } + break; + } + + case GST_MESSAGE_BUFFERING: + on_buffering_message(player, msg); + break; + + case GST_MESSAGE_STATE_CHANGED: + on_state_changed_message(player, msg); + break; + + case GST_MESSAGE_APPLICATION: + on_application_message(player, msg); + break; + + case GST_MESSAGE_SEGMENT_START: + on_segment_start_message(player, msg); + break; + + case GST_MESSAGE_SEGMENT_DONE: + on_segment_done_message(player, msg); + break; + + case GST_MESSAGE_DURATION_CHANGED: + on_duration_changed_message(player, msg); + break; + + case GST_MESSAGE_LATENCY: + LOG_PLAYER_DEBUG(player, "redistributing latency\n"); + gst_bin_recalculate_latency(GST_BIN(player->playbin)); + break; + + case GST_MESSAGE_ASYNC_DONE: + on_async_done_message(player); + 
break;
+
+ case GST_MESSAGE_REQUEST_STATE: {
+ GstState requested;
+
+ gst_message_parse_request_state(msg, &requested);
+ gst_element_set_state(GST_ELEMENT(player->playbin), requested);
+ break;
+ }
+
+ case GST_MESSAGE_QOS: {
+ if (0) {
+ gboolean live = false;
+ uint64_t running_time = 0;
+ uint64_t stream_time = 0;
+ uint64_t timestamp = 0;
+ uint64_t duration = 0;
+
+ GstFormat format = GST_FORMAT_DEFAULT;
+ uint64_t processed = 0;
+ uint64_t dropped = 0;
+
+ int64_t jitter = 0;
+ double proportion = 1.0;
+ int quality = 0;
+
+ gst_message_parse_qos(msg, &live, &running_time, &stream_time, &timestamp, &duration);
+ gst_message_parse_qos_stats(msg, &format, &processed, &dropped);
+ gst_message_parse_qos_values(msg, &jitter, &proportion, &quality);
+
+ LOG_PLAYER_DEBUG(
+ player,
+ "Quality of Service: %s\n"
+ " live: %s\n"
+ " running time: %" GST_TIME_FORMAT "\n"
+ " stream time: %" GST_TIME_FORMAT "\n"
+ " timestamp: %" GST_TIME_FORMAT "\n"
+ " duration: %" GST_TIME_FORMAT "\n"
+ " processed: %" PRIu64 " (%s)\n"
+ " dropped: %" PRIu64 " (%s)\n"
+ " jitter: %" PRId64 "\n"
+ " proportion: %f\n"
+ " quality: %d\n",
+ GST_MESSAGE_SRC_NAME(msg),
+ live ? 
"yes" : "no", + GST_TIME_ARGS(running_time), + GST_TIME_ARGS(stream_time), + GST_TIME_ARGS(timestamp), + GST_TIME_ARGS(duration), + processed, gst_format_get_name(format), + dropped, gst_format_get_name(format), + jitter, + proportion, + quality + ); + } + break; + } + + default: + if (0) { + LOG_PLAYER_DEBUG(player, "gstreamer message: %s, src: %s\n", GST_MESSAGE_TYPE_NAME(msg), GST_MESSAGE_SRC_NAME(msg)); + } + + break; + } + return; +} + +static int on_bus_fd_ready(sd_event_source *s, int fd, uint32_t revents, void *userdata) { + (void) s; + (void) fd; + (void) revents; + + struct gstplayer *player = userdata; + + GstMessage *msg = gst_bus_pop(gst_element_get_bus(player->playbin)); + if (msg != NULL) { + on_bus_message(player, msg); + gst_message_unref(msg); + } + + return 0; +} + +void on_source_setup(GstElement *playbin, GstElement *source, gpointer userdata) { + (void) playbin; + + ASSERT_NOT_NULL(userdata); + + if (g_object_class_find_property(G_OBJECT_GET_CLASS(source), "extra-headers") != NULL) { + g_object_set(source, "extra-headers", (GstStructure *) userdata, NULL); + } else { + LOG_ERROR("Failed to set custom HTTP headers because gstreamer source element has no 'extra-headers' property.\n"); + } +} + +/** + * See: https://gitlab.freedesktop.org/gstreamer/gstreamer/-/blob/main/subprojects/gst-plugins-base/gst/playback/gstplay-enum.h + */ +typedef enum { + GST_PLAY_FLAG_VIDEO = (1 << 0), + GST_PLAY_FLAG_AUDIO = (1 << 1), + GST_PLAY_FLAG_TEXT = (1 << 2) +} GstPlayFlags; + +UNUSED static void on_element_setup(GstElement *playbin, GstElement *element, gpointer userdata) { + (void) playbin; + (void) userdata; + + GstElementFactory *factory = gst_element_get_factory(element); + if (factory == NULL) { + return; + } + + const char *factory_name = gst_plugin_feature_get_name(factory); + + if (g_str_has_prefix(factory_name, "v4l2video") && g_str_has_suffix(factory_name, "dec")) { + gst_util_set_object_arg(G_OBJECT(element), "capture-io-mode", "dmabuf"); + 
LOG_DEBUG("Applied capture-io-mode = dmabuf\n"); + } +} + +static void on_about_to_finish(GstElement *playbin, gpointer userdata) { + (void) userdata; + + GstBus *bus = gst_element_get_bus(playbin); + if (bus == NULL) { + LOG_ERROR("Could not acquire bus to post about-to-finish message.\n"); + return; + } + + GstStructure *s = gst_structure_new_empty("about-to-finish"); + if (s == NULL) { + LOG_ERROR("Could not create about-to-finish gst structure.\n"); + gst_object_unref(bus); + return; + } + + GstMessage *msg = gst_message_new_application(GST_OBJECT(playbin), s); + if (msg == NULL) { + LOG_ERROR("Could not create about-to-finish gst message.\n"); + gst_structure_free(s); + gst_object_unref(bus); + return; + } + + gboolean ok = gst_bus_post(bus, msg); + if (ok != TRUE) { + LOG_ERROR("Could not notify player about about-to-finish signal.\n"); + } + + gst_object_unref(bus); +} + +UNUSED static GstPadProbeReturn on_video_sink_event(GstPad *pad, GstPadProbeInfo *info, gpointer userdata) { + GstBus *bus = userdata; + + (void) pad; + + GstEvent *event = gst_pad_probe_info_get_event(info); + if (event == NULL) { + return GST_PAD_PROBE_OK; + } + + if (GST_EVENT_TYPE(event) != GST_EVENT_CAPS) { + return GST_PAD_PROBE_OK; + } + + GstCaps *caps = NULL; + gst_event_parse_caps(event, &caps); + + if (!caps) { + LOG_ERROR("Could not parse caps event.\n"); + return GST_PAD_PROBE_OK; + } + + GstVideoInfo *videoinfo = gst_video_info_new(); + ASSUME(videoinfo != NULL); + + if (!gst_video_info_from_caps(videoinfo, caps)) { + LOG_ERROR("Could not determine video properties of caps event.\n"); + return GST_PAD_PROBE_OK; + } + + GValue v = G_VALUE_INIT; + g_value_init(&v, G_TYPE_POINTER); + g_value_set_pointer(&v, videoinfo); + + GstStructure *msg_structure = gst_structure_new_empty("video-info"); + gst_structure_set_value(msg_structure, "info", &v); + + gst_bus_post(bus, gst_message_new_application(GST_OBJECT(pad), msg_structure)); + + // We're just interested in the caps event. 
+ // Once we have that, we can unlisten. + return GST_PAD_PROBE_REMOVE; +} + +struct gstplayer *gstplayer_new(struct flutterpi *flutterpi, const char *uri, void *userdata, bool play_video, bool play_audio, GstStructure *headers) { + ASSERT_NOT_NULL(flutterpi); + + struct gstplayer *p = calloc(1, sizeof(struct gstplayer)); + if (p == NULL) { + return NULL; + } + +#ifdef DEBUG + p->debug_id = allocate_id(); +#endif + p->userdata = userdata; + p->current_playback_rate = 1.0; + p->playback_rate_forward = 1.0; + p->playback_rate_backward = 1.0; + + // Gapless looping is configured in the gstplayer_set_looping call. + // + // Without gapless looping, we'll just seek back to start on EOS, + // which always works. + p->gapless_looping = false; + + // Gapless looping using playbin "about-to-finish" is unreliable + // in audio playback. + // + // E.g., using the audioplayers example and looping the first ("coin") + // sound, switching to the second sound will first play the second sound, + // then play part of the first sound at higher pitch, and then loop the + // second sound. + // + // Also, it seems like the playbin recreates all the elements & decoders, + // so it's not super resource-saving either. + p->playbin_gapless = false; + + // Segment gapless looping works mostly fine, but is also + // not completely reliable. + // + // E.g., looping the second ("laser") sound of the audioplayers + // example will play back 1-2 seconds of noise after + // the laser sound, then play the laser sound, then noise, etc. + // + // Segment looping does not work with playbin3 in gstreamer + // < 1.22.9 because of a bug in multiqueue. 
+ p->segment_gapless = true; + + value_notifier_init(&p->video_info_notifier, NULL, free); + value_notifier_init(&p->duration_notifier, NULL, free); + value_notifier_init(&p->seeking_info_notifier, NULL, free); + value_notifier_init(&p->buffering_state_notifier, NULL, free); + change_notifier_init(&p->error_notifier); + change_notifier_init(&p->eos_notifier); + + // playbin is more reliable for now than playbin3 (see above) + p->playbin = gst_element_factory_make("playbin", "playbin"); + if (p->playbin == NULL) { + LOG_PLAYER_ERROR(p, "Couldn't create playbin instance.\n"); + goto fail_free_p; + } + + gint flags = 0; + + g_object_get(p->playbin, "flags", &flags, NULL); + + if (play_video) { + flags |= GST_PLAY_FLAG_VIDEO; + } else { + flags &= ~GST_PLAY_FLAG_VIDEO; + } + + if (play_audio) { + flags |= GST_PLAY_FLAG_AUDIO; + } else { + flags &= ~GST_PLAY_FLAG_AUDIO; + } + + flags &= ~GST_PLAY_FLAG_TEXT; + + g_object_set(p->playbin, "flags", flags, NULL); + + if (play_video) { +#ifdef HAVE_GSTREAMER_VIDEO_PLAYER + p->texture = flutterpi_create_texture(flutterpi); + if (p->texture == NULL) { + goto fail_unref_playbin; + } + + struct gl_renderer *gl_renderer = flutterpi_get_gl_renderer(flutterpi); + + GstElement *sink = flutter_gl_texture_sink_new(p->texture, gl_renderer); + if (sink == NULL) { + goto fail_destroy_texture; + } + + // playbin (playsink) takes a (sinking) reference + // on the video sink + g_object_set(p->playbin, "video-sink", sink, NULL); + + GstPad *video_sink_pad = gst_element_get_static_pad(sink, "sink"); + if (video_sink_pad == NULL) { + LOG_PLAYER_ERROR(p, "Could not acquire sink pad of video sink to wait for video configuration.\n"); + goto fail_destroy_texture; + } + + // This will send a `video-info` application message to the bus when it sees a caps event. 
+ gst_pad_add_probe(video_sink_pad, GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM, on_video_sink_event, gst_pipeline_get_bus(GST_PIPELINE(p->playbin)), NULL); + + gst_object_unref(video_sink_pad); + video_sink_pad = NULL; + + // Apply capture-io-mode: dmabuf to any v4l2 decoders. + /// TODO: This might be unnecessary / deprecated nowadays. + g_signal_connect(p->playbin, "element-setup", G_CALLBACK(on_element_setup), NULL); + + gst_object_unref(sink); +#else + (void) flutterpi; + + ASSERT_MSG(0, "Video playback with gstplayer is only supported when building with the gstreamer video player plugin."); + goto fail_unref_playbin; +#endif + } + + + if (play_audio) { + p->audiopanorama = gst_element_factory_make("audiopanorama", NULL); + if (p->audiopanorama != NULL) { + g_object_set(p->playbin, "audio-filter", p->audiopanorama, NULL); + } + } + + // Only try to configure headers if we actually have some. + if (headers != NULL && gst_structure_n_fields(headers) > 0) { + g_signal_connect(p->playbin, "source-setup", G_CALLBACK(on_source_setup), headers); + } + + g_signal_connect(p->playbin, "about-to-finish", G_CALLBACK(on_about_to_finish), NULL); + + // Listen to the bus + GstBus *bus = gst_pipeline_get_bus(GST_PIPELINE(p->playbin)); + ASSERT_NOT_NULL(bus); + + GPollFD fd; + gst_bus_get_pollfd(bus, &fd); + + flutterpi_sd_event_add_io(&p->busfd_events, fd.fd, EPOLLIN, on_bus_fd_ready, p); + + gst_object_unref(bus); + + // If we have a URI, preroll it. 
+ if (uri != NULL) { + g_object_set(p->playbin, "uri", uri, NULL); + + GstStateChangeReturn status = gst_element_set_state(p->playbin, GST_STATE_PAUSED); + if (status == GST_STATE_CHANGE_NO_PREROLL) { + LOG_PLAYER_DEBUG(p, "Is live!\n"); + p->is_live = true; + } else if (status == GST_STATE_CHANGE_FAILURE) { + LOG_PLAYER_ERROR(p, "Could not set pipeline to paused state.\n"); + goto fail_rm_event_source; + } else { + LOG_PLAYER_DEBUG(p, "Not live!\n"); + p->is_live = false; + } + + p->uri = strdup(uri); + } + + return p; + +fail_rm_event_source: + sd_event_source_set_enabled(p->busfd_events, false); + sd_event_source_unref(p->busfd_events); + +fail_destroy_texture: UNUSED + gst_object_unref(p->playbin); + + // The flutter upload sink uses the texture internally, + // so the playbin (which contains the upload sink) must be destroyed first, + // before the texture can be destroyed. + if (play_video) { + texture_destroy(p->texture); + } + return NULL; + +fail_unref_playbin: + gst_object_unref(p->playbin); + +fail_free_p: + free(p); + return NULL; +} + +struct gstplayer *gstplayer_new_from_asset(struct flutterpi *flutterpi, const char *asset_path, const char *package_name, void *userdata) { + struct gstplayer *player; + char *uri; + int ok; + + (void) package_name; + + ok = asprintf(&uri, "file://%s/%s", flutterpi_get_asset_bundle_path(flutterpi), asset_path); + if (ok < 0) { + return NULL; + } + + player = gstplayer_new(flutterpi, uri, userdata, /* play_video */ true, /* play_audio */ true, NULL); + + free(uri); + + return player; +} + +struct gstplayer *gstplayer_new_from_network(struct flutterpi *flutterpi, const char *uri, enum format_hint format_hint, void *userdata, GstStructure *headers) { + (void) format_hint; + return gstplayer_new(flutterpi, uri, userdata, /* play_video */ true, /* play_audio */ true, headers); +} + +struct gstplayer *gstplayer_new_from_file(struct flutterpi *flutterpi, const char *uri, void *userdata) { + return gstplayer_new(flutterpi, uri, 
userdata, /* play_video */ true, /* play_audio */ true, NULL); +} + +struct gstplayer *gstplayer_new_from_content_uri(struct flutterpi *flutterpi, const char *uri, void *userdata, GstStructure *headers) { + return gstplayer_new(flutterpi, uri, userdata, /* play_video */ true, /* play_audio */ true, headers); +} + +struct gstplayer *gstplayer_new_from_pipeline(struct flutterpi *flutterpi, const char *pipeline, void *userdata) { + /// TODO: Implement + (void) flutterpi; + (void) pipeline; + (void) userdata; + return NULL; +} + +void gstplayer_destroy(struct gstplayer *player) { + LOG_PLAYER_DEBUG(player, "destroy()\n"); + notifier_deinit(&player->video_info_notifier); + notifier_deinit(&player->duration_notifier); + notifier_deinit(&player->seeking_info_notifier); + notifier_deinit(&player->buffering_state_notifier); + notifier_deinit(&player->error_notifier); + notifier_deinit(&player->eos_notifier); + gst_element_set_state(GST_ELEMENT(player->playbin), GST_STATE_READY); + gst_element_set_state(GST_ELEMENT(player->playbin), GST_STATE_NULL); + gst_object_unref(player->playbin); + if (player->texture) { + texture_destroy(player->texture); + } + free(player); +} + +int64_t gstplayer_get_texture_id(struct gstplayer *player) { + // If the player was started with play_video == false, player->texture is NULL. + return player->texture ? 
texture_get_id(player->texture) : -1; +} + +void gstplayer_set_userdata(struct gstplayer *player, void *userdata) { + player->userdata = userdata; +} + +void *gstplayer_get_userdata(struct gstplayer *player) { + return player->userdata; +} + +int gstplayer_play(struct gstplayer *player) { + LOG_PLAYER_DEBUG(player, "play()\n"); + player->playpause_state = kPlaying; + player->direction = kForward; + return apply_playback_state(player); +} + +int gstplayer_pause(struct gstplayer *player) { + LOG_PLAYER_DEBUG(player, "pause()\n"); + player->playpause_state = kPaused; + player->direction = kForward; + return apply_playback_state(player); +} + +int gstplayer_set_looping(struct gstplayer *player, bool looping, bool gapless) { + LOG_PLAYER_DEBUG(player, "set_looping(%s, gapless: %s)\n", looping ? "true" : "false", gapless ? "true" : "false"); + + if (player->playbin_gapless && gapless) { + // If we're enabling (gapless) looping, + // already configure the next playback URI, + // since we don't know if the about-to-finish callback + // has already arrived or not. + if (!player->looping && looping && player->uri) { + g_object_set(player->playbin, "uri", player->uri, NULL); + } + } + + player->looping = looping; + player->gapless_looping = gapless; + + apply_playback_state(player); + + return 0; +} + +int gstplayer_set_volume(struct gstplayer *player, double volume) { + LOG_PLAYER_DEBUG(player, "set_volume(%f)\n", volume); + g_object_set(player->playbin, "volume", (gdouble) volume, NULL); + return 0; +} + +int64_t gstplayer_get_position(struct gstplayer *player) { + GstState current, pending; + gboolean ok; + int64_t position; + + GstStateChangeReturn statechange = gst_element_get_state(GST_ELEMENT(player->playbin), ¤t, &pending, 0); + if (statechange == GST_STATE_CHANGE_FAILURE) { + LOG_GST_GET_STATE_ERROR(player, player->playbin); + return -1; + } + + if (statechange == GST_STATE_CHANGE_ASYNC) { + // we don't have position data yet. 
+ // report the latest known (or the desired) position. + return player->fallback_position_ms; + } + + ok = gst_element_query_position(player->playbin, GST_FORMAT_TIME, &position); + if (ok == FALSE) { + LOG_PLAYER_ERROR(player, "Could not query gstreamer position. (gst_element_query_position)\n"); + return 0; + } + + return GST_TIME_AS_MSECONDS(position); +} + +int64_t gstplayer_get_duration(struct gstplayer *player) { + if (!player->has_duration) { + return -1; + } else { + return player->duration; + } +} + +int gstplayer_seek_to(struct gstplayer *player, int64_t position, bool nearest_keyframe) { + LOG_PLAYER_DEBUG(player, "seek_to(%" PRId64 ")\n", position); + player->has_desired_position = true; + player->desired_position_ms = position; + player->do_fast_seeking = nearest_keyframe; + return apply_playback_state(player); +} + +int gstplayer_seek_with_completer(struct gstplayer *player, int64_t position, bool nearest_keyframe, struct async_completer completer) { + LOG_PLAYER_DEBUG(player, "seek_to(%" PRId64 ")\n", position); + player->has_desired_position = true; + player->desired_position_ms = position; + player->do_fast_seeking = nearest_keyframe; + + if (completer.on_done || completer.on_error) { + start_async(player, completer); + } + + return apply_playback_state(player); +} + +int gstplayer_set_playback_speed(struct gstplayer *player, double playback_speed) { + LOG_PLAYER_DEBUG(player, "set_playback_speed(%f)\n", playback_speed); + ASSERT_MSG(playback_speed > 0, "playback speed must be > 0."); + player->playback_rate_forward = playback_speed; + return apply_playback_state(player); +} + +int gstplayer_step_forward(struct gstplayer *player) { + gboolean gst_ok; + int ok; + + ASSERT_NOT_NULL(player); + + player->playpause_state = kStepping; + player->direction = kForward; + ok = apply_playback_state(player); + if (ok != 0) { + return ok; + } + + gst_ok = gst_element_send_event(player->playbin, gst_event_new_step(GST_FORMAT_BUFFERS, 1, 1, TRUE, FALSE)); + if 
(gst_ok == FALSE) { + LOG_PLAYER_ERROR(player, "Could not send frame-step event to pipeline. (gst_element_send_event)\n"); + return EIO; + } + return 0; +} + +int gstplayer_step_backward(struct gstplayer *player) { + gboolean gst_ok; + int ok; + + ASSERT_NOT_NULL(player); + + player->playpause_state = kStepping; + player->direction = kBackward; + ok = apply_playback_state(player); + if (ok != 0) { + return ok; + } + + gst_ok = gst_element_send_event(player->playbin, gst_event_new_step(GST_FORMAT_BUFFERS, 1, 1, TRUE, FALSE)); + if (gst_ok == FALSE) { + LOG_PLAYER_ERROR(player, "Could not send frame-step event to pipeline. (gst_element_send_event)\n"); + return EIO; + } + + return 0; +} + +void gstplayer_set_audio_balance(struct gstplayer *player, float balance) { + if (player->audiopanorama) { + g_object_set(player->audiopanorama, "panorama", (gfloat) balance, NULL); + } +} + +float gstplayer_get_audio_balance(struct gstplayer *player) { + if (player->audiopanorama) { + gfloat balance = 0.0; + g_object_get(player->audiopanorama, "panorama", &balance, NULL); + return balance; + } else { + return 0.0; + } +} + +bool gstplayer_set_source_with_completer(struct gstplayer *p, const char *uri, struct async_completer completer) { + GstStateChangeReturn result; + const char *current_uri = NULL; + + g_object_get(p->playbin, "current-uri", ¤t_uri, NULL); + + // If we're already playing back the desired uri, don't change it. + if ((current_uri == uri) || (uri && current_uri && streq(current_uri, uri))) { + if (completer.on_done) { + completer.on_done(completer.userdata); + } + + return true; + } + + p->uri = strdup(uri); + + // If the playbin supports instant-uri, use it. 
+ // if (g_object_class_find_property(G_OBJECT_GET_CLASS(p->playbin), "instant-uri")) { + // g_object_set(p->playbin, "instant-uri", TRUE, "uri", uri, NULL); + // } else { + + result = gst_element_set_state(p->playbin, GST_STATE_NULL); + if (result != GST_STATE_CHANGE_SUCCESS) { + LOG_PLAYER_ERROR(p, "Could not set pipeline to NULL state to change uri.\n"); + return false; + } + + g_object_set(p->playbin, "uri", uri, NULL); + + result = gst_element_set_state(p->playbin, GST_STATE_PAUSED); + if (result == GST_STATE_CHANGE_FAILURE) { + LOG_PLAYER_ERROR(p, "Could not set pipeline to PAUSED state to play new uri.\n"); + return false; + } else if (result == GST_STATE_CHANGE_NO_PREROLL) { + p->is_live = true; + + if (completer.on_done != NULL) { + completer.on_done(completer.userdata); + } + } else if (result == GST_STATE_CHANGE_SUCCESS) { + p->is_live = false; + + if (completer.on_done) { + completer.on_done(completer.userdata); + } + } else if (result == GST_STATE_CHANGE_ASYNC) { + /// TODO: What is is_live here? 
+ p->is_live = false; + + if (completer.on_done || completer.on_error) { + start_async(p, completer); + } + } + + gstplayer_seek_to(p, 0, false); + + return true; +} + +bool gstplayer_set_source(struct gstplayer *p, const char *uri) { + return gstplayer_set_source_with_completer(p, uri, (struct async_completer) { + .on_done = NULL, + .on_error = NULL, + .userdata = NULL + }); +} + +struct notifier *gstplayer_get_video_info_notifier(struct gstplayer *player) { + return &player->video_info_notifier; +} + +struct notifier *gstplayer_get_duration_notifier(struct gstplayer *player) { + return &player->duration_notifier; +} + +struct notifier *gstplayer_get_seeking_info_notifier(struct gstplayer *player) { + return &player->seeking_info_notifier; +} + +struct notifier *gstplayer_get_buffering_state_notifier(struct gstplayer *player) { + return &player->buffering_state_notifier; +} + +struct notifier *gstplayer_get_error_notifier(struct gstplayer *player) { + return &player->error_notifier; +} + +struct notifier *gstplayer_get_eos_notifier(struct gstplayer *player) { + return &player->eos_notifier; +} diff --git a/src/plugins/gstplayer.h b/src/plugins/gstplayer.h new file mode 100644 index 00000000..a5f1922f --- /dev/null +++ b/src/plugins/gstplayer.h @@ -0,0 +1,230 @@ +#ifndef _FLUTTERPI_INCLUDE_PLUGINS_GSTPLAYER_H +#define _FLUTTERPI_INCLUDE_PLUGINS_GSTPLAYER_H + +#include +#include + +#include + +#include "util/collection.h" + +#include "config.h" + +#define GSTREAMER_VER(major, minor, patch) ((((major) &0xFF) << 16) | (((minor) &0xFF) << 8) | ((patch) &0xFF)) +#define THIS_GSTREAMER_VER GSTREAMER_VER(LIBGSTREAMER_VERSION_MAJOR, LIBGSTREAMER_VERSION_MINOR, LIBGSTREAMER_VERSION_PATCH) + +enum format_hint { FORMAT_HINT_NONE, FORMAT_HINT_MPEG_DASH, FORMAT_HINT_HLS, FORMAT_HINT_SS, FORMAT_HINT_OTHER }; + +enum buffering_mode { BUFFERING_MODE_STREAM, BUFFERING_MODE_DOWNLOAD, BUFFERING_MODE_TIMESHIFT, BUFFERING_MODE_LIVE }; + +struct buffering_range { + int64_t start_ms; + 
int64_t stop_ms; +}; + +struct buffering_state { + // The percentage that the buffer is filled. + // If this is 100 playback will resume. + int percent; + + // The buffering mode currently used by the pipeline. + enum buffering_mode mode; + + // The average input / consumption speed in bytes per second. + int avg_in, avg_out; + + // Time left till buffering finishes, in ms. + // 0 means not buffering right now. + int64_t time_left_ms; + + // The ranges of already buffered video. + // For the BUFFERING_MODE_DOWNLOAD and BUFFERING_MODE_TIMESHIFT buffering modes, this specifies the ranges + // where efficient seeking is possible. + // For the BUFFERING_MODE_STREAM and BUFFERING_MODE_LIVE buffering modes, this describes the oldest and + // newest item in the buffer. + int n_ranges; + + // Flexible array member. + // For example, if n_ranges is 2, just allocate using + // `state = malloc(sizeof(struct buffering_state) + 2*sizeof(struct buffering_range))` + // and we can use state->ranges[0] and so on. + // This is cool because we don't need to allocate two blocks of memory and we can just call + // `free` once to free the whole thing. + // More precisely, we don't need to define a new function we can give to value_notifier_init + // as the value destructor, we can just use `free`. + struct buffering_range ranges[]; +}; + +#define BUFFERING_STATE_SIZE(n_ranges) (sizeof(struct buffering_state) + (n_ranges) * sizeof(struct buffering_range)) + +struct video_info; +struct gstplayer; +struct flutterpi; +struct notifier; + +typedef struct _GstStructure GstStructure; + +struct async_completer { + void_callback_t on_done; + void (*on_error)(void *userdata, GError *error); + void *userdata; +}; + +/// Create a gstreamer video player. +struct gstplayer *gstplayer_new( + struct flutterpi *flutterpi, + const char *uri, + void *userdata, + bool play_video, + bool play_audio, + GstStructure *headers +); + +/// Create a gstreamer video player that loads the video from a flutter asset. 
+/// @arg asset_path The path of the asset inside the asset bundle. +/// @arg package_name The name of the package containing the asset +/// @arg userdata The userdata associated with this player +struct gstplayer *gstplayer_new_from_asset(struct flutterpi *flutterpi, const char *asset_path, const char *package_name, void *userdata); + +/// Create a gstreamer video player that loads the video from a network URI. +/// @arg uri The URI to the video. (for example, http://, https://, rtmp://, rtsp://) +/// @arg format_hint A hint to the format of the video. FORMAT_HINT_NONE means there's no hint. +/// @arg userdata The userdata associated with this player. +struct gstplayer *gstplayer_new_from_network(struct flutterpi *flutterpi, const char *uri, enum format_hint format_hint, void *userdata, GstStructure *headers); + +/// Create a gstreamer video player that loads the video from a file URI. +/// @arg uri The file:// URI to the video. +/// @arg userdata The userdata associated with this player. +struct gstplayer *gstplayer_new_from_file(struct flutterpi *flutterpi, const char *uri, void *userdata); + +/// Create a gstreamer video player with a custom gstreamer pipeline. +/// @arg pipeline The description of the custom pipeline that should be used. Should contain an appsink called "sink". +/// @arg userdata The userdata associated with this player. +struct gstplayer *gstplayer_new_from_pipeline(struct flutterpi *flutterpi, const char *pipeline, void *userdata); + +/// Destroy this gstreamer player instance and the resources +/// associated with it. (texture, gstreamer pipeline, etc) +/// +/// Should be called on the flutterpi main/platform thread, +/// because otherwise destroying the gstreamer event bus listener +/// might be a race condition. +void gstplayer_destroy(struct gstplayer *player); + +/// Set the generic userdata associated with this gstreamer player instance. 
+/// Overwrites the userdata set in the constructor and any userdata previously +/// set using @ref gstplayer_set_userdata. +/// @arg userdata The new userdata that should be associated with this player. +void gstplayer_set_userdata(struct gstplayer *player, void *userdata); + +/// Get the userdata that was given to the constructor or was previously set using +/// @ref gstplayer_set_userdata. +/// @returns userdata associated with this player. +void *gstplayer_get_userdata(struct gstplayer *player); + +/// Get the id of the flutter external texture that this player is rendering into. +int64_t gstplayer_get_texture_id(struct gstplayer *player); + +/// Add a http header (consisting of a string key and value) to the list of http headers that +/// gstreamer will use when playing back from a HTTP/S URI. +/// This has no effect after @ref gstplayer_initialize was called. +void gstplayer_put_http_header(struct gstplayer *player, const char *key, const char *value); + +/// Initializes the video playback, i.e. boots up the gstreamer pipeline, starts +/// buffering the video. +/// @returns 0 if initialization was successfull, errno-style error code if an error ocurred. +int gstplayer_initialize(struct gstplayer *player); + +/// Set the current playback state to "playing" if that's not the case already. +/// @returns 0 if initialization was successfull, errno-style error code if an error ocurred. +int gstplayer_play(struct gstplayer *player); + +/// Sets the current playback state to "paused" if that's not the case already. +/// @returns 0 if initialization was successfull, errno-style error code if an error ocurred. +int gstplayer_pause(struct gstplayer *player); + +/// Get the current playback position. +/// @returns Current playback position, in milliseconds from the beginning of the video. +int64_t gstplayer_get_position(struct gstplayer *player); + +/// Get the duration of the currently playing medium. 
+/// @returns Duration of the current medium in milliseconds, -1 if the duration +/// is not yet known, or INT64_MAX for live sources. +int64_t gstplayer_get_duration(struct gstplayer *player); + +/// Set whether the video should loop. +/// @arg looping Whether the video should start playing from the beginning when the +/// end is reached. +int gstplayer_set_looping(struct gstplayer *player, bool looping, bool gapless); + +/// Set the playback volume. +/// @arg volume Desired volume as a value between 0 and 1. +int gstplayer_set_volume(struct gstplayer *player, double volume); + +/// Seek to a specific position in the video. +/// @arg position Position to seek to in milliseconds from the beginning of the video. +/// @arg nearest_keyframe If true, seek to the nearest keyframe instead. Might be faster but less accurate. +int gstplayer_seek_to(struct gstplayer *player, int64_t position, bool nearest_keyframe); + +/// Seek to a specific position in the video and call +/// @arg on_seek_done with @arg userdata when done. +int gstplayer_seek_with_completer(struct gstplayer *player, int64_t position, bool nearest_keyframe, struct async_completer completer); + +/// Set the playback speed of the player. 
+/// 1.0: normal playback speed +/// 0.5: half playback speed +/// 2.0: double playback speed +int gstplayer_set_playback_speed(struct gstplayer *player, double playback_speed); + +int gstplayer_step_forward(struct gstplayer *player); + +int gstplayer_step_backward(struct gstplayer *player); + +void gstplayer_set_audio_balance(struct gstplayer *player, float balance); + +float gstplayer_get_audio_balance(struct gstplayer *player); + +bool gstplayer_set_source(struct gstplayer *p, const char *uri); + +bool gstplayer_set_source_with_completer(struct gstplayer *p, const char *uri, struct async_completer completer); + +struct video_info { + int width, height; + + double fps; + + int64_t duration_ms; + + bool can_seek; + int64_t seek_begin_ms, seek_end_ms; +}; + +/// @brief Get the value notifier for the video info. +/// +/// Gets notified with a value of type `struct video_info*` when the video info changes. +/// The listeners will be called on an internal gstreamer thread. +/// So you need to make sure you do the proper rethreading in the listener callback. +struct notifier *gstplayer_get_video_info_notifier(struct gstplayer *player); + +struct seeking_info { + bool can_seek; + int64_t seek_begin_ms, seek_end_ms; +}; + +struct notifier *gstplayer_get_seeking_info_notifier(struct gstplayer *player); + +struct notifier *gstplayer_get_duration_notifier(struct gstplayer *player); + +struct notifier *gstplayer_get_eos_notifier(struct gstplayer *player); + +/// @brief Get the value notifier for the buffering state. +/// +/// Gets notified with a value of type `struct buffering_state*` when the buffering state changes. +/// The listeners will be called on the main flutterpi platform thread. +struct notifier *gstplayer_get_buffering_state_notifier(struct gstplayer *player); + +/// @brief Get the change notifier for errors. +/// +/// Gets notified when an error happens. 
(Not yet implemented) +struct notifier *gstplayer_get_error_notifier(struct gstplayer *player); + +#endif diff --git a/src/plugins/gstreamer_video_player.h b/src/plugins/gstreamer_video_player.h index 02e6ed25..6b248e6a 100644 --- a/src/plugins/gstreamer_video_player.h +++ b/src/plugins/gstreamer_video_player.h @@ -1,10 +1,12 @@ -#ifndef _FLUTTERPI_INCLUDE_PLUGINS_OMXPLAYER_VIDEO_PLUGIN_H -#define _FLUTTERPI_INCLUDE_PLUGINS_OMXPLAYER_VIDEO_PLUGIN_H +#ifndef _FLUTTERPI_INCLUDE_PLUGINS_GSTREAMER_VIDEO_PLAYER_H +#define _FLUTTERPI_INCLUDE_PLUGINS_GSTREAMER_VIDEO_PLAYER_H #include "util/collection.h" #include "util/lock_ops.h" #include "util/refcounting.h" +#include + #include "config.h" #if !defined(HAVE_EGL_GLES2) @@ -14,173 +16,9 @@ #include "gles.h" #endif -enum format_hint { FORMAT_HINT_NONE, FORMAT_HINT_MPEG_DASH, FORMAT_HINT_HLS, FORMAT_HINT_SS, FORMAT_HINT_OTHER }; - -enum buffering_mode { BUFFERING_MODE_STREAM, BUFFERING_MODE_DOWNLOAD, BUFFERING_MODE_TIMESHIFT, BUFFERING_MODE_LIVE }; - -struct buffering_range { - int64_t start_ms; - int64_t stop_ms; -}; - -struct buffering_state { - // The percentage that the buffer is filled. - // If this is 100 playback will resume. - int percent; - - // The buffering mode currently used by the pipeline. - enum buffering_mode mode; - - // The average input / consumption speed in bytes per second. - int avg_in, avg_out; - - // Time left till buffering finishes, in ms. - // 0 means not buffering right now. - int64_t time_left_ms; - - // The ranges of already buffered video. - // For the BUFFERING_MODE_DOWNLOAD and BUFFERING_MODE_TIMESHIFT buffering modes, this specifies the ranges - // where efficient seeking is possible. - // For the BUFFERING_MODE_STREAM and BUFFERING_MODE_LIVE buffering modes, this describes the oldest and - // newest item in the buffer. - int n_ranges; - - // Flexible array member. 
- // For example, if n_ranges is 2, just allocate using - // `state = malloc(sizeof(struct buffering_state) + 2*sizeof(struct buffering_range))` - // and we can use state->ranges[0] and so on. - // This is cool because we don't need to allocate two blocks of memory and we can just call - // `free` once to free the whole thing. - // More precisely, we don't need to define a new function we can give to value_notifier_init - // as the value destructor, we can just use `free`. - struct buffering_range ranges[]; -}; - -#define BUFFERING_STATE_SIZE(n_ranges) (sizeof(struct buffering_state) + (n_ranges) * sizeof(struct buffering_range)) - -struct video_info; -struct gstplayer; -struct flutterpi; - -/// Create a gstreamer video player that loads the video from a flutter asset. -/// @arg asset_path The path of the asset inside the asset bundle. -/// @arg package_name The name of the package containing the asset -/// @arg userdata The userdata associated with this player -struct gstplayer *gstplayer_new_from_asset(struct flutterpi *flutterpi, const char *asset_path, const char *package_name, void *userdata); - -/// Create a gstreamer video player that loads the video from a network URI. -/// @arg uri The URI to the video. (for example, http://, https://, rtmp://, rtsp://) -/// @arg format_hint A hint to the format of the video. FORMAT_HINT_NONE means there's no hint. -/// @arg userdata The userdata associated with this player. -struct gstplayer *gstplayer_new_from_network(struct flutterpi *flutterpi, const char *uri, enum format_hint format_hint, void *userdata); - -/// Create a gstreamer video player that loads the video from a file URI. -/// @arg uri The file:// URI to the video. -/// @arg userdata The userdata associated with this player. -struct gstplayer *gstplayer_new_from_file(struct flutterpi *flutterpi, const char *uri, void *userdata); - -/// Create a gstreamer video player with a custom gstreamer pipeline. 
-/// @arg pipeline The description of the custom pipeline that should be used. Should contain an appsink called "sink". -/// @arg userdata The userdata associated with this player. -struct gstplayer *gstplayer_new_from_pipeline(struct flutterpi *flutterpi, const char *pipeline, void *userdata); - -/// Destroy this gstreamer player instance and the resources -/// associated with it. (texture, gstreamer pipeline, etc) -/// -/// Should be called on the flutterpi main/platform thread, -/// because otherwise destroying the gstreamer event bus listener -/// might be a race condition. -void gstplayer_destroy(struct gstplayer *player); - -DECLARE_LOCK_OPS(gstplayer) - -/// Set the generic userdata associated with this gstreamer player instance. -/// Overwrites the userdata set in the constructor and any userdata previously -/// set using @ref gstplayer_set_userdata_locked. -/// @arg userdata The new userdata that should be associated with this player. -void gstplayer_set_userdata_locked(struct gstplayer *player, void *userdata); - -/// Get the userdata that was given to the constructor or was previously set using -/// @ref gstplayer_set_userdata_locked. -/// @returns userdata associated with this player. -void *gstplayer_get_userdata_locked(struct gstplayer *player); - -/// Get the id of the flutter external texture that this player is rendering into. -int64_t gstplayer_get_texture_id(struct gstplayer *player); - -//void gstplayer_set_info_callback(struct gstplayer *player, gstplayer_info_callback_t cb, void *userdata); - -//void gstplayer_set_buffering_callback(struct gstplayer *player, gstplayer_buffering_callback_t callback, void *userdata); - -/// Add a http header (consisting of a string key and value) to the list of http headers that -/// gstreamer will use when playing back from a HTTP/S URI. -/// This has no effect after @ref gstplayer_initialize was called. 
-void gstplayer_put_http_header(struct gstplayer *player, const char *key, const char *value); - -/// Initializes the video playback, i.e. boots up the gstreamer pipeline, starts -/// buffering the video. -/// @returns 0 if initialization was successfull, errno-style error code if an error ocurred. -int gstplayer_initialize(struct gstplayer *player); - -/// Get the video info. If the video info (format, size, etc) is already known, @arg callback will be called -/// synchronously, inside this call. If the video info is not known, @arg callback will be called on the flutter-pi -/// platform thread as soon as the info is known. -/// @returns The handle for the deferred callback. -//struct sd_event_source_generic *gstplayer_probe_video_info(struct gstplayer *player, gstplayer_info_callback_t callback, void *userdata); - -/// Set the current playback state to "playing" if that's not the case already. -/// @returns 0 if initialization was successfull, errno-style error code if an error ocurred. -int gstplayer_play(struct gstplayer *player); - -/// Sets the current playback state to "paused" if that's not the case already. -/// @returns 0 if initialization was successfull, errno-style error code if an error ocurred. -int gstplayer_pause(struct gstplayer *player); - -/// Get the current playback position. -/// @returns Current playback position, in milliseconds from the beginning of the video. -int64_t gstplayer_get_position(struct gstplayer *player); - -/// Set whether the video should loop. -/// @arg looping Whether the video should start playing from the beginning when the -/// end is reached. -int gstplayer_set_looping(struct gstplayer *player, bool looping); - -/// Set the playback volume. -/// @arg volume Desired volume as a value between 0 and 1. -int gstplayer_set_volume(struct gstplayer *player, double volume); - -/// Seek to a specific position in the video. -/// @arg position Position to seek to in milliseconds from the beginning of the video. 
-/// @arg nearest_keyframe If true, seek to the nearest keyframe instead. Might be faster but less accurate. -int gstplayer_seek_to(struct gstplayer *player, int64_t position, bool nearest_keyframe); - -/// Set the playback speed of the player. -/// 1.0: normal playback speed -/// 0.5: half playback speed -/// 2.0: double playback speed -int gstplayer_set_playback_speed(struct gstplayer *player, double playback_speed); - -int gstplayer_step_forward(struct gstplayer *player); - -int gstplayer_step_backward(struct gstplayer *player); - -/// @brief Get the value notifier for the video info. -/// -/// Gets notified with a value of type `struct video_info*` when the video info changes. -/// The listeners will be called on an internal gstreamer thread. -/// So you need to make sure you do the proper rethreading in the listener callback. -struct notifier *gstplayer_get_video_info_notifier(struct gstplayer *player); - -/// @brief Get the value notifier for the buffering state. -/// -/// Gets notified with a value of type `struct buffering_state*` when the buffering state changes. -/// The listeners will be called on the main flutterpi platform thread. -struct notifier *gstplayer_get_buffering_state_notifier(struct gstplayer *player); - -/// @brief Get the change notifier for errors. -/// -/// Gets notified when an error happens. (Not yet implemented) -struct notifier *gstplayer_get_error_notifier(struct gstplayer *player); +#if !defined(HAVE_GSTREAMER_VIDEO_PLAYER) + #error "gstreamer_video_player.h can't be used when building without gstreamer video player." 
+#endif struct video_frame; struct gl_renderer; @@ -212,19 +50,6 @@ DECLARE_REF_OPS(frame_interface) typedef struct _GstVideoInfo GstVideoInfo; typedef struct _GstVideoMeta GstVideoMeta; -struct video_info { - int width, height; - double fps; - int64_t duration_ms; - bool can_seek; - int64_t seek_begin_ms, seek_end_ms; -}; - -struct frame_info { - const GstVideoInfo *gst_info; - uint32_t drm_format; - EGLint egl_color_space; -}; struct _GstSample; @@ -238,4 +63,9 @@ struct gl_texture_frame; const struct gl_texture_frame *frame_get_gl_frame(struct video_frame *frame); +struct texture; +struct gl_renderer; +typedef struct _GstElement GstElement; +GstElement *flutter_gl_texture_sink_new(struct texture *texture, struct gl_renderer *renderer); + #endif diff --git a/src/plugins/gstreamer_video_player/flutter_texture_sink.c b/src/plugins/gstreamer_video_player/flutter_texture_sink.c new file mode 100644 index 00000000..8730e671 --- /dev/null +++ b/src/plugins/gstreamer_video_player/flutter_texture_sink.c @@ -0,0 +1,300 @@ +#include +#include +#include +#include +#include + +#include "plugins/gstreamer_video_player.h" +#include "plugins/gstplayer.h" +#include "texture_registry.h" +#include "util/logging.h" + +#if !defined(HAVE_EGL_GLES2) + #error "gstreamer video player requires EGL and OpenGL ES2 support." +#else + #include "egl.h" + #include "gles.h" +#endif + +struct texture_sink { + struct texture *fl_texture; + struct frame_interface *interface; +}; + +static void on_destroy_texture_frame(const struct texture_frame *texture_frame, void *userdata) { + struct video_frame *frame; + + (void) texture_frame; + + ASSERT_NOT_NULL(texture_frame); + ASSERT_NOT_NULL(userdata); + + frame = userdata; + + frame_destroy(frame); +} + +static void on_appsink_eos(GstAppSink *appsink, void *userdata) { + gboolean ok; + + ASSERT_NOT_NULL(appsink); + ASSERT_NOT_NULL(userdata); + + (void) userdata; + + LOG_DEBUG("on_appsink_eos()\n"); + + // this method is called from the streaming thread. 
+ // we shouldn't access the player directly here, it could change while we use it. + // post a message to the gstreamer bus instead, will be handled by + // @ref on_bus_message. + ok = gst_element_post_message( + GST_ELEMENT(appsink), + gst_message_new_application(GST_OBJECT(appsink), gst_structure_new_empty("appsink-eos")) + ); + if (ok == FALSE) { + LOG_ERROR("Could not post appsink end-of-stream event to the message bus.\n"); + } +} + +static GstFlowReturn on_appsink_new_preroll(GstAppSink *appsink, void *userdata) { + struct video_frame *frame; + GstSample *sample; + + ASSERT_NOT_NULL(appsink); + ASSERT_NOT_NULL(userdata); + + struct texture_sink *meta = userdata; + + sample = gst_app_sink_try_pull_preroll(appsink, 0); + if (sample == NULL) { + LOG_ERROR("gstreamer returned a NULL sample.\n"); + return GST_FLOW_ERROR; + } + + // supply video info here + frame = frame_new(meta->interface, sample, NULL); + + // the frame has a reference on the sample internally. + gst_sample_unref(sample); + + if (frame != NULL) { + texture_push_frame( + meta->fl_texture, + &(struct texture_frame){ + .gl = *frame_get_gl_frame(frame), + .destroy = on_destroy_texture_frame, + .userdata = frame, + } + ); + } + + return GST_FLOW_OK; +} + +static GstFlowReturn on_appsink_new_sample(GstAppSink *appsink, void *userdata) { + struct video_frame *frame; + GstSample *sample; + + ASSERT_NOT_NULL(appsink); + ASSERT_NOT_NULL(userdata); + + struct texture_sink *meta = userdata; + + sample = gst_app_sink_try_pull_sample(appsink, 0); + if (sample == NULL) { + LOG_ERROR("gstreamer returned a NULL sample.\n"); + return GST_FLOW_ERROR; + } + + // supply video info here + frame = frame_new(meta->interface, sample, NULL); + + // the frame has a reference on the sample internally. 
+ gst_sample_unref(sample); + + if (frame != NULL) { + texture_push_frame( + meta->fl_texture, + &(struct texture_frame){ + .gl = *frame_get_gl_frame(frame), + .destroy = on_destroy_texture_frame, + .userdata = frame, + } + ); + } + + return GST_FLOW_OK; +} + +static void on_appsink_cbs_destroy(void *userdata) { + struct gstplayer *player; + + LOG_DEBUG("on_appsink_cbs_destroy()\n"); + ASSERT_NOT_NULL(userdata); + + player = userdata; + + (void) player; +} + +static GstCaps *caps_for_frame_interface(struct frame_interface *interface) { + GstCaps *caps = gst_caps_new_empty(); + if (caps == NULL) { + return NULL; + } + + /// TODO: Add dmabuf caps here + for_each_format_in_frame_interface(i, format, interface) { + GstVideoFormat gst_format = gst_video_format_from_drm_format(format->format); + if (gst_format == GST_VIDEO_FORMAT_UNKNOWN) { + continue; + } + + gst_caps_append(caps, gst_caps_new_simple("video/x-raw", "format", G_TYPE_STRING, gst_video_format_to_string(gst_format), NULL)); + } + + return caps; +} + +#if THIS_GSTREAMER_VER >= GSTREAMER_VER(1, 20, 0) +static gboolean on_appsink_new_event(GstAppSink *appsink, gpointer userdata) { + (void) userdata; + + GstMiniObject *obj; + + do { + obj = gst_app_sink_try_pull_object(appsink, 0); + if (obj == NULL) { + return FALSE; + } + + if (!GST_IS_EVENT(obj)) { + LOG_DEBUG("Got non-event from gst_app_sink_try_pull_object.\n"); + } + } while (obj && !GST_IS_EVENT(obj)); + + // GstEvent *event = GST_EVENT_CAST(obj); + + // char *str = gst_structure_to_string(gst_event_get_structure(event)); + // LOG_DEBUG("Got event: %s\n", str); + // g_free(str); + + gst_mini_object_unref(obj); + + return FALSE; +} +#endif + +UNUSED static gboolean on_appsink_propose_allocation(GstAppSink *appsink, GstQuery *query, gpointer userdata) { + (void) appsink; + (void) userdata; + + gst_query_add_allocation_meta(query, GST_VIDEO_META_API_TYPE, NULL); + + return FALSE; +} + +UNUSED static GstPadProbeReturn on_query_appsink_pad(GstPad *pad, 
GstPadProbeInfo *info, void *userdata) { + GstQuery *query; + + (void) pad; + (void) userdata; + + query = gst_pad_probe_info_get_query(info); + if (query == NULL) { + LOG_DEBUG("Couldn't get query from pad probe info.\n"); + return GST_PAD_PROBE_OK; + } + + if (GST_QUERY_TYPE(query) != GST_QUERY_ALLOCATION) { + return GST_PAD_PROBE_OK; + } + + gst_query_add_allocation_meta(query, GST_VIDEO_META_API_TYPE, NULL); + + return GST_PAD_PROBE_HANDLED; +} + +GstElement *flutter_gl_texture_sink_new(struct texture *texture, struct gl_renderer *renderer) { + ASSERT_NOT_NULL(texture); + ASSERT_NOT_NULL(renderer); + + struct texture_sink *meta = calloc(1, sizeof(struct texture_sink)); + if (meta == NULL) { + return NULL; + } + + meta->fl_texture = texture; + + GstElement *element = gst_element_factory_make("appsink", "appsink"); + if (element == NULL) { + free(meta); + return NULL; + } + + meta->interface = frame_interface_new(renderer); + if (meta->interface == NULL) { + gst_object_unref(element); + free(meta); + return NULL; + } + + GstCaps *caps = caps_for_frame_interface(meta->interface); + if (caps == NULL) { + frame_interface_unref(meta->interface); + gst_object_unref(element); + free(meta); + return NULL; + } + + GstBaseSink *basesink = GST_BASE_SINK_CAST(element); + GstAppSink *appsink = GST_APP_SINK_CAST(element); + + gst_base_sink_set_max_lateness(basesink, 20 * GST_MSECOND); + gst_base_sink_set_qos_enabled(basesink, TRUE); + gst_base_sink_set_sync(basesink, TRUE); + gst_app_sink_set_max_buffers(appsink, 2); + gst_app_sink_set_emit_signals(appsink, TRUE); + gst_app_sink_set_drop(appsink, FALSE); + gst_app_sink_set_caps(appsink, caps); + gst_caps_unref(caps); + + GstAppSinkCallbacks cbs; + memset(&cbs, 0, sizeof(cbs)); + + cbs.new_preroll = on_appsink_new_preroll; + cbs.new_sample = on_appsink_new_sample; + cbs.eos = on_appsink_eos; +#if THIS_GSTREAMER_VER >= GSTREAMER_VER(1, 20, 0) + cbs.new_event = on_appsink_new_event; +#endif + +#if THIS_GSTREAMER_VER >= 
GSTREAMER_VER(1, 24, 0) + cbs.propose_allocation = on_appsink_propose_allocation; +#endif + + // Use if() instead of conditional compilation, so + // this is type-checked even for >= 1.24.0. + if (THIS_GSTREAMER_VER < GSTREAMER_VER(1, 24, 0)) { + GstPad *pad = gst_element_get_static_pad(element, "sink"); + if (pad == NULL) { + LOG_ERROR("Couldn't get static pad `sink` from appsink.\n"); + frame_interface_unref(meta->interface); + gst_object_unref(element); + free(meta); + return NULL; + } + + gst_pad_add_probe(pad, GST_PAD_PROBE_TYPE_QUERY_DOWNSTREAM, on_query_appsink_pad, NULL, NULL); + } + + gst_app_sink_set_callbacks( + GST_APP_SINK(appsink), + &cbs, + meta, + on_appsink_cbs_destroy + ); + + return element; +} diff --git a/src/plugins/gstreamer_video_player/frame.c b/src/plugins/gstreamer_video_player/frame.c index 66498ce6..59062e0b 100644 --- a/src/plugins/gstreamer_video_player/frame.c +++ b/src/plugins/gstreamer_video_player/frame.c @@ -16,14 +16,12 @@ // This will error if we don't have EGL / OpenGL ES support. 
#include "gl_renderer.h" #include "plugins/gstreamer_video_player.h" +#include "plugins/gstplayer.h" #include "util/logging.h" #include "util/refcounting.h" #define MAX_N_PLANES 4 -#define GSTREAMER_VER(major, minor, patch) ((((major) &0xFF) << 16) | (((minor) &0xFF) << 8) | ((patch) &0xFF)) -#define THIS_GSTREAMER_VER GSTREAMER_VER(LIBGSTREAMER_VERSION_MAJOR, LIBGSTREAMER_VERSION_MINOR, LIBGSTREAMER_VERSION_PATCH) - #define DRM_FOURCC_FORMAT "c%c%c%c" #define DRM_FOURCC_ARGS(format) (format) & 0xFF, ((format) >> 8) & 0xFF, ((format) >> 16) & 0xFF, ((format) >> 24) & 0xFF @@ -38,7 +36,6 @@ struct video_frame { int dmabuf_fds[MAX_N_PLANES]; EGLImageKHR image; - size_t width, height; struct gl_texture_frame gl_frame; }; @@ -359,14 +356,18 @@ UNUSED int dup_gst_buffer_range_as_dmabuf(struct gbm_device *gbm_device, GstBuff return -1; } - bo = gbm_bo_create(gbm_device, map_info.size, 1, GBM_FORMAT_R8, GBM_BO_USE_LINEAR); + // Create a square texture large enough to fit our bytes instead of one with only one huge row, + // because some drivers have limitations on the row length. 
(Intel) + uint32_t dim = (uint32_t) ceil(sqrt(map_info.size)); + + bo = gbm_bo_create(gbm_device, dim, dim, GBM_FORMAT_R8, GBM_BO_USE_LINEAR); if (bo == NULL) { LOG_ERROR("Couldn't create GBM BO to copy video frame into.\n"); goto fail_unmap_buffer; } map_data = NULL; - map = gbm_bo_map(bo, 0, 0, map_info.size, 1, GBM_BO_TRANSFER_WRITE, &stride, &map_data); + map = gbm_bo_map(bo, 0, 0, dim, dim, GBM_BO_TRANSFER_WRITE, &stride, &map_data); if (map == NULL) { LOG_ERROR("Couldn't mmap GBM BO to copy video frame into it.\n"); goto fail_destroy_bo; @@ -415,14 +416,18 @@ UNUSED int dup_gst_memory_as_dmabuf(struct gbm_device *gbm_device, GstMemory *me return -1; } - bo = gbm_bo_create(gbm_device, map_info.size, 1, GBM_FORMAT_R8, GBM_BO_USE_LINEAR); + // Create a square texture large enough to fit our bytes instead of one with only one huge row, + // because some drivers have limitations on the row length. (Intel) + uint32_t dim = (uint32_t) ceil(sqrt(map_info.size)); + + bo = gbm_bo_create(gbm_device, dim, dim, GBM_FORMAT_R8, GBM_BO_USE_LINEAR); if (bo == NULL) { LOG_ERROR("Couldn't create GBM BO to copy video frame into.\n"); goto fail_unmap_buffer; } map_data = NULL; - map = gbm_bo_map(bo, 0, 0, map_info.size, 1, GBM_BO_TRANSFER_WRITE, &stride, &map_data); + map = gbm_bo_map(bo, 0, 0, dim, dim, GBM_BO_TRANSFER_WRITE, &stride, &map_data); if (map == NULL) { LOG_ERROR("Couldn't mmap GBM BO to copy video frame into it.\n"); goto fail_destroy_bo; @@ -619,14 +624,22 @@ get_plane_infos(GstBuffer *buffer, const GstVideoInfo *info, struct gbm_device * goto fail_close_fds; } + static bool logged_dmabuf_feedback = false; + if (n_memories != 1) { + if (!logged_dmabuf_feedback) { + LOG_DEBUG("INFO: Flutter-Pi is using manual dmabuf uploads to show video frames. 
This can result in poor performance.\n"); + logged_dmabuf_feedback = true; + } + ok = dup_gst_buffer_range_as_dmabuf(gbm_device, buffer, memory_index, n_memories); if (ok < 0) { - LOG_ERROR("Could not duplicate gstreamer buffer range as dmabuf.\n"); + LOG_ERROR("Could not upload gstreamer buffer range into dmabufs.\n"); ok = EIO; goto fail_close_fds; } + plane_infos[i].fd = ok; } else { memory = gst_buffer_peek_memory(buffer, memory_index); @@ -647,11 +660,16 @@ get_plane_infos(GstBuffer *buffer, const GstVideoInfo *info, struct gbm_device * plane_infos[i].fd = ok; } else { + if (!logged_dmabuf_feedback) { + LOG_DEBUG("INFO: Flutter-Pi is using manual dmabuf uploads to show video frames. This can result in poor performance.\n"); + logged_dmabuf_feedback = true; + } + /// TODO: When duping, duplicate all non-dmabuf memories into one /// gbm buffer instead. ok = dup_gst_memory_as_dmabuf(gbm_device, memory); if (ok < 0) { - LOG_ERROR("Could not duplicate gstreamer memory as dmabuf.\n"); + LOG_ERROR("Could not upload gstreamer memory into dmabuf.\n"); ok = EIO; goto fail_close_fds; } @@ -802,7 +820,7 @@ static EGLint egl_vertical_chroma_siting_from_gst_info(const GstVideoInfo *info) } } -struct video_frame *frame_new(struct frame_interface *interface, GstSample *sample, const GstVideoInfo *info) { +static struct video_frame *frame_new_egl_imported(struct frame_interface *interface, GstSample *sample, const GstVideoInfo *info) { #define PUT_ATTR(_key, _value) \ do { \ assert(attr_index + 2 <= ARRAY_SIZE(attributes)); \ @@ -811,12 +829,11 @@ struct video_frame *frame_new(struct frame_interface *interface, GstSample *samp } while (false) struct video_frame *frame; struct plane_info planes[MAX_N_PLANES]; - GstVideoInfo _info; + GstVideoInfo video_info; EGLBoolean egl_ok; GstBuffer *buffer; EGLImageKHR egl_image; gboolean gst_ok; - uint32_t drm_format; GstCaps *caps; GLuint texture; GLenum gl_error; @@ -838,13 +855,13 @@ struct video_frame *frame_new(struct frame_interface 
*interface, GstSample *samp return NULL; } - info = &_info; - - gst_ok = gst_video_info_from_caps(&_info, caps); + gst_ok = gst_video_info_from_caps(&video_info, caps); if (gst_ok == FALSE) { - LOG_ERROR("Could not get video info from video sample caps.\n"); + LOG_ERROR("Could not get video info from caps.\n"); return NULL; } + + info = &video_info; } else { caps = NULL; } @@ -854,16 +871,17 @@ struct video_frame *frame_new(struct frame_interface *interface, GstSample *samp height = GST_VIDEO_INFO_HEIGHT(info); n_planes = GST_VIDEO_INFO_N_PLANES(info); - // query the drm format for this sample - drm_format = drm_format_from_gst_info(info); + uint64_t drm_modifier = DRM_FORMAT_MOD_LINEAR; + uint32_t drm_format = drm_format_from_gst_info(info); if (drm_format == DRM_FORMAT_INVALID) { LOG_ERROR("Video format has no EGL equivalent.\n"); return NULL; } + bool external_only; for_each_format_in_frame_interface(i, format, interface) { - if (format->format == drm_format && format->modifier == DRM_FORMAT_MOD_LINEAR) { + if (format->format == drm_format && format->modifier == drm_modifier) { external_only = format->external_only; goto format_supported; } @@ -872,7 +890,7 @@ struct video_frame *frame_new(struct frame_interface *interface, GstSample *samp LOG_ERROR( "Video format is not supported by EGL: %" DRM_FOURCC_FORMAT " (modifier: %" PRIu64 ").\n", DRM_FOURCC_ARGS(drm_format), - (uint64_t) DRM_FORMAT_MOD_LINEAR + (uint64_t) drm_modifier ); return NULL; @@ -1129,6 +1147,29 @@ struct video_frame *frame_new(struct frame_interface *interface, GstSample *samp return NULL; } +static struct video_frame *frame_new_egl_duped(struct frame_interface *interface, GstSample *sample, const GstVideoInfo *info) { + (void) interface; + (void) sample; + (void) info; + return NULL; +} + +struct video_frame *frame_new(struct frame_interface *interface, GstSample *sample, const GstVideoInfo *info) { + struct video_frame *frame; + + frame = frame_new_egl_imported(interface, sample, info); + if 
(frame != NULL) { + return frame; + } + + frame = frame_new_egl_duped(interface, sample, info); + if (frame != NULL) { + return frame; + } + + return NULL; +} + void frame_destroy(struct video_frame *frame) { EGLBoolean egl_ok; int ok; diff --git a/src/plugins/gstreamer_video_player/player.c b/src/plugins/gstreamer_video_player/player.c deleted file mode 100644 index a6555eec..00000000 --- a/src/plugins/gstreamer_video_player/player.c +++ /dev/null @@ -1,1337 +0,0 @@ -#define _GNU_SOURCE - -#include -#include -#include - -#include - -#include -#include -#include -#include -#include -#include -#include -#include - -#include "flutter-pi.h" -#include "notifier_listener.h" -#include "platformchannel.h" -#include "pluginregistry.h" -#include "plugins/gstreamer_video_player.h" -#include "texture_registry.h" -#include "util/collection.h" -#include "util/logging.h" - -#ifdef DEBUG - #define DEBUG_TRACE_BEGIN(player, name) trace_begin(player, name) - #define DEBUG_TRACE_END(player, name) trace_end(player, name) - #define DEBUG_TRACE_INSTANT(player, name) trace_instant(player, name) -#else - #define DEBUG_TRACE_BEGIN(player, name) \ - do { \ - } while (0) - #define DEBUG_TRACE_END(player, name) \ - do { \ - } while (0) - #define DEBUG_TRACE_INSTANT(player, name) \ - do { \ - } while (0) -#endif - -#define LOG_GST_SET_STATE_ERROR(_element) \ - LOG_ERROR( \ - "setting gstreamer playback state failed. gst_element_set_state(element name: %s): GST_STATE_CHANGE_FAILURE\n", \ - GST_ELEMENT_NAME(_element) \ - ) -#define LOG_GST_GET_STATE_ERROR(_element) \ - LOG_ERROR( \ - "last gstreamer state change failed. 
gst_element_get_state(element name: %s): GST_STATE_CHANGE_FAILURE\n", \ - GST_ELEMENT_NAME(_element) \ - ) - -struct incomplete_video_info { - bool has_resolution; - bool has_fps; - bool has_duration; - bool has_seeking_info; - struct video_info info; -}; - -enum playpause_state { kPaused, kPlaying, kStepping }; - -enum playback_direction { kForward, kBackward }; - -#define PLAYPAUSE_STATE_AS_STRING(playpause_state) \ - ((playpause_state) == kPaused ? "paused" : \ - (playpause_state) == kPlaying ? "playing" : \ - (playpause_state) == kStepping ? "stepping" : \ - "?") - -struct gstplayer { - pthread_mutex_t lock; - - struct flutterpi *flutterpi; - void *userdata; - - char *video_uri; - char *pipeline_description; - - GstStructure *headers; - - /** - * @brief The desired playback rate that should be used when @ref playpause_state is kPlayingForward. (should be > 0) - * - */ - double playback_rate_forward; - - /** - * @brief The desired playback rate that should be used when @ref playpause_state is kPlayingBackward. (should be < 0) - * - */ - double playback_rate_backward; - - /** - * @brief True if the video should seemlessly start from the beginning once the end is reached. - * - */ - atomic_bool looping; - - /** - * @brief The desired playback state. Either paused, playing, or single-frame stepping. - * - */ - enum playpause_state playpause_state; - - /** - * @brief The desired playback direction. - * - */ - enum playback_direction direction; - - /** - * @brief The actual, currently used playback rate. - * - */ - double current_playback_rate; - - /** - * @brief The position reported if gstreamer position queries fail (for example, because gstreamer is currently - * seeking to a new position. In that case, fallback_position_ms will be the seeking target position, so we report the - * new position while we're seeking to it) - */ - int64_t fallback_position_ms; - - /** - * @brief True if there's a position that apply_playback_state should seek to. 
- * - */ - bool has_desired_position; - - /** - * @brief True if gstplayer should seek to the nearest keyframe instead, which is a bit faster. - * - */ - bool do_fast_seeking; - - /** - * @brief The position, if any, that apply_playback_state should seek to. - * - */ - int64_t desired_position_ms; - - struct notifier video_info_notifier, buffering_state_notifier, error_notifier; - - bool is_initialized; - bool has_sent_info; - struct incomplete_video_info info; - - bool has_gst_info; - GstVideoInfo gst_info; - - struct texture *texture; - int64_t texture_id; - - struct frame_interface *frame_interface; - - GstElement *pipeline, *sink; - GstBus *bus; - sd_event_source *busfd_events; - - bool is_live; -}; - -#define MAX_N_PLANES 4 -#define MAX_N_EGL_DMABUF_IMAGE_ATTRIBUTES 6 + 6 * MAX_N_PLANES + 1 - -UNUSED static inline void lock(struct gstplayer *player) { - pthread_mutex_lock(&player->lock); -} - -UNUSED static inline void unlock(struct gstplayer *player) { - pthread_mutex_unlock(&player->lock); -} - -UNUSED static inline void trace_instant(struct gstplayer *player, const char *name) { - return flutterpi_trace_event_instant(player->flutterpi, name); -} - -UNUSED static inline void trace_begin(struct gstplayer *player, const char *name) { - return flutterpi_trace_event_begin(player->flutterpi, name); -} - -UNUSED static inline void trace_end(struct gstplayer *player, const char *name) { - return flutterpi_trace_event_end(player->flutterpi, name); -} - -static int maybe_send_info(struct gstplayer *player) { - struct video_info *duped; - - if (player->info.has_resolution && player->info.has_fps && player->info.has_duration && player->info.has_seeking_info) { - // we didn't send the info yet but we have complete video info now. - // send it! 
- duped = memdup(&(player->info.info), sizeof(player->info.info)); - if (duped == NULL) { - return ENOMEM; - } - - notifier_notify(&player->video_info_notifier, duped); - } - return 0; -} - -static void fetch_duration(struct gstplayer *player) { - gboolean ok; - int64_t duration; - - ok = gst_element_query_duration(player->pipeline, GST_FORMAT_TIME, &duration); - if (ok == FALSE) { - if (player->is_live) { - player->info.info.duration_ms = INT64_MAX; - player->info.has_duration = true; - return; - } else { - LOG_ERROR("Could not fetch duration. (gst_element_query_duration)\n"); - return; - } - } - - player->info.info.duration_ms = GST_TIME_AS_MSECONDS(duration); - player->info.has_duration = true; -} - -static void fetch_seeking(struct gstplayer *player) { - GstQuery *seeking_query; - gboolean ok, seekable; - int64_t seek_begin, seek_end; - - seeking_query = gst_query_new_seeking(GST_FORMAT_TIME); - ok = gst_element_query(player->pipeline, seeking_query); - if (ok == FALSE) { - if (player->is_live) { - player->info.info.can_seek = false; - player->info.info.seek_begin_ms = 0; - player->info.info.seek_end_ms = 0; - player->info.has_seeking_info = true; - return; - } else { - LOG_DEBUG("Could not query seeking info. 
(gst_element_query)\n"); - return; - } - } - - gst_query_parse_seeking(seeking_query, NULL, &seekable, &seek_begin, &seek_end); - - gst_query_unref(seeking_query); - - player->info.info.can_seek = seekable; - player->info.info.seek_begin_ms = GST_TIME_AS_MSECONDS(seek_begin); - player->info.info.seek_end_ms = GST_TIME_AS_MSECONDS(seek_end); - player->info.has_seeking_info = true; -} - -static void update_buffering_state(struct gstplayer *player) { - struct buffering_state *state; - GstBufferingMode mode; - GstQuery *query; - gboolean ok, busy; - int64_t start, stop, buffering_left; - int n_ranges, percent, avg_in, avg_out; - - query = gst_query_new_buffering(GST_FORMAT_TIME); - ok = gst_element_query(player->pipeline, query); - if (ok == FALSE) { - LOG_ERROR("Could not query buffering state. (gst_element_query)\n"); - goto fail_unref_query; - } - - gst_query_parse_buffering_percent(query, &busy, &percent); - gst_query_parse_buffering_stats(query, &mode, &avg_in, &avg_out, &buffering_left); - - n_ranges = (int) gst_query_get_n_buffering_ranges(query); - - state = malloc(sizeof(*state) + n_ranges * sizeof(struct buffering_range)); - if (state == NULL) { - goto fail_unref_query; - } - - for (int i = 0; i < n_ranges; i++) { - ok = gst_query_parse_nth_buffering_range(query, (unsigned int) i, &start, &stop); - if (ok == FALSE) { - LOG_ERROR("Could not parse %dth buffering range from buffering state. (gst_query_parse_nth_buffering_range)\n", i); - goto fail_free_state; - } - - state->ranges[i].start_ms = GST_TIME_AS_MSECONDS(start); - state->ranges[i].stop_ms = GST_TIME_AS_MSECONDS(stop); - } - - gst_query_unref(query); - - state->percent = percent; - state->mode = - (mode == GST_BUFFERING_STREAM ? BUFFERING_MODE_STREAM : - mode == GST_BUFFERING_DOWNLOAD ? BUFFERING_MODE_DOWNLOAD : - mode == GST_BUFFERING_TIMESHIFT ? BUFFERING_MODE_TIMESHIFT : - mode == GST_BUFFERING_LIVE ? 
BUFFERING_MODE_LIVE : - (assert(0), BUFFERING_MODE_STREAM)); - state->avg_in = avg_in; - state->avg_out = avg_out; - state->time_left_ms = buffering_left; - state->n_ranges = n_ranges; - - notifier_notify(&player->buffering_state_notifier, state); - return; - -fail_free_state: - free(state); - -fail_unref_query: - gst_query_unref(query); -} - -static int init(struct gstplayer *player, bool force_sw_decoders); - -static void maybe_deinit(struct gstplayer *player); - -static int apply_playback_state(struct gstplayer *player) { - GstStateChangeReturn ok; - GstState desired_state, current_state, pending_state; - double desired_rate; - int64_t position; - - desired_state = player->playpause_state == kPlaying ? GST_STATE_PLAYING : GST_STATE_PAUSED; /* use GST_STATE_PAUSED if we're stepping */ - - /// Use 1.0 if we're stepping, otherwise use the stored playback rate for the current direction. - if (player->playpause_state == kStepping) { - desired_rate = player->direction == kForward ? 1.0 : -1.0; - } else { - desired_rate = player->direction == kForward ? player->playback_rate_forward : player->playback_rate_backward; - } - - if (player->current_playback_rate != desired_rate || player->has_desired_position) { - if (player->has_desired_position) { - position = player->desired_position_ms * GST_MSECOND; - } else { - ok = gst_element_query_position(GST_ELEMENT(player->pipeline), GST_FORMAT_TIME, &position); - if (ok == FALSE) { - LOG_ERROR("Could not get the current playback position to apply the playback speed.\n"); - return EIO; - } - } - - if (player->direction == kForward) { - LOG_DEBUG( - "gst_element_seek(..., rate: %f, start: %" GST_TIME_FORMAT ", end: %" GST_TIME_FORMAT ", ...)\n", - desired_rate, - GST_TIME_ARGS(position), - GST_TIME_ARGS(GST_CLOCK_TIME_NONE) - ); - ok = gst_element_seek( - GST_ELEMENT(player->pipeline), - desired_rate, - GST_FORMAT_TIME, - GST_SEEK_FLAG_FLUSH | - (player->do_fast_seeking ? 
GST_SEEK_FLAG_KEY_UNIT | GST_SEEK_FLAG_SNAP_NEAREST : GST_SEEK_FLAG_ACCURATE), - GST_SEEK_TYPE_SET, - position, - GST_SEEK_TYPE_SET, - GST_CLOCK_TIME_NONE - ); - if (ok == FALSE) { - LOG_ERROR( - "Could not set the new playback speed / playback position (speed: %f, pos: %" GST_TIME_FORMAT ").\n", - desired_rate, - GST_TIME_ARGS(position) - ); - return EIO; - } - } else { - LOG_DEBUG( - "gst_element_seek(..., rate: %f, start: %" GST_TIME_FORMAT ", end: %" GST_TIME_FORMAT ", ...)\n", - desired_rate, - GST_TIME_ARGS(0), - GST_TIME_ARGS(position) - ); - ok = gst_element_seek( - GST_ELEMENT(player->pipeline), - desired_rate, - GST_FORMAT_TIME, - GST_SEEK_FLAG_FLUSH | - (player->do_fast_seeking ? GST_SEEK_FLAG_KEY_UNIT | GST_SEEK_FLAG_SNAP_NEAREST : GST_SEEK_FLAG_ACCURATE), - GST_SEEK_TYPE_SET, - 0, - GST_SEEK_TYPE_SET, - position - ); - - if (ok == FALSE) { - LOG_ERROR( - "Could not set the new playback speed / playback position (speed: %f, pos: %" GST_TIME_FORMAT ").\n", - desired_rate, - GST_TIME_ARGS(position) - ); - return EIO; - } - } - - player->current_playback_rate = desired_rate; - player->fallback_position_ms = GST_TIME_AS_MSECONDS(position); - player->has_desired_position = false; - } - - DEBUG_TRACE_BEGIN(player, "gst_element_get_state"); - ok = gst_element_get_state(player->pipeline, ¤t_state, &pending_state, 0); - DEBUG_TRACE_END(player, "gst_element_get_state"); - - if (ok == GST_STATE_CHANGE_FAILURE) { - LOG_ERROR( - "last gstreamer pipeline state change failed. gst_element_get_state(element name: %s): GST_STATE_CHANGE_FAILURE\n", - GST_ELEMENT_NAME(player->pipeline) - ); - DEBUG_TRACE_END(player, "apply_playback_state"); - return EIO; - } - - if (pending_state == GST_STATE_VOID_PENDING) { - if (current_state == desired_state) { - // we're already in the desired state, and we're also not changing it - // no need to do anything. 
- LOG_DEBUG( - "apply_playback_state(playing: %s): already in desired state and none pending\n", - PLAYPAUSE_STATE_AS_STRING(player->playpause_state) - ); - DEBUG_TRACE_END(player, "apply_playback_state"); - return 0; - } - - LOG_DEBUG( - "apply_playback_state(playing: %s): setting state to %s\n", - PLAYPAUSE_STATE_AS_STRING(player->playpause_state), - gst_element_state_get_name(desired_state) - ); - - DEBUG_TRACE_BEGIN(player, "gst_element_set_state"); - ok = gst_element_set_state(player->pipeline, desired_state); - DEBUG_TRACE_END(player, "gst_element_set_state"); - - if (ok == GST_STATE_CHANGE_FAILURE) { - LOG_GST_SET_STATE_ERROR(player->pipeline); - DEBUG_TRACE_END(player, "apply_playback_state"); - return EIO; - } - } else if (pending_state != desired_state) { - // queue to be executed when pending async state change completes - /// TODO: Implement properly - - LOG_DEBUG( - "apply_playback_state(playing: %s): async state change in progress, setting state to %s\n", - PLAYPAUSE_STATE_AS_STRING(player->playpause_state), - gst_element_state_get_name(desired_state) - ); - - DEBUG_TRACE_BEGIN(player, "gst_element_set_state"); - ok = gst_element_set_state(player->pipeline, desired_state); - DEBUG_TRACE_END(player, "gst_element_set_state"); - - if (ok == GST_STATE_CHANGE_FAILURE) { - LOG_GST_SET_STATE_ERROR(player->pipeline); - DEBUG_TRACE_END(player, "apply_playback_state"); - return EIO; - } - } - - DEBUG_TRACE_END(player, "apply_playback_state"); - return 0; -} - -static void on_bus_message(struct gstplayer *player, GstMessage *msg) { - GstState old, current, pending, requested; - GError *error; - gchar *debug_info; - - DEBUG_TRACE_BEGIN(player, "on_bus_message"); - switch (GST_MESSAGE_TYPE(msg)) { - case GST_MESSAGE_ERROR: - gst_message_parse_error(msg, &error, &debug_info); - - LOG_ERROR( - "gstreamer error: code: %d, domain: %s, msg: %s (debug info: %s)\n", - error->code, - g_quark_to_string(error->domain), - error->message, - debug_info - ); - 
g_clear_error(&error); - g_free(debug_info); - break; - - case GST_MESSAGE_WARNING: - gst_message_parse_warning(msg, &error, &debug_info); - LOG_ERROR("gstreamer warning: %s (debug info: %s)\n", error->message, debug_info); - g_clear_error(&error); - g_free(debug_info); - break; - - case GST_MESSAGE_INFO: - gst_message_parse_info(msg, &error, &debug_info); - LOG_DEBUG("gstreamer info: %s (debug info: %s)\n", error->message, debug_info); - g_clear_error(&error); - g_free(debug_info); - break; - - case GST_MESSAGE_BUFFERING: { - GstBufferingMode mode; - int64_t buffering_left; - int percent, avg_in, avg_out; - - gst_message_parse_buffering(msg, &percent); - gst_message_parse_buffering_stats(msg, &mode, &avg_in, &avg_out, &buffering_left); - - LOG_DEBUG( - "buffering, src: %s, percent: %d, mode: %s, avg in: %d B/s, avg out: %d B/s, %" GST_TIME_FORMAT "\n", - GST_MESSAGE_SRC_NAME(msg), - percent, - mode == GST_BUFFERING_STREAM ? "stream" : - mode == GST_BUFFERING_DOWNLOAD ? "download" : - mode == GST_BUFFERING_TIMESHIFT ? "timeshift" : - mode == GST_BUFFERING_LIVE ? "live" : - "?", - avg_in, - avg_out, - GST_TIME_ARGS(buffering_left * GST_MSECOND) - ); - - /// TODO: GST_MESSAGE_BUFFERING is only emitted when we actually need to wait on some buffering till we can resume the playback. - /// However, the info we send to the callback also contains information on the buffered video ranges. - /// That information is constantly changing, but we only notify the player about it when we actively wait for the buffer to be filled. 
- DEBUG_TRACE_BEGIN(player, "update_buffering_state"); - update_buffering_state(player); - DEBUG_TRACE_END(player, "update_buffering_state"); - - break; - }; - - case GST_MESSAGE_STATE_CHANGED: - gst_message_parse_state_changed(msg, &old, ¤t, &pending); - LOG_DEBUG( - "state-changed: src: %s, old: %s, current: %s, pending: %s\n", - GST_MESSAGE_SRC_NAME(msg), - gst_element_state_get_name(old), - gst_element_state_get_name(current), - gst_element_state_get_name(pending) - ); - - if (GST_MESSAGE_SRC(msg) == GST_OBJECT(player->pipeline)) { - if (!player->info.has_duration && (current == GST_STATE_PAUSED || current == GST_STATE_PLAYING)) { - // it's our pipeline that changed to either playing / paused, and we don't have info about our video duration yet. - // get that info now. - // technically we can already fetch the duration when the decodebin changed to PAUSED state. - DEBUG_TRACE_BEGIN(player, "fetch video info"); - fetch_duration(player); - fetch_seeking(player); - maybe_send_info(player); - DEBUG_TRACE_END(player, "fetch video info"); - } - } - break; - - case GST_MESSAGE_ASYNC_DONE: break; - - case GST_MESSAGE_LATENCY: - LOG_DEBUG("gstreamer: redistributing latency\n"); - DEBUG_TRACE_BEGIN(player, "gst_bin_recalculate_latency"); - gst_bin_recalculate_latency(GST_BIN(player->pipeline)); - DEBUG_TRACE_END(player, "gst_bin_recalculate_latency"); - break; - - case GST_MESSAGE_EOS: LOG_DEBUG("end of stream, src: %s\n", GST_MESSAGE_SRC_NAME(msg)); break; - - case GST_MESSAGE_REQUEST_STATE: - gst_message_parse_request_state(msg, &requested); - LOG_DEBUG( - "gstreamer state change to %s was requested by %s\n", - gst_element_state_get_name(requested), - GST_MESSAGE_SRC_NAME(msg) - ); - DEBUG_TRACE_BEGIN(player, "gst_element_set_state"); - gst_element_set_state(GST_ELEMENT(player->pipeline), requested); - DEBUG_TRACE_END(player, "gst_element_set_state"); - break; - - case GST_MESSAGE_APPLICATION: - if (player->looping && gst_message_has_name(msg, "appsink-eos")) { - // we 
have an appsink end of stream event - // and we should be looping, so seek back to start - LOG_DEBUG("appsink eos, seeking back to segment start (flushing)\n"); - DEBUG_TRACE_BEGIN(player, "gst_element_seek"); - gst_element_seek( - GST_ELEMENT(player->pipeline), - player->current_playback_rate, - GST_FORMAT_TIME, - GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE, - GST_SEEK_TYPE_SET, - 0, - GST_SEEK_TYPE_SET, - GST_CLOCK_TIME_NONE - ); - DEBUG_TRACE_END(player, "gst_element_seek"); - - apply_playback_state(player); - } - break; - - default: LOG_DEBUG("gstreamer message: %s, src: %s\n", GST_MESSAGE_TYPE_NAME(msg), GST_MESSAGE_SRC_NAME(msg)); break; - } - DEBUG_TRACE_END(player, "on_bus_message"); - return; -} - -static int on_bus_fd_ready(sd_event_source *s, int fd, uint32_t revents, void *userdata) { - struct gstplayer *player; - GstMessage *msg; - - (void) s; - (void) fd; - (void) revents; - - player = userdata; - - DEBUG_TRACE_BEGIN(player, "on_bus_fd_ready"); - - msg = gst_bus_pop(player->bus); - if (msg != NULL) { - on_bus_message(player, msg); - gst_message_unref(msg); - } - - DEBUG_TRACE_END(player, "on_bus_fd_ready"); - - return 0; -} - -static GstPadProbeReturn on_query_appsink(GstPad *pad, GstPadProbeInfo *info, void *userdata) { - GstQuery *query; - - (void) pad; - (void) userdata; - - query = gst_pad_probe_info_get_query(info); - if (query == NULL) { - LOG_DEBUG("Couldn't get query from pad probe info.\n"); - return GST_PAD_PROBE_OK; - } - - if (GST_QUERY_TYPE(query) != GST_QUERY_ALLOCATION) { - return GST_PAD_PROBE_OK; - } - - gst_query_add_allocation_meta(query, GST_VIDEO_META_API_TYPE, NULL); - - return GST_PAD_PROBE_HANDLED; -} - -static void on_element_added(GstBin *bin, GstElement *element, void *userdata) { - GstElementFactory *factory; - const char *factory_name; - - (void) userdata; - (void) bin; - - factory = gst_element_get_factory(element); - factory_name = gst_plugin_feature_get_name(factory); - - if (g_str_has_prefix(factory_name, "v4l2video") 
&& g_str_has_suffix(factory_name, "dec")) { - gst_util_set_object_arg(G_OBJECT(element), "capture-io-mode", "dmabuf"); - fprintf(stderr, "[gstreamer video player] found gstreamer V4L2 video decoder element with name \"%s\"\n", GST_OBJECT_NAME(element)); - } -} - -static GstPadProbeReturn on_probe_pad(GstPad *pad, GstPadProbeInfo *info, void *userdata) { - struct gstplayer *player; - GstEvent *event; - GstCaps *caps; - gboolean ok; - - (void) pad; - - player = userdata; - event = GST_PAD_PROBE_INFO_EVENT(info); - - if (GST_EVENT_TYPE(event) != GST_EVENT_CAPS) { - return GST_PAD_PROBE_OK; - } - - gst_event_parse_caps(event, &caps); - if (caps == NULL) { - LOG_ERROR("gstreamer: caps event without caps\n"); - return GST_PAD_PROBE_OK; - } - - ok = gst_video_info_from_caps(&player->gst_info, caps); - if (!ok) { - LOG_ERROR("gstreamer: caps event with invalid video caps\n"); - return GST_PAD_PROBE_OK; - } - - player->has_gst_info = true; - - LOG_DEBUG( - "on_probe_pad, fps: %f, res: % 4d x % 4d, format: %s\n", - (double) GST_VIDEO_INFO_FPS_N(&player->gst_info) / GST_VIDEO_INFO_FPS_D(&player->gst_info), - GST_VIDEO_INFO_WIDTH(&player->gst_info), - GST_VIDEO_INFO_HEIGHT(&player->gst_info), - gst_video_format_to_string(player->gst_info.finfo->format) - ); - - player->info.info.width = GST_VIDEO_INFO_WIDTH(&player->gst_info); - player->info.info.height = GST_VIDEO_INFO_HEIGHT(&player->gst_info); - player->info.info.fps = (double) GST_VIDEO_INFO_FPS_N(&player->gst_info) / GST_VIDEO_INFO_FPS_D(&player->gst_info); - player->info.has_resolution = true; - player->info.has_fps = true; - maybe_send_info(player); - - return GST_PAD_PROBE_OK; -} - -static void on_destroy_texture_frame(const struct texture_frame *texture_frame, void *userdata) { - struct video_frame *frame; - - (void) texture_frame; - - ASSERT_NOT_NULL(texture_frame); - ASSERT_NOT_NULL(userdata); - - frame = userdata; - - frame_destroy(frame); -} - -static void on_appsink_eos(GstAppSink *appsink, void *userdata) { - 
gboolean ok; - - ASSERT_NOT_NULL(appsink); - ASSERT_NOT_NULL(userdata); - - (void) userdata; - - LOG_DEBUG("on_appsink_eos()\n"); - - // this method is called from the streaming thread. - // we shouldn't access the player directly here, it could change while we use it. - // post a message to the gstreamer bus instead, will be handled by - // @ref on_bus_message. - ok = gst_element_post_message( - GST_ELEMENT(appsink), - gst_message_new_application(GST_OBJECT(appsink), gst_structure_new_empty("appsink-eos")) - ); - if (ok == FALSE) { - LOG_ERROR("Could not post appsink end-of-stream event to the message bus.\n"); - } -} - -static GstFlowReturn on_appsink_new_preroll(GstAppSink *appsink, void *userdata) { - struct video_frame *frame; - struct gstplayer *player; - GstSample *sample; - - ASSERT_NOT_NULL(appsink); - ASSERT_NOT_NULL(userdata); - - player = userdata; - - sample = gst_app_sink_try_pull_preroll(appsink, 0); - if (sample == NULL) { - LOG_ERROR("gstreamer returned a NULL sample.\n"); - return GST_FLOW_ERROR; - } - - /// TODO: Attempt to upload using gst_gl_upload here - frame = frame_new(player->frame_interface, sample, player->has_gst_info ? &player->gst_info : NULL); - - gst_sample_unref(sample); - - if (frame != NULL) { - texture_push_frame( - player->texture, - &(struct texture_frame){ - .gl = *frame_get_gl_frame(frame), - .destroy = on_destroy_texture_frame, - .userdata = frame, - } - ); - } - - return GST_FLOW_OK; -} - -static GstFlowReturn on_appsink_new_sample(GstAppSink *appsink, void *userdata) { - struct video_frame *frame; - struct gstplayer *player; - GstSample *sample; - - ASSERT_NOT_NULL(appsink); - ASSERT_NOT_NULL(userdata); - - player = userdata; - - /// TODO: Attempt to upload using gst_gl_upload here - sample = gst_app_sink_try_pull_sample(appsink, 0); - if (sample == NULL) { - LOG_ERROR("gstreamer returned a NULL sample.\n"); - return GST_FLOW_ERROR; - } - - frame = frame_new(player->frame_interface, sample, player->has_gst_info ? 
&player->gst_info : NULL); - - gst_sample_unref(sample); - - if (frame != NULL) { - texture_push_frame( - player->texture, - &(struct texture_frame){ - .gl = *frame_get_gl_frame(frame), - .destroy = on_destroy_texture_frame, - .userdata = frame, - } - ); - } - - return GST_FLOW_OK; -} - -static void on_appsink_cbs_destroy(void *userdata) { - struct gstplayer *player; - - LOG_DEBUG("on_appsink_cbs_destroy()\n"); - ASSERT_NOT_NULL(userdata); - - player = userdata; - - (void) player; -} - -void on_source_setup(GstElement *bin, GstElement *source, gpointer userdata) { - (void) bin; - - if (g_object_class_find_property(G_OBJECT_GET_CLASS(source), "extra-headers") != NULL) { - g_object_set(source, "extra-headers", (GstStructure *) userdata, NULL); - } else { - LOG_ERROR("Failed to set custom HTTP headers because gstreamer source element has no 'extra-headers' property.\n"); - } -} - -static int init(struct gstplayer *player, bool force_sw_decoders) { - GstStateChangeReturn state_change_return; - sd_event_source *busfd_event_source; - GstElement *pipeline, *sink, *src; - GstBus *bus; - GstPad *pad; - GPollFD fd; - GError *error = NULL; - int ok; - - static const char *default_pipeline_descr = "uridecodebin name=\"src\" ! video/x-raw ! 
appsink sync=true name=\"sink\""; - - const char *pipeline_descr; - if (player->pipeline_description != NULL) { - pipeline_descr = player->pipeline_description; - } else { - pipeline_descr = default_pipeline_descr; - } - - pipeline = gst_parse_launch(pipeline_descr, &error); - if (pipeline == NULL) { - LOG_ERROR("Could create GStreamer pipeline from description: %s (pipeline: `%s`)\n", error->message, pipeline_descr); - return error->code; - } - - sink = gst_bin_get_by_name(GST_BIN(pipeline), "sink"); - if (sink == NULL) { - LOG_ERROR("Couldn't find appsink in pipeline bin.\n"); - ok = EINVAL; - goto fail_unref_pipeline; - } - - pad = gst_element_get_static_pad(sink, "sink"); - if (pad == NULL) { - LOG_ERROR("Couldn't get static pad \"sink\" from video sink.\n"); - ok = EINVAL; - goto fail_unref_sink; - } - - gst_pad_add_probe(pad, GST_PAD_PROBE_TYPE_QUERY_DOWNSTREAM, on_query_appsink, player, NULL); - - src = gst_bin_get_by_name(GST_BIN(pipeline), "src"); - - if (player->video_uri != NULL) { - if (src != NULL) { - g_object_set(G_OBJECT(src), "uri", player->video_uri, NULL); - } else { - LOG_ERROR("Couldn't find \"src\" element to configure Video URI.\n"); - } - } - - if (force_sw_decoders) { - if (src != NULL) { - g_object_set(G_OBJECT(src), "force-sw-decoders", force_sw_decoders, NULL); - } else { - LOG_ERROR("Couldn't find \"src\" element to force sw decoding.\n"); - } - } - - if (player->headers != NULL) { - if (src != NULL) { - g_signal_connect(G_OBJECT(src), "source-setup", G_CALLBACK(on_source_setup), player->headers); - } else { - LOG_ERROR("Couldn't find \"src\" element to configure additional HTTP headers.\n"); - } - } - - gst_base_sink_set_max_lateness(GST_BASE_SINK(sink), 20 * GST_MSECOND); - gst_base_sink_set_qos_enabled(GST_BASE_SINK(sink), TRUE); - gst_base_sink_set_sync(GST_BASE_SINK(sink), TRUE); - gst_app_sink_set_max_buffers(GST_APP_SINK(sink), 2); - gst_app_sink_set_emit_signals(GST_APP_SINK(sink), TRUE); - 
gst_app_sink_set_drop(GST_APP_SINK(sink), FALSE); - - // configure our caps - // we only accept video formats that we can actually upload to EGL - GstCaps *caps = gst_caps_new_empty(); - for_each_format_in_frame_interface(i, format, player->frame_interface) { - GstVideoFormat gst_format = gst_video_format_from_drm_format(format->format); - if (gst_format == GST_VIDEO_FORMAT_UNKNOWN) { - continue; - } - - gst_caps_append(caps, gst_caps_new_simple("video/x-raw", "format", G_TYPE_STRING, gst_video_format_to_string(gst_format), NULL)); - } - gst_app_sink_set_caps(GST_APP_SINK(sink), caps); - gst_caps_unref(caps); - - gst_app_sink_set_callbacks( - GST_APP_SINK(sink), - &(GstAppSinkCallbacks - ){ .eos = on_appsink_eos, .new_preroll = on_appsink_new_preroll, .new_sample = on_appsink_new_sample, ._gst_reserved = { 0 } }, - player, - on_appsink_cbs_destroy - ); - - gst_pad_add_probe(pad, GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM, on_probe_pad, player, NULL); - - /// FIXME: Make this work for custom pipelines as well. - if (src != NULL) { - g_signal_connect(src, "element-added", G_CALLBACK(on_element_added), player); - } else { - LOG_DEBUG("Couldn't find \"src\" element to setup v4l2 'capture-io-mode' to 'dmabuf'.\n"); - } - - if (src != NULL) { - gst_object_unref(src); - src = NULL; - } - - bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline)); - - gst_bus_get_pollfd(bus, &fd); - - flutterpi_sd_event_add_io(&busfd_event_source, fd.fd, EPOLLIN, on_bus_fd_ready, player); - - LOG_DEBUG("Setting state to paused...\n"); - state_change_return = gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PAUSED); - if (state_change_return == GST_STATE_CHANGE_NO_PREROLL) { - LOG_DEBUG("Is Live!\n"); - player->is_live = true; - } else { - LOG_DEBUG("Not live!\n"); - player->is_live = false; - } - - player->sink = sink; - /// FIXME: Not sure we need this here. pipeline is floating after gst_parse_launch, which - /// means we should take a reference, but the examples don't increase the refcount. 
- player->pipeline = pipeline; //gst_object_ref(pipeline); - player->bus = bus; - player->busfd_events = busfd_event_source; - - gst_object_unref(pad); - return 0; - -fail_unref_sink: - gst_object_unref(sink); - -fail_unref_pipeline: - gst_object_unref(pipeline); - - return ok; -} - -static void maybe_deinit(struct gstplayer *player) { - if (player->busfd_events != NULL) { - sd_event_source_unrefp(&player->busfd_events); - } - if (player->sink != NULL) { - gst_object_unref(GST_OBJECT(player->sink)); - player->sink = NULL; - } - if (player->bus != NULL) { - gst_object_unref(GST_OBJECT(player->bus)); - player->bus = NULL; - } - if (player->pipeline != NULL) { - gst_element_set_state(GST_ELEMENT(player->pipeline), GST_STATE_READY); - gst_element_set_state(GST_ELEMENT(player->pipeline), GST_STATE_NULL); - gst_object_unref(GST_OBJECT(player->pipeline)); - player->pipeline = NULL; - } -} - -DEFINE_LOCK_OPS(gstplayer, lock) - -static struct gstplayer *gstplayer_new(struct flutterpi *flutterpi, const char *uri, const char *pipeline_descr, void *userdata) { - struct frame_interface *frame_interface; - struct gstplayer *player; - struct texture *texture; - GstStructure *gst_headers; - int64_t texture_id; - char *uri_owned, *pipeline_descr_owned; - int ok; - - ASSERT_NOT_NULL(flutterpi); - assert((uri != NULL) != (pipeline_descr != NULL)); - - player = malloc(sizeof *player); - if (player == NULL) - return NULL; - - texture = flutterpi_create_texture(flutterpi); - if (texture == NULL) - goto fail_free_player; - - frame_interface = frame_interface_new(flutterpi_get_gl_renderer(flutterpi)); - if (frame_interface == NULL) - goto fail_destroy_texture; - - texture_id = texture_get_id(texture); - - if (uri != NULL) { - uri_owned = strdup(uri); - if (uri_owned == NULL) - goto fail_destroy_frame_interface; - } else { - uri_owned = NULL; - } - - if (pipeline_descr != NULL) { - pipeline_descr_owned = strdup(pipeline_descr); - if (pipeline_descr_owned == NULL) - goto 
fail_destroy_frame_interface; - } else { - pipeline_descr_owned = NULL; - } - - gst_headers = gst_structure_new_empty("http-headers"); - - ok = pthread_mutex_init(&player->lock, NULL); - if (ok != 0) - goto fail_free_gst_headers; - - ok = value_notifier_init(&player->video_info_notifier, NULL, free /* free(NULL) is a no-op, I checked */); - if (ok != 0) - goto fail_destroy_mutex; - - ok = value_notifier_init(&player->buffering_state_notifier, NULL, free); - if (ok != 0) - goto fail_deinit_video_info_notifier; - - ok = change_notifier_init(&player->error_notifier); - if (ok != 0) - goto fail_deinit_buffering_state_notifier; - - player->flutterpi = flutterpi; - player->userdata = userdata; - player->video_uri = uri_owned; - player->pipeline_description = pipeline_descr_owned; - player->headers = gst_headers; - player->playback_rate_forward = 1.0; - player->playback_rate_backward = 1.0; - player->looping = false; - player->playpause_state = kPaused; - player->direction = kForward; - player->current_playback_rate = 1.0; - player->fallback_position_ms = 0; - player->has_desired_position = false; - player->desired_position_ms = 0; - player->has_sent_info = false; - player->info.has_resolution = false; - player->info.has_fps = false; - player->info.has_duration = false; - player->info.has_seeking_info = false; - player->has_gst_info = false; - memset(&player->gst_info, 0, sizeof(player->gst_info)); - player->texture = texture; - player->texture_id = texture_id; - player->frame_interface = frame_interface; - player->pipeline = NULL; - player->sink = NULL; - player->bus = NULL; - player->busfd_events = NULL; - player->is_live = false; - return player; - - //fail_deinit_error_notifier: - //notifier_deinit(&player->error_notifier); - -fail_deinit_buffering_state_notifier: - notifier_deinit(&player->buffering_state_notifier); - -fail_deinit_video_info_notifier: - notifier_deinit(&player->video_info_notifier); - -fail_destroy_mutex: - pthread_mutex_destroy(&player->lock); - 
-fail_free_gst_headers: - gst_structure_free(gst_headers); - free(uri_owned); - -fail_destroy_frame_interface: - frame_interface_unref(frame_interface); - -fail_destroy_texture: - texture_destroy(texture); - -fail_free_player: - free(player); - - return NULL; -} - -struct gstplayer *gstplayer_new_from_asset(struct flutterpi *flutterpi, const char *asset_path, const char *package_name, void *userdata) { - struct gstplayer *player; - char *uri; - int ok; - - (void) package_name; - - ok = asprintf(&uri, "file://%s/%s", flutterpi_get_asset_bundle_path(flutterpi), asset_path); - if (ok < 0) { - return NULL; - } - - player = gstplayer_new(flutterpi, uri, NULL, userdata); - - free(uri); - - return player; -} - -struct gstplayer *gstplayer_new_from_network(struct flutterpi *flutterpi, const char *uri, enum format_hint format_hint, void *userdata) { - (void) format_hint; - return gstplayer_new(flutterpi, uri, NULL, userdata); -} - -struct gstplayer *gstplayer_new_from_file(struct flutterpi *flutterpi, const char *uri, void *userdata) { - return gstplayer_new(flutterpi, uri, NULL, userdata); -} - -struct gstplayer *gstplayer_new_from_content_uri(struct flutterpi *flutterpi, const char *uri, void *userdata) { - return gstplayer_new(flutterpi, uri, NULL, userdata); -} - -struct gstplayer *gstplayer_new_from_pipeline(struct flutterpi *flutterpi, const char *pipeline, void *userdata) { - return gstplayer_new(flutterpi, NULL, pipeline, userdata); -} - -void gstplayer_destroy(struct gstplayer *player) { - LOG_DEBUG("gstplayer_destroy(%p)\n", player); - notifier_deinit(&player->video_info_notifier); - notifier_deinit(&player->buffering_state_notifier); - notifier_deinit(&player->error_notifier); - maybe_deinit(player); - pthread_mutex_destroy(&player->lock); - if (player->headers != NULL) { - gst_structure_free(player->headers); - } - if (player->video_uri != NULL) { - free(player->video_uri); - } - if (player->pipeline_description != NULL) { - free(player->pipeline_description); - 
} - frame_interface_unref(player->frame_interface); - texture_destroy(player->texture); - free(player); -} - -int64_t gstplayer_get_texture_id(struct gstplayer *player) { - return player->texture_id; -} - -void gstplayer_put_http_header(struct gstplayer *player, const char *key, const char *value) { - GValue gvalue = G_VALUE_INIT; - g_value_set_string(&gvalue, value); - gst_structure_take_value(player->headers, key, &gvalue); -} - -void gstplayer_set_userdata_locked(struct gstplayer *player, void *userdata) { - player->userdata = userdata; -} - -void *gstplayer_get_userdata_locked(struct gstplayer *player) { - return player->userdata; -} - -int gstplayer_initialize(struct gstplayer *player) { - return init(player, false); -} - -int gstplayer_play(struct gstplayer *player) { - LOG_DEBUG("gstplayer_play()\n"); - player->playpause_state = kPlaying; - player->direction = kForward; - return apply_playback_state(player); -} - -int gstplayer_pause(struct gstplayer *player) { - LOG_DEBUG("gstplayer_pause()\n"); - player->playpause_state = kPaused; - player->direction = kForward; - return apply_playback_state(player); -} - -int gstplayer_set_looping(struct gstplayer *player, bool looping) { - LOG_DEBUG("gstplayer_set_looping(%s)\n", looping ? "true" : "false"); - player->looping = looping; - return 0; -} - -int gstplayer_set_volume(struct gstplayer *player, double volume) { - (void) player; - (void) volume; - LOG_DEBUG("gstplayer_set_volume(%f)\n", volume); - /// TODO: Implement - return 0; -} - -int64_t gstplayer_get_position(struct gstplayer *player) { - GstState current, pending; - gboolean ok; - int64_t position; - - GstStateChangeReturn statechange = gst_element_get_state(GST_ELEMENT(player->pipeline), ¤t, &pending, 0); - if (statechange == GST_STATE_CHANGE_FAILURE) { - LOG_GST_GET_STATE_ERROR(player->pipeline); - return -1; - } - - if (statechange == GST_STATE_CHANGE_ASYNC) { - // we don't have position data yet. - // report the latest known (or the desired) position. 
- return player->fallback_position_ms; - } - - DEBUG_TRACE_BEGIN(player, "gstplayer_get_position"); - DEBUG_TRACE_BEGIN(player, "gst_element_query_position"); - ok = gst_element_query_position(player->pipeline, GST_FORMAT_TIME, &position); - DEBUG_TRACE_END(player, "gst_element_query_position"); - - if (ok == FALSE) { - LOG_ERROR("Could not query gstreamer position. (gst_element_query_position)\n"); - return 0; - } - - DEBUG_TRACE_END(player, "gstplayer_get_position"); - return GST_TIME_AS_MSECONDS(position); -} - -int gstplayer_seek_to(struct gstplayer *player, int64_t position, bool nearest_keyframe) { - LOG_DEBUG("gstplayer_seek_to(%" PRId64 ")\n", position); - player->has_desired_position = true; - player->desired_position_ms = position; - player->do_fast_seeking = nearest_keyframe; - return apply_playback_state(player); -} - -int gstplayer_set_playback_speed(struct gstplayer *player, double playback_speed) { - LOG_DEBUG("gstplayer_set_playback_speed(%f)\n", playback_speed); - ASSERT_MSG(playback_speed > 0, "playback speed must be > 0."); - player->playback_rate_forward = playback_speed; - return apply_playback_state(player); -} - -int gstplayer_step_forward(struct gstplayer *player) { - gboolean gst_ok; - int ok; - - ASSERT_NOT_NULL(player); - - player->playpause_state = kStepping; - player->direction = kForward; - ok = apply_playback_state(player); - if (ok != 0) { - return ok; - } - - gst_ok = gst_element_send_event(player->pipeline, gst_event_new_step(GST_FORMAT_BUFFERS, 1, 1, TRUE, FALSE)); - if (gst_ok == FALSE) { - LOG_ERROR("Could not send frame-step event to pipeline. 
(gst_element_send_event)\n"); - return EIO; - } - return 0; -} - -int gstplayer_step_backward(struct gstplayer *player) { - gboolean gst_ok; - int ok; - - ASSERT_NOT_NULL(player); - - player->playpause_state = kStepping; - player->direction = kBackward; - ok = apply_playback_state(player); - if (ok != 0) { - return ok; - } - - gst_ok = gst_element_send_event(player->pipeline, gst_event_new_step(GST_FORMAT_BUFFERS, 1, 1, TRUE, FALSE)); - if (gst_ok == FALSE) { - LOG_ERROR("Could not send frame-step event to pipeline. (gst_element_send_event)\n"); - return EIO; - } - - return 0; -} - -struct notifier *gstplayer_get_video_info_notifier(struct gstplayer *player) { - return &player->video_info_notifier; -} - -struct notifier *gstplayer_get_buffering_state_notifier(struct gstplayer *player) { - return &player->buffering_state_notifier; -} - -struct notifier *gstplayer_get_error_notifier(struct gstplayer *player) { - return &player->error_notifier; -} diff --git a/src/plugins/gstreamer_video_player/plugin.c b/src/plugins/gstreamer_video_player/plugin.c index d1de5b49..6690b5b7 100644 --- a/src/plugins/gstreamer_video_player/plugin.c +++ b/src/plugins/gstreamer_video_player/plugin.c @@ -15,6 +15,7 @@ #include "platformchannel.h" #include "pluginregistry.h" #include "plugins/gstreamer_video_player.h" +#include "plugins/gstplayer.h" #include "texture_registry.h" #include "util/collection.h" #include "util/list.h" @@ -97,9 +98,9 @@ static struct gstplayer *get_player_by_evch(const char *const event_channel_name /** * @brief Remove a player instance from the player list. - * + * * Assumes the plugin struct is not locked. - * + * */ static void remove_player(struct gstplayer_meta *meta) { plugin_lock(&plugin); @@ -111,9 +112,9 @@ static void remove_player(struct gstplayer_meta *meta) { /** * @brief Remove a player instance from the player list. - * + * * Assumes the plugin struct is locked. 
- * + * */ static void remove_player_locked(struct gstplayer_meta *meta) { ASSERT_MUTEX_LOCKED(plugin.lock); @@ -121,7 +122,7 @@ static void remove_player_locked(struct gstplayer_meta *meta) { } static struct gstplayer_meta *get_meta(struct gstplayer *player) { - return (struct gstplayer_meta *) gstplayer_get_userdata_locked(player); + return (struct gstplayer_meta *) gstplayer_get_userdata(player); } /// Get the player id from the given arg, which is a kStdMap. @@ -315,7 +316,7 @@ static enum listener_return on_video_info_notify(void *arg, void *userdata) { /// on_video_info_notify is called on an internal thread, /// but send_initialized_event is (should be) mt-safe send_initialized_event(meta, !info->can_seek, info->width, info->height, info->duration_ms); - + /// FIXME: Threading /// Set this to NULL here so we don't unlisten to it twice. meta->video_info_listener = NULL; @@ -368,7 +369,7 @@ static int on_receive_evch(char *channel, struct platch_obj *object, FlutterPlat return platch_respond_not_implemented(responsehandle); } - meta = gstplayer_get_userdata_locked(player); + meta = gstplayer_get_userdata(player); if (streq("listen", method)) { platch_respond_success_std(responsehandle, NULL); @@ -417,44 +418,32 @@ static int on_initialize(char *channel, struct platch_obj *object, FlutterPlatfo return platch_respond_success_pigeon(responsehandle, NULL); } -static int check_headers(const struct std_value *headers, FlutterPlatformMessageResponseHandle *responsehandle) { - const struct std_value *key, *value; - - if (headers == NULL || STDVALUE_IS_NULL(*headers)) { - return 0; - } else if (!STDVALUE_IS_MAP(*headers)) { - platch_respond_illegal_arg_pigeon(responsehandle, "Expected `arg['httpHeaders']` to be a map of strings or null."); - return EINVAL; - } - - for (int i = 0; i < headers->size; i++) { - key = headers->keys + i; - value = headers->values + i; - - if (STDVALUE_IS_NULL(*key) || STDVALUE_IS_NULL(*value)) { - // ignore this value - continue; - } else if 
(STDVALUE_IS_STRING(*key) && STDVALUE_IS_STRING(*value)) { - // valid too - continue; - } else { - platch_respond_illegal_arg_pigeon(responsehandle, "Expected `arg['httpHeaders']` to be a map of strings or null."); - return EINVAL; - } - } +static void gst_structure_put_string(GstStructure *structure, const char *key, const char *value) { + GValue gvalue = G_VALUE_INIT; + g_value_set_string(&gvalue, value); + gst_structure_take_value(structure, key, &gvalue); +} - return 0; +static void gst_structure_take_string(GstStructure *structure, const char *key, char *value) { + GValue gvalue = G_VALUE_INIT; + g_value_take_string(&gvalue, value); + gst_structure_take_value(structure, key, &gvalue); } -static int add_headers_to_player(const struct std_value *headers, struct gstplayer *player) { +static bool get_headers(const struct std_value *headers, GstStructure **structure_out, FlutterPlatformMessageResponseHandle *responsehandle) { const struct std_value *key, *value; if (headers == NULL || STDVALUE_IS_NULL(*headers)) { - return 0; + *structure_out = NULL; + return true; } else if (!STDVALUE_IS_MAP(*headers)) { - assert(false); + *structure_out = NULL; + platch_respond_illegal_arg_pigeon(responsehandle, "Expected `arg['httpHeaders']` to be a map of strings or null."); + return false; } + *structure_out = gst_structure_new_empty("http-headers"); + for (int i = 0; i < headers->size; i++) { key = headers->keys + i; value = headers->values + i; @@ -463,13 +452,17 @@ static int add_headers_to_player(const struct std_value *headers, struct gstplay // ignore this value continue; } else if (STDVALUE_IS_STRING(*key) && STDVALUE_IS_STRING(*value)) { - gstplayer_put_http_header(player, STDVALUE_AS_STRING(*key), STDVALUE_AS_STRING(*value)); + gst_structure_put_string(*structure_out, STDVALUE_AS_STRING(*key), STDVALUE_AS_STRING(*value)); } else { - assert(false); + gst_structure_free(*structure_out); + *structure_out = NULL; + + platch_respond_illegal_arg_pigeon(responsehandle, 
"Expected `arg['httpHeaders']` to be a map of strings or null."); + return false; } } - return 0; + return true; } /// Allocates and initializes a gstplayer_meta struct, which we @@ -612,19 +605,20 @@ static int on_create(char *channel, struct platch_obj *object, FlutterPlatformMe ); } - temp = stdmap_get_str(arg, "httpHeaders"); - - // check our headers are valid, so we don't create our player for nothing - ok = check_headers(temp, responsehandle); - if (ok != 0) { - return 0; - } - // create our actual player (this doesn't initialize it) if (asset != NULL) { player = gstplayer_new_from_asset(flutterpi, asset, package_name, NULL); } else { - player = gstplayer_new_from_network(flutterpi, uri, format_hint, NULL); + temp = stdmap_get_str(arg, "httpHeaders"); + + // check our headers are valid, so we don't create our player for nothing + GstStructure *headers = NULL; + ok = get_headers(temp, &headers, responsehandle); + if (ok == false) { + return 0; + } + + player = gstplayer_new_from_network(flutterpi, uri, format_hint, NULL, headers); } if (player == NULL) { LOG_ERROR("Couldn't create gstreamer video player.\n"); @@ -640,10 +634,7 @@ static int on_create(char *channel, struct platch_obj *object, FlutterPlatformMe goto fail_destroy_player; } - gstplayer_set_userdata_locked(player, meta); - - // Add all our HTTP headers to gstplayer using gstplayer_put_http_header - add_headers_to_player(temp, player); + gstplayer_set_userdata(player, meta); // add it to our player collection add_player(meta); @@ -654,17 +645,8 @@ static int on_create(char *channel, struct platch_obj *object, FlutterPlatformMe goto fail_remove_player; } - // Finally, start initializing - ok = gstplayer_initialize(player); - if (ok != 0) { - goto fail_remove_receiver; - } - return platch_respond_success_pigeon(responsehandle, &STDMAP1(STDSTRING("textureId"), STDINT64(gstplayer_get_texture_id(player)))); -fail_remove_receiver: - plugin_registry_remove_receiver(meta->event_channel_name); - 
fail_remove_player: remove_player(meta); destroy_meta(meta); @@ -716,7 +698,8 @@ static int on_set_looping(char *channel, struct platch_obj *object, FlutterPlatf return platch_respond_illegal_arg_ext_pigeon(responsehandle, "Expected `arg['isLooping']` to be a boolean, but was:", temp); } - gstplayer_set_looping(player, loop); + gstplayer_set_looping(player, loop, true); + return platch_respond_success_pigeon(responsehandle, NULL); } @@ -1050,7 +1033,6 @@ static int on_initialize_v2(const struct raw_std_value *arg, FlutterPlatformMess } static int on_create_v2(const struct raw_std_value *arg, FlutterPlatformMessageResponseHandle *responsehandle) { - const struct raw_std_value *headers; struct gstplayer_meta *meta; struct gstplayer *player; enum format_hint format_hint; @@ -1152,6 +1134,8 @@ static int on_create_v2(const struct raw_std_value *arg, FlutterPlatformMessageR format_hint = FORMAT_HINT_NONE; } + GstStructure *headers = NULL; + // arg[4]: HTTP Headers if (size >= 5) { arg = raw_std_value_after(arg); @@ -1160,13 +1144,23 @@ static int on_create_v2(const struct raw_std_value *arg, FlutterPlatformMessageR headers = NULL; } else if (raw_std_value_is_map(arg)) { for_each_entry_in_raw_std_map(key, value, arg) { - if (!raw_std_value_is_string(key) || !raw_std_value_is_string(value)) { + if (raw_std_value_is_string(key) && raw_std_value_is_string(value)) { + if (headers == NULL) { + headers = gst_structure_new_empty("http-headers"); + } + + char *key_str = raw_std_string_dup(key); + gst_structure_take_string(headers, key_str, raw_std_string_dup(value)); + free(key_str); + } else { goto invalid_headers; } } - headers = arg; } else { invalid_headers: + if (headers != NULL) { + gst_structure_free(headers); + } return platch_respond_illegal_arg_std(responsehandle, "Expected `arg[4]` to be a map of strings or null."); } } else { @@ -1201,7 +1195,7 @@ static int on_create_v2(const struct raw_std_value *arg, FlutterPlatformMessageR free(asset); asset = NULL; } else if 
(uri != NULL) { - player = gstplayer_new_from_network(flutterpi, uri, format_hint, NULL); + player = gstplayer_new_from_network(flutterpi, uri, format_hint, NULL, headers); // gstplayer_new_from_network will dup the uri internally. free(uri); @@ -1230,20 +1224,7 @@ static int on_create_v2(const struct raw_std_value *arg, FlutterPlatformMessageR goto fail_destroy_player; } - gstplayer_set_userdata_locked(player, meta); - - // Add all the HTTP headers to gstplayer using gstplayer_put_http_header - if (headers != NULL) { - for_each_entry_in_raw_std_map(header_name, header_value, headers) { - char *header_name_duped = raw_std_string_dup(header_name); - char *header_value_duped = raw_std_string_dup(header_value); - - gstplayer_put_http_header(player, header_name_duped, header_value_duped); - - free(header_value_duped); - free(header_name_duped); - } - } + gstplayer_set_userdata(player, meta); // Add it to our player collection add_player(meta); @@ -1254,17 +1235,8 @@ static int on_create_v2(const struct raw_std_value *arg, FlutterPlatformMessageR goto fail_remove_player; } - // Finally, start initializing - ok = gstplayer_initialize(player); - if (ok != 0) { - goto fail_remove_receiver; - } - return platch_respond_success_std(responsehandle, &STDINT64(gstplayer_get_texture_id(player))); -fail_remove_receiver: - plugin_registry_remove_receiver(meta->event_channel_name); - fail_remove_player: remove_player(meta); destroy_meta(meta); @@ -1312,7 +1284,21 @@ static int on_set_looping_v2(const struct raw_std_value *arg, FlutterPlatformMes return platch_respond_illegal_arg_std(responsehandle, "Expected `arg[1]` to be a bool."); } - ok = gstplayer_set_looping(player, looping); + // For video playback, gapless looping usually works fine + // it seems. 
+ bool gapless = true; + if (raw_std_list_get_size(arg) >= 3) { + const struct raw_std_value *third = raw_std_list_get_nth_element(arg, 2); + if (raw_std_value_is_null(third)) { + // unchanged + } else if (raw_std_value_is_bool(third)) { + gapless = raw_std_value_as_bool(third); + } else { + return platch_respond_illegal_arg_std(responsehandle, "Expected `arg[2]` to be a bool or null."); + } + } + + ok = gstplayer_set_looping(player, looping, gapless); if (ok != 0) { return platch_respond_native_error_std(responsehandle, ok); } diff --git a/src/util/khash.h b/src/util/khash.h new file mode 100644 index 00000000..f75f3474 --- /dev/null +++ b/src/util/khash.h @@ -0,0 +1,627 @@ +/* The MIT License + + Copyright (c) 2008, 2009, 2011 by Attractive Chaos + + Permission is hereby granted, free of charge, to any person obtaining + a copy of this software and associated documentation files (the + "Software"), to deal in the Software without restriction, including + without limitation the rights to use, copy, modify, merge, publish, + distribute, sublicense, and/or sell copies of the Software, and to + permit persons to whom the Software is furnished to do so, subject to + the following conditions: + + The above copyright notice and this permission notice shall be + included in all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS + BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN + ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN + CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE. 
+*/ + +/* + An example: + +#include "khash.h" +KHASH_MAP_INIT_INT(32, char) +int main() { + int ret, is_missing; + khiter_t k; + khash_t(32) *h = kh_init(32); + k = kh_put(32, h, 5, &ret); + kh_value(h, k) = 10; + k = kh_get(32, h, 10); + is_missing = (k == kh_end(h)); + k = kh_get(32, h, 5); + kh_del(32, h, k); + for (k = kh_begin(h); k != kh_end(h); ++k) + if (kh_exist(h, k)) kh_value(h, k) = 1; + kh_destroy(32, h); + return 0; +} +*/ + +/* + 2013-05-02 (0.2.8): + + * Use quadratic probing. When the capacity is power of 2, stepping function + i*(i+1)/2 guarantees to traverse each bucket. It is better than double + hashing on cache performance and is more robust than linear probing. + + In theory, double hashing should be more robust than quadratic probing. + However, my implementation is probably not for large hash tables, because + the second hash function is closely tied to the first hash function, + which reduce the effectiveness of double hashing. + + Reference: http://research.cs.vt.edu/AVresearch/hashing/quadratic.php + + 2011-12-29 (0.2.7): + + * Minor code clean up; no actual effect. + + 2011-09-16 (0.2.6): + + * The capacity is a power of 2. This seems to dramatically improve the + speed for simple keys. Thank Zilong Tan for the suggestion. Reference: + + - http://code.google.com/p/ulib/ + - http://nothings.org/computer/judy/ + + * Allow to optionally use linear probing which usually has better + performance for random input. Double hashing is still the default as it + is more robust to certain non-random input. + + * Added Wang's integer hash function (not used by default). This hash + function is more robust to certain non-random input. + + 2011-02-14 (0.2.5): + + * Allow to declare global functions. 
+ + 2009-09-26 (0.2.4): + + * Improve portability + + 2008-09-19 (0.2.3): + + * Corrected the example + * Improved interfaces + + 2008-09-11 (0.2.2): + + * Improved speed a little in kh_put() + + 2008-09-10 (0.2.1): + + * Added kh_clear() + * Fixed a compiling error + + 2008-09-02 (0.2.0): + + * Changed to token concatenation which increases flexibility. + + 2008-08-31 (0.1.2): + + * Fixed a bug in kh_get(), which has not been tested previously. + + 2008-08-31 (0.1.1): + + * Added destructor +*/ + + +#ifndef __AC_KHASH_H +#define __AC_KHASH_H + +/*! + @header + + Generic hash table library. + */ + +#define AC_VERSION_KHASH_H "0.2.8" + +#include +#include +#include + +/* compiler specific configuration */ + +#if UINT_MAX == 0xffffffffu +typedef unsigned int khint32_t; +#elif ULONG_MAX == 0xffffffffu +typedef unsigned long khint32_t; +#endif + +#if ULONG_MAX == ULLONG_MAX +typedef unsigned long khint64_t; +#else +typedef unsigned long long khint64_t; +#endif + +#ifndef kh_inline +#ifdef _MSC_VER +#define kh_inline __inline +#else +#define kh_inline inline +#endif +#endif /* kh_inline */ + +#ifndef klib_unused +#if (defined __clang__ && __clang_major__ >= 3) || (defined __GNUC__ && __GNUC__ >= 3) +#define klib_unused __attribute__ ((__unused__)) +#else +#define klib_unused +#endif +#endif /* klib_unused */ + +typedef khint32_t khint_t; +typedef khint_t khiter_t; + +#define __ac_isempty(flag, i) ((flag[i>>4]>>((i&0xfU)<<1))&2) +#define __ac_isdel(flag, i) ((flag[i>>4]>>((i&0xfU)<<1))&1) +#define __ac_iseither(flag, i) ((flag[i>>4]>>((i&0xfU)<<1))&3) +#define __ac_set_isdel_false(flag, i) (flag[i>>4]&=~(1ul<<((i&0xfU)<<1))) +#define __ac_set_isempty_false(flag, i) (flag[i>>4]&=~(2ul<<((i&0xfU)<<1))) +#define __ac_set_isboth_false(flag, i) (flag[i>>4]&=~(3ul<<((i&0xfU)<<1))) +#define __ac_set_isdel_true(flag, i) (flag[i>>4]|=1ul<<((i&0xfU)<<1)) + +#define __ac_fsize(m) ((m) < 16? 
1 : (m)>>4) + +#ifndef kroundup32 +#define kroundup32(x) (--(x), (x)|=(x)>>1, (x)|=(x)>>2, (x)|=(x)>>4, (x)|=(x)>>8, (x)|=(x)>>16, ++(x)) +#endif + +#ifndef kcalloc +#define kcalloc(N,Z) calloc(N,Z) +#endif +#ifndef kmalloc +#define kmalloc(Z) malloc(Z) +#endif +#ifndef krealloc +#define krealloc(P,Z) realloc(P,Z) +#endif +#ifndef kfree +#define kfree(P) free(P) +#endif + +static const double __ac_HASH_UPPER = 0.77; + +#define __KHASH_TYPE(name, khkey_t, khval_t) \ + typedef struct kh_##name##_s { \ + khint_t n_buckets, size, n_occupied, upper_bound; \ + khint32_t *flags; \ + khkey_t *keys; \ + khval_t *vals; \ + } kh_##name##_t; + +#define __KHASH_PROTOTYPES(name, khkey_t, khval_t) \ + extern kh_##name##_t *kh_init_##name(void); \ + extern void kh_destroy_##name(kh_##name##_t *h); \ + extern void kh_clear_##name(kh_##name##_t *h); \ + extern khint_t kh_get_##name(const kh_##name##_t *h, khkey_t key); \ + extern int kh_resize_##name(kh_##name##_t *h, khint_t new_n_buckets); \ + extern khint_t kh_put_##name(kh_##name##_t *h, khkey_t key, int *ret); \ + extern void kh_del_##name(kh_##name##_t *h, khint_t x); + +#define __KHASH_IMPL(name, SCOPE, khkey_t, khval_t, kh_is_map, __hash_func, __hash_equal) \ + SCOPE kh_##name##_t *kh_init_##name(void) { \ + return (kh_##name##_t*)kcalloc(1, sizeof(kh_##name##_t)); \ + } \ + SCOPE void kh_destroy_##name(kh_##name##_t *h) \ + { \ + if (h) { \ + kfree((void *)h->keys); kfree(h->flags); \ + kfree((void *)h->vals); \ + kfree(h); \ + } \ + } \ + SCOPE void kh_clear_##name(kh_##name##_t *h) \ + { \ + if (h && h->flags) { \ + memset(h->flags, 0xaa, __ac_fsize(h->n_buckets) * sizeof(khint32_t)); \ + h->size = h->n_occupied = 0; \ + } \ + } \ + SCOPE khint_t kh_get_##name(const kh_##name##_t *h, khkey_t key) \ + { \ + if (h->n_buckets) { \ + khint_t k, i, last, mask, step = 0; \ + mask = h->n_buckets - 1; \ + k = __hash_func(key); i = k & mask; \ + last = i; \ + while (!__ac_isempty(h->flags, i) && (__ac_isdel(h->flags, i) || 
!__hash_equal(h->keys[i], key))) { \ + i = (i + (++step)) & mask; \ + if (i == last) return h->n_buckets; \ + } \ + return __ac_iseither(h->flags, i)? h->n_buckets : i; \ + } else return 0; \ + } \ + SCOPE int kh_resize_##name(kh_##name##_t *h, khint_t new_n_buckets) \ + { /* This function uses 0.25*n_buckets bytes of working space instead of [sizeof(key_t+val_t)+.25]*n_buckets. */ \ + khint32_t *new_flags = 0; \ + khint_t j = 1; \ + { \ + kroundup32(new_n_buckets); \ + if (new_n_buckets < 4) new_n_buckets = 4; \ + if (h->size >= (khint_t)(new_n_buckets * __ac_HASH_UPPER + 0.5)) j = 0; /* requested size is too small */ \ + else { /* hash table size to be changed (shrink or expand); rehash */ \ + new_flags = (khint32_t*)kmalloc(__ac_fsize(new_n_buckets) * sizeof(khint32_t)); \ + if (!new_flags) return -1; \ + memset(new_flags, 0xaa, __ac_fsize(new_n_buckets) * sizeof(khint32_t)); \ + if (h->n_buckets < new_n_buckets) { /* expand */ \ + khkey_t *new_keys = (khkey_t*)krealloc((void *)h->keys, new_n_buckets * sizeof(khkey_t)); \ + if (!new_keys) { kfree(new_flags); return -1; } \ + h->keys = new_keys; \ + if (kh_is_map) { \ + khval_t *new_vals = (khval_t*)krealloc((void *)h->vals, new_n_buckets * sizeof(khval_t)); \ + if (!new_vals) { kfree(new_flags); return -1; } \ + h->vals = new_vals; \ + } \ + } /* otherwise shrink */ \ + } \ + } \ + if (j) { /* rehashing is needed */ \ + for (j = 0; j != h->n_buckets; ++j) { \ + if (__ac_iseither(h->flags, j) == 0) { \ + khkey_t key = h->keys[j]; \ + khval_t val; \ + khint_t new_mask; \ + new_mask = new_n_buckets - 1; \ + if (kh_is_map) val = h->vals[j]; \ + __ac_set_isdel_true(h->flags, j); \ + while (1) { /* kick-out process; sort of like in Cuckoo hashing */ \ + khint_t k, i, step = 0; \ + k = __hash_func(key); \ + i = k & new_mask; \ + while (!__ac_isempty(new_flags, i)) i = (i + (++step)) & new_mask; \ + __ac_set_isempty_false(new_flags, i); \ + if (i < h->n_buckets && __ac_iseither(h->flags, i) == 0) { /* kick out the 
existing element */ \ + { khkey_t tmp = h->keys[i]; h->keys[i] = key; key = tmp; } \ + if (kh_is_map) { khval_t tmp = h->vals[i]; h->vals[i] = val; val = tmp; } \ + __ac_set_isdel_true(h->flags, i); /* mark it as deleted in the old hash table */ \ + } else { /* write the element and jump out of the loop */ \ + h->keys[i] = key; \ + if (kh_is_map) h->vals[i] = val; \ + break; \ + } \ + } \ + } \ + } \ + if (h->n_buckets > new_n_buckets) { /* shrink the hash table */ \ + h->keys = (khkey_t*)krealloc((void *)h->keys, new_n_buckets * sizeof(khkey_t)); \ + if (kh_is_map) h->vals = (khval_t*)krealloc((void *)h->vals, new_n_buckets * sizeof(khval_t)); \ + } \ + kfree(h->flags); /* free the working space */ \ + h->flags = new_flags; \ + h->n_buckets = new_n_buckets; \ + h->n_occupied = h->size; \ + h->upper_bound = (khint_t)(h->n_buckets * __ac_HASH_UPPER + 0.5); \ + } \ + return 0; \ + } \ + SCOPE khint_t kh_put_##name(kh_##name##_t *h, khkey_t key, int *ret) \ + { \ + khint_t x; \ + if (h->n_occupied >= h->upper_bound) { /* update the hash table */ \ + if (h->n_buckets > (h->size<<1)) { \ + if (kh_resize_##name(h, h->n_buckets - 1) < 0) { /* clear "deleted" elements */ \ + *ret = -1; return h->n_buckets; \ + } \ + } else if (kh_resize_##name(h, h->n_buckets + 1) < 0) { /* expand the hash table */ \ + *ret = -1; return h->n_buckets; \ + } \ + } /* TODO: to implement automatically shrinking; resize() already support shrinking */ \ + { \ + khint_t k, i, site, last, mask = h->n_buckets - 1, step = 0; \ + x = site = h->n_buckets; k = __hash_func(key); i = k & mask; \ + if (__ac_isempty(h->flags, i)) x = i; /* for speed up */ \ + else { \ + last = i; \ + while (!__ac_isempty(h->flags, i) && (__ac_isdel(h->flags, i) || !__hash_equal(h->keys[i], key))) { \ + if (__ac_isdel(h->flags, i)) site = i; \ + i = (i + (++step)) & mask; \ + if (i == last) { x = site; break; } \ + } \ + if (x == h->n_buckets) { \ + if (__ac_isempty(h->flags, i) && site != h->n_buckets) x = site; \ + else x 
= i; \ + } \ + } \ + } \ + if (__ac_isempty(h->flags, x)) { /* not present at all */ \ + h->keys[x] = key; \ + __ac_set_isboth_false(h->flags, x); \ + ++h->size; ++h->n_occupied; \ + *ret = 1; \ + } else if (__ac_isdel(h->flags, x)) { /* deleted */ \ + h->keys[x] = key; \ + __ac_set_isboth_false(h->flags, x); \ + ++h->size; \ + *ret = 2; \ + } else *ret = 0; /* Don't touch h->keys[x] if present and not deleted */ \ + return x; \ + } \ + SCOPE void kh_del_##name(kh_##name##_t *h, khint_t x) \ + { \ + if (x != h->n_buckets && !__ac_iseither(h->flags, x)) { \ + __ac_set_isdel_true(h->flags, x); \ + --h->size; \ + } \ + } + +#define KHASH_DECLARE(name, khkey_t, khval_t) \ + __KHASH_TYPE(name, khkey_t, khval_t) \ + __KHASH_PROTOTYPES(name, khkey_t, khval_t) + +#define KHASH_INIT2(name, SCOPE, khkey_t, khval_t, kh_is_map, __hash_func, __hash_equal) \ + __KHASH_TYPE(name, khkey_t, khval_t) \ + __KHASH_IMPL(name, SCOPE, khkey_t, khval_t, kh_is_map, __hash_func, __hash_equal) + +#define KHASH_INIT(name, khkey_t, khval_t, kh_is_map, __hash_func, __hash_equal) \ + KHASH_INIT2(name, static kh_inline klib_unused, khkey_t, khval_t, kh_is_map, __hash_func, __hash_equal) + +/* --- BEGIN OF HASH FUNCTIONS --- */ + +/*! @function + @abstract Integer hash function + @param key The integer [khint32_t] + @return The hash value [khint_t] + */ +#define kh_int_hash_func(key) (khint32_t)(key) +/*! @function + @abstract Integer comparison function + */ +#define kh_int_hash_equal(a, b) ((a) == (b)) +/*! @function + @abstract 64-bit integer hash function + @param key The integer [khint64_t] + @return The hash value [khint_t] + */ +#define kh_int64_hash_func(key) (khint32_t)((key)>>33^(key)^(key)<<11) +/*! @function + @abstract 64-bit integer comparison function + */ +#define kh_int64_hash_equal(a, b) ((a) == (b)) +/*! 
@function + @abstract const char* hash function + @param s Pointer to a null terminated string + @return The hash value + */ +static kh_inline khint_t __ac_X31_hash_string(const char *s) +{ + khint_t h = (khint_t)*s; + if (h) for (++s ; *s; ++s) h = (h << 5) - h + (khint_t)*s; + return h; +} +/*! @function + @abstract Another interface to const char* hash function + @param key Pointer to a null terminated string [const char*] + @return The hash value [khint_t] + */ +#define kh_str_hash_func(key) __ac_X31_hash_string(key) +/*! @function + @abstract Const char* comparison function + */ +#define kh_str_hash_equal(a, b) (strcmp(a, b) == 0) + +static kh_inline khint_t __ac_Wang_hash(khint_t key) +{ + key += ~(key << 15); + key ^= (key >> 10); + key += (key << 3); + key ^= (key >> 6); + key += ~(key << 11); + key ^= (key >> 16); + return key; +} +#define kh_int_hash_func2(key) __ac_Wang_hash((khint_t)key) + +/* --- END OF HASH FUNCTIONS --- */ + +/* Other convenient macros... */ + +/*! + @abstract Type of the hash table. + @param name Name of the hash table [symbol] + */ +#define khash_t(name) kh_##name##_t + +/*! @function + @abstract Initiate a hash table. + @param name Name of the hash table [symbol] + @return Pointer to the hash table [khash_t(name)*] + */ +#define kh_init(name) kh_init_##name() + +/*! @function + @abstract Destroy a hash table. + @param name Name of the hash table [symbol] + @param h Pointer to the hash table [khash_t(name)*] + */ +#define kh_destroy(name, h) kh_destroy_##name(h) + +/*! @function + @abstract Reset a hash table without deallocating memory. + @param name Name of the hash table [symbol] + @param h Pointer to the hash table [khash_t(name)*] + */ +#define kh_clear(name, h) kh_clear_##name(h) + +/*! @function + @abstract Resize a hash table. + @param name Name of the hash table [symbol] + @param h Pointer to the hash table [khash_t(name)*] + @param s New size [khint_t] + */ +#define kh_resize(name, h, s) kh_resize_##name(h, s) + +/*! 
@function + @abstract Insert a key to the hash table. + @param name Name of the hash table [symbol] + @param h Pointer to the hash table [khash_t(name)*] + @param k Key [type of keys] + @param r Extra return code: -1 if the operation failed; + 0 if the key is present in the hash table; + 1 if the bucket is empty (never used); 2 if the element in + the bucket has been deleted [int*] + @return Iterator to the inserted element [khint_t] + */ +#define kh_put(name, h, k, r) kh_put_##name(h, k, r) + +/*! @function + @abstract Retrieve a key from the hash table. + @param name Name of the hash table [symbol] + @param h Pointer to the hash table [khash_t(name)*] + @param k Key [type of keys] + @return Iterator to the found element, or kh_end(h) if the element is absent [khint_t] + */ +#define kh_get(name, h, k) kh_get_##name(h, k) + +/*! @function + @abstract Remove a key from the hash table. + @param name Name of the hash table [symbol] + @param h Pointer to the hash table [khash_t(name)*] + @param k Iterator to the element to be deleted [khint_t] + */ +#define kh_del(name, h, k) kh_del_##name(h, k) + +/*! @function + @abstract Test whether a bucket contains data. + @param h Pointer to the hash table [khash_t(name)*] + @param x Iterator to the bucket [khint_t] + @return 1 if containing data; 0 otherwise [int] + */ +#define kh_exist(h, x) (!__ac_iseither((h)->flags, (x))) + +/*! @function + @abstract Get key given an iterator + @param h Pointer to the hash table [khash_t(name)*] + @param x Iterator to the bucket [khint_t] + @return Key [type of keys] + */ +#define kh_key(h, x) ((h)->keys[x]) + +/*! @function + @abstract Get value given an iterator + @param h Pointer to the hash table [khash_t(name)*] + @param x Iterator to the bucket [khint_t] + @return Value [type of values] + @discussion For hash sets, calling this results in segfault. + */ +#define kh_val(h, x) ((h)->vals[x]) + +/*! 
@function + @abstract Alias of kh_val() + */ +#define kh_value(h, x) ((h)->vals[x]) + +/*! @function + @abstract Get the start iterator + @param h Pointer to the hash table [khash_t(name)*] + @return The start iterator [khint_t] + */ +#define kh_begin(h) (khint_t)(0) + +/*! @function + @abstract Get the end iterator + @param h Pointer to the hash table [khash_t(name)*] + @return The end iterator [khint_t] + */ +#define kh_end(h) ((h)->n_buckets) + +/*! @function + @abstract Get the number of elements in the hash table + @param h Pointer to the hash table [khash_t(name)*] + @return Number of elements in the hash table [khint_t] + */ +#define kh_size(h) ((h)->size) + +/*! @function + @abstract Get the number of buckets in the hash table + @param h Pointer to the hash table [khash_t(name)*] + @return Number of buckets in the hash table [khint_t] + */ +#define kh_n_buckets(h) ((h)->n_buckets) + +/*! @function + @abstract Iterate over the entries in the hash table + @param h Pointer to the hash table [khash_t(name)*] + @param kvar Variable to which key will be assigned + @param vvar Variable to which value will be assigned + @param code Block of code to execute + */ +#define kh_foreach(h, kvar, vvar, code) { khint_t __i; \ + for (__i = kh_begin(h); __i != kh_end(h); ++__i) { \ + if (!kh_exist(h,__i)) continue; \ + (kvar) = kh_key(h,__i); \ + (vvar) = kh_val(h,__i); \ + code; \ + } } + +/*! @function + @abstract Iterate over the values in the hash table + @param h Pointer to the hash table [khash_t(name)*] + @param vvar Variable to which value will be assigned + @param code Block of code to execute + */ +#define kh_foreach_value(h, vvar, code) { khint_t __i; \ + for (__i = kh_begin(h); __i != kh_end(h); ++__i) { \ + if (!kh_exist(h,__i)) continue; \ + (vvar) = kh_val(h,__i); \ + code; \ + } } + +/* More convenient interfaces */ + +/*! 
@function + @abstract Instantiate a hash set containing integer keys + @param name Name of the hash table [symbol] + */ +#define KHASH_SET_INIT_INT(name) \ + KHASH_INIT(name, khint32_t, char, 0, kh_int_hash_func, kh_int_hash_equal) + +/*! @function + @abstract Instantiate a hash map containing integer keys + @param name Name of the hash table [symbol] + @param khval_t Type of values [type] + */ +#define KHASH_MAP_INIT_INT(name, khval_t) \ + KHASH_INIT(name, khint32_t, khval_t, 1, kh_int_hash_func, kh_int_hash_equal) + +/*! @function + @abstract Instantiate a hash set containing 64-bit integer keys + @param name Name of the hash table [symbol] + */ +#define KHASH_SET_INIT_INT64(name) \ + KHASH_INIT(name, khint64_t, char, 0, kh_int64_hash_func, kh_int64_hash_equal) + +/*! @function + @abstract Instantiate a hash map containing 64-bit integer keys + @param name Name of the hash table [symbol] + @param khval_t Type of values [type] + */ +#define KHASH_MAP_INIT_INT64(name, khval_t) \ + KHASH_INIT(name, khint64_t, khval_t, 1, kh_int64_hash_func, kh_int64_hash_equal) + +typedef const char *kh_cstr_t; +/*! @function + @abstract Instantiate a hash map containing const char* keys + @param name Name of the hash table [symbol] + */ +#define KHASH_SET_INIT_STR(name) \ + KHASH_INIT(name, kh_cstr_t, char, 0, kh_str_hash_func, kh_str_hash_equal) + +/*! @function + @abstract Instantiate a hash map containing const char* keys + @param name Name of the hash table [symbol] + @param khval_t Type of values [type] + */ +#define KHASH_MAP_INIT_STR(name, khval_t) \ + KHASH_INIT(name, kh_cstr_t, khval_t, 1, kh_str_hash_func, kh_str_hash_equal) + +#endif /* __AC_KHASH_H */