diff --git a/src/FrameGrabber.cpp b/src/FrameGrabber.cpp
index d1809c97..2be6e2b2 100644
--- a/src/FrameGrabber.cpp
+++ b/src/FrameGrabber.cpp
@@ -272,7 +272,7 @@ FrameGrabber::FrameGrabber(): finished_(false), initialized_(false), active_(fal
     endofstream_(false), accept_buffer_(false), buffering_full_(false), pause_(false),
     pipeline_(nullptr), src_(nullptr), caps_(nullptr), timer_(nullptr), timer_firstframe_(0),
     timer_pauseframe_(0), timestamp_(0), duration_(0), pause_duration_(0), frame_count_(0),
-    buffering_size_(MIN_BUFFER_SIZE), buffering_count_(0), timestamp_on_clock_(true)
+    keyframe_count_(0), buffering_size_(MIN_BUFFER_SIZE), buffering_count_(0), timestamp_on_clock_(true)
 {
     // unique id
     id_ = BaseToolkit::uniqueId();
@@ -319,11 +319,16 @@ void FrameGrabber::setPaused(bool pause)
 {
     // can pause only if already active
     if (active_) {
+        // keep time of switch from not-paused to paused
         if (pause && !pause_)
            timer_pauseframe_ = gst_clock_get_time(timer_);
+        // set to paused
         pause_ = pause;
+
+        // pause pipeline
+        gst_element_set_state (pipeline_, pause_ ? GST_STATE_PAUSED : GST_STATE_PLAYING);
     }
 }
 
@@ -369,11 +374,32 @@ void FrameGrabber::callback_enough_data (GstAppSrc *, gpointer p)
     if (grabber) {
         grabber->accept_buffer_ = false;
 #ifndef NDEBUG
-    Log::Info("Frame capture : Buffer full");
+        Log::Info("Frame capture : Buffer full");
 #endif
     }
 }
 
+
+GstBusSyncReply FrameGrabber::signal_handler(GstBus *, GstMessage *msg, gpointer ptr)
+{
+    // only handle error messages
+    if (ptr != nullptr && GST_MESSAGE_TYPE(msg) == GST_MESSAGE_ERROR) {
+        // inform user
+        GError *error;
+        gst_message_parse_error(msg, &error, NULL);
+        Log::Warning("FrameGrabber %s : %s",
+                     std::to_string(reinterpret_cast<FrameGrabber *>(ptr)->id()).c_str(),
+                     error->message);
+        g_error_free(error);
+//    } else {
+//        g_printerr("FrameGrabber msg %s \n", GST_MESSAGE_TYPE_NAME(msg));
+    }
+
+    // drop all messages to avoid filling up the stack
+    return GST_BUS_DROP;
+}
+
+
 GstPadProbeReturn FrameGrabber::callback_event_probe(GstPad *, GstPadProbeInfo * info, gpointer p)
 {
     GstEvent *event = GST_PAD_PROBE_INFO_EVENT(info);
@@ -409,6 +435,15 @@ void FrameGrabber::addFrame (GstBuffer *buffer, GstCaps *caps)
 
         // if initialization succeeded
         if (initialized_) {
+
+#ifdef IGNORE_GST_ERROR_MESSAGE
+            // avoid filling up bus with messages
+            gst_bus_set_flushing(gst_element_get_bus(pipeline_), true);
+#else
+            // set message handler for the pipeline's bus
+            gst_bus_set_sync_handler(gst_element_get_bus(pipeline_),
+                                     FrameGrabber::signal_handler, this, NULL);
+#endif
             // attach EOS detector
             GstPad *pad = gst_element_get_static_pad (gst_bin_get_by_name (GST_BIN (pipeline_), "sink"), "sink");
             gst_pad_add_probe (pad, GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM, FrameGrabber::callback_event_probe, this, NULL);
@@ -458,6 +493,12 @@ void FrameGrabber::addFrame (GstBuffer *buffer, GstCaps *caps)
             if (timer_pauseframe_ > 0) {
                 // compute duration of the pausing time and add to total pause duration
                 pause_duration_ += gst_clock_get_time(timer_) - timer_pauseframe_;
+
+                // sync audio packets
+                GstElement *audiosync = GST_ELEMENT_CAST(gst_bin_get_by_name(GST_BIN(pipeline_), "audiosync"));
+                if (audiosync)
+                    g_object_set(G_OBJECT(audiosync), "ts-offset", -timer_pauseframe_, NULL);
+
                 // reset pause frame time
                 timer_pauseframe_ = 0;
             }
@@ -469,25 +510,31 @@ void FrameGrabber::addFrame (GstBuffer *buffer, GstCaps *caps)
 
         // if time is zero (first frame) or if delta time is passed one frame duration (with a margin)
         if ( t == 0 || (t - duration_) > (frame_duration_ - 3000) ) {
 
-            // count frames
-            frame_count_++;
-
-            // set duration to an exact multiples of frame duration
-            duration_ = ( t / frame_duration_) * frame_duration_;
+            // add a key frame every second (if keyframecount is valid)
+            if (keyframe_count_ > 1 && frame_count_ % keyframe_count_ < 1) {
+                GstEvent *event
+                    = gst_video_event_new_downstream_force_key_unit(timestamp_,
+                                                                    GST_CLOCK_TIME_NONE,
+                                                                    GST_CLOCK_TIME_NONE,
+                                                                    FALSE,
+                                                                    frame_count_ / keyframe_count_);
+                gst_element_send_event(GST_ELEMENT(src_), event);
+            }
 
             if (timestamp_on_clock_)
-                // automatic frame presentation time stamp
+                // automatic frame presentation time stamp (DURATION PRIORITY)
                 // Pipeline set to "do-timestamp"=TRUE
                 // set timestamp to actual time
                 timestamp_ = duration_;
             else {
-                // monotonic timestamp increment to keep fixed FPS
+                // force frame presentation timestamp (FRAMERATE PRIORITY)
                 // Pipeline set to "do-timestamp"=FALSE
-                timestamp_ += frame_duration_;
-                // force frame presentation timestamp
-                buffer->pts = timestamp_;
+                GST_BUFFER_DTS(buffer) = GST_BUFFER_PTS(buffer) = timestamp_;
                 // set frame duration
                 buffer->duration = frame_duration_;
+                // monotonic timestamp increment to keep fixed FPS
+                // Pipeline set to "do-timestamp"=FALSE
+                timestamp_ += frame_duration_;
             }
 
             // when buffering is (almost) full, refuse buffer 1 frame over 2
@@ -513,6 +560,12 @@ void FrameGrabber::addFrame (GstBuffer *buffer, GstCaps *caps)
 
             // push frame
             gst_app_src_push_buffer (src_, buffer); // NB: buffer will be unrefed by the appsrc
+
+            // count frames
+            frame_count_++;
+
+            // update duration to an exact multiples of frame duration
+            duration_ = ( t / frame_duration_) * frame_duration_;
         }
     }
 }
diff --git a/src/FrameGrabber.h b/src/FrameGrabber.h
index 90d4fa4a..3853436d 100644
--- a/src/FrameGrabber.h
+++ b/src/FrameGrabber.h
@@ -84,6 +84,7 @@ class FrameGrabber
     GstClockTime pause_duration_;
     GstClockTime frame_duration_;
     guint64 frame_count_;
+    guint64 keyframe_count_;
     guint64 buffering_size_;
     guint64 buffering_count_;
     bool timestamp_on_clock_;
@@ -96,6 +97,7 @@ class FrameGrabber
     static void callback_need_data (GstAppSrc *, guint, gpointer user_data);
     static void callback_enough_data (GstAppSrc *, gpointer user_data);
     static GstPadProbeReturn callback_event_probe(GstPad *, GstPadProbeInfo *info, gpointer user_data);
+    static GstBusSyncReply signal_handler(GstBus *, GstMessage *, gpointer);
 };
 
 /**
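The new FrameGrabber::signal_handler above mirrors MediaPlayer::signal_handler: report GST_MESSAGE_ERROR to the user and drop every message so the bus queue never grows. A minimal, self-contained sketch of that pattern is shown below; the videotestsrc pipeline and the fprintf reporting are illustrative stand-ins, not vimix code.

    // Sketch: synchronous GStreamer bus handler that logs errors and drops all messages.
    // Assumes GStreamer 1.x; stderr output stands in for Log::Warning.
    #include <gst/gst.h>
    #include <cstdio>

    static GstBusSyncReply on_bus_message(GstBus *, GstMessage *msg, gpointer /*user_data*/)
    {
        if (GST_MESSAGE_TYPE(msg) == GST_MESSAGE_ERROR) {
            GError *error = nullptr;
            gchar *debug = nullptr;
            gst_message_parse_error(msg, &error, &debug);
            std::fprintf(stderr, "Pipeline error: %s\n", error->message);
            g_clear_error(&error);
            g_free(debug);
        }
        // GST_BUS_DROP: the message is consumed here and never queued on the bus
        return GST_BUS_DROP;
    }

    int main(int argc, char *argv[])
    {
        gst_init(&argc, &argv);
        GstElement *pipeline = gst_parse_launch("videotestsrc ! fakesink", nullptr); // any pipeline
        GstBus *bus = gst_element_get_bus(pipeline);
        gst_bus_set_sync_handler(bus, on_bus_message, nullptr, nullptr);
        gst_object_unref(bus);
        gst_element_set_state(pipeline, GST_STATE_PLAYING);
        g_usleep(G_USEC_PER_SEC); // run briefly
        gst_element_set_state(pipeline, GST_STATE_NULL);
        gst_object_unref(pipeline);
        return 0;
    }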
diff --git a/src/Loopback.cpp b/src/Loopback.cpp
index 72b5e6d8..140c681d 100644
--- a/src/Loopback.cpp
+++ b/src/Loopback.cpp
@@ -124,7 +124,7 @@ std::string Loopback::init(GstCaps *caps)
     // specify streaming framerate in the given caps
     GstCaps *tmp = gst_caps_copy( caps );
-    GValue v = { 0, };
+    GValue v = G_VALUE_INIT;
     g_value_init (&v, GST_TYPE_FRACTION);
     gst_value_set_fraction (&v, LOOPBACK_FPS, 1);  // fixed FPS
     gst_caps_set_value(tmp, "framerate", &v);
diff --git a/src/MediaPlayer.cpp b/src/MediaPlayer.cpp
index 7b2b49d0..a5313734 100644
--- a/src/MediaPlayer.cpp
+++ b/src/MediaPlayer.cpp
@@ -353,7 +353,7 @@ GstBusSyncReply MediaPlayer::signal_handler(GstBus *, GstMessage *msg, gpointer
         // inform user
         GError *error;
         gst_message_parse_error(msg, &error, NULL);
-        Log::Warning("MediaPlayer %s : %s - %s",
+        Log::Warning("MediaPlayer %s : %s",
                      std::to_string(reinterpret_cast<MediaPlayer *>(ptr)->id()).c_str(),
                      error->message);
         g_error_free(error);
@@ -452,6 +452,9 @@ void MediaPlayer::execute_open()
     GstAppSinkCallbacks callbacks;
 #if GST_VERSION_MINOR > 18 && GST_VERSION_MAJOR > 0
     callbacks.new_event = NULL;
+#if GST_VERSION_MINOR > 23
+    callbacks.propose_allocation = NULL;
+#endif
 #endif
     callbacks.new_preroll = callback_new_preroll;
     if (singleFrame()) {
@@ -494,7 +497,7 @@ void MediaPlayer::execute_open()
 
 #ifdef IGNORE_GST_ERROR_MESSAGE
     // avoid filling up bus with messages
-    gst_bus_set_flushing(bus, true);
+    gst_bus_set_flushing(gst_element_get_bus(pipeline_), true);
 #else
     // set message handler for the pipeline's bus
     gst_bus_set_sync_handler(gst_element_get_bus(pipeline_),
@@ -639,6 +642,9 @@ void MediaPlayer::execute_open()
     GstAppSinkCallbacks callbacks;
 #if GST_VERSION_MINOR > 18 && GST_VERSION_MAJOR > 0
     callbacks.new_event = NULL;
+#if GST_VERSION_MINOR > 23
+    callbacks.propose_allocation = NULL;
+#endif
 #endif
     callbacks.new_preroll = callback_new_preroll;
     if (singleFrame()) {
diff --git a/src/MultiFileRecorder.cpp b/src/MultiFileRecorder.cpp
index 5686a2fd..96769c10 100644
--- a/src/MultiFileRecorder.cpp
+++ b/src/MultiFileRecorder.cpp
@@ -4,8 +4,6 @@
 #include
 #include
-#include
-
 #include "Log.h"
 #include "GstToolkit.h"
 #include "BaseToolkit.h"
@@ -16,7 +14,7 @@
 #include "MultiFileRecorder.h"
 
 MultiFileRecorder::MultiFileRecorder() :
-    fps_(0), width_(0), height_(0), bpp_(3),
+    fps_(0), width_(0), height_(0),
     pipeline_(nullptr), src_(nullptr), frame_count_(0), timestamp_(0), frame_duration_(0),
     cancel_(false), endofstream_(false), accept_buffer_(false), progress_(0.f)
 {
@@ -66,55 +64,80 @@ void MultiFileRecorder::callback_enough_data (GstAppSrc *, gpointer p)
     grabber->accept_buffer_ = false;
 }
 
-bool MultiFileRecorder::add_image (const std::string &image_filename)
+bool MultiFileRecorder::add_image (const std::string &image_filename, GstCaps *caps)
 {
-    if (image_filename.empty())
+    std::string uri = GstToolkit::filename_to_uri(image_filename);
+    if (uri.empty())
         return false;
 
-    // read pix
-    int c = 0;
-    int w = 0;
-    int h = 0;
-    unsigned char* rgb = stbi_load(image_filename.c_str(), &w, &h, &c, bpp_);
+    // create playbin
+    GstElement *img_pipeline = gst_element_factory_make("playbin", "imgreader");
+
+    // set uri of file to open
+    g_object_set(G_OBJECT(img_pipeline), "uri", uri.c_str(), NULL);
+
+    // set flag to only read VIDEO
+    g_object_set(G_OBJECT(img_pipeline), "flags", 0x00000001, NULL);
+
+    // instruct sink to use the required caps
+    GstElement *sink = gst_element_factory_make("appsink", "imgsink");
+    gst_app_sink_set_caps(GST_APP_SINK(sink), caps);
 
-    if ( rgb && w == width_ && h == height_ && c == bpp_) {
+    // set playbin sink
+    g_object_set(G_OBJECT(img_pipeline), "video-sink", sink, NULL);
 
-        // new buffer
-        guint size = width_ * height_ * bpp_;
-        GstBuffer *buffer = gst_buffer_new_and_alloc (size);
+    /* Start the pipeline */
+    gst_element_set_state(img_pipeline, GST_STATE_PLAYING);
 
-        // map gst buffer into a memory WRITE target
-        GstMapInfo map;
-        gst_buffer_map (buffer, &map, GST_MAP_WRITE);
+    /* Wait for the pipeline to preroll, i.e., wait for the image to be loaded */
+    gst_element_get_state(img_pipeline, NULL, NULL, GST_CLOCK_TIME_NONE);
 
-        // transfer pixels from memory to buffer memory
-        memmove(map.data, rgb, size);
+    /* Get the sample from appsink */
+    GstSample *sample;
+    g_signal_emit_by_name(sink, "pull-sample", &sample, NULL);
 
-        // un-map
-        gst_buffer_unmap (buffer, &map);
+    /* Extract the buffer */
+    GstBuffer *buffer_read = gst_sample_get_buffer(sample);
 
-        // free stbi memory
-        stbi_image_free( rgb );
+    bool ret = false;
+    // map the buffer to access the data
+    GstMapInfo map_read;
+    if ( gst_buffer_map(buffer_read, &map_read, GST_MAP_READ) && map_read.size > 0 ) {
 
-        //g_print("frame_added @ timestamp = %ld\n", timestamp_);
-        GST_BUFFER_DTS(buffer) = GST_BUFFER_PTS(buffer) = timestamp_;
+        // map a new gst buffer into memory to WRITE target
+        GstMapInfo map_write;
+        GstBuffer *buffer_write = gst_buffer_new_and_alloc(map_read.size);
+        if ( gst_buffer_map(buffer_write, &map_write, GST_MAP_WRITE) ) {
 
-        // set frame duration
-        buffer->duration = frame_duration_;
+            // transfer pixels from map_read memory to map_write memory (buffer to write to)
+            memmove(map_write.data, map_read.data, map_read.size);
 
-        // monotonic time increment to keep fixed FPS
-        timestamp_ += frame_duration_;
+            // un-map buffer
+            gst_buffer_unmap(buffer_write, &map_write);
 
-        // push frame
-        if ( gst_app_src_push_buffer (src_, buffer) != GST_FLOW_OK )
-            return false;
+            //g_print("frame_added @ timestamp = %ld\n", timestamp_);
+            GST_BUFFER_DTS(buffer_write) = GST_BUFFER_PTS(buffer_write) = timestamp_;
+
+            // set frame duration
+            buffer_write->duration = frame_duration_;
+
+            // monotonic time increment to keep fixed FPS
+            timestamp_ += frame_duration_;
+
+            // push buffer as new frame in appsrc
+            ret = gst_app_src_push_buffer(src_, buffer_write) == GST_FLOW_OK;
+        }
+        // unmap read buffer
+        gst_buffer_unmap(buffer_read, &map_read);
     }
-    else
-        return false;
 
+    /* Clean up */
+    gst_sample_unref(sample);
+    gst_element_set_state(img_pipeline, GST_STATE_NULL);
+    gst_object_unref(GST_OBJECT(img_pipeline));
 
-    return true;
+    return ret;
 }
 
 
@@ -132,7 +155,19 @@ bool MultiFileRecorder::start_record (const std::string &video_filename)
 
     // create a gstreamer pipeline
     std::string description = "appsrc name=src ! queue ! videoconvert ! videoscale ! ";
-    description += VideoRecorder::profile_description[ profile_ ];
+
+    // test for a hardware accelerated encoder
+    if (Settings::application.render.gpu_decoding && (int) VideoRecorder::hardware_encoder.size() > 0 &&
+        GstToolkit::has_feature(VideoRecorder::hardware_encoder[profile_]) ) {
+
+        description += VideoRecorder::hardware_profile_description[Settings::application.image_sequence.profile];
+        Log::Info("MultiFileRecorder use hardware accelerated encoder (%s)", VideoRecorder::hardware_encoder[profile_].c_str());
+    }
+    // revert to software encoder
+    else
+        description += VideoRecorder::profile_description[profile_];
+
+    // qt muxer in .mov file
     description += "qtmux ! filesink name=sink";
 
     // parse pipeline descriptor
@@ -179,7 +214,7 @@ bool MultiFileRecorder::start_record (const std::string &video_filename)
     // specify recorder resolution and framerate in the source caps
     GstCaps *caps = gst_caps_new_simple ("video/x-raw",
-                                         "format", G_TYPE_STRING, bpp_ < 4 ? "RGB" : "RGBA",
+                                         "format", G_TYPE_STRING, "RGB",
                                          "width", G_TYPE_INT, width_ - width_%2,
                                          "height", G_TYPE_INT, height_ - height_%2,
                                          "framerate", GST_TYPE_FRACTION, fps_, 1,
@@ -204,35 +239,7 @@ bool MultiFileRecorder::start_record (const std::string &video_filename)
     int max = 100;
     accept_buffer_ = false;
     while (!accept_buffer_ && --max > 0)
-        std::this_thread::sleep_for(std::chrono::milliseconds(5));
-
-
-//    // send request key frame upstream
-//    GstEvent* event = gst_video_event_new_upstream_force_key_unit (GST_CLOCK_TIME_NONE, TRUE, 1);
-//    if (!gst_element_send_event( GST_ELEMENT(sink), event) )
-//        Log::Warning("MultiFileRecorder: Failed to request key unit.");
-
-//    GstPad *padsrc = gst_element_get_static_pad ( GST_ELEMENT (sink), "sink");
-//    gst_pad_push_event(padsrc, gst_event_new_custom(GST_EVENT_CUSTOM_UPSTREAM, gst_structure_new("GstForceKeyUnit", "all-headers",
-//                                                    G_TYPE_BOOLEAN, TRUE, NULL)));
-//    gst_object_unref (padsrc);
-
-//    // send request key frame downstream
-//    GstPad *padsrc = gst_element_get_static_pad ( GST_ELEMENT (src_), "src");
-//    GstStructure *s = gst_structure_new("GstForceKeyUnit",
-//                                        "timestamp", G_TYPE_UINT64, 0,
-//                                        "stream-time", G_TYPE_UINT64, 0,
-//                                        "running-time", G_TYPE_UINT64, 0,
-//                                        "all-headers", G_TYPE_BOOLEAN, TRUE,
-//                                        NULL);
-//    if ( !gst_pad_push_event(padsrc, gst_event_new_custom(GST_EVENT_CUSTOM_DOWNSTREAM, s )) )
-//        Log::Warning("MultiFileRecorder: Failed to force key unit.");
-//    gst_object_unref (padsrc);
-
-    GstEvent* event = gst_video_event_new_downstream_force_key_unit (GST_CLOCK_TIME_NONE, GST_CLOCK_TIME_NONE, GST_CLOCK_TIME_NONE, TRUE, 1);
-    if (!gst_element_send_event( GST_ELEMENT(src_), event) )
-        Log::Warning("MultiFileRecorder: Failed to force key unit.");
-
+        std::this_thread::sleep_for(std::chrono::milliseconds(4));
 
     return true;
 }
@@ -305,8 +312,13 @@ bool MultiFileRecorder::finished ()
         // get the filename from encoder
         filename_ = promises_.back().get();
         if (!filename_.empty()) {
-            // save path location
-            Settings::application.recentRecordings.push(filename_);
+            // save path location if valid
+            std::string uri = GstToolkit::filename_to_uri(filename_);
+            MediaInfo media = MediaPlayer::UriDiscoverer(uri);
+            if (media.valid && !media.isimage)
+                Settings::application.recentRecordings.push(filename_);
+            else
+                Settings::application.recentRecordings.remove(filename_);
         }
         // done with this recoding
         promises_.pop_back();
@@ -324,25 +336,32 @@ std::string MultiFileRecorder::assemble (MultiFileRecorder *rec)
     rec->progress_ = 0.f;
     rec->width_ = 0;
     rec->height_ = 0;
-    rec->bpp_ = 0;
+    rec->cancel_ = false;
 
     // input files
     if ( rec->files_.size() < 1 ) {
-        Log::Warning("MultiFileRecorder: No image given.");
+        Log::Warning("MultiFileRecorder No image given.");
         return filename;
     }
 
-    // set recorder resolution from first image
-    stbi_info( rec->files_.front().c_str(), &rec->width_, &rec->height_, &rec->bpp_);
-
-    if ( rec->width_ < 10 || rec->height_ < 10 || rec->bpp_ < 3 ) {
-        Log::Warning("MultiFileRecorder: Invalid image %s.", rec->files_.front().c_str());
+    // get info first file
+    std::string uri = GstToolkit::filename_to_uri(rec->files_.front());
+    MediaInfo media = MediaPlayer::UriDiscoverer(uri);
+    if (!media.valid || !media.isimage || media.width < 10 || media.height < 10) {
+        Log::Warning("MultiFileRecorder Invalid file %s.", rec->files_.front().c_str());
         return filename;
     }
 
+    // set recorder resolution from first image
+    rec->width_ = media.width;
+    rec->height_ = media.height;
+
     // progress increment
     float inc_ = 1.f / ( (float) rec->files_.size() + 2.f);
+    // keyframe increment
+    guint64 keyf_ = MAXI( 2, rec->files_.size() / 20);
+
     // initialize
     rec->frame_count_ = 0;
     filename = BaseToolkit::common_prefix (rec->files_);
@@ -350,10 +369,18 @@ std::string MultiFileRecorder::assemble (MultiFileRecorder *rec)
         filename += "image";
     filename += "_sequence.mov";
 
-    Log::Info("MultiFileRecorder creating %s, %d x %d px.", filename.c_str(), rec->width_, rec->height_);
+    Log::Info("MultiFileRecorder Creating %s, %d x %d px.", filename.c_str(), rec->width_, rec->height_);
 
     if ( rec->start_record( filename ) ) {
 
+        // specify caps for images (same as video, without framerate)
+        GstCaps *tmp_caps = gst_caps_copy( gst_app_src_get_caps(rec->src_) );
+        GValue v = G_VALUE_INIT;
+        g_value_init (&v, GST_TYPE_FRACTION);
+        gst_value_set_fraction (&v, 0, 1);
+        gst_caps_set_value(tmp_caps, "framerate", &v);
+        g_value_unset (&v);
+
         // progressing
         rec->progress_ += inc_;
 
@@ -363,16 +390,25 @@ std::string MultiFileRecorder::assemble (MultiFileRecorder *rec)
             if ( rec->cancel_ )
                 break;
 
-            if ( rec->add_image( *file ) )
+            if ( rec->add_image( *file, tmp_caps) )
             {
                 // validate file
                 rec->frame_count_++;
+
+                // add a key frame every
+                if ( rec->frame_count_%keyf_ < 1 ) {
+                    GstEvent *event = gst_video_event_new_downstream_force_key_unit(
+                        rec->timestamp_, GST_CLOCK_TIME_NONE, GST_CLOCK_TIME_NONE, FALSE, rec->frame_count_ / keyf_);
+                    if (!gst_element_send_event(GST_ELEMENT(rec->src_), event))
+                        Log::Info("MultiFileRecorder Failed to force key unit %l.", rec->timestamp_);
+                }
+
             }
             else
-                Log::Info("MultiFileRecorder could not add %s.", file->c_str());
+                Log::Info("MultiFileRecorder Could not add %s.", file->c_str());
 
             // pause in case appsrc buffer is full
             int max = 100;
             while (!rec->accept_buffer_ && --max > 0)
-                std::this_thread::sleep_for(std::chrono::milliseconds(10));
+                std::this_thread::sleep_for(std::chrono::milliseconds(4));
 
             // progressing
             rec->progress_ += inc_;
@@ -380,13 +416,15 @@ std::string MultiFileRecorder::assemble (MultiFileRecorder *rec)
 
         // Give more explanation for possible errors
         if ( rec->frame_count_ < rec->files_.size())
-            Log::Info("MultiFileRecorder not fully successful; are all images %d x %d px?",rec->width_, rec->height_);
+            Log::Info("MultiFileRecorder Not fully successful; are all images %d x %d px?",rec->width_, rec->height_);
 
         // close file properly
         if ( rec->end_record() )
            Log::Info("MultiFileRecorder %d images encoded (%s).", rec->frame_count_,
                      GstToolkit::time_to_string(rec->timestamp_, GstToolkit::TIME_STRING_READABLE).c_str());
        else
            filename = std::string();
+
+        gst_caps_unref(tmp_caps);
     }
     else
         filename = std::string();
diff --git a/src/MultiFileRecorder.h b/src/MultiFileRecorder.h
index 4359d984..e8b97775 100644
--- a/src/MultiFileRecorder.h
+++ b/src/MultiFileRecorder.h
@@ -44,7 +44,7 @@ class MultiFileRecorder
     // gstreamer functions
     static std::string assemble (MultiFileRecorder *rec);
     bool start_record (const std::string &video_filename);
-    bool add_image (const std::string &image_filename);
+    bool add_image (const std::string &image_filename, GstCaps *caps);
     bool end_record();
 
     // gstreamer callbacks
@@ -59,7 +59,6 @@ class MultiFileRecorder
     int fps_;
     int width_;
     int height_;
-    int bpp_;
 
     // encoder
     std::list<std::string> files_;
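MultiFileRecorder::add_image above now decodes each input image with a short-lived playbin/appsink pipeline instead of stb_image: playbin prerolls on the file, the appsink converts to the caps of the recording pipeline, and a single sample is pulled. A condensed sketch of that technique, assuming RGB output caps and a local file path (error handling omitted; the function name is illustrative):

    // Sketch: decode one image file to raw RGB using playbin + appsink (GStreamer 1.x).
    #include <gst/gst.h>
    #include <gst/app/gstappsink.h>
    #include <string>

    static GstSample *load_image_sample(const std::string &path)
    {
        gchar *uri = gst_filename_to_uri(path.c_str(), nullptr);
        GstElement *pipeline = gst_element_factory_make("playbin", nullptr);
        GstElement *sink = gst_element_factory_make("appsink", nullptr);

        // ask the sink for raw RGB frames, whatever the image format on disk
        GstCaps *caps = gst_caps_from_string("video/x-raw,format=RGB");
        gst_app_sink_set_caps(GST_APP_SINK(sink), caps);
        gst_caps_unref(caps);

        g_object_set(G_OBJECT(pipeline), "uri", uri, "video-sink", sink, NULL);
        g_free(uri);

        // preroll: for a still image the single decoded frame becomes available here
        gst_element_set_state(pipeline, GST_STATE_PLAYING);
        gst_element_get_state(pipeline, nullptr, nullptr, GST_CLOCK_TIME_NONE);

        // pull the decoded frame; the caller unrefs the sample and can map its buffer
        GstSample *sample = gst_app_sink_pull_sample(GST_APP_SINK(sink));

        gst_element_set_state(pipeline, GST_STATE_NULL);
        gst_object_unref(pipeline);
        return sample;
    }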
diff --git a/src/Recorder.cpp b/src/Recorder.cpp
index 4eb5168e..5e45fdc5 100644
--- a/src/Recorder.cpp
+++ b/src/Recorder.cpp
@@ -36,6 +36,7 @@
 #include "Settings.h"
 #include "GstToolkit.h"
 #include "SystemToolkit.h"
+#include "MediaPlayer.h"
 #include "Log.h"
 #include "Audio.h"
 
@@ -165,7 +166,7 @@ const std::vector<std::string> VideoRecorder::profile_description {
     // faster (4)
     // fast (5)
     "video/x-raw, format=I420 ! x264enc tune=\"zerolatency\" pass=4 quantizer=22 speed-preset=2 ! video/x-h264, profile=baseline ! h264parse ! ",
-    "video/x-raw, format=Y444_10LE ! x264enc pass=4 quantizer=18 speed-preset=3 ! video/x-h264, profile=(string)high-4:4:4 ! h264parse ! ",
+    "video/x-raw, format=Y444_10LE ! x264enc tune=\"zerolatency\" pass=4 quantizer=18 speed-preset=3 ! video/x-h264, profile=(string)high-4:4:4 ! h264parse ! ",
     // Control x265 encoder quality :
     // NB: apparently x265 only accepts I420 format :(
     // speed-preset
@@ -183,8 +184,8 @@ const std::vector<std::string> VideoRecorder::profile_description {
     // crf Quality-controlled variable bitrate [0 51]
     // default 28
     // 24 for x265 should be visually transparent; anything lower will probably just waste file size
-    "video/x-raw, format=I420 ! x265enc tune=2 speed-preset=2 option-string=\"crf=24\" ! video/x-h265, profile=(string)main ! h265parse ! ",
-    "video/x-raw, format=I420 ! x265enc tune=6 speed-preset=2 option-string=\"crf=12\" ! video/x-h265, profile=(string)main ! h265parse ! ",
+    "video/x-raw, format=I420 ! x265enc tune=\"zerolatency\" speed-preset=2 option-string=\"crf=24\" ! video/x-h265, profile=(string)main ! h265parse ! ",
+    "video/x-raw, format=I420 ! x265enc tune=\"zerolatency\" speed-preset=5 option-string=\"crf=12\" ! video/x-h265, profile=(string)main ! h265parse ! ",
     // Apple ProRes encoding parameters
     // pass
     // cbr (0) – Constant Bitrate Encoding
@@ -333,8 +334,9 @@ std::string VideoRecorder::init(GstCaps *caps)
 
     // apply settings
     buffering_size_ = MAX( MIN_BUFFER_SIZE, buffering_preset_value[Settings::application.record.buffering_mode]);
-    frame_duration_ = gst_util_uint64_scale_int (1, GST_SECOND, framerate_preset_value[Settings::application.record.framerate_mode]);
-    timestamp_on_clock_ = Settings::application.record.priority_mode < 1;
+    frame_duration_ = gst_util_uint64_scale_int (1, GST_SECOND, MAXI(framerate_preset_value[Settings::application.record.framerate_mode], 15));
+    timestamp_on_clock_ = Settings::application.record.priority_mode < 1;
+    keyframe_count_ = framerate_preset_value[Settings::application.record.framerate_mode];
 
     // create a gstreamer pipeline
     std::string description = "appsrc name=src ! videoconvert ! queue ! ";
@@ -365,20 +367,23 @@ std::string VideoRecorder::init(GstCaps *caps)
     else {
 
         // Add Audio to pipeline
-        if (!Settings::application.record.audio_device.empty()) {
+        if ( Settings::application.accept_audio &&
+             !Settings::application.record.audio_device.empty()) {
             // ensure the Audio manager has the device specified in settings
             int current_audio = Audio::manager().index(Settings::application.record.audio_device);
             if (current_audio > -1) {
                 description += "mux. ";
                 description += Audio::manager().pipeline(current_audio);
                 description += " ! audio/x-raw ! audioconvert ! audioresample ! ";
+                description += "identity name=audiosync ! ";
                 // select encoder depending on codec
                 if ( Settings::application.record.profile == VP8)
                     description += "opusenc ! opusparse ! queue ! ";
                 else
-                    description += "voaacenc ! aacparse ! queue ! ";
+                    description += "avenc_aac ! aacparse ! queue ! ";
 
                 Log::Info("Video Recording with audio (%s)", Audio::manager().pipeline(current_audio).c_str());
+            }
         }
     }
 
@@ -416,7 +421,7 @@ std::string VideoRecorder::init(GstCaps *caps)
     // setup file sink
     g_object_set (G_OBJECT (gst_bin_get_by_name (GST_BIN (pipeline_), "sink")),
                   "location", filename_.c_str(),
-                  "sync", FALSE,
+                  "sync", TRUE,
                   NULL);
 
     // setup custom app source
@@ -440,7 +445,7 @@ std::string VideoRecorder::init(GstCaps *caps)
 
     // specify recorder framerate in the given caps
     GstCaps *tmp = gst_caps_copy( caps );
-    GValue v = { 0, };
+    GValue v = G_VALUE_INIT;
     g_value_init (&v, GST_TYPE_FRACTION);
     gst_value_set_fraction (&v, framerate_preset_value[Settings::application.record.framerate_mode], 1);
     gst_caps_set_value(tmp, "framerate", &v);
@@ -463,6 +468,12 @@ std::string VideoRecorder::init(GstCaps *caps)
         return std::string("Video Recording : Failed to configure frame grabber.");
     }
 
+    // Enforce a system clock for the recording pipeline
+    // (this allows keeping pipeline in synch when recording both
+    // video and audio - the automatic clock default chooses either
+    // the video or the audio source, which cause synch problems)
+    gst_pipeline_use_clock( GST_PIPELINE(pipeline_), gst_system_clock_obtain());
+
     // start recording
     GstStateChangeReturn ret = gst_element_set_state (pipeline_, GST_STATE_PLAYING);
     if (ret == GST_STATE_CHANGE_FAILURE) {
@@ -497,9 +508,15 @@ void VideoRecorder::terminate()
         Log::Info("Video Recording : try a lower resolution / a lower framerate / a larger buffer size / a faster codec.");
     }
 
-    // remember and inform
-    Settings::application.recentRecordings.push(filename_);
-    Log::Notify("Video Recording %s is ready.", filename_.c_str());
+    // remember and inform if valid
+    std::string uri = GstToolkit::filename_to_uri(filename_);
+    MediaInfo media = MediaPlayer::UriDiscoverer(uri);
+    if (media.valid && !media.isimage) {
+        Settings::application.recentRecordings.push(filename_);
+        Log::Notify("Video Recording %s is ready.", filename_.c_str());
+    }
+    else
+        Settings::application.recentRecordings.remove(filename_);
 }
 
 std::string VideoRecorder::info() const
diff --git a/src/Settings.cpp b/src/Settings.cpp
index d4661b7d..ab21dfd3 100644
--- a/src/Settings.cpp
+++ b/src/Settings.cpp
@@ -186,7 +186,6 @@ void Settings::Save(uint64_t runtime, const std::string &filename)
     RecordNode->SetAttribute("profile", application.record.profile);
     RecordNode->SetAttribute("timeout", application.record.timeout);
     RecordNode->SetAttribute("delay", application.record.delay);
-    RecordNode->SetAttribute("resolution_mode", application.record.resolution_mode);
     RecordNode->SetAttribute("framerate_mode", application.record.framerate_mode);
     RecordNode->SetAttribute("buffering_mode", application.record.buffering_mode);
     RecordNode->SetAttribute("priority_mode", application.record.priority_mode);
@@ -194,6 +193,12 @@ void Settings::Save(uint64_t runtime, const std::string &filename)
     RecordNode->SetAttribute("audio_device", application.record.audio_device.c_str());
     pRoot->InsertEndChild(RecordNode);
 
+    // Image sequence
+    XMLElement *SequenceNode = xmlDoc.NewElement( "Sequence" );
+    SequenceNode->SetAttribute("profile", application.image_sequence.profile);
+    SequenceNode->SetAttribute("framerate", application.image_sequence.framerate_mode);
+    pRoot->InsertEndChild(SequenceNode);
+
     // Transition
     XMLElement *TransitionNode = xmlDoc.NewElement( "Transition" );
     TransitionNode->SetAttribute("cross_fade", application.transition.cross_fade);
@@ -507,7 +512,6 @@ void Settings::Load(const string &filename)
         recordnode->QueryIntAttribute("profile", &application.record.profile);
         recordnode->QueryUnsignedAttribute("timeout", &application.record.timeout);
         recordnode->QueryIntAttribute("delay", &application.record.delay);
-        recordnode->QueryIntAttribute("resolution_mode", &application.record.resolution_mode);
         recordnode->QueryIntAttribute("framerate_mode", &application.record.framerate_mode);
         recordnode->QueryIntAttribute("buffering_mode", &application.record.buffering_mode);
         recordnode->QueryIntAttribute("priority_mode", &application.record.priority_mode);
@@ -520,12 +524,23 @@ void Settings::Load(const string &filename)
             application.record.path = SystemToolkit::home_path();
 
         const char *dev_ = recordnode->Attribute("audio_device");
-        if (dev_)
+        if (dev_) {
             application.record.audio_device = std::string(dev_);
+            // if recording with audio and have a device, force priority to Duration
+            if (application.accept_audio && !application.record.audio_device.empty())
+                application.record.priority_mode = 0;
+        }
         else
             application.record.audio_device = "";
     }
 
+    // Record
+    XMLElement * sequencenode = pRoot->FirstChildElement("Sequence");
+    if (sequencenode != nullptr) {
+        sequencenode->QueryIntAttribute("profile", &application.image_sequence.profile);
+        sequencenode->QueryIntAttribute("framerate", &application.image_sequence.framerate_mode);
+    }
+
     // Source
     XMLElement * sourceconfnode = pRoot->FirstChildElement("Source");
     if (sourceconfnode != nullptr) {
diff --git a/src/Settings.h b/src/Settings.h
index b933871d..85531e82 100644
--- a/src/Settings.h
+++ b/src/Settings.h
@@ -104,7 +104,6 @@ struct RecordConfig
     int profile;
     uint timeout;
     int delay;
-    int resolution_mode;
     int framerate_mode;
     int buffering_mode;
     int priority_mode;
@@ -115,7 +114,6 @@ struct RecordConfig
         profile = 0;
         timeout = RECORD_MAX_TIMEOUT;
         delay = 0;
-        resolution_mode = 1;
         framerate_mode = 1;
         buffering_mode = 2;
         priority_mode = 1;
@@ -318,6 +316,7 @@ struct Application
 
     // settings exporters
     RecordConfig record;
+    RecordConfig image_sequence;
 
     // settings new source
     SourceConfig source;
@@ -380,6 +379,7 @@ struct Application
         windows[0].h = 930;
         accept_audio = false;
         dialogPosition = glm::ivec2(-1, -1);
+        image_sequence.framerate_mode = 15;
     }
 };
 
diff --git a/src/ShmdataBroadcast.cpp b/src/ShmdataBroadcast.cpp
index 63e3734d..16f845d7 100644
--- a/src/ShmdataBroadcast.cpp
+++ b/src/ShmdataBroadcast.cpp
@@ -131,7 +131,7 @@ std::string ShmdataBroadcast::init(GstCaps *caps)
     // specify streaming framerate in the given caps
     GstCaps *tmp = gst_caps_copy( caps );
-    GValue v = { 0, };
+    GValue v = G_VALUE_INIT;
     g_value_init (&v, GST_TYPE_FRACTION);
     gst_value_set_fraction (&v, SHMDATA_FPS, 1);  // fixed 30 FPS
     gst_caps_set_value(tmp, "framerate", &v);
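The `GValue v = { 0, };` to `G_VALUE_INIT` change repeated in Loopback, Recorder, ShmdataBroadcast, Streamer and VideoBroadcast wraps the same operation everywhere: copy the negotiated caps and pin the framerate field to a fixed fraction. Written once as a small helper for illustration (the helper name and the 30 fps default are assumptions, not vimix API):

    // Sketch: return a copy of 'caps' constrained to a fixed framerate (e.g. 30/1).
    #include <gst/gst.h>

    static GstCaps *caps_with_fixed_framerate(const GstCaps *caps, int fps = 30)
    {
        GstCaps *tmp = gst_caps_copy(caps);
        GValue v = G_VALUE_INIT;              // portable zero-initialisation of a GValue
        g_value_init(&v, GST_TYPE_FRACTION);
        gst_value_set_fraction(&v, fps, 1);
        gst_caps_set_value(tmp, "framerate", &v);
        g_value_unset(&v);
        return tmp;                           // caller owns the returned caps
    }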
diff --git a/src/SourceControlWindow.cpp b/src/SourceControlWindow.cpp
index 03e0e0a8..c9c5dda9 100644
--- a/src/SourceControlWindow.cpp
+++ b/src/SourceControlWindow.cpp
@@ -55,7 +55,8 @@ SourceControlWindow::SourceControlWindow() : WorkspaceWindow("SourceController")
     play_toggle_request_(false), replay_request_(false), pending_(false),
     active_label_(LABEL_AUTO_MEDIA_PLAYER), active_selection_(-1),
     selection_context_menu_(false), selection_mediaplayer_(nullptr), selection_target_slower_(0), selection_target_faster_(0),
-    mediaplayer_active_(nullptr), mediaplayer_edit_fading_(false), mediaplayer_edit_pipeline_(false), mediaplayer_mode_(false), mediaplayer_slider_pressed_(false), mediaplayer_timeline_zoom_(1.f),
+    mediaplayer_active_(nullptr), mediaplayer_edit_fading_(false), mediaplayer_set_duration_(0),
+    mediaplayer_edit_pipeline_(false), mediaplayer_mode_(false), mediaplayer_slider_pressed_(false), mediaplayer_timeline_zoom_(1.f),
     magnifying_glass(false)
 {
     info_.setExtendedStringMode();
@@ -414,7 +415,7 @@ void SourceControlWindow::Render()
             }
             if ( ImGui::MenuItem(ICON_FA_HOURGLASS_HALF " Duration")){
-                mediaplayer_set_duration_ = true;
+                mediaplayer_set_duration_ = 1;
             }
         }
 
@@ -1505,7 +1506,7 @@ void SourceControlWindow::RenderSingleSource(Source *s)
                 mediaplayer_active_->reopen();
 
                 // open dialog to set duration
-                mediaplayer_set_duration_ = true;
+                mediaplayer_set_duration_ = 2;
             }
             ImGui::PopStyleColor(2);
 
@@ -2168,16 +2169,18 @@ void SourceControlWindow::RenderMediaPlayer(MediaSource *ms)
     /// Dialog to set timeline duration
     ///
     static double timeline_duration_ = 0.0;
+    static double timeline_duration_previous = 0.0;
     if (mediaplayer_set_duration_) {
-        mediaplayer_set_duration_ = false;
-        // open dialog
-        if (mediaplayer_active_) {
-            // get current duration of mediaplayer
-            GstClockTime end = mediaplayer_active_->timeline()->end();
-            timeline_duration_ = (double) ( GST_TIME_AS_MSECONDS(end) ) / 1000.f;
-            // open dialog to change duration
-            ImGui::OpenPopup(DIALOG_TIMELINE_DURATION);
-        }
+        // get current duration of mediaplayer
+        GstClockTime end = mediaplayer_active_->timeline()->end();
+        timeline_duration_ = (double) (GST_TIME_AS_MSECONDS(end)) / 1000.f;
+        // remember previous duration for Cancel
+        // NB: trick with var 'mediaplayer_set_duration_' set to 2 when first time created
+        timeline_duration_previous = mediaplayer_set_duration_ > 1 ? 0.0 : timeline_duration_;
+        // open dialog to change duration
+        ImGui::OpenPopup(DIALOG_TIMELINE_DURATION);
+        // only once
+        mediaplayer_set_duration_ = 0;
     }
     const ImVec2 tld_dialog_size(buttons_width_ * 2.f, buttons_height_ * 4);
     ImGui::SetNextWindowSize(tld_dialog_size, ImGuiCond_Always);
@@ -2193,26 +2196,41 @@ void SourceControlWindow::RenderMediaPlayer(MediaSource *ms)
         ImGui::Spacing();
 
         // get current timeline
-        Timeline tl = *mediaplayer_active_->timeline();
         ImGui::InputDouble("second", &timeline_duration_, 1.0f, 10.0f, "%.2f");
         timeline_duration_ = ABS(timeline_duration_);
 
         bool close = false;
         ImGui::SetCursorPos(pos + ImVec2(0.f, area.y - buttons_height_));
-        if (ImGui::Button(ICON_FA_TIMES " Cancel", ImVec2(area.x * 0.3f, 0)))
+        if (ImGui::Button(ICON_FA_TIMES " Cancel", ImVec2(area.x * 0.3f, 0))) {
+            // restore previous timeline duration
+            timeline_duration_ = timeline_duration_previous;
+            // close dialog
             close = true;
+        }
         ImGui::SetCursorPos(pos + ImVec2(area.x * 0.7f, area.y - buttons_height_));
         ImGui::PushStyleColor(ImGuiCol_Button, ImGui::GetStyleColorVec4(ImGuiCol_Tab));
         if (ImGui::Button(ICON_FA_CHECK " Apply", ImVec2(area.x * 0.3f, 0)) || ImGui::IsKeyPressedMap(ImGuiKey_Enter)
             || ImGui::IsKeyPressedMap(ImGuiKey_KeyPadEnter) ) {
-            // change timeline end
-            mediaplayer_active_->timeline()->setEnd( GST_MSECOND * (GstClockTime) ( timeline_duration_ * 1000.f ) );
             // close dialog
             close = true;
         }
         ImGui::PopStyleColor(1);
-        if (close)
+        if (close) {
+            // zero duration requested : delete timeline
+            if (timeline_duration_ < 0.01) {
+                // set empty timeline
+                Timeline tl;
+                mediaplayer_active_->setTimeline(tl);
+                mediaplayer_active_->play(false);
+                // re-open the image with NO timeline
+                mediaplayer_active_->reopen();
+            }
+            // else normal change timeline end
+            else
+                mediaplayer_active_->timeline()->setEnd( GST_MSECOND * (GstClockTime) ( timeline_duration_ * 1000.f ) );
+            // close popup window
             ImGui::CloseCurrentPopup();
+        }
         ImGui::EndPopup();
     }
 }
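The duration dialog above edits a plain double in seconds while the timeline stores a nanosecond GstClockTime, so values cross GST_MSECOND / GST_TIME_AS_MSECONDS in both directions. The two conversions, spelled out as stand-alone helpers for clarity (the helper names are illustrative):

    // Sketch: second <-> GstClockTime conversions with millisecond precision,
    // matching the arithmetic used by the timeline duration dialog.
    #include <gst/gst.h>

    static GstClockTime seconds_to_clocktime(double seconds)
    {
        return GST_MSECOND * (GstClockTime)(seconds * 1000.0);
    }

    static double clocktime_to_seconds(GstClockTime t)
    {
        return (double) GST_TIME_AS_MSECONDS(t) / 1000.0;
    }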
diff --git a/src/SourceControlWindow.h b/src/SourceControlWindow.h
index 1cfd4a00..2fba4409 100644
--- a/src/SourceControlWindow.h
+++ b/src/SourceControlWindow.h
@@ -49,7 +49,7 @@ class SourceControlWindow : public WorkspaceWindow
     // Render a single media player
     MediaPlayer *mediaplayer_active_;
     bool mediaplayer_edit_fading_;
-    bool mediaplayer_set_duration_;
+    int mediaplayer_set_duration_;
     bool mediaplayer_edit_pipeline_;
     bool mediaplayer_mode_;
     bool mediaplayer_slider_pressed_;
diff --git a/src/Stream.cpp b/src/Stream.cpp
index bfe5de28..53b6132e 100644
--- a/src/Stream.cpp
+++ b/src/Stream.cpp
@@ -169,6 +169,9 @@ StreamInfo StreamDiscoverer(const std::string &description, guint w, guint h)
     GstAppSinkCallbacks callbacks;
 #if GST_VERSION_MINOR > 18 && GST_VERSION_MAJOR > 0
     callbacks.new_event = NULL;
+#if GST_VERSION_MINOR > 23
+    callbacks.propose_allocation = NULL;
+#endif
 #endif
     callbacks.eos = NULL;
     callbacks.new_sample = NULL;
@@ -291,6 +294,9 @@ void Stream::execute_open()
     GstAppSinkCallbacks callbacks;
 #if GST_VERSION_MINOR > 18 && GST_VERSION_MAJOR > 0
     callbacks.new_event = NULL;
+#if GST_VERSION_MINOR > 23
+    callbacks.propose_allocation = NULL;
+#endif
 #endif
     callbacks.new_preroll = callback_new_preroll;
     if (single_frame_) {
diff --git a/src/Streamer.cpp b/src/Streamer.cpp
index 54df70b9..272a66f1 100644
--- a/src/Streamer.cpp
+++ b/src/Streamer.cpp
@@ -449,7 +449,7 @@ std::string VideoStreamer::init(GstCaps *caps)
     // specify streaming framerate in the given caps
     GstCaps *tmp = gst_caps_copy( caps );
-    GValue v = { 0, };
+    GValue v = G_VALUE_INIT;
     g_value_init (&v, GST_TYPE_FRACTION);
     gst_value_set_fraction (&v, STREAMING_FPS, 1);  // fixed 30 FPS
     gst_caps_set_value(tmp, "framerate", &v);
diff --git a/src/TextSource.cpp b/src/TextSource.cpp
index 781e8a16..62b36ca7 100644
--- a/src/TextSource.cpp
+++ b/src/TextSource.cpp
@@ -127,6 +127,9 @@ void TextContents::execute_open()
     GstAppSinkCallbacks callbacks;
 #if GST_VERSION_MINOR > 18 && GST_VERSION_MAJOR > 0
     callbacks.new_event = NULL;
+#if GST_VERSION_MINOR > 23
+    callbacks.propose_allocation = NULL;
+#endif
 #endif
     callbacks.new_preroll = callback_new_preroll;
     callbacks.eos = callback_end_of_stream;
diff --git a/src/UserInterfaceManager.cpp b/src/UserInterfaceManager.cpp
index a13c22c8..e7b1a436 100644
--- a/src/UserInterfaceManager.cpp
+++ b/src/UserInterfaceManager.cpp
@@ -789,8 +789,9 @@ bool UserInterface::TryClose()
     if (DialogToolkit::FileDialog::busy() || DialogToolkit::ColorPickerDialog::busy())
         return false;
 
-    // always stop all recordings
-    FrameGrabbing::manager().stopAll();
+    // always stop all recordings and pending actions
+    FrameGrabbing::manager().stopAll();
+    navigator.discardPannel();
 
     // force close if trying to close again although it is already pending for save
     if (pending_save_on_exit)
@@ -3398,7 +3399,7 @@ void Navigator::RenderSourcePannel(Source *s, const ImVec2 &iconsize)
     ImGui::Text("Source");
 
     // index indicator
-    ImGui::SetCursorPos(ImVec2(pannel_width_ - 2 * ImGui::GetTextLineHeight(), IMGUI_TOP_ALIGN));
+    ImGui::SetCursorPos(ImVec2(pannel_width_ - 2.8f * ImGui::GetTextLineHeightWithSpacing(), IMGUI_TOP_ALIGN));
     ImGui::TextDisabled("#%d", Mixer::manager().indexCurrentSource());
     ImGui::PopFont();
 
@@ -3858,7 +3859,7 @@ void Navigator::RenderNewPannel(const ImVec2 &iconsize)
                                                                        IMAGES_FILES_PATTERN);
         static MultiFileSequence _numbered_sequence;
         static MultiFileRecorder _video_recorder;
-        static int _fps = 25;
+        static int codec_id = -1;
 
         ImGui::Text("Image sequence");
 
@@ -3873,8 +3874,7 @@ void Navigator::RenderNewPannel(const ImVec2 &iconsize)
         ImGui::SameLine();
         ImGuiToolkit::HelpToolTip("Create a source displaying a sequence of images;\n"
                                   ICON_FA_CARET_RIGHT " files numbered consecutively\n"
-                                  ICON_FA_CARET_RIGHT " create a video from many images\n"
-                                  "Supports PNG, JPG or TIF.");
+                                  ICON_FA_CARET_RIGHT " create a video from many images");
 
         // return from thread for folder openning
         if (_selectImagesDialog.closed()) {
@@ -3890,9 +3890,16 @@ void Navigator::RenderNewPannel(const ImVec2 &iconsize)
 
             // automatically create a MultiFile Source if possible
             if (_numbered_sequence.valid()) {
+                // always come back to propose image sequence when possible
+                codec_id = -1;
+                // show source preview available if possible
                 std::string label = BaseToolkit::transliterate( BaseToolkit::common_pattern(sourceSequenceFiles) );
-                new_source_preview_.setSource( Mixer::manager().createSourceMultifile(sourceSequenceFiles, _fps), label);
-            }
+                new_source_preview_
+                    .setSource(Mixer::manager().createSourceMultifile(sourceSequenceFiles,
+                                                                      Settings::application.image_sequence.framerate_mode),
+                               label);
+            } else
+                codec_id = Settings::application.image_sequence.profile;
         }
 
         // multiple files selected
@@ -3906,44 +3913,84 @@ void Navigator::RenderNewPannel(const ImVec2 &iconsize)
             info.appendf("%d %s", (int) sourceSequenceFiles.size(), _numbered_sequence.codec.c_str());
             ImGui::SetNextItemWidth(IMGUI_RIGHT_ALIGN);
             ImGui::InputText("Images", (char *)info.c_str(), info.size(), ImGuiInputTextFlags_ReadOnly);
-            info.clear();
-            if (_numbered_sequence.location.empty())
-                info.append("Not consecutively numbered");
-            else
-                info.appendf("%s", SystemToolkit::base_filename(_numbered_sequence.location).c_str());
-            ImGui::SetNextItemWidth(IMGUI_RIGHT_ALIGN);
-            ImGui::InputText("Filenames", (char *)info.c_str(), info.size(), ImGuiInputTextFlags_ReadOnly);
             ImGui::PopStyleColor(1);
 
-            // offer to open file browser at location
-            std::string path = SystemToolkit::path_filename(sourceSequenceFiles.front());
-            std::string label = BaseToolkit::truncated(path, 25);
-            label = BaseToolkit::transliterate(label);
-            ImGuiToolkit::ButtonOpenUrl( label.c_str(), path.c_str(), ImVec2(IMGUI_RIGHT_ALIGN, 0) );
-            ImGui::SameLine(0, IMGUI_SAME_LINE);
-            ImGui::Text("Folder");
-
             // set framerate
             ImGui::SetNextItemWidth(IMGUI_RIGHT_ALIGN);
-            ImGui::SliderInt("Framerate", &_fps, 1, 30, "%d fps");
+            ImGui::SliderInt("Framerate", &Settings::application.image_sequence.framerate_mode, 1, 30, "%d fps");
             if (ImGui::IsItemDeactivatedAfterEdit()){
                 if (new_source_preview_.filled()) {
                     std::string label = BaseToolkit::transliterate( BaseToolkit::common_pattern(sourceSequenceFiles) );
-                    new_source_preview_.setSource( Mixer::manager().createSourceMultifile(sourceSequenceFiles, _fps), label);
+                    new_source_preview_
+                        .setSource(Mixer::manager().createSourceMultifile(
+                                       sourceSequenceFiles,
+                                       Settings::application.image_sequence.framerate_mode),
+                                   label);
                 }
             }
 
-            ImGui::Spacing();
+            // select CODEC: decide for gst sequence (codec_id = -1) or encoding a video
+            ImGui::SetNextItemWidth(IMGUI_RIGHT_ALIGN);
+            std::string codec_current = codec_id < 0 ? ICON_FA_SORT_NUMERIC_DOWN " Numbered images"
+                                                     : std::string(ICON_FA_FILM " ") + VideoRecorder::profile_name[codec_id];
+            if (ImGui::BeginCombo("##CodecSequence", codec_current.c_str())) {
+                // special case; if possible, offer to create an image sequence gst source
+                if (ImGui::Selectable( ICON_FA_SORT_NUMERIC_DOWN " Numbered images",
+                                       codec_id < 0,
+                                       _numbered_sequence.valid()
+                                           ? ImGuiSelectableFlags_None
+                                           : ImGuiSelectableFlags_Disabled)) {
+                    // select id of image sequence
+                    codec_id = -1;
+                    // Open source preview for image sequence
+                    if (_numbered_sequence.valid()) {
+                        std::string label = BaseToolkit::transliterate(
+                            BaseToolkit::common_pattern(sourceSequenceFiles));
+                        new_source_preview_
+                            .setSource(Mixer::manager().createSourceMultifile(
+                                           sourceSequenceFiles,
+                                           Settings::application.image_sequence.framerate_mode),
+                                       label);
+                    }
+                }
+                // always offer to encode a video
+                for (int i = VideoRecorder::H264_STANDARD; i < VideoRecorder::VP8; ++i) {
+                    std::string label = std::string(ICON_FA_FILM " ") + VideoRecorder::profile_name[i];
+                    if (ImGui::Selectable(label.c_str(), codec_id == i)) {
+                        // select id of video encoding codec
+                        codec_id = i;
+                        Settings::application.image_sequence.profile = i;
+                        // close source preview (no image sequence)
+                        new_source_preview_.setSource();
+                    }
+                }
+                ImGui::EndCombo();
+            }
+            // Indication
+            ImGui::SameLine();
+            if (_numbered_sequence.valid())
+                ImGuiToolkit::HelpToolTip(ICON_FA_SORT_NUMERIC_DOWN " Selected images are numbered consecutively; "
+                                          "an image sequence source can be created.\n\n"
+                                          ICON_FA_FILM " Alternatively, choose a codec to encode a video with the selected images and create a video source.");
+            else
+                ImGuiToolkit::HelpToolTip(ICON_FA_SORT_NUMERIC_DOWN " Selected images are NOT numbered consecutively; "
                                          "it is not possible to create a sequence source.\n\n"
+                                          ICON_FA_FILM " Instead, choose a codec to encode a video with the selected images and create a video source.");
 
-            // Offer to create video from sequence
-            if ( ImGui::Button( ICON_FA_FILM " Make a video", ImVec2(ImGui::GetContentRegionAvail().x, 0)) ) {
-                // start video recorder
-                _video_recorder.setFiles( sourceSequenceFiles );
-                _video_recorder.setFramerate( _fps );
-                _video_recorder.setProfile( (VideoRecorder::Profile) Settings::application.record.profile );
-                _video_recorder.start();
-                // dialog
-                ImGui::OpenPopup(LABEL_VIDEO_SEQUENCE);
+            // if video encoding codec selected
+            if ( codec_id >= 0 )
+            {
+                // Offer to create video from sequence
+                ImGui::NewLine();
+                if ( ImGui::Button( ICON_FA_FILM " Encode video", ImVec2(ImGui::GetContentRegionAvail().x, 0)) ) {
+                    // start video recorder
+                    _video_recorder.setFiles( sourceSequenceFiles );
+                    _video_recorder.setFramerate( Settings::application.image_sequence.framerate_mode );
+                    _video_recorder.setProfile( (VideoRecorder::Profile) Settings::application.image_sequence.profile );
+                    _video_recorder.start();
+                    // open dialog
+                    ImGui::OpenPopup(LABEL_VIDEO_SEQUENCE);
+                }
             }
 
             // video recorder finished: inform and open pannel to import video source from recent recordings
@@ -3954,7 +4001,7 @@ void Navigator::RenderNewPannel(const ImVec2 &iconsize)
                 else {
                     Log::Notify("Image sequence saved to %s.", _video_recorder.filename().c_str());
                     // open the file as new recording
-//                    if (Settings::application.recentRecordings.load_at_start)
+                    // if (Settings::application.recentRecordings.load_at_start)
                     UserInterface::manager().navigator.setNewMedia(Navigator::MEDIA_RECORDING, _video_recorder.filename());
                 }
             }
@@ -3977,7 +4024,8 @@ void Navigator::RenderNewPannel(const ImVec2 &iconsize)
                 ImGui::ProgressBar(_video_recorder.progress());
 
                 ImGui::Spacing();
-                if (ImGui::Button(ICON_FA_TIMES " Cancel"))
+                ImGui::Spacing();
+                if (ImGui::Button(ICON_FA_TIMES " Cancel",ImVec2(ImGui::GetContentRegionAvail().x, 0)))
                     _video_recorder.cancel();
 
                 ImGui::EndPopup();
@@ -4564,8 +4612,6 @@ void Navigator::RenderMainPannelSession()
     //
     // Session
     //
-    ImGui::Text("Session");
-
     std::string sessions_current = Mixer::manager().session()->filename();
     if (sessions_current.empty())
         sessions_current = "";
@@ -4681,7 +4727,7 @@ void Navigator::RenderMainPannelSession()
                 ImGui::Text(" Custom thumbnail");
             }
             else {
-                ImGui::Text(" No thumbnail ");
+                ImGui::Text(" Automatic thumbnail ");
             }
             ImGui::EndTooltip();
         }
@@ -5032,8 +5078,6 @@ void Navigator::RenderMainPannelPlaylist()
     //
    // SESSION panel
     //
-    ImGui::Text("Playlists");
-
     // currently active playlist and folder
     static std::string playlist_header = PLAYLIST_FAVORITES;
     static Playlist active_playlist;
@@ -5523,20 +5567,7 @@ void Navigator::RenderMainPannelSettings()
     //
     // Appearance
     //
-    ImGui::Text("Settings");
-    ImGui::SameLine();
-    ImGui::SetCursorPosX( pannel_width_ IMGUI_RIGHT_ALIGN);
-    if ( ImGuiToolkit::IconButton(ICON_FA_SAVE,"Export settings\nYou can then "
-                                  "launch vimix with the option "
-                                  "'--settings filename.xml' "
-                                  "to restore output windows and configuration.") ){
-        // launch file dialog to select file to save settings
-        if (UserInterface::manager().settingsexportdialog)
-            UserInterface::manager().settingsexportdialog->open();
-    }
-
     int v = Settings::application.accent_color;
-    ImGui::Spacing();
     ImGui::SetCursorPosX(0.5f * width_);
     if (ImGui::RadioButton("##Color", &v, v)){
         Settings::application.accent_color = (v+1)%3;
@@ -5609,16 +5640,29 @@ void Navigator::RenderMainPannelSettings()
             Settings::application.record.buffering_mode = 2;
 
         ImGuiToolkit::Indication("Priority when buffer is full and recorder has to skip frames;\n"
-                                 ICON_FA_CARET_RIGHT " Duration: Correct duration, variable framerate."
-                                 ICON_FA_CARET_RIGHT " Framerate: Correct framerate, shorter duration.\n",
+                                 ICON_FA_CARET_RIGHT " Duration: Correct duration, variable framerate.\n"
+                                 ICON_FA_CARET_RIGHT " Framerate: Correct framerate, shorter duration.",
                                  ICON_FA_CHECK_DOUBLE);
         ImGui::SameLine(0);
         ImGui::SetCursorPosX(width_);
         ImGui::SetNextItemWidth(IMGUI_RIGHT_ALIGN);
-        ImGui::Combo("##Priority", &Settings::application.record.priority_mode, "Duration\0Framerate\0");
+        const char *prioritylabel[2] = {"Duration", "Framerate"};
+        if (ImGui::BeginCombo("##Priority", prioritylabel[Settings::application.record.priority_mode])) {
+            if (ImGui::Selectable(prioritylabel[0], Settings::application.record.priority_mode == 0))
+                Settings::application.record.priority_mode = 0;
+            if (!Settings::application.accept_audio || Settings::application.record.audio_device.empty()) {
+                if (ImGui::Selectable(prioritylabel[1], Settings::application.record.priority_mode == 1))
+                    Settings::application.record.priority_mode = 1;
+            } else {
+                ImGui::Selectable(prioritylabel[1], false, ImGuiSelectableFlags_Disabled);
+                if (ImGui::IsItemHovered())
+                    ImGuiToolkit::ToolTip("Unable to set priority Framerate when recoding with audio.");
+            }
+            ImGui::EndCombo();
+        }
        ImGui::SameLine(0, IMGUI_SAME_LINE);
        if (ImGuiToolkit::TextButton("Priority"))
-            Settings::application.record.priority_mode = 1;
+            Settings::application.record.priority_mode = 0;
 
     //
     // AUDIO
@@ -5657,6 +5701,11 @@ void Navigator::RenderMainPannelSettings()
                             + namedev;
                     if (ImGui::Selectable(labeldev.c_str())) {
                         Settings::application.record.audio_device = namedev;
+                        // warning on recording mode
+                        if (Settings::application.record.priority_mode > 0) {
+                            Log::Notify( "When recording with audio, Priority mode must be set to 'Duration'.");
+                            Settings::application.record.priority_mode=0;
+                        }
                     }
                 }
                 ImGui::EndCombo();
@@ -5817,6 +5866,18 @@ void Navigator::RenderMainPannelSettings()
     //
     ImGuiToolkit::Spacing();
     ImGui::TextDisabled("System");
+    ImGui::SameLine();
+
+    ImGui::SetCursorPosX( pannel_width_ IMGUI_RIGHT_ALIGN);
+    if ( ImGuiToolkit::IconButton(ICON_FA_SAVE,"Export settings\nYou can then "
+                                  "launch vimix with the option "
+                                  "'--settings filename.xml' "
+                                  "to restore output windows and configuration.") ){
+        // launch file dialog to select file to save settings
+        if (UserInterface::manager().settingsexportdialog)
+            UserInterface::manager().settingsexportdialog->open();
+    }
+    ImGui::Spacing();
 
     static bool need_restart = false;
     static bool vsync = (Settings::application.render.vsync > 0);
@@ -5961,18 +6022,10 @@ void Navigator::RenderMainPannel(const ImVec2 &iconsize)
         // Temporary fix for preventing horizontal scrolling (https://github.com/ocornut/imgui/issues/2915)
         ImGui::SetScrollX(0);
 
-        //
-        // TITLE
-        //
-        ImGuiToolkit::PushFont(ImGuiToolkit::FONT_LARGE);
-        ImGui::SetCursorPosY(0.5f * (iconsize.y - ImGui::GetTextLineHeight()));
-        ImGui::Text("Vimix");
-
         //
         // Panel Mode selector
         //
-        //
-        ImGui::SetCursorPosY(width_ - style.WindowPadding.x);
+        ImGuiToolkit::PushFont(ImGuiToolkit::FONT_LARGE);
         ImGui::PushStyleVar(ImGuiStyleVar_SelectableTextAlign, ImVec2(0.5f, 0.5f));
         ImGui::Columns(5, NULL, false);
         bool selected_panel_mode[5] = {0};
@@ -6008,17 +6061,32 @@ void Navigator::RenderMainPannel(const ImVec2 &iconsize)
             ImGui::EndMenu();
         }
 
-        ImGui::SetCursorPosY(2.f * width_ - style.WindowPadding.x);
-
         //
         // Panel content
         //
-        if (pannel_main_mode_ == 0)
+        float __p = width_ + style.ItemSpacing.y + ImGui::GetTextLineHeightWithSpacing();
+        ImGui::SetCursorPosY(__p);
+        if (pannel_main_mode_ == 0) {
+            ImGuiToolkit::PushFont(ImGuiToolkit::FONT_LARGE);
+            ImGui::Text("Session");
+            ImGui::SetCursorPosY(__p + ImGui::GetFrameHeightWithSpacing());
+            ImGui::PopFont();
             RenderMainPannelSession();
-        else if (pannel_main_mode_ == 1)
+        }
+        else if (pannel_main_mode_ == 1) {
+            ImGuiToolkit::PushFont(ImGuiToolkit::FONT_LARGE);
+            ImGui::Text("Playlist");
+            ImGui::SetCursorPosY(__p + ImGui::GetFrameHeightWithSpacing());
+            ImGui::PopFont();
            RenderMainPannelPlaylist();
-        else
+        }
+        else {
+            ImGuiToolkit::PushFont(ImGuiToolkit::FONT_LARGE);
+            ImGui::Text("Settings");
+            ImGui::SetCursorPosY(__p + ImGui::GetFrameHeightWithSpacing());
+            ImGui::PopFont();
             RenderMainPannelSettings();
+        }
 
         //
         // About vimix
diff --git a/src/VideoBroadcast.cpp b/src/VideoBroadcast.cpp
index a2492adf..61800693 100644
--- a/src/VideoBroadcast.cpp
+++ b/src/VideoBroadcast.cpp
@@ -156,7 +156,7 @@ std::string VideoBroadcast::init(GstCaps *caps)
     // specify streaming framerate in the given caps
     GstCaps *tmp = gst_caps_copy( caps );
-    GValue v = { 0, };
+    GValue v = G_VALUE_INIT;
     g_value_init (&v, GST_TYPE_FRACTION);
     gst_value_set_fraction (&v, BROADCAST_FPS, 1);  // fixed 30 FPS
     gst_caps_set_value(tmp, "framerate", &v);
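For reference, the appsrc timestamping now shared by FrameGrabber::addFrame (framerate priority) and MultiFileRecorder::add_image boils down to the short sketch below: PTS and DTS carry the same monotonically increasing value, each buffer gets a fixed duration, and the appsrc takes ownership on push. The function name and the 30 fps value are assumptions made for the example.

    // Sketch: push frames into an appsrc at a fixed frame rate (GStreamer 1.x).
    #include <gst/gst.h>
    #include <gst/app/gstappsrc.h>

    static GstClockTime next_timestamp = 0;

    static gboolean push_fixed_fps_frame(GstAppSrc *src, GstBuffer *buffer, int fps = 30)
    {
        const GstClockTime frame_duration = gst_util_uint64_scale_int(1, GST_SECOND, fps);

        // identical presentation and decoding timestamps, monotonic per-frame increment
        GST_BUFFER_PTS(buffer) = next_timestamp;
        GST_BUFFER_DTS(buffer) = next_timestamp;
        GST_BUFFER_DURATION(buffer) = frame_duration;
        next_timestamp += frame_duration;

        // appsrc takes ownership of the buffer (no unref needed on success)
        return gst_app_src_push_buffer(src, buffer) == GST_FLOW_OK;
    }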