diff --git a/app/controllers/bookmarks_controller.rb b/app/controllers/bookmarks_controller.rb index 5da807b68a..f6938628dd 100644 --- a/app/controllers/bookmarks_controller.rb +++ b/app/controllers/bookmarks_controller.rb @@ -141,7 +141,7 @@ def add_to_playlist_action documents playlist = Playlist.find(params[:target_playlist_id]) Array(documents.map(&:id)).each do |id| media_object = SpeedyAF::Proxy::MediaObject.find(id) - media_object.ordered_master_files.to_a.each do |mf| + media_object.sections.each do |mf| clip = AvalonClip.create(master_file: mf) PlaylistItem.create(clip: clip, playlist: playlist) end diff --git a/app/controllers/master_files_controller.rb b/app/controllers/master_files_controller.rb index 9a3a11132d..ff23133395 100644 --- a/app/controllers/master_files_controller.rb +++ b/app/controllers/master_files_controller.rb @@ -203,13 +203,10 @@ def destroy authorize! :destroy, @master_file, message: "You do not have sufficient privileges to delete files" filename = File.basename(@master_file.file_location) if @master_file.file_location.present? 
filename ||= @master_file.id - media_object = MediaObject.find(@master_file.media_object_id) - media_object.ordered_master_files.delete(@master_file) - media_object.master_files.delete(@master_file) - media_object.save + media_object_id = @master_file.media_object_id @master_file.destroy flash[:notice] = "#{filename} has been deleted from the system" - redirect_to edit_media_object_path(media_object, step: "file-upload") + redirect_to edit_media_object_path(media_object_id, step: "file-upload") end def set_frame diff --git a/app/controllers/media_objects_controller.rb b/app/controllers/media_objects_controller.rb index 0de5e37fbc..1fd28f319a 100644 --- a/app/controllers/media_objects_controller.rb +++ b/app/controllers/media_objects_controller.rb @@ -119,7 +119,7 @@ def add_to_playlist end playlistitem_scope = params[:post][:playlistitem_scope] #'section', 'structure' # If a single masterfile_id wasn't in the request, then create playlist_items for all masterfiles - masterfile_ids = masterfile_id.present? ? [masterfile_id] : @media_object.ordered_master_file_ids + masterfile_ids = masterfile_id.present? ? [masterfile_id] : @media_object.section_ids masterfile_ids.each do |mf_id| mf = SpeedyAF::Proxy::MasterFile.find(mf_id) if playlistitem_scope=='structure' && mf.has_structuralMetadata? && mf.structuralMetadata.xpath('//Span').present? @@ -139,7 +139,7 @@ def add_to_playlist end else #create a single item for the entire masterfile - item_title = @media_object.master_file_ids.count>1? mf.embed_title : @media_object.title + item_title = @media_object.section_ids.count > 1 ? 
mf.embed_title : @media_object.title clip = AvalonClip.new(title: item_title, master_file: mf) playlist.items += [PlaylistItem.new(clip: clip, playlist: playlist)] end @@ -216,7 +216,7 @@ def update_media_object if !@media_object.save error_messages += ['Failed to create media object:']+@media_object.errors.full_messages elsif master_files_params.respond_to?('each') - old_ordered_master_files = @media_object.ordered_master_files.to_a.collect(&:id) + old_ordered_sections = @media_object.section_ids master_files_params.each_with_index do |file_spec, index| master_file = MasterFile.new(file_spec.except(:structure, :captions, :captions_type, :files, :other_identifier, :label, :date_digitized)) # master_file.media_object = @media_object @@ -230,12 +230,11 @@ def update_media_object master_file.date_digitized = DateTime.parse(file_spec[:date_digitized]).to_time.utc.iso8601 if file_spec[:date_digitized].present? master_file.identifier += Array(params[:files][index][:other_identifier]) master_file.comment += Array(params[:files][index][:comment]) - master_file._media_object = @media_object + master_file.media_object = @media_object if file_spec[:files].present? if master_file.update_derivatives(file_spec[:files], false) master_file.update_stills_from_offset! WaveformJob.perform_later(master_file.id) - @media_object.ordered_master_files += [master_file] else file_location = file_spec.dig(:file_location) || '' message = "Problem saving MasterFile for #{file_location}:" @@ -248,10 +247,14 @@ def update_media_object if error_messages.empty? 
if api_params[:replace_masterfiles] - old_ordered_master_files.each do |mf| + old_ordered_sections.each do |mf| p = MasterFile.find(mf) + # FIXME: Figure out why this next line is necessary + # without it the save below will fail with Ldp::Gone when attempting to set master_files in the MediaObject before_save callback + # This could be avoided by doing a reload after all of the destroys but I'm afraid that would mess up other changes already staged in-memory. @media_object.master_files.delete(p) - @media_object.ordered_master_files.delete(p) + # Need to manually remove from section_ids in memory to match changes that will persist when the master file is destroyed + @media_object.section_ids -= [p.id] p.destroy end end @@ -284,7 +287,7 @@ def update_media_object def custom_edit if ['preview', 'structure', 'file-upload'].include? @active_step - @masterFiles = load_master_files + @masterFiles = load_sections end if 'preview' == @active_step @@ -364,9 +367,9 @@ def show_progress [encode.master_file_id, mf_status] end ] - master_files_count = @media_object.master_files.size - if master_files_count > 0 - overall.each { |k,v| overall[k] = [0,[100,v.to_f/master_files_count.to_f].min].max.floor } + sections_count = @media_object.sections.size + if sections_count > 0 + overall.each { |k,v| overall[k] = [0,[100,v.to_f/sections_count.to_f].min].max.floor } else overall = {success: 0, error: 0} end @@ -452,7 +455,7 @@ def tree } format.json { result = { @media_object.id => {} } - @media_object.indexed_master_files.each do |mf| + @media_object.sections.each do |mf| result[@media_object.id][mf.id] = mf.derivatives.collect(&:id) end render :json => result @@ -525,11 +528,11 @@ def load_resource def master_file_presenters # Assume that @media_object is a SpeedyAF::Proxy::MediaObject - @media_object.ordered_master_files + @media_object.sections end - def load_master_files(mode = :rw) - @masterFiles ||= mode == :rw ? 
@media_object.indexed_master_files.to_a : master_file_presenters + def load_sections(mode = :rw) + @masterFiles ||= mode == :rw ? @media_object.sections : master_file_presenters end def set_player_token @@ -554,13 +557,13 @@ def load_player_context if params[:part] index = params[:part].to_i-1 - if index < 0 or index > @media_object.master_files.size-1 + if index < 0 or index > @media_object.sections.size - 1 raise ActiveFedora::ObjectNotFoundError end - params[:content] = @media_object.indexed_master_file_ids[index] + params[:content] = @media_object.section_ids[index] end - load_master_files(mode: :ro) + load_sections(mode: :ro) load_current_stream end @@ -583,7 +586,7 @@ def set_active_file end end if @currentStream.nil? - @currentStream = @media_object.indexed_master_files.first + @currentStream = @media_object.sections.first end return @currentStream end diff --git a/app/helpers/media_objects_helper.rb b/app/helpers/media_objects_helper.rb index 84f370eb1e..e7c3a093c4 100644 --- a/app/helpers/media_objects_helper.rb +++ b/app/helpers/media_objects_helper.rb @@ -195,10 +195,10 @@ def get_duration_from_fragment(start, stop) milliseconds_to_formatted_time((stop.to_i - start.to_i) * 1000, false) end - # This method mirrors the one in the MediaObject model but makes use of the master files passed in which can be SpeedyAF Objects + # This method mirrors the one in the MediaObject model but makes use of the sections passed in which can be SpeedyAF Objects # This would be good to refactor in the future but speeds things up considerably for now - def gather_all_comments(media_object, master_files) - media_object.comment.sort + master_files.collect do |mf| + def gather_all_comments(media_object, sections) + media_object.comment.sort + sections.collect do |mf| mf.comment.reject(&:blank?).collect do |c| mf.display_title.present? ? 
"[#{mf.display_title}] #{c}" : c end.sort diff --git a/app/helpers/security_helper.rb b/app/helpers/security_helper.rb index 850e33438b..8b6113dbb3 100644 --- a/app/helpers/security_helper.rb +++ b/app/helpers/security_helper.rb @@ -30,20 +30,20 @@ def secure_streams(stream_info, media_object_id) # media_object CDL check only happens once # session tokens retrieved in batch then passed into add_stream_url # Returns Hash[MasterFile.id, stream_info] - def secure_stream_infos(master_files, media_objects) + def secure_stream_infos(sections, media_objects) stream_info_hash = {} not_checked_out_hash = {} - mo_ids = master_files.collect(&:media_object_id) + mo_ids = sections.collect(&:media_object_id) mo_ids.each { |mo_id| not_checked_out_hash[mo_id] ||= not_checked_out?(mo_id, media_object: media_objects.find {|mo| mo.id == mo_id}) } - not_checked_out_master_files = master_files.select { |mf| not_checked_out_hash[mf.media_object_id] } - checked_out_master_files = master_files - not_checked_out_master_files + not_checked_out_sections = sections.select { |mf| not_checked_out_hash[mf.media_object_id] } + checked_out_sections = sections - not_checked_out_sections - not_checked_out_master_files.each { |mf| stream_info_hash[mf.id] = mf.stream_details } + not_checked_out_sections.each { |mf| stream_info_hash[mf.id] = mf.stream_details } - stream_tokens = StreamToken.get_session_tokens_for(session: session, targets: checked_out_master_files.map(&:id)) + stream_tokens = StreamToken.get_session_tokens_for(session: session, targets: checked_out_sections.map(&:id)) stream_token_hash = stream_tokens.pluck(:target, :token).to_h - checked_out_master_files.each { |mf| stream_info_hash[mf.id] = secure_stream_info(mf.stream_details, stream_token_hash[mf.id]) } + checked_out_sections.each { |mf| stream_info_hash[mf.id] = secure_stream_info(mf.stream_details, stream_token_hash[mf.id]) } stream_info_hash end diff --git a/app/javascript/components/MediaObjectRamp.jsx 
b/app/javascript/components/MediaObjectRamp.jsx index 047ae20f8b..f8476ef071 100644 --- a/app/javascript/components/MediaObjectRamp.jsx +++ b/app/javascript/components/MediaObjectRamp.jsx @@ -40,7 +40,7 @@ const ExpandCollapseArrow = () => { const Ramp = ({ urls, - master_files_count, + sections_count, has_structure, title, share, @@ -153,7 +153,7 @@ const Ramp = ({ ) : ( - {master_files_count > 0 && + {sections_count > 0 &&
@@ -217,7 +217,7 @@ const Ramp = ({ ref={expandCollapseBtnRef} > - {isClosed ? ' Expand' : ' Close'} {master_files_count > 1 ? `${master_files_count} Sections` : 'Section'} + {isClosed ? ' Expand' : ' Close'} {sections_count > 1 ? `${sections_count} Sections` : 'Section'} } @@ -244,13 +244,13 @@ const Ramp = ({ ) } - + {cdl.enabled &&
} - {(cdl.can_stream && master_files_count != 0 && has_transcripts) && + {(cdl.can_stream && sections_count != 0 && has_transcripts) && %(avr-media_object:).freeze, type: "rdfs:Class".freeze property :supplementalFiles, "rdfs:isDefinedBy" => %(avr-media_object:).freeze, type: "rdfs:Class".freeze property :avalon_uploader, "rdfs:isDefinedBy" => %(avr-media_object:).freeze, type: "rdfs:Class".freeze + property :section_list, "rdfs:isDefinedBy" => %(avr-media_object:).freeze, type: "rdfs:Class".freeze end class Collection < RDF::StrictVocabulary("http://avalonmediasystem.org/rdf/vocab/collection#") diff --git a/app/models/batch_entries.rb b/app/models/batch_entries.rb index 6c29e7fe00..8b5ea83bf3 100644 --- a/app/models/batch_entries.rb +++ b/app/models/batch_entries.rb @@ -73,13 +73,13 @@ def encoding_status return (@encoding_status = :error) unless media_object # TODO: match file_locations strings with those in MasterFiles? - if media_object.master_files.to_a.count != files.count + if media_object.sections.count != files.count return (@encoding_status = :error) end # Only return success if all MasterFiles have status 'COMPLETED' status = :success - media_object.master_files.each do |master_file| + media_object.sections.each do |master_file| next if master_file.status_code == 'COMPLETED' # TODO: explore border cases if master_file.status_code == 'FAILED' || master_file.status_code == 'CANCELLED' diff --git a/app/models/concerns/master_file_behavior.rb b/app/models/concerns/master_file_behavior.rb index e4420c7b09..d878514d6c 100644 --- a/app/models/concerns/master_file_behavior.rb +++ b/app/models/concerns/master_file_behavior.rb @@ -108,8 +108,7 @@ def display_title structuralMetadata.section_title elsif title.present? title - # FIXME: The test for media_object.master_file_ids.size is expensive and takes ~0.25 seconds - elsif file_location.present? && (media_object.master_file_ids.size > 1) + elsif file_location.present? 
&& (media_object.section_ids.size > 1) file_location.split("/").last end mf_title.blank? ? nil : mf_title diff --git a/app/models/concerns/media_object_behavior.rb b/app/models/concerns/media_object_behavior.rb index bc7084d779..13994eec75 100644 --- a/app/models/concerns/media_object_behavior.rb +++ b/app/models/concerns/media_object_behavior.rb @@ -14,6 +14,7 @@ # This module contains methods which transform stored values for use either on the MediaObject or the SpeedyAF presenter module MediaObjectBehavior + def as_json(options={}) { id: id, @@ -58,11 +59,11 @@ def access_text end def has_captions - master_files.any? { |mf| mf.has_captions? } + sections.any? { |mf| mf.has_captions? } end def has_transcripts - master_files.any? { |mf| mf.has_transcripts? } + sections.any? { |mf| mf.has_transcripts? } end # CDL methods diff --git a/app/models/concerns/media_object_intercom.rb b/app/models/concerns/media_object_intercom.rb index 34fffff7d6..69e47fea15 100644 --- a/app/models/concerns/media_object_intercom.rb +++ b/app/models/concerns/media_object_intercom.rb @@ -15,7 +15,7 @@ module MediaObjectIntercom def to_ingest_api_hash(include_structure = true, remove_identifiers: false, publish: false) { - files: ordered_master_files.to_a.collect { |mf| mf.to_ingest_api_hash(include_structure, remove_identifiers: remove_identifiers) }, + files: sections.collect { |mf| mf.to_ingest_api_hash(include_structure, remove_identifiers: remove_identifiers) }, fields: { duration: duration, diff --git a/app/models/file_upload_step.rb b/app/models/file_upload_step.rb index a8218691d2..a9ce47269c 100644 --- a/app/models/file_upload_step.rb +++ b/app/models/file_upload_step.rb @@ -36,15 +36,11 @@ def after_step context end def execute context - deleted_master_files = update_master_files context - context[:notice] = "Several clean up jobs have been sent out. Their statuses can be viewed by your sysadmin at #{ Settings.matterhorn.cleanup_log }" unless deleted_master_files.empty? 
+ deleted_sections = update_master_files context + context[:notice] = "Several clean up jobs have been sent out. Their statuses can be viewed by your sysadmin at #{ Settings.matterhorn.cleanup_log }" unless deleted_sections.empty? - # Reloads media_object.master_files, should use .reload when we update hydra-head media = MediaObject.find(context[:media_object].id) - unless media.master_files.empty? - media.save - context[:media_object] = media - end + context[:media_object] = media context end @@ -56,16 +52,15 @@ def execute context # label - Display label in the interface # id - Identifier for the masterFile to help with mapping def update_master_files(context) - media_object = context[:media_object] files = context[:master_files] || {} - deleted_master_files = [] + deleted_sections = [] if not files.blank? files.each_pair do |id,master_file| selected_master_file = MasterFile.find(id) if selected_master_file if master_file[:remove] - deleted_master_files << selected_master_file + deleted_sections << selected_master_file selected_master_file.destroy else selected_master_file.title = master_file[:title] unless master_file[:title].nil? 
@@ -80,6 +75,6 @@ def update_master_files(context) end end end - deleted_master_files + deleted_sections end end diff --git a/app/models/iiif_manifest_presenter.rb b/app/models/iiif_manifest_presenter.rb index 6933fc6903..daa78fe450 100644 --- a/app/models/iiif_manifest_presenter.rb +++ b/app/models/iiif_manifest_presenter.rb @@ -20,16 +20,16 @@ class IiifManifestPresenter IIIF_ALLOWED_TAGS = ['a', 'b', 'br', 'i', 'img', 'p', 'small', 'span', 'sub', 'sup'].freeze IIIF_ALLOWED_ATTRIBUTES = ['href', 'src', 'alt'].freeze - attr_reader :media_object, :master_files, :lending_enabled + attr_reader :media_object, :sections, :lending_enabled def initialize(media_object:, master_files:, lending_enabled: false) @media_object = media_object - @master_files = master_files + @sections = master_files @lending_enabled = lending_enabled end def file_set_presenters - master_files + sections end def work_presenters @@ -202,7 +202,7 @@ def iiif_metadata_fields end def thumbnail_url - master_file_id = media_object.ordered_master_file_ids.try :first + master_file_id = media_object.section_ids.try :first video_count = media_object.avalon_resource_type.map(&:titleize)&.count { |m| m.start_with?('moving image'.titleize) } || 0 audio_count = media_object.avalon_resource_type.map(&:titleize)&.count { |m| m.start_with?('sound recording'.titleize) } || 0 diff --git a/app/models/master_file.rb b/app/models/master_file.rb index 1a6d4e1240..de171d4a33 100644 --- a/app/models/master_file.rb +++ b/app/models/master_file.rb @@ -239,17 +239,23 @@ def set_workflow( workflow = nil ) # This requires the MasterFile having an actual id def media_object=(mo) + self.save!(validate: false) unless self.persisted? + # Removes existing association if self.media_object.present? 
- self.media_object.master_files = self.media_object.master_files.to_a.reject { |mf| mf.id == self.id } - self.media_object.ordered_master_files = self.media_object.ordered_master_files.to_a.reject { |mf| mf.id == self.id } - self.media_object.save + self.media_object.section_ids -= [self.id] + self.media_object.save(validate: false) end self._media_object=(mo) + self.save!(validate: false) + unless self.media_object.nil? - self.media_object.ordered_master_files += [self] - self.media_object.save + self.media_object.section_ids += [self.id] + self.media_object.save(validate: false) + # Need to reload here because somehow a cached copy of media_object is saved in memory + # which lacks the updated section_ids and master_file_ids just set and persisted above + self.media_object.reload end end @@ -546,6 +552,11 @@ def self.calculate_working_file_path(old_path) end end + def stop_processing! + # Stops all processing + ActiveEncodeJobs::CancelEncodeJob.perform_later(workflow_id, id) if workflow_id.present? && !finished_processing? + end + protected def mediainfo @@ -766,15 +777,9 @@ def find_encoder_class(klass_name) klass if klass&.ancestors&.include?(ActiveEncode::Base) end - def stop_processing! - # Stops all processing - ActiveEncodeJobs::CancelEncodeJob.perform_later(workflow_id, id) if workflow_id.present? && finished_processing? - end - def update_parent! return unless media_object.present? - media_object.master_files.delete(self) - media_object.ordered_master_files.delete(self) + media_object.section_ids -= [self.id] media_object.set_media_types! media_object.set_duration! if !media_object.save diff --git a/app/models/media_object.rb b/app/models/media_object.rb index 368bb39d1e..c42b9f4a2d 100644 --- a/app/models/media_object.rb +++ b/app/models/media_object.rb @@ -36,6 +36,25 @@ class MediaObject < ActiveFedora::Base before_save :update_dependent_properties!, prepend: true before_save :update_permalink, if: Proc.new { |mo| mo.persisted? && mo.published? 
}, prepend: true before_save :assign_id!, prepend: true + + after_find do + # Force loading of section_ids from list_source + self.section_ids if self.section_list.nil? + end + + # Persist to master_files only on save to avoid changes to master_files auto-saving and making things out of sync + # This might be able to be removed along with the ordered_aggregation to rely purely on section_list and the relationship + # on the MasterFile model + # Have to handle create specially otherwise will attempt to associate prior to having an id + around_create do |_, block| + block.call + # Saving again will force running through the before_save callback that should do the actual work + self.save!(validate: false) unless self.master_file_ids.sort == self.section_ids.sort + end + before_save do + self.master_files = self.sections unless self.new_record? || self.master_file_ids.sort == self.section_ids.sort + end + after_save :update_dependent_permalinks_job, if: Proc.new { |mo| mo.persisted? && mo.published? } after_save :remove_bookmarks after_update_index :enqueue_long_indexing @@ -117,6 +136,8 @@ def validate_date(date_field) index.as :stored_sortable end + #TODO: get rid of all ordered_* and indexed_* references, after everything is migrated then convert from `ordered_aggregation` to `has_many` + # OR possibly remove the master_files relationship entirely? 
ordered_aggregation :master_files, class_name: 'MasterFile', through: :list_source # ordered_aggregation gives you accessors media_obj.master_files and media_obj.ordered_master_files # and methods for master_files: first, last, [index], =, <<, +=, delete(mf) @@ -125,10 +146,40 @@ def validate_date(date_field) accepts_nested_attributes_for :master_files, :allow_destroy => true + property :section_list, predicate: Avalon::RDFVocab::MediaObject.section_list, multiple: false do |index| + index.as :symbol + end + + def section_ids= ids + self.section_list = ids.to_json + @sections = nil + @section_ids = ids + end + + def sections= mfs + self.section_ids = mfs.map(&:id) + @sections = mfs + end + + def sections + @sections ||= MasterFile.find(self.section_ids) + end + + def section_ids + return @section_ids if @section_ids + + # Do migration + self.section_ids = self.ordered_master_file_ids if self.section_list.nil? + + return [] if self.section_list.nil? + @section_ids = JSON.parse(self.section_list) + end + def destroy # attempt to stop the matterhorn processing job - self.master_files.each(&:destroy) - self.master_files.clear + self.sections.each(&:stop_processing!) + # avoid calling destroy on each section since it calls save on parent media object + self.sections.each(&:delete) Bookmark.where(document_id: self.id).destroy_all super end @@ -163,7 +214,7 @@ def publish!(user_key, validate: true) end def finished_processing? - self.master_files.all?{ |master_file| master_file.finished_processing? } + self.sections.all? { |section| section.finished_processing? } end def set_duration! @@ -179,42 +230,42 @@ def report_missing_attributes end def set_media_types! - mime_types = master_file_solr_docs.reject { |mf| mf["file_location_ssi"].blank? }.collect do |mf| - Rack::Mime.mime_type(File.extname(mf["file_location_ssi"])) + mime_types = section_solr_docs.reject { |section| section["file_location_ssi"].blank? 
}.collect do |section| + Rack::Mime.mime_type(File.extname(section["file_location_ssi"])) end.uniq self.format = mime_types.empty? ? nil : mime_types end def set_resource_types! - self.avalon_resource_type = master_file_solr_docs.reject { |mf| mf["file_format_ssi"].blank? }.collect do |mf| - case mf["file_format_ssi"] + self.avalon_resource_type = section_solr_docs.reject { |section| section["file_format_ssi"].blank? }.collect do |section| + case section["file_format_ssi"] when 'Moving image' 'moving image' when 'Sound' 'sound recording' else - mf.file_format.downcase + section.file_format.downcase end end.uniq end def update_dependent_properties! - @master_file_docs = nil + @section_docs = nil self.set_duration! self.set_media_types! self.set_resource_types! end def all_comments - comment.sort + ordered_master_files.to_a.compact.collect do |mf| - mf.comment.reject(&:blank?).collect do |c| - mf.display_title.present? ? "[#{mf.display_title}] #{c}" : c + comment.sort + sections.compact.collect do |section| + section.comment.reject(&:blank?).collect do |c| + section.display_title.present? ? "[#{section.display_title}] #{c}" : c end.sort end.flatten.uniq end def section_labels - all_labels = master_files.collect{|mf|mf.structural_metadata_labels << mf.title} + all_labels = sections.collect{ |section| section.structural_metadata_labels << section.title} all_labels.flatten.uniq.compact end @@ -222,8 +273,8 @@ def section_labels # @return [Array] A unique list of all physical descriptions for the media object def section_physical_descriptions all_pds = [] - self.master_files.each do |master_file| - all_pds += Array(master_file.physical_description) unless master_file.physical_description.nil? + self.sections.each do |section| + all_pds += Array(section.physical_description) unless section.physical_description.nil? 
end all_pds.uniq end @@ -232,10 +283,9 @@ def section_physical_descriptions # using a copy of the master file solr doc to avoid having to fetch them all from fedora # this is probably okay since this is just aggregating the values already in the master file solr docs - def fill_in_solr_fields_that_need_master_files(solr_doc) - solr_doc['section_id_ssim'] = ordered_master_file_ids - solr_doc["other_identifier_sim"] += master_files.collect {|mf| mf.identifier.to_a }.flatten - solr_doc["date_digitized_ssim"] = master_files.collect {|mf| mf.date_digitized }.compact.map {|t| Time.parse(t).strftime "%F" } + def fill_in_solr_fields_that_need_sections(solr_doc) + solr_doc["other_identifier_sim"] += sections.collect {|section| section.identifier.to_a }.flatten + solr_doc["date_digitized_ssim"] = sections.collect {|section| section.date_digitized }.compact.map {|t| Time.parse(t).strftime "%F" } solr_doc["has_captions_bsi"] = has_captions solr_doc["has_transcripts_bsi"] = has_transcripts solr_doc["section_label_tesim"] = section_labels @@ -265,8 +315,9 @@ def to_solr(include_child_fields: false) solr_doc['note_ssm'] = self.note.collect { |n| n.to_json } solr_doc['other_identifier_ssm'] = self.other_identifier.collect { |oi| oi.to_json } solr_doc['related_item_url_ssm'] = self.related_item_url.collect { |r| r.to_json } + solr_doc['section_id_ssim'] = section_ids if include_child_fields - fill_in_solr_fields_that_need_master_files(solr_doc) + fill_in_solr_fields_that_need_sections(solr_doc) elsif id.present? # avoid error in test suite # Fill in other identifier so these values aren't stripped from the solr doc while waiting for the background job mf_docs = ActiveFedora::SolrService.query("isPartOf_ssim:#{id}", rows: 1_000_000) @@ -317,10 +368,10 @@ def update_dependent_permalinks_job end def update_dependent_permalinks - self.master_files.each do |master_file| + self.sections.each do |section| begin - updated = master_file.ensure_permalink! 
- master_file.save( validate: false ) if updated + updated = section.ensure_permalink! + section.save( validate: false ) if updated rescue # no-op # Save is called (uncharacteristically) during a destroy. @@ -349,7 +400,7 @@ def merge!(media_objects) media_objects.dup.each do |mo| begin # TODO: mass assignment may speed things up - mo.ordered_master_files.to_a.dup.each { |mf| mf.media_object = self } + mo.sections.each { |section| section.media_object = self } mo.reload.destroy! mergeds << mo @@ -368,7 +419,9 @@ def lending_period # Override to reset memoized fields def reload - @master_file_docs = nil + @section_docs = nil + @sections = nil + @section_ids = nil super end @@ -401,12 +454,17 @@ def self.autocomplete(query, id) private - def master_file_solr_docs - @master_file_docs ||= ActiveFedora::SolrService.query("isPartOf_ssim:#{id}", rows: 1_000_000) + def section_solr_docs + # Explicitly query for each id in section_ids instead of the reverse to ensure consistency + # This may skip master file objects which claim to be a part of this media object but are not + # in the section_list + return [] unless section_ids.present? + query = "id:" + section_ids.join(" id:") + @section_docs ||= ActiveFedora::SolrService.query(query, rows: 1_000_000) end def calculate_duration - master_file_solr_docs.collect { |h| h['duration_ssi'].to_i }.compact.sum + section_solr_docs.collect { |h| h['duration_ssi'].to_i }.compact.sum end def collect_ips_for_index ip_strings @@ -418,6 +476,7 @@ def collect_ips_for_index ip_strings end def sections_with_files(tag: '*') - ordered_master_file_ids.select { |m| SpeedyAF::Proxy::MasterFile.find(m).supplemental_files(tag: tag).present? } + # TODO: Optimize this into a single solr query? + section_ids.select { |m| SpeedyAF::Proxy::MasterFile.find(m).supplemental_files(tag: tag).present? 
} end end diff --git a/app/models/structure_step.rb b/app/models/structure_step.rb index ee7b2507f2..aa35b0935f 100644 --- a/app/models/structure_step.rb +++ b/app/models/structure_step.rb @@ -20,12 +20,7 @@ def initialize(step = 'structure', title = "Structure", summary = "Organization def execute context media_object = context[:media_object] if ! context[:master_file_ids].nil? - # gather the parts in the right order - # in this situation we cannot use MatterFile.find([]) because - # it will not return the results in the correct order - master_files = context[:master_file_ids].map{ |master_file_id| MasterFile.find(master_file_id) } - # re-add the parts that are now in the right order - media_object.ordered_master_files = master_files + media_object.section_ids = context[:master_file_ids] media_object.save end context diff --git a/app/presenters/speedy_af/proxy/master_file.rb b/app/presenters/speedy_af/proxy/master_file.rb index 113be17358..1584b2d9e5 100644 --- a/app/presenters/speedy_af/proxy/master_file.rb +++ b/app/presenters/speedy_af/proxy/master_file.rb @@ -40,8 +40,7 @@ def display_title structuralMetadata.section_title elsif title.present? title - # FIXME: The test for media_object.master_file_ids.size is expensive and takes ~0.25 seconds - elsif file_location.present? && (media_object.master_file_ids.size > 1) + elsif file_location.present? && (media_object.section_ids.size > 1) file_location.split("/").last end mf_title.blank? ? 
nil : mf_title diff --git a/app/presenters/speedy_af/proxy/media_object.rb b/app/presenters/speedy_af/proxy/media_object.rb index 7dafe8855f..cf84fe2fce 100644 --- a/app/presenters/speedy_af/proxy/media_object.rb +++ b/app/presenters/speedy_af/proxy/media_object.rb @@ -27,6 +27,7 @@ def initialize(solr_document, instance_defaults = {}) end # Handle this case here until a better fix can be found for multiple solr fields which don't have a model property @attrs[:section_id] = solr_document["section_id_ssim"] + @attrs[:section_ids] = solr_document["section_id_ssim"] @attrs[:hidden?] = solr_document["hidden_bsi"] @attrs[:read_groups] = solr_document["read_access_group_ssim"] || [] @attrs[:edit_groups] = solr_document["edit_access_group_ssim"] || [] @@ -79,30 +80,13 @@ def supplemental_files(tag: '*') end end - def master_file_ids - if real? - real_object.indexed_master_file_ids - elsif section_id.nil? # No master files or not indexed yet - ActiveFedora::Base.logger.warn("Reifying MediaObject because master_files not indexed") - real_object.indexed_master_file_ids - else - section_id - end - end - alias_method :indexed_master_file_ids, :master_file_ids - alias_method :ordered_master_file_ids, :master_file_ids - - def master_files - # NOTE: Defaults are set on returned SpeedyAF::Base objects if field isn't present in the solr doc. - # This is important otherwise speedy_af will reify from fedora when trying to access this field. - # When adding a new property to the master file model that will be used in the interface, - # add it to the default below to avoid reifying for master files lacking a value for the property. - @master_files ||= SpeedyAF::Proxy::MasterFile.where("isPartOf_ssim:#{id}", - order: -> { master_file_ids }, - load_reflections: true) + def sections + return [] unless section_ids.present? 
+ query = "id:" + section_ids.join(" id:") + @sections ||= SpeedyAF::Proxy::MasterFile.where(query, + order: -> { section_ids }, + load_reflections: true) end - alias_method :indexed_master_files, :master_files - alias_method :ordered_master_files, :master_files def collection @collection ||= SpeedyAF::Proxy::Admin::Collection.find(collection_id) @@ -114,7 +98,7 @@ def lending_period def format # TODO figure out how to memoize this - mime_types = master_files.reject { |mf| mf.file_location.blank? }.collect do |mf| + mime_types = sections.reject { |mf| mf.file_location.blank? }.collect do |mf| Rack::Mime.mime_type(File.extname(mf.file_location)) end.uniq mime_types.empty? ? nil : mime_types @@ -149,7 +133,7 @@ def language end def sections_with_files(tag: '*') - master_files.select { |master_file| master_file.supplemental_files(tag: tag).present? }.map(&:id) + sections.select { |master_file| master_file.supplemental_files(tag: tag).present? }.map(&:id) end def permalink_with_query(query_vars = {}) diff --git a/app/views/ingest_batch_mailer/status_email.html.erb b/app/views/ingest_batch_mailer/status_email.html.erb index 373d525fd3..03d8c2cf2b 100644 --- a/app/views/ingest_batch_mailer/status_email.html.erb +++ b/app/views/ingest_batch_mailer/status_email.html.erb @@ -20,8 +20,8 @@ Unless required by applicable law or agreed to in writing, software distributed

<%= media_object.title %>

- <% master_files = media_object.master_files.to_a %> - <% if master_files.count > 0 %> + <% sections = media_object.sections %> + <% if sections.count > 0 %> @@ -29,7 +29,7 @@ Unless required by applicable law or agreed to in writing, software distributed - <% master_files.each do |master_file| %> + <% sections.each do |master_file| %> <% status_code = master_file.status_code.downcase %>
NameStatus
diff --git a/app/views/media_objects/_embed_checkout.html.erb b/app/views/media_objects/_embed_checkout.html.erb index 5dd0b340f5..d77d8d9494 100644 --- a/app/views/media_objects/_embed_checkout.html.erb +++ b/app/views/media_objects/_embed_checkout.html.erb @@ -14,7 +14,7 @@ Unless required by applicable law or agreed to in writing, software distributed --- END LICENSE_HEADER BLOCK --- %> <% if !@masterFiles.blank? %> - <% master_file = @media_object.master_files.first %> + <% master_file = @media_object.sections.first %>
<% if !current_user %> diff --git a/app/views/media_objects/_item_view.html.erb b/app/views/media_objects/_item_view.html.erb index efb7f1794e..872a449881 100644 --- a/app/views/media_objects/_item_view.html.erb +++ b/app/views/media_objects/_item_view.html.erb @@ -42,8 +42,8 @@ Unless required by applicable law or agreed to in writing, software distributed <%= react_component("MediaObjectRamp", { urls: { base_url: request.protocol + request.host_with_port, fullpath_url: request.fullpath }, - master_files_count: @media_object.master_files.size, - has_structure: @media_object.master_files.any?{ |mf| mf.has_structuralMetadata? }, + sections_count: @media_object.sections.size, + has_structure: @media_object.sections.any?{ |mf| mf.has_structuralMetadata? }, title: { content: render('title') }, share: { canShare: (will_partial_list_render? :share), content: lending_enabled?(@media_object) ? (render('share') if can_stream) : render('share') }, timeline: { canCreate: (current_ability.can? :create, Timeline), content: lending_enabled?(@media_object) ? (render('timeline') if can_stream) : render('timeline') }, @@ -62,7 +62,7 @@ Unless required by applicable law or agreed to in writing, software distributed // display the video player $(document).ready(function () { const mediaObjectId = <%= @media_object.id.to_json.html_safe %>; - const sectionIds = <%= @media_object.ordered_master_file_ids.to_json.html_safe %>; + const sectionIds = <%= @media_object.section_ids.to_json.html_safe %>; const transcriptSections = <%= @media_object.sections_with_files(tag: 'transcript').to_json.html_safe %>; // Enable action buttons after derivative is loaded diff --git a/app/views/media_objects/tree.html.erb b/app/views/media_objects/tree.html.erb index dafa98fafd..4aabbcc28f 100644 --- a/app/views/media_objects/tree.html.erb +++ b/app/views/media_objects/tree.html.erb @@ -16,7 +16,7 @@ Unless required by applicable law or agreed to in writing, software distributed
  • <%=@media_object.title%> (<%=@media_object.id%>)
      - <% @media_object.ordered_master_files.to_a.each do |mf| %> + <% @media_object.sections.each do |mf| %>
    • <%=stream_label_for(mf)%> (<%=mf.id%>)
        <% mf.derivatives.each do |d| %> diff --git a/spec/controllers/catalog_controller_spec.rb b/spec/controllers/catalog_controller_spec.rb index b1188c3e7c..1d8a748e79 100644 --- a/spec/controllers/catalog_controller_spec.rb +++ b/spec/controllers/catalog_controller_spec.rb @@ -190,7 +190,7 @@ before(:each) do @media_object = FactoryBot.create(:fully_searchable_media_object) @master_file = FactoryBot.create(:master_file, :with_structure, media_object: @media_object, title: 'Test Label') - @media_object.ordered_master_files += [@master_file] + @media_object.sections += [@master_file] @media_object.save! # Explicitly run indexing job to ensure fields are indexed for structure searching MediaObjectIndexingJob.perform_now(@media_object.id) diff --git a/spec/controllers/master_files_controller_spec.rb b/spec/controllers/master_files_controller_spec.rb index 6500216700..f6e3e07fd7 100644 --- a/spec/controllers/master_files_controller_spec.rb +++ b/spec/controllers/master_files_controller_spec.rb @@ -100,7 +100,7 @@ file = fixture_file_upload('/videoshort.mp4', 'video/mp4') post :create, params: { Filedata: [file], original: 'any', container_id: media_object.id } - master_file = media_object.reload.ordered_master_files.to_a.first + master_file = media_object.reload.sections.first expect(master_file.file_format).to eq "Moving image" expect(flash[:error]).to be_nil @@ -110,7 +110,7 @@ file = fixture_file_upload('/jazz-performance.mp3', 'audio/mp3') post :create, params: { Filedata: [file], original: 'any', container_id: media_object.id } - master_file = media_object.reload.ordered_master_files.to_a.first + master_file = media_object.reload.sections.first expect(master_file.file_format).to eq "Sound" end @@ -147,7 +147,7 @@ class << file post :create, params: { Filedata: [file], original: 'any', container_id: media_object.id } master_file = MasterFile.all.last - expect(media_object.reload.ordered_master_files.to_a).to include master_file + 
expect(media_object.reload.sections).to include master_file expect(master_file.media_object.id).to eq(media_object.id) expect(flash[:error]).to be_nil @@ -159,7 +159,7 @@ class << file master_file = MasterFile.all.last media_object.reload - expect(media_object.ordered_master_files.to_a).to include master_file + expect(media_object.sections).to include master_file expect(master_file.media_object.id).to eq(media_object.id) expect(flash[:error]).to be_nil @@ -171,7 +171,7 @@ class << file master_file = MasterFile.all.last media_object.reload - expect(media_object.ordered_master_files.to_a).to include master_file + expect(media_object.sections).to include master_file expect(master_file.media_object.id).to eq(media_object.id) expect(flash[:error]).to be_nil @@ -189,7 +189,7 @@ class << file post :create, params: { Filedata: [file], original: 'any', container_id: media_object.id } master_file = MasterFile.all.last - expect(media_object.reload.ordered_master_files.to_a).to include master_file + expect(media_object.reload.sections).to include master_file expect(master_file.media_object.id).to eq(media_object.id) expect(flash[:error]).to be_nil @@ -206,14 +206,14 @@ class << file login_as :administrator expect(controller.current_ability.can?(:destroy, master_file)).to be_truthy expect { post :destroy, params: { id: master_file.id } }.to change { MasterFile.count }.by(-1) - expect(master_file.media_object.reload.ordered_master_files.to_a).not_to include master_file + expect(master_file.media_object.reload.sections).not_to include master_file end it "deletes a master file as manager of owning collection" do login_user master_file.media_object.collection.managers.first expect(controller.current_ability.can?(:destroy, master_file)).to be_truthy expect { post(:destroy, params: { id: master_file.id }) }.to change { MasterFile.count }.by(-1) - expect(master_file.media_object.reload.ordered_master_files.to_a).not_to include master_file + 
expect(master_file.media_object.reload.sections).not_to include master_file end it "redirect to restricted content page for end users" do @@ -685,18 +685,16 @@ class << file login_as :administrator end it 'moves the master file' do - expect(current_media_object.master_file_ids).to include master_file.id - expect(current_media_object.ordered_master_file_ids).to include master_file.id - expect(target_media_object.master_file_ids).not_to include master_file.id - expect(target_media_object.ordered_master_file_ids).not_to include master_file.id + expect(current_media_object.section_ids).to include master_file.id + expect(target_media_object.section_ids).not_to include master_file.id post('move', params: { id: master_file.id, target: target_media_object.id }) + current_media_object.reload + target_media_object.reload expect(response).to redirect_to(edit_media_object_path(current_media_object)) expect(flash[:success]).not_to be_blank expect(master_file.reload.media_object_id).to eq target_media_object.id - expect(current_media_object.reload.master_file_ids).not_to include master_file.id - expect(current_media_object.ordered_master_file_ids).not_to include master_file.id - expect(target_media_object.reload.master_file_ids).to include master_file.id - expect(target_media_object.ordered_master_file_ids).to include master_file.id + expect(current_media_object.section_ids).not_to include master_file.id + expect(target_media_object.section_ids).to include master_file.id end end end diff --git a/spec/controllers/media_objects_controller_spec.rb b/spec/controllers/media_objects_controller_spec.rb index 74ee109890..7369ccf240 100644 --- a/spec/controllers/media_objects_controller_spec.rb +++ b/spec/controllers/media_objects_controller_spec.rb @@ -160,7 +160,7 @@ login_as(:administrator) session[:intercom_collections] = {} session[:intercom_default_collection] = '' - media_object.ordered_master_files = [master_file_with_structure] + media_object.sections = 
[master_file_with_structure] allow_any_instance_of(Avalon::Intercom).to receive(:fetch_user_collections).and_return target_collections end it 'should refetch user collections from target and set session' do @@ -304,7 +304,7 @@ end it "should create a new media_object" do # master_file_obj = FactoryBot.create(:master_file, master_file.slice(:files)) - media_object = FactoryBot.create(:media_object)#, master_files: [master_file_obj]) + media_object = FactoryBot.create(:media_object)#, sections: [master_file_obj]) fields = {other_identifier_type: []} descMetadata_fields.each {|f| fields[f] = media_object.send(f) } # fields = media_object.attributes.select {|k,v| descMetadata_fields.include? k.to_sym } @@ -314,17 +314,17 @@ expect(new_media_object.title).to eq media_object.title expect(new_media_object.creator).to eq media_object.creator expect(new_media_object.date_issued).to eq media_object.date_issued - expect(new_media_object.ordered_master_files.to_a.map(&:id)).to match_array new_media_object.master_file_ids + expect(new_media_object.section_ids.size).to eq 1 expect(new_media_object.duration).to eq '6315' expect(new_media_object.format).to eq ['video/mp4'] expect(new_media_object.avalon_resource_type).to eq ['moving image'] - expect(new_media_object.master_files.first.date_digitized).to eq('2015-12-31T00:00:00Z') - expect(new_media_object.master_files.first.identifier).to include('40000000045312') - expect(new_media_object.master_files.first.structuralMetadata.has_content?).to be_truthy - expect(new_media_object.master_files.first.captions.has_content?).to be_truthy - expect(new_media_object.master_files.first.captions.mime_type).to eq('text/vtt') - expect(new_media_object.master_files.first.derivatives.count).to eq(2) - expect(new_media_object.master_files.first.derivatives.first.location_url).to eq(absolute_location) + expect(new_media_object.sections.first.date_digitized).to eq('2015-12-31T00:00:00Z') + expect(new_media_object.sections.first.identifier).to 
include('40000000045312') + expect(new_media_object.sections.first.structuralMetadata.has_content?).to be_truthy + expect(new_media_object.sections.first.captions.has_content?).to be_truthy + expect(new_media_object.sections.first.captions.mime_type).to eq('text/vtt') + expect(new_media_object.sections.first.derivatives.count).to eq(2) + expect(new_media_object.sections.first.derivatives.first.location_url).to eq(absolute_location) expect(new_media_object.workflow.last_completed_step).to eq([HYDRANT_STEPS.last.step]) end it "should create a new published media_object" do @@ -406,6 +406,7 @@ # master_file_obj = FactoryBot.create(:master_file, master_file.slice(:files)) media_object = FactoryBot.create(:fully_searchable_media_object, :with_completed_workflow) master_file = FactoryBot.create(:master_file, :with_derivative, :with_thumbnail, :with_poster, :with_structure, :with_captions, :with_comments, media_object: media_object) + media_object.reload allow_any_instance_of(MasterFile).to receive(:extract_frame).and_return('some data') media_object.update_dependent_properties! 
api_hash = media_object.to_ingest_api_hash @@ -415,7 +416,7 @@ expect(new_media_object.title).to eq media_object.title expect(new_media_object.creator).to eq media_object.creator expect(new_media_object.date_issued).to eq media_object.date_issued - expect(new_media_object.ordered_master_files.to_a.map(&:id)).to match_array new_media_object.master_file_ids + expect(new_media_object.section_ids.size).to eq 1 expect(new_media_object.duration).to eq media_object.duration expect(new_media_object.format).to eq media_object.format expect(new_media_object.note).to eq media_object.note @@ -427,13 +428,13 @@ expect(new_media_object.rights_statement).to eq media_object.rights_statement expect(new_media_object.series).to eq media_object.series expect(new_media_object.avalon_resource_type).to eq media_object.avalon_resource_type - expect(new_media_object.master_files.first.date_digitized).to eq(media_object.master_files.first.date_digitized) - expect(new_media_object.master_files.first.identifier).to eq(media_object.master_files.first.identifier) - expect(new_media_object.master_files.first.structuralMetadata.has_content?).to be_truthy - expect(new_media_object.master_files.first.captions.has_content?).to be_truthy - expect(new_media_object.master_files.first.captions.mime_type).to eq(media_object.master_files.first.captions.mime_type) - expect(new_media_object.master_files.first.derivatives.count).to eq(media_object.master_files.first.derivatives.count) - expect(new_media_object.master_files.first.derivatives.first.location_url).to eq(media_object.master_files.first.derivatives.first.location_url) + expect(new_media_object.sections.first.date_digitized).to eq(media_object.sections.first.date_digitized) + expect(new_media_object.sections.first.identifier).to eq(media_object.sections.first.identifier) + expect(new_media_object.sections.first.structuralMetadata.has_content?).to be_truthy + expect(new_media_object.sections.first.captions.has_content?).to be_truthy + 
expect(new_media_object.sections.first.captions.mime_type).to eq(media_object.sections.first.captions.mime_type) + expect(new_media_object.sections.first.derivatives.count).to eq(media_object.sections.first.derivatives.count) + expect(new_media_object.sections.first.derivatives.first.location_url).to eq(media_object.sections.first.derivatives.first.location_url) expect(new_media_object.workflow.last_completed_step).to eq(media_object.workflow.last_completed_step) end it "should return 422 if master_file update failed" do @@ -483,29 +484,29 @@ expect(JSON.parse(response.body)['id'].class).to eq String expect(JSON.parse(response.body)).not_to include('errors') media_object.reload - expect(media_object.master_files.to_a.size).to eq 2 + expect(media_object.sections.to_a.size).to eq 2 end it "should update the poster and thumbnail for its masterfile" do media_object = FactoryBot.create(:media_object) put 'json_update', params: { format: 'json', id: media_object.id, files: [master_file], collection_id: media_object.collection_id } media_object.reload - expect(media_object.master_files.to_a.size).to eq 1 - expect(ExtractStillJob).to have_been_enqueued.with(media_object.master_files.first.id, { type: 'both', offset: 2000, headers: nil }) + expect(media_object.sections.to_a.size).to eq 1 + expect(ExtractStillJob).to have_been_enqueued.with(media_object.sections.first.id, { type: 'both', offset: 2000, headers: nil }) end it "should update the waveform for its masterfile" do media_object = FactoryBot.create(:media_object) put 'json_update', params: { format: 'json', id: media_object.id, files: [master_file], collection_id: media_object.collection_id } media_object.reload - expect(media_object.master_files.to_a.size).to eq 1 - expect(WaveformJob).to have_been_enqueued.with(media_object.master_files.first.id) + expect(media_object.sections.to_a.size).to eq 1 + expect(WaveformJob).to have_been_enqueued.with(media_object.sections.first.id) end - it "should delete existing 
master_files and add a new master_file to a media_object" do + it "should delete existing sections and add a new master_file to a media_object" do allow_any_instance_of(MasterFile).to receive(:stop_processing!) put 'json_update', params: { format: 'json', id: media_object.id, files: [master_file], collection_id: media_object.collection_id, replace_masterfiles: true } expect(JSON.parse(response.body)['id'].class).to eq String expect(JSON.parse(response.body)).not_to include('errors') media_object.reload - expect(media_object.master_files.to_a.size).to eq 1 + expect(media_object.sections.to_a.size).to eq 1 end it "should return 404 if media object doesn't exist" do allow_any_instance_of(MediaObject).to receive(:save).and_return false @@ -814,11 +815,11 @@ it "should choose the correct default master_file" do mf1 = FactoryBot.create(:master_file, media_object: media_object) mf2 = FactoryBot.create(:master_file, media_object: media_object) - expect(media_object.ordered_master_files.to_a.first).to eq(mf1) - media_object.ordered_master_files = media_object.ordered_master_files.to_a.reverse + expect(media_object.sections.first).to eq(mf1) + media_object.sections = media_object.sections.reverse media_object.save! controller.instance_variable_set('@media_object', media_object) - expect(media_object.ordered_master_files.to_a.first).to eq(mf2) + expect(media_object.sections.first).to eq(mf2) expect(controller.send('set_active_file')).to eq(mf2) end end @@ -1199,15 +1200,27 @@ expect(json['published_by']).to eq(media_object.avalon_publisher) expect(json['published']).to eq(media_object.published?) 
expect(json['summary']).to eq(media_object.abstract) - expect(json['fields'].symbolize_keys).to eq(media_object.to_ingest_api_hash(false)[:fields]) + + # FIXME: https://github.com/avalonmediasystem/avalon/issues/5834 + ingest_api_hash = media_object.to_ingest_api_hash(false) + json['fields'].each do |k,v| + if k == "avalon_resource_type" + expect(v.map(&:downcase)).to eq(ingest_api_hash[:fields][k.to_sym]) + elsif k == "record_identifier" + # no-op since not indexed + else + expect(v).to eq(ingest_api_hash[:fields][k.to_sym]) + end + end + # Symbolize keys for master files and derivatives json['files'].each do |mf| mf.symbolize_keys! mf[:files].each { |d| d.symbolize_keys! } end expect(json['files']).to eq(media_object.to_ingest_api_hash(false)[:files]) - expect(json['files'].first[:id]).to eq(media_object.master_files.first.id) - expect(json['files'].first[:files].first[:id]).to eq(media_object.master_files.first.derivatives.first.id) + expect(json['files'].first[:id]).to eq(media_object.sections.first.id) + expect(json['files'].first[:files].first[:id]).to eq(media_object.sections.first.derivatives.first.id) end it "should return 404 if requested media_object not present" do @@ -1335,18 +1348,18 @@ delete :destroy, params: { id: media_object.id } expect(flash[:notice]).to include("media object deleted") expect(MediaObject.exists?(media_object.id)).to be_falsey - expect(MasterFile.exists?(media_object.master_file_ids.first)).to be_falsey + expect(MasterFile.exists?(media_object.section_ids.first)).to be_falsey end it "should remove a MediaObject with multiple MasterFiles" do media_object = FactoryBot.create(:media_object, :with_master_file, collection: collection) FactoryBot.create(:master_file, media_object: media_object) - master_file_ids = media_object.master_files.map(&:id) + section_ids = media_object.section_ids media_object.reload delete :destroy, params: { id: media_object.id } expect(flash[:notice]).to include("media object deleted") 
expect(MediaObject.exists?(media_object.id)).to be_falsey - master_file_ids.each { |mf_id| expect(MasterFile.exists?(mf_id)).to be_falsey } + section_ids.each { |mf_id| expect(MasterFile.exists?(mf_id)).to be_falsey } end it "should fail when id doesn't exist" do @@ -1534,14 +1547,14 @@ mf.media_object = media_object mf.save end - master_file_ids = media_object.ordered_master_files.to_a.collect(&:id) + section_ids = media_object.section_ids media_object.save login_user media_object.collection.managers.first - put 'update', params: { :id => media_object.id, :master_file_ids => master_file_ids.reverse, :step => 'structure' } + put 'update', params: { :id => media_object.id, :master_file_ids => section_ids.reverse, :step => 'structure' } media_object.reload - expect(media_object.ordered_master_files.to_a.collect(&:id)).to eq master_file_ids.reverse + expect(media_object.section_ids).to eq section_ids.reverse end it 'sets the MIME type' do media_object = FactoryBot.create(:media_object) @@ -1569,11 +1582,11 @@ it "should update all the labels" do login_user media_object.collection.managers.first part_params = {} - media_object.ordered_master_files.to_a.each_with_index { |mf,i| part_params[mf.id] = { id: mf.id, title: "Part #{i}", permalink: '', poster_offset: '00:00:00.000' } } + media_object.sections.each_with_index { |mf,i| part_params[mf.id] = { id: mf.id, title: "Part #{i}", permalink: '', poster_offset: '00:00:00.000' } } params = { id: media_object.id, master_files: part_params, save: 'Save', step: 'file-upload', donot_advance: 'true' } patch 'update', params: params media_object.reload - media_object.ordered_master_files.to_a.each_with_index do |mf,i| + media_object.sections.each_with_index do |mf,i| expect(mf.title).to eq "Part #{i}" end end @@ -1692,19 +1705,19 @@ end describe "#show_progress" do - it "should return information about the processing state of the media object's master_files" do + it "should return information about the processing state of the 
media object's sections" do media_object = FactoryBot.create(:media_object, :with_master_file) login_as 'administrator' get :show_progress, params: { id: media_object.id, format: 'json' } expect(JSON.parse(response.body)["overall"]).to_not be_empty end - it "should return information about the processing state of the media object's master_files for managers" do + it "should return information about the processing state of the media object's sections for managers" do media_object = FactoryBot.create(:media_object, :with_master_file) login_user media_object.collection.managers.first get :show_progress, params: { id: media_object.id, format: 'json' } expect(JSON.parse(response.body)["overall"]).to_not be_empty end - it "should return something even if the media object has no master_files" do + it "should return something even if the media object has no sections" do media_object = FactoryBot.create(:media_object) login_as 'administrator' get :show_progress, params: { id: media_object.id, format: 'json' } @@ -1728,18 +1741,18 @@ let(:playlist) { FactoryBot.create(:playlist, user: user) } before do - media_object.ordered_master_files = [master_file, master_file_with_structure] + media_object.sections = [master_file, master_file_with_structure] end it "should create a single playlist_item for a single master_file" do expect { - post :add_to_playlist, params: { id: media_object.id, post: { playlist_id: playlist.id, masterfile_id: media_object.ordered_master_file_ids[0], playlistitem_scope: 'section' } } + post :add_to_playlist, params: { id: media_object.id, post: { playlist_id: playlist.id, masterfile_id: media_object.section_ids[0], playlistitem_scope: 'section' } } }.to change { playlist.items.count }.from(0).to(1) - expect(playlist.items[0].title).to eq("#{media_object.title} - #{media_object.ordered_master_files.to_a[0].title}") + expect(playlist.items[0].title).to eq("#{media_object.title} - #{media_object.sections[0].title}") end it "should create playlist_items for 
each span in a single master_file's structure" do expect { - post :add_to_playlist, params: { id: media_object.id, post: { playlist_id: playlist.id, masterfile_id: media_object.ordered_master_file_ids[1], playlistitem_scope: 'structure' } } + post :add_to_playlist, params: { id: media_object.id, post: { playlist_id: playlist.id, masterfile_id: media_object.section_ids[1], playlistitem_scope: 'structure' } } }.to change { playlist.items.count }.from(0).to(13) expect(playlist.items[0].title).to eq("Test Item - CD 1 - Copland, Three Piano Excerpts from Our Town - Track 1. Story of Our Town") expect(playlist.items[12].title).to eq("Test Item - CD 1 - Track 13. Copland, Danzon Cubano") @@ -1748,21 +1761,21 @@ expect { post :add_to_playlist, params: { id: media_object.id, post: { playlist_id: playlist.id, playlistitem_scope: 'section' } } }.to change { playlist.items.count }.from(0).to(2) - expect(playlist.items[0].title).to eq(media_object.ordered_master_files.to_a[0].embed_title) - expect(playlist.items[1].title).to eq(media_object.ordered_master_files.to_a[1].embed_title) + expect(playlist.items[0].title).to eq(media_object.sections[0].embed_title) + expect(playlist.items[1].title).to eq(media_object.sections[1].embed_title) end it "should create playlist_items for each span in a master_file structures in a media_object" do expect { post :add_to_playlist, params: { id: media_object.id, post: { playlist_id: playlist.id, playlistitem_scope: 'structure' } } }.to change { playlist.items.count }.from(0).to(14) expect(response.response_code).to eq(200) - expect(playlist.items[0].title).to eq("#{media_object.title} - #{media_object.ordered_master_files.to_a[0].title}") + expect(playlist.items[0].title).to eq("#{media_object.title} - #{media_object.sections[0].title}") expect(playlist.items[13].title).to eq("Test Item - CD 1 - Track 13. 
Copland, Danzon Cubano") end it 'redirects with flash message when playlist is owned by another user' do login_as :user other_playlist = FactoryBot.create(:playlist) - post :add_to_playlist, params: { id: media_object.id, post: { playlist_id: other_playlist.id, masterfile_id: media_object.ordered_master_file_ids[0], playlistitem_scope: 'section' } } + post :add_to_playlist, params: { id: media_object.id, post: { playlist_id: other_playlist.id, masterfile_id: media_object.section_ids[0], playlistitem_scope: 'section' } } expect(response).to have_http_status(403) expect(JSON.parse(response.body).symbolize_keys).to eq({message: "

        You are not authorized to update this playlist.

        ", status: 403}) end @@ -1772,7 +1785,7 @@ media_object perform_enqueued_jobs(only: MediaObjectIndexingJob) WebMock.reset_executed_requests! - post :add_to_playlist, params: { id: media_object.id, post: { playlist_id: playlist.id, masterfile_id: media_object.ordered_master_file_ids[0], playlistitem_scope: 'section' } } + post :add_to_playlist, params: { id: media_object.id, post: { playlist_id: playlist.id, masterfile_id: media_object.section_ids[0], playlistitem_scope: 'section' } } expect(a_request(:any, /#{ActiveFedora.fedora.base_uri}/)).not_to have_been_made end end diff --git a/spec/factories/media_objects.rb b/spec/factories/media_objects.rb index d1eec6d595..f023a97e04 100644 --- a/spec/factories/media_objects.rb +++ b/spec/factories/media_objects.rb @@ -49,6 +49,7 @@ rights_statement { ['http://rightsstatements.org/vocab/InC-EDU/1.0/'] } terms_of_use { [ 'Terms of Use: Be kind. Rewind.' ] } series { [Faker::Lorem.word] } + sections { [] } # after(:create) do |mo| # mo.update_datastream(:descMetadata, { # note: {note[Faker::Lorem.paragraph], @@ -65,9 +66,7 @@ after(:create) do |mo| mf = FactoryBot.create(:master_file) mf.media_object = mo - mf.save - # mo.ordered_master_files += [mf] - mo.save + # Above line will cause a save of both the master file and parent media object end end trait :with_completed_workflow do diff --git a/spec/features/login_redirect_spec.rb b/spec/features/login_redirect_spec.rb index e77f9c5b0e..d61beec9d2 100644 --- a/spec/features/login_redirect_spec.rb +++ b/spec/features/login_redirect_spec.rb @@ -18,12 +18,12 @@ let(:user) { FactoryBot.create(:user, :with_identity) } describe '/media_objects/:id' do - let(:media_object) { FactoryBot.create(:fully_searchable_media_object, master_files: [master_file]) } - let(:master_file) { FactoryBot.create(:master_file, :with_derivative) } + let(:media_object) { FactoryBot.create(:fully_searchable_media_object) } + let!(:master_file) { FactoryBot.create(:master_file, :with_derivative, 
media_object: media_object) } it 'redirects to item page' do visit media_object_path(media_object) - visit hls_manifest_master_file_path(media_object.master_files.first, "high") + visit hls_manifest_master_file_path(media_object.sections.first, "high") sign_in user expect(page.current_path).to eq media_object_path(media_object) end diff --git a/spec/helpers/media_objects_helper_spec.rb b/spec/helpers/media_objects_helper_spec.rb index b0ae812285..7a95375fd8 100644 --- a/spec/helpers/media_objects_helper_spec.rb +++ b/spec/helpers/media_objects_helper_spec.rb @@ -62,17 +62,17 @@ describe '#gather_all_comments' do let(:media_object) { instance_double("MediaObject", comment: ['MO Comment']) } - let(:master_files) { [instance_double("MasterFile", comment: [])] } + let(:sections) { [instance_double("MasterFile", comment: [])] } it 'returns a list of unique comment strings' do - expect(helper.gather_all_comments(media_object, master_files)).to eq ["MO Comment"] + expect(helper.gather_all_comments(media_object, sections)).to eq ["MO Comment"] end context 'with a master file comment' do - let(:master_files) { [instance_double("MasterFile", comment: ["MF Comment"], display_title: "MF1")] } + let(:sections) { [instance_double("MasterFile", comment: ["MF Comment"], display_title: "MF1")] } it 'returns a list of unique comment strings' do - expect(helper.gather_all_comments(media_object, master_files)).to eq ["MO Comment", "[MF1] MF Comment"] + expect(helper.gather_all_comments(media_object, sections)).to eq ["MO Comment", "[MF1] MF Comment"] end end end diff --git a/spec/jobs/media_object_indexing_job_spec.rb b/spec/jobs/media_object_indexing_job_spec.rb index 15ffad9f87..cfa79b97d1 100644 --- a/spec/jobs/media_object_indexing_job_spec.rb +++ b/spec/jobs/media_object_indexing_job_spec.rb @@ -18,15 +18,16 @@ let(:job) { MediaObjectIndexingJob.new } describe "perform" do + let(:comment) { "A comment" } let!(:media_object) { FactoryBot.create(:media_object) } - 
let!(:master_file) { FactoryBot.create(:master_file, media_object: media_object) } + let!(:master_file) { FactoryBot.create(:master_file, media_object: media_object, comment: [comment]) } it 'indexes the media object including master_file fields' do before_doc = ActiveFedora::SolrService.query("id:#{media_object.id}").first - expect(before_doc["section_id_ssim"]).to be_blank + expect(before_doc["all_comments_ssim"]).to be_blank job.perform(media_object.id) after_doc = ActiveFedora::SolrService.query("id:#{media_object.id}").first - expect(after_doc["section_id_ssim"]).to eq [master_file.id] + expect(after_doc["all_comments_ssim"]).to eq [comment] end end end diff --git a/spec/lib/avalon/batch/entry_spec.rb b/spec/lib/avalon/batch/entry_spec.rb index f39b46119c..ee67569b65 100644 --- a/spec/lib/avalon/batch/entry_spec.rb +++ b/spec/lib/avalon/batch/entry_spec.rb @@ -164,7 +164,7 @@ describe '#process!' do let(:entry_files) { [{ file: File.join(testdir, filename), offset: '00:00:00.500', label: 'Quis quo', date_digitized: '2015-10-30', skip_transcoding: false }] } - let(:master_file) { entry.media_object.master_files.first } + let(:master_file) { entry.media_object.sections.first } before do entry.process! 
end diff --git a/spec/lib/avalon/intercom_spec.rb b/spec/lib/avalon/intercom_spec.rb index a92a30d0a4..6feb06963d 100644 --- a/spec/lib/avalon/intercom_spec.rb +++ b/spec/lib/avalon/intercom_spec.rb @@ -86,7 +86,7 @@ expect(response).to eq({ message: 'StandardError', status: 500 }) end it "should respond with a link to the pushed object on target" do - media_object.ordered_master_files=[master_file_with_structure] + media_object.sections=[master_file_with_structure] media_object_hash = media_object.to_ingest_api_hash(false) media_object_hash.merge!( { 'collection_id' => 'cupcake_collection', 'import_bib_record' => true, 'publish' => false } diff --git a/spec/models/batch_entries_spec.rb b/spec/models/batch_entries_spec.rb index 8d8e88c8b5..abc115ec08 100644 --- a/spec/models/batch_entries_spec.rb +++ b/spec/models/batch_entries_spec.rb @@ -106,7 +106,7 @@ media_object = FactoryBot.create(:media_object) batch_entry = FactoryBot.build(:batch_entries, media_object_pid: media_object.id) - expect(media_object.master_files.to_a.count).to eq(0) + expect(media_object.sections.count).to eq(0) expect(JSON.parse(batch_entry.payload)['files'].count).to eq(1) expect(batch_entry.encoding_finished?).to be_truthy @@ -119,7 +119,7 @@ media_object = master_file.media_object batch_entry = FactoryBot.build(:batch_entries, media_object_pid: media_object.id) - expect(media_object.master_files.to_a.count).to eq(1) + expect(media_object.sections.count).to eq(1) expect(JSON.parse(batch_entry.payload)['files'].count).to eq(1) expect(batch_entry.encoding_finished?).to be_truthy @@ -132,7 +132,7 @@ media_object = master_file.media_object batch_entry = FactoryBot.build(:batch_entries, media_object_pid: media_object.id) - expect(media_object.master_files.to_a.count).to eq(1) + expect(media_object.sections.count).to eq(1) expect(JSON.parse(batch_entry.payload)['files'].count).to eq(1) expect(batch_entry.encoding_finished?).to be_truthy @@ -145,7 +145,7 @@ media_object = master_file.media_object 
batch_entry = FactoryBot.build(:batch_entries, media_object_pid: media_object.id) - expect(media_object.master_files.to_a.count).to eq(1) + expect(media_object.sections.count).to eq(1) expect(JSON.parse(batch_entry.payload)['files'].count).to eq(1) expect(batch_entry.encoding_finished?).to be_truthy @@ -158,7 +158,7 @@ media_object = master_file.media_object batch_entry = FactoryBot.build(:batch_entries, media_object_pid: media_object.id) - expect(media_object.master_files.to_a.count).to eq(1) + expect(media_object.sections.count).to eq(1) expect(JSON.parse(batch_entry.payload)['files'].count).to eq(1) expect(batch_entry.encoding_finished?).to be_falsey diff --git a/spec/models/file_upload_step_spec.rb b/spec/models/file_upload_step_spec.rb index 2d4959e5bf..5ee88ae891 100644 --- a/spec/models/file_upload_step_spec.rb +++ b/spec/models/file_upload_step_spec.rb @@ -16,8 +16,8 @@ describe FileUploadStep do describe '#update_master_files' do - let!(:master_file) {FactoryBot.create(:master_file, title: 'foo')} - let!(:media_object) {FactoryBot.create(:media_object, master_files: [master_file])} + let(:master_file) {FactoryBot.create(:master_file, title: 'foo')} + let(:media_object) {FactoryBot.create(:media_object, sections: [master_file])} it 'should not regenerate a section permalink when the title is changed' do step_context = {media_object: media_object, master_files: {master_file.id => {title: 'new title'}}} expect{FileUploadStep.new.update_master_files(step_context)}.to_not change{master_file.permalink} diff --git a/spec/models/ingest_batch_spec.rb b/spec/models/ingest_batch_spec.rb index cf643c6d1a..159f6cc953 100644 --- a/spec/models/ingest_batch_spec.rb +++ b/spec/models/ingest_batch_spec.rb @@ -22,22 +22,16 @@ expect(ingest_batch.media_object_ids).to eq(media_object_ids) end describe '#finished?' 
do - it 'returns true when all the master files are finished' do - media_object = FactoryBot.create(:media_object, master_files: [FactoryBot.create(:master_file, :cancelled_processing), FactoryBot.create(:master_file, :completed_processing)]) + it 'returns true when all the sections are finished' do + media_object = FactoryBot.create(:media_object, sections: [FactoryBot.create(:master_file, :cancelled_processing), FactoryBot.create(:master_file, :completed_processing)]) ingest_batch = IngestBatch.new(media_object_ids: [media_object.id], email: 'email@something.com') expect(ingest_batch.finished?).to be true end - # fix: adding master_files to media object parts is broken - it 'returns false when one or more master files are not finished' do - skip "Fix problems with this test" - - media_object = MediaObject.new(id:'avalon:ingest-batch-test') - media_object.add_relationship(:has_part, FactoryBot.build(:master_file, :completed_processing)) - media_object.parts << FactoryBot.build(:master_file) - media_object.save - ingest_batch = IngestBatch.new(media_object_ids: ['avalon:ingest-batch-test'], email: 'email@something.com') - expect(ingest_batch.finished?).to be true + it 'returns false when one or more sections are not finished' do + media_object = FactoryBot.create(:media_object, sections: [FactoryBot.create(:master_file), FactoryBot.create(:master_file)]) + ingest_batch = IngestBatch.new(media_object_ids: [media_object.id], email: 'email@something.com') + expect(ingest_batch.finished?).to be false end end diff --git a/spec/models/master_file_spec.rb b/spec/models/master_file_spec.rb index ef1d0ae5e3..9c00267597 100644 --- a/spec/models/master_file_spec.rb +++ b/spec/models/master_file_spec.rb @@ -84,7 +84,7 @@ end end - describe "master_files=" do + describe "derivatives=" do let(:derivative) {Derivative.create} let(:master_file) {FactoryBot.create(:master_file)} it "should set hasDerivation relationships on self" do @@ -329,14 +329,9 @@ let(:media_path) { 
File.expand_path("../../master_files-#{SecureRandom.uuid}",__FILE__)} let(:dropbox_path) { File.expand_path("../../collection-#{SecureRandom.uuid}",__FILE__)} let(:upload) { ActionDispatch::Http::UploadedFile.new :tempfile => tempfile, :filename => original, :type => 'video/mp4' } - let(:media_object) { MediaObject.new } + let!(:media_object) { FactoryBot.create(:media_object, sections: [subject]) } let(:collection) { Admin::Collection.new } - subject { - mf = MasterFile.new - mf.media_object = media_object - mf.setContent(upload, dropbox_dir: collection.dropbox_absolute_path) - mf - } + subject { FactoryBot.create(:master_file) } before(:each) do @old_media_path = Settings.encoding.working_file_path @@ -356,11 +351,13 @@ it "should move an uploaded file into the root of the collection's dropbox" do Settings.encoding.working_file_path = nil + subject.setContent(upload, dropbox_dir: collection.dropbox_absolute_path) expect(subject.file_location).to eq(File.realpath(File.join(collection.dropbox_absolute_path,original))) end it "should copy an uploaded file to the media path" do Settings.encoding.working_file_path = media_path + subject.setContent(upload, dropbox_dir: collection.dropbox_absolute_path) expect(File.fnmatch("#{media_path}/*/#{original}", subject.working_file_path.first)).to be true end @@ -373,6 +370,7 @@ it "appends a numerical suffix" do Settings.encoding.working_file_path = nil + subject.setContent(upload, dropbox_dir: collection.dropbox_absolute_path) expect(subject.file_location).to eq(File.realpath(File.join(collection.dropbox_absolute_path,duplicate))) end end @@ -384,14 +382,9 @@ let(:dropbox_file_path) { File.join(dropbox_path, 'nested-dir', original)} let(:media_path) { File.expand_path("../../master_files-#{SecureRandom.uuid}",__FILE__)} let(:dropbox_path) { File.expand_path("../../collection-#{SecureRandom.uuid}",__FILE__)} - let(:media_object) { MediaObject.new } + let!(:media_object) { FactoryBot.create(:media_object, sections: [subject]) } 
let(:collection) { Admin::Collection.new } - subject { - mf = MasterFile.new - mf.media_object = media_object - mf.setContent(File.new(dropbox_file_path), dropbox_dir: collection.dropbox_absolute_path) - mf - } + subject { FactoryBot.create(:master_file) } before(:each) do @old_media_path = Settings.encoding.working_file_path @@ -412,6 +405,7 @@ it "should not move a file in a subdirectory of the collection's dropbox" do Settings.encoding.working_file_path = nil + subject.setContent(File.new(dropbox_file_path), dropbox_dir: collection.dropbox_absolute_path) expect(subject.file_location).to eq dropbox_file_path expect(File.exist?(dropbox_file_path)).to eq true expect(File.exist?(File.join(collection.dropbox_absolute_path,original))).to eq false @@ -419,6 +413,7 @@ it "should copy an uploaded file to the media path" do Settings.encoding.working_file_path = media_path + subject.setContent(File.new(dropbox_file_path), dropbox_dir: collection.dropbox_absolute_path) expect(File.fnmatch("#{media_path}/*/#{original}", subject.working_file_path.first)).to be true end end @@ -429,7 +424,7 @@ let(:file_size) { 12345 } let(:auth_header) { {"Authorization"=>"Bearer ya29.a0AfH6SMC6vSj4D6po1aDxAr6JmY92azh3lxevSuPKxf9QPPSKmMzqbZvI7B3oIACqqMVono1P0XD2F1Jl_rkayoI6JGz-P2cpg44-55oJFcWychAvUliWeRKf1cifMo9JF10YmXxhIfrG5mu7Ahy9FZpudN92p2JhvTI"} } - subject { MasterFile.new } + subject { FactoryBot.create(:master_file) } it "should set the right properties" do allow(subject).to receive(:reloadTechnicalMetadata!).and_return(nil) @@ -517,8 +512,8 @@ end it 'should have an appropriate title for the embed code with no label (more than 1 section)' do - allow(subject.media_object).to receive(:ordered_master_files).and_return([subject,subject]) - allow(subject.media_object).to receive(:master_file_ids).and_return([subject.id,subject.id]) + allow(subject.media_object).to receive(:sections).and_return([subject,subject]) + allow(subject.media_object).to 
receive(:section_ids).and_return([subject.id,subject.id]) expect( subject.embed_title ).to eq( 'test - video.mp4' ) end @@ -759,9 +754,13 @@ let(:master_file) { FactoryBot.build(:master_file) } before do allow(ActiveEncode::Base).to receive(:find).and_return(nil) + allow(master_file).to receive(:finished_processing?).and_return(false) end it 'does not error if the master file has no encode' do - expect { master_file.send(:stop_processing!) }.not_to raise_error + expect { master_file.stop_processing! }.not_to raise_error + end + it 'enqueues cancel job if currently processing' do + expect { master_file.stop_processing! }.to have_enqueued_job(ActiveEncodeJobs::CancelEncodeJob) end end @@ -802,10 +801,12 @@ it 'sets a new media object as its parent' do master_file.media_object = media_object2 - expect(media_object1.reload.master_file_ids).not_to include master_file.id - expect(media_object1.reload.ordered_master_file_ids).not_to include master_file.id - expect(media_object2.reload.master_file_ids).to include master_file.id - expect(media_object2.reload.ordered_master_file_ids).to include master_file.id + media_object1.reload + media_object2.reload + expect(media_object1.master_file_ids).not_to include master_file.id + expect(media_object1.section_ids).not_to include master_file.id + expect(media_object2.master_file_ids).to include master_file.id + expect(media_object2.section_ids).to include master_file.id end end diff --git a/spec/models/media_object_spec.rb b/spec/models/media_object_spec.rb index 33da0c443e..65afba3c18 100644 --- a/spec/models/media_object_spec.rb +++ b/spec/models/media_object_spec.rb @@ -456,13 +456,13 @@ describe '#finished_processing?' 
do it 'returns true if the statuses indicate processing is finished' do - media_object.ordered_master_files += [FactoryBot.create(:master_file, :cancelled_processing)] - media_object.ordered_master_files += [FactoryBot.create(:master_file, :completed_processing)] + media_object.sections += [FactoryBot.create(:master_file, :cancelled_processing)] + media_object.sections += [FactoryBot.create(:master_file, :completed_processing)] expect(media_object.finished_processing?).to be true end it 'returns true if the statuses indicate processing is not finished' do - media_object.ordered_master_files += [FactoryBot.create(:master_file, :cancelled_processing)] - media_object.ordered_master_files += [FactoryBot.create(:master_file)] + media_object.sections += [FactoryBot.create(:master_file, :cancelled_processing)] + media_object.sections += [FactoryBot.create(:master_file)] expect(media_object.finished_processing?).to be false end end @@ -471,10 +471,11 @@ let(:master_file1) { FactoryBot.create(:master_file, media_object: media_object, duration: '40') } let(:master_file2) { FactoryBot.create(:master_file, media_object: media_object, duration: '40') } let(:master_file3) { FactoryBot.create(:master_file, media_object: media_object, duration: nil) } - let(:master_files) { [] } + let(:sections) { [] } before do - master_files + sections + media_object.reload # Explicitly run indexing job to ensure fields are indexed for structure searching MediaObjectIndexingJob.perform_now(media_object.id) end @@ -485,19 +486,19 @@ end end context 'with two master files' do - let(:master_files) { [master_file1, master_file2] } + let(:sections) { [master_file1, master_file2] } it 'returns the correct duration' do expect(media_object.send(:calculate_duration)).to eq(80) end end context 'with two master files one nil' do - let(:master_files) { [master_file1, master_file3] } + let(:sections) { [master_file1, master_file3] } it 'returns the correct duration' do 
expect(media_object.send(:calculate_duration)).to eq(40) end end context 'with one master file that is nil' do - let(:master_files) { [master_file3] } + let(:sections) { [master_file3] } it 'returns the correct duration' do expect(media_object.send(:calculate_duration)).to eq(0) end @@ -506,21 +507,21 @@ describe '#destroy' do let(:media_object) { FactoryBot.create(:media_object, :with_master_file) } - let(:master_file) { media_object.master_files.first } + let(:master_file) { media_object.sections.first } before do allow(master_file).to receive(:stop_processing!) end - it 'destroys related master_files' do + it 'destroys related sections' do expect { media_object.destroy }.to change { MasterFile.exists?(master_file) }.from(true).to(false) end it 'destroys multiple sections' do FactoryBot.create(:master_file, media_object: media_object) media_object.reload - expect(media_object.master_files.size).to eq 2 - media_object.master_files.each do |mf| + expect(media_object.sections.size).to eq 2 + media_object.sections.each do |mf| allow(mf).to receive(:stop_processing!) end expect { media_object.destroy }.to change { MasterFile.count }.from(2).to(0) @@ -818,7 +819,7 @@ mf2 = FactoryBot.create(:master_file, title: 'Test Label2', physical_description: 'cave paintings', media_object: media_object) media_object.reload - #expect(media_object.ordered_master_files.size).to eq(2) + #expect(media_object.sections.size).to eq(2) expect(media_object.section_physical_descriptions).to match(['cave paintings']) end end @@ -854,10 +855,10 @@ describe 'descMetadata' do it 'sets original_name to default value' do - expect(media_object.descMetadata.original_name).to eq 'descMetadata.xml' + # requires a reload now? 
+ expect(media_object.reload.descMetadata.original_name).to eq 'descMetadata.xml' end it 'is a valid MODS document' do - media_object = FactoryBot.create(:media_object, :with_master_file) xsd_path = File.join(Rails.root, 'spec', 'fixtures', 'mods-3-6.xsd') # Note: we instantiate Schema with a file handle so that relative paths # to included schema definitions can be resolved @@ -995,7 +996,7 @@ context "no error" do it 'merges' do - expect { media_object.merge! media_objects }.to change { media_object.master_files.to_a.count }.by(2) + expect { media_object.merge! media_objects }.to change { media_object.sections.count }.by(2) expect(media_objects.any? { |mo| MediaObject.exists?(mo.id) }).to be_falsey end end @@ -1011,7 +1012,7 @@ expect(fails).to eq([media_objects.first]) expect(media_objects.first.errors.count).to eq(1) - expect(media_object.master_files.to_a.count).to eq(2) + expect(media_object.sections.count).to eq(2) expect(MediaObject.exists?(media_objects.first.id)).to be_truthy expect(MediaObject.exists?(media_objects.second.id)).to be_falsey end @@ -1165,7 +1166,7 @@ describe "#has_captions" do let(:captionless_media_object) { FactoryBot.create(:media_object, :with_master_file) } - let(:captioned_media_object) { FactoryBot.create(:media_object, master_files: [master_file1, master_file2]) } + let(:captioned_media_object) { FactoryBot.create(:media_object, sections: [master_file1, master_file2]) } let(:master_file1) { FactoryBot.create(:master_file) } let(:master_file2) { FactoryBot.create(:master_file, :with_captions) } it "returns false when child master files contain no captions" do @@ -1179,7 +1180,7 @@ describe "#has_transcripts" do let(:transcriptless_media_object) { FactoryBot.create(:media_object, :with_master_file) } - let(:transcript_media_object) { FactoryBot.create(:media_object, master_files: [master_file1, master_file2]) } + let(:transcript_media_object) { FactoryBot.create(:media_object, sections: [master_file1, master_file2]) } 
let(:master_file1) { FactoryBot.create(:master_file) } let(:master_file2) { FactoryBot.create(:master_file, supplemental_files: [transcript]) } let(:transcript) { FactoryBot.create(:supplemental_file, :with_transcript_tag, :with_transcript_file) } @@ -1191,4 +1192,103 @@ expect(transcript_media_object.has_transcripts).to be true end end + + describe 'section_list' do + let(:section) { FactoryBot.create(:master_file) } + let(:section2) { FactoryBot.create(:master_file) } + let!(:media_object) { FactoryBot.create(:media_object, master_files: [section2, section], sections: [section, section2]) } + + describe 'section_ids' do + it 'returns an ordered list of master file ids' do + expect(media_object.section_ids).to eq [section.id, section2.id] + end + end + + describe 'section_ids=' do + it 'sets ordered list of master file ids without modifying master_file_ids' do + expect(media_object.master_file_ids).to contain_exactly(section.id, section2.id) + expect(media_object.section_ids).to eq [section.id, section2.id] + media_object.section_ids = [section2.id] + expect(media_object.master_file_ids).to contain_exactly(section.id, section2.id) + expect(media_object.section_ids).to eq [section2.id] + end + end + + describe 'sections' do + it 'returns an ordered list of master file objects' do + expect(media_object.sections).to eq [section, section2] + end + end + + describe 'sections=' do + it 'sets ordered list of master file objects without modifying master_file_ids' do + expect(media_object.master_files).to contain_exactly(section, section2) + expect(media_object.sections).to eq [section, section2] + media_object.sections = [section2] + expect(media_object.master_files).to contain_exactly(section, section2) + expect(media_object.sections).to eq [section2] + end + end + + it '#sections and #section_ids stay sync' do + expect(media_object.section_ids).to eq [section.id, section2.id] + expect(media_object.sections).to eq [section, section2] + media_object.sections = [section2] 
+ expect(media_object.section_ids).to eq [section2.id] + expect(media_object.sections).to eq [section2] + media_object.section_ids = [section.id] + expect(media_object.section_ids).to eq [section.id] + expect(media_object.sections).to eq [section] + end + + context 'migrating ordered_aggregation' do + let!(:media_object) do + mo = FactoryBot.build(:media_object) + mo.ordered_master_files = [section, section2] + # Trick the callback to avoid persisting section_list + mo.instance_variable_set(:@section_ids, mo.master_file_ids) + mo.save + mo.reload + end + + it 'reads from ordered_aggregation' do + expect(media_object.ordered_master_files.to_a).to eq [section, section2] + expect(media_object.section_list).to eq nil + mo = MediaObject.find(media_object.id) + expect(mo.section_list).not_to eq nil + expect(mo.ordered_master_files.to_a).to eq [section, section2] + expect(mo.sections).to eq mo.ordered_master_files.to_a + expect(mo.section_ids).to eq mo.ordered_master_file_ids + end + + it 'prefers reading from section_list when set' do + expect(media_object.section_list).to eq nil + mo = MediaObject.find(media_object.id) + new_section = FactoryBot.create(:master_file) + mo.sections += [new_section] + mo.save + mo = MediaObject.find(media_object.id) + expect(mo.section_list).not_to eq nil + expect(mo.sections).not_to eq mo.ordered_master_files.to_a + expect(mo.section_ids).to eq [section.id, section2.id, new_section.id] + end + end + end + + describe '#reload' do + let(:section) { FactoryBot.create(:master_file) } + + context 'resets cached values' do + it 'resets sections' do + expect(media_object.sections).to eq [] + expect(media_object.section_ids).to eq [] + media_object.sections += [section] + expect(media_object.sections).to eq [section] + expect(media_object.section_ids).to eq [section.id] + media_object.reload + expect(media_object.sections).to eq [] + expect(media_object.section_ids).to eq [] + end + end + end end diff --git a/spec/models/working_file_path_spec.rb 
b/spec/models/working_file_path_spec.rb index 050e0d975d..2e410837f7 100644 --- a/spec/models/working_file_path_spec.rb +++ b/spec/models/working_file_path_spec.rb @@ -93,7 +93,7 @@ it 'sends the working_file_path to active_encode' do MasterFileBuilder.build(media_object, params) - master_file = media_object.reload.master_files.first + master_file = media_object.reload.sections.first expect(File.exist? master_file.working_file_path.first).to be true input = FileLocator.new(master_file.working_file_path.first).uri.to_s expect(encoder_class).to have_received(:create).with(input, { headers: nil, master_file_id: master_file.id, preset: workflow }) @@ -104,7 +104,7 @@ it 'sends the working_file_path to active_encode' do MasterFileBuilder.build(media_object, params) - master_file = media_object.reload.master_files.first + master_file = media_object.reload.sections.first expect(File.exist? master_file.working_file_path.first).to be true input_path = FileLocator.new(master_file.working_file_path.first).uri.to_s expect(encoder_class).to have_received(:create).with(input_path, master_file_id: master_file.id, outputs: [{ label: "high", url: input_path }], preset: "pass_through") @@ -119,7 +119,7 @@ it 'sends the working_file_path to active_encode' do MasterFileBuilder.build(media_object, params) - master_file = media_object.reload.master_files.first + master_file = media_object.reload.sections.first expect(File.exist? master_file.working_file_path.first).to be true input = FileLocator.new(master_file.working_file_path.first).uri.to_s expect(encoder_class).to have_received(:create).with(input, { headers: nil, master_file_id: master_file.id, preset: workflow }) @@ -130,7 +130,7 @@ it 'sends the working_file_path to active_encode' do MasterFileBuilder.build(media_object, params) - master_file = media_object.reload.master_files.first + master_file = media_object.reload.sections.first expect(File.exist? 
master_file.working_file_path.first).to be true input_path = FileLocator.new(master_file.working_file_path.first).uri.to_s expect(encoder_class).to have_received(:create).with(input_path, master_file_id: master_file.id, outputs: [{ label: "high", url: input_path }], preset: "pass_through") @@ -152,7 +152,7 @@ it 'sends the working_file_path to active_encode' do entry.process! - master_file = media_object.reload.master_files.first + master_file = media_object.reload.sections.first expect(File.exist? master_file.working_file_path.first).to be true input = FileLocator.new(master_file.working_file_path.first).uri.to_s expect(encoder_class).to have_received(:create).with(input, { headers: nil, master_file_id: master_file.id, preset: workflow }) @@ -163,7 +163,7 @@ it 'sends the working_file_path to active_encode' do entry.process! - master_file = media_object.reload.master_files.first + master_file = media_object.reload.sections.first expect(File.exist? master_file.working_file_path.first).to be true input_path = FileLocator.new(master_file.working_file_path.first).uri.to_s expect(encoder_class).to have_received(:create).with(input_path, master_file_id: master_file.id, outputs: [{ label: "high", url: input_path }], preset: "pass_through" ) @@ -191,7 +191,7 @@ # TODO: Ensure all working file copies are cleaned up by the background job it 'sends the working_file_path to active_encode' do entry.process! - master_file = media_object.reload.master_files.first + master_file = media_object.reload.sections.first working_file_path_high = master_file.working_file_path.find { |file| file.include? "high" } working_file_path_medium = master_file.working_file_path.find { |file| file.include? "medium" } working_file_path_low = master_file.working_file_path.find { |file| file.include? 
"low" } @@ -224,7 +224,7 @@ it 'sends the file_location to active_encode' do MasterFileBuilder.build(media_object, params) - master_file = media_object.reload.master_files.first + master_file = media_object.reload.sections.first input = FileLocator.new(master_file.file_location).uri.to_s expect(encoder_class).to have_received(:create).with(input, { headers: nil, master_file_id: master_file.id, preset: workflow }) end @@ -234,7 +234,7 @@ it 'sends the file_location to active_encode' do MasterFileBuilder.build(media_object, params) - master_file = media_object.reload.master_files.first + master_file = media_object.reload.sections.first input_path = FileLocator.new(master_file.file_location).uri.to_s expect(encoder_class).to have_received(:create).with(input_path, master_file_id: master_file.id, outputs: [{ label: "high", url: input_path }], preset: "pass_through") end @@ -248,7 +248,7 @@ it 'sends the file_location to active_encode' do MasterFileBuilder.build(media_object, params) - master_file = media_object.reload.master_files.first + master_file = media_object.reload.sections.first input = FileLocator.new(master_file.file_location).uri.to_s expect(encoder_class).to have_received(:create).with(input, { headers: nil, master_file_id: master_file.id, preset: workflow }) end @@ -258,7 +258,7 @@ it 'sends the file_location to active_encode' do MasterFileBuilder.build(media_object, params) - master_file = media_object.reload.master_files.first + master_file = media_object.reload.sections.first input_path = FileLocator.new(master_file.file_location).uri.to_s expect(encoder_class).to have_received(:create).with(input_path, master_file_id: master_file.id, outputs: [{ label: "high", url: input_path }], preset: "pass_through") end @@ -279,7 +279,7 @@ it 'sends the file_location to active_encode' do entry.process! 
- master_file = media_object.reload.master_files.first + master_file = media_object.reload.sections.first input = FileLocator.new(master_file.file_location).uri.to_s expect(encoder_class).to have_received(:create).with(input, { headers: nil, master_file_id: master_file.id, preset: workflow }) end @@ -289,7 +289,7 @@ it 'sends the file_location to active_encode' do entry.process! - master_file = media_object.reload.master_files.first + master_file = media_object.reload.sections.first input_path = FileLocator.new(master_file.file_location).uri.to_s expect(encoder_class).to have_received(:create).with(input_path, master_file_id: master_file.id, outputs: [{ label: "high", url: input_path }], preset: "pass_through") end @@ -314,7 +314,7 @@ it 'sends the derivative locations to active_encode' do entry.process! - master_file = media_object.reload.master_files.first + master_file = media_object.reload.sections.first input_path = Addressable::URI.convert_path(File.absolute_path(filename_high)).to_s expect(encoder_class).to have_received(:create).with( diff --git a/spec/presenters/speedy_af/proxy/media_object_spec.rb b/spec/presenters/speedy_af/proxy/media_object_spec.rb index db15d8e3cd..b7cb57b63c 100644 --- a/spec/presenters/speedy_af/proxy/media_object_spec.rb +++ b/spec/presenters/speedy_af/proxy/media_object_spec.rb @@ -65,6 +65,8 @@ expect(presenter.other_identifier).to eq media_object.other_identifier expect(presenter.comment).to eq media_object.comment.to_a expect(presenter.visibility).to eq media_object.visibility + expect(presenter.section_list).to eq media_object.section_list + expect(presenter.section_ids).to eq media_object.section_ids end end @@ -83,4 +85,24 @@ expect(presenter.lending_period).to be_present end end + + describe '#sections' do + let(:section1) { FactoryBot.create(:master_file, media_object: media_object) } + let(:section2) { FactoryBot.create(:master_file, media_object: media_object) } + let(:media_object) { FactoryBot.create(:media_object) } + 
+ context 'when no sections present' do + it 'returns empty array without reifying' do + expect(presenter.sections).to eq [] + expect(presenter.real?).to eq false + end + end + + it 'returns array of master file proxy objects in proper order' do + section1 + section2 + expect(presenter.sections.map(&:id)).to eq media_object.section_ids + expect(presenter.sections.map(&:class)).to eq [SpeedyAF::Proxy::MasterFile, SpeedyAF::Proxy::MasterFile] + end + end end