From 31f507ab04a16deefaf9e1a4d0ff44d7368e3745 Mon Sep 17 00:00:00 2001 From: Phuong Dinh Date: Tue, 12 Sep 2017 13:36:35 -0400 Subject: [PATCH 001/237] Add CodeClimate integration with Travis --- .travis.yml | 10 +++++++--- Gemfile | 2 +- Gemfile.lock | 14 ++++---------- spec/rails_helper.rb | 7 ++----- 4 files changed, 14 insertions(+), 19 deletions(-) diff --git a/.travis.yml b/.travis.yml index eaba423349..8a5792d5e5 100644 --- a/.travis.yml +++ b/.travis.yml @@ -17,13 +17,17 @@ before_script: - cp config/database.travis.yml config/database.yml - mysql -e 'create database avalon_test;' - bundle exec rake db:migrate + - curl -L https://codeclimate.com/downloads/test-reporter/test-reporter-latest-linux-amd64 > ./cc-test-reporter + - chmod +x ./cc-test-reporter + - ./cc-test-reporter before-build +script: + - bundle exec rake +after_script: + - ./cc-test-reporter after-build --exit-code $TRAVIS_TEST_RESULT language: ruby jdk: - oraclejdk8 rvm: - 2.3 - 2.4 -addons: - code_climate: - repo_token: 1fb78f221b36e5615428f2ada12950b39a3b702b23fdd41e1b980dc4b47d0233 dist: precise diff --git a/Gemfile b/Gemfile index a2d6ea08c7..31d45ee396 100644 --- a/Gemfile +++ b/Gemfile @@ -135,9 +135,9 @@ end group :test do gem 'factory_girl_rails' gem 'simplecov' + gem 'codeclimate-test-reporter' gem 'faker' gem 'database_cleaner' - gem 'coveralls' gem 'shoulda-matchers' gem 'fakefs', require: 'fakefs/safe' gem 'email_spec' diff --git a/Gemfile.lock b/Gemfile.lock index 55d6cc6e89..715bf7a624 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -278,6 +278,8 @@ GEM xpath (~> 2.0) chronic (0.10.2) cliver (0.3.2) + codeclimate-test-reporter (1.0.8) + simplecov (<= 0.13) coderay (1.1.1) coffee-rails (4.1.1) coffee-script (>= 2.2.0) @@ -287,12 +289,6 @@ GEM execjs coffee-script-source (1.12.2) concurrent-ruby (1.0.5) - coveralls (0.8.17) - json (>= 1.8, < 3) - simplecov (~> 0.12.0) - term-ansicolor (~> 1.3) - thor (~> 0.19.1) - tins (~> 1.6) crack (0.4.3) safe_yaml (~> 1.0.0) daemons (1.2.4) @@ -718,12 +714,9 @@ GEM stomp (1.4.3) sxp (1.0.0) rdf (~> 2.0) - term-ansicolor (1.4.0) - tins (~> 1.0) thor (0.19.4) thread_safe (0.3.6) tilt (2.0.5) - tins (1.13.0) twitter-typeahead-rails (0.11.1.pre.corejavascript) actionpack (>= 3.1) jquery-rails @@ -786,8 +779,8 @@ DEPENDENCIES capistrano-resque capistrano-rvm capybara + codeclimate-test-reporter coffee-rails (~> 4.1.0) - coveralls database_cleaner devise dotenv-rails @@ -860,3 +853,4 @@ DEPENDENCIES BUNDLED WITH 1.15.3 + diff --git a/spec/rails_helper.rb b/spec/rails_helper.rb index d011be4093..1957c7bc91 100644 --- a/spec/rails_helper.rb +++ b/spec/rails_helper.rb @@ -1,10 +1,7 @@ -require 'coveralls' -Coveralls.wear! 
- if ENV['COVERAGE'] || ENV['TRAVIS'] require 'simplecov' - SimpleCov.root(File.expand_path('..', __FILE__)) - SimpleCov.formatter = Coveralls::SimpleCov::Formatter + require 'codeclimate-test-reporter' + SimpleCov.start('rails') do add_filter '/spec' end From f3aff7ec1408b6d66403ed6b1a0f3a457020e080 Mon Sep 17 00:00:00 2001 From: Phuong Dinh Date: Mon, 25 Sep 2017 11:01:37 -0400 Subject: [PATCH 002/237] Fix SQL injection issues --- app/controllers/master_files_controller.rb | 2 +- app/controllers/migration_status_controller.rb | 18 ++++++++++++++++-- app/controllers/objects_controller.rb | 8 ++++---- app/controllers/playlists_controller.rb | 3 ++- lib/avalon/sanitizer.rb | 6 +++--- lib/avalon/variations_mapping_service.rb | 8 ++++---- 6 files changed, 30 insertions(+), 15 deletions(-) diff --git a/app/controllers/master_files_controller.rb b/app/controllers/master_files_controller.rb index 5a334bf386..b5f5adce73 100644 --- a/app/controllers/master_files_controller.rb +++ b/app/controllers/master_files_controller.rb @@ -60,7 +60,7 @@ def embed def oembed if params[:url].present? id = params[:url].split('?')[0].split('/').last - mf = MasterFile.where("identifier_ssim:\"#{id.downcase}\"").first + mf = MasterFile.where(identifier_ssim: id.downcase).first mf ||= MasterFile.find(id) rescue nil if mf.present? width = params[:maxwidth] || MasterFile::EMBED_SIZE[:medium] diff --git a/app/controllers/migration_status_controller.rb b/app/controllers/migration_status_controller.rb index a8e9aff48f..7c49760ed7 100644 --- a/app/controllers/migration_status_controller.rb +++ b/app/controllers/migration_status_controller.rb @@ -11,7 +11,6 @@ # CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. # --- END LICENSE_HEADER BLOCK --- - class MigrationStatusController < ApplicationController before_filter :auth @@ -28,7 +27,10 @@ def show criteria[:status] = "migrate" if params[:status] == "in progress" criteria[:status] ||= params[:status] end - @statuses = MigrationStatus.where(criteria).order(params[:order] || :id).page(params[:page]).per(params[:per]) + @statuses = MigrationStatus.where(criteria) + .order(sanitize_order(params[:order]) || :id) + .page(params[:page]) + .per(params[:per]) render without_layout_if_xhr end @@ -66,4 +68,16 @@ def auth def without_layout_if_xhr request.xhr? ? { layout: false } : {} end + +private + + # Avoid SQL injection attack on ActiveRecord order method + # Input must be in format "column asc" or "column desc" + def sanitize_order(order_param) + if order_param.present? + { order_param.split.first => order_param.split.second } + else + nil + end + end end diff --git a/app/controllers/objects_controller.rb b/app/controllers/objects_controller.rb index e3df551c66..3eb1680d6b 100644 --- a/app/controllers/objects_controller.rb +++ b/app/controllers/objects_controller.rb @@ -1,11 +1,11 @@ # Copyright 2011-2017, The Trustees of Indiana University and Northwestern # University. Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. -# +# # You may obtain a copy of the License at -# +# # http://www.apache.org/licenses/LICENSE-2.0 -# +# # Unless required by applicable law or agreed to in writing, software distributed # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR # CONDITIONS OF ANY KIND, either express or implied. 
See the License for the @@ -14,7 +14,7 @@ class ObjectsController < ApplicationController def show - obj = ActiveFedora::Base.where("identifier_ssim:\"#{params[:id].downcase}\"").first + obj = ActiveFedora::Base.where(identifier_ssim: params[:id].downcase).first obj ||= ActiveFedora::Base.find(params[:id], cast: true) rescue nil obj ||= GlobalID::Locator.locate params[:id] if obj.blank? diff --git a/app/controllers/playlists_controller.rb b/app/controllers/playlists_controller.rb index 3df16cb094..9df2b45507 100644 --- a/app/controllers/playlists_controller.rb +++ b/app/controllers/playlists_controller.rb @@ -53,7 +53,8 @@ def paged_index columns = ['title','size','visibility','created_at','updated_at','actions'] playlistsFiltered = playlists.where("title LIKE ?", "%#{request.params['search']['value']}%") if columns[request.params['order']['0']['column'].to_i] != 'size' - playlistsFiltered = playlistsFiltered.order("lower(#{columns[request.params['order']['0']['column'].to_i]}) #{request.params['order']['0']['dir']}") + playlistsFiltered = playlistsFiltered + .order(columns[request.params['order']['0']['column'].to_i].downcase => request.params['order']['0']['dir']) pagedPlaylists = playlistsFiltered.offset(request.params['start']).limit(request.params['length']) else # sort by size (item count): decorate list with playlistitem count then sort and undecorate diff --git a/lib/avalon/sanitizer.rb b/lib/avalon/sanitizer.rb index 6e6d56497a..2f4b3cbbdd 100644 --- a/lib/avalon/sanitizer.rb +++ b/lib/avalon/sanitizer.rb @@ -1,11 +1,11 @@ # Copyright 2011-2017, The Trustees of Indiana University and Northwestern # University. Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. -# +# # You may obtain a copy of the License at -# +# # http://www.apache.org/licenses/LICENSE-2.0 -# +# # Unless required by applicable law or agreed to in writing, software distributed # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR # CONDITIONS OF ANY KIND, either express or implied. See the License for the diff --git a/lib/avalon/variations_mapping_service.rb b/lib/avalon/variations_mapping_service.rb index d49a50911c..e8bb13a3a2 100644 --- a/lib/avalon/variations_mapping_service.rb +++ b/lib/avalon/variations_mapping_service.rb @@ -1,11 +1,11 @@ # Copyright 2011-2017, The Trustees of Indiana University and Northwestern # University. Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. -# +# # You may obtain a copy of the License at -# +# # http://www.apache.org/licenses/LICENSE-2.0 -# +# # Unless required by applicable law or agreed to in writing, software distributed # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR # CONDITIONS OF ANY KIND, either express or implied. 
See the License for the @@ -23,7 +23,7 @@ def find_master_file(variations_media_object_id) raise ArgumentError, 'Not a valid Variations Media Object ID' unless variations_media_object_id =~ %r{/MediaObject/} notis_id = MEDIA_OBJECT_ID_MAP[variations_media_object_id] raise RuntimeError, "Unknown Variations Id: #{variations_media_object_id}" unless notis_id - master_file = MasterFile.where("identifier_ssim:#{notis_id.downcase}").first + master_file = MasterFile.where(identifier_ssim: notis_id.downcase).first raise RuntimeError, "MasterFile could not be found for Variations label #{notis_id}" unless master_file master_file end From ad9564a601d4b27f80ff8f0424b894571d859e36 Mon Sep 17 00:00:00 2001 From: Phil Dinh Date: Wed, 27 Sep 2017 14:05:18 -0400 Subject: [PATCH 003/237] Escape structure metadata So Edit Structure popup doesn't fail to close --- app/controllers/master_files_controller.rb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/app/controllers/master_files_controller.rb b/app/controllers/master_files_controller.rb index b5f5adce73..eec9d6eb6f 100644 --- a/app/controllers/master_files_controller.rb +++ b/app/controllers/master_files_controller.rb @@ -120,7 +120,7 @@ def attach_structure end respond_to do |format| format.html { redirect_to edit_media_object_path(@master_file.media_object_id, step: 'structure') } - format.json { render json: {structure: structure, flash: flash} } + format.json { render json: {structure: ERB::Util.html_escape(structure), flash: flash} } end end From b3cf0e2104b0eadfba5fbf5f576d3b9decbdb06a Mon Sep 17 00:00:00 2001 From: Phuong Dinh Date: Fri, 7 Apr 2017 16:32:54 -0400 Subject: [PATCH 004/237] Core AWS commits: Use Config gem and add zookeeper rake tasks MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Elastic Beanstalk extensions for ffmpeg and mediainfo ERBify role_map.yaml so that the initial admin user can come from the settings Implement zookeeper:create rake task Install git and perform zookeeper tasks via ebextensions Improve authentication initialization Abstract file handling logic out of MasterFile and MasterFileController to make way for S3 and other remote files Enable Elastic Transcoding Cherry pick SecurityHandler abstraction from 5.x-aws Finish merge of SecurityHandler Use CloudFront signed URLs (for RTMP) and signed cookies (for HLS) Add ability to store CloudFront signing key in an S3 bucket Direct upload to S3 Don’t allow URL-unsafe characters in derivative filenames. It’s easier that way. Use active_elastic_job on AWS Extract stills from S3! 
Teach ElasticTranscoderJob how to remove derivatives S3 Dropbox Browsing Stringify URIs Prepare batch ingest code for S3 Add better_active_elastic_job_adapter from hyku Batch ingest AWS code [WIP] Finish S3 ActiveJob-aware batch ingest FileSystem dropbox fix Finish batch ingest Settings tweaks Allow auth to be configured either an an array or a hash (for railsconfig environment compatibility) Move common/static AWS environment config to .ebextensions --- .ebextensions/01_packages.config | 12 + .ebextensions/02_install.config | 25 + .ebextensions/03_environment.config | 25 + .gitignore | 4 + Gemfile | 29 +- Gemfile.lock | 156 +- .../javascripts/direct_upload.js.coffee | 52 + .../javascripts/refresh_token.js.coffee | 15 +- app/controllers/admin/groups_controller.rb | 8 +- app/controllers/comments_controller.rb | 10 +- app/controllers/master_files_controller.rb | 95 +- app/controllers/media_objects_controller.rb | 4 +- .../users/omniauth_callbacks_controller.rb | 11 +- app/helpers/application_helper.rb | 2 +- app/helpers/media_objects_helper.rb | 4 +- app/helpers/security_helper.rb | 16 + app/helpers/upload_form_helper.rb | 25 + app/jobs/active_encode_job.rb | 8 +- app/jobs/batch_ingest_job.rb | 38 + app/jobs/s3_split_job.rb | 44 + app/mailers/comments_mailer.rb | 2 +- app/mailers/ingest_batch_mailer.rb | 12 +- app/mailers/notifications_mailer.rb | 2 +- app/models/admin/collection.rb | 38 +- app/models/admin/group.rb | 4 +- app/models/concerns/derivative_behavior.rb | 11 +- app/models/concerns/master_file_behavior.rb | 6 +- app/models/derivative.rb | 8 +- app/models/elastic_transcoder_job.rb | 226 +++ app/models/file_upload_step.rb | 2 +- app/models/master_file.rb | 74 +- app/models/role_map.rb | 8 +- app/models/security_handler.rb | 19 + app/models/stream_token.rb | 2 +- app/models/user.rb | 8 +- app/services/file_locator.rb | 90 + app/services/master_file_builder.rb | 83 + app/services/solr_collection_creator.rb | 78 + app/services/solr_config_uploader.rb | 71 + app/views/media_objects/_file_upload.html.erb | 2 +- app/views/playlists/_player.html.erb | 5 +- app/views/playlists/index.html.erb | 6 +- config/avalon.yml.example | 3 + config/encoding_presets.yml | 67 + config/environments/production.rb | 4 +- config/initializers/about_page.rb | 2 +- config/initializers/active_encode.rb | 21 +- config/initializers/batch_manifest_class.rb | 3 + config/initializers/browse_everything.rb | 7 + config/initializers/config.rb | 31 + config/initializers/default_host.rb | 11 +- config/initializers/devise.rb | 17 +- config/initializers/dropbox_context.rb | 7 +- config/initializers/mailer.rb | 4 + config/initializers/security.rb | 70 + config/role_map.yml | 6 + config/routes.rb | 2 +- config/settings.yml | 72 + config/settings/development.yml | 2 + config/settings/production.yml | 0 config/settings/test.yml | 15 + .../better_active_elastic_job_adapter.rb | 31 + lib/avalon/authentication.rb | 22 +- lib/avalon/batch.rb | 12 +- lib/avalon/batch/entry.rb | 179 +- lib/avalon/batch/file_manifest.rb | 71 + lib/avalon/batch/ingest.rb | 8 +- lib/avalon/batch/manifest.rb | 84 +- lib/avalon/batch/package.rb | 19 +- lib/avalon/batch/s3_manifest.rb | 77 + lib/avalon/bib_retriever.rb | 23 +- lib/avalon/bib_retriever/zoom.rb | 11 +- lib/avalon/configuration.rb | 181 +- lib/avalon/controlled_vocabulary.rb | 2 +- lib/avalon/stream_mapper.rb | 20 +- lib/avalon/variations_mapping_service.rb | 2 +- lib/tasks/zookeeper.rake | 18 + .../media_objects_controller_spec.rb | 6 +- .../controllers/vocabulary_controller_spec.rb | 4 +- 
spec/helpers/media_objects_helper_spec.rb | 12 +- spec/lib/avalon/batch_ingest_spec.rb | 10 +- spec/lib/avalon/bib_retriever_spec.rb | 10 +- spec/lib/avalon/dropbox_spec.rb | 2 +- spec/models/collection_spec.rb | 10 +- spec/models/derivative_spec.rb | 6 +- spec/models/group_spec.rb | 2 +- spec/models/master_file_spec.rb | 14 +- spec/rails_helper.rb | 14 +- .../assets/javascripts/z.jquery.fileupload.js | 1482 +++++++++++++++++ 89 files changed, 3345 insertions(+), 671 deletions(-) create mode 100644 .ebextensions/01_packages.config create mode 100644 .ebextensions/02_install.config create mode 100644 .ebextensions/03_environment.config create mode 100644 app/assets/javascripts/direct_upload.js.coffee create mode 100644 app/helpers/security_helper.rb create mode 100644 app/helpers/upload_form_helper.rb create mode 100644 app/jobs/batch_ingest_job.rb create mode 100644 app/jobs/s3_split_job.rb create mode 100644 app/models/elastic_transcoder_job.rb create mode 100644 app/models/security_handler.rb create mode 100644 app/services/file_locator.rb create mode 100644 app/services/master_file_builder.rb create mode 100644 app/services/solr_collection_creator.rb create mode 100644 app/services/solr_config_uploader.rb create mode 100644 config/encoding_presets.yml create mode 100644 config/initializers/batch_manifest_class.rb create mode 100644 config/initializers/browse_everything.rb create mode 100644 config/initializers/config.rb create mode 100644 config/initializers/mailer.rb create mode 100644 config/initializers/security.rb create mode 100644 config/settings.yml create mode 100644 config/settings/development.yml create mode 100644 config/settings/production.yml create mode 100644 config/settings/test.yml create mode 100644 lib/active_job/queue_adapters/better_active_elastic_job_adapter.rb create mode 100644 lib/avalon/batch/file_manifest.rb create mode 100644 lib/avalon/batch/s3_manifest.rb create mode 100644 lib/tasks/zookeeper.rake create mode 100644 vendor/assets/javascripts/z.jquery.fileupload.js diff --git a/.ebextensions/01_packages.config b/.ebextensions/01_packages.config new file mode 100644 index 0000000000..6bc4e2efe7 --- /dev/null +++ b/.ebextensions/01_packages.config @@ -0,0 +1,12 @@ +packages: + yum: + git: [] +commands: + install_ffmpeg: + command: | + mkdir -p /tmp/ffmpeg + cd /tmp/ffmpeg + curl https://johnvansickle.com/ffmpeg/releases/ffmpeg-release-64bit-static.tar.xz | tar xJ + cp `find . -type f -executable` /usr/local/bin/ + install_mediainfo: + command: yum -y --enablerepo=epel install mediainfo libyaz-devel tcp_wrappers-devel diff --git a/.ebextensions/02_install.config b/.ebextensions/02_install.config new file mode 100644 index 0000000000..e9446e279e --- /dev/null +++ b/.ebextensions/02_install.config @@ -0,0 +1,25 @@ +# From https://gist.github.com/t2/c629e1018a0f6815d871 +commands: + create_post_dir: + command: "mkdir /opt/elasticbeanstalk/hooks/appdeploy/post" + ignoreErrors: true +files: + "/opt/elasticbeanstalk/hooks/appdeploy/post/999_install_application_dependencies": + mode: "000755" + content: | + #!/bin/bash + + EB_SCRIPT_DIR=$(/opt/elasticbeanstalk/bin/get-config container -k script_dir) + EB_CONFIG_APP_CURRENT=$(/opt/elasticbeanstalk/bin/get-config container -k app_deploy_dir) + EB_SUPPORT_DIR=$(/opt/elasticbeanstalk/bin/get-config container -k support_dir) + + . $EB_SUPPORT_DIR/envvars + . $EB_SCRIPT_DIR/use-app-ruby.sh + + cd $EB_CONFIG_APP_CURRENT + + . 
$EB_SUPPORT_DIR/envvars.d/sysenv + + sleep 10 + + bundle exec rake zookeeper:upload zookeeper:create diff --git a/.ebextensions/03_environment.config b/.ebextensions/03_environment.config new file mode 100644 index 0000000000..d0b00b6bbf --- /dev/null +++ b/.ebextensions/03_environment.config @@ -0,0 +1,25 @@ +option_settings: + - option_name: BUNDLE_WITH + value: aws:postgres + - option_name: BUNDLE_WITHOUT + value: development:test + - option_name: DISABLE_REDIS_CLUSTER + value: 'true' + - option_name: RAILS_GROUPS + value: 'aws' + - option_name: RAILS_SERVE_STATIC_FILES + value: 'true' + - option_name: SETTINGS__ACTIVE_JOB__QUEUE_ADAPTER + value: better_active_elastic_job + - option_name: SETTINGS__FFMPEG__PATH + value: "/usr/local/bin/ffmpeg" + - option_name: SETTINGS__GROUPS__SYSTEM_GROUPS + value: administrator,group_manager,manager + - option_name: SETTINGS__ENCODING__ENGINE_ADAPTER + value: elastic_transcoder + - option_name: SETTINGS__MEDIAINFO__PATH + value: "/usr/bin/mediainfo" + - option_name: SETTINGS__NAME + value: avalon + - option_name: SETTINGS__STREAMING__SERVER + value: aws diff --git a/.gitignore b/.gitignore index 2822f7267d..17154e2914 100644 --- a/.gitignore +++ b/.gitignore @@ -40,3 +40,7 @@ masterfiles/* Gemfile.local config/fedora3.yml migration_report + +config/settings.local.yml +config/settings/*.local.yml +config/environments/*.local.yml diff --git a/Gemfile b/Gemfile index 31d45ee396..957c4cbc04 100644 --- a/Gemfile +++ b/Gemfile @@ -49,23 +49,24 @@ gem 'devise' #gem 'devise-guests', '~> 0.3' # Avalon-specific -gem 'avalon-workflow', git: "https://github.com/avalonmediasystem/avalon-workflow.git", tag: 'avalon-r6.2' -gem 'active_encode', '~> 0.1.1' +gem 'active_encode', git: "http://github.com/projecthydra-labs/active-encode.git" +gem 'avalon-workflow', git: "https://github.com/avalonmediasystem/avalon-workflow.git", branch: 'no_invalid_objs' gem 'hooks' gem 'iconv' -gem 'mediainfo', git: "https://github.com/avalonmediasystem/mediainfo.git", tag: 'avalon-r6.1' +gem 'mediainfo', git: "https://github.com/avalonmediasystem/mediainfo.git", branch: 'remote_files' gem 'omniauth-identity' gem 'omniauth-lti', git: "https://github.com/avalonmediasystem/omniauth-lti.git", tag: 'avalon-r4' +gem 'omniauth-openam' gem 'net-ldap' gem 'edtf' gem 'rest-client' gem 'active_annotations', '~> 0.2.2' gem 'acts_as_list' gem 'api-pagination' -gem 'browse-everything', '~> 0.10.5' +gem 'browse-everything', '~> 0.13.0' gem 'bootstrap_form' -gem 'bootstrap-toggle-rails' -gem 'rubyhorn', git: "https://github.com/avalonmediasystem/rubyhorn.git", tag: 'avalon-r6' +gem 'bootstrap-toggle-rails', git: "https://github.com/rkallensee/bootstrap-toggle-rails.git" +gem 'rubyhorn', git: "https://github.com/avalonmediasystem/rubyhorn.git" gem 'roo' gem 'activerecord-session_store' gem 'whenever', git: "https://github.com/javan/whenever.git", require: false @@ -73,7 +74,8 @@ gem 'with_locking' gem 'parallel' gem 'avalon-about', git: 'https://github.com/avalonmediasystem/avalon-about.git', tag: 'avalon-r6' gem 'about_page', git: 'https://github.com/avalonmediasystem/about_page.git', tag: 'avalon-r6.1' -gem 'jquery-datatables' +gem 'config' +gem 'marc' #MediaElement.js related gem 'mediaelement_rails', git: 'https://github.com/avalonmediasystem/mediaelement_rails.git', tag: 'avalon-r6_flash-fix' @@ -97,7 +99,6 @@ end group :development, :test do gem 'equivalent-xml' # Call 'byebug' anywhere in the code to stop execution and get a debugger console - gem 'rb-readline' gem 'byebug' gem 'pry-rails' gem 
'pry-byebug' @@ -148,6 +149,18 @@ group :test do gem 'webmock' end +group :aws, optional: true do + gem 'aws-sdk' + gem 'aws-sdk-rails' + gem 'cloudfront-signer' + gem 'zk' + gem 'active_elastic_job', '~> 1.7' +end + +group :zoom, optional: true do + gem 'zoom' +end + group :mysql, optional: true do gem 'mysql2' end diff --git a/Gemfile.lock b/Gemfile.lock index 5399a4d7a2..fe8801fe53 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -1,3 +1,10 @@ +GIT + remote: http://github.com/projecthydra-labs/active-encode.git + revision: 715159a9b66a5b2418701cc6b962df66d677034f + specs: + active_encode (0.1.0) + activesupport + GIT remote: https://github.com/avalonmediasystem/about_page.git revision: d59bd65d5482f0b69c2ebf1acd60952ecd6b6fb3 @@ -85,8 +92,8 @@ GIT GIT remote: https://github.com/avalonmediasystem/mediainfo.git - revision: 5e28f2b58cefb20b6e06f022256ea1d13b761b1f - tag: avalon-r6.1 + revision: cd5b2675958f3215f93190ab696727dd2327e842 + branch: remote_files specs: mediainfo (0.7.1) @@ -120,6 +127,21 @@ GIT whenever (0.9.7) chronic (>= 0.6.3) +GIT + remote: https://github.com/projecthydra-labs/speedy_af.git + revision: 661b34c57f4e8de9db2c07aa80acce7c6288462d + tag: v0.1.0 + specs: + speedy-af (0.1.0) + active-fedora (>= 11.0.0) + activesupport + +GIT + remote: https://github.com/rkallensee/bootstrap-toggle-rails.git + revision: 1eaf2b57b4e2fab387f913ef6833ab735eacb0d4 + specs: + bootstrap-toggle-rails (2.2.1.0) + GEM remote: https://rubygems.org/ specs: @@ -159,8 +181,9 @@ GEM active_annotations (0.2.2) json-ld rdf-vocab (~> 2.1.0) - active_encode (0.1.1) - activesupport + active_elastic_job (1.7.0) + aws-sdk (~> 2) + rails (>= 4.2) active_fedora-datastreams (0.1.0) active-fedora (>= 11.0.0.pre, < 12) nom-xml (>= 0.5.1) @@ -200,16 +223,19 @@ GEM api-pagination (4.5.1) arel (6.0.4) ast (2.3.0) - autoparse (0.3.3) - addressable (>= 2.3.1) - extlib (>= 0.9.15) - multi_json (>= 1.0.0) autoprefixer-rails (6.5.4) execjs - babel-source (5.8.35) - babel-transpiler (0.7.0) - babel-source (>= 4.0, < 6) - execjs (~> 2.0) + aws-sdk (2.9.5) + aws-sdk-resources (= 2.9.5) + aws-sdk-core (2.9.5) + aws-sigv4 (~> 1.0) + jmespath (~> 1.0) + aws-sdk-rails (1.0.1) + aws-sdk-resources (~> 2) + railties (>= 3) + aws-sdk-resources (2.9.5) + aws-sdk-core (= 2.9.5) + aws-sigv4 (1.0.0) bcrypt (3.1.11) bcrypt-ruby (3.1.5) bcrypt (>= 3.1.3) @@ -231,18 +257,20 @@ GEM bootstrap-sass (3.3.7) autoprefixer-rails (>= 5.2.1) sass (>= 3.3.4) - bootstrap-toggle-rails (2.2.1.0) bootstrap_form (2.5.2) - browse-everything (0.10.5) + browse-everything (0.13.0) + addressable (~> 2.5) + aws-sdk bootstrap-sass dropbox-sdk (>= 1.6.2) font-awesome-rails - google-api-client (~> 0.8.6) + google-api-client (~> 0.9) google_drive httparty rails (>= 3.1) ruby-box sass-rails + signet skydrive builder (3.2.3) byebug (9.0.6) @@ -277,9 +305,7 @@ GEM rack-test (>= 0.5.4) xpath (~> 2.0) chronic (0.10.2) - cliver (0.3.2) - codeclimate-test-reporter (1.0.8) - simplecov (<= 0.13) + cloudfront-signer (3.0.1) coderay (1.1.1) coffee-rails (4.1.1) coffee-script (>= 2.2.0) @@ -289,11 +315,23 @@ GEM execjs coffee-script-source (1.12.2) concurrent-ruby (1.0.5) + config (1.4.0) + activesupport (>= 3.0) + deep_merge (~> 1.1.1) + coveralls (0.8.17) + json (>= 1.8, < 3) + simplecov (~> 0.12.0) + term-ansicolor (~> 1.3) + thor (~> 0.19.1) + tins (~> 1.6) crack (0.4.3) safe_yaml (~> 1.0.0) daemons (1.2.4) database_cleaner (1.5.3) debug_inspector (0.0.2) + declarative (0.0.9) + declarative-option (0.1.0) + deep_merge (1.1.1) deprecation (1.0.0) activesupport devise 
(4.2.0) @@ -325,7 +363,6 @@ GEM nokogiri (>= 1.4.3) erubis (2.7.0) execjs (2.7.0) - extlib (0.9.16) factory_girl (4.8.0) activesupport (>= 3.0.0) factory_girl_rails (4.8.0) @@ -345,24 +382,21 @@ GEM flamegraph (0.9.5) font-awesome-rails (4.7.0.1) railties (>= 3.2, < 5.1) - globalid (0.4.0) - activesupport (>= 4.2.0) - google-api-client (0.8.7) - activesupport (>= 3.2, < 5.0) + globalid (0.3.7) + activesupport (>= 4.1.0) + google-api-client (0.10.3) addressable (~> 2.3) - autoparse (~> 0.3) - extlib (~> 0.9) - faraday (~> 0.9) - googleauth (~> 0.3) - launchy (~> 2.4) - multi_json (~> 1.10) - retriable (~> 1.4) - signet (~> 0.6) - google_drive (1.0.6) - google-api-client (>= 0.7.0, < 0.9) - nokogiri (>= 1.4.4, != 1.5.2, != 1.5.1) - oauth (>= 0.3.6) - oauth2 (>= 0.5.0) + googleauth (~> 0.5) + httpclient (~> 2.7) + hurley (~> 0.1) + memoist (~> 0.11) + mime-types (>= 1.6) + representable (~> 3.0) + retriable (>= 2.0, < 4.0) + google_drive (2.1.2) + google-api-client (>= 0.9.0, < 1.0.0) + googleauth (>= 0.5.0, < 1.0.0) + nokogiri (>= 1.5.3, < 2.0.0) googleauth (0.5.1) faraday (~> 0.9) jwt (~> 1.4) @@ -389,6 +423,8 @@ GEM http_logger (0.5.1) httparty (0.14.0) multi_xml (>= 0.5.2) + httpclient (2.8.3) + hurley (0.2) hydra-access-controls (10.3.4) active-fedora (>= 10.0.0, < 12) activesupport (>= 4, < 6) @@ -411,7 +447,7 @@ GEM jbuilder (2.6.1) activesupport (>= 3.0.0, < 5.1) multi_json (~> 1.2) - jquery-datatables (1.10.15) + jmespath (1.3.1) jquery-rails (4.2.1) rails-dom-testing (>= 1, < 3) railties (>= 4.2.0) @@ -440,7 +476,7 @@ GEM slop link_header (0.0.8) little-plugger (1.1.4) - logging (2.1.0) + logging (2.2.2) little-plugger (~> 1.1) multi_json (~> 1.10) lograge (0.4.1) @@ -451,6 +487,9 @@ GEM nokogiri (>= 1.5.9) mail (2.6.6) mime-types (>= 1.16, < 4) + marc (1.0.0) + scrub_rb (>= 1.0.1, < 2) + unf media-element-logo-plugin (0.0.2) rails (>= 3.2.3) mediashelf-loggable (0.4.10) @@ -481,8 +520,8 @@ GEM i18n nokogiri oauth (0.5.1) - oauth2 (1.2.0) - faraday (>= 0.8, < 0.10) + oauth2 (1.3.1) + faraday (>= 0.8, < 0.12) jwt (~> 1.0) multi_json (~> 1.3) multi_xml (~> 0.5) @@ -498,6 +537,9 @@ GEM omniauth-identity (1.1.1) bcrypt-ruby (~> 3.0) omniauth (~> 1.0) + omniauth-openam (1.0.0) + faraday + omniauth (~> 1.0) orm_adapter (0.5.0) os (0.9.6) parallel (1.10.0) @@ -583,6 +625,10 @@ GEM redis (3.3.2) redis-namespace (1.5.2) redis (~> 3.0, >= 3.0.4) + representable (3.0.4) + declarative (< 0.1.0) + declarative-option (< 0.2.0) + uber (< 0.2.0) responders (2.3.0) railties (>= 4.2.0, < 5.1) resque (1.26.0) @@ -600,7 +646,7 @@ GEM http-cookie (>= 1.0.2, < 2.0) mime-types (>= 1.16, < 4.0) netrc (~> 0.8) - retriable (1.4.1) + retriable (3.0.2) roo (2.5.1) nokogiri (~> 1) rubyzip (~> 1.1, < 2.0.0) @@ -657,6 +703,7 @@ GEM sprockets (>= 2.8, < 4.0) sprockets-rails (>= 2.0, < 4.0) tilt (>= 1.1, < 3) + scrub_rb (1.0.1) sdoc (0.4.2) json (~> 1.7, >= 1.7.7) rdoc (~> 4.0) @@ -750,8 +797,10 @@ GEM xml-simple (1.1.5) xpath (2.0.0) nokogiri (~> 1.3) - xray-rails (0.3.1) - rails (>= 3.1.0) + zk (1.9.6) + zookeeper (~> 1.4.0) + zookeeper (1.4.11) + zoom (0.5.0) PLATFORMS ruby @@ -760,7 +809,8 @@ DEPENDENCIES about_page! active-fedora (~> 11.2) active_annotations (~> 0.2.2) - active_encode (~> 0.1.1) + active_elastic_job (~> 1.7) + active_encode! active_fedora-datastreams active_fedora-noid (~> 2.0.2) activerecord-session_store @@ -768,10 +818,12 @@ DEPENDENCIES api-pagination avalon-about! avalon-workflow! + aws-sdk + aws-sdk-rails blacklight (~> 6.6) - bootstrap-toggle-rails + bootstrap-toggle-rails! 
bootstrap_form - browse-everything (~> 0.10.5) + browse-everything (~> 0.13.0) byebug capistrano (~> 3.6) capistrano-passenger @@ -779,8 +831,10 @@ DEPENDENCIES capistrano-resque capistrano-rvm capybara - codeclimate-test-reporter + cloudfront-signer coffee-rails (~> 4.1.0) + config + coveralls database_cleaner devise dotenv-rails @@ -802,6 +856,7 @@ DEPENDENCIES jquery-rails jquery-ui-rails lograge + marc media-element-logo-plugin media_element_add_to_playlist! media_element_thumbnail_selector! @@ -816,6 +871,7 @@ DEPENDENCIES net-ldap omniauth-identity omniauth-lti! + omniauth-openam parallel pg poltergeist @@ -849,8 +905,8 @@ DEPENDENCIES webmock whenever! with_locking - xray-rails + zk + zoom BUNDLED WITH - 1.15.3 - + 1.14.6 diff --git a/app/assets/javascripts/direct_upload.js.coffee b/app/assets/javascripts/direct_upload.js.coffee new file mode 100644 index 0000000000..e7891f3d4a --- /dev/null +++ b/app/assets/javascripts/direct_upload.js.coffee @@ -0,0 +1,52 @@ +$ -> + $('.directupload').find("input:file").each (i, elem)-> + file_input = $(elem) + form = $(file_input.parents('form:first')) + submit_button = form.find('input[type="submit"], *[data-trigger="submit"]') + submit_button.on 'click', -> + $('.directupload input:file').fileupload 'send', + files: $('.directupload input:file').prop('files') + return false + progress_bar = $("
"); + bar_container = $("
").append(progress_bar); + $('div.fileinput').after(bar_container) + file_input.fileupload + fileInput: file_input + url: form.data('url') + type: 'POST' + autoUpload: false + formData: form.data('form-data') + paramName: 'file' + dataType: 'XML' + replaceFileInput: false + progressall: (e, data)-> + progress = parseInt(data.loaded / data.total * 100, 10) + progress_bar.css('width', "#{progress}%") + start: (e)-> + submit_button.prop('disabled', true) + progress_bar. + css('background', 'green'). + css('display', 'block'). + css('width', '0%'). + text("Loading...") + done: (e, data)-> + submit_button.prop('disabled', false) + progress_bar.text("Uploading done") + + # extract key and generate URL from response + key = $(data.jqXHR.responseXML).find("Key").text(); + bucket = $(data.jqXHR.responseXML).find("Bucket").text(); + url = "s3://#{bucket}/#{key}" + + # create hidden field + input = $ "", + type: 'hidden' + name: 'selected_files[0][url]' + value: url + file_input.replaceWith(input) + form.submit() + fail: (e, data)-> + submit_button.prop('disabled', false) + progress_bar. + css("background", "red"). + text("Failed") diff --git a/app/assets/javascripts/refresh_token.js.coffee b/app/assets/javascripts/refresh_token.js.coffee index 3e44909237..fb29bfaa79 100644 --- a/app/assets/javascripts/refresh_token.js.coffee +++ b/app/assets/javascripts/refresh_token.js.coffee @@ -1,11 +1,11 @@ # Copyright 2011-2017, The Trustees of Indiana University and Northwestern # University. Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. -# +# # You may obtain a copy of the License at -# +# # http://www.apache.org/licenses/LICENSE-2.0 -# +# # Unless required by applicable law or agreed to in writing, software distributed # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR # CONDITIONS OF ANY KIND, either express or implied. See the License for the @@ -14,10 +14,11 @@ $ -> refreshToken = -> - mount_point = $('body').data('mountpoint') token = currentPlayer.media.src.split('?')[1] - $.get("#{mount_point}authorize.txt?#{token}") - .done -> console.log("Token refreshed") - .fail -> console.error("Token refresh failed") + if token.match(/^token=/) + mount_point = $('body').data('mountpoint') + $.get("#{mount_point}authorize.txt?#{token}") + .done -> console.log("Token refreshed") + .fail -> console.error("Token refresh failed") setInterval(refreshToken, 5*60*1000) diff --git a/app/controllers/admin/groups_controller.rb b/app/controllers/admin/groups_controller.rb index 2cbce9fe85..3c93191719 100644 --- a/app/controllers/admin/groups_controller.rb +++ b/app/controllers/admin/groups_controller.rb @@ -1,11 +1,11 @@ # Copyright 2011-2017, The Trustees of Indiana University and Northwestern # University. Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. -# +# # You may obtain a copy of the License at -# +# # http://www.apache.org/licenses/LICENSE-2.0 -# +# # Unless required by applicable law or agreed to in writing, software distributed # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR # CONDITIONS OF ANY KIND, either express or implied. 
See the License for the @@ -36,7 +36,7 @@ def auth end def index - default_groups = Avalon::Configuration.lookup('groups.system_groups') + default_groups = Settings.groups.system_groups @default_groups = [] @groups = [] diff --git a/app/controllers/comments_controller.rb b/app/controllers/comments_controller.rb index cf2ab30214..e30d975f0c 100644 --- a/app/controllers/comments_controller.rb +++ b/app/controllers/comments_controller.rb @@ -1,11 +1,11 @@ # Copyright 2011-2017, The Trustees of Indiana University and Northwestern # University. Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. -# +# # You may obtain a copy of the License at -# +# # http://www.apache.org/licenses/LICENSE-2.0 -# +# # Unless required by applicable law or agreed to in writing, software distributed # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR # CONDITIONS OF ANY KIND, either express or implied. See the License for the @@ -35,8 +35,8 @@ def create rescue Errno::ECONNRESET => e logger.warn "The mail server does not appear to be responding \n #{e}" - flash[:notice] = "The message could not be sent in a timely fashion. Contact us at #{Avalon::Configuration.lookup('email.support')} to report the problem." - render action: "index" + flash[:notice] = "The message could not be sent in a timely fashion. Contact us at #{Settings.email.support} to report the problem." + render action: "index" end else flash[:error] = "There were problems submitting your comment. Please correct the errors and try again." diff --git a/app/controllers/master_files_controller.rb b/app/controllers/master_files_controller.rb index b5f5adce73..b4a1c35638 100644 --- a/app/controllers/master_files_controller.rb +++ b/app/controllers/master_files_controller.rb @@ -14,6 +14,8 @@ # require 'avalon/controller/controller_behavior' +include SecurityHelper + class MasterFilesController < ApplicationController # include Avalon::Controller::ControllerBehavior @@ -44,10 +46,10 @@ def show end def embed - @master_file = MasterFile.find(params[:id]) - if can? :read, @master_file - @token = @master_file.nil? ? "" : StreamToken.find_or_create_session_token(session, @master_file.id) - @stream_info = @master_file.stream_details(@token, default_url_options[:host]) + @masterfile = MasterFile.find(params[:id]) + if can? :read, @masterfile.mediaobject + add_stream_cookies(id: @masterfile.id) + @stream_info = secure_streams(@masterfile.stream_details) end respond_to do |format| format.html do @@ -74,7 +76,7 @@ def oembed hash = { "version" => "1.0", "type" => mf.is_video? ? "video" : "rich", - "provider_name" => Avalon::Configuration.lookup('name') || 'Avalon Media System', + "provider_name" => Settings.name || 'Avalon Media System', "provider_url" => request.base_url, "width" => width, "height" => height, @@ -185,70 +187,13 @@ def create return end - format_errors = "The file was not recognized as audio or video - " - - if params.has_key?(:Filedata) and params.has_key?(:original) - @master_files = [] - params[:Filedata].each do |file| - if (file.size > MasterFile::MAXIMUM_UPLOAD_SIZE) - # Use the errors key to signal that it should be a red notice box rather - # than the default - flash[:error] = "The file you have uploaded is too large" - return redirect_to :back - end - - unless file.original_filename.valid_encoding? && file.original_filename.ascii_only? - flash[:error] = 'The file you have uploaded has non-ASCII characters in its name.' 
- return redirect_to :back - end - - master_file = MasterFile.new() - master_file.setContent(file) - master_file.set_workflow(params[:workflow]) - # master_file.media_object = media_object - # master_file.save! - - if 'Unknown' == master_file.file_format - flash[:error] = [] if flash[:error].nil? - error = format_errors - error << file.original_filename - error << " (" << file.content_type << ")" - flash[:error].push error - next - else - flash[:notice] = create_upload_notice(master_file.file_format) - end - - master_file.media_object = media_object - unless master_file.save - flash[:error] = "There was a problem storing the file" - else - media_object.save - master_file.process - @master_files << master_file - end - - end - elsif params.has_key?(:selected_files) - @master_files = [] - params[:selected_files].each_value do |entry| - file_path = URI.decode(URI.parse(URI.encode(entry[:url])).path) - master_file = MasterFile.new - master_file.setContent(File.open(file_path, 'rb')) - master_file.set_workflow(params[:workflow]) - master_file.save( validate: false ) - master_file.media_object = media_object - - unless master_file.save - flash[:error] = "There was a problem storing the file" - else - media_object.save - master_file.process - @master_files << master_file - end - end - else - flash[:notice] = "You must specify a file to upload" + begin + result = MasterFileBuilder.build(media_object, params) + @master_files = result[:master_files] + [:notice, :error].each { |type| flash[type] = result[:flash][type] } + rescue MasterFileBuilder::BuildError => err + flash[:error] = err.message + return redirect_to :back end respond_to do |format| @@ -312,18 +257,6 @@ def get_frame end protected - def create_upload_notice(format) - case format - when /^Sound$/ - text = 'The uploaded content appears to be audio'; - when /^Moving image$/ - text = 'The uploaded content appears to be video'; - else - text = 'The uploaded content could not be identified'; - end - return text - end - def ensure_readable_filedata if params[:Filedata].present? params[:Filedata].each do |file| diff --git a/app/controllers/media_objects_controller.rb b/app/controllers/media_objects_controller.rb index 69c2c3bcb5..eaccef68f0 100644 --- a/app/controllers/media_objects_controller.rb +++ b/app/controllers/media_objects_controller.rb @@ -18,6 +18,7 @@ class MediaObjectsController < ApplicationController include Avalon::Workflow::WorkflowControllerBehavior include Avalon::Controller::ControllerBehavior include ConditionalPartials + include SecurityHelper before_filter :authenticate_user!, except: [:show, :set_session_quality, :show_stream_details] before_filter :authenticate_api!, only: [:show], if: proc{|c| request.format.json?} @@ -435,7 +436,8 @@ def set_player_token def load_current_stream set_active_file set_player_token - @currentStreamInfo = @currentStream.nil? ? {} : @currentStream.stream_details(@token, default_url_options[:host]) + add_stream_cookies(id: @currentStream.id) unless @currentStream.nil? + @currentStreamInfo = @currentStream.nil? ? 
{} : secure_streams(@currentStream.stream_details) @currentStreamInfo['t'] = view_context.parse_media_fragment(params[:t]) # add MediaFragment from params end diff --git a/app/controllers/users/omniauth_callbacks_controller.rb b/app/controllers/users/omniauth_callbacks_controller.rb index e9e07490cd..6669197c87 100644 --- a/app/controllers/users/omniauth_callbacks_controller.rb +++ b/app/controllers/users/omniauth_callbacks_controller.rb @@ -1,11 +1,11 @@ # Copyright 2011-2017, The Trustees of Indiana University and Northwestern # University. Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. -# +# # You may obtain a copy of the License at -# +# # http://www.apache.org/licenses/LICENSE-2.0 -# +# # Unless required by applicable law or agreed to in writing, software distributed # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR # CONDITIONS OF ANY KIND, either express or implied. See the License for the @@ -46,6 +46,7 @@ def action_missing(sym, *args, &block) def find_user(auth_type) auth_type.downcase! find_method = "find_for_#{auth_type}".to_sym + find_method = :find_for_generic unless User.respond_to?(find_method) logger.debug "#{auth_type} :: #{current_user.inspect}" @user = User.send(find_method,request.env["omniauth.auth"], current_user) if @user.persisted? @@ -71,7 +72,7 @@ def find_user(auth_type) redirect_to objects_path(request['target_id'], params.slice(*params_whitelist)) elsif params[:url] redirect_to params[:url] - elsif session[:previous_url] + elsif session[:previous_url] redirect_to session.delete :previous_url elsif auth_type == 'lti' && user_session[:virtual_groups].present? redirect_to search_catalog_path('f[read_access_virtual_group_ssim][]' => user_session[:lti_group]) @@ -83,7 +84,7 @@ def find_user(auth_type) protected :find_user rescue_from Avalon::MissingUserId do |exception| - support_email = Avalon::Configuration.lookup('email.support') + support_email = Settings.email.support notice_text = I18n.t('errors.lti_auth_error') % [support_email, support_email] redirect_to root_path, flash: { error: notice_text.html_safe } end diff --git a/app/helpers/application_helper.rb b/app/helpers/application_helper.rb index 219125cd4c..724ac09482 100644 --- a/app/helpers/application_helper.rb +++ b/app/helpers/application_helper.rb @@ -15,7 +15,7 @@ module ApplicationHelper def application_name - Avalon::Configuration.lookup('name') || 'Avalon Media System' + Settings.name || 'Avalon Media System' end def release_text diff --git a/app/helpers/media_objects_helper.rb b/app/helpers/media_objects_helper.rb index 124f83d312..ddecd0caf1 100644 --- a/app/helpers/media_objects_helper.rb +++ b/app/helpers/media_objects_helper.rb @@ -47,7 +47,7 @@ def form_id_for_step(step) def dropbox_url collection ic = Iconv.new('UTF-8//IGNORE', 'UTF-8') path = URI::Parser.new.escape(collection.dropbox_directory_name || "", %r{[/\\%& #]}) - url = File.join(Avalon::Configuration.lookup('dropbox.upload_uri'), path) + url = File.join(Settings.dropbox.upload_uri, path) ic.iconv(url) end @@ -103,7 +103,7 @@ def current_quality stream_info available_qualities += Array(stream_info[:stream_hls]).collect {|s| s[:quality]} available_qualities.uniq! quality ||= session[:quality] if session['quality'].present? 
&& available_qualities.include?(session[:quality]) - quality ||= Avalon::Configuration.lookup('streaming.default_quality') if available_qualities.include?(Avalon::Configuration.lookup('streaming.default_quality')) + quality ||= Settings.streaming.default_quality if available_qualities.include?(Settings.streaming.default_quality) quality ||= available_qualities.first quality end diff --git a/app/helpers/security_helper.rb b/app/helpers/security_helper.rb new file mode 100644 index 0000000000..319e3f8ac5 --- /dev/null +++ b/app/helpers/security_helper.rb @@ -0,0 +1,16 @@ +module SecurityHelper + def add_stream_cookies(stream_info) + SecurityHandler.secure_cookies(target: stream_info[:id], request_host: request.server_name).each_pair do |name, value| + cookies[name] = value + end + end + + def secure_streams(stream_info) + [:stream_flash, :stream_hls].each do |protocol| + stream_info[protocol].each do |quality| + quality[:url] = SecurityHandler.secure_url(quality[:url], session: session, target: stream_info[:id], protocol: protocol) + end + end + stream_info + end +end diff --git a/app/helpers/upload_form_helper.rb b/app/helpers/upload_form_helper.rb new file mode 100644 index 0000000000..e071c24f28 --- /dev/null +++ b/app/helpers/upload_form_helper.rb @@ -0,0 +1,25 @@ +module UploadFormHelper + def direct_upload? + Settings.encoding.engine_adapter.to_sym == :elastic_transcoder + end + + def upload_form_classes + result = %w(uploader-form form-horizontal step) + result << 'directupload' if direct_upload? + result.join(' ') + end + + def upload_form_data + if direct_upload? + bucket = Aws::S3::Bucket.new(name: Settings.encoding.masterfile_bucket) + direct_post = bucket.presigned_post(key: "uploads/#{SecureRandom.uuid}/${filename}", success_action_status: '201') + { + 'form-data' => (direct_post.fields), + 'url' => direct_post.url, + 'host' => URI.parse(direct_post.url).host + } + else + {} + end + end +end diff --git a/app/jobs/active_encode_job.rb b/app/jobs/active_encode_job.rb index daa17a7153..b7c4ff78e2 100644 --- a/app/jobs/active_encode_job.rb +++ b/app/jobs/active_encode_job.rb @@ -1,11 +1,11 @@ # Copyright 2011-2017, The Trustees of Indiana University and Northwestern # University. Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. -# +# # You may obtain a copy of the License at -# +# # http://www.apache.org/licenses/LICENSE-2.0 -# +# # Unless required by applicable law or agreed to in writing, software distributed # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR # CONDITIONS OF ANY KIND, either express or implied. See the License for the @@ -29,7 +29,7 @@ class Create < ActiveJob::Base queue_as :active_encode_create def perform(master_file_id, input, options) mf = MasterFile.find(master_file_id) - encode = mf.encoder_class.new(input, options) + encode = mf.encoder_class.new(input, options.merge({output_key_prefix: "#{mf.id}/"})) unless encode.created? Rails.logger.info "Creating! #{encode.inspect} for MasterFile #{master_file_id}" encode_job = encode.create! diff --git a/app/jobs/batch_ingest_job.rb b/app/jobs/batch_ingest_job.rb new file mode 100644 index 0000000000..979541206d --- /dev/null +++ b/app/jobs/batch_ingest_job.rb @@ -0,0 +1,38 @@ +# Copyright 2011-2017, The Trustees of Indiana University and Northwestern +# University. Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software distributed +# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +# CONDITIONS OF ANY KIND, either express or implied. See the License for the +# specific language governing permissions and limitations under the License. +# --- END LICENSE_HEADER BLOCK --- + +class BatchIngestJob < ActiveJob::Base + queue_as :batch_ingest + def perform(filename) + return unless Avalon::Batch::Manifest.is_spreadsheet?(filename) && Avalon::Batch::S3Manifest.status(filename).blank? + + uri = Addressable::URI.parse(filename) + dropbox_directory = uri.route_from(Addressable::URI.parse(Settings.dropbox.path)).to_s.split(/\//).first + collection = Admin::Collection.where(dropbox_directory_name_ssi: dropbox_directory).first + return if collection.nil? + + ingest = Avalon::Batch::Ingest.new(collection) + begin + package = Avalon::Batch::Package.new(filename, collection) + ingest.ingest_package(package) + rescue Exception => ex + begin + package.manifest.error! + ensure + Rails.logger.error("#{ex.class.name}: #{ex.message}") + IngestBatchMailer.batch_ingest_validation_error( package, ["#{ex.class.name}: #{ex.message}"] ).deliver_now + end + end + end +end diff --git a/app/jobs/s3_split_job.rb b/app/jobs/s3_split_job.rb new file mode 100644 index 0000000000..3d0a76a7c6 --- /dev/null +++ b/app/jobs/s3_split_job.rb @@ -0,0 +1,44 @@ +# Copyright 2011-2017, The Trustees of Indiana University and Northwestern +# University. Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software distributed +# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +# CONDITIONS OF ANY KIND, either express or implied. See the License for the +# specific language governing permissions and limitations under the License. 
+# --- END LICENSE_HEADER BLOCK --- + +class S3SplitJob < ActiveJob::Base + queue_as :s3_split + def perform(file) + ffmpeg = Settings.ffmpeg.path + input = FileLocator.new(file).location + s3obj = FileLocator::S3File.new(file).object + bucket = s3obj.bucket + path = File.dirname(s3obj.key) + base = File.basename(s3obj.key,'.*') + Dir.mktmpdir do |dir| + cmd = [ + ffmpeg, + '-i', input, + '-codec','copy', + '-map','0', + '-bsf:v','h264_mp4toannexb', + '-f','segment', + '-segment_format', 'mpegts', + '-segment_list', File.join(dir, "#{base}.m3u8"), + '-segment_time', '10', File.join(dir, "#{base}-%03d.ts") + ] + if Kernel.system(*cmd) + segment_files = Dir[File.join(dir,'*')] + segment_files.each do |seg| + File.open(seg,'r') { |io| bucket.put_object(key: File.join(path,'segments',File.basename(seg)), body: io) } + end + end + end + end +end diff --git a/app/mailers/comments_mailer.rb b/app/mailers/comments_mailer.rb index ce5c75c177..58902bec4b 100644 --- a/app/mailers/comments_mailer.rb +++ b/app/mailers/comments_mailer.rb @@ -13,7 +13,7 @@ # --- END LICENSE_HEADER BLOCK --- class CommentsMailer < ActionMailer::Base - default :to => Avalon::Configuration.lookup('email.comments') + default :to => Settings.email.comments def contact_email(comment) @comment = OpenStruct.new(comment) diff --git a/app/mailers/ingest_batch_mailer.rb b/app/mailers/ingest_batch_mailer.rb index 474a1544db..752b1645fb 100644 --- a/app/mailers/ingest_batch_mailer.rb +++ b/app/mailers/ingest_batch_mailer.rb @@ -18,10 +18,10 @@ class IngestBatchMailer < ActionMailer::Base def status_email( ingest_batch_id ) @ingest_batch = IngestBatch.find(ingest_batch_id) @media_objects = @ingest_batch.media_objects - @email = @ingest_batch.email || Avalon::Configuration.lookup('email.notification') + @email = @ingest_batch.email || Settings.email.notification mail( to: @email, - from: Avalon::Configuration.lookup('email.notification'), + from: Settings.email.notification, subject: "Batch ingest status for: #{@ingest_batch.name}" ) end @@ -29,20 +29,20 @@ def status_email( ingest_batch_id ) def batch_ingest_validation_error( package, base_errors ) @package = package @base_errors = base_errors - email = package.manifest.email || Avalon::Configuration.lookup('email.notification') + email = package.manifest.email || Settings.email.notification mail( to: email, - from: Avalon::Configuration.lookup('email.notification'), + from: Settings.email.notification, subject: "Failed batch ingest processing errors for: #{package.manifest.name}", ) end def batch_ingest_validation_success( package ) @package = package - email = package.manifest.email || Avalon::Configuration.lookup('email.notification') + email = package.manifest.email || Settings.email.notification mail( to: email, - from: Avalon::Configuration.lookup('email.notification'), + from: Settings.email.notification, subject: "Successfully processed batch ingest: #{package.manifest.name}", ) end diff --git a/app/mailers/notifications_mailer.rb b/app/mailers/notifications_mailer.rb index 971eb2ce80..a8eb28c6c5 100644 --- a/app/mailers/notifications_mailer.rb +++ b/app/mailers/notifications_mailer.rb @@ -13,7 +13,7 @@ # --- END LICENSE_HEADER BLOCK --- class NotificationsMailer < ActionMailer::Base - default from: Avalon::Configuration.lookup('email.notification') + default from: Settings.email.notification def new_collection( args = {} ) @collection = Admin::Collection.find(args.delete(:collection_id)) diff --git a/app/models/admin/collection.rb b/app/models/admin/collection.rb index 
51e509a32b..b4a8d7cd9e 100644 --- a/app/models/admin/collection.rb +++ b/app/models/admin/collection.rb @@ -1,11 +1,11 @@ # Copyright 2011-2017, The Trustees of Indiana University and Northwestern # University. Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. -# +# # You may obtain a copy of the License at -# +# # http://www.apache.org/licenses/LICENSE-2.0 -# +# # Unless required by applicable law or agreed to in writing, software distributed # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR # CONDITIONS OF ANY KIND, either express or implied. See the License for the @@ -186,7 +186,7 @@ def dropbox end def dropbox_absolute_path( name = nil ) - File.join(Avalon::Configuration.lookup('dropbox.path'), name || dropbox_directory_name) + File.join(Settings.dropbox.path, name || dropbox_directory_name) end def media_objects_to_json @@ -245,18 +245,44 @@ def add_edit_user(name) end def create_dropbox_directory! + if Settings.dropbox.path =~ %r(^s3://) + create_s3_dropbox_directory! + else + create_fs_dropbox_directory! + end + end + + def calculate_dropbox_directory_name name = self.dropbox_directory_name if name.blank? name = Avalon::Sanitizer.sanitize(self.name) iter = 2 original_name = name.dup.freeze - - while File.exist? dropbox_absolute_path(name) + while yield(name) name = "#{original_name}_#{iter}" iter += 1 end end + name + end + + def create_s3_dropbox_directory! + base_uri = Addressable::URI.parse(Settings.dropbox.path) + name = calculate_dropbox_directory_name do |n| + obj = FileLocator::S3File.new(base_uri.join(n).to_s + '/').object + obj.exists? + end + absolute_path = base_uri.join(name).to_s + '/' + obj = FileLocator::S3File.new(absolute_path).object + Aws::S3::Client.new.put_object(bucket: obj.bucket_name, key: obj.key) + self.dropbox_directory_name = name + end + + def create_fs_dropbox_directory! + name = calculate_dropbox_directory_name do |n| + File.exist? dropbox_absolute_path(n) + end absolute_path = dropbox_absolute_path(name) diff --git a/app/models/admin/group.rb b/app/models/admin/group.rb index f99e65e02a..93c152447c 100644 --- a/app/models/admin/group.rb +++ b/app/models/admin/group.rb @@ -31,7 +31,7 @@ class Group def self.non_system_groups groups = all - if system_groups = Avalon::Configuration.lookup('groups.system_groups') + if system_groups = Settings.groups.system_groups groups.reject! { |g| system_groups.include? g.name } end groups @@ -213,7 +213,7 @@ def saved= val # group. This is a workaround for the bug that breaks the system whenever # a system group is renamed. def self.name_is_static? group_name - Avalon::Configuration.lookup('groups.system_groups').include? group_name + Settings.groups.system_groups.include? group_name end end end diff --git a/app/models/concerns/derivative_behavior.rb b/app/models/concerns/derivative_behavior.rb index 3efb0bda5c..7440bdc096 100644 --- a/app/models/concerns/derivative_behavior.rb +++ b/app/models/concerns/derivative_behavior.rb @@ -1,11 +1,11 @@ # Copyright 2011-2017, The Trustees of Indiana University and Northwestern # University. Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
-# +# # You may obtain a copy of the License at -# +# # http://www.apache.org/licenses/LICENSE-2.0 -# +# # Unless required by applicable law or agreed to in writing, software distributed # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR # CONDITIONS OF ANY KIND, either express or implied. See the License for the @@ -17,11 +17,6 @@ def absolute_location derivativeFile end - def tokenized_url(token, mobile = false) - uri = streaming_url(mobile) - "#{uri}?token=#{token}".html_safe - end - def streaming_url(is_mobile = false) is_mobile ? hls_url : location_url end diff --git a/app/models/concerns/master_file_behavior.rb b/app/models/concerns/master_file_behavior.rb index 15db90f0c6..a37f77b4eb 100644 --- a/app/models/concerns/master_file_behavior.rb +++ b/app/models/concerns/master_file_behavior.rb @@ -29,7 +29,7 @@ def succeeded? status?('COMPLETED') end - def stream_details(token,host=nil) + def stream_details flash, hls = [], [] common, poster_path, captions_path, captions_format = nil, nil, nil, nil, nil, nil @@ -38,8 +38,8 @@ def stream_details(token,host=nil) common = { quality: d.quality, mimetype: d.mime_type, format: d.format } - flash << common.merge(url: Avalon::Configuration.rehost(d.tokenized_url(token, false),host)) - hls << common.merge(url: Avalon::Configuration.rehost(d.tokenized_url(token, true),host)) + flash << common.merge(url: d.streaming_url(false)) + hls << common.merge(url: d.streaming_url(true)) end # Sorts the streams in order of quality, note: Hash order only works in Ruby 1.9 or later diff --git a/app/models/derivative.rb b/app/models/derivative.rb index 61e48cb457..969adc547d 100644 --- a/app/models/derivative.rb +++ b/app/models/derivative.rb @@ -1,11 +1,11 @@ # Copyright 2011-2017, The Trustees of Indiana University and Northwestern # University. Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. -# +# # You may obtain a copy of the License at -# +# # http://www.apache.org/licenses/LICENSE-2.0 -# +# # Unless required by applicable law or agreed to in writing, software distributed # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR # CONDITIONS OF ANY KIND, either express or implied. See the License for the @@ -95,7 +95,7 @@ def self.from_output(dists, managed = true) output = dists.first || hls_output derivative = Derivative.new - derivative.managed = managed + derivative.managed = output.key?(:managed) ? output[:managed] : managed derivative.track_id = output[:id] derivative.duration = output[:duration] derivative.mime_type = output[:mime_type] diff --git a/app/models/elastic_transcoder_job.rb b/app/models/elastic_transcoder_job.rb new file mode 100644 index 0000000000..898c133c74 --- /dev/null +++ b/app/models/elastic_transcoder_job.rb @@ -0,0 +1,226 @@ +class ElasticTranscoderJob < ActiveEncode::Base + before_create :set_up_options + before_create :copy_to_input_bucket + + JOB_STATES = { + "Submitted" => :running, "Progressing" => :running, "Canceled" => :cancelled, + "Error" => :failed, "Complete" => :completed + } + + JOB_COMPLETION = { + "Submitted" => 10, "Progressing" => 50, "Complete" => 100 + } + + def self.find(id) + job = Aws::ElasticTranscoder::Client.new.read_job(id: id)&.job + return nil if job.nil? 
+ encode = self.new(job.input, {}) + encode.populate(job) + end + + def set_up_options + file_name = File.basename(Addressable::URI.parse(input).path,'.*').gsub(URI::UNSAFE,'_') + outputs = { + fullaudio: { + hls_medium: { key: "quality-medium/hls/#{file_name}", preset_id: find_or_create_preset('ts',:audio,:medium).id, segment_duration: '2' }, + hls_high: { key: "quality-high/hls/#{file_name}", preset_id: find_or_create_preset('ts',:audio,:high).id, segment_duration: '2' }, + aac_medium: { key: "quality-medium/#{file_name}.mp4", preset_id: find_or_create_preset('mp4',:audio,:medium).id }, + aac_high: { key: "quality-high/#{file_name}.mp4", preset_id: find_or_create_preset('mp4',:audio,:high).id } + }, + avalon: { + hls_low: { key: "quality-low/hls/#{file_name}", preset_id: find_or_create_preset('ts',:video,:low).id, segment_duration: '2' }, + hls_medium: { key: "quality-medium/hls/#{file_name}", preset_id: find_or_create_preset('ts',:video,:medium).id, segment_duration: '2' }, + hls_high: { key: "quality-high/hls/#{file_name}", preset_id: find_or_create_preset('ts',:video,:high).id, segment_duration: '2' }, + mp4_low: { key: "quality-low/#{file_name}.mp4", preset_id: find_or_create_preset('mp4',:video,:low).id }, + mp4_medium: { key: "quality-medium/#{file_name}.mp4", preset_id: find_or_create_preset('mp4',:video,:medium).id }, + mp4_high: { key: "quality-high/#{file_name}.mp4", preset_id: find_or_create_preset('mp4',:video,:high).id } + } + } + + self.options[:output_key_prefix] ||= "#{SecureRandom.uuid}/" + self.options.merge!({ + pipeline_id: Settings.encoding.pipeline, + outputs: outputs[self.options[:preset].to_sym].values + }) + end + + def copy_to_input_bucket + case Addressable::URI.parse(input).scheme + when nil,'file' + upload_to_s3 + when 's3' + check_s3_bucket + end + end + + def populate(job) + self.id = job.id + self.state = JOB_STATES[job.status] + self.current_operations = [] + self.percent_complete = (job.outputs.select { |o| o.status == 'Complete' }.length.to_f / job.outputs.length.to_f) * 100 + self.created_at = convert_time(job.timing["submit_time_millis"]) + self.updated_at = convert_time(job.timing["start_time_millis"]) + self.finished_at = convert_time(job.timing["finish_time_millis"]) + + self.output = convert_output(job) + self.errors = job.outputs.select { |o| o.status == "Error" }.collect(&:status_detail).compact + self.tech_metadata = convert_tech_metadata(job.input.detected_properties) + self + end + + def remove_output!(id) + track = output.find { |o| o[:id] == id } + raise "Unknown track: `#{id}'" if track.nil? + s3_object = FileLocator::S3File.new(track[:url]).object + if s3_object.key =~ /\.m3u8$/ + delete_segments(s3_object) + else + s3_object.delete + end + end + + def delete_segments(obj) + raise "Invalid segmented video object" unless obj.key =~ %r(quality-.+/.+\.m3u8$) + bucket = obj.bucket + prefix = obj.key.sub(/\.m3u8$/,'') + next_token = nil + loop do + response = s3client.list_objects_v2(bucket: obj.bucket_name, prefix: prefix, continuation_token: next_token) + response.contents.collect(&:key).each { |key| bucket.object(key).delete } + next_token = response.continuation_token + break if next_token.nil? 
+ end + end + + private + + def etclient + Aws::ElasticTranscoder::Client.new + end + + def s3client + Aws::S3::Client.new + end + + def check_s3_bucket + logger.info("Checking `#{input}'") + s3_object = FileLocator::S3File.new(input).object + if s3_object.bucket_name == source_bucket + logger.info("Already in bucket `#{source_bucket}'") + self.input = s3_object.key + else + self.input = File.join(SecureRandom.uuid,s3_object.key) + logger.info("Copying to `#{source_bucket}/#{input}'") + s3client.copy_object(copy_source: File.join(s3_object.bucket_name,s3_object.key), + bucket: source_bucket, key: self.input) + end + end + + def upload_to_s3 + original_input = input + bucket = Aws::S3::Resource.new(client: s3client).bucket(source_bucket) + filename = FileLocator.new(input).location + self.input = File.join(SecureRandom.uuid,File.basename(filename)) + logger.info("Copying `#{original_input}' to `#{source_bucket}/#{input}'") + obj = bucket.object(input) + obj.upload_file filename + end + + def source_bucket + Settings.encoding.masterfile_bucket + end + + def find_preset(container, format, quality) + container_description = container == 'ts' ? 'hls' : container + result = nil + next_token = nil + loop do + resp = etclient.list_presets page_token: next_token + result = resp.presets.find { |p| p.name == "avalon-#{format}-#{quality}-#{container_description}" } + next_token = resp.next_page_token + break if result.present? || next_token.nil? + end + result + end + + def read_preset(id) + etclient.read_preset(id: id).preset + end + + def create_preset(container, format, quality) + etclient.create_preset(preset_settings(container, format, quality)).preset + end + + def find_or_create_preset(container, format, quality) + find_preset(container, format, quality) || create_preset(container, format, quality) + end + + def preset_settings(container, format, quality) + templates = YAML.load(File.read(File.join(Rails.root,'config','encoding_presets.yml'))) + template = templates[:templates][format.to_sym].deep_dup.deep_merge(templates[:settings][format.to_sym][quality.to_sym]) + container_description = container == 'ts' ? 'hls' : container + template.merge!({ + name: "avalon-#{format}-#{quality}-#{container_description}", + description: "Avalon Media System: #{format}/#{quality}/#{container_description}", + container: container + }) + end + + def convert_time(time_millis) + return nil if time_millis.nil? + Time.at(time_millis / 1000).iso8601 + end + + def convert_bitrate(rate) + return nil if rate.nil? + (rate.to_f * 1024).to_s + end + + def convert_output(job) + pipeline = etclient.read_pipeline(id: job.pipeline_id).pipeline + job.outputs.collect do |output| + preset = read_preset(output.preset_id) + extension = preset.container == 'ts' ? '.m3u8' : '' + convert_tech_metadata(output,preset).merge({ + managed: false, + id: output.id, + label: output.key.split("/", 2).first, + url: "s3://#{pipeline.output_bucket}/#{job.output_key_prefix}#{output.key}#{extension}" + }) + end + end + + def convert_tech_metadata(props, preset=nil) + return {} if props.nil? || props.empty? + metadata_fields = { + file_size: { key: :file_size, method: :itself }, + duration_millis: { key: :duration, method: :to_s }, + frame_rate: { key: :video_framerate, method: :itself }, + segment_duration: { key: :segment_duration, method: :itself }, + width: { key: :width, method: :itself }, + height: { key: :height, method: :itself } + } + + metadata = {} + props.each_pair do |key, value| + next if value.nil? 
+ conversion = metadata_fields[key.to_sym] + next if conversion.nil? + metadata[conversion[:key]] = value.send(conversion[:method]) + end + + unless preset.nil? + audio = preset.audio + video = preset.video + metadata.merge!({ + audio_codec: audio&.codec, + audio_channels: audio&.channels, + audio_bitrate: convert_bitrate(audio&.bit_rate), + video_codec: video&.codec, + video_bitrate: convert_bitrate(video&.bit_rate) + }) + end + + metadata + end +end diff --git a/app/models/file_upload_step.rb b/app/models/file_upload_step.rb index 2b4c13df94..7c17ccd102 100644 --- a/app/models/file_upload_step.rb +++ b/app/models/file_upload_step.rb @@ -37,7 +37,7 @@ def after_step context def execute context deleted_master_files = update_master_files context - context[:notice] = "Several clean up jobs have been sent out. Their statuses can be viewed by your sysadmin at #{ Avalon::Configuration.lookup('matterhorn.cleanup_log') }" unless deleted_master_files.empty? + context[:notice] = "Several clean up jobs have been sent out. Their statuses can be viewed by your sysadmin at #{ Settings.matterhorn.cleanup_log }" unless deleted_master_files.empty? # Reloads media_object.master_files, should use .reload when we update hydra-head media = MediaObject.find(context[:media_object].id) diff --git a/app/models/master_file.rb b/app/models/master_file.rb index 2c72161e76..b82d3eec18 100644 --- a/app/models/master_file.rb +++ b/app/models/master_file.rb @@ -1,11 +1,11 @@ # Copyright 2011-2017, The Trustees of Indiana University and Northwestern # University. Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. -# +# # You may obtain a copy of the License at -# +# # http://www.apache.org/licenses/LICENSE-2.0 -# +# # Unless required by applicable law or agreed to in writing, software distributed # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR # CONDITIONS OF ANY KIND, either express or implied. See the License for the @@ -168,7 +168,15 @@ def setContent(file) file.each_value {|f| f.close unless f.closed? } when ActionDispatch::Http::UploadedFile #Web upload saveOriginal(file, file.original_filename) - else #Batch or dropbox + when URI, Addressable::URI + case file.scheme + when 'file' + saveOriginal(File.open(file.path), File.basename(file.path)) + when 's3' + self.file_location = file.to_s + self.file_size = FileLocator::S3File.new(file).object.size + end + else #Batch saveOriginal(file) end reloadTechnicalMetadata! @@ -238,14 +246,14 @@ def process file=nil #Build hash for single file skip transcoding if !file.is_a?(Hash) && (self.workflow_name == 'avalon-skip-transcoding' || self.workflow_name == 'avalon-skip-transcoding-audio') - file = {'quality-high' => File.new(file_location)} + file = {'quality-high' => FileLocator.new(file_location).attachment} end input = if file.is_a? 
Hash file_dup = file.dup - file_dup.each_pair {|quality, f| file_dup[quality] = "file://" + URI.escape(File.realpath(f.to_path))} + file_dup.each_pair {|quality, f| file_dup[quality] = FileLocator.new(f.to_path).uri.to_s } else - "file://" + URI.escape(file_location) + FileLocator.new(file_location).uri.to_s end ActiveEncodeJob::Create.perform_later(self.id, input, {preset: self.workflow_name}) @@ -404,7 +412,7 @@ def file_location=(value) end def encoder_class - find_encoder_class(encoder_classname) || find_encoder_class(workflow_name.to_s.classify) || ActiveEncode::Base + find_encoder_class(encoder_classname) || find_encoder_class(workflow_name.to_s.classify) || MasterFile.default_encoder_class || ActiveEncode::Base end def encoder_class=(value) @@ -417,6 +425,20 @@ def encoder_class=(value) end end + def self.default_encoder_class + @@default_encoder_class ||= nil + end + + def self.default_encoder_class=(value) + if value.nil? + @@default_encoder_class = nil + elsif value.is_a?(Class) and value.ancestors.include?(ActiveEncode::Base) + @@default_encoder_class = value + else + raise ArgumentError, '#default_encoder_class must be a descendant of ActiveEncode::Base' + end + end + def structural_metadata_labels structuralMetadata.xpath('//@label').collect{|a|a.value} end @@ -478,25 +500,28 @@ def to_solr *args protected def mediainfo - @mediainfo ||= Mediainfo.new file_location + if @mediainfo.nil? + @mediainfo = Mediainfo.new(FileLocator.new(file_location).location) + end + @mediainfo end def find_frame_source(options={}) options[:offset] ||= 2000 - response = { source: file_location, offset: options[:offset], master: true } + response = { source: FileLocator.new(file_location).location, offset: options[:offset], master: true } + return response if response[:source] =~ %r(^https?://) + unless File.exists?(response[:source]) Rails.logger.warn("Masterfile `#{file_location}` not found. Extracting via HLS.") begin - token = StreamToken.find_or_create_session_token({media_token:nil}, self.id) - playlist_url = self.stream_details(token)[:stream_hls].find { |d| d[:quality] == 'high' }[:url] - playlist = Avalon::M3U8Reader.read(playlist_url) + playlist_url = self.stream_details[:stream_hls].find { |d| d[:quality] == 'high' }[:url] + secure_url = SecurityHandler.secure_stream(playlist_url, target: self.pid) + playlist = Avalon::M3U8Reader.read(secure_url) details = playlist.at(options[:offset]) target = File.join(Dir.tmpdir,File.basename(details[:location])) File.open(target,'wb') { |f| open(details[:location]) { |io| f.write(io.read) } } response = { source: target, offset: details[:offset], master: false } - ensure - StreamToken.find_by_token(token).destroy end end return response @@ -511,10 +536,10 @@ def extract_frame(options={}) raise RangeError, "Offset #{offset} not in range 0..#{self.duration}" end - ffmpeg = Avalon::Configuration.lookup('ffmpeg.path') + ffmpeg = Settings.ffmpeg.path frame_size = (options[:size].nil? or options[:size] == 'auto') ? self.original_frame_size : options[:size] - (new_width,new_height) = frame_size.split(/x/).collect &:to_f + (new_width,new_height) = frame_size.split(/x/).collect(&:to_f) new_height = (new_width/self.display_aspect_ratio.to_f).floor new_height += 1 if new_height.odd? 
aspect = new_width/new_height @@ -522,8 +547,11 @@ def extract_frame(options={}) frame_source = find_frame_source(offset: offset) data = nil Tempfile.open([base,'.jpg']) do |jpeg| - file_source = File.join(File.dirname(jpeg.path),"#{File.basename(jpeg.path,File.extname(jpeg.path))}#{File.extname(frame_source[:source])}") - File.symlink(frame_source[:source],file_source) + file_source = frame_source[:source] + unless file_source =~ %r(https?://) + file_source = File.join(File.dirname(jpeg.path),"#{File.basename(jpeg.path,File.extname(jpeg.path))}#{File.extname(frame_source[:source])}") + File.symlink(frame_source[:source],file_source) + end begin options = [ '-i', file_source, @@ -552,7 +580,7 @@ def extract_frame(options={}) end data ensure - File.unlink(file_source) + File.unlink(file_source) unless file_source =~ %r(https?://) end end raise RuntimeError, "Frame extraction failed. See log for details." if data.empty? @@ -591,7 +619,7 @@ def calculate_percent_complete matterhorn_response def saveOriginal(file, original_name=nil) realpath = File.realpath(file.path) if original_name.present? - config_path = Avalon::Configuration.lookup('matterhorn.media_path') + config_path = Settings.matterhorn.media_path newpath = nil if config_path.present? and File.directory?(config_path) newpath = File.join(config_path, original_name) @@ -644,11 +672,11 @@ def reloadTechnicalMetadata! def post_processing_file_management logger.debug "Finished processing" - case Avalon::Configuration.lookup('master_file_management.strategy') + case Settings.master_file_management.strategy when 'delete' MasterFileManagementJobs::Delete.perform_now self.id when 'move' - move_path = Avalon::Configuration.lookup('master_file_management.path') + move_path = Settings.master_file_management.path raise '"path" configuration missing for master_file_management strategy "move"' if move_path.blank? newpath = File.join(move_path, MasterFile.post_processing_move_filename(file_location, id: id)) MasterFileManagementJobs::Move.perform_later self.id, newpath diff --git a/app/models/role_map.rb b/app/models/role_map.rb index 56fc2d3991..a9638800d8 100644 --- a/app/models/role_map.rb +++ b/app/models/role_map.rb @@ -1,11 +1,11 @@ # Copyright 2011-2017, The Trustees of Indiana University and Northwestern # University. Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. -# +# # You may obtain a copy of the License at -# +# # http://www.apache.org/licenses/LICENSE-2.0 -# +# # Unless required by applicable law or agreed to in writing, software distributed # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR # CONDITIONS OF ANY KIND, either express or implied. See the License for the @@ -18,7 +18,7 @@ class RoleMap < ActiveRecord::Base #attr_accessible :entry, :role, :parent_id def self.reset! - self.replace_with! YAML.load(File.read(File.join(Rails.root, "config/role_map.yml"))).fetch(Rails.env) + self.replace_with! 
YAML.load(ERB.new(File.read(File.join(Rails.root, "config/role_map.yml"))).result).fetch(Rails.env) end def self.roles diff --git a/app/models/security_handler.rb b/app/models/security_handler.rb new file mode 100644 index 0000000000..7bfbd2fe11 --- /dev/null +++ b/app/models/security_handler.rb @@ -0,0 +1,19 @@ +class SecurityHandler + class << self + def secure_url(url, context={}) + @shim&.call(url, context) || url + end + + def secure_cookies(context={}) + @cookie_shim&.call(context) || {} + end + + def rewrite_url(&block) + @shim = block + end + + def create_cookies(&block) + @cookie_shim = block + end + end +end diff --git a/app/models/stream_token.rb b/app/models/stream_token.rb index 387291d12f..92f2a12fba 100644 --- a/app/models/stream_token.rb +++ b/app/models/stream_token.rb @@ -57,6 +57,6 @@ def self.validate_token(value) end def renew! - update_attribute :expires, (Time.now.utc + Avalon::Configuration.lookup('streaming.stream_token_ttl').minutes) + update_attribute :expires, (Time.now.utc + Settings.streaming.stream_token_ttl.minutes) end end diff --git a/app/models/user.rb b/app/models/user.rb index bcd9cd5d91..c1c16221ff 100644 --- a/app/models/user.rb +++ b/app/models/user.rb @@ -1,11 +1,11 @@ # Copyright 2011-2017, The Trustees of Indiana University and Northwestern # University. Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. -# +# # You may obtain a copy of the License at -# +# # http://www.apache.org/licenses/LICENSE-2.0 -# +# # Unless required by applicable law or agreed to in writing, software distributed # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR # CONDITIONS OF ANY KIND, either express or implied. See the License for the @@ -52,7 +52,7 @@ def self.from_api_token(token) self.find_or_create_by_username_or_email(token.username, token.email) end - def self.find_for_identity(access_token, signed_in_resource=nil) + def self.find_for_generic(access_token, signed_in_resource=nil) username = access_token.info['email'] User.find_by_username(username) || User.find_by_email(username) || User.create(username: username, email: username) end diff --git a/app/services/file_locator.rb b/app/services/file_locator.rb new file mode 100644 index 0000000000..357340bf0b --- /dev/null +++ b/app/services/file_locator.rb @@ -0,0 +1,90 @@ +require 'addressable/uri' +require 'aws-sdk' + +class FileLocator + attr_reader :source + + class S3File + attr_reader :bucket, :key + + def initialize(uri) + uri = Addressable::URI.parse(uri) + @bucket = URI.decode(uri.host) + @key = URI.decode(uri.path).sub(%r(^/*(.+)/*$),'\1') + end + + def object + Aws::S3::Object.new(bucket_name: bucket, key: key) + end + end + + def initialize(source) + @source = source + end + + def uri + if @uri.nil? + encoded_source = source + begin + @uri = Addressable::URI.parse(encoded_source) + rescue URI::InvalidURIError + if encoded_source == source + encoded_source = URI.encode(encoded_source) + retry + else + raise + end + end + + if @uri.scheme.nil? + @uri = Addressable::URI.parse("file://#{URI.encode(File.expand_path(source))}") + end + end + @uri + end + + def location + case uri.scheme + when 's3' + S3File.new(uri).object.presigned_url(:get) + when 'file' + URI.decode(uri.path) + else + @uri.to_s + end + end + + def exist? + case uri.scheme + when 's3' + S3File.new(uri).object.exists? + when 'file' + File.exist?(location) + else + false + end + end + alias_method :exists?, :exist? 
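  # A minimal usage sketch of the locator defined above; the bucket, key and
  # local path here are illustrative placeholders, not values from this changeset.
  #
  #   remote = FileLocator.new('s3://example-masterfiles/dropbox/lecture01.mp4')
  #   remote.uri.scheme   # => "s3"
  #   remote.exist?       # true only if the S3 object is actually present
  #   remote.location     # a presigned GET URL usable by ffmpeg or mediainfo
  #
  #   local = FileLocator.new('/srv/avalon/content/lecture01.mp4')
  #   local.uri.to_s      # => "file:///srv/avalon/content/lecture01.mp4"
  #   local.reader        # => an open File handle on the local path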
+ + def reader + case uri.scheme + when 's3' + S3File.new(uri).object.get.body + when 'file' + File.open(location,'r') + else + Kernel::open(uri.to_s, 'r') + end + end + + def attachment + case uri.scheme + when 's3' + uri + when 'file' + File.open(location,'r') + else + location + end + end +end diff --git a/app/services/master_file_builder.rb b/app/services/master_file_builder.rb new file mode 100644 index 0000000000..bf1074c3bf --- /dev/null +++ b/app/services/master_file_builder.rb @@ -0,0 +1,83 @@ +module MasterFileBuilder + class BuildError < Exception; end + Spec = Struct.new(:content, :original_filename, :content_type, :workflow) + + def self.build(media_object, params) + builder = if params.has_key?(:Filedata) and params.has_key?(:original) + FileUpload + elsif params.has_key?(:selected_files) + DropboxUpload + else + nil + end + if builder.nil? + { flash: { error: ["You must specify a file to upload"] }, master_files: [] } + else + from_specs(media_object, builder.build(params)) + end + end + + def self.from_specs(media_object, specs) + response = { flash: { error: [] }, master_files: [] } + specs.each do |spec| + unless spec.original_filename.valid_encoding? && spec.original_filename.ascii_only? + raise BuildError, 'The file you have uploaded has non-ASCII characters in its name.' + end + + master_file = MasterFile.new() + master_file.setContent(spec.content) + master_file.set_workflow(spec.workflow) + + if 'Unknown' == master_file.file_format + response[:flash][:error] << "The file was not recognized as audio or video - %s (%s)" % [spec.original_filename, spec.content_type] + master_file.destroy + next + else + response[:flash][:notice] = create_upload_notice(master_file.file_format) + end + + master_file.media_object = media_object + if master_file.save + media_object.save + master_file.process + response[:master_files] << master_file + else + response[:flash][:error] << "There was a problem storing the file" + end + end + response[:flash][:error] = nil if response[:flash][:error].empty? + response + end + + def self.create_upload_notice(format) + case format + when /^Sound$/ + 'The uploaded content appears to be audio'; + when /^Moving image$/ + 'The uploaded content appears to be video'; + else + 'The uploaded content could not be identified'; + end + end + + module FileUpload + def self.build(params) + params[:Filedata].collect do |file| + if (file.size > MasterFile::MAXIMUM_UPLOAD_SIZE) + raise BuildError, "The file you have uploaded is too large" + end + Spec.new(file, file.original_filename, file.content_type, params[:workflow]) + end + end + end + + module DropboxUpload + def self.build(params) + params[:selected_files].values.collect do |entry| + uri = Addressable::URI.parse(entry[:url]) + path = URI.decode(uri.path) + Spec.new(uri, File.basename(path), Rack::Mime.mime_type(File.extname(path)), params[:workflow]) + end + end + end +end diff --git a/app/services/solr_collection_creator.rb b/app/services/solr_collection_creator.rb new file mode 100644 index 0000000000..f8219326d0 --- /dev/null +++ b/app/services/solr_collection_creator.rb @@ -0,0 +1,78 @@ +class SolrCollectionCreator + attr_accessor :name + + def initialize(name) + @name = name + end + + def perform + unless collection_exists? 
name + client.get '/solr/admin/collections', params: collection_options.merge(action: 'CREATE', name: name) + end + end + + class CollectionOptions + attr_reader :settings + + def initialize(settings = {}) + @settings = settings + end + + ## + # @example Camel-casing + # { replication_factor: 5 } # => { "replicationFactor" => 5 } + # @example Blank-rejecting + # { emptyValue: '' } #=> { } + # @example Nested value-flattening + # { collection: { config_name: 'x' } } # => { 'collection.configName' => 'x' } + def to_h + Hash[*settings.map { |k, v| transform_entry(k, v) }.flatten].reject { |_k, v| v.blank? }.symbolize_keys + end + + private + + def transform_entry(k, v) + case v + when Hash + v.map do |k1, v1| + ["#{transform_key(k)}.#{transform_key(k1)}", v1] + end + else + [transform_key(k), v] + end + end + + def transform_key(k) + k.to_s.camelize(:lower) + end + end + + private + + def client + Blacklight.default_index.connection + end + + def collection_options + CollectionOptions.new(Settings.solr.collection_options.to_hash).to_h + end + + def collection_exists?(name) + response = client.get '/solr/admin/collections', params: { action: 'LIST' } + collections = response['collections'] + + collections.include? name + end + + def collection_url(name) + normalized_uri = if Settings.solr.url.ends_with?('/') + Settings.solr.url + else + "#{Settings.solr.url}/" + end + + uri = URI(normalized_uri) + name + + uri.to_s + end +end diff --git a/app/services/solr_config_uploader.rb b/app/services/solr_config_uploader.rb new file mode 100644 index 0000000000..42b26d5161 --- /dev/null +++ b/app/services/solr_config_uploader.rb @@ -0,0 +1,71 @@ +# Upload solr configuration from the local filesystem into the zookeeper configs path for solr +# Taken from https://github.com/projecthydra-labs/hyku/blob/master/app/services/solr_config_uploader.rb +class SolrConfigUploader + attr_reader :collection_path + + ## + # Build a new SolrConfigUploader using the application-wide settings + def self.default + new(Settings.solr.configset) + end + + def initialize(collection_path) + @collection_path = collection_path + end + + def upload(upload_directory) + with_client do |zk| + salient_files(upload_directory).each do |file| + zk.create(zookeeper_path_for_file(file), file.read, or: :set) + end + end + end + + def delete_all + with_client do |zk| + zk.rm_rf(zookeeper_path) + end + end + + def get(path) + with_client do |zk| + zk.get(zookeeper_path(path)).first + end + end + + private + + def zookeeper_path_for_file(file) + zookeeper_path(File.basename(file.path)) + end + + def zookeeper_path(*path) + "/#{([collection_path] + path).compact.join('/')}" + end + + def salient_files(config_dir) + return to_enum(:salient_files, config_dir) unless block_given? + + Dir.new(config_dir).each do |file_name| + full_path = File.expand_path(file_name, config_dir) + + next unless File.file? full_path + + yield File.new(full_path) + end + end + + def with_client(&block) + ensure_chroot! + + ZK.open(connection_str, &block) + end + + def connection_str + Settings.zookeeper.connection_str + end + + def ensure_chroot! 
+ raise ArgumentError, 'Zookeeper connection string must include a chroot path' unless connection_str =~ %r{/[^/]} + end +end diff --git a/app/views/media_objects/_file_upload.html.erb b/app/views/media_objects/_file_upload.html.erb index ac20cd49bb..9f80f73478 100644 --- a/app/views/media_objects/_file_upload.html.erb +++ b/app/views/media_objects/_file_upload.html.erb @@ -122,7 +122,7 @@ Unless required by applicable law or agreed to in writing, software distributed

Uploaded files must not exceed <%= number_to_human_size MasterFile::MAXIMUM_UPLOAD_SIZE %>

- <%= form_tag(master_files_path, :enctype=>"multipart/form-data", class: "uploader-form form-horizontal step") do -%> + <%= form_tag(master_files_path, :enctype=>"multipart/form-data", class: upload_form_classes, data: upload_form_data) do -%> diff --git a/app/views/playlists/_player.html.erb b/app/views/playlists/_player.html.erb index 683138e2ad..60bf84cd5b 100644 --- a/app/views/playlists/_player.html.erb +++ b/app/views/playlists/_player.html.erb @@ -17,9 +17,8 @@ Unless required by applicable law or agreed to in writing, software distributed <% f_start = @current_clip.start_time / 1000.0 %> <% f_end = @current_clip.end_time / 1000.0 %> <% @currentStream = @current_masterfile %> -<% @token = StreamToken.find_or_create_session_token(session, @currentStream.id) %> -<% @currentStreamInfo = @currentStream.stream_details(@token, ApplicationController.default_url_options[:host]) %> -<% @currentStreamInfo['t'] = [f_start,f_end] %> +<% @currentStreamInfo = secure_streams(@currentStream.stream_details) %> +<% @currentStreamInfo['t'] = "#{f_start},#{f_end}" %> <% if can? :read, @current_masterfile %> <%= render partial: 'modules/player/section', locals: {section: @currentStream, section_info: @currentStreamInfo, f_start: @f_start, f_end: @f_end} %> <% end %> diff --git a/app/views/playlists/index.html.erb b/app/views/playlists/index.html.erb index 637f150b81..ff279b3b40 100644 --- a/app/views/playlists/index.html.erb +++ b/app/views/playlists/index.html.erb @@ -45,11 +45,11 @@ Unless required by applicable law or agreed to in writing, software distributed <% end %>
<%= link_to(new_playlist_path) do %> - + Create New Playlist - + <% end %> - <% if Avalon::Configuration.has_key?('variations') %> + <% if Settings['variations'].present? %> <%= form_tag(import_variations_playlist_playlists_path, method:"post", enctype:"multipart/form-data", style:"display:inline") do %> diff --git a/config/avalon.yml.example b/config/avalon.yml.example index 07417fb6b3..9b190ce57e 100644 --- a/config/avalon.yml.example +++ b/config/avalon.yml.example @@ -24,6 +24,9 @@ development: address: 'smtp.example.edu' port: 587 enable_starttls_auto: false + solr: + configset_source_path: <%= File.join(Rails.root, 'solr', 'config') %> + zookeeper_connection_str: "localhost:9983/configs" streaming: stream_token_ttl: 20 #minutes rtmp_base: 'rtmp://localhost/avalon' diff --git a/config/encoding_presets.yml b/config/encoding_presets.yml new file mode 100644 index 0000000000..ef9b8aa157 --- /dev/null +++ b/config/encoding_presets.yml @@ -0,0 +1,67 @@ +--- +:templates: + :video: + :audio: + :codec: AAC + :sample_rate: '44100' + :channels: '2' + :codec_options: + :profile: AAC-LC + :video: + :codec: H.264 + :codec_options: + ColorSpaceConversionMode: None + InterlacedMode: Progressive + Level: '3' + MaxReferenceFrames: '3' + Profile: baseline + :keyframes_max_dist: '90' + :fixed_gop: 'true' + :frame_rate: auto + :display_aspect_ratio: auto + :sizing_policy: ShrinkToFit + :padding_policy: NoPad + :thumbnails: + :format: png + :interval: '300' + :max_width: '192' + :max_height: '108' + :sizing_policy: ShrinkToFit + :padding_policy: NoPad + :audio: + :audio: + :codec: AAC + :sample_rate: '44100' + :channels: '2' + :codec_options: + :profile: AAC-LC +:settings: + :video: + :low: + :audio: + :bit_rate: '128' + :video: + :bit_rate: '500' + :max_width: '360' + :max_height: '240' + :medium: + :audio: + :bit_rate: '128' + :video: + :bit_rate: '1024' + :max_width: '480' + :max_height: '320' + :high: + :audio: + :bit_rate: '192' + :video: + :bit_rate: '2048' + :max_width: '720' + :max_height: '480' + :audio: + :medium: + :audio: + :bit_rate: '128' + :high: + :audio: + :bit_rate: '320' diff --git a/config/environments/production.rb b/config/environments/production.rb index e39717e592..c7e67da292 100644 --- a/config/environments/production.rb +++ b/config/environments/production.rb @@ -87,6 +87,6 @@ # Do not dump schema after migrations. config.active_record.dump_schema_after_migration = false - # Suppress deprecation warnings - Deprecation.default_deprecation_behavior = :silence + require 'active_job/queue_adapters/better_active_elastic_job_adapter' + config.active_job.queue_adapter = Settings&.active_job&.queue_adapter || :resque end diff --git a/config/initializers/about_page.rb b/config/initializers/about_page.rb index 14a295cab5..85dcec6684 100644 --- a/config/initializers/about_page.rb +++ b/config/initializers/about_page.rb @@ -11,7 +11,7 @@ config.database = Avalon::About::Database.new(User) config.matterhorn = Avalon::About::Matterhorn.new(Rubyhorn) config.mediainfo = Avalon::About::MediaInfo.new(:version => '>=0.7.59') - config.streaming_server = Avalon::About::RTMPServer.new(URI.parse(Avalon::Configuration.lookup('streaming.rtmp_base')).host) + config.streaming_server = Avalon::About::RTMPServer.new(URI.parse(Settings.streaming.rtmp_base).host) config.resque = Avalon::About::Resque.new(::Resque) config.resque_scheduler = Avalon::About::ResqueScheduler.new(::Resque::Scheduler) config.git_log = AboutPage::GitLog.new(limit: 15) if Rails.env.development? 
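The initializer hunks above and below swap Avalon::Configuration.lookup for the config gem's Settings constant set up in config/initializers/config.rb. A minimal before/after sketch, using keys that appear in config/settings.yml (the values shown are the shipped defaults, not assertions about any particular deployment):

    # Old style: dotted-string lookup against the parsed avalon.yml hash
    Avalon::Configuration.lookup('streaming.stream_token_ttl')   # => 20

    # New style: method chaining on Settings, which also honors environment
    # overrides of the form SETTINGS__STREAMING__STREAM_TOKEN_TTL
    Settings.streaming.stream_token_ttl   # => 20
    Settings.matterhorn.cleanup_log       # => "log/cleanup_jobs.log"
    Settings['variations'].present?       # safe test for an optional section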
diff --git a/config/initializers/active_encode.rb b/config/initializers/active_encode.rb index 253b731d60..e66361f5ea 100644 --- a/config/initializers/active_encode.rb +++ b/config/initializers/active_encode.rb @@ -1,2 +1,19 @@ -ActiveEncode::Base.engine_adapter = :matterhorn -Rubyhorn.init +require 'aws-sdk' + +ActiveEncode::Base.engine_adapter = Settings.encoding.engine_adapter.to_sym +case Settings.encoding.engine_adapter.to_sym +when :matterhorn + Rubyhorn.init +when :elastic_transcoder + MasterFile.default_encoder_class = ElasticTranscoderJob + pipeline = Aws::ElasticTranscoder::Client.new.read_pipeline(id: Settings.encoding.pipeline) + # Set environment variables to guard against reloads + ENV['SETTINGS__ENCODING__MASTERFILE_BUCKET'] = Settings.encoding.masterfile_bucket = pipeline.pipeline.input_bucket + ENV['SETTINGS__ENCODING__DERIVATIVE_BUCKET'] = Settings.encoding.derivative_bucket = pipeline.pipeline.output_bucket + if Settings.dropbox.path.nil? + ENV['SETTINGS__DROPBOX__PATH'] = Settings.dropbox.path = "s3://#{Settings.encoding.masterfile_bucket}/dropbox/" + end + if Settings.dropbox.upload_uri.nil? + ENV['SETTINGS__DROPBOX__UPLOAD_URI'] = Settings.dropbox.upload_uri = "s3://#{Settings.encoding.masterfile_bucket}/dropbox/" + end +end diff --git a/config/initializers/batch_manifest_class.rb b/config/initializers/batch_manifest_class.rb new file mode 100644 index 0000000000..c382ae68d8 --- /dev/null +++ b/config/initializers/batch_manifest_class.rb @@ -0,0 +1,3 @@ +if Settings.dropbox.path =~ %r{^s3://} + Avalon::Batch::Manifest.concrete_class = Avalon::Batch::S3Manifest +end diff --git a/config/initializers/browse_everything.rb b/config/initializers/browse_everything.rb new file mode 100644 index 0000000000..5c88c1b75d --- /dev/null +++ b/config/initializers/browse_everything.rb @@ -0,0 +1,7 @@ +settings = if Settings.dropbox.path =~ %r{^s3://} + obj = FileLocator::S3File.new(Settings.dropbox.path).object + { 's3' => { name: 'AWS S3 Dropbox', bucket: obj.bucket_name, base: obj.key, response_type: :s3_uri } } +else + { 'file_system' => { name: 'File Dropbox', home: Settings.dropbox.path } } +end +BrowseEverything.configure(settings) diff --git a/config/initializers/config.rb b/config/initializers/config.rb new file mode 100644 index 0000000000..c51f6429f0 --- /dev/null +++ b/config/initializers/config.rb @@ -0,0 +1,31 @@ +Config.setup do |config| + # Name of the constant exposing loaded settings + config.const_name = 'Settings' + + # Ability to remove elements of the array set in earlier loaded settings file. For example value: '--'. + # + # config.knockout_prefix = nil + + # Overwrite arrays found in previously loaded settings file. When set to `false`, arrays will be merged. + # + # config.overwrite_arrays = true + + # Load environment variables from the `ENV` object and override any settings defined in files. + config.use_env = true + + # Define ENV variable prefix deciding which variables to load into config. + config.env_prefix = 'SETTINGS' + + # What string to use as level separator for settings loaded from ENV variables. Default value of '.' works well + # with Heroku, but you might want to change it for example for '__' to easy override settings from command line, where + # using dots in variable names might not be allowed (eg. Bash). + config.env_separator = '__' + + # Ability to process variables names: + # * nil - no change + # * :downcase - convert to lower case + config.env_converter = :downcase + + # Parse numeric values as integers instead of strings. 
+ config.env_parse_values = true +end diff --git a/config/initializers/default_host.rb b/config/initializers/default_host.rb index b5cf6e6984..ca6bea0fc2 100644 --- a/config/initializers/default_host.rb +++ b/config/initializers/default_host.rb @@ -1,4 +1,13 @@ -server_options = Avalon::Configuration.lookup('domain').dup +server_options = Settings.domain +server_options = case server_options +when String + uri = URI.parse(server_options) + { host: uri.host, port: uri.port, procotol: uri.scheme } +when Hash + server_options +else + server_options.to_hash +end if server_options server_options.symbolize_keys! diff --git a/config/initializers/devise.rb b/config/initializers/devise.rb index 8edf54e3e7..1166f6588a 100644 --- a/config/initializers/devise.rb +++ b/config/initializers/devise.rb @@ -253,19 +253,22 @@ if provider[:provider] == :lti provider[:params].merge!({consumers: Avalon::Lti::Configuration}) end - + if provider[:provider] == :identity provider[:params].merge!({ on_login: AuthFormsController.action(:render_form, AuthFormsController.dispatcher(:identity, :request_phase)), on_registration: AuthFormsController.action(:render_form, AuthFormsController.dispatcher(:identity, :registration_form)) }) end - - config.omniauth provider[:provider], provider[:params] - end - if ENV['LTI_AUTH_KEY'] - config.omniauth :lti, consumers: Avalon::Lti::Configuration, - oauth_credentials: { ENV['LTI_AUTH_KEY'] => ENV['LTI_AUTH_SECRET'] } + + params = provider[:params] + params = [params] unless params.is_a?(Array) + begin + require "omniauth/#{provider[:provider]}" + rescue LoadError + require "omniauth-#{provider[:provider]}" + end + config.omniauth provider[:provider], *params end # ==> Warden configuration diff --git a/config/initializers/dropbox_context.rb b/config/initializers/dropbox_context.rb index 1974e5480f..e2d7398514 100644 --- a/config/initializers/dropbox_context.rb +++ b/config/initializers/dropbox_context.rb @@ -1,6 +1,11 @@ BrowseEverythingController.before_filter do if params[:context] collection = Admin::Collection.find(params[:context]) - browser.providers['file_system'].config[:home] = collection.dropbox_absolute_path + if browser.providers['file_system'].present? + browser.providers['file_system'].config[:home] = collection.dropbox_absolute_path + end + if browser.providers['s3'].present? + browser.providers['s3'].config[:base] = FileLocator::S3File.new(collection.dropbox_absolute_path).key + end end end diff --git a/config/initializers/mailer.rb b/config/initializers/mailer.rb new file mode 100644 index 0000000000..747f921695 --- /dev/null +++ b/config/initializers/mailer.rb @@ -0,0 +1,4 @@ +if File.exist?('/sys/hypervisor/uuid') && (File.read('/sys/hypervisor/uuid',3) == 'ec2') + require 'aws/rails/mailer' + ActionMailer::Base.delivery_method = :aws_sdk +end diff --git a/config/initializers/security.rb b/config/initializers/security.rb new file mode 100644 index 0000000000..ca0fc7156b --- /dev/null +++ b/config/initializers/security.rb @@ -0,0 +1,70 @@ +def configure_signer + require 'cloudfront-signer' + unless Aws::CF::Signer.is_configured? 
+ Aws::CF::Signer.configure do |config| + key = case Settings.streaming.signing_key + when %r(^-----BEGIN) + Settings.streaming.signing_key + when %r(^s3://) + FileLocator::S3File.new(Settings.streaming.signing_key).object.get.body.read + when nil + Logger.warn('No CloudFront signing key configured') + else + File.read(Settings.streaming.signing_key) + end + config.key = key + config.key_pair_id = Settings.streaming.signing_key_id + end + end +end + +SecurityHandler.rewrite_url do |url, context| + case Settings.streaming.server.to_sym + when :aws + configure_signer + context[:protocol] ||= :stream_hls + uri = Addressable::URI.parse(url) + expiration = Settings.streaming.stream_token_ttl.minutes.from_now + case context[:protocol] + when :stream_flash + # WARNING: UGLY FILENAME MUNGING AHEAD + content_path = File.join(File.dirname(uri.path),File.basename(uri.path,File.extname(uri.path))).sub(%r(^/),'') + content_prefix = File.extname(uri.path).sub(%r(^\.),'') + result = Addressable::URI.join(Settings.streaming.rtmp_base,"cfx/st/#{content_prefix}:#{content_path}") + result.query = Aws::CF::Signer.signed_params(content_path, expires: expiration).to_param + result.to_s + when :stream_hls + Addressable::URI.join(Settings.streaming.http_base,uri.path).to_s + #Aws::CF::Signer.sign_url(URI.join(Settings.streaming.http_base,uri.path).to_s, expires: expiration) + else + url + end + else + session = context[:session] || { media_token: nil } + token = StreamToken.find_or_create_session_token(session, context[:target]) + "#{url}?token=#{token}" + end +end + +SecurityHandler.create_cookies do |context| + result = {} + case Settings.streaming.server.to_sym + when :aws + configure_signer + domain = Addressable::URI.parse(Settings.streaming.http_base).host + cookie_domain = (context[:request_host].split(/\./) & domain.split(/\./)).join('.') + resource = "http*://#{domain}/#{context[:target]}/*" + Rails.logger.info "Creating signed policy for resource #{resource}" + expiration = Settings.streaming.stream_token_ttl.minutes.from_now + params = Aws::CF::Signer.signed_params(resource, expires: expiration, resource: resource) + params.each_pair do |param,value| + result["CloudFront-#{param}"] = { + value: value, + path: "/#{context[:target]}", + domain: cookie_domain, + expires: expiration + } + end + end + result +end diff --git a/config/role_map.yml b/config/role_map.yml index a329cfe9d5..5bbf5f756e 100644 --- a/config/role_map.yml +++ b/config/role_map.yml @@ -16,3 +16,9 @@ test: production: # Add roles for users here. + administrator: + - <%= Settings.initial_user %> + manager: + - <%= Settings.initial_user %> + group_manager: + - <%= Settings.initial_user %> diff --git a/config/routes.rb b/config/routes.rb index ed287d1ccb..2077d42d83 100644 --- a/config/routes.rb +++ b/config/routes.rb @@ -133,7 +133,7 @@ collection do post 'duplicate' post 'paged_index' - if Avalon::Configuration.has_key?('variations') + if Settings['variations'].present? 
post 'import_variations_playlist' end end diff --git a/config/settings.yml b/config/settings.yml new file mode 100644 index 0000000000..6702a63e1c --- /dev/null +++ b/config/settings.yml @@ -0,0 +1,72 @@ +name: 'Avalon' # used in page titles +domain: + host: localhost + port: 80 + protocol: http +dropbox: {} +encoding: + engine_adapter: matterhorn +matterhorn: + root: 'http://localhost:8080/' + baseApplication: 'avalon' + cleanup_log: 'log/cleanup_jobs.log' +mediainfo: + path: '/usr/bin/mediainfo' +ffmpeg: + path: '/usr/local/bin/ffmpeg' +email: + comments: 'avalon-comments@example.edu' + notification: 'avalon-notifications@example.edu' + support: 'avalon-support@example.edu' +solr: + configset: avalon + configset_source_path: <%= File.join(Rails.root, 'solr', 'config') %> + collection_options: + async: + auto_add_replicas: + collection: + config_name: avalon + create_node_set: + max_shards_per_node: + num_shards: 1 + replication_factor: + router: + name: + field: + rule: + shards: + snitch: +zookeeper: + connection_str: "localhost:9983/configs" +streaming: + stream_token_ttl: 20 #minutes + content_path: '/srv/avalon/content' + rtmp_base: 'rtmp://localhost/avalon' + http_base: 'http://localhost:3000/streams' + default_quality: 'low' +groups: + system_groups: [administrator, group_manager, manager] +master_file_management: + strategy: 'none' #'delete', or 'move' (for move uncomment and configure next line) + #path: '/path/to/move/to' +#bib_retriever: +# protocol: sru +# url: http://zgate.example.edu:9000/exampledb +# query: rec.id='%s' +controlled_vocabulary: + path: config/controlled_vocabulary.yml +auth: + configuration: + - :name: Avalon Test Auth + :provider: :identity + :params: + :fields: + - :email + <% if ENV['LTI_AUTH_KEY'] %> + - :name: Avalon Lti OAuth + :provider: :lti + :hidden: true + :params: + :oauth_credentials: + <%= ENV['LTI_AUTH_KEY'] %>: <%= ENV['LTI_AUTH_SECRET'] %> + <% end %> diff --git a/config/settings/development.yml b/config/settings/development.yml new file mode 100644 index 0000000000..becd2465cb --- /dev/null +++ b/config/settings/development.yml @@ -0,0 +1,2 @@ +domain: + port: 3000 diff --git a/config/settings/production.yml b/config/settings/production.yml new file mode 100644 index 0000000000..e69de29bb2 diff --git a/config/settings/test.yml b/config/settings/test.yml new file mode 100644 index 0000000000..5517d7e3fa --- /dev/null +++ b/config/settings/test.yml @@ -0,0 +1,15 @@ +domain: + host: test.host +auth: + configuration: + - :name: Avalon Test Auth + :provider: :identity + :params: + :fields: + - :email + - :name: Avalon Lti OAuth + :provider: :lti + :hidden: true + :params: + :oauth_credentials: + key: 'secret' diff --git a/lib/active_job/queue_adapters/better_active_elastic_job_adapter.rb b/lib/active_job/queue_adapters/better_active_elastic_job_adapter.rb new file mode 100644 index 0000000000..371d26743b --- /dev/null +++ b/lib/active_job/queue_adapters/better_active_elastic_job_adapter.rb @@ -0,0 +1,31 @@ +require 'active_elastic_job' + +module ActiveJob + module QueueAdapters + class BetterActiveElasticJobAdapter < ActiveElasticJobAdapter + def enqueue(*args) + self.class.enqueue(*args) + end + + def enqueue_at(*args) + self.class.enqueue_at(*args) + end + + class << self + def aws_sqs_client + @aws_sqs_client ||= Aws::SQS::Client.new + end + + private + + def queue_url(*_) + if Settings.active_job_queue.url + Settings.active_job_queue.url + else + super + end + end + end + end + end +end diff --git a/lib/avalon/authentication.rb 
b/lib/avalon/authentication.rb index b206fa45ec..be618a3790 100644 --- a/lib/avalon/authentication.rb +++ b/lib/avalon/authentication.rb @@ -1,11 +1,11 @@ # Copyright 2011-2017, The Trustees of Indiana University and Northwestern # University. Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. -# +# # You may obtain a copy of the License at -# +# # http://www.apache.org/licenses/LICENSE-2.0 -# +# # Unless required by applicable law or agreed to in writing, software distributed # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR # CONDITIONS OF ANY KIND, either express or implied. See the License for the @@ -18,7 +18,21 @@ module Authentication def self.lti_configured? Devise.omniauth_providers.include?(:lti) end - Config = YAML.load(File.read(File.expand_path('config/authentication.yml',Rails.root)))[Rails.env] + + def self.load_configs + configs = Settings.auth.configuration + if configs.is_a?(Array) + configs.collect(&:to_hash) + else + configs.to_hash.values + end + end + + Config = load_configs + if ENV['LTI_AUTH_KEY'] + Config << { name: 'LTI', provider: :lti, hidden: true, params: { oauth_credentials: { ENV['LTI_AUTH_KEY'] => ENV['LTI_AUTH_SECRET'] } } } + end + Providers = Config.reject {|provider| provider[:provider].blank? } VisibleProviders = Providers.reject {|provider| provider[:hidden]} HiddenProviders = Providers - VisibleProviders diff --git a/lib/avalon/batch.rb b/lib/avalon/batch.rb index d34543193a..e218f93103 100644 --- a/lib/avalon/batch.rb +++ b/lib/avalon/batch.rb @@ -1,11 +1,11 @@ # Copyright 2011-2017, The Trustees of Indiana University and Northwestern # University. Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. -# +# # You may obtain a copy of the License at -# +# # http://www.apache.org/licenses/LICENSE-2.0 -# +# # Unless required by applicable law or agreed to in writing, software distributed # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR # CONDITIONS OF ANY KIND, either express or implied. See the License for the @@ -16,6 +16,8 @@ require "avalon/batch/entry" require "avalon/batch/ingest" require "avalon/batch/manifest" +require "avalon/batch/file_manifest" +require "avalon/batch/s3_manifest" require "avalon/batch/package" require "timeout" @@ -34,9 +36,9 @@ def self.find_open_files(files, base_directory = '.') statuses = status.split(/[\u0000\n]+/) statuses.in_groups_of(4) do |group| file_status = Hash[group.compact.collect { |s| [s[0].to_sym,s[1..-1]] }] - if file_status.has_key?(:n) and File.file?(file_status[:n]) and + if file_status.has_key?(:n) and File.file?(file_status[:n]) and (file_status[:a] =~ /w/ or file_status[:c] == 'scp') - result << file_status[:n] + result << file_status[:n] end end end diff --git a/lib/avalon/batch/entry.rb b/lib/avalon/batch/entry.rb index d997a701fb..05f7d11d29 100644 --- a/lib/avalon/batch/entry.rb +++ b/lib/avalon/batch/entry.rb @@ -1,11 +1,11 @@ # Copyright 2011-2017, The Trustees of Indiana University and Northwestern # University. Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
-# +# # You may obtain a copy of the License at -# +# # http://www.apache.org/licenses/LICENSE-2.0 -# +# # Unless required by applicable law or agreed to in writing, software distributed # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR # CONDITIONS OF ANY KIND, either express or implied. See the License for the @@ -22,37 +22,37 @@ class Entry attr_reader :fields, :files, :opts, :row, :errors, :manifest, :collection - def initialize(fields, files, opts, row, manifest) + def initialize(fields, files, opts, row, manifest) @fields = fields @files = files @opts = opts @row = row @manifest = manifest @errors = ActiveModel::Errors.new(self) - @files.each { |file| file[:file] = File.join(@manifest.package.dir, file[:file]) } + @files.each { |file| file[:file] = @manifest.path_to(file[:file]) } end - def media_object - @media_object ||= MediaObject.new(avalon_uploader: @manifest.package.user.user_key, - collection: @manifest.package.collection).tap do |mo| - mo.workflow.origin = 'batch' - mo.workflow.last_completed_step = HYDRANT_STEPS.last.step - if Avalon::BibRetriever.configured? and fields[:bibliographic_id].present? - begin - mo.descMetadata.populate_from_catalog!(fields[:bibliographic_id].first, Array(fields[:bibliographic_id_label]).first) - rescue Exception => e - @errors.add(:bibliographic_id, e.message) - end - else - begin - mo.update_attributes(media_object_fields) - rescue ActiveFedora::UnknownAttributeError => e - @errors.add(e.attribute.to_sym, e.message) - end + def media_object + @media_object ||= MediaObject.new(avalon_uploader: @manifest.package.user.user_key, + collection: @manifest.package.collection).tap do |mo| + mo.workflow.origin = 'batch' + mo.workflow.last_completed_step = HYDRANT_STEPS.last.step + if Avalon::BibRetriever.configured? and fields[:bibliographic_id].present? + begin + mo.descMetadata.populate_from_catalog!(fields[:bibliographic_id].first, Array(fields[:bibliographic_id_label]).first) + rescue Exception => e + @errors.add(:bibliographic_id, e.message) + end + else + begin + mo.update_attributes(media_object_fields) + rescue ActiveFedora::UnknownAttributeError => e + @errors.add(e.attribute.to_sym, e.message) end end - @media_object end + @media_object + end def media_object_fields mo_parameters = fields.dup @@ -74,88 +74,88 @@ def media_object_fields note_type = mo_parameters.delete(:note_type) mo_parameters[:note] = note.zip(note_type).map{|a|{note: a[0],type: a[1]}} if note.present? - mo_parameters - end + mo_parameters + end - def valid? - # Set errors if does not validate against media_object model - media_object.valid? - media_object.errors.messages.each_pair { |field,errs| - errs.each { |err| @errors.add(field, err) } - } - files = @files.select {|file_spec| file_valid?(file_spec)} - # Ensure files are listed - @errors.add(:content, "No files listed") if files.empty? - # Replace collection error if collection not found - if media_object.collection.nil? - @errors.messages[:collection] = ["Collection not found: #{@fields[:collection].first}"] - @errors.messages.delete(:governing_policy) - end + def valid? + # Set errors if does not validate against media_object model + media_object.valid? + media_object.errors.messages.each_pair { |field,errs| + errs.each { |err| @errors.add(field, err) } + } + files = @files.select {|file_spec| file_valid?(file_spec)} + # Ensure files are listed + @errors.add(:content, "No files listed") if files.empty? + # Replace collection error if collection not found + if media_object.collection.nil? 
+ @errors.messages[:collection] = ["Collection not found: #{@fields[:collection].first}"] + @errors.messages.delete(:governing_policy) end + end - def file_valid?(file_spec) - valid = true - # Check date_digitized for valid format - if file_spec[:date_digitized].present? - begin - DateTime.parse(file_spec[:date_digitized]) - rescue ArgumentError - @errors.add(:date_digitized, "Invalid date_digitized: #{file_spec[:date_digitized]}. Recommended format: yyyy-mm-dd.") - valid = false - end - end - # Check file offsets for valid format - if file_spec[:offset].present? && !Avalon::Batch::Entry.offset_valid?(file_spec[:offset]) - @errors.add(:offset, "Invalid offset: #{file_spec[:offset]}") + def file_valid?(file_spec) + valid = true + # Check date_digitized for valid format + if file_spec[:date_digitized].present? + begin + DateTime.parse(file_spec[:date_digitized]) + rescue ArgumentError + @errors.add(:date_digitized, "Invalid date_digitized: #{file_spec[:date_digitized]}. Recommended format: yyyy-mm-dd.") valid = false end - # Ensure listed files exist - if File.file?(file_spec[:file]) && self.class.derivativePaths(file_spec[:file]).present? - @errors.add(:content, "Both original and derivative files found") - valid = false - elsif File.file?(file_spec[:file]) + end + # Check file offsets for valid format + if file_spec[:offset].present? && !Avalon::Batch::Entry.offset_valid?(file_spec[:offset]) + @errors.add(:offset, "Invalid offset: #{file_spec[:offset]}") + valid = false + end + # Ensure listed files exist + if FileLocator.new(file_spec[:file]).exist? && self.class.derivativePaths(file_spec[:file]).present? + @errors.add(:content, "Both original and derivative files found") + valid = false + elsif FileLocator.new(file_spec[:file]).exist? + #Do nothing. + else + if self.class.derivativePaths(file_spec[:file]).present? && file_spec[:skip_transcoding] #Do nothing. + elsif self.class.derivativePaths(file_spec[:file]).present? && !file_spec[:skip_transcoding] + @errors.add(:content, "Derivative files found but skip transcoding not selected") + valid = false else - if self.class.derivativePaths(file_spec[:file]).present? && file_spec[:skip_transcoding] - #Do nothing. - elsif self.class.derivativePaths(file_spec[:file]).present? && !file_spec[:skip_transcoding] - @errors.add(:content, "Derivative files found but skip transcoding not selected") - valid = false - else - @errors.add(:content, "File not found: #{file_spec[:file]}") - valid = false - end + @errors.add(:content, "File not found: #{file_spec[:file]}") + valid = false end - valid end + valid + end - def self.offset_valid?( offset ) - tokens = offset.split(':') - return false unless (1...4).include? tokens.size - seconds = tokens.pop - return false unless /^\d{1,2}([.]\d*)?$/ =~ seconds - return false unless seconds.to_f < 60 + def self.offset_valid?( offset ) + tokens = offset.split(':') + return false unless (1...4).include? tokens.size + seconds = tokens.pop + return false unless /^\d{1,2}([.]\d*)?$/ =~ seconds + return false unless seconds.to_f < 60 + unless tokens.empty? + minutes = tokens.pop + return false unless /^\d{1,2}$/ =~ minutes + return false unless minutes.to_i < 60 unless tokens.empty? - minutes = tokens.pop - return false unless /^\d{1,2}$/ =~ minutes - return false unless minutes.to_i < 60 - unless tokens.empty? 
- hours = tokens.pop - return false unless /^\d{1,}$/ =~ hours - end + hours = tokens.pop + return false unless /^\d{1,}$/ =~ hours end - true end + true + end def self.attach_datastreams_to_master_file( master_file, filename ) structural_file = "#{filename}.structure.xml" - if File.exists? structural_file - master_file.structuralMetadata.content=File.open(structural_file) + if FileLocator.new(structural_file).exist? + master_file.structuralMetadata.content=FileLocator.new(structural_file).reader master_file.structuralMetadata.original_name = structural_file end captions_file = "#{filename}.vtt" - if File.exists? captions_file - master_file.captions.content=File.open(captions_file) + if FileLocator.new(captions_file).exist? + master_file.captions.content=FileLocator.new(captions_file).reader master_file.captions.mime_type='text/vtt' master_file.captions.original_name = captions_file end @@ -206,16 +206,17 @@ def self.gatherFiles(file) derivatives = {} %w(low medium high).each do |quality| derivative = self.derivativePath(file, quality) - derivatives["quality-#{quality}"] = File.new(derivative) if File.file? derivative + locator = FileLocator.new(derivative) + derivatives["quality-#{quality}"] = locator.attachment if locator.exist? end - derivatives.empty? ? File.new(file) : derivatives + derivatives.empty? ? FileLocator.new(file).attachment : derivatives end def self.derivativePaths(filename) paths = [] %w(low medium high).each do |quality| derivative = self.derivativePath(filename, quality) - paths << derivative if File.file? derivative + paths << derivative if FileLocator.new(derivative).exist? end paths end diff --git a/lib/avalon/batch/file_manifest.rb b/lib/avalon/batch/file_manifest.rb new file mode 100644 index 0000000000..7b9ac59370 --- /dev/null +++ b/lib/avalon/batch/file_manifest.rb @@ -0,0 +1,71 @@ +module Avalon + module Batch + class FileManifest < Manifest + class << self + def locate(root) + possibles = Dir[File.join(root, "**/*.{#{Manifest::EXTENSIONS.join(',')}}")] + possibles.reject do |file| + File.basename(file) =~ /^~\$/ or self.error?(file) or self.processing?(file) or self.processed?(file) + end + end + + def error?(file) + if File.file?("#{file}.error") + if File.mtime(file) > File.mtime("#{file}.error") + File.unlink("#{file}.error") + return false + else + return true + end + end + return false + end + + def processing?(file) + File.file?("#{file}.processing") + end + + def processed?(file) + File.file?("#{file}.processed") + end + end + + def start! + File.open("#{@file}.processing",'w') { |f| f.puts Time.now.xmlschema } + end + + def error! msg=nil + File.open("#{@file}.error",'a') do |f| + if msg.nil? + entries.each do |entry| + if entry.errors.count > 0 + f.puts "Row #{entry.row}:" + entry.errors.messages.each { |k,m| f.puts %{ #{m.join("\n ")}} } + end + end + else + f.puts msg + end + end + rollback! if processing? + end + + def rollback! + File.unlink("#{@file}.processing") + end + + def commit! + File.open("#{@file}.processed",'w') { |f| f.puts Time.now.xmlschema } + rollback! if processing? + end + + def path_to(f) + File.join(File.dirname(@file),f) + end + + def retrieve(f) + File.open(f) + end + end + end +end diff --git a/lib/avalon/batch/ingest.rb b/lib/avalon/batch/ingest.rb index 332980fc96..2d0009474c 100644 --- a/lib/avalon/batch/ingest.rb +++ b/lib/avalon/batch/ingest.rb @@ -1,11 +1,11 @@ # Copyright 2011-2017, The Trustees of Indiana University and Northwestern # University. 
Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. -# +# # You may obtain a copy of the License at -# +# # http://www.apache.org/licenses/LICENSE-2.0 -# +# # Unless required by applicable law or agreed to in writing, software distributed # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR # CONDITIONS OF ANY KIND, either express or implied. See the License for the @@ -30,7 +30,7 @@ def initialize(collection) def ingest # Scans dropbox for new batch packages new_packages = collection.dropbox.find_new_packages - logger.info "<< Found #{new_packages.count} new packages for collection #{collection.name} >>" if new_packages.count > 0 + logger.info "<< Found #{new_packages.count} new packages for collection #{collection.name} >>" if new_packages.count > 0 # Extract package and process new_packages.each do |package| begin diff --git a/lib/avalon/batch/manifest.rb b/lib/avalon/batch/manifest.rb index 31d3c4949f..dd78e483bd 100644 --- a/lib/avalon/batch/manifest.rb +++ b/lib/avalon/batch/manifest.rb @@ -1,11 +1,11 @@ # Copyright 2011-2017, The Trustees of Indiana University and Northwestern # University. Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. -# +# # You may obtain a copy of the License at -# +# # http://www.apache.org/licenses/LICENSE-2.0 -# +# # Unless required by applicable law or agreed to in writing, software distributed # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR # CONDITIONS OF ANY KIND, either express or implied. See the License for the @@ -28,35 +28,30 @@ class Manifest attr_reader :spreadsheet, :file, :name, :email, :entries, :package class << self - def locate(root) - possibles = Dir[File.join(root, "**/*.{#{EXTENSIONS.join(',')}}")] - possibles.reject do |file| - File.basename(file) =~ /^~\$/ or self.error?(file) or self.processing?(file) or self.processed?(file) - end + def concrete_class=(value) + raise ArgumentError, "#{value} is not a #{self.name}" unless self.descendants.include?(value) + @concrete_class = value end - def error?(file) - if File.file?("#{file}.error") - if File.mtime(file) > File.mtime("#{file}.error") - File.unlink("#{file}.error") - return false - else - return true - end - end - return false + def concrete_class + @concrete_class ||= FileManifest end - def processing?(file) - File.file?("#{file}.processing") + def is_spreadsheet?(file) + EXTENSIONS.include?(file.split(/\./).last.downcase) end - def processed?(file) - File.file?("#{file}.processed") + def load(*args) + concrete_class.new(*args) + end + + def locate(root) + concrete_class.locate(root) end end def initialize(file, package) + raise "#{self.class.name} is an abstract class. Please set #concrete_class and use #load()" unless self.respond_to?(:start!) @file = file @package = package load! @@ -65,14 +60,14 @@ def initialize(file, package) def load! @entries = [] begin - @spreadsheet = Roo::Spreadsheet.open(file) + @spreadsheet = Roo::Spreadsheet.open(FileLocator.new(file).location) @name = @spreadsheet.row(@spreadsheet.first_row)[0] @email = @spreadsheet.row(@spreadsheet.first_row)[1] header_row = @spreadsheet.row(@spreadsheet.first_row + 1) - @field_names = header_row.collect { |field| - field.to_s.downcase.gsub(/\s/,'_').strip.to_sym + @field_names = header_row.collect { |field| + field.to_s.downcase.gsub(/\s/,'_').strip.to_sym } create_entries! 
rescue Exception => err @@ -80,35 +75,6 @@ def load! end end - def start! - File.open("#{@file}.processing",'w') { |f| f.puts Time.now.xmlschema } - end - - def error! msg=nil - File.open("#{@file}.error",'a') do |f| - if msg.nil? - entries.each do |entry| - if entry.errors.count > 0 - f.puts "Row #{entry.row}:" - entry.errors.messages.each { |k,m| f.puts %{ #{m.join("\n ")}} } - end - end - else - f.puts msg - end - end - rollback! if processing? - end - - def rollback! - File.unlink("#{@file}.processing") - end - - def commit! - File.open("#{@file}.processed",'w') { |f| f.puts Time.now.xmlschema } - rollback! if processing? - end - def error? result = self.class.error?(@file) load! unless result @@ -133,9 +99,9 @@ def true?(value) end def create_entries! - f = @spreadsheet.first_row + 2 - l = @spreadsheet.last_row - f.upto(l) do |index| + first = @spreadsheet.first_row + 2 + last = @spreadsheet.last_row + first.upto(last) do |index| opts = { :publish => false, :hidden => false @@ -147,13 +113,13 @@ def create_entries! content=[] fields = Hash.new { |h,k| h[k] = [] } - @field_names.each_with_index do |f,i| + @field_names.each_with_index do |f,i| unless f.blank? || SKIP_FIELDS.include?(f) || values[i].blank? if FILE_FIELDS.include?(f) content << {} if f == :file content.last[f] = f == :skip_transcoding ? true?(values[i]) : values[i] else - fields[f] << values[i] + fields[f] << values[i] end end end diff --git a/lib/avalon/batch/package.rb b/lib/avalon/batch/package.rb index 8813ac2efd..5395ab9b95 100644 --- a/lib/avalon/batch/package.rb +++ b/lib/avalon/batch/package.rb @@ -1,11 +1,11 @@ # Copyright 2011-2017, The Trustees of Indiana University and Northwestern # University. Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. -# +# # You may obtain a copy of the License at -# +# # http://www.apache.org/licenses/LICENSE-2.0 -# +# # Unless required by applicable law or agreed to in writing, software distributed # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR # CONDITIONS OF ANY KIND, either express or implied. See the License for the @@ -18,7 +18,7 @@ class Package include Enumerable extend Forwardable - attr_reader :dir, :manifest, :collection + attr_reader :manifest, :collection def_delegators :@manifest, :each def self.locate(root, collection) @@ -26,11 +26,10 @@ def self.locate(root, collection) end def initialize(manifest, collection) - @dir = File.dirname(manifest) - @manifest = Avalon::Batch::Manifest.new(manifest, self) + @manifest = Avalon::Batch::Manifest.load(manifest, self) @collection = collection end - + def title File.basename(@manifest.file) end @@ -41,17 +40,17 @@ def user end def file_list - @manifest.collect { |entry| entry.files }.flatten.collect { |f| File.join(@dir,f[:file]) } + @manifest.collect { |entry| entry.files }.flatten.collect { |f| @manifest.path_to(f[:file]) } end def complete? - file_list.all? { |f| File.file?(f) } + file_list.all? { |f| FileLocator.new(f).exist? 
} end def each_entry @manifest.each_with_index do |entry, index| files = entry.files.dup - files.each { |file| file[:file] = File.join(@dir,file[:file]) } + files.each { |file| file[:file] = @manifest.path_to(file[:file]) } yield(entry.fields, files, entry.opts, entry, index) end end diff --git a/lib/avalon/batch/s3_manifest.rb b/lib/avalon/batch/s3_manifest.rb new file mode 100644 index 0000000000..7116fc6c8b --- /dev/null +++ b/lib/avalon/batch/s3_manifest.rb @@ -0,0 +1,77 @@ +module Avalon + module Batch + class S3Manifest < Manifest + class << self + def locate(root) + root_object = FileLocator::S3File.new(root).object + bucket = root_object.bucket + manifests = bucket.objects(prefix: root_object.key).select do |o| + is_spreadsheet?(o.key) && status(bucket.object(o.key)).blank? + end + manifests.collect { |o| "s3://#{o.bucket_name}/#{o.key}" } + end + + def status(file) + case file + when Aws::S3::Object then file.metadata['batch-status'] + else FileLocator::S3File.new(file.to_s).object.metadata['batch-status'] + end + end + + def status?(file, status) + status(file) == status + end + def error?(file) ; status?(file, 'error') ; end + def processing?(file) ; status?(file, 'processing') ; end + def processed?(file) ; status?(file, 'processed') ; end + + def status!(file, status) + obj = FileLocator::S3File.new(file).object + obj.copy_to( + bucket: obj.bucket_name, + key: obj.key, + content_type: obj.content_type, + metadata: obj.metadata.merge('batch-status'=>status), + metadata_directive: 'REPLACE' + ) + end + end + + def initialize(*args) + super + end + + def commit! ; self.class.status!(file, 'processed') ; end + def start! ; self.class.status!(file, 'processing') ; end + + def error!(msg=nil) + begin + error_obj = FileLocator::S3File.new("#{file}.error").object + io = StringIO.new + if msg.nil? + entries.each do |entry| + if entry.errors.count > 0 + io.puts "Row #{entry.row}:" + entry.errors.messages.each { |k,m| io.puts %{ #{m.join("\n ")}} } + end + end + else + io.puts msg + end + io.rewind + error_obj.put(body: io) + ensure + self.class.status!(file, 'error') + end + end + + def path_to(f) + FileLocator.new(file).uri.join(f).to_s + end + + def retrieve(f) + FileLocator::S3File.new(f).object.get.body + end + end + end +end diff --git a/lib/avalon/bib_retriever.rb b/lib/avalon/bib_retriever.rb index 56e4a1be3b..4c50f30fa9 100644 --- a/lib/avalon/bib_retriever.rb +++ b/lib/avalon/bib_retriever.rb @@ -1,22 +1,23 @@ # Copyright 2011-2017, The Trustees of Indiana University and Northwestern # University. Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. -# +# # You may obtain a copy of the License at -# +# # http://www.apache.org/licenses/LICENSE-2.0 -# +# # Unless required by applicable law or agreed to in writing, software distributed # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR # CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. # --- END LICENSE_HEADER BLOCK --- +require 'marc' module Avalon class BibRetriever XSLT_FILE = File.expand_path('../bib_retriever/MARC21slim2MODS3-5-avalon.xsl',__FILE__) attr_reader :config - + class << self def configured? begin @@ -25,13 +26,13 @@ def configured? 
false end end - + def instance - config = Avalon::Configuration.lookup('bib_retriever') + config = Settings.bib_retriever unless config.respond_to?(:[]) and config['protocol'].present? - raise ArgumentError, "Missing/invalid bib retriever configuration" + raise ArgumentError, "Missing/invalid bib retriever configuration" end - + case config['protocol'].downcase when 'sru', /^yaz/ require 'avalon/bib_retriever/sru' @@ -43,10 +44,10 @@ def instance raise ArgumentError, "Unknown bib retriever protocol: #{config['protocol']}" end end - + protected :new, :allocate end - + def initialize config @config = config end @@ -57,7 +58,7 @@ def marcxml2mods(marcxml) mods = xsl.transform(doc) mods.to_s end - + def marc2mods(marc) record = MARC::Reader.decode marc marcxml = record.to_xml.to_s diff --git a/lib/avalon/bib_retriever/zoom.rb b/lib/avalon/bib_retriever/zoom.rb index 9de2e7ee5a..68dbafcb3c 100644 --- a/lib/avalon/bib_retriever/zoom.rb +++ b/lib/avalon/bib_retriever/zoom.rb @@ -1,27 +1,28 @@ # Copyright 2011-2017, The Trustees of Indiana University and Northwestern # University. Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. -# +# # You may obtain a copy of the License at -# +# # http://www.apache.org/licenses/LICENSE-2.0 -# +# # Unless required by applicable law or agreed to in writing, software distributed # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR # CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. # --- END LICENSE_HEADER BLOCK --- +require 'zoom' module Avalon class BibRetriever class Zoom < ::Avalon::BibRetriever - + def initialize config super @config['port'] ||= 7090 @config['attribute'] ||= 7 end - + def get_record(bib_id) record = nil ZOOM::Connection.open(config['host'], config['port']) do |conn| diff --git a/lib/avalon/configuration.rb b/lib/avalon/configuration.rb index 4bd1bf750b..3980761c90 100644 --- a/lib/avalon/configuration.rb +++ b/lib/avalon/configuration.rb @@ -1,11 +1,11 @@ # Copyright 2011-2017, The Trustees of Indiana University and Northwestern # University. Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. -# +# # You may obtain a copy of the License at -# +# # http://www.apache.org/licenses/LICENSE-2.0 -# +# # Unless required by applicable law or agreed to in writing, software distributed # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR # CONDITIONS OF ANY KIND, either express or implied. 
See the License for the @@ -15,174 +15,9 @@ module Avalon class Config - DEFAULT_CONFIGURATION = { - "dropbox"=>{}, - "fedora"=>{"namespace"=>"avalon", "base_path"=>""}, - "ffmpeg"=>{"path"=>"/usr/local/bin/ffmpeg"}, - "mediainfo"=>{"path"=>"/usr/local/bin/mediainfo"}, - "email"=>{}, - "streaming"=>{ - "server"=>:generic, - "content_path"=>File.join(Rails.root,"red5/webapps/avalon/streams"), - "rtmp_base"=>"rtmp://localhost/avalon/", - "http_base"=>"http://localhost:3000/streams/", - "stream_token_ttl"=>20, - "default_quality"=>'low' - }, - 'controlled_vocabulary' => {'path'=>'config/controlled_vocabulary.yml'}, - 'master_file_management' => {'strategy'=>'none'} - } - - def initialize - @config = DEFAULT_CONFIGURATION.deep_merge(load_configuration) - ['dropbox','matterhorn','mediainfo','email','streaming'].each { |key| @config[key] ||= {} } - begin - mipath = lookup('mediainfo.path') - Mediainfo.path = mipath unless mipath.blank? - rescue Exception => e - logger.fatal "Initialization failed" - logger.fatal e.backtrace - raise - end - end - - def config_file - Rails.root.join('config', 'avalon.yml') - end - - def load_configuration - if File.exists?(config_file) - load_configuration_from_file - else - load_configuration_from_environment - end - end - - ENVIRONMENT_MAP = { - "BASE_URL" => { key: 'domain', read_proc: ->(v){read_avalon_url(v)}, write_proc: ->(v){write_avalon_url(v)} }, - "DROPBOX_PATH" => { key: "dropbox.path" }, - "DROPBOX_URI" => { key: "dropbox.upload_uri" }, - "FEDORA_BASE_PATH" => { key: "fedora.base_path" }, - "FEDORA_NAMESPACE" => { key: "fedora.namespace" }, - "FFMPEG_PATH" => { key: "ffmpeg.path" }, - "MEDIA_PATH" => { key: "matterhorn.media_path" }, - "MEDIAINFO_PATH" => { key: "mediainfo.path" }, - "EMAIL_COMMENTS" => { key: "email.comments" }, - "EMAIL_NOTIFICATION" => { key: "email.notification" }, - "EMAIL_SUPPORT" => { key: "email.support" }, - "SMTP_ADDRESS" => { key: "email.mailer.smtp.address" }, - "SMTP_PORT" => { key: "email.mailer.smtp.port", read_proc: ->(v){coerce(v, :to_i)} }, - "SMTP_DOMAIN" => { key: "email.mailer.smtp.domain" }, - "SMTP_USER_NAME" => { key: "email.mailer.smtp.user_name" }, - "SMTP_PASSWORD" => { key: "email.mailer.smtp.password" }, - "SMTP_AUTHENTICATION" => { key: "email.mailer.smtp.authentication" }, - "SMTP_ENABLE_STARTTLS_AUTO" => { key: "email.mailer.smtp.enable_starttls_auto" }, - "SMTP_OPENSSL_VERIFY_MODE" => { key: "email.mailer.smtp.openssl_verify_mode" }, - "SRU_URL" => { key: "bib_retriever.url", infer: { key: 'bib_retriever.protocol', value: 'sru' } }, - "SRU_QUERY" => { key: "bib_retriever.query" }, - "SRU_NAMESPACE" => { key: "bib_retriever.namespace" }, - "STREAM_SERVER" => { key: "streaming.server" }, - "STREAM_BASE" => { key: "streaming.content_path" }, - "STREAM_RTMP_BASE" => { key: "streaming.rtmp_base" }, - "STREAM_HTTP_BASE" => { key: "streaming.http_base" }, - "STREAM_TOKEN_TTL" => { key: "streaming.stream_token_ttl", read_proc: ->(v){coerce(v, :to_i)} }, - "STREAM_DEFAULT_QUALITY" => { key: "streaming.default_quality" }, - "CONTROLLED_VOCABULARY" => { key: "controlled_vocabulary.path" }, - "MASTER_FILE_STRATEGY" => { key: "master_file_management.strategy" }, - "MASTER_FILE_PATH" => { key: "master_file_management.path" }, - "APP_NAME" => { key: "name" }, - "SYSTEM_GROUPS" => { key: "groups.system_groups", read_proc: ->(v){v.to_s.split(/[,;:]\s*/)}, write_proc: ->(v){v.join(',')} }, - "Z3950_HOST" => { key: "bib_retriever.host", infer: { key: 'bib_retriever.protocol', value: 'zoom' } }, - "Z3950_PORT" => { key: 
"bib_retriever.port", read_proc: ->(v){coerce(v, :to_i)} }, - "Z3950_DATABASE" => { key: "bib_retriever.database" }, - "Z3950_ATTRIBUTE" => { key: "bib_retriever.attribute", read_proc: ->(v){coerce(v, :to_i)} } - } - - ENV.keys.select { |k| k =~ /^AVALON_/ }.each do |key| - ENV[key.split(/_/,2).last] = ENV[key] - end - - - def set(key, value, hash=@config_hash) - this_key,sub_key = key.split(/\./,2) - if sub_key.nil? - hash[this_key] = value - else - hash[this_key] ||= {} - set(sub_key,value,hash[this_key]) - end - hash - end - - def from_env - config_hash = {} - ENVIRONMENT_MAP.each_pair do |key,spec| - val = ENV[key] - val = spec[:read_proc].call(val) unless spec[:read_proc].nil? - if val.present? - set(spec[:key],val,config_hash) - end - if spec[:infer] - set(spec[:infer][:key],spec[:infer][:value],config_hash) - end - end - config_hash - end - - def to_env - result = {} - ENVIRONMENT_MAP.each_pair do |key,spec| - next if spec[:infer] and lookup(spec[:infer][:key]) != spec[:infer][:value] - val = lookup(spec[:key]) - val = spec[:write_proc].call(val) unless spec[:write_proc].nil? - result[key] = val unless val.nil? - end - - url = URI.parse(ActiveFedora.fedora_config.credentials[:url]) - url.user = ActiveFedora.fedora_config.credentials[:user] - url.password = ActiveFedora.fedora_config.credentials[:password] - result['FEDORA_URL'] = url.to_s - result['FEDORA_BASE_PATH'] = ActiveFedora.fedora_config.credentials[:base_path] - - begin - url = URI.parse(Rubyhorn.config_for_environment[:url]) - url.user = Rubyhorn.config_for_environment[:user] - url.password = Rubyhorn.config_for_environment[:password] - result['MATTERHORN_URL'] = url.to_s - rescue NameError, LoadError - end - - result['SOLR_URL'] = ActiveFedora.solr_config[:url] - - config = ActiveRecord::Base.connection_config - path_key = config[:database].starts_with?('/') ? :path : :opaque - url = Addressable::URI.parse(URI::Generic.build(scheme: config[:adapter], host: config[:host], user: config[:username], password: config[:password], port: config[:port], path_key => config[:database])) - url.query_values = config.reject { |k,v| [:adapter,:host,:username,:password,:port,:database].include?(k) } - result['DATABASE_URL'] = url.to_s - - result['SECRET_KEY_BASE'] = Avalon::Application.config.secret_key_base - result.collect { |key,val| [key,val.to_s.inspect].join('=') }.join("\n") - end - - def load_configuration_from_environment - deep_compact(from_env) - end - - def load_configuration_from_file - env = ENV['RAILS_ENV'] || 'development' - YAML::load(File.read(config_file))[env] - end - - def lookup(*path) - path = path.first.split(/\./) if path.length == 1 - path.inject(@config) do |location, key| - location.respond_to?(:keys) ? location[key] : nil - end - end - def rehost(url, host=nil) if host.present? - url.sub(%r{/localhost([/:])},"/#{host}\\1") + url.sub(%r{/localhost([/:])},"/#{host}\\1") else url end @@ -192,24 +27,24 @@ def method_missing(sym, *args, &block) super(sym, *args, &block) unless @config.respond_to?(sym) @config.send(sym, *args, &block) end - + private class << self def coerce(value, method) value.nil? ? nil : value.send(method) end - + def read_avalon_url(v) return({}) if v.nil? 
avalon_url = URI.parse(v) { 'host'=>avalon_url.host, 'port'=>avalon_url.port, 'protocol'=>avalon_url.scheme } end - + def write_avalon_url(v) URI::Generic.build(scheme: v.fetch('protocol','http'), host: v['host'], port: v['port']).to_s end end - + def deep_compact(value) if value.is_a?(Hash) new_value = value.dup diff --git a/lib/avalon/controlled_vocabulary.rb b/lib/avalon/controlled_vocabulary.rb index ad77231219..e265e034f7 100644 --- a/lib/avalon/controlled_vocabulary.rb +++ b/lib/avalon/controlled_vocabulary.rb @@ -15,7 +15,7 @@ module Avalon class ControlledVocabulary - @@path = Rails.root.join(Avalon::Configuration.lookup('controlled_vocabulary.path')) + @@path = Rails.root.join(Settings.controlled_vocabulary.path) def self.vocabulary vocabulary = {} diff --git a/lib/avalon/stream_mapper.rb b/lib/avalon/stream_mapper.rb index 1e5548e6b3..78bbf07bed 100644 --- a/lib/avalon/stream_mapper.rb +++ b/lib/avalon/stream_mapper.rb @@ -1,11 +1,11 @@ # Copyright 2011-2017, The Trustees of Indiana University and Northwestern # University. Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. -# +# # You may obtain a copy of the License at -# +# # http://www.apache.org/licenses/LICENSE-2.0 -# +# # Unless required by applicable law or agreed to in writing, software distributed # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR # CONDITIONS OF ANY KIND, either express or implied. See the License for the @@ -16,24 +16,24 @@ module Avalon class DefaultStreamMapper Detail = Struct.new(:rtmp_base, :http_base, :path, :filename, :extension) { def get_binding; binding; end } - + attr_accessor :streaming_server - + def initialize - @streaming_server = Avalon::Configuration.lookup('streaming.server') + @streaming_server = Settings.streaming.server @handler_config = YAML.load(File.read(Rails.root.join('config/url_handlers.yml'))) end - + def url_handler @handler_config[self.streaming_server.to_s] end def base_url_for(path, protocol) - Avalon::Configuration.lookup("streaming.#{protocol}_base") + Settings.streaming["#{protocol}_base"] end def stream_details_for(path) - content_path = Pathname.new(Avalon::Configuration.lookup("streaming.content_path")) + content_path = Pathname.new(Settings.streaming.content_path) p = Pathname.new(path).relative_path_from(content_path) Detail.new(base_url_for(path,'rtmp'),base_url_for(path,'http'),p.dirname,p.basename(p.extname),p.extname[1..-1]) end @@ -43,6 +43,6 @@ def map(path, protocol, format) template.result(stream_details_for(path).get_binding) end end - + StreamMapper = DefaultStreamMapper.new end diff --git a/lib/avalon/variations_mapping_service.rb b/lib/avalon/variations_mapping_service.rb index e8bb13a3a2..b2a327d34d 100644 --- a/lib/avalon/variations_mapping_service.rb +++ b/lib/avalon/variations_mapping_service.rb @@ -17,7 +17,7 @@ module Avalon # A tool for mapping from a variations id to an Avalon object class VariationsMappingService - MEDIA_OBJECT_ID_MAP = YAML.load_file(Avalon::Configuration['variations']['media_object_id_map_file']).freeze rescue {} + MEDIA_OBJECT_ID_MAP = YAML.load_file(Settings.variations.media_object_id_map_file).freeze rescue {} def find_master_file(variations_media_object_id) raise ArgumentError, 'Not a valid Variations Media Object ID' unless variations_media_object_id =~ %r{/MediaObject/} diff --git a/lib/tasks/zookeeper.rake b/lib/tasks/zookeeper.rake new file mode 100644 index 0000000000..bfcc50b272 --- /dev/null +++ 
b/lib/tasks/zookeeper.rake @@ -0,0 +1,18 @@ +# Taken from https://github.com/projecthydra-labs/hyku/blob/master/lib/tasks/zookeeper.rake +namespace :zookeeper do + desc 'Push solr configs into zookeeper' + task upload: [:environment] do + SolrConfigUploader.default.upload(Settings.solr.configset_source_path) + end + + desc 'Create a collection' + task create: [:environment] do + collection_name = Blacklight.default_index.connection.uri.path.split(/\//).reject(&:empty?).last + SolrCollectionCreator.new(collection_name).perform + end + + desc 'Delete solr configs from zookeeper' + task delete_all: [:environment] do + SolrConfigUploader.default.delete_all + end +end diff --git a/spec/controllers/media_objects_controller_spec.rb b/spec/controllers/media_objects_controller_spec.rb index 2cc6785335..c411ccb916 100644 --- a/spec/controllers/media_objects_controller_spec.rb +++ b/spec/controllers/media_objects_controller_spec.rb @@ -223,7 +223,7 @@ expect(new_media_object.workflow.last_completed_step).to eq([HYDRANT_STEPS.last.step]) end it "should create a new media_object with successful bib import" do - Avalon::Configuration['bib_retriever'] = { 'protocol' => 'sru', 'url' => 'http://zgate.example.edu:9000/db' } + Settings.bib_retriever = { 'protocol' => 'sru', 'url' => 'http://zgate.example.edu:9000/db' } stub_request(:get, sru_url).to_return(body: sru_response) fields = { bibliographic_id: bib_id } post 'create', format: 'json', import_bib_record: true, fields: fields, files: [master_file], collection_id: collection.id @@ -233,7 +233,7 @@ expect(new_media_object.title).to eq('245 A : B F G K N P S') end it "should create a new media_object with supplied fields when bib import fails" do - Avalon::Configuration['bib_retriever'] = { 'protocol' => 'sru', 'url' => 'http://zgate.example.edu:9000/db' } + Settings.bib_retriever = { 'protocol' => 'sru', 'url' => 'http://zgate.example.edu:9000/db' } stub_request(:get, sru_url).to_return(body: nil) ex_media_object = FactoryGirl.create(:media_object) fields = {} @@ -269,7 +269,7 @@ expect(new_media_object.copyright_date).to eq nil end it "should merge supplied other identifiers after bib import" do - Avalon::Configuration['bib_retriever'] = { 'protocol' => 'sru', 'url' => 'http://zgate.example.edu:9000/db' } + Settings.bib_retriever = { 'protocol' => 'sru', 'url' => 'http://zgate.example.edu:9000/db' } stub_request(:get, sru_url).to_return(body: sru_response) fields = { bibliographic_id: bib_id, other_identifier_type: ['other'], other_identifier: ['12345'] } post 'create', format: 'json', import_bib_record: true, fields: fields, files: [master_file], collection_id: collection.id diff --git a/spec/controllers/vocabulary_controller_spec.rb b/spec/controllers/vocabulary_controller_spec.rb index fa60f55929..fdfc0a06d9 100644 --- a/spec/controllers/vocabulary_controller_spec.rb +++ b/spec/controllers/vocabulary_controller_spec.rb @@ -19,12 +19,12 @@ render_views before(:all) { - FileUtils.cp_r Avalon::Configuration.lookup('controlled_vocabulary.path'), 'spec/fixtures/controlled_vocabulary.yml.tmp' + FileUtils.cp_r Settings.controlled_vocabulary.path, 'spec/fixtures/controlled_vocabulary.yml.tmp' Avalon::ControlledVocabulary.class_variable_set :@@path, Rails.root.join('spec/fixtures/controlled_vocabulary.yml.tmp') } after(:all) { File.delete('spec/fixtures/controlled_vocabulary.yml.tmp') - Avalon::ControlledVocabulary.class_variable_set :@@path, Rails.root.join(Avalon::Configuration.lookup('controlled_vocabulary.path')) + 
Avalon::ControlledVocabulary.class_variable_set :@@path, Rails.root.join(Settings.controlled_vocabulary.path) } before(:each) do diff --git a/spec/helpers/media_objects_helper_spec.rb b/spec/helpers/media_objects_helper_spec.rb index 514147891d..d827a3b33c 100644 --- a/spec/helpers/media_objects_helper_spec.rb +++ b/spec/helpers/media_objects_helper_spec.rb @@ -1,11 +1,11 @@ # Copyright 2011-2017, The Trustees of Indiana University and Northwestern # University. Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. -# +# # You may obtain a copy of the License at -# +# # http://www.apache.org/licenses/LICENSE-2.0 -# +# # Unless required by applicable law or agreed to in writing, software distributed # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR # CONDITIONS OF ANY KIND, either express or implied. See the License for the @@ -17,11 +17,11 @@ describe MediaObjectsHelper do describe "#current_quality" do before(:all) do - @streaming_config = Avalon::Configuration["streaming"] - Avalon::Configuration["streaming"] = {"default_quality" => "low"} + @streaming_config = Settings.streaming + Settings.streaming.default_quality = "low" end after(:all) do - Avalon::Configuration["streaming"] = @streaming_config + Settings.streaming = @streaming_config end let(:stream_info) {{stream_flash: [{quality: 'high'}, {quality: 'medium'}, {quality: 'low'}], stream_hls: [{quality: 'high'}, {quality: 'medium'}, {quality: 'low'}]}} let(:skip_transcoded_stream_info) {{stream_flash: [{quality: 'high'}], stream_hls: [{quality: 'high'}]}} diff --git a/spec/lib/avalon/batch_ingest_spec.rb b/spec/lib/avalon/batch_ingest_spec.rb index 3242eed0e7..1bc988ce13 100644 --- a/spec/lib/avalon/batch_ingest_spec.rb +++ b/spec/lib/avalon/batch_ingest_spec.rb @@ -19,9 +19,9 @@ describe Avalon::Batch::Ingest do before :each do - @saved_dropbox_path = Avalon::Configuration.lookup('dropbox.path') - Avalon::Configuration['dropbox']['path'] = 'spec/fixtures/dropbox' - Avalon::Configuration['email']['notification'] = 'frances.dickens@reichel.com' + @saved_dropbox_path = Settings.dropbox.path + Settings.dropbox.path = File.join(Rails.root, 'spec/fixtures/dropbox') + Settings.email.notification = 'frances.dickens@reichel.com' # Dirty hack is to remove the .processed files both before and after the # test. 
Need to look closer into the ideal timing for where this should take # place @@ -36,7 +36,7 @@ end after :each do - Avalon::Configuration['dropbox']['path'] = @saved_dropbox_path + Settings.dropbox.path = @saved_dropbox_path Dir['spec/fixtures/**/*.xlsx.process*','spec/fixtures/**/*.xlsx.error'].each { |file| File.delete(file) } Avalon::RoleControls.remove_user_role('frances.dickens@reichel.com','manager') Avalon::RoleControls.remove_user_role('jay@krajcik.org','manager') @@ -56,7 +56,7 @@ before :each do @dropbox_dir = collection.dropbox.base_directory FileUtils.cp_r 'spec/fixtures/dropbox/example_batch_ingest', @dropbox_dir - Avalon::Configuration['bib_retriever'] = { 'protocol' => 'sru', 'url' => 'http://zgate.example.edu:9000/db' } + Settings.bib_retriever = { 'protocol' => 'sru', 'url' => 'http://zgate.example.edu:9000/db' } stub_request(:get, sru_url).to_return(body: sru_response) manifest_file = File.join(@dropbox_dir,'example_batch_ingest','batch_manifest.xlsx') batch = Avalon::Batch::Package.new(manifest_file, collection) diff --git a/spec/lib/avalon/bib_retriever_spec.rb b/spec/lib/avalon/bib_retriever_spec.rb index 953f442037..66213d3f7a 100644 --- a/spec/lib/avalon/bib_retriever_spec.rb +++ b/spec/lib/avalon/bib_retriever_spec.rb @@ -21,7 +21,7 @@ describe 'configured?' do before :each do - Avalon::Configuration['bib_retriever'] = { 'protocol' => 'sru', 'url' => 'http://zgate.example.edu:9000/db' } + Settings.bib_retriever = { 'protocol' => 'sru', 'url' => 'http://zgate.example.edu:9000/db' } end it 'valid' do @@ -29,12 +29,12 @@ end it 'invalid' do - Avalon::Configuration['bib_retriever'] = { 'protocol' => 'unknown', 'url' => 'http://zgate.example.edu:9000/db' } + Settings.bib_retriever = { 'protocol' => 'unknown', 'url' => 'http://zgate.example.edu:9000/db' } expect(Avalon::BibRetriever).not_to be_configured end it 'missing' do - Avalon::Configuration['bib_retriever'] = nil + Settings.bib_retriever = nil expect(Avalon::BibRetriever).not_to be_configured end end @@ -47,7 +47,7 @@ let!(:request) { stub_request(:get, sru_url).to_return(body: sru_response) } before :each do - Avalon::Configuration['bib_retriever'] = { 'protocol' => 'sru', 'url' => 'http://zgate.example.edu:9000/db' } + Settings.bib_retriever = { 'protocol' => 'sru', 'url' => 'http://zgate.example.edu:9000/db' } end it 'retrieves proper MODS' do @@ -62,7 +62,7 @@ let!(:request) { stub_request(:get, sru_url).to_return(body: sru_response) } before :each do - Avalon::Configuration['bib_retriever'] = { 'protocol' => 'sru', 'url' => 'http://zgate.example.edu:9000/db', 'namespace' => 'http://example.edu/fake/sru/namespace/' } + Settings.bib_retriever = { 'protocol' => 'sru', 'url' => 'http://zgate.example.edu:9000/db', 'namespace' => 'http://example.edu/fake/sru/namespace/' } end it 'retrieves proper MODS' do diff --git a/spec/lib/avalon/dropbox_spec.rb b/spec/lib/avalon/dropbox_spec.rb index 1eddc94878..5df9454a48 100644 --- a/spec/lib/avalon/dropbox_spec.rb +++ b/spec/lib/avalon/dropbox_spec.rb @@ -24,7 +24,7 @@ Avalon::RoleControls.add_user_role('frances.dickens@reichel.com','manager') end let(:collection) { FactoryGirl.create(:collection, name: 'Ut minus ut accusantium odio autem odit.', managers: ['frances.dickens@reichel.com']) } - subject { Avalon::Dropbox.new(Avalon::Configuration.lookup('dropbox.path'),collection) } + subject { Avalon::Dropbox.new(Settings.dropbox.path,collection) } it 'returns true if the file is found' do allow(File).to receive(:delete).and_return true subject.delete('some_file.mov') diff 
--git a/spec/models/collection_spec.rb b/spec/models/collection_spec.rb index 71b456ac99..8b656c577c 100644 --- a/spec/models/collection_spec.rb +++ b/spec/models/collection_spec.rb @@ -513,7 +513,7 @@ it 'removes bad characters from collection name' do collection.name = '../../secret.rb' - expect(Dir).to receive(:mkdir).with( File.join(Avalon::Configuration.lookup('dropbox.path'), '______secret_rb') ) + expect(Dir).to receive(:mkdir).with( File.join(Settings.dropbox.path, '______secret_rb') ) allow(Dir).to receive(:mkdir) # stubbing this out in a before(:each) block will effect where mkdir is used elsewhere (i.e. factories) collection.send(:create_dropbox_directory!) end @@ -526,9 +526,9 @@ it 'uses a different directory name if the directory exists' do collection.name = 'african art' FakeFS.activate! - FileUtils.mkdir_p(File.join(Avalon::Configuration.lookup('dropbox.path'), 'african_art')) - FileUtils.mkdir_p(File.join(Avalon::Configuration.lookup('dropbox.path'), 'african_art_2')) - expect(Dir).to receive(:mkdir).with(File.join(Avalon::Configuration.lookup('dropbox.path'), 'african_art_3')) + FileUtils.mkdir_p(File.join(Settings.dropbox.path, 'african_art')) + FileUtils.mkdir_p(File.join(Settings.dropbox.path, 'african_art_2')) + expect(Dir).to receive(:mkdir).with(File.join(Settings.dropbox.path, 'african_art_3')) collection.send(:create_dropbox_directory!) FakeFS.deactivate! end @@ -541,7 +541,7 @@ it 'handles Unicode collection names correctly' do collection.name = collection_name - expect(Dir).to receive(:mkdir).with( File.join(Avalon::Configuration.lookup('dropbox.path'), collection_dir) ) + expect(Dir).to receive(:mkdir).with( File.join(Settings.dropbox.path, collection_dir) ) allow(Dir).to receive(:mkdir) collection.send(:create_dropbox_directory!) end diff --git a/spec/models/derivative_spec.rb b/spec/models/derivative_spec.rb index 67d2f52bcb..4b3e729c60 100644 --- a/spec/models/derivative_spec.rb +++ b/spec/models/derivative_spec.rb @@ -66,9 +66,9 @@ end describe "streaming" do - let(:rtmp_base) { Avalon::Configuration.lookup('streaming.rtmp_base') } - let(:http_base) { Avalon::Configuration.lookup('streaming.http_base') } - let(:root) { Avalon::Configuration.lookup('streaming.content_path') } + let(:rtmp_base) { Settings.streaming.rtmp_base } + let(:http_base) { Settings.streaming.http_base } + let(:root) { Settings.streaming.content_path } let(:location) { "file://#{root}/c5e0f8b8-3f69-40de-9524-604f03b5f867/8c871d4b-a9a6-4841-8e2a-dd98cf2ee625/content.mp4" } let(:audio_derivative) { Derivative.new(audio_codec: 'AAC').tap { |d| d.absolute_location = location } } let(:video_derivative) { Derivative.new(video_codec: 'AVC').tap { |d| d.absolute_location = location } } diff --git a/spec/models/group_spec.rb b/spec/models/group_spec.rb index 1e6981a188..d0f29014cb 100644 --- a/spec/models/group_spec.rb +++ b/spec/models/group_spec.rb @@ -27,7 +27,7 @@ describe "non system groups" do it "should not have system groups" do groups = Admin::Group.non_system_groups - system_groups = Avalon::Configuration.lookup('groups.system_groups') + system_groups = Settings.groups.system_groups groups.each { |g| expect(system_groups).not_to include g.name } end end diff --git a/spec/models/master_file_spec.rb b/spec/models/master_file_spec.rb index 48959d81b6..29cefa2345 100644 --- a/spec/models/master_file_spec.rb +++ b/spec/models/master_file_spec.rb @@ -1,11 +1,11 @@ # Copyright 2011-2017, The Trustees of Indiana University and Northwestern # University. 
Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. -# +# # You may obtain a copy of the License at -# +# # http://www.apache.org/licenses/LICENSE-2.0 -# +# # Unless required by applicable law or agreed to in writing, software distributed # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR # CONDITIONS OF ANY KIND, either express or implied. See the License for the @@ -302,24 +302,24 @@ } before(:each) do - @old_media_path = Avalon::Configuration.lookup('matterhorn.media_path') + @old_media_path = Settings.matterhorn.media_path FileUtils.mkdir_p media_path FileUtils.cp fixture, tempfile end after(:each) do - Avalon::Configuration['matterhorn']['media_path'] = @old_media_path + Settings.matterhorn.media_path = @old_media_path File.unlink subject.file_location FileUtils.rm_rf media_path end it "should rename an uploaded file in place" do - Avalon::Configuration['matterhorn'].delete('media_path') + Settings.matterhorn.media_path = nil expect(subject.file_location).to eq(File.realpath(File.join(File.dirname(tempfile),original))) end it "should copy an uploaded file to the media path" do - Avalon::Configuration['matterhorn']['media_path'] = media_path + Settings.matterhorn.media_path = media_path expect(subject.file_location).to eq(File.join(media_path,original)) end end diff --git a/spec/rails_helper.rb b/spec/rails_helper.rb index 1957c7bc91..5a0d64ead3 100644 --- a/spec/rails_helper.rb +++ b/spec/rails_helper.rb @@ -73,19 +73,19 @@ disable_production_minter! # Stub the entire dropbox - Avalon::Configuration['spec'] = { - 'real_dropbox' => Avalon::Configuration.lookup('dropbox.path'), + Settings.spec = { + 'real_dropbox' => Settings.dropbox.path, 'fake_dropbox' => Dir.mktmpdir } - Avalon::Configuration['dropbox']['path'] = Avalon::Configuration.lookup('spec.fake_dropbox') + Settings.dropbox.path = Settings.spec['fake_dropbox'] MasterFile.skip_callback(:save, :after, :update_stills_from_offset!) end config.after :suite do - if Avalon::Configuration.lookup('spec.fake_dropbox') - FileUtils.remove_dir Avalon::Configuration.lookup('spec.fake_dropbox'), true - Avalon::Configuration['dropbox']['path'] = Avalon::Configuration.lookup('spec.real_dropbox') - Avalon::Configuration.delete('spec') + if Settings.spec['fake_dropbox'] + FileUtils.remove_dir Settings.spec['fake_dropbox'], true + Settings.dropbox.path = Settings.spec['real_dropbox'] + Settings.spec = nil end enable_production_minter! WebMock.allow_net_connect! 
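The spec and library hunks above replace Avalon::Configuration.lookup calls with the config gem's Settings object. As a rough, illustrative sketch only (not part of any patch in this series), the same keys could live in that gem's conventional config/settings.yml; the values below are copied from the DEFAULT_CONFIGURATION hash removed from lib/avalon/configuration.rb, except the dropbox path and content_path entries, which are placeholders. Nested keys are then read as method chains (Settings.dropbox.path, Settings.streaming.default_quality) or hash-style lookups (Settings.streaming['rtmp_base']), exactly as the hunks above do.

# config/settings.yml -- illustrative sketch; location assumed per the config gem's defaults
dropbox:
  path: /var/avalon/dropbox            # placeholder; no shipped default
ffmpeg:
  path: /usr/local/bin/ffmpeg
mediainfo:
  path: /usr/local/bin/mediainfo
streaming:
  server: generic
  content_path: /var/avalon/streams    # placeholder; the removed default joined Rails.root with red5/webapps/avalon/streams
  rtmp_base: rtmp://localhost/avalon/
  http_base: http://localhost:3000/streams/
  stream_token_ttl: 20
  default_quality: low
controlled_vocabulary:
  path: config/controlled_vocabulary.yml
master_file_management:
  strategy: none
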
diff --git a/vendor/assets/javascripts/z.jquery.fileupload.js b/vendor/assets/javascripts/z.jquery.fileupload.js new file mode 100644 index 0000000000..5ff151b539 --- /dev/null +++ b/vendor/assets/javascripts/z.jquery.fileupload.js @@ -0,0 +1,1482 @@ +/* + * jQuery File Upload Plugin + * https://github.com/blueimp/jQuery-File-Upload + * + * Copyright 2010, Sebastian Tschan + * https://blueimp.net + * + * Licensed under the MIT license: + * https://opensource.org/licenses/MIT + */ + +/* jshint nomen:false */ +/* global define, require, window, document, location, Blob, FormData */ + +;(function (factory) { + 'use strict'; + if (typeof define === 'function' && define.amd) { + // Register as an anonymous AMD module: + define([ + 'jquery', + 'jquery-ui/ui/widget' + ], factory); + } else if (typeof exports === 'object') { + // Node/CommonJS: + factory( + require('jquery'), + require('./vendor/jquery.ui.widget') + ); + } else { + // Browser globals: + factory(window.jQuery); + } +}(function ($) { + 'use strict'; + + // Detect file input support, based on + // http://viljamis.com/blog/2012/file-upload-support-on-mobile/ + $.support.fileInput = !(new RegExp( + // Handle devices which give false positives for the feature detection: + '(Android (1\\.[0156]|2\\.[01]))' + + '|(Windows Phone (OS 7|8\\.0))|(XBLWP)|(ZuneWP)|(WPDesktop)' + + '|(w(eb)?OSBrowser)|(webOS)' + + '|(Kindle/(1\\.0|2\\.[05]|3\\.0))' + ).test(window.navigator.userAgent) || + // Feature detection for all other devices: + $('').prop('disabled')); + + // The FileReader API is not actually used, but works as feature detection, + // as some Safari versions (5?) support XHR file uploads via the FormData API, + // but not non-multipart XHR file uploads. + // window.XMLHttpRequestUpload is not available on IE10, so we check for + // window.ProgressEvent instead to detect XHR2 file upload capability: + $.support.xhrFileUpload = !!(window.ProgressEvent && window.FileReader); + $.support.xhrFormDataFileUpload = !!window.FormData; + + // Detect support for Blob slicing (required for chunked uploads): + $.support.blobSlice = window.Blob && (Blob.prototype.slice || + Blob.prototype.webkitSlice || Blob.prototype.mozSlice); + + // Helper function to create drag handlers for dragover/dragenter/dragleave: + function getDragHandler(type) { + var isDragOver = type === 'dragover'; + return function (e) { + e.dataTransfer = e.originalEvent && e.originalEvent.dataTransfer; + var dataTransfer = e.dataTransfer; + if (dataTransfer && $.inArray('Files', dataTransfer.types) !== -1 && + this._trigger( + type, + $.Event(type, {delegatedEvent: e}) + ) !== false) { + e.preventDefault(); + if (isDragOver) { + dataTransfer.dropEffect = 'copy'; + } + } + }; + } + + // The fileupload widget listens for change events on file input fields defined + // via fileInput setting and paste or drop events of the given dropZone. + // In addition to the default jQuery Widget methods, the fileupload widget + // exposes the "add" and "send" methods, to add or directly send files using + // the fileupload API. + // By default, files added via file input selection, paste, drag & drop or + // "add" method are uploaded immediately, but it is possible to override + // the "add" callback option to queue file uploads. + $.widget('blueimp.fileupload', { + + options: { + // The drop target element(s), by the default the complete document. + // Set to null to disable drag & drop support: + dropZone: $(document), + // The paste target element(s), by the default undefined. 
+ // Set to a DOM node or jQuery object to enable file pasting: + pasteZone: undefined, + // The file input field(s), that are listened to for change events. + // If undefined, it is set to the file input fields inside + // of the widget element on plugin initialization. + // Set to null to disable the change listener. + fileInput: undefined, + // By default, the file input field is replaced with a clone after + // each input field change event. This is required for iframe transport + // queues and allows change events to be fired for the same file + // selection, but can be disabled by setting the following option to false: + replaceFileInput: true, + // The parameter name for the file form data (the request argument name). + // If undefined or empty, the name property of the file input field is + // used, or "files[]" if the file input name property is also empty, + // can be a string or an array of strings: + paramName: undefined, + // By default, each file of a selection is uploaded using an individual + // request for XHR type uploads. Set to false to upload file + // selections in one request each: + singleFileUploads: true, + // To limit the number of files uploaded with one XHR request, + // set the following option to an integer greater than 0: + limitMultiFileUploads: undefined, + // The following option limits the number of files uploaded with one + // XHR request to keep the request size under or equal to the defined + // limit in bytes: + limitMultiFileUploadSize: undefined, + // Multipart file uploads add a number of bytes to each uploaded file, + // therefore the following option adds an overhead for each file used + // in the limitMultiFileUploadSize configuration: + limitMultiFileUploadSizeOverhead: 512, + // Set the following option to true to issue all file upload requests + // in a sequential order: + sequentialUploads: false, + // To limit the number of concurrent uploads, + // set the following option to an integer greater than 0: + limitConcurrentUploads: undefined, + // Set the following option to true to force iframe transport uploads: + forceIframeTransport: false, + // Set the following option to the location of a redirect url on the + // origin server, for cross-domain iframe transport uploads: + redirect: undefined, + // The parameter name for the redirect url, sent as part of the form + // data and set to 'redirect' if this option is empty: + redirectParamName: undefined, + // Set the following option to the location of a postMessage window, + // to enable postMessage transport uploads: + postMessage: undefined, + // By default, XHR file uploads are sent as multipart/form-data. + // The iframe transport is always using multipart/form-data. + // Set to false to enable non-multipart XHR uploads: + multipart: true, + // To upload large files in smaller chunks, set the following option + // to a preferred maximum chunk size. If set to 0, null or undefined, + // or the browser does not support the required Blob API, files will + // be uploaded as a whole. + maxChunkSize: undefined, + // When a non-multipart upload or a chunked multipart upload has been + // aborted, this option can be used to resume the upload by setting + // it to the size of the already uploaded bytes. This option is most + // useful when modifying the options object inside of the "add" or + // "send" callbacks, as the options are cloned for each file upload. + uploadedBytes: undefined, + // By default, failed (abort or error) file uploads are removed from the + // global progress calculation. 
Set the following option to false to + // prevent recalculating the global progress data: + recalculateProgress: true, + // Interval in milliseconds to calculate and trigger progress events: + progressInterval: 100, + // Interval in milliseconds to calculate progress bitrate: + bitrateInterval: 500, + // By default, uploads are started automatically when adding files: + autoUpload: true, + + // Error and info messages: + messages: { + uploadedBytes: 'Uploaded bytes exceed file size' + }, + + // Translation function, gets the message key to be translated + // and an object with context specific data as arguments: + i18n: function (message, context) { + message = this.messages[message] || message.toString(); + if (context) { + $.each(context, function (key, value) { + message = message.replace('{' + key + '}', value); + }); + } + return message; + }, + + // Additional form data to be sent along with the file uploads can be set + // using this option, which accepts an array of objects with name and + // value properties, a function returning such an array, a FormData + // object (for XHR file uploads), or a simple object. + // The form of the first fileInput is given as parameter to the function: + formData: function (form) { + return form.serializeArray(); + }, + + // The add callback is invoked as soon as files are added to the fileupload + // widget (via file input selection, drag & drop, paste or add API call). + // If the singleFileUploads option is enabled, this callback will be + // called once for each file in the selection for XHR file uploads, else + // once for each file selection. + // + // The upload starts when the submit method is invoked on the data parameter. + // The data object contains a files property holding the added files + // and allows you to override plugin options as well as define ajax settings. 
+ // + // Listeners for this callback can also be bound the following way: + // .bind('fileuploadadd', func); + // + // data.submit() returns a Promise object and allows to attach additional + // handlers using jQuery's Deferred callbacks: + // data.submit().done(func).fail(func).always(func); + add: function (e, data) { + if (e.isDefaultPrevented()) { + return false; + } + if (data.autoUpload || (data.autoUpload !== false && + $(this).fileupload('option', 'autoUpload'))) { + data.process().done(function () { + data.submit(); + }); + } + }, + + // Other callbacks: + + // Callback for the submit event of each file upload: + // submit: function (e, data) {}, // .bind('fileuploadsubmit', func); + + // Callback for the start of each file upload request: + // send: function (e, data) {}, // .bind('fileuploadsend', func); + + // Callback for successful uploads: + // done: function (e, data) {}, // .bind('fileuploaddone', func); + + // Callback for failed (abort or error) uploads: + // fail: function (e, data) {}, // .bind('fileuploadfail', func); + + // Callback for completed (success, abort or error) requests: + // always: function (e, data) {}, // .bind('fileuploadalways', func); + + // Callback for upload progress events: + // progress: function (e, data) {}, // .bind('fileuploadprogress', func); + + // Callback for global upload progress events: + // progressall: function (e, data) {}, // .bind('fileuploadprogressall', func); + + // Callback for uploads start, equivalent to the global ajaxStart event: + // start: function (e) {}, // .bind('fileuploadstart', func); + + // Callback for uploads stop, equivalent to the global ajaxStop event: + // stop: function (e) {}, // .bind('fileuploadstop', func); + + // Callback for change events of the fileInput(s): + // change: function (e, data) {}, // .bind('fileuploadchange', func); + + // Callback for paste events to the pasteZone(s): + // paste: function (e, data) {}, // .bind('fileuploadpaste', func); + + // Callback for drop events of the dropZone(s): + // drop: function (e, data) {}, // .bind('fileuploaddrop', func); + + // Callback for dragover events of the dropZone(s): + // dragover: function (e) {}, // .bind('fileuploaddragover', func); + + // Callback for the start of each chunk upload request: + // chunksend: function (e, data) {}, // .bind('fileuploadchunksend', func); + + // Callback for successful chunk uploads: + // chunkdone: function (e, data) {}, // .bind('fileuploadchunkdone', func); + + // Callback for failed (abort or error) chunk uploads: + // chunkfail: function (e, data) {}, // .bind('fileuploadchunkfail', func); + + // Callback for completed (success, abort or error) chunk upload requests: + // chunkalways: function (e, data) {}, // .bind('fileuploadchunkalways', func); + + // The plugin options are used as settings object for the ajax calls. + // The following are jQuery ajax settings required for the file uploads: + processData: false, + contentType: false, + cache: false, + timeout: 0 + }, + + // A list of options that require reinitializing event listeners and/or + // special initialization code: + _specialOptions: [ + 'fileInput', + 'dropZone', + 'pasteZone', + 'multipart', + 'forceIframeTransport' + ], + + _blobSlice: $.support.blobSlice && function () { + var slice = this.slice || this.webkitSlice || this.mozSlice; + return slice.apply(this, arguments); + }, + + _BitrateTimer: function () { + this.timestamp = ((Date.now) ? 
Date.now() : (new Date()).getTime()); + this.loaded = 0; + this.bitrate = 0; + this.getBitrate = function (now, loaded, interval) { + var timeDiff = now - this.timestamp; + if (!this.bitrate || !interval || timeDiff > interval) { + this.bitrate = (loaded - this.loaded) * (1000 / timeDiff) * 8; + this.loaded = loaded; + this.timestamp = now; + } + return this.bitrate; + }; + }, + + _isXHRUpload: function (options) { + return !options.forceIframeTransport && + ((!options.multipart && $.support.xhrFileUpload) || + $.support.xhrFormDataFileUpload); + }, + + _getFormData: function (options) { + var formData; + if ($.type(options.formData) === 'function') { + return options.formData(options.form); + } + if ($.isArray(options.formData)) { + return options.formData; + } + if ($.type(options.formData) === 'object') { + formData = []; + $.each(options.formData, function (name, value) { + formData.push({name: name, value: value}); + }); + return formData; + } + return []; + }, + + _getTotal: function (files) { + var total = 0; + $.each(files, function (index, file) { + total += file.size || 1; + }); + return total; + }, + + _initProgressObject: function (obj) { + var progress = { + loaded: 0, + total: 0, + bitrate: 0 + }; + if (obj._progress) { + $.extend(obj._progress, progress); + } else { + obj._progress = progress; + } + }, + + _initResponseObject: function (obj) { + var prop; + if (obj._response) { + for (prop in obj._response) { + if (obj._response.hasOwnProperty(prop)) { + delete obj._response[prop]; + } + } + } else { + obj._response = {}; + } + }, + + _onProgress: function (e, data) { + if (e.lengthComputable) { + var now = ((Date.now) ? Date.now() : (new Date()).getTime()), + loaded; + if (data._time && data.progressInterval && + (now - data._time < data.progressInterval) && + e.loaded !== e.total) { + return; + } + data._time = now; + loaded = Math.floor( + e.loaded / e.total * (data.chunkSize || data._progress.total) + ) + (data.uploadedBytes || 0); + // Add the difference from the previously loaded state + // to the global loaded counter: + this._progress.loaded += (loaded - data._progress.loaded); + this._progress.bitrate = this._bitrateTimer.getBitrate( + now, + this._progress.loaded, + data.bitrateInterval + ); + data._progress.loaded = data.loaded = loaded; + data._progress.bitrate = data.bitrate = data._bitrateTimer.getBitrate( + now, + loaded, + data.bitrateInterval + ); + // Trigger a custom progress event with a total data property set + // to the file size(s) of the current upload and a loaded data + // property calculated accordingly: + this._trigger( + 'progress', + $.Event('progress', {delegatedEvent: e}), + data + ); + // Trigger a global progress event for all current file uploads, + // including ajax calls queued for sequential file uploads: + this._trigger( + 'progressall', + $.Event('progressall', {delegatedEvent: e}), + this._progress + ); + } + }, + + _initProgressListener: function (options) { + var that = this, + xhr = options.xhr ? 
options.xhr() : $.ajaxSettings.xhr(); + // Accesss to the native XHR object is required to add event listeners + // for the upload progress event: + if (xhr.upload) { + $(xhr.upload).bind('progress', function (e) { + var oe = e.originalEvent; + // Make sure the progress event properties get copied over: + e.lengthComputable = oe.lengthComputable; + e.loaded = oe.loaded; + e.total = oe.total; + that._onProgress(e, options); + }); + options.xhr = function () { + return xhr; + }; + } + }, + + _isInstanceOf: function (type, obj) { + // Cross-frame instanceof check + return Object.prototype.toString.call(obj) === '[object ' + type + ']'; + }, + + _initXHRData: function (options) { + var that = this, + formData, + file = options.files[0], + // Ignore non-multipart setting if not supported: + multipart = options.multipart || !$.support.xhrFileUpload, + paramName = $.type(options.paramName) === 'array' ? + options.paramName[0] : options.paramName; + options.headers = $.extend({}, options.headers); + if (options.contentRange) { + options.headers['Content-Range'] = options.contentRange; + } + if (!multipart || options.blob || !this._isInstanceOf('File', file)) { + options.headers['Content-Disposition'] = 'attachment; filename="' + + encodeURI(file.name) + '"'; + } + if (!multipart) { + options.contentType = file.type || 'application/octet-stream'; + options.data = options.blob || file; + } else if ($.support.xhrFormDataFileUpload) { + if (options.postMessage) { + // window.postMessage does not allow sending FormData + // objects, so we just add the File/Blob objects to + // the formData array and let the postMessage window + // create the FormData object out of this array: + formData = this._getFormData(options); + if (options.blob) { + formData.push({ + name: paramName, + value: options.blob + }); + } else { + $.each(options.files, function (index, file) { + formData.push({ + name: ($.type(options.paramName) === 'array' && + options.paramName[index]) || paramName, + value: file + }); + }); + } + } else { + if (that._isInstanceOf('FormData', options.formData)) { + formData = options.formData; + } else { + formData = new FormData(); + $.each(this._getFormData(options), function (index, field) { + formData.append(field.name, field.value); + }); + } + if (options.blob) { + formData.append(paramName, options.blob, file.name); + } else { + $.each(options.files, function (index, file) { + // This check allows the tests to run with + // dummy objects: + if (that._isInstanceOf('File', file) || + that._isInstanceOf('Blob', file)) { + formData.append( + ($.type(options.paramName) === 'array' && + options.paramName[index]) || paramName, + file, + file.uploadName || file.name + ); + } + }); + } + } + options.data = formData; + } + // Blob reference is not needed anymore, free memory: + options.blob = null; + }, + + _initIframeSettings: function (options) { + var targetHost = $('').prop('href', options.url).prop('host'); + // Setting the dataType to iframe enables the iframe transport: + options.dataType = 'iframe ' + (options.dataType || ''); + // The iframe transport accepts a serialized array as form data: + options.formData = this._getFormData(options); + // Add redirect url to form data on cross-domain uploads: + if (options.redirect && targetHost && targetHost !== location.host) { + options.formData.push({ + name: options.redirectParamName || 'redirect', + value: options.redirect + }); + } + }, + + _initDataSettings: function (options) { + if (this._isXHRUpload(options)) { + if 
(!this._chunkedUpload(options, true)) { + if (!options.data) { + this._initXHRData(options); + } + this._initProgressListener(options); + } + if (options.postMessage) { + // Setting the dataType to postmessage enables the + // postMessage transport: + options.dataType = 'postmessage ' + (options.dataType || ''); + } + } else { + this._initIframeSettings(options); + } + }, + + _getParamName: function (options) { + var fileInput = $(options.fileInput), + paramName = options.paramName; + if (!paramName) { + paramName = []; + fileInput.each(function () { + var input = $(this), + name = input.prop('name') || 'files[]', + i = (input.prop('files') || [1]).length; + while (i) { + paramName.push(name); + i -= 1; + } + }); + if (!paramName.length) { + paramName = [fileInput.prop('name') || 'files[]']; + } + } else if (!$.isArray(paramName)) { + paramName = [paramName]; + } + return paramName; + }, + + _initFormSettings: function (options) { + // Retrieve missing options from the input field and the + // associated form, if available: + if (!options.form || !options.form.length) { + options.form = $(options.fileInput.prop('form')); + // If the given file input doesn't have an associated form, + // use the default widget file input's form: + if (!options.form.length) { + options.form = $(this.options.fileInput.prop('form')); + } + } + options.paramName = this._getParamName(options); + if (!options.url) { + options.url = options.form.prop('action') || location.href; + } + // The HTTP request method must be "POST" or "PUT": + options.type = (options.type || + ($.type(options.form.prop('method')) === 'string' && + options.form.prop('method')) || '' + ).toUpperCase(); + if (options.type !== 'POST' && options.type !== 'PUT' && + options.type !== 'PATCH') { + options.type = 'POST'; + } + if (!options.formAcceptCharset) { + options.formAcceptCharset = options.form.attr('accept-charset'); + } + }, + + _getAJAXSettings: function (data) { + var options = $.extend({}, this.options, data); + this._initFormSettings(options); + this._initDataSettings(options); + return options; + }, + + // jQuery 1.6 doesn't provide .state(), + // while jQuery 1.8+ removed .isRejected() and .isResolved(): + _getDeferredState: function (deferred) { + if (deferred.state) { + return deferred.state(); + } + if (deferred.isResolved()) { + return 'resolved'; + } + if (deferred.isRejected()) { + return 'rejected'; + } + return 'pending'; + }, + + // Maps jqXHR callbacks to the equivalent + // methods of the given Promise object: + _enhancePromise: function (promise) { + promise.success = promise.done; + promise.error = promise.fail; + promise.complete = promise.always; + return promise; + }, + + // Creates and returns a Promise object enhanced with + // the jqXHR methods abort, success, error and complete: + _getXHRPromise: function (resolveOrReject, context, args) { + var dfd = $.Deferred(), + promise = dfd.promise(); + context = context || this.options.context || promise; + if (resolveOrReject === true) { + dfd.resolveWith(context, args); + } else if (resolveOrReject === false) { + dfd.rejectWith(context, args); + } + promise.abort = dfd.promise; + return this._enhancePromise(promise); + }, + + // Adds convenience methods to the data callback argument: + _addConvenienceMethods: function (e, data) { + var that = this, + getPromise = function (args) { + return $.Deferred().resolveWith(that, args).promise(); + }; + data.process = function (resolveFunc, rejectFunc) { + if (resolveFunc || rejectFunc) { + data._processQueue = 
this._processQueue = + (this._processQueue || getPromise([this])).then( + function () { + if (data.errorThrown) { + return $.Deferred() + .rejectWith(that, [data]).promise(); + } + return getPromise(arguments); + } + ).then(resolveFunc, rejectFunc); + } + return this._processQueue || getPromise([this]); + }; + data.submit = function () { + if (this.state() !== 'pending') { + data.jqXHR = this.jqXHR = + (that._trigger( + 'submit', + $.Event('submit', {delegatedEvent: e}), + this + ) !== false) && that._onSend(e, this); + } + return this.jqXHR || that._getXHRPromise(); + }; + data.abort = function () { + if (this.jqXHR) { + return this.jqXHR.abort(); + } + this.errorThrown = 'abort'; + that._trigger('fail', null, this); + return that._getXHRPromise(false); + }; + data.state = function () { + if (this.jqXHR) { + return that._getDeferredState(this.jqXHR); + } + if (this._processQueue) { + return that._getDeferredState(this._processQueue); + } + }; + data.processing = function () { + return !this.jqXHR && this._processQueue && that + ._getDeferredState(this._processQueue) === 'pending'; + }; + data.progress = function () { + return this._progress; + }; + data.response = function () { + return this._response; + }; + }, + + // Parses the Range header from the server response + // and returns the uploaded bytes: + _getUploadedBytes: function (jqXHR) { + var range = jqXHR.getResponseHeader('Range'), + parts = range && range.split('-'), + upperBytesPos = parts && parts.length > 1 && + parseInt(parts[1], 10); + return upperBytesPos && upperBytesPos + 1; + }, + + // Uploads a file in multiple, sequential requests + // by splitting the file up in multiple blob chunks. + // If the second parameter is true, only tests if the file + // should be uploaded in chunks, but does not invoke any + // upload requests: + _chunkedUpload: function (options, testOnly) { + options.uploadedBytes = options.uploadedBytes || 0; + var that = this, + file = options.files[0], + fs = file.size, + ub = options.uploadedBytes, + mcs = options.maxChunkSize || fs, + slice = this._blobSlice, + dfd = $.Deferred(), + promise = dfd.promise(), + jqXHR, + upload; + if (!(this._isXHRUpload(options) && slice && (ub || mcs < fs)) || + options.data) { + return false; + } + if (testOnly) { + return true; + } + if (ub >= fs) { + file.error = options.i18n('uploadedBytes'); + return this._getXHRPromise( + false, + options.context, + [null, 'error', file.error] + ); + } + // The chunk upload method: + upload = function () { + // Clone the options object for each chunk upload: + var o = $.extend({}, options), + currentLoaded = o._progress.loaded; + o.blob = slice.call( + file, + ub, + ub + mcs, + file.type + ); + // Store the current chunk size, as the blob itself + // will be dereferenced after data processing: + o.chunkSize = o.blob.size; + // Expose the chunk bytes position range: + o.contentRange = 'bytes ' + ub + '-' + + (ub + o.chunkSize - 1) + '/' + fs; + // Process the upload data (the blob and potential form data): + that._initXHRData(o); + // Add progress listeners for this chunk upload: + that._initProgressListener(o); + jqXHR = ((that._trigger('chunksend', null, o) !== false && $.ajax(o)) || + that._getXHRPromise(false, o.context)) + .done(function (result, textStatus, jqXHR) { + ub = that._getUploadedBytes(jqXHR) || + (ub + o.chunkSize); + // Create a progress event if no final progress event + // with loaded equaling total has been triggered + // for this chunk: + if (currentLoaded + o.chunkSize - o._progress.loaded) { + 
that._onProgress($.Event('progress', { + lengthComputable: true, + loaded: ub - o.uploadedBytes, + total: ub - o.uploadedBytes + }), o); + } + options.uploadedBytes = o.uploadedBytes = ub; + o.result = result; + o.textStatus = textStatus; + o.jqXHR = jqXHR; + that._trigger('chunkdone', null, o); + that._trigger('chunkalways', null, o); + if (ub < fs) { + // File upload not yet complete, + // continue with the next chunk: + upload(); + } else { + dfd.resolveWith( + o.context, + [result, textStatus, jqXHR] + ); + } + }) + .fail(function (jqXHR, textStatus, errorThrown) { + o.jqXHR = jqXHR; + o.textStatus = textStatus; + o.errorThrown = errorThrown; + that._trigger('chunkfail', null, o); + that._trigger('chunkalways', null, o); + dfd.rejectWith( + o.context, + [jqXHR, textStatus, errorThrown] + ); + }); + }; + this._enhancePromise(promise); + promise.abort = function () { + return jqXHR.abort(); + }; + upload(); + return promise; + }, + + _beforeSend: function (e, data) { + if (this._active === 0) { + // the start callback is triggered when an upload starts + // and no other uploads are currently running, + // equivalent to the global ajaxStart event: + this._trigger('start'); + // Set timer for global bitrate progress calculation: + this._bitrateTimer = new this._BitrateTimer(); + // Reset the global progress values: + this._progress.loaded = this._progress.total = 0; + this._progress.bitrate = 0; + } + // Make sure the container objects for the .response() and + // .progress() methods on the data object are available + // and reset to their initial state: + this._initResponseObject(data); + this._initProgressObject(data); + data._progress.loaded = data.loaded = data.uploadedBytes || 0; + data._progress.total = data.total = this._getTotal(data.files) || 1; + data._progress.bitrate = data.bitrate = 0; + this._active += 1; + // Initialize the global progress values: + this._progress.loaded += data.loaded; + this._progress.total += data.total; + }, + + _onDone: function (result, textStatus, jqXHR, options) { + var total = options._progress.total, + response = options._response; + if (options._progress.loaded < total) { + // Create a progress event if no final progress event + // with loaded equaling total has been triggered: + this._onProgress($.Event('progress', { + lengthComputable: true, + loaded: total, + total: total + }), options); + } + response.result = options.result = result; + response.textStatus = options.textStatus = textStatus; + response.jqXHR = options.jqXHR = jqXHR; + this._trigger('done', null, options); + }, + + _onFail: function (jqXHR, textStatus, errorThrown, options) { + var response = options._response; + if (options.recalculateProgress) { + // Remove the failed (error or abort) file upload from + // the global progress calculation: + this._progress.loaded -= options._progress.loaded; + this._progress.total -= options._progress.total; + } + response.jqXHR = options.jqXHR = jqXHR; + response.textStatus = options.textStatus = textStatus; + response.errorThrown = options.errorThrown = errorThrown; + this._trigger('fail', null, options); + }, + + _onAlways: function (jqXHRorResult, textStatus, jqXHRorError, options) { + // jqXHRorResult, textStatus and jqXHRorError are added to the + // options object via done and fail callbacks + this._trigger('always', null, options); + }, + + _onSend: function (e, data) { + if (!data.submit) { + this._addConvenienceMethods(e, data); + } + var that = this, + jqXHR, + aborted, + slot, + pipe, + options = that._getAJAXSettings(data), + send 
= function () { + that._sending += 1; + // Set timer for bitrate progress calculation: + options._bitrateTimer = new that._BitrateTimer(); + jqXHR = jqXHR || ( + ((aborted || that._trigger( + 'send', + $.Event('send', {delegatedEvent: e}), + options + ) === false) && + that._getXHRPromise(false, options.context, aborted)) || + that._chunkedUpload(options) || $.ajax(options) + ).done(function (result, textStatus, jqXHR) { + that._onDone(result, textStatus, jqXHR, options); + }).fail(function (jqXHR, textStatus, errorThrown) { + that._onFail(jqXHR, textStatus, errorThrown, options); + }).always(function (jqXHRorResult, textStatus, jqXHRorError) { + that._onAlways( + jqXHRorResult, + textStatus, + jqXHRorError, + options + ); + that._sending -= 1; + that._active -= 1; + if (options.limitConcurrentUploads && + options.limitConcurrentUploads > that._sending) { + // Start the next queued upload, + // that has not been aborted: + var nextSlot = that._slots.shift(); + while (nextSlot) { + if (that._getDeferredState(nextSlot) === 'pending') { + nextSlot.resolve(); + break; + } + nextSlot = that._slots.shift(); + } + } + if (that._active === 0) { + // The stop callback is triggered when all uploads have + // been completed, equivalent to the global ajaxStop event: + that._trigger('stop'); + } + }); + return jqXHR; + }; + this._beforeSend(e, options); + if (this.options.sequentialUploads || + (this.options.limitConcurrentUploads && + this.options.limitConcurrentUploads <= this._sending)) { + if (this.options.limitConcurrentUploads > 1) { + slot = $.Deferred(); + this._slots.push(slot); + pipe = slot.then(send); + } else { + this._sequence = this._sequence.then(send, send); + pipe = this._sequence; + } + // Return the piped Promise object, enhanced with an abort method, + // which is delegated to the jqXHR object of the current upload, + // and jqXHR callbacks mapped to the equivalent Promise methods: + pipe.abort = function () { + aborted = [undefined, 'abort', 'abort']; + if (!jqXHR) { + if (slot) { + slot.rejectWith(options.context, aborted); + } + return send(); + } + return jqXHR.abort(); + }; + return this._enhancePromise(pipe); + } + return send(); + }, + + _onAdd: function (e, data) { + var that = this, + result = true, + options = $.extend({}, this.options, data), + files = data.files, + filesLength = files.length, + limit = options.limitMultiFileUploads, + limitSize = options.limitMultiFileUploadSize, + overhead = options.limitMultiFileUploadSizeOverhead, + batchSize = 0, + paramName = this._getParamName(options), + paramNameSet, + paramNameSlice, + fileSet, + i, + j = 0; + if (!filesLength) { + return false; + } + if (limitSize && files[0].size === undefined) { + limitSize = undefined; + } + if (!(options.singleFileUploads || limit || limitSize) || + !this._isXHRUpload(options)) { + fileSet = [files]; + paramNameSet = [paramName]; + } else if (!(options.singleFileUploads || limitSize) && limit) { + fileSet = []; + paramNameSet = []; + for (i = 0; i < filesLength; i += limit) { + fileSet.push(files.slice(i, i + limit)); + paramNameSlice = paramName.slice(i, i + limit); + if (!paramNameSlice.length) { + paramNameSlice = paramName; + } + paramNameSet.push(paramNameSlice); + } + } else if (!options.singleFileUploads && limitSize) { + fileSet = []; + paramNameSet = []; + for (i = 0; i < filesLength; i = i + 1) { + batchSize += files[i].size + overhead; + if (i + 1 === filesLength || + ((batchSize + files[i + 1].size + overhead) > limitSize) || + (limit && i + 1 - j >= limit)) { + 
fileSet.push(files.slice(j, i + 1));
+                        paramNameSlice = paramName.slice(j, i + 1);
+                        if (!paramNameSlice.length) {
+                            paramNameSlice = paramName;
+                        }
+                        paramNameSet.push(paramNameSlice);
+                        j = i + 1;
+                        batchSize = 0;
+                    }
+                }
+            } else {
+                paramNameSet = paramName;
+            }
+            data.originalFiles = files;
+            $.each(fileSet || files, function (index, element) {
+                var newData = $.extend({}, data);
+                newData.files = fileSet ? element : [element];
+                newData.paramName = paramNameSet[index];
+                that._initResponseObject(newData);
+                that._initProgressObject(newData);
+                that._addConvenienceMethods(e, newData);
+                result = that._trigger(
+                    'add',
+                    $.Event('add', {delegatedEvent: e}),
+                    newData
+                );
+                return result;
+            });
+            return result;
+        },
+
+        _replaceFileInput: function (data) {
+            var input = data.fileInput,
+                inputClone = input.clone(true),
+                restoreFocus = input.is(document.activeElement);
+            // Add a reference for the new cloned file input to the data argument:
+            data.fileInputClone = inputClone;
+            $('<form></form>').append(inputClone)[0].reset();
+            // Detaching allows to insert the fileInput on another form
+            // without losing the file input value:
+            input.after(inputClone).detach();
+            // If the fileInput had focus before it was detached,
+            // restore focus to the inputClone.
+            if (restoreFocus) {
+                inputClone.focus();
+            }
+            // Avoid memory leaks with the detached file input:
+            $.cleanData(input.unbind('remove'));
+            // Replace the original file input element in the fileInput
+            // elements set with the clone, which has been copied including
+            // event handlers:
+            this.options.fileInput = this.options.fileInput.map(function (i, el) {
+                if (el === input[0]) {
+                    return inputClone[0];
+                }
+                return el;
+            });
+            // If the widget has been initialized on the file input itself,
+            // override this.element with the file input clone:
+            if (input[0] === this.element[0]) {
+                this.element = inputClone;
+            }
+        },
+
+        _handleFileTreeEntry: function (entry, path) {
+            var that = this,
+                dfd = $.Deferred(),
+                entries = [],
+                dirReader,
+                errorHandler = function (e) {
+                    if (e && !e.entry) {
+                        e.entry = entry;
+                    }
+                    // Since $.when returns immediately if one
+                    // Deferred is rejected, we use resolve instead.
+                    // This allows valid files and invalid items
+                    // to be returned together in one set:
+                    dfd.resolve([e]);
+                },
+                successHandler = function (entries) {
+                    that._handleFileTreeEntries(
+                        entries,
+                        path + entry.name + '/'
+                    ).done(function (files) {
+                        dfd.resolve(files);
+                    }).fail(errorHandler);
+                },
+                readEntries = function () {
+                    dirReader.readEntries(function (results) {
+                        if (!results.length) {
+                            successHandler(entries);
+                        } else {
+                            entries = entries.concat(results);
+                            readEntries();
+                        }
+                    }, errorHandler);
+                };
+            path = path || '';
+            if (entry.isFile) {
+                if (entry._file) {
+                    // Workaround for Chrome bug #149735
+                    entry._file.relativePath = path;
+                    dfd.resolve(entry._file);
+                } else {
+                    entry.file(function (file) {
+                        file.relativePath = path;
+                        dfd.resolve(file);
+                    }, errorHandler);
+                }
+            } else if (entry.isDirectory) {
+                dirReader = entry.createReader();
+                readEntries();
+            } else {
+                // Return an empty list for file system items
+                // other than files or directories:
+                dfd.resolve([]);
+            }
+            return dfd.promise();
+        },
+
+        _handleFileTreeEntries: function (entries, path) {
+            var that = this;
+            return $.when.apply(
+                $,
+                $.map(entries, function (entry) {
+                    return that._handleFileTreeEntry(entry, path);
+                })
+            ).then(function () {
+                return Array.prototype.concat.apply(
+                    [],
+                    arguments
+                );
+            });
+        },
+
+        _getDroppedFiles: function (dataTransfer) {
+            dataTransfer = dataTransfer || {};
+            var items = dataTransfer.items;
+            if (items && items.length && (items[0].webkitGetAsEntry ||
+                    items[0].getAsEntry)) {
+                return this._handleFileTreeEntries(
+                    $.map(items, function (item) {
+                        var entry;
+                        if (item.webkitGetAsEntry) {
+                            entry = item.webkitGetAsEntry();
+                            if (entry) {
+                                // Workaround for Chrome bug #149735:
+                                entry._file = item.getAsFile();
+                            }
+                            return entry;
+                        }
+                        return item.getAsEntry();
+                    })
+                );
+            }
+            return $.Deferred().resolve(
+                $.makeArray(dataTransfer.files)
+            ).promise();
+        },
+
+        _getSingleFileInputFiles: function (fileInput) {
+            fileInput = $(fileInput);
+            var entries = fileInput.prop('webkitEntries') ||
+                    fileInput.prop('entries'),
+                files,
+                value;
+            if (entries && entries.length) {
+                return this._handleFileTreeEntries(entries);
+            }
+            files = $.makeArray(fileInput.prop('files'));
+            if (!files.length) {
+                value = fileInput.prop('value');
+                if (!value) {
+                    return $.Deferred().resolve([]).promise();
+                }
+
// If the files property is not available, the browser does not + // support the File API and we add a pseudo File object with + // the input value as name with path information removed: + files = [{name: value.replace(/^.*\\/, '')}]; + } else if (files[0].name === undefined && files[0].fileName) { + // File normalization for Safari 4 and Firefox 3: + $.each(files, function (index, file) { + file.name = file.fileName; + file.size = file.fileSize; + }); + } + return $.Deferred().resolve(files).promise(); + }, + + _getFileInputFiles: function (fileInput) { + if (!(fileInput instanceof $) || fileInput.length === 1) { + return this._getSingleFileInputFiles(fileInput); + } + return $.when.apply( + $, + $.map(fileInput, this._getSingleFileInputFiles) + ).then(function () { + return Array.prototype.concat.apply( + [], + arguments + ); + }); + }, + + _onChange: function (e) { + var that = this, + data = { + fileInput: $(e.target), + form: $(e.target.form) + }; + this._getFileInputFiles(data.fileInput).always(function (files) { + data.files = files; + if (that.options.replaceFileInput) { + that._replaceFileInput(data); + } + if (that._trigger( + 'change', + $.Event('change', {delegatedEvent: e}), + data + ) !== false) { + that._onAdd(e, data); + } + }); + }, + + _onPaste: function (e) { + var items = e.originalEvent && e.originalEvent.clipboardData && + e.originalEvent.clipboardData.items, + data = {files: []}; + if (items && items.length) { + $.each(items, function (index, item) { + var file = item.getAsFile && item.getAsFile(); + if (file) { + data.files.push(file); + } + }); + if (this._trigger( + 'paste', + $.Event('paste', {delegatedEvent: e}), + data + ) !== false) { + this._onAdd(e, data); + } + } + }, + + _onDrop: function (e) { + e.dataTransfer = e.originalEvent && e.originalEvent.dataTransfer; + var that = this, + dataTransfer = e.dataTransfer, + data = {}; + if (dataTransfer && dataTransfer.files && dataTransfer.files.length) { + e.preventDefault(); + this._getDroppedFiles(dataTransfer).always(function (files) { + data.files = files; + if (that._trigger( + 'drop', + $.Event('drop', {delegatedEvent: e}), + data + ) !== false) { + that._onAdd(e, data); + } + }); + } + }, + + _onDragOver: getDragHandler('dragover'), + + _onDragEnter: getDragHandler('dragenter'), + + _onDragLeave: getDragHandler('dragleave'), + + _initEventHandlers: function () { + if (this._isXHRUpload(this.options)) { + this._on(this.options.dropZone, { + dragover: this._onDragOver, + drop: this._onDrop, + // event.preventDefault() on dragenter is required for IE10+: + dragenter: this._onDragEnter, + // dragleave is not required, but added for completeness: + dragleave: this._onDragLeave + }); + this._on(this.options.pasteZone, { + paste: this._onPaste + }); + } + if ($.support.fileInput) { + this._on(this.options.fileInput, { + change: this._onChange + }); + } + }, + + _destroyEventHandlers: function () { + this._off(this.options.dropZone, 'dragenter dragleave dragover drop'); + this._off(this.options.pasteZone, 'paste'); + this._off(this.options.fileInput, 'change'); + }, + + _destroy: function () { + this._destroyEventHandlers(); + }, + + _setOption: function (key, value) { + var reinit = $.inArray(key, this._specialOptions) !== -1; + if (reinit) { + this._destroyEventHandlers(); + } + this._super(key, value); + if (reinit) { + this._initSpecialOptions(); + this._initEventHandlers(); + } + }, + + _initSpecialOptions: function () { + var options = this.options; + if (options.fileInput === undefined) { + options.fileInput 
= this.element.is('input[type="file"]') ? + this.element : this.element.find('input[type="file"]'); + } else if (!(options.fileInput instanceof $)) { + options.fileInput = $(options.fileInput); + } + if (!(options.dropZone instanceof $)) { + options.dropZone = $(options.dropZone); + } + if (!(options.pasteZone instanceof $)) { + options.pasteZone = $(options.pasteZone); + } + }, + + _getRegExp: function (str) { + var parts = str.split('/'), + modifiers = parts.pop(); + parts.shift(); + return new RegExp(parts.join('/'), modifiers); + }, + + _isRegExpOption: function (key, value) { + return key !== 'url' && $.type(value) === 'string' && + /^\/.*\/[igm]{0,3}$/.test(value); + }, + + _initDataAttributes: function () { + var that = this, + options = this.options, + data = this.element.data(); + // Initialize options set via HTML5 data-attributes: + $.each( + this.element[0].attributes, + function (index, attr) { + var key = attr.name.toLowerCase(), + value; + if (/^data-/.test(key)) { + // Convert hyphen-ated key to camelCase: + key = key.slice(5).replace(/-[a-z]/g, function (str) { + return str.charAt(1).toUpperCase(); + }); + value = data[key]; + if (that._isRegExpOption(key, value)) { + value = that._getRegExp(value); + } + options[key] = value; + } + } + ); + }, + + _create: function () { + this._initDataAttributes(); + this._initSpecialOptions(); + this._slots = []; + this._sequence = this._getXHRPromise(true); + this._sending = this._active = 0; + this._initProgressObject(this); + this._initEventHandlers(); + }, + + // This method is exposed to the widget API and allows to query + // the number of active uploads: + active: function () { + return this._active; + }, + + // This method is exposed to the widget API and allows to query + // the widget upload progress. + // It returns an object with loaded, total and bitrate properties + // for the running uploads: + progress: function () { + return this._progress; + }, + + // This method is exposed to the widget API and allows adding files + // using the fileupload API. The data parameter accepts an object which + // must have a files property and can contain additional options: + // .fileupload('add', {files: filesList}); + add: function (data) { + var that = this; + if (!data || this.options.disabled) { + return; + } + if (data.fileInput && !data.files) { + this._getFileInputFiles(data.fileInput).always(function (files) { + data.files = files; + that._onAdd(null, data); + }); + } else { + data.files = $.makeArray(data.files); + this._onAdd(null, data); + } + }, + + // This method is exposed to the widget API and allows sending files + // using the fileupload API. The data parameter accepts an object which + // must have a files or fileInput property and can contain additional options: + // .fileupload('send', {files: filesList}); + // The method returns a Promise object for the file upload call. 
+ send: function (data) { + if (data && !this.options.disabled) { + if (data.fileInput && !data.files) { + var that = this, + dfd = $.Deferred(), + promise = dfd.promise(), + jqXHR, + aborted; + promise.abort = function () { + aborted = true; + if (jqXHR) { + return jqXHR.abort(); + } + dfd.reject(null, 'abort', 'abort'); + return promise; + }; + this._getFileInputFiles(data.fileInput).always( + function (files) { + if (aborted) { + return; + } + if (!files.length) { + dfd.reject(); + return; + } + data.files = files; + jqXHR = that._onSend(null, data); + jqXHR.then( + function (result, textStatus, jqXHR) { + dfd.resolve(result, textStatus, jqXHR); + }, + function (jqXHR, textStatus, errorThrown) { + dfd.reject(jqXHR, textStatus, errorThrown); + } + ); + } + ); + return this._enhancePromise(promise); + } + data.files = $.makeArray(data.files); + if (data.files.length) { + return this._onSend(null, data); + } + } + return this._getXHRPromise(false, data && data.context); + } + + }); + +})); From a846e4a09b8bf35a1b48a1185a5e4fa85d22f2ef Mon Sep 17 00:00:00 2001 From: Michael Klein Date: Wed, 31 May 2017 16:22:06 -0500 Subject: [PATCH 005/237] Additional Migration: Cherry pick migration improvements from develop Beef up migration Add AttachDerivativeJob Pull in latest migration changes Allow for namespace override on migration rake task Fix refactor errors in migration code Don't be so timid with the callbacks --- Gemfile.lock | 313 +++++++++--------- app/jobs/attach_derivative_job.rb | 28 ++ .../master_file/object_mover.rb | 12 +- app/models/admin/collection.rb | 1 + app/models/concerns/migration_target.rb | 25 ++ app/models/derivative.rb | 1 + app/models/lease.rb | 11 +- app/models/master_file.rb | 11 +- app/models/media_object.rb | 7 +- lib/tasks/avalon.rake | 2 +- 10 files changed, 248 insertions(+), 163 deletions(-) create mode 100644 app/jobs/attach_derivative_job.rb create mode 100644 app/models/concerns/migration_target.rb diff --git a/Gemfile.lock b/Gemfile.lock index fe8801fe53..3914fcdfce 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -122,7 +122,7 @@ GIT GIT remote: https://github.com/javan/whenever.git - revision: 075afb22a0f681ae5d2c4289b78e71527284bfe7 + revision: 1dcb91484e6f1ee91c9272daccbe84111754102b specs: whenever (0.9.7) chronic (>= 0.6.3) @@ -163,7 +163,7 @@ GEM builder (~> 3.1) erubis (~> 2.7.0) rails-dom-testing (~> 1.0, >= 1.0.5) - rails-html-sanitizer (~> 1.0, >= 1.0.3) + rails-html-sanitizer (~> 1.0, >= 1.0.2) active-fedora (11.2.0) active-triples (~> 0.11.0) activemodel (>= 4.2, < 6) @@ -203,45 +203,45 @@ GEM activemodel (= 4.2.9) activesupport (= 4.2.9) arel (~> 6.0) - activerecord-session_store (1.0.0) - actionpack (>= 4.0, < 5.1) - activerecord (>= 4.0, < 5.1) + activerecord-session_store (1.1.0) + actionpack (>= 4.0, < 5.2) + activerecord (>= 4.0, < 5.2) multi_json (~> 1.11, >= 1.11.2) rack (>= 1.5.2, < 3) - railties (>= 4.0, < 5.1) - activesupport (4.2.9) + railties (>= 4.0, < 5.2) + activesupport (4.2.7.1) i18n (~> 0.7) minitest (~> 5.1) thread_safe (~> 0.3, >= 0.3.4) tzinfo (~> 1.1) - acts_as_list (0.8.2) + acts_as_list (0.9.5) activerecord (>= 3.0) - addressable (2.5.0) + addressable (2.5.1) public_suffix (~> 2.0, >= 2.0.2) - airbrussh (1.1.1) + airbrussh (1.2.0) sshkit (>= 1.6.1, != 1.7.0) - api-pagination (4.5.1) + api-pagination (4.6.3) arel (6.0.4) ast (2.3.0) - autoprefixer-rails (6.5.4) + autoprefixer-rails (7.1.1) execjs - aws-sdk (2.9.5) - aws-sdk-resources (= 2.9.5) - aws-sdk-core (2.9.5) + aws-sdk (2.9.27) + aws-sdk-resources (= 2.9.27) + 
aws-sdk-core (2.9.27) aws-sigv4 (~> 1.0) jmespath (~> 1.0) aws-sdk-rails (1.0.1) aws-sdk-resources (~> 2) railties (>= 3) - aws-sdk-resources (2.9.5) - aws-sdk-core (= 2.9.5) + aws-sdk-resources (2.9.27) + aws-sdk-core (= 2.9.27) aws-sigv4 (1.0.0) bcrypt (3.1.11) bcrypt-ruby (3.1.5) bcrypt (>= 3.1.3) binding_of_caller (0.7.2) debug_inspector (>= 0.0.1) - blacklight (6.7.2) + blacklight (6.10.0) bootstrap-sass (~> 3.2) deprecation globalid @@ -250,15 +250,15 @@ GEM rails (>= 4.2, < 6) rsolr (>= 1.0.6, < 3) twitter-typeahead-rails (= 0.11.1.pre.corejavascript) - blacklight-access_controls (0.6.1) + blacklight-access_controls (0.6.2) blacklight (~> 6.0) cancancan (~> 1.8) deprecation (~> 1.0) bootstrap-sass (3.3.7) autoprefixer-rails (>= 5.2.1) sass (>= 3.3.4) - bootstrap_form (2.5.2) - browse-everything (0.13.0) + bootstrap_form (2.7.0) + browse-everything (0.13.1) addressable (~> 2.5) aws-sdk bootstrap-sass @@ -274,20 +274,18 @@ GEM skydrive builder (3.2.3) byebug (9.0.6) - cancancan (1.15.0) - capistrano (3.7.1) + cancancan (1.17.0) + capistrano (3.8.1) airbrussh (>= 1.0.0) - capistrano-harrow i18n rake (>= 10.0.0) sshkit (>= 1.9.0) capistrano-bundler (1.2.0) capistrano (~> 3.1) sshkit (~> 1.2) - capistrano-harrow (0.5.3) capistrano-passenger (0.2.0) capistrano (~> 3.0) - capistrano-rails (1.2.0) + capistrano-rails (1.2.3) capistrano (~> 3.1) capistrano-bundler (~> 1.1) capistrano-resque (0.2.3) @@ -297,7 +295,7 @@ GEM capistrano-rvm (0.1.2) capistrano (~> 3.0) sshkit (~> 1.2) - capybara (2.11.0) + capybara (2.14.0) addressable mime-types (>= 1.16) nokogiri (>= 1.3.3) @@ -318,83 +316,86 @@ GEM config (1.4.0) activesupport (>= 3.0) deep_merge (~> 1.1.1) - coveralls (0.8.17) + coveralls (0.8.21) json (>= 1.8, < 3) - simplecov (~> 0.12.0) + simplecov (~> 0.14.1) term-ansicolor (~> 1.3) - thor (~> 0.19.1) + thor (~> 0.19.4) tins (~> 1.6) crack (0.4.3) safe_yaml (~> 1.0.0) daemons (1.2.4) - database_cleaner (1.5.3) - debug_inspector (0.0.2) + database_cleaner (1.6.1) + debug_inspector (0.0.3) declarative (0.0.9) declarative-option (0.1.0) deep_merge (1.1.1) deprecation (1.0.0) activesupport - devise (4.2.0) + devise (4.3.0) bcrypt (~> 3.0) orm_adapter (~> 0.1) - railties (>= 4.1.0, < 5.1) + railties (>= 4.1.0, < 5.2) responders warden (~> 1.2.3) - diff-lcs (1.2.5) + diff-lcs (1.3) docile (1.1.5) - domain_name (0.5.20161129) + domain_name (0.5.20170404) unf (>= 0.0.5, < 1.0.0) - dotenv (2.1.1) - dotenv-rails (2.1.1) - dotenv (= 2.1.1) - railties (>= 4.0, < 5.1) + dotenv (2.2.1) + dotenv-rails (2.2.1) + dotenv (= 2.2.1) + railties (>= 3.2, < 5.2) dropbox-sdk (1.6.5) json ebnf (1.1.0) rdf (~> 2.0) sxp (~> 1.0) - edtf (3.0.0) + edtf (3.0.2) activesupport (>= 3.0, < 6.0) - email_spec (2.1.0) + email_spec (2.1.1) htmlentities (~> 4.3.3) launchy (~> 2.1) - mail (~> 2.6.3) + mail (~> 2.6) equivalent-xml (0.6.0) nokogiri (>= 1.4.3) erubis (2.7.0) + et-orbi (1.0.4) + tzinfo execjs (2.7.0) factory_girl (4.8.0) activesupport (>= 3.0.0) factory_girl_rails (4.8.0) factory_girl (~> 4.8.0) railties (>= 3.0.0) - fakefs (0.10.1) - faker (1.6.6) + fakefs (0.11.0) + faker (1.7.3) i18n (~> 0.5) - faraday (0.9.2) + fakeweb (1.3.0) + faraday (0.11.0) multipart-post (>= 1.2, < 3) - fcrepo_wrapper (0.7.0) + faraday_middleware (0.11.0.1) + faraday (>= 0.7.4, < 1.0) + fcrepo_wrapper (0.8.0) ruby-progressbar fedora-migrate (0.5.0) rchardet rdf-rdfxml rubydora (~> 1.8) flamegraph (0.9.5) - font-awesome-rails (4.7.0.1) - railties (>= 3.2, < 5.1) - globalid (0.3.7) - activesupport (>= 4.1.0) - google-api-client (0.10.3) - 
addressable (~> 2.3) + font-awesome-rails (4.7.0.2) + railties (>= 3.2, < 5.2) + globalid (0.4.0) + activesupport (>= 4.2.0) + google-api-client (0.11.3) + addressable (>= 2.5.1) googleauth (~> 0.5) - httpclient (~> 2.7) - hurley (~> 0.1) - memoist (~> 0.11) - mime-types (>= 1.6) + httpclient (>= 2.8.1, < 3.0) + mime-types (>= 3.0) representable (~> 3.0) retriable (>= 2.0, < 4.0) - google_drive (2.1.2) - google-api-client (>= 0.9.0, < 1.0.0) + google_drive (2.1.4) + google-api-client (>= 0.11.0, < 0.12.0) googleauth (>= 0.5.0, < 1.0.0) nokogiri (>= 1.5.3, < 2.0.0) googleauth (0.5.1) @@ -409,8 +410,8 @@ GEM tilt hamster (3.0.0) concurrent-ruby (~> 1.0) - hashdiff (0.3.1) - hashie (3.4.6) + hashdiff (0.3.4) + hashie (3.5.5) hooks (0.4.1) uber (~> 0.0.14) htmlentities (4.3.4) @@ -421,10 +422,9 @@ GEM http-cookie (1.0.3) domain_name (~> 0.5) http_logger (0.5.1) - httparty (0.14.0) + httparty (0.15.5) multi_xml (>= 0.5.2) httpclient (2.8.3) - hurley (0.2) hydra-access-controls (10.3.4) active-fedora (>= 10.0.0, < 12) activesupport (>= 4, < 6) @@ -439,29 +439,40 @@ GEM hydra-access-controls (= 10.3.4) hydra-core (= 10.3.4) rails (>= 3.2.6) - i18n (0.8.6) + i18n (0.8.4) iconv (1.0.4) - ims-lti (1.1.13) - builder - oauth (>= 0.4.5, < 0.6) - jbuilder (2.6.1) - activesupport (>= 3.0.0, < 5.1) - multi_json (~> 1.2) + ims-lti (2.1.2) + builder (~> 3.2) + faraday (~> 0.8) + faraday_middleware (~> 0.8) + simple_oauth (= 0.2) + jbuilder (2.6.4) + activesupport (>= 3.0.0) + multi_json (>= 1.2) jmespath (1.3.1) - jquery-rails (4.2.1) + jquery-rails (4.3.1) rails-dom-testing (>= 1, < 3) railties (>= 4.2.0) thor (>= 0.14, < 2.0) jquery-ui-rails (6.0.1) railties (>= 3.2.16) json (1.8.6) - json-ld (2.1.3) + json-ld (2.1.2) multi_json (~> 1.12) - rdf (~> 2.2) + rdf (~> 2.1) jwt (1.5.6) - kaminari (0.17.0) - actionpack (>= 3.0.0) - activesupport (>= 3.0.0) + kaminari (1.0.1) + activesupport (>= 4.1.0) + kaminari-actionview (= 1.0.1) + kaminari-activerecord (= 1.0.1) + kaminari-core (= 1.0.1) + kaminari-actionview (1.0.1) + actionview + kaminari-core (= 1.0.1) + kaminari-activerecord (1.0.1) + activerecord + kaminari-core (= 1.0.1) + kaminari-core (1.0.1) launchy (2.4.3) addressable (~> 2.3) ldp (0.6.4) @@ -479,13 +490,13 @@ GEM logging (2.2.2) little-plugger (~> 1.1) multi_json (~> 1.10) - lograge (0.4.1) - actionpack (>= 4, < 5.1) - activesupport (>= 4, < 5.1) - railties (>= 4, < 5.1) + lograge (0.5.1) + actionpack (>= 4, < 5.2) + activesupport (>= 4, < 5.2) + railties (>= 4, < 5.2) loofah (2.0.3) nokogiri (>= 1.5.9) - mail (2.6.6) + mail (2.6.5) mime-types (>= 1.16, < 4) marc (1.0.0) scrub_rb (>= 1.0.1, < 2) @@ -499,27 +510,26 @@ GEM mime-types-data (~> 3.2015) mime-types-data (3.2016.0521) mimemagic (0.3.2) - mini_portile2 (2.3.0) - minitest (5.10.3) + mini_portile2 (2.1.0) + minitest (5.10.2) mono_logger (1.1.0) multi_json (1.12.1) multi_xml (0.6.0) multipart-post (2.0.0) - mysql2 (0.4.5) + mysql2 (0.4.6) net-http-digest_auth (1.4.1) - net-ldap (0.15.0) + net-ldap (0.16.0) net-scp (1.2.1) net-ssh (>= 2.6.5) - net-ssh (3.2.0) + net-ssh (4.1.0) netrc (0.11.0) noid (0.9.0) - nokogiri (1.8.1) - mini_portile2 (~> 2.3.0) + nokogiri (1.7.2) + mini_portile2 (~> 2.1.0) nom-xml (0.6.0) activesupport (>= 3.2.18) i18n nokogiri - oauth (0.5.1) oauth2 (1.3.1) faraday (>= 0.8, < 0.12) jwt (~> 1.0) @@ -531,9 +541,9 @@ GEM activesupport nokogiri (>= 1.4.2) solrizer (~> 3.3) - omniauth (1.3.1) - hashie (>= 1.2, < 4) - rack (>= 1.0, < 3) + omniauth (1.6.1) + hashie (>= 3.4.6, < 3.6.0) + rack (>= 1.6.2, < 3) omniauth-identity 
(1.1.1) bcrypt-ruby (~> 3.0) omniauth (~> 1.0) @@ -542,14 +552,10 @@ GEM omniauth (~> 1.0) orm_adapter (0.5.0) os (0.9.6) - parallel (1.10.0) - parser (2.3.3.1) + parallel (1.11.2) + parser (2.4.0.0) ast (~> 2.2) - pg (0.19.0) - poltergeist (1.15.0) - capybara (~> 2.1) - cliver (~> 0.3.1) - websocket-driver (>= 0.2.0) + pg (0.20.0) powerpack (0.1.1) pry (0.10.4) coderay (~> 1.1.0) @@ -558,11 +564,11 @@ GEM pry-byebug (3.4.2) byebug (~> 9.0) pry (~> 0.10) - pry-rails (0.3.4) - pry (>= 0.9.10) - public_suffix (2.0.4) + pry-rails (0.3.6) + pry (>= 0.10.4) + public_suffix (2.0.5) rack (1.6.8) - rack-mini-profiler (0.10.2) + rack-mini-profiler (0.10.5) rack (>= 1.2.0) rack-protection (1.5.3) rack @@ -592,18 +598,19 @@ GEM activesupport (= 4.2.9) rake (>= 0.8.7) thor (>= 0.18.1, < 2.0) - rainbow (2.1.0) - rake (12.1.0) + rainbow (2.2.2) + rake + rake (12.0.0) rb-readline (0.5.4) rchardet (1.6.1) rdf (2.2.6) hamster (~> 3.0) link_header (~> 0.0, >= 0.0.8) - rdf-aggregate-repo (2.0.0) + rdf-aggregate-repo (2.2.0) rdf (~> 2.0) rdf-isomorphic (2.0.0) rdf (~> 2.0) - rdf-rdfa (2.0.1) + rdf-rdfa (2.1.1) haml (~> 4.0) htmlentities (~> 4.3) rdf (~> 2.0) @@ -614,23 +621,31 @@ GEM rdf (~> 2.0) rdf-rdfa (~> 2.0) rdf-xsd (~> 2.0) - rdf-turtle (2.2.0) - ebnf (~> 1.1) - rdf (~> 2.2) + rdf-turtle (2.0.0) + ebnf (~> 1.0, >= 1.0.1) + rdf (~> 2.0) rdf-vocab (2.1.1) rdf (~> 2.1) - rdf-xsd (2.0.0) - rdf (~> 2.0) + rdf-xsd (2.1.0) + rdf (~> 2.1) rdoc (4.3.0) - redis (3.3.2) - redis-namespace (1.5.2) + redis (3.3.3) + redis-actionpack (5.0.1) + actionpack (>= 4.0, < 6) + redis-rack (>= 1, < 3) + redis-store (>= 1.1.0, < 1.4.0) + redis-activesupport (5.0.2) + activesupport (>= 3, < 6) + redis-store (~> 1.3.0) + redis-namespace (1.5.3) redis (~> 3.0, >= 3.0.4) representable (3.0.4) declarative (< 0.1.0) declarative-option (< 0.2.0) uber (< 0.2.0) - responders (2.3.0) - railties (>= 4.2.0, < 5.1) + responders (2.4.0) + actionpack (>= 4.2.0, < 5.3) + railties (>= 4.2.0, < 5.3) resque (1.26.0) mono_logger (~> 1.0) multi_json (~> 1.0) @@ -642,35 +657,33 @@ GEM redis (~> 3.3) resque (~> 1.26) rufus-scheduler (~> 3.2) - rest-client (2.0.0) + rest-client (2.0.2) http-cookie (>= 1.0.2, < 2.0) mime-types (>= 1.16, < 4.0) netrc (~> 0.8) retriable (3.0.2) - roo (2.5.1) + roo (2.7.1) nokogiri (~> 1) rubyzip (~> 1.1, < 2.0.0) rsolr (1.1.2) builder (>= 2.1.2) - rspec-core (3.5.4) - rspec-support (~> 3.5.0) - rspec-expectations (3.5.0) + rspec-core (3.6.0) + rspec-support (~> 3.6.0) + rspec-expectations (3.6.0) diff-lcs (>= 1.2.0, < 2.0) - rspec-support (~> 3.5.0) - rspec-mocks (3.5.0) + rspec-support (~> 3.6.0) + rspec-mocks (3.6.0) diff-lcs (>= 1.2.0, < 2.0) - rspec-support (~> 3.5.0) - rspec-rails (3.5.2) + rspec-support (~> 3.6.0) + rspec-rails (3.6.0) actionpack (>= 3.0) activesupport (>= 3.0) railties (>= 3.0) - rspec-core (~> 3.5.0) - rspec-expectations (~> 3.5.0) - rspec-mocks (~> 3.5.0) - rspec-support (~> 3.5.0) - rspec-retry (0.5.4) - rspec-core (> 3.3, < 3.7) - rspec-support (3.5.0) + rspec-core (~> 3.6.0) + rspec-expectations (~> 3.6.0) + rspec-mocks (~> 3.6.0) + rspec-support (~> 3.6.0) + rspec-support (3.6.0) rubocop (0.40.0) parser (>= 2.3.1.0, < 3.0) powerpack (~> 0.1) @@ -693,10 +706,9 @@ GEM nokogiri rest-client rubyzip (1.2.1) - rufus-scheduler (3.3.1) - tzinfo - safe_yaml (1.0.4) - sass (3.4.22) + rufus-scheduler (3.4.2) + et-orbi (~> 1.0) + sass (3.4.24) sass-rails (5.0.6) railties (>= 4.0.0, < 6) sass (~> 3.1) @@ -714,12 +726,13 @@ GEM faraday (~> 0.9) jwt (~> 1.5) multi_json (~> 1.10) - simplecov (0.12.0) + 
simple_oauth (0.2.0) + simplecov (0.14.1) docile (~> 1.1.0) json (>= 1.8, < 3) simplecov-html (~> 0.10.0) - simplecov-html (0.10.0) - sinatra (1.4.7) + simplecov-html (0.10.1) + sinatra (1.4.8) rack (~> 1.5) rack-protection (~> 1.4) tilt (>= 1.3, < 3) @@ -729,8 +742,9 @@ GEM httparty (>= 0.11.0) oauth2 (>= 0.9.2) slop (3.6.0) - solr_wrapper (0.19.0) + solr_wrapper (1.1.0) faraday + retriable ruby-progressbar rubyzip solrizer (3.4.1) @@ -753,17 +767,20 @@ GEM actionpack (>= 4.0) activesupport (>= 4.0) sprockets (>= 3.0.0) - sqlite3 (1.3.12) - sshkit (1.11.5) + sqlite3 (1.3.13) + sshkit (1.13.1) net-scp (>= 1.1.2) net-ssh (>= 2.8.0) stackprof (0.2.10) stomp (1.4.3) sxp (1.0.0) rdf (~> 2.0) + term-ansicolor (1.6.0) + tins (~> 1.0) thor (0.19.4) thread_safe (0.3.6) - tilt (2.0.5) + tilt (2.0.7) + tins (1.14.0) twitter-typeahead-rails (0.11.1.pre.corejavascript) actionpack (>= 3.1) jquery-rails @@ -771,15 +788,15 @@ GEM tzinfo (1.2.3) thread_safe (~> 0.1) uber (0.0.15) - uglifier (3.0.4) + uglifier (3.2.0) execjs (>= 0.3.0, < 3) unf (0.1.4) unf_ext - unf_ext (0.0.7.2) - unicode-display_width (1.1.2) + unf_ext (0.0.7.4) + unicode-display_width (1.2.1) vegas (0.1.11) rack (>= 1.0.0) - warden (1.2.6) + warden (1.2.7) rack (>= 1.0) web-console (2.3.0) activemodel (>= 4.0) @@ -795,7 +812,7 @@ GEM websocket-extensions (0.1.2) with_locking (1.0.2) xml-simple (1.1.5) - xpath (2.0.0) + xpath (2.1.0) nokogiri (~> 1.3) zk (1.9.6) zookeeper (~> 1.4.0) diff --git a/app/jobs/attach_derivative_job.rb b/app/jobs/attach_derivative_job.rb new file mode 100644 index 0000000000..3442213824 --- /dev/null +++ b/app/jobs/attach_derivative_job.rb @@ -0,0 +1,28 @@ +class AttachDerivativeJob < ActiveJob::Base + queue_as :attach_derivative + + def perform(derivative_id) + derivative = Derivative.find(derivative_id) + location = derivative.derivativeFile.split(/\//)[-4..-2].join('/') + filename = File.basename(derivative.derivativeFile) + client = Aws::S3::Client.new + bucket = Aws::S3::Bucket.new(name: Settings.encoding.derivative_bucket) + source_prefix = Pathname("pending/#{location}/") + target_prefix = Pathname("#{derivative.master_file_id}/#{derivative.quality}/") + + source_objects = bucket.objects(prefix: source_prefix.to_s) + source_objects.each do |source| + target = target_prefix.join(Pathname(source.key).relative_path_from(source_prefix)).to_s.sub(%r{/segments/},'/hls/') + + client.copy_object({ + copy_source: "#{source.bucket_name}/#{source.key}", + bucket: bucket.name, + key: target + }) + end + + derivative.derivativeFile = "s3://#{bucket.name}/#{target_prefix}#{filename}" + derivative.set_streaming_locations! + derivative.save + end +end diff --git a/app/migration/fedora_migrate/master_file/object_mover.rb b/app/migration/fedora_migrate/master_file/object_mover.rb index aebdf5c15c..04b798d402 100644 --- a/app/migration/fedora_migrate/master_file/object_mover.rb +++ b/app/migration/fedora_migrate/master_file/object_mover.rb @@ -1,11 +1,11 @@ # Copyright 2011-2017, The Trustees of Indiana University and Northwestern # University. Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. -# +# # You may obtain a copy of the License at -# +# # http://www.apache.org/licenses/LICENSE-2.0 -# +# # Unless required by applicable law or agreed to in writing, software distributed # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR # CONDITIONS OF ANY KIND, either express or implied. 
See the License for the @@ -59,7 +59,11 @@ def migrate_desc_metadata def migrate_transcoding_metadata return unless source.datastreams.keys.include?(MH_METADATA_DATASTREAM) mover = FedoraMigrate::MasterFile::MhMetadataDatastreamMover.new(source.datastreams[MH_METADATA_DATASTREAM], target) - mover.migrate + result = mover.migrate + if target.workflow_name.nil? || (not ::MasterFile::WORKFLOWS.include?(target.workflow_name)) + target.workflow_name = target.file_format == 'Sound' ? 'fullaudio' : 'avalon' + end + result end def migrate_poster_and_thumbnail diff --git a/app/models/admin/collection.rb b/app/models/admin/collection.rb index b4a8d7cd9e..0701d06e9f 100644 --- a/app/models/admin/collection.rb +++ b/app/models/admin/collection.rb @@ -21,6 +21,7 @@ class Admin::Collection < ActiveFedora::Base include Hydra::AdminPolicyBehavior include ActiveFedora::Associations include Identifier + include MigrationTarget has_many :media_objects, class_name: 'MediaObject', predicate: ActiveFedora::RDF::Fcrepo::RelsExt.isMemberOfCollection diff --git a/app/models/concerns/migration_target.rb b/app/models/concerns/migration_target.rb new file mode 100644 index 0000000000..434584d0bf --- /dev/null +++ b/app/models/concerns/migration_target.rb @@ -0,0 +1,25 @@ +# Copyright 2011-2017, The Trustees of Indiana University and Northwestern +# University. Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software distributed +# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +# CONDITIONS OF ANY KIND, either express or implied. See the License for the +# specific language governing permissions and limitations under the License. +# --- END LICENSE_HEADER BLOCK --- + +module MigrationTarget + + extend ActiveSupport::Concern + + included do + property :migrated_from, predicate: RDF::URI("http://www.w3.org/ns/prov#wasDerivedFrom"), multiple: true do |index| + index.as :symbol + end + end + +end diff --git a/app/models/derivative.rb b/app/models/derivative.rb index 969adc547d..9a759f066d 100644 --- a/app/models/derivative.rb +++ b/app/models/derivative.rb @@ -17,6 +17,7 @@ class Derivative < ActiveFedora::Base include DerivativeBehavior include FrameSize + include MigrationTarget belongs_to :master_file, class_name: 'MasterFile', predicate: ActiveFedora::RDF::Fcrepo::RelsExt.isDerivationOf diff --git a/app/models/lease.rb b/app/models/lease.rb index 6bfc133070..fbc24ca903 100644 --- a/app/models/lease.rb +++ b/app/models/lease.rb @@ -1,11 +1,11 @@ # Copyright 2011-2017, The Trustees of Indiana University and Northwestern # University. Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. -# +# # You may obtain a copy of the License at -# +# # http://www.apache.org/licenses/LICENSE-2.0 -# +# # Unless required by applicable law or agreed to in writing, software distributed # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR # CONDITIONS OF ANY KIND, either express or implied. See the License for the @@ -22,12 +22,13 @@ # Always set to the supplied date and 23:59:59 UTC for the time on save (end of the day). 
class Lease < ActiveFedora::Base include Hydra::AdminPolicyBehavior + include MigrationTarget scope :local, -> { where(lease_type_ssi: "local") } scope :user, -> { where(lease_type_ssi: "user") } scope :external, -> { where(lease_type_ssi: "external") } scope :ip, -> { where(lease_type_ssi: "ip") } - + before_save :apply_default_begin_time, :ensure_end_time_present, :validate_dates#, :format_times has_many :media_objects, class_name: 'MediaObject', predicate: ActiveFedora::RDF::ProjectHydra.isGovernedBy @@ -159,7 +160,7 @@ def determine_lease_type return "local" if Admin::Group.exists? group return "external" end - + def set_lease_type self.lease_type = determine_lease_type end diff --git a/app/models/master_file.rb b/app/models/master_file.rb index b82d3eec18..6f512d02c2 100644 --- a/app/models/master_file.rb +++ b/app/models/master_file.rb @@ -27,6 +27,7 @@ class MasterFile < ActiveFedora::Base include Permalink include FrameSize include Identifier + include MigrationTarget include MasterFileBehavior belongs_to :media_object, class_name: 'MediaObject', predicate: ActiveFedora::RDF::Fcrepo::RelsExt.isPartOf @@ -509,8 +510,14 @@ def mediainfo def find_frame_source(options={}) options[:offset] ||= 2000 - response = { source: FileLocator.new(file_location).location, offset: options[:offset], master: true } - return response if response[:source] =~ %r(^https?://) + source = FileLocator.new(file_location) + options[:master] = true + if source.source.nil? or (source.uri.scheme == 's3' and not source.exist?) + source = FileLocator.new(self.derivatives.where(quality_ssi: 'high').first.absolute_location) + options[:master] = false + end + response = { source: source&.location }.merge(options) + return response if response[:source].to_s =~ %r(^https?://) unless File.exists?(response[:source]) Rails.logger.warn("Masterfile `#{file_location}` not found. Extracting via HLS.") diff --git a/app/models/media_object.rb b/app/models/media_object.rb index f62c58c2e7..d0f708dc9e 100644 --- a/app/models/media_object.rb +++ b/app/models/media_object.rb @@ -1,11 +1,11 @@ # Copyright 2011-2017, The Trustees of Indiana University and Northwestern # University. Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. -# +# # You may obtain a copy of the License at -# +# # http://www.apache.org/licenses/LICENSE-2.0 -# +# # Unless required by applicable law or agreed to in writing, software distributed # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR # CONDITIONS OF ANY KIND, either express or implied. See the License for the @@ -21,6 +21,7 @@ class MediaObject < ActiveFedora::Base include Avalon::Workflow::WorkflowModelMixin include Permalink include Identifier + include MigrationTarget include SpeedyAF::OrderedAggregationIndex require 'avalon/controlled_vocabulary' diff --git a/lib/tasks/avalon.rake b/lib/tasks/avalon.rake index b964705b17..504f7360d0 100644 --- a/lib/tasks/avalon.rake +++ b/lib/tasks/avalon.rake @@ -43,7 +43,7 @@ EOC ids = Array(ids) | File.readlines(ENV['pidfile']).map(&:strip) unless ENV['pidfile'].nil? 
parallel_processes = ENV['parallel_processes'] overwrite = !!ENV['overwrite'] - namespace = ENV['namespace'] || 'avalon' + namespace = ENV['namespace'] || Settings&.fedora&.namespace || 'avalon' #disable callbacks Admin::Collection.skip_callback(:save, :around, :reindex_members) From 87fee337fd36488083416f450d25d8e2ae8c754a Mon Sep 17 00:00:00 2001 From: Michael Klein Date: Tue, 6 Jun 2017 08:35:46 -0500 Subject: [PATCH 006/237] Additional core functionality: Add oauth 0.5.1 back to Gemfile. Fix stupid sass logger problem Set streaming URLs properly on derivatives Harden MasterFile embed_title, fix up AttachDerivativeJob, add LTI config Fix LTI user lookup Sort out uid/email doubling LTI fixes Allow 2-part fields to be updated with empty strings Don't solrize 'immense' string fields Redirect v4-style PIDs to v6 pids. Index MasterFile#file_size as a long instead of an int Update mediainfo Javascript-encode embed title Add SolrCollectionAdmin service for solr collection backup/restore Restore direct-to-S3 web upload behavior Add support for unlinking course context IDs from MediaObjects ActiveEncodeJob fix Handle empty MasterFile#file_location more gracefully Begin to fix broken embedding Use `Aws::S3::Object#copy_from` instead of `Aws::S3::Client#copy_object` to take advantage of multipart copying Update to speedy-af v0.1.1 Update MasterFileManagementJobs to include s3 source/destination cases Change multipart_copy threshold to 15.megabytes across the board rename local -> file Allow old-style embed URLs Fix S3File addressing in MasterFileManagementJobs Treeify the MasterFile target filename instead of just prefixing it Add /is_it_alive route Fix html_safe Allow for nil file_locations in displaying MasterFile structure Properly handle embed authentication popup Update active_elastic_job to get periodic task support. Fix reversed optimize logic in solr backup job Always add stream cookies when securing streams. 
Pin aws-sdk to v2.x Configuration fixup --- .ebextensions/03_environment.config | 2 +- .ebextensions/04_nu_environment.config | 21 ++ .ebextensions/05_nginx_worker_timeout.config | 4 + .gitignore | 2 +- Gemfile | 18 +- Gemfile.lock | 257 ++++++++------- app/controllers/application_controller.rb | 12 + app/controllers/master_files_controller.rb | 7 +- app/controllers/media_objects_controller.rb | 3 +- app/helpers/security_helper.rb | 1 + app/jobs/attach_derivative_job.rb | 41 +-- app/jobs/delete_course_job.rb | 21 ++ app/jobs/delete_old_searches_job.rb | 7 + app/jobs/master_file_management_jobs.rb | 57 +++- app/jobs/s3_split_job.rb | 2 +- app/jobs/solr_backup_job.rb | 7 + app/models/course.rb | 14 +- app/models/derivative.rb | 4 +- app/models/elastic_transcoder_job.rb | 4 +- app/models/master_file.rb | 5 +- app/models/mods_behaviors.rb | 17 +- app/models/user.rb | 5 +- app/services/solr_collection_admin.rb | 25 ++ app/views/media_objects/_file_upload.html.erb | 296 +++++++++--------- app/views/media_objects/_structure.html.erb | 2 +- app/views/modules/_flash_messages.html.erb | 4 +- config/application.rb | 14 + config/environments/production.rb | 2 + config/initializers/af_environment_config.rb | 13 +- config/initializers/ezid.rb | 34 ++ config/initializers/user_key.rb | 1 + config/lti.yml | 16 + config/nu_vocab.yml | 31 ++ config/routes.rb | 6 + config/settings.yml | 1 + controlled_vocabulary.yml | 31 ++ cron.yaml | 8 + db/schema.rb | 35 ++- spec/lib/avalon/bib_retriever_spec.rb | 2 +- spec/models/media_object_spec.rb | 2 +- 40 files changed, 690 insertions(+), 344 deletions(-) create mode 100644 .ebextensions/04_nu_environment.config create mode 100644 .ebextensions/05_nginx_worker_timeout.config create mode 100644 app/jobs/delete_course_job.rb create mode 100644 app/jobs/delete_old_searches_job.rb create mode 100644 app/jobs/solr_backup_job.rb create mode 100644 app/services/solr_collection_admin.rb create mode 100644 config/initializers/ezid.rb create mode 100644 config/initializers/user_key.rb create mode 100644 config/lti.yml create mode 100644 config/nu_vocab.yml create mode 100644 controlled_vocabulary.yml create mode 100644 cron.yaml diff --git a/.ebextensions/03_environment.config b/.ebextensions/03_environment.config index d0b00b6bbf..a4ce655e4d 100644 --- a/.ebextensions/03_environment.config +++ b/.ebextensions/03_environment.config @@ -1,6 +1,6 @@ option_settings: - option_name: BUNDLE_WITH - value: aws:postgres + value: aws:postgres:zoom:ezid - option_name: BUNDLE_WITHOUT value: development:test - option_name: DISABLE_REDIS_CLUSTER diff --git a/.ebextensions/04_nu_environment.config b/.ebextensions/04_nu_environment.config new file mode 100644 index 0000000000..bc17183246 --- /dev/null +++ b/.ebextensions/04_nu_environment.config @@ -0,0 +1,21 @@ +option_settings: + - option_name: SETTINGS__AUTH__CONFIGURATION__NU__NAME + value: Northwestern WebSSO + - option_name: SETTINGS__AUTH__CONFIGURATION__NU__PROVIDER + value: openam + - option_name: SETTINGS__AUTH__CONFIGURATION__NU__PARAMS__AUTH_URL + value: https://websso.it.northwestern.edu/amserver + - option_name: SETTINGS__AUTH__CONFIGURATION__NU__PARAMS__COOKIE_NAME + value: openAMssoToken + - option_name: SETTINGS__BIB_RETRIEVER__PROTOCOL + value: z39.50 + - option_name: SETTINGS__BIB_RETRIEVER__HOST + value: na02.alma.exlibrisgroup.com + - option_name: SETTINGS__BIB_RETRIEVER__PORT + value: 1921 + - option_name: SETTINGS__BIB_RETRIEVER__DATABASE + value: 01NWU_INST + - option_name: SETTINGS__BIB_RETRIEVER__ATTRIBUTE + value: 
12 + - option_name: SETTINGS__CONTROLLED_VOCABULARY__PATH + value: config/nu_vocab.yml diff --git a/.ebextensions/05_nginx_worker_timeout.config b/.ebextensions/05_nginx_worker_timeout.config new file mode 100644 index 0000000000..5d91b52b01 --- /dev/null +++ b/.ebextensions/05_nginx_worker_timeout.config @@ -0,0 +1,4 @@ +commands: + set_worker_timeout: + command: 'sed -i "/match the name of upstream directive which is defined above/a\ proxy_read_timeout 60m;" /etc/nginx/conf.d/webapp_healthd.conf && service nginx restart' + test: 'source $(/opt/elasticbeanstalk/bin/get-config container -k support_dir)/envvars && [ -n "$SETTINGS__WORKER" ]' diff --git a/.gitignore b/.gitignore index 17154e2914..392f35f4b1 100644 --- a/.gitignore +++ b/.gitignore @@ -26,7 +26,7 @@ config/matterhorn.yml config/role_map_development.yml config/initializers/rubyhorn.rb config/secrets.yml -config/lti.yml +#config/lti.yml public/media_objects public/streams diff --git a/Gemfile b/Gemfile index 957c4cbc04..87e369561a 100644 --- a/Gemfile +++ b/Gemfile @@ -6,7 +6,7 @@ gem 'hydra-head', '~> 10.3.4' gem 'active-fedora', '~> 11.2' gem 'active_fedora-datastreams' gem 'active_fedora-noid', '~> 2.0.2' -gem 'speedy-af', '~>0.1.1' +gem 'speedy-af', '~> 0.1.1' gem 'blacklight', '~> 6.6' gem 'rdf', '~> 2.2' @@ -16,6 +16,7 @@ gem 'rails', '4.2.9' gem 'sqlite3' # Use SCSS for stylesheets gem 'sass-rails', '~> 5.0' +gem 'sass', '3.4.22' # Use Uglifier as compressor for JavaScript assets gem 'uglifier', '>= 1.3.0' # Use CoffeeScript for .coffee assets and views @@ -56,6 +57,7 @@ gem 'iconv' gem 'mediainfo', git: "https://github.com/avalonmediasystem/mediainfo.git", branch: 'remote_files' gem 'omniauth-identity' gem 'omniauth-lti', git: "https://github.com/avalonmediasystem/omniauth-lti.git", tag: 'avalon-r4' +gem 'ims-lti', '~> 1.1.13' gem 'omniauth-openam' gem 'net-ldap' gem 'edtf' @@ -74,6 +76,7 @@ gem 'with_locking' gem 'parallel' gem 'avalon-about', git: 'https://github.com/avalonmediasystem/avalon-about.git', tag: 'avalon-r6' gem 'about_page', git: 'https://github.com/avalonmediasystem/about_page.git', tag: 'avalon-r6.1' +gem 'jquery-datatables' gem 'config' gem 'marc' @@ -91,13 +94,16 @@ gem 'mediaelement-track-scrubber', git: 'https://github.com/avalonmediasystem/me gem 'resque', '~> 1.26.0' gem 'resque-scheduler', '~> 4.3.0' +gem 'redis-rails' group :production do gem 'lograge' + gem 'google-analytics-rails', '1.1.0' end group :development, :test do gem 'equivalent-xml' + gem 'rb-readline' # Call 'byebug' anywhere in the code to stop execution and get a debugger console gem 'byebug' gem 'pry-rails' @@ -150,11 +156,11 @@ group :test do end group :aws, optional: true do - gem 'aws-sdk' + gem 'aws-sdk', '~> 2.0' gem 'aws-sdk-rails' gem 'cloudfront-signer' gem 'zk' - gem 'active_elastic_job', '~> 1.7' + gem 'active_elastic_job', '~> 2.0' end group :zoom, optional: true do @@ -167,6 +173,12 @@ end group :postgres, optional: true do gem 'pg' end +group :ssl_dev, optional: true do + gem 'puma' +end +group :ezid, optional: true do + gem 'ezid-client' +end extra_gems = File.expand_path("../Gemfile.local",__FILE__) eval File.read(extra_gems) if File.exists?(extra_gems) diff --git a/Gemfile.lock b/Gemfile.lock index 3914fcdfce..5809065f4f 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -1,8 +1,8 @@ GIT remote: http://github.com/projecthydra-labs/active-encode.git - revision: 715159a9b66a5b2418701cc6b962df66d677034f + revision: 83d3bd1a8c3f262de7284c61bcd4285de3ef0c91 specs: - active_encode (0.1.0) + active_encode (0.1.1) 
activesupport GIT @@ -31,8 +31,7 @@ GIT GIT remote: https://github.com/avalonmediasystem/media-element-add-to-playlist.git - revision: 5585bd06178dee4dbd2f8ff64164aad803adeb43 - tag: avalon-r6.2 + revision: 129f6bb1d05bd9cea5b359d25ccd2288f2336632 specs: media_element_add_to_playlist (0.0.1) rails (~> 4.0) @@ -92,7 +91,7 @@ GIT GIT remote: https://github.com/avalonmediasystem/mediainfo.git - revision: cd5b2675958f3215f93190ab696727dd2327e842 + revision: f6d7a14c5035effc9eb9986ff56621b020ebc27f branch: remote_files specs: mediainfo (0.7.1) @@ -109,7 +108,6 @@ GIT GIT remote: https://github.com/avalonmediasystem/rubyhorn.git revision: fd1e0ee2cca53df515f58690603e9a6351066106 - tag: avalon-r6 specs: rubyhorn (0.0.6) activesupport @@ -122,20 +120,11 @@ GIT GIT remote: https://github.com/javan/whenever.git - revision: 1dcb91484e6f1ee91c9272daccbe84111754102b + revision: b4f61737b89bf5a0b699989dc1bceb50cdb258c1 specs: whenever (0.9.7) chronic (>= 0.6.3) -GIT - remote: https://github.com/projecthydra-labs/speedy_af.git - revision: 661b34c57f4e8de9db2c07aa80acce7c6288462d - tag: v0.1.0 - specs: - speedy-af (0.1.0) - active-fedora (>= 11.0.0) - activesupport - GIT remote: https://github.com/rkallensee/bootstrap-toggle-rails.git revision: 1eaf2b57b4e2fab387f913ef6833ab735eacb0d4 @@ -163,13 +152,15 @@ GEM builder (~> 3.1) erubis (~> 2.7.0) rails-dom-testing (~> 1.0, >= 1.0.5) - rails-html-sanitizer (~> 1.0, >= 1.0.2) - active-fedora (11.2.0) + rails-html-sanitizer (~> 1.0, >= 1.0.3) + active-fedora (11.4.0) active-triples (~> 0.11.0) activemodel (>= 4.2, < 6) activesupport (>= 4.2.4, < 6) deprecation - ldp (~> 0.6.0) + faraday (~> 0.12.1) + faraday-encoding (= 0.0.4) + ldp (~> 0.7.0) rsolr (>= 1.1.2, < 3) ruby-progressbar (~> 1.0) solrizer (>= 3.4, < 5) @@ -181,7 +172,7 @@ GEM active_annotations (0.2.2) json-ld rdf-vocab (~> 2.1.0) - active_elastic_job (1.7.0) + active_elastic_job (2.0.1) aws-sdk (~> 2) rails (>= 4.2) active_fedora-datastreams (0.1.0) @@ -209,42 +200,47 @@ GEM multi_json (~> 1.11, >= 1.11.2) rack (>= 1.5.2, < 3) railties (>= 4.0, < 5.2) - activesupport (4.2.7.1) + activesupport (4.2.9) i18n (~> 0.7) minitest (~> 5.1) thread_safe (~> 0.3, >= 0.3.4) tzinfo (~> 1.1) - acts_as_list (0.9.5) + acts_as_list (0.9.7) activerecord (>= 3.0) - addressable (2.5.1) - public_suffix (~> 2.0, >= 2.0.2) - airbrussh (1.2.0) + addressable (2.5.2) + public_suffix (>= 2.0.2, < 4.0) + airbrussh (1.3.0) sshkit (>= 1.6.1, != 1.7.0) api-pagination (4.6.3) arel (6.0.4) ast (2.3.0) - autoprefixer-rails (7.1.1) + autoprefixer-rails (7.1.3) execjs - aws-sdk (2.9.27) - aws-sdk-resources (= 2.9.27) - aws-sdk-core (2.9.27) + aws-sdk (2.10.37) + aws-sdk-resources (= 2.10.37) + aws-sdk-core (2.10.37) aws-sigv4 (~> 1.0) jmespath (~> 1.0) aws-sdk-rails (1.0.1) aws-sdk-resources (~> 2) railties (>= 3) - aws-sdk-resources (2.9.27) - aws-sdk-core (= 2.9.27) - aws-sigv4 (1.0.0) + aws-sdk-resources (2.10.37) + aws-sdk-core (= 2.10.37) + aws-sigv4 (1.0.2) + babel-source (5.8.35) + babel-transpiler (0.7.0) + babel-source (>= 4.0, < 6) + execjs (~> 2.0) bcrypt (3.1.11) bcrypt-ruby (3.1.5) bcrypt (>= 3.1.3) binding_of_caller (0.7.2) debug_inspector (>= 0.0.1) - blacklight (6.10.0) + blacklight (6.11.0) bootstrap-sass (~> 3.2) deprecation globalid + jbuilder kaminari (>= 0.15) nokogiri (~> 1.6) rails (>= 4.2, < 6) @@ -273,9 +269,9 @@ GEM signet skydrive builder (3.2.3) - byebug (9.0.6) + byebug (9.1.0) cancancan (1.17.0) - capistrano (3.8.1) + capistrano (3.9.0) airbrussh (>= 1.0.0) i18n rake (>= 10.0.0) @@ -285,7 +281,7 @@ GEM 
sshkit (~> 1.2) capistrano-passenger (0.2.0) capistrano (~> 3.0) - capistrano-rails (1.2.3) + capistrano-rails (1.3.0) capistrano (~> 3.1) capistrano-bundler (~> 1.1) capistrano-resque (0.2.3) @@ -295,15 +291,16 @@ GEM capistrano-rvm (0.1.2) capistrano (~> 3.0) sshkit (~> 1.2) - capybara (2.14.0) + capybara (2.15.1) addressable - mime-types (>= 1.16) + mini_mime (>= 0.1.3) nokogiri (>= 1.3.3) rack (>= 1.0.0) rack-test (>= 0.5.4) xpath (~> 2.0) chronic (0.10.2) - cloudfront-signer (3.0.1) + cliver (0.3.2) + cloudfront-signer (3.0.2) coderay (1.1.1) coffee-rails (4.1.1) coffee-script (>= 2.2.0) @@ -348,8 +345,8 @@ GEM railties (>= 3.2, < 5.2) dropbox-sdk (1.6.5) json - ebnf (1.1.0) - rdf (~> 2.0) + ebnf (1.1.1) + rdf (~> 2.2) sxp (~> 1.0) edtf (3.0.2) activesupport (>= 3.0, < 6.0) @@ -360,22 +357,23 @@ GEM equivalent-xml (0.6.0) nokogiri (>= 1.4.3) erubis (2.7.0) - et-orbi (1.0.4) + et-orbi (1.0.5) tzinfo execjs (2.7.0) + ezid-client (1.7.1) + hashie (~> 3.4, >= 3.4.3) factory_girl (4.8.0) activesupport (>= 3.0.0) factory_girl_rails (4.8.0) factory_girl (~> 4.8.0) railties (>= 3.0.0) - fakefs (0.11.0) - faker (1.7.3) + fakefs (0.11.1) + faker (1.8.4) i18n (~> 0.5) - fakeweb (1.3.0) - faraday (0.11.0) + faraday (0.12.2) multipart-post (>= 1.2, < 3) - faraday_middleware (0.11.0.1) - faraday (>= 0.7.4, < 1.0) + faraday-encoding (0.0.4) + faraday fcrepo_wrapper (0.8.0) ruby-progressbar fedora-migrate (0.5.0) @@ -387,31 +385,33 @@ GEM railties (>= 3.2, < 5.2) globalid (0.4.0) activesupport (>= 4.2.0) - google-api-client (0.11.3) - addressable (>= 2.5.1) + google-analytics-rails (1.1.0) + google-api-client (0.12.0) + addressable (~> 2.5, >= 2.5.1) googleauth (~> 0.5) httpclient (>= 2.8.1, < 3.0) - mime-types (>= 3.0) + mime-types (~> 3.0) representable (~> 3.0) retriable (>= 2.0, < 4.0) - google_drive (2.1.4) - google-api-client (>= 0.11.0, < 0.12.0) + google_drive (2.1.5) + google-api-client (>= 0.11.0, < 0.13.0) googleauth (>= 0.5.0, < 1.0.0) nokogiri (>= 1.5.3, < 2.0.0) - googleauth (0.5.1) - faraday (~> 0.9) + googleauth (0.5.3) + faraday (~> 0.12) jwt (~> 1.4) logging (~> 2.0) memoist (~> 0.12) multi_json (~> 1.11) os (~> 0.9) signet (~> 0.7) - haml (4.0.7) + haml (5.0.2) + temple (>= 0.8.0) tilt hamster (3.0.0) concurrent-ruby (~> 1.0) - hashdiff (0.3.4) - hashie (3.5.5) + hashdiff (0.3.6) + hashie (3.5.6) hooks (0.4.1) uber (~> 0.0.14) htmlentities (4.3.4) @@ -422,7 +422,7 @@ GEM http-cookie (1.0.3) domain_name (~> 0.5) http_logger (0.5.1) - httparty (0.15.5) + httparty (0.15.6) multi_xml (>= 0.5.2) httpclient (2.8.3) hydra-access-controls (10.3.4) @@ -439,17 +439,16 @@ GEM hydra-access-controls (= 10.3.4) hydra-core (= 10.3.4) rails (>= 3.2.6) - i18n (0.8.4) + i18n (0.8.6) iconv (1.0.4) - ims-lti (2.1.2) - builder (~> 3.2) - faraday (~> 0.8) - faraday_middleware (~> 0.8) - simple_oauth (= 0.2) - jbuilder (2.6.4) - activesupport (>= 3.0.0) + ims-lti (1.1.13) + builder + oauth (>= 0.4.5, < 0.6) + jbuilder (2.7.0) + activesupport (>= 4.2.0) multi_json (>= 1.2) jmespath (1.3.1) + jquery-datatables (1.10.15) jquery-rails (4.3.1) rails-dom-testing (>= 1, < 3) railties (>= 4.2.0) @@ -457,9 +456,9 @@ GEM jquery-ui-rails (6.0.1) railties (>= 3.2.16) json (1.8.6) - json-ld (2.1.2) + json-ld (2.1.5) multi_json (~> 1.12) - rdf (~> 2.1) + rdf (~> 2.2) jwt (1.5.6) kaminari (1.0.1) activesupport (>= 4.1.0) @@ -475,7 +474,7 @@ GEM kaminari-core (1.0.1) launchy (2.4.3) addressable (~> 2.3) - ldp (0.6.4) + ldp (0.7.0) deprecation faraday http_logger @@ -490,33 +489,35 @@ GEM logging (2.2.2) little-plugger 
(~> 1.1) multi_json (~> 1.10) - lograge (0.5.1) + lograge (0.6.0) actionpack (>= 4, < 5.2) activesupport (>= 4, < 5.2) railties (>= 4, < 5.2) + request_store (~> 1.0) loofah (2.0.3) nokogiri (>= 1.5.9) - mail (2.6.5) + mail (2.6.6) mime-types (>= 1.16, < 4) - marc (1.0.0) + marc (1.0.2) scrub_rb (>= 1.0.1, < 2) unf media-element-logo-plugin (0.0.2) rails (>= 3.2.3) mediashelf-loggable (0.4.10) - memoist (0.15.0) + memoist (0.16.0) method_source (0.8.2) mime-types (3.1) mime-types-data (~> 3.2015) mime-types-data (3.2016.0521) mimemagic (0.3.2) - mini_portile2 (2.1.0) - minitest (5.10.2) + mini_mime (0.1.4) + mini_portile2 (2.2.0) + minitest (5.10.3) mono_logger (1.1.0) multi_json (1.12.1) multi_xml (0.6.0) multipart-post (2.0.0) - mysql2 (0.4.6) + mysql2 (0.4.9) net-http-digest_auth (1.4.1) net-ldap (0.16.0) net-scp (1.2.1) @@ -524,14 +525,15 @@ GEM net-ssh (4.1.0) netrc (0.11.0) noid (0.9.0) - nokogiri (1.7.2) - mini_portile2 (~> 2.1.0) + nokogiri (1.8.0) + mini_portile2 (~> 2.2.0) nom-xml (0.6.0) activesupport (>= 3.2.18) i18n nokogiri - oauth2 (1.3.1) - faraday (>= 0.8, < 0.12) + oauth (0.5.3) + oauth2 (1.4.0) + faraday (>= 0.8, < 0.13) jwt (~> 1.0) multi_json (~> 1.3) multi_xml (~> 0.5) @@ -547,26 +549,31 @@ GEM omniauth-identity (1.1.1) bcrypt-ruby (~> 3.0) omniauth (~> 1.0) - omniauth-openam (1.0.0) + omniauth-openam (1.1.0) faraday omniauth (~> 1.0) orm_adapter (0.5.0) os (0.9.6) - parallel (1.11.2) + parallel (1.12.0) parser (2.4.0.0) ast (~> 2.2) - pg (0.20.0) + pg (0.21.0) + poltergeist (1.16.0) + capybara (~> 2.1) + cliver (~> 0.3.1) + websocket-driver (>= 0.2.0) powerpack (0.1.1) pry (0.10.4) coderay (~> 1.1.0) method_source (~> 0.8.1) slop (~> 3.4) - pry-byebug (3.4.2) - byebug (~> 9.0) + pry-byebug (3.5.0) + byebug (~> 9.1) pry (~> 0.10) pry-rails (0.3.6) pry (>= 0.10.4) - public_suffix (2.0.5) + public_suffix (3.0.0) + puma (3.10.0) rack (1.6.8) rack-mini-profiler (0.10.5) rack (>= 1.2.0) @@ -601,32 +608,32 @@ GEM rainbow (2.2.2) rake rake (12.0.0) - rb-readline (0.5.4) + rb-readline (0.5.5) rchardet (1.6.1) - rdf (2.2.6) + rdf (2.2.9) hamster (~> 3.0) link_header (~> 0.0, >= 0.0.8) rdf-aggregate-repo (2.2.0) rdf (~> 2.0) rdf-isomorphic (2.0.0) rdf (~> 2.0) - rdf-rdfa (2.1.1) - haml (~> 4.0) + rdf-rdfa (2.2.3) + haml (~> 5.0) htmlentities (~> 4.3) - rdf (~> 2.0) - rdf-aggregate-repo (~> 2.0) - rdf-xsd (~> 2.0) - rdf-rdfxml (2.0.0) + rdf (~> 2.2) + rdf-aggregate-repo (~> 2.2) + rdf-xsd (~> 2.1) + rdf-rdfxml (2.2.0) htmlentities (~> 4.3) rdf (~> 2.0) rdf-rdfa (~> 2.0) rdf-xsd (~> 2.0) - rdf-turtle (2.0.0) - ebnf (~> 1.0, >= 1.0.1) - rdf (~> 2.0) + rdf-turtle (2.2.0) + ebnf (~> 1.1) + rdf (~> 2.2) rdf-vocab (2.1.1) rdf (~> 2.1) - rdf-xsd (2.1.0) + rdf-xsd (2.2.0) rdf (~> 2.1) rdoc (4.3.0) redis (3.3.3) @@ -634,15 +641,25 @@ GEM actionpack (>= 4.0, < 6) redis-rack (>= 1, < 3) redis-store (>= 1.1.0, < 1.4.0) - redis-activesupport (5.0.2) + redis-activesupport (5.0.3) activesupport (>= 3, < 6) redis-store (~> 1.3.0) redis-namespace (1.5.3) redis (~> 3.0, >= 3.0.4) + redis-rack (2.0.2) + rack (>= 1.5, < 3) + redis-store (>= 1.2, < 1.4) + redis-rails (5.0.2) + redis-actionpack (>= 5.0, < 6) + redis-activesupport (>= 5.0, < 6) + redis-store (>= 1.2, < 2) + redis-store (1.3.0) + redis (>= 2.2) representable (3.0.4) declarative (< 0.1.0) declarative-option (< 0.2.0) uber (< 0.2.0) + request_store (1.3.2) responders (2.4.0) actionpack (>= 4.2.0, < 5.3) railties (>= 4.2.0, < 5.3) @@ -661,7 +678,7 @@ GEM http-cookie (>= 1.0.2, < 2.0) mime-types (>= 1.16, < 4.0) netrc (~> 0.8) - retriable 
(3.0.2) + retriable (3.1.1) roo (2.7.1) nokogiri (~> 1) rubyzip (~> 1.1, < 2.0.0) @@ -675,7 +692,7 @@ GEM rspec-mocks (3.6.0) diff-lcs (>= 1.2.0, < 2.0) rspec-support (~> 3.6.0) - rspec-rails (3.6.0) + rspec-rails (3.6.1) actionpack (>= 3.0) activesupport (>= 3.0) railties (>= 3.0) @@ -683,6 +700,8 @@ GEM rspec-expectations (~> 3.6.0) rspec-mocks (~> 3.6.0) rspec-support (~> 3.6.0) + rspec-retry (0.5.5) + rspec-core (> 3.3, < 3.7) rspec-support (3.6.0) rubocop (0.40.0) parser (>= 2.3.1.0, < 3.0) @@ -708,7 +727,8 @@ GEM rubyzip (1.2.1) rufus-scheduler (3.4.2) et-orbi (~> 1.0) - sass (3.4.24) + safe_yaml (1.0.4) + sass (3.4.22) sass-rails (5.0.6) railties (>= 4.0.0, < 6) sass (~> 3.1) @@ -719,19 +739,18 @@ GEM sdoc (0.4.2) json (~> 1.7, >= 1.7.7) rdoc (~> 4.0) - shoulda-matchers (3.1.1) + shoulda-matchers (3.1.2) activesupport (>= 4.0.0) signet (0.7.3) addressable (~> 2.3) faraday (~> 0.9) jwt (~> 1.5) multi_json (~> 1.10) - simple_oauth (0.2.0) simplecov (0.14.1) docile (~> 1.1.0) json (>= 1.8, < 3) simplecov-html (~> 0.10.0) - simplecov-html (0.10.1) + simplecov-html (0.10.2) sinatra (1.4.8) rack (~> 1.5) rack-protection (~> 1.4) @@ -768,19 +787,20 @@ GEM activesupport (>= 4.0) sprockets (>= 3.0.0) sqlite3 (1.3.13) - sshkit (1.13.1) + sshkit (1.14.0) net-scp (>= 1.1.2) net-ssh (>= 2.8.0) stackprof (0.2.10) - stomp (1.4.3) + stomp (1.4.4) sxp (1.0.0) rdf (~> 2.0) + temple (0.8.0) term-ansicolor (1.6.0) tins (~> 1.0) thor (0.19.4) thread_safe (0.3.6) - tilt (2.0.7) - tins (1.14.0) + tilt (2.0.8) + tins (1.15.0) twitter-typeahead-rails (0.11.1.pre.corejavascript) actionpack (>= 3.1) jquery-rails @@ -793,7 +813,7 @@ GEM unf (0.1.4) unf_ext unf_ext (0.0.7.4) - unicode-display_width (1.2.1) + unicode-display_width (1.3.0) vegas (0.1.11) rack (>= 1.0.0) warden (1.2.7) @@ -814,6 +834,8 @@ GEM xml-simple (1.1.5) xpath (2.1.0) nokogiri (~> 1.3) + xray-rails (0.3.1) + rails (>= 3.1.0) zk (1.9.6) zookeeper (~> 1.4.0) zookeeper (1.4.11) @@ -826,7 +848,7 @@ DEPENDENCIES about_page! active-fedora (~> 11.2) active_annotations (~> 0.2.2) - active_elastic_job (~> 1.7) + active_elastic_job (~> 2.0) active_encode! active_fedora-datastreams active_fedora-noid (~> 2.0.2) @@ -835,7 +857,7 @@ DEPENDENCIES api-pagination avalon-about! avalon-workflow! - aws-sdk + aws-sdk (~> 2.0) aws-sdk-rails blacklight (~> 6.6) bootstrap-toggle-rails! @@ -858,16 +880,19 @@ DEPENDENCIES edtf email_spec equivalent-xml + ezid-client factory_girl_rails fakefs faker fcrepo_wrapper fedora-migrate (~> 0.5.0) flamegraph + google-analytics-rails (= 1.1.0) hashdiff hooks hydra-head (~> 10.3.4) iconv + ims-lti (~> 1.1.13) jbuilder (~> 2.0) jquery-datatables jquery-rails @@ -894,11 +919,13 @@ DEPENDENCIES poltergeist pry-byebug pry-rails + puma rack-mini-profiler rails (= 4.2.9) rb-readline rdf (~> 2.2) rdf-rdfxml + redis-rails resque (~> 1.26.0) resque-scheduler (~> 4.3.0) rest-client @@ -908,6 +935,7 @@ DEPENDENCIES rspec-retry rubocop (~> 0.40.0) rubyhorn! + sass (= 3.4.22) sass-rails (~> 5.0) sdoc (~> 0.4.0) shoulda-matchers @@ -922,8 +950,9 @@ DEPENDENCIES webmock whenever! with_locking + xray-rails zk zoom BUNDLED WITH - 1.14.6 + 1.15.4 diff --git a/app/controllers/application_controller.rb b/app/controllers/application_controller.rb index 729d9d8b83..01d63f110e 100644 --- a/app/controllers/application_controller.rb +++ b/app/controllers/application_controller.rb @@ -27,6 +27,18 @@ class ApplicationController < ActionController::Base helper_method :render_bookmarks_control? 
around_action :handle_api_request, if: proc{|c| request.format.json?} + before_action :rewrite_v4_ids, if: proc{|c| request.method_symbol == :get && [params[:id], params[:content]].compact.any? { |i| i =~ /^[a-z]+:[0-9]+$/}} + + def alive + render inline: 'OK' + end + + def rewrite_v4_ids + return if params[:controller] =~ /migration/ + new_id = ActiveFedora::SolrService.query(%{identifier_ssim:"#{params[:id]}"}, rows: 1, fl: 'id').first['id'] + new_content_id = params[:content] ? ActiveFedora::SolrService.query(%{identifier_ssim:"#{params[:content]}"}, rows: 1, fl: 'id').first['id'] : nil + redirect_to(url_for(params.merge(id: new_id, content: new_content_id))) + end def store_location store_location_for(:user, request.url) diff --git a/app/controllers/master_files_controller.rb b/app/controllers/master_files_controller.rb index b4a1c35638..75df587798 100644 --- a/app/controllers/master_files_controller.rb +++ b/app/controllers/master_files_controller.rb @@ -46,10 +46,9 @@ def show end def embed - @masterfile = MasterFile.find(params[:id]) - if can? :read, @masterfile.mediaobject - add_stream_cookies(id: @masterfile.id) - @stream_info = secure_streams(@masterfile.stream_details) + @master_file = MasterFile.find(params[:id]) + if can? :read, @master_file.media_object + @stream_info = secure_streams(@master_file.stream_details) end respond_to do |format| format.html do diff --git a/app/controllers/media_objects_controller.rb b/app/controllers/media_objects_controller.rb index eaccef68f0..e6dfdd80c2 100644 --- a/app/controllers/media_objects_controller.rb +++ b/app/controllers/media_objects_controller.rb @@ -436,7 +436,6 @@ def set_player_token def load_current_stream set_active_file set_player_token - add_stream_cookies(id: @currentStream.id) unless @currentStream.nil? @currentStreamInfo = @currentStream.nil? ? {} : secure_streams(@currentStream.stream_details) @currentStreamInfo['t'] = view_context.parse_media_fragment(params[:t]) # add MediaFragment from params end @@ -506,9 +505,11 @@ def media_object_parameters mo_parameters end + def master_files_params # TODO: Restrist permitted params!!! params.permit! 
params[:files] end + end diff --git a/app/helpers/security_helper.rb b/app/helpers/security_helper.rb index 319e3f8ac5..f28e794f6e 100644 --- a/app/helpers/security_helper.rb +++ b/app/helpers/security_helper.rb @@ -6,6 +6,7 @@ def add_stream_cookies(stream_info) end def secure_streams(stream_info) + add_stream_cookies(id: stream_info[:id]) [:stream_flash, :stream_hls].each do |protocol| stream_info[protocol].each do |quality| quality[:url] = SecurityHandler.secure_url(quality[:url], session: session, target: stream_info[:id], protocol: protocol) diff --git a/app/jobs/attach_derivative_job.rb b/app/jobs/attach_derivative_job.rb index 3442213824..a74aa38050 100644 --- a/app/jobs/attach_derivative_job.rb +++ b/app/jobs/attach_derivative_job.rb @@ -3,26 +3,31 @@ class AttachDerivativeJob < ActiveJob::Base def perform(derivative_id) derivative = Derivative.find(derivative_id) - location = derivative.derivativeFile.split(/\//)[-4..-2].join('/') - filename = File.basename(derivative.derivativeFile) - client = Aws::S3::Client.new - bucket = Aws::S3::Bucket.new(name: Settings.encoding.derivative_bucket) - source_prefix = Pathname("pending/#{location}/") - target_prefix = Pathname("#{derivative.master_file_id}/#{derivative.quality}/") + changed = false + unless derivative.absolute_location =~ %r{^s3://} + location = derivative.absolute_location.split(/\//)[-4..-2].join('/') + filename = File.basename(derivative.absolute_location) + bucket = Aws::S3::Bucket.new(name: Settings.encoding.derivative_bucket) + source_prefix = Pathname("pending/#{location}/") + target_prefix = Pathname("#{derivative.master_file_id}/#{derivative.quality}/") - source_objects = bucket.objects(prefix: source_prefix.to_s) - source_objects.each do |source| - target = target_prefix.join(Pathname(source.key).relative_path_from(source_prefix)).to_s.sub(%r{/segments/},'/hls/') - - client.copy_object({ - copy_source: "#{source.bucket_name}/#{source.key}", - bucket: bucket.name, - key: target - }) + source_objects = bucket.objects(prefix: source_prefix.to_s) + source_objects.each do |source| + target = target_prefix.join(Pathname(source.key).relative_path_from(source_prefix)).to_s.sub(%r{/segments/},'/hls/') + destination = bucket.object(target) + next if destination.exists? + destination.copy_from(source, multipart_copy: source.size > 15.megabytes) + end + derivative.absolute_location = "s3://#{bucket.name}/#{target_prefix}#{filename}" + changed = true end - derivative.derivativeFile = "s3://#{bucket.name}/#{target_prefix}#{filename}" - derivative.set_streaming_locations! - derivative.save + unless derivative.location_url =~ %r{^s3://} && derivative.hls_url =~ %r{^s3://} + derivative.location_url = derivative.absolute_location + uri = URI.parse(derivative.absolute_location) + derivative.hls_url = uri.merge("hls/#{File.basename(uri.path,'.*')}.m3u8").to_s + changed = true + end + derivative.save if changed end end diff --git a/app/jobs/delete_course_job.rb b/app/jobs/delete_course_job.rb new file mode 100644 index 0000000000..24fe95d7f0 --- /dev/null +++ b/app/jobs/delete_course_job.rb @@ -0,0 +1,21 @@ +# Copyright 2011-2017, The Trustees of Indiana University and Northwestern +# University. Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
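A note on the ApplicationController hunk above: the new before_action assumes every legacy "namespace:123" identifier still resolves in Solr, so an unmatched id makes .first['id'] raise NoMethodError. A nil-guarded sketch of the same lookup (same field name and SolrService call as the hunk; :content handling omitted for brevity; this is a sketch, not the committed code):

    def rewrite_v4_ids
      return if params[:controller] =~ /migration/
      doc = ActiveFedora::SolrService.query(%{identifier_ssim:"#{params[:id]}"}, rows: 1, fl: 'id').first
      return head :not_found if doc.nil?   # answer 404 instead of raising
      redirect_to(url_for(params.merge(id: doc['id'])))
    end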
+# +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software distributed +# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +# CONDITIONS OF ANY KIND, either express or implied. See the License for the +# specific language governing permissions and limitations under the License. +# --- END LICENSE_HEADER BLOCK --- + +class DeleteCourseJob < ActiveJob::Base + queue_as :delete_course + def perform(context_id) + Course.unlink_all(context_id) + Course.find_by(context_id: context_id)&.destroy + end +end diff --git a/app/jobs/delete_old_searches_job.rb b/app/jobs/delete_old_searches_job.rb new file mode 100644 index 0000000000..88947a837a --- /dev/null +++ b/app/jobs/delete_old_searches_job.rb @@ -0,0 +1,7 @@ +class DeleteOldSearchesJob < ActiveJob::Base + + def perform + Search.where(['created_at < ? AND user_id IS NULL', 20.minutes.ago]).destroy_all + end + +end diff --git a/app/jobs/master_file_management_jobs.rb b/app/jobs/master_file_management_jobs.rb index 51c4870819..b27236b6e7 100644 --- a/app/jobs/master_file_management_jobs.rb +++ b/app/jobs/master_file_management_jobs.rb @@ -1,11 +1,11 @@ # Copyright 2011-2017, The Trustees of Indiana University and Northwestern # University. Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. -# +# # You may obtain a copy of the License at -# +# # http://www.apache.org/licenses/LICENSE-2.0 -# +# # Unless required by applicable law or agreed to in writing, software distributed # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR # CONDITIONS OF ANY KIND, either express or implied. See the License for the @@ -17,19 +17,50 @@ module MasterFileManagementJobs class Move < ActiveJob::Base queue_as :master_file_management_move + + def s3_to_s3(source, dest) + source_object = FileLocator::S3File.new(source.source).object + dest_object = FileLocator::S3File.new(dest.source).object + if dest_object.copy_from(source_object, multipart_copy: source_object.size > 15.megabytes) + source_object.delete + end + end + + def s3_to_file(source, dest) + source_object = FileLocator::S3File.new(source.source).object + FileUtils.mkdir_p File.dirname(dest.uri.path) unless File.exist? File.dirname(dest.uri.path) + if source_object.download_file(dest.uri.path) + source_object.delete + end + end + + def file_to_s3(source, dest) + dest_object = FileLocator::S3File.new(dest.source).object + if dest_object.upload_file(source.uri.path) + FileUtils.rm(source.uri.path) + end + end + + def file_to_file(source, dest) + FileUtils.mkdir_p File.dirname(dest.uri.path) unless File.exist? File.dirname(dest.uri.path) + FileUtils.mv source.uri.path, dest.uri.path + end + def perform(id, newpath) Rails.logger.debug "Moving masterfile to #{newpath}" masterfile = MasterFile.find(id) oldpath = masterfile.file_location - if File.exist? oldpath - FileUtils.mkdir_p File.dirname(newpath) unless File.exist? File.dirname(newpath) - FileUtils.mv oldpath, newpath - masterfile.file_location = newpath + old_locator = FileLocator.new(oldpath) + if old_locator.exists? 
+ new_locator = FileLocator.new(newpath) + copy_method = "#{old_locator.uri.scheme}_to_#{new_locator.uri.scheme}".to_sym + send(copy_method, old_locator, new_locator) + masterfile.file_location = newpath masterfile.save - Rails.logger.info "#{oldpath} has been moved to #{newpath}" + Rails.logger.info "#{oldpath} has been moved to #{newpath}" else - Rails.logger.error "MasterFile #{oldpath} does not exist" + Rails.logger.error "MasterFile #{oldpath} does not exist" end end end @@ -41,8 +72,12 @@ def perform(id) masterfile = MasterFile.find(id) oldpath = masterfile.file_location - if File.exist? oldpath - File.delete(oldpath) + locator = FileLocator.new(oldpath) + if locator.exists? + case locator.uri.scheme + when 'file' then File.delete(oldpath) + when 's3' then FileLocator::S3File.new(locator.source).object.delete + end masterfile.file_location = "" masterfile.save Rails.logger.info "#{oldpath} has been deleted" diff --git a/app/jobs/s3_split_job.rb b/app/jobs/s3_split_job.rb index 3d0a76a7c6..dba3802fd2 100644 --- a/app/jobs/s3_split_job.rb +++ b/app/jobs/s3_split_job.rb @@ -36,7 +36,7 @@ def perform(file) if Kernel.system(*cmd) segment_files = Dir[File.join(dir,'*')] segment_files.each do |seg| - File.open(seg,'r') { |io| bucket.put_object(key: File.join(path,'segments',File.basename(seg)), body: io) } + File.open(seg,'r') { |io| bucket.put_object(key: File.join(path,'hls',File.basename(seg)), body: io) } end end end diff --git a/app/jobs/solr_backup_job.rb b/app/jobs/solr_backup_job.rb new file mode 100644 index 0000000000..d86851ed39 --- /dev/null +++ b/app/jobs/solr_backup_job.rb @@ -0,0 +1,7 @@ +class SolrBackupJob < ActiveJob::Base + queue_as :solr_backup + + def perform(location = '/data/backup') + SolrCollectionAdmin.new.backup(location) + end +end diff --git a/app/models/course.rb b/app/models/course.rb index cc5917aed7..e2c6cdb087 100644 --- a/app/models/course.rb +++ b/app/models/course.rb @@ -1,11 +1,11 @@ # Copyright 2011-2017, The Trustees of Indiana University and Northwestern # University. Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. -# +# # You may obtain a copy of the License at -# +# # http://www.apache.org/licenses/LICENSE-2.0 -# +# # Unless required by applicable law or agreed to in writing, software distributed # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR # CONDITIONS OF ANY KIND, either express or implied. 
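The Move job above now handles all four combinations of local and S3 storage by building a method name from the two URI schemes. A usage sketch (paths are hypothetical; assumes FileLocator exposes #uri and #exists? as the hunk shows):

    old_locator = FileLocator.new('s3://masterfiles/dropbox/collection1/lecture01.mp4')
    new_locator = FileLocator.new('file:///srv/avalon/masterfiles/lecture01.mp4')
    copy_method = "#{old_locator.uri.scheme}_to_#{new_locator.uri.scheme}".to_sym
    # => :s3_to_file, which downloads the object and then deletes the S3 source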
See the License for the @@ -20,4 +20,12 @@ def self.autocomplete(query) { id: course.context_id, display: course.title } } end + + def self.unlink_all(context_id) + MediaObject.find_each(read_access_group_ssim: context_id) do |mo| + mo.read_groups = mo.read_groups - [context_id] + mo.update_index + end + end + end diff --git a/app/models/derivative.rb b/app/models/derivative.rb index 9a759f066d..64f8eca246 100644 --- a/app/models/derivative.rb +++ b/app/models/derivative.rb @@ -35,7 +35,9 @@ class Derivative < ActiveFedora::Base property :managed, predicate: Avalon::RDFVocab::Derivative.isManaged, multiple: false do |index| index.as Solrizer::Descriptor.new(:boolean, :stored, :indexed) end - property :derivativeFile, predicate: ::RDF::Vocab::EBUCore.filename, multiple: false + property :derivativeFile, predicate: ::RDF::Vocab::EBUCore.filename, multiple: false do |index| + index.as :stored_sortable + end # Encoding datastream properties property :quality, predicate: ::RDF::Vocab::EBUCore.encodingLevel, multiple: false do |index| diff --git a/app/models/elastic_transcoder_job.rb b/app/models/elastic_transcoder_job.rb index 898c133c74..88af58a62a 100644 --- a/app/models/elastic_transcoder_job.rb +++ b/app/models/elastic_transcoder_job.rb @@ -111,8 +111,8 @@ def check_s3_bucket else self.input = File.join(SecureRandom.uuid,s3_object.key) logger.info("Copying to `#{source_bucket}/#{input}'") - s3client.copy_object(copy_source: File.join(s3_object.bucket_name,s3_object.key), - bucket: source_bucket, key: self.input) + target = Aws::S3::Object.new(bucket_name: source_bucket, key: self.input) + target.copy_from(s3_object, multipart_copy: s3_object.size > 15.megabytes) end end diff --git a/app/models/master_file.rb b/app/models/master_file.rb index 6f512d02c2..56ffe76831 100644 --- a/app/models/master_file.rb +++ b/app/models/master_file.rb @@ -57,9 +57,7 @@ class MasterFile < ActiveFedora::Base property :file_checksum, predicate: ::RDF::Vocab::NFO.hashValue, multiple: false do |index| index.as :stored_sortable end - property :file_size, predicate: ::RDF::Vocab::EBUCore.fileSize, multiple: false do |index| - index.as :stored_sortable - end + property :file_size, predicate: ::RDF::Vocab::EBUCore.fileSize, multiple: false # indexed in to_solr property :duration, predicate: ::RDF::Vocab::EBUCore.duration, multiple: false do |index| index.as :stored_sortable end @@ -489,6 +487,7 @@ def has_structuralMetadata? def to_solr *args super.tap do |solr_doc| + solr_doc['file_size_ltsi'] = file_size solr_doc['has_captions?_bs'] = has_captions? solr_doc['has_poster?_bs'] = has_poster? solr_doc['has_thumbnail?_bs'] = has_thumbnail? diff --git a/app/models/mods_behaviors.rb b/app/models/mods_behaviors.rb index 73eb744c22..36a627c7fe 100644 --- a/app/models/mods_behaviors.rb +++ b/app/models/mods_behaviors.rb @@ -1,11 +1,11 @@ # Copyright 2011-2017, The Trustees of Indiana University and Northwestern # University. Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. -# +# # You may obtain a copy of the License at -# +# # http://www.apache.org/licenses/LICENSE-2.0 -# +# # Unless required by applicable law or agreed to in writing, software distributed # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR # CONDITIONS OF ANY KIND, either express or implied. 
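The ElasticTranscoderJob change above, like the AttachDerivativeJob change earlier in this patch, swaps client.copy_object for the resource-style Aws::S3::Object#copy_from, matching the aws-sdk ~> 2.0 pin in the Gemfile. multipart_copy is only required above S3's 5 GB single-request copy limit; the 15 MB threshold here just opts in early. A minimal standalone sketch (bucket and key names are placeholders):

    require 'aws-sdk' # v2.x, as pinned in the Gemfile

    source = Aws::S3::Object.new(bucket_name: 'ingest-bucket', key: 'uploads/lecture01.mp4')
    target = Aws::S3::Object.new(bucket_name: 'pipeline-bucket', key: 'abc123/lecture01.mp4')
    # source.size issues a HEAD request; large objects are copied via multipart
    target.copy_from(source, multipart_copy: source.size > 15 * 1024 * 1024)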
See the License for the @@ -87,6 +87,17 @@ def to_solr(solr_doc = Hash.new, opts = {}) # For full text, we stuff it into the mods_tesim field which is already configured for Mods doucments solr_doc['mods_tesim'] = self.ng_xml.xpath('//text()').collect { |t| t.text } + solr_doc.delete_if do |field,value| + case value + when String + value.length > 32000 + when Array + value.reject! { |t| t.length > 32000 } + false + else false + end + end + return solr_doc end diff --git a/app/models/user.rb b/app/models/user.rb index c1c16221ff..003b3b5cd5 100644 --- a/app/models/user.rb +++ b/app/models/user.rb @@ -53,8 +53,9 @@ def self.from_api_token(token) end def self.find_for_generic(access_token, signed_in_resource=nil) - username = access_token.info['email'] - User.find_by_username(username) || User.find_by_email(username) || User.create(username: username, email: username) + username = access_token.uid + email = access_token.info.email + User.find_by_username(username) || User.find_by_email(email) || User.create(username: username, email: email) end def self.find_for_lti(auth_hash, signed_in_resource=nil) diff --git a/app/services/solr_collection_admin.rb b/app/services/solr_collection_admin.rb new file mode 100644 index 0000000000..71acafec7e --- /dev/null +++ b/app/services/solr_collection_admin.rb @@ -0,0 +1,25 @@ +class SolrCollectionAdmin + + def initialize + base, @collection = ActiveFedora.solr_config[:url].reverse.split(/\//,2).reverse.collect(&:reverse) + @conn = Faraday.new(url: base) + end + + def backup(location, opts={}) + opts[:optimize] = true unless opts.key?(:optimize) + optimize = !!opts[:optimize] + timestamp = Time.now.strftime('%Y%m%d%H%M%S') + backup_name = "#{@collection}_backup_#{timestamp}" + @conn.get("#{@collection}/update", optimize: 'true') if optimize + response = @conn.get('admin/collections', action: 'BACKUP', name: backup_name, collection: @collection, location: location, wt: 'json') + response.body + end + + def restore(location, opts={}) + raise ArgumentError, "Required parameter `name` missing or invalid" unless opts[:name] + params = { action: 'RESTORE', collection: @collection, location: location, wt: 'json' }.merge(opts) + response = @conn.get('admin/collections', params) + response.body + end + +end diff --git a/app/views/media_objects/_file_upload.html.erb b/app/views/media_objects/_file_upload.html.erb index 9f80f73478..80e1844616 100644 --- a/app/views/media_objects/_file_upload.html.erb +++ b/app/views/media_objects/_file_upload.html.erb @@ -1,25 +1,25 @@ <%# Copyright 2011-2017, The Trustees of Indiana University and Northwestern - University. Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. +University. Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed - under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR - CONDITIONS OF ANY KIND, either express or implied. See the License for the - specific language governing permissions and limitations under the License. +under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
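SolrCollectionAdmin above drives the Solr Collections API BACKUP and RESTORE actions against the collection named in ActiveFedora.solr_config[:url]; SolrBackupJob simply enqueues a backup to /data/backup. The to_solr filter a few hunks up drops values longer than 32,000 characters, presumably to stay under Lucene's 32,766-byte cap on a single untokenized term. A usage sketch for the service (the location must be a directory the Solr node itself can write to, a Collections API requirement; the restore name below is illustrative):

    admin = SolrCollectionAdmin.new
    admin.backup('/data/backup')                   # optimizes the collection first by default
    admin.backup('/data/backup', optimize: false)  # skip the optimize pass
    admin.restore('/data/backup', name: 'avalon_backup_20170925110137')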
--- END LICENSE_HEADER BLOCK --- %> <%= form_for @media_object, html: { class: 'form-vertical', id: 'master_files_form' } do |media| %> - <%= hidden_field_tag :donot_advance, true %> - <%= hidden_field_tag :step, 'file-upload' %> +<%= hidden_field_tag :donot_advance, true %> +<%= hidden_field_tag :step, 'file-upload' %> <% unless @masterFiles.empty? %>
- Associated files -

For items with multiple files, enter a display label for each file. Users will click on these labels to switch between files.

+ Associated files +

For items with multiple files, enter a display label for each file. Users will click on these labels to switch between files.

<%= t("file_upload_tip.title").html_safe %>

@@ -30,164 +30,158 @@ Unless required by applicable law or agreed to in writing, software distributed

<%= t("file_upload_tip.datedigitized").html_safe %>

- <%= t("file_upload_tip.thumbnail").html_safe %>

+ +

<%= t("file_upload_tip.thumbnail").html_safe %>

- - - - - - - - - - - - - - - - - - - - - - +
+ <% @masterFiles.each do |part| %> <%= hidden_field_tag "master_files[#{part.id}][id]", part.id %> -
- - - - - - - - - - <% end %> - -
Type
Section label
Permalink
Date Digitized
Thumbnail
File name Size 
- <% case part.file_format - when 'Sound' %> + +
+
+ + + <% case part.file_format + when 'Sound' %> - <% when 'Moving image' %> + <% when 'Moving image' %> - <% else %> + <% else %> - <% end %> -
- <%= text_field_tag "master_files[#{part.id}][title]", part.title, class: 'form-control' %> - - <%= text_field_tag "master_files[#{part.id}][permalink]", part.permalink, class: 'form-control' %> - - <%= text_field_tag "master_files[#{part.id}][date_digitized]", part.date_digitized, class: 'form-control date-input' %> - - <% if part.is_video? %> - <%= text_field_tag "master_files[#{part.id}][poster_offset]", - part.poster_offset.to_i.to_hms, class: 'input-small form-control' %> - <% else %> - n/a - <% end %> - - <% if part.file_location.present? %> - <%= truncate_center(File.basename(part.file_location), 50, 20) %> - <% else %> - - - <% end %> - - <%= number_to_human_size(part.file_size) %> - - <% if can? :edit, @media_object %> - - <%# On a Rails level this needs to be folded into the masterfiles - # controller's destroy method to help remove more vestiges of the - # catalog controller %> - <%= link_to '×'.html_safe, - master_file_path(part.id), - title: 'Delete', - class: 'btn btn-xs btn-danger btn-confirmation', - data: { placement: 'left' }, - method: :delete %> - <% end %> -
-
-<% end %> -<% end %> - -
- Upload through the web -

Uploaded files must not exceed <%= number_to_human_size MasterFile::MAXIMUM_UPLOAD_SIZE %>

- -
- <%= form_tag(master_files_path, :enctype=>"multipart/form-data", class: upload_form_classes, data: upload_form_data) do -%> - - - - <%= hidden_field_tag("container_content_type", container_content_type, :id => "file_upload_content_type") if defined?(container_content_type) %> - - <%- field_tag_options = defined?(uploader_options) ? uploader_options : {multiple: true} %> - -
-
- - -
- Upload - - Select file - Change - + <% end %> - Remove - - <%= check_box_tag(:workflow, 'skip_transcoding', false, id: nil)%> - <%= label_tag(:skip_transcoding) do %> -
- Skip transcoding -
+ <%= truncate_center(File.basename(part.file_location.to_s), 50, 20) %> + <%= number_to_human_size(part.file_size) %> + + <% if can? :edit, @media_object %> + <%= link_to 'Delete'.html_safe, + master_file_path(part.id), + title: 'Delete', + class: 'btn btn-xs btn-danger btn-confirmation', + data: { placement: 'left' }, + method: :delete %> <% end %>
+
+
+
+ + <%= text_field_tag "master_files[#{part.id}][title]", part.title, class: '' %> +
+
+
+
+
+
+ + <%= text_field_tag "master_files[#{part.id}][date_digitized]", part.date_digitized, class: 'date-input' %> +
+
+
+
+ + <% if part.is_video? %> + <%= text_field_tag "master_files[#{part.id}][poster_offset]", + part.poster_offset.to_i.to_hms, class: 'input-small' %> + <% else %> + n/a + <% end %> +
+
+
+
+ + <%= text_field_tag "master_files[#{part.id}][permalink]", part.permalink, class: '' %> +
+
+
+
+ + <% end %> + + - <%= hidden_field_tag(:new_asset, true, :id => "files_new_asset") if params[:new_asset] %> - <%= hidden_field_tag("id",params[:id], :id => "file_upload_id") if params[:id] %> - <%= hidden_field_tag(:original, params[:original], :id => "files_original") %> - <% end %> -
-
- -

<%= t("file_upload_tip.skip_transcoding").html_safe %>

- + <% end %> + <% end %> -
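One detail of the reworked upload form: the file name cell now calls part.file_location.to_s before File.basename rather than wrapping the cell in a presence check. A plain-Ruby illustration of why that matters:

    File.basename(nil)       # TypeError: no implicit conversion of nil into String
    File.basename(nil.to_s)  # => "" -- a master file with no location renders an empty name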