[compatibility] Patches for 7.7.2
mbklein committed Jul 17, 2024
1 parent fecb0d8 commit df3bf7e
Showing 10 changed files with 414 additions and 397 deletions.
6 changes: 6 additions & 0 deletions Dockerfile
@@ -86,6 +86,7 @@ RUN apt-get update && \
zip \
dumb-init \
libsqlite3-dev \
sudo \
&& apt-get -y install mediainfo \
&& ln -s /usr/bin/lsof /usr/sbin/

@@ -154,6 +155,11 @@ LABEL stage=final
LABEL project=avalon
COPY --from=assets --chown=app:app /home/app/avalon /home/app/avalon
COPY --from=bundle-prod --chown=app:app /usr/local/bundle /usr/local/bundle
RUN mkdir /var/run/puma && chown root:app /var/run/puma && chmod 0775 /var/run/puma

USER app
ENV RAILS_ENV=production
ENV PATH="/home/app/bin:${PATH}"
EXPOSE 3000
CMD bin/boot_container
HEALTHCHECK --start-period=60s CMD curl -f http://localhost:3000/
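
The new /var/run/puma directory is created as root:app with mode 0775 so the unprivileged app user can write runtime files into it after the USER app switch. A minimal sketch of a Puma config that would use that directory, assuming socket and pidfile names that are not part of this commit:

```ruby
# config/puma.rb -- minimal sketch; the socket and pidfile names are assumptions.
# The Dockerfile makes /var/run/puma group-writable by "app", so these files
# can be created at boot without root privileges.
bind    "unix:///var/run/puma/avalon.sock"
pidfile "/var/run/puma/puma.pid"
```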
6 changes: 3 additions & 3 deletions app/controllers/master_files_controller.rb
@@ -272,12 +272,12 @@ def hls_manifest
return head :unauthorized
end
else
return head :unauthorized if cannot?(:read, master_file)
stream = hls_stream(master_file, quality).first
return head :unauthorized if cannot?(:read, @master_file)
stream = hls_stream(@master_file, quality).first
case stream
when nil
raise ActionController::RoutingError.new('Not Found') unless quality == 'auto'
@hls_streams = gather_hls_streams(master_file)
@hls_streams = gather_hls_streams(@master_file)
else
redirect_to(stream[:url])
end
5 changes: 4 additions & 1 deletion app/controllers/media_objects_controller.rb
@@ -469,7 +469,10 @@ def manifest
authorize! :read, @media_object

stream_info_hash = secure_stream_infos(master_file_presenters, [@media_object])
canvas_presenters = master_file_presenters.collect { |mf| IiifCanvasPresenter.new(master_file: mf, stream_info: stream_info_hash[mf.id]) }
canvas_presenters = master_file_presenters.collect do |mf|
stream_info = secure_streams(mf.stream_details, @media_object.id)
IiifCanvasPresenter.new(master_file: mf, stream_info: stream_info)
end
presenter = IiifManifestPresenter.new(media_object: @media_object, master_files: canvas_presenters, lending_enabled: lending_enabled?(@media_object))

manifest = IIIFManifest::V3::ManifestFactory.new(presenter).to_h
2 changes: 1 addition & 1 deletion app/services/security_service.rb
@@ -22,7 +22,7 @@ def rewrite_url(url, context)
expiration = Settings.streaming.stream_token_ttl.to_f.minutes.from_now
case context[:protocol]
when :stream_hls
streaming_url = URI.encode(Addressable::URI.join(Settings.streaming.http_base,uri.path).to_s)
streaming_url = URI::DEFAULT_PARSER.escape(Addressable::URI.join(Settings.streaming.http_base,uri.path).to_s)
url_signer.signed_url(streaming_url, expires: expiration)
else
url
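
URI.encode (an alias of URI.escape) was removed in Ruby 3.0, so the HLS branch now uses URI::DEFAULT_PARSER.escape, which applies the same percent-encoding. A quick illustration with a made-up streaming URL:

```ruby
require "uri"

# URI.encode/URI.escape are gone in Ruby 3.0; URI::DEFAULT_PARSER.escape
# performs the same RFC 2396 escaping of unsafe characters such as spaces.
URI::DEFAULT_PARSER.escape("https://streaming.example.edu/hls/my file.m3u8")
# => "https://streaming.example.edu/hls/my%20file.m3u8"
```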
7 changes: 7 additions & 0 deletions config/environments/development.rb
@@ -79,4 +79,11 @@

# Uncomment if you wish to allow Action Cable access from any origin.
# config.action_cable.disable_request_forgery_protection = true
if ENV["RAILS_LOG_TO_STDOUT"].present?
logger = ActiveSupport::Logger.new(STDOUT)
logger.formatter = config.log_formatter
config.logger = ActiveSupport::TaggedLogging.new(logger)
end

config.web_console.permissions = '0.0.0.0/0'
end
697 changes: 348 additions & 349 deletions config/initializers/devise.rb

Large diffs are not rendered by default.

4 changes: 4 additions & 0 deletions config/initializers/omniauth.rb
@@ -0,0 +1,4 @@
OmniAuth.configure do |config|
config.allowed_request_methods << :get
config.silence_get_warning = true
end
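
OmniAuth 2.x restricts the request phase to POST by default and logs a warning whenever GET is re-enabled; this initializer opts back into GET (matching the `via: [:get, :post]` passthru route in config/routes.rb) and silences that warning. A hypothetical view-level sketch of why GET support matters, with the provider name assumed:

```ruby
# Hypothetical usage sketch -- the :saml provider is an assumption.
# With :get in allowed_request_methods, a plain link can start the request phase:
link_to "Sign in with SAML", user_omniauth_authorize_path(:saml)

# Under the OmniAuth 2 default (POST only), the same sign-in entry point
# would have to be a form submission instead:
button_to "Sign in with SAML", user_omniauth_authorize_path(:saml)
```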
3 changes: 0 additions & 3 deletions config/routes.rb
@@ -62,9 +62,6 @@
devise_for :users, controllers: { omniauth_callbacks: 'users/omniauth_callbacks', sessions: 'users/sessions' }
devise_scope :user do
match '/users/auth/:provider', to: 'users/omniauth_callbacks#passthru', as: :user_omniauth_authorize, via: [:get, :post]
Avalon::Authentication::Providers.collect { |provider| provider[:provider] }.uniq.each do |provider_name|
match "/users/auth/#{provider_name}/callback", to: "users/omniauth_callbacks##{provider_name}", as: "user_omniauth_callback_#{provider_name}".to_sym, via: [:get, :post]
end
end

mount BrowseEverything::Engine => '/browse'
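
The hand-maintained per-provider callback routes are removed, presumably because `devise_for :users, controllers: { omniauth_callbacks: 'users/omniauth_callbacks' }` already generates the `/users/auth/:provider/callback` routes with current Devise and OmniAuth, making the explicit matches redundant. A hedged sketch of the callback action those generated routes dispatch to (the provider name and the from_omniauth helper are illustrative, not from this commit):

```ruby
# Hypothetical sketch -- not part of this commit. Devise routes
# /users/auth/<provider>/callback to an action named after the provider
# on the configured callbacks controller.
class Users::OmniauthCallbacksController < Devise::OmniauthCallbacksController
  def saml
    # request.env["omniauth.auth"] carries the normalized auth hash from OmniAuth.
    @user = User.from_omniauth(request.env["omniauth.auth"]) # from_omniauth is assumed
    sign_in_and_redirect @user, event: :authentication
  end
end
```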
80 changes: 40 additions & 40 deletions db/schema.rb
@@ -60,42 +60,42 @@
t.index ["blob_id", "variation_digest"], name: "index_active_storage_variant_records_uniqueness", unique: true
end

create_table "annotations", force: :cascade do |t|
create_table "annotations", id: :serial, force: :cascade do |t|
t.string "uuid"
t.string "source_uri"
t.bigint "playlist_item_id"
t.integer "playlist_item_id"
t.text "annotation"
t.string "type"
t.index ["playlist_item_id"], name: "index_annotations_on_playlist_item_id"
t.index ["type"], name: "index_annotations_on_type"
end

create_table "api_tokens", force: :cascade do |t|
create_table "api_tokens", id: :serial, force: :cascade do |t|
t.string "token", null: false
t.string "username", null: false
t.string "email", null: false
t.datetime "created_at", precision: nil, null: false
t.datetime "updated_at", precision: nil, null: false
t.datetime "created_at", precision: nil
t.datetime "updated_at", precision: nil
t.index ["token"], name: "index_api_tokens_on_token", unique: true
t.index ["username"], name: "index_api_tokens_on_username"
end

create_table "batch_entries", force: :cascade do |t|
t.bigint "batch_registries_id"
create_table "batch_entries", id: :serial, force: :cascade do |t|
t.integer "batch_registries_id"
t.text "payload"
t.boolean "complete", default: false, null: false
t.boolean "error", default: false, null: false
t.string "current_status"
t.text "error_message"
t.string "media_object_pid"
t.integer "position"
t.datetime "created_at", precision: nil, null: false
t.datetime "updated_at", precision: nil, null: false
t.datetime "created_at", precision: nil
t.datetime "updated_at", precision: nil
t.index ["batch_registries_id"], name: "index_batch_entries_on_batch_registries_id"
t.index ["position"], name: "index_batch_entries_on_position"
end

create_table "batch_registries", force: :cascade do |t|
create_table "batch_registries", id: :serial, force: :cascade do |t|
t.string "file_name"
t.string "replay_name"
t.string "dir"
@@ -108,11 +108,11 @@
t.text "error_message"
t.boolean "error_email_sent", default: false, null: false
t.boolean "locked", default: false, null: false
t.datetime "created_at", precision: nil, null: false
t.datetime "updated_at", precision: nil, null: false
t.datetime "created_at", precision: nil
t.datetime "updated_at", precision: nil
end

create_table "bookmarks", force: :cascade do |t|
create_table "bookmarks", id: :serial, force: :cascade do |t|
t.integer "user_id", null: false
t.string "user_type"
t.string "document_id"
@@ -134,32 +134,32 @@
t.index ["user_id"], name: "index_checkouts_on_user_id"
end

create_table "courses", force: :cascade do |t|
create_table "courses", id: :serial, force: :cascade do |t|
t.string "context_id"
t.string "title"
t.text "label"
t.datetime "created_at", precision: nil, null: false
t.datetime "updated_at", precision: nil, null: false
t.datetime "created_at", precision: nil
t.datetime "updated_at", precision: nil
end

create_table "identities", force: :cascade do |t|
create_table "identities", id: :serial, force: :cascade do |t|
t.string "email"
t.string "password_digest"
t.datetime "created_at", precision: nil, null: false
t.datetime "updated_at", precision: nil, null: false
t.datetime "created_at", precision: nil
t.datetime "updated_at", precision: nil
end

create_table "ingest_batches", force: :cascade do |t|
create_table "ingest_batches", id: :serial, force: :cascade do |t|
t.string "name", limit: 50
t.string "email"
t.text "media_object_ids"
t.boolean "finished", default: false
t.boolean "email_sent", default: false
t.datetime "created_at", precision: nil, null: false
t.datetime "updated_at", precision: nil, null: false
t.datetime "created_at", precision: nil
t.datetime "updated_at", precision: nil
end

create_table "migration_statuses", force: :cascade do |t|
create_table "migration_statuses", id: :serial, force: :cascade do |t|
t.string "source_class", null: false
t.string "f3_pid", null: false
t.string "f4_pid"
@@ -172,7 +172,7 @@
t.index ["source_class", "f3_pid", "datastream"], name: "index_migration_statuses"
end

create_table "minter_states", force: :cascade do |t|
create_table "minter_states", id: :serial, force: :cascade do |t|
t.string "namespace", default: "default", null: false
t.string "template", null: false
t.text "counters"
@@ -183,23 +183,23 @@
t.index ["namespace"], name: "index_minter_states_on_namespace", unique: true
end

create_table "playlist_items", force: :cascade do |t|
t.bigint "playlist_id", null: false
t.bigint "clip_id", null: false
create_table "playlist_items", id: :serial, force: :cascade do |t|
t.integer "playlist_id", null: false
t.integer "clip_id", null: false
t.integer "position"
t.datetime "created_at", precision: nil, null: false
t.datetime "updated_at", precision: nil, null: false
t.datetime "created_at", precision: nil
t.datetime "updated_at", precision: nil
t.index ["clip_id"], name: "index_playlist_items_on_clip_id"
t.index ["playlist_id"], name: "index_playlist_items_on_playlist_id"
end

create_table "playlists", force: :cascade do |t|
create_table "playlists", id: :serial, force: :cascade do |t|
t.string "title"
t.bigint "user_id", null: false
t.integer "user_id", null: false
t.string "comment"
t.string "visibility"
t.datetime "created_at", precision: nil, null: false
t.datetime "updated_at", precision: nil, null: false
t.datetime "created_at", precision: nil
t.datetime "updated_at", precision: nil
t.string "access_token"
t.string "tags"
t.index ["user_id"], name: "index_playlists_on_user_id"
@@ -210,12 +210,12 @@
t.string "embed_target"
end

create_table "role_maps", force: :cascade do |t|
create_table "role_maps", id: :serial, force: :cascade do |t|
t.string "entry"
t.integer "parent_id"
end

create_table "searches", force: :cascade do |t|
create_table "searches", id: :serial, force: :cascade do |t|
t.text "query_params"
t.integer "user_id"
t.string "user_type"
@@ -224,16 +224,16 @@
t.index ["user_id"], name: "index_searches_on_user_id"
end

create_table "sessions", force: :cascade do |t|
create_table "sessions", id: :serial, force: :cascade do |t|
t.string "session_id", null: false
t.text "data"
t.datetime "created_at", precision: nil, null: false
t.datetime "updated_at", precision: nil, null: false
t.datetime "created_at", precision: nil
t.datetime "updated_at", precision: nil
t.index ["session_id"], name: "index_sessions_on_session_id"
t.index ["updated_at"], name: "index_sessions_on_updated_at"
end

create_table "stream_tokens", force: :cascade do |t|
create_table "stream_tokens", id: :serial, force: :cascade do |t|
t.string "token"
t.string "target"
t.datetime "expires", precision: nil
@@ -261,7 +261,7 @@
t.index ["user_id"], name: "index_timelines_on_user_id"
end

create_table "users", force: :cascade do |t|
create_table "users", id: :serial, force: :cascade do |t|
t.string "username", null: false
t.string "email", null: false
t.datetime "created_at", precision: nil
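
The db/schema.rb changes replace the Rails 5.1+ defaults (bigint primary and foreign keys, `null: false` timestamps) with the explicit `id: :serial` / `integer` / nullable-timestamp declarations, presumably so the dumped schema matches what these legacy tables actually contain. For illustration, the `playlist_items` declaration from this diff corresponds to a migration along these lines (the migration itself is a sketch, not part of the commit):

```ruby
# Illustrative sketch only. On Rails >= 5.1 a bare create_table would emit a
# bigint primary key, so the legacy integer-keyed table is pinned explicitly.
create_table :playlist_items, id: :serial, force: :cascade do |t|
  t.integer  :playlist_id, null: false   # integer FK matching the serial PK on playlists
  t.integer  :clip_id,     null: false
  t.integer  :position
  t.datetime :created_at,  precision: nil
  t.datetime :updated_at,  precision: nil
end
```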
1 change: 1 addition & 0 deletions terraform/queues.tf
@@ -13,6 +13,7 @@ locals {
ingest = 300,
master_file_management_delete = 30,
master_file_management_move = 60,
media_object_indexing = 300,
reindex = 43200,
s3_split = 600,
solr_backup = 30,
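
The Terraform change adds a `media_object_indexing` entry to the worker-queue map with a value of 300, presumably a per-queue visibility timeout in seconds like the neighboring entries. A hedged sketch of the kind of ActiveJob that might consume such a queue (class name and body are assumptions; only the queue name comes from this commit):

```ruby
# Hypothetical job sketch -- class name, arguments, and update_index call are
# assumptions; only the queue name comes from the Terraform change.
class MediaObjectIndexingJob < ApplicationJob
  queue_as :media_object_indexing

  def perform(media_object_id)
    # Reindexing should finish well inside the queue's (presumed) 300-second
    # visibility timeout so the message is not redelivered mid-run.
    MediaObject.find(media_object_id).update_index
  end
end
```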
