diff --git a/app/models/readme.rb b/app/models/readme.rb index 27f30d4b6..ad41aa869 100644 --- a/app/models/readme.rb +++ b/app/models/readme.rb @@ -10,7 +10,6 @@ def initialize(work, current_user) def attach(readme_file_param) return "A README file is required!" if readme_file_param.blank? && blank? return nil if readme_file_param.blank? - return nil if ActiveStorage::Blob.service.name == :local remove_old_readme key = upload_readme(readme_file_param) diff --git a/app/models/s3_file.rb b/app/models/s3_file.rb index 869107b46..3a119b443 100644 --- a/app/models/s3_file.rb +++ b/app/models/s3_file.rb @@ -52,26 +52,6 @@ def globus_url File.join(Rails.configuration.globus["post_curation_base_url"], encoded_filename) end - def create_blob(key: nil) - key ||= filename - params = { filename:, content_type: "", byte_size: size, checksum: } - - blob = ActiveStorage::Blob.create_before_direct_upload!(**params) - blob.key = key - blob - end - - def to_blob - existing_blob = ActiveStorage::Blob.find_by(key: filename) - - if existing_blob.present? - Rails.logger.warn("There is a blob existing for #{filename}, which we are not expecting! It will be reattached #{existing_blob.inspect}") - return existing_blob - end - - create_blob - end - delegate :s3_query_service, to: :@work delegate :bucket_name, to: :s3_query_service def s3_client diff --git a/app/models/work.rb b/app/models/work.rb index e887e414f..8cfb4da80 100644 --- a/app/models/work.rb +++ b/app/models/work.rb @@ -530,32 +530,6 @@ def update_ark_information end end - # Generates the key for ActiveStorage::Attachment and Attachment::Blob objects - # @param attachment [ActiveStorage::Attachment] - # @return [String] - def generate_attachment_key(attachment) - attachment_filename = attachment.filename.to_s - attachment_key = attachment.key - - # Files actually coming from S3 include the DOI and bucket as part of the file name - # Files being attached in another manner may not have it, so we should include it. 
- # This is really for testing only. - key_base = "#{doi}/#{id}" - attachment_key = [key_base, attachment_filename].join("/") unless attachment_key.include?(key_base) - - attachment_ext = File.extname(attachment_filename) - attachment_query = attachment_key.gsub(attachment_ext, "") - results = ActiveStorage::Blob.where("key LIKE :query", query: "%#{attachment_query}%") - blobs = results.to_a - - if blobs.present? - index = blobs.length + 1 - attachment_key = attachment_key.gsub(/\.([a-zA-Z0-9\.]+)$/, "_#{index}.\\1") - end - - attachment_key - end - def track_state_change(user, state = aasm.to_state) uw = UserWork.new(user_id: user.id, work_id: id, state:) uw.save! @@ -563,23 +537,6 @@ def track_state_change(user, state = aasm.to_state) WorkStateTransitionNotification.new(self, user.id).send end - # This needs to be called #before_save - # This ensures that new ActiveStorage::Attachment objects are persisted with custom keys (which are generated from the file name and DOI) - # @param new_attachments [Array] - def save_new_attachments(new_attachments:) - new_attachments.each do |attachment| - # There are cases (race conditions?) where the ActiveStorage::Blob objects are not persisted - next if attachment.frozen? - - # This ensures that the custom key for the ActiveStorage::Attachment and ActiveStorage::Blob objects are generated - generated_key = generate_attachment_key(attachment) - attachment.blob.key = generated_key - attachment.blob.save - - attachment.save - end - end - # Request S3 Bucket Objects associated with this Work # @return [Array] def s3_resources diff --git a/config/environments/development.rb b/config/environments/development.rb index 86775448f..5232f6e48 100644 --- a/config/environments/development.rb +++ b/config/environments/development.rb @@ -35,7 +35,7 @@ end # Store uploaded files on amazon (see config/storage.yml for options). 
- config.active_storage.service = :amazon + config.active_storage.service = :local config.action_mailer.perform_caching = false diff --git a/config/environments/production.rb b/config/environments/production.rb index 29533b900..1968d0707 100644 --- a/config/environments/production.rb +++ b/config/environments/production.rb @@ -41,7 +41,7 @@ # config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX # Store uploaded files on the local file system (see config/storage.yml for options). - config.active_storage.service = :production + config.active_storage.service = :local # Mount Action Cable outside main process or domain. # config.action_cable.mount_path = nil diff --git a/config/environments/staging.rb b/config/environments/staging.rb index 623150c3c..be665d3ae 100644 --- a/config/environments/staging.rb +++ b/config/environments/staging.rb @@ -41,7 +41,7 @@ # config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX # Store uploaded files on the local file system (see config/storage.yml for options). - config.active_storage.service = :staging + config.active_storage.service = :local # Mount Action Cable outside main process or domain. 
# config.action_cable.mount_path = nil diff --git a/config/storage.yml b/config/storage.yml index a5a2af967..695f17bd3 100644 --- a/config/storage.yml +++ b/config/storage.yml @@ -1,61 +1,7 @@ -local: &local - service: 'Disk' - root: <%= Rails.root.join("storage") %> - -amazon: &amazon - service: 'S3' - access_key_id: <%= ENV['AWS_S3_KEY_ID'] || 'not-used-access_key_id' %> - secret_access_key: <%= ENV['AWS_S3_SECRET_KEY'] || 'not-used-secret_access_key' %> - region: <%= S3QueryService.pre_curation_config.fetch(:region) %> - bucket: <%= S3QueryService.pre_curation_config.fetch(:bucket) %> - -development: &development - # Default to the local file system (this avoids coercion into AWS) - # AWS is the default as the will not really work without AWS - # AWS can be enabled here should that be preferred for development - <<: *amazon - -amazon_pre_curation: - <% if Rails.env.development? %> - <<: *development - <% else %> - <<: *amazon - region: <%= S3QueryService.pre_curation_config.fetch(:region) %> - bucket: <%= S3QueryService.pre_curation_config.fetch(:bucket) %> - <% end %> - -test: &test - <<: *amazon - access_key_id: 'not-used-access_key_id' - secret_access_key: 'not-used-secret_access_key' - -staging: *amazon +test: + service: Disk + root: <%= Rails.root.join("tmp/storage") %> -production: *amazon - -# Use rails credentials:edit to set the AWS secrets (as aws:access_key_id|secret_access_key) -# amazon: -# service: S3 -# access_key_id: <%= Rails.application.credentials.dig(:aws, :access_key_id) %> -# secret_access_key: <%= Rails.application.credentials.dig(:aws, :secret_access_key) %> -# region: us-east-1 -# bucket: your_own_bucket - -# Remember not to checkin your GCS keyfile to a repository -# google: -# service: GCS -# project: your_project -# credentials: <%= Rails.root.join("path/to/gcs.keyfile") %> -# bucket: your_own_bucket - -# Use rails credentials:edit to set the Azure Storage secret (as azure_storage:storage_access_key) -# microsoft: -# service: 
AzureStorage -# storage_account_name: your_account_name -# storage_access_key: <%= Rails.application.credentials.dig(:azure_storage, :storage_access_key) %> -# container: your_container_name - -# mirror: -# service: Mirror -# primary: local -# mirrors: [ amazon, google, microsoft ] +local: + service: Disk + root: <%= Rails.root.join("storage") %> diff --git a/spec/models/s3_file_spec.rb b/spec/models/s3_file_spec.rb index cf269166f..f70950cc9 100644 --- a/spec/models/s3_file_spec.rb +++ b/spec/models/s3_file_spec.rb @@ -48,40 +48,6 @@ end end - describe "#to_blob" do - let(:file) do - S3File.new( - filename: "abc123/111/SCoData_combined_v1_2020-07_README.txt", - last_modified: Time.parse("2022-04-21T18:30:07.000Z"), - size: 12_739, - checksum: "abc567", - work: FactoryBot.create(:draft_work) - ) - end - - it "persists S3 Bucket resources as ActiveStorage::Blob" do - # call the s3 reload and make sure no more files get added to the model - blob = nil - expect { blob = file.to_blob }.to change { ActiveStorage::Blob.count }.by(1) - - expect(blob.key).to eq("abc123/111/SCoData_combined_v1_2020-07_README.txt") - end - - context "a blob already exists for one of the files" do - before do - persisted = ActiveStorage::Blob.create_before_direct_upload!( - filename: file.filename, content_type: "", byte_size: file.size, checksum: "" - ) - persisted.key = file.filename - persisted.save - end - - it "finds the blob" do - expect { file.to_blob }.not_to change { ActiveStorage::Blob.count } - end - end - end - describe "#create_snapshot" do let(:s3_file) do S3File.new( diff --git a/spec/support/humanizable_storage_specs.rb b/spec/support/humanizable_storage_specs.rb deleted file mode 100644 index d1d48380d..000000000 --- a/spec/support/humanizable_storage_specs.rb +++ /dev/null @@ -1,24 +0,0 @@ -# frozen_string_literal: true -require "active_storage/blob" - -RSpec.configure do |config| - config.before(:each, humanizable_storage: true) do - @active_storage_config = { - 
"amazon_pre_curation" => { - "service" => "HumanizedDisk", - "root" => Rails.root.join("spec", "fixtures", "storage") - }, - "amazon_post_curation" => { - "service" => "HumanizedDisk", - "root" => Rails.root.join("spec", "fixtures", "storage") - } - } - @active_storage_test_registry = ActiveStorage::Service::Registry.new(@active_storage_config) - @active_storage_default_registry = ActiveStorage::Blob.services - ActiveStorage::Blob.services = @active_storage_test_registry - end - - config.after(:each, humanizable_storage: true) do - ActiveStorage::Blob.services = @active_storage_default_registry - end -end