diff --git a/spec/lib/file_store/s3_store_spec.rb b/spec/lib/file_store/s3_store_spec.rb
index 639927a95d..346ad44000 100644
--- a/spec/lib/file_store/s3_store_spec.rb
+++ b/spec/lib/file_store/s3_store_spec.rb
@@ -10,6 +10,7 @@ describe FileStore::S3Store do
   let(:resource) { Aws::S3::Resource.new(client: client) }
   let(:s3_bucket) { resource.bucket("s3-upload-bucket") }
   let(:s3_object) { stub }
+  let(:upload_path) { Discourse.store.upload_path }
 
   fab!(:optimized_image) { Fabricate(:optimized_image) }
   let(:optimized_image_file) { file_from_fixtures("logo.png") }
@@ -191,22 +192,21 @@ describe FileStore::S3Store do
   context 'copying files in S3' do
     describe '#copy_file' do
       it "copies the file in S3 with the right paths" do
-        s3_helper.expects(:s3_bucket).returns(s3_bucket)
-
         upload.update!(
           url: "//s3-upload-bucket.s3.dualstack.us-west-1.amazonaws.com/original/1X/#{upload.sha1}.png"
         )
 
-        source = Discourse.store.get_path_for_upload(upload)
-        destination = Discourse.store.get_path_for_upload(upload).sub('.png', '.jpg')
+        source = "#{upload_path}/#{Discourse.store.get_path_for_upload(upload)}"
+        destination = source.sub('.png', '.jpg')
+        bucket = prepare_fake_s3(source, upload)
 
-        s3_object = stub
-
-        s3_bucket.expects(:object).with(destination).returns(s3_object)
-
-        expect_copy_from(s3_object, "s3-upload-bucket/#{source}")
+        expect(bucket.find_object(source)).to be_present
+        expect(bucket.find_object(destination)).to be_nil
 
         store.copy_file(upload.url, source, destination)
+
+        expect(bucket.find_object(source)).to be_present
+        expect(bucket.find_object(destination)).to be_present
       end
     end
   end
@@ -214,33 +214,19 @@ describe FileStore::S3Store do
   context 'removal from s3' do
     describe "#remove_upload" do
       it "removes the file from s3 with the right paths" do
-        s3_helper.expects(:s3_bucket).returns(s3_bucket).at_least_once
-        upload.update!(url: "//s3-upload-bucket.s3.dualstack.us-west-1.amazonaws.com/original/1X/#{upload.sha1}.png")
-        s3_object = stub
+        upload_key = Discourse.store.get_path_for_upload(upload)
+        tombstone_key = "tombstone/#{upload_key}"
+        bucket = prepare_fake_s3(upload_key, upload)
 
-        s3_bucket.expects(:object).with("tombstone/original/1X/#{upload.sha1}.png").returns(s3_object)
-        expect_copy_from(s3_object, "s3-upload-bucket/original/1X/#{upload.sha1}.png")
-        s3_bucket.expects(:object).with("original/1X/#{upload.sha1}.png").returns(s3_object)
-        s3_object.expects(:delete)
+        upload.update!(url: "//s3-upload-bucket.s3.dualstack.us-west-1.amazonaws.com/#{upload_key}")
+
+        expect(bucket.find_object(upload_key)).to be_present
+        expect(bucket.find_object(tombstone_key)).to be_nil
 
         store.remove_upload(upload)
-      end
 
-      it "removes the optimized image from s3 with the right paths" do
-        optimized = Fabricate(:optimized_image, version: 1)
-        upload = optimized.upload
-        path = "optimized/1X/#{upload.sha1}_#{optimized.version}_#{optimized.width}x#{optimized.height}.png"
-
-        s3_helper.expects(:s3_bucket).returns(s3_bucket).at_least_once
-        optimized.update!(url: "//s3-upload-bucket.s3.dualstack.us-west-1.amazonaws.com/#{path}")
-        s3_object = stub
-
-        s3_bucket.expects(:object).with("tombstone/#{path}").returns(s3_object)
-        expect_copy_from(s3_object, "s3-upload-bucket/#{path}")
-        s3_bucket.expects(:object).with(path).returns(s3_object)
-        s3_object.expects(:delete)
-
-        store.remove_optimized_image(optimized)
+
+        expect(bucket.find_object(upload_key)).to be_nil
+        expect(bucket.find_object(tombstone_key)).to be_present
       end
 
       describe "when s3_upload_bucket includes folders path" do
@@ -249,41 +235,47 @@ describe FileStore::S3Store do
        end
 
         it "removes the file from s3 with the right paths" do
-          s3_helper.expects(:s3_bucket).returns(s3_bucket).at_least_once
-          upload.update!(url: "//s3-upload-bucket.s3.dualstack.us-west-1.amazonaws.com/discourse-uploads/original/1X/#{upload.sha1}.png")
-          s3_object = stub
+          upload_key = "discourse-uploads/#{Discourse.store.get_path_for_upload(upload)}"
+          tombstone_key = "discourse-uploads/tombstone/#{Discourse.store.get_path_for_upload(upload)}"
+          bucket = prepare_fake_s3(upload_key, upload)
 
-          s3_bucket.expects(:object).with("discourse-uploads/tombstone/original/1X/#{upload.sha1}.png").returns(s3_object)
-          expect_copy_from(s3_object, "s3-upload-bucket/discourse-uploads/original/1X/#{upload.sha1}.png")
-          s3_bucket.expects(:object).with("discourse-uploads/original/1X/#{upload.sha1}.png").returns(s3_object)
-          s3_object.expects(:delete)
+          upload.update!(url: "//s3-upload-bucket.s3.dualstack.us-west-1.amazonaws.com/#{upload_key}")
+
+          expect(bucket.find_object(upload_key)).to be_present
+          expect(bucket.find_object(tombstone_key)).to be_nil
 
           store.remove_upload(upload)
+
+          expect(bucket.find_object(upload_key)).to be_nil
+          expect(bucket.find_object(tombstone_key)).to be_present
         end
       end
     end
 
     describe "#remove_optimized_image" do
-      let(:image_path) do
-        FileStore::BaseStore.new.get_path_for_optimized_image(optimized_image)
-      end
+      let(:optimized_key) { FileStore::BaseStore.new.get_path_for_optimized_image(optimized_image) }
+      let(:tombstone_key) { "tombstone/#{optimized_key}" }
+      let(:upload) { optimized_image.upload }
+      let(:upload_key) { Discourse.store.get_path_for_upload(upload) }
 
       before do
-        optimized_image.update!(
-          url: "//s3-upload-bucket.s3.dualstack.us-west-1.amazonaws.com/#{image_path}"
-        )
+        optimized_image.update!(url: "//s3-upload-bucket.s3.dualstack.us-west-1.amazonaws.com/#{optimized_key}")
+        upload.update!(url: "//s3-upload-bucket.s3.dualstack.us-west-1.amazonaws.com/#{upload_key}")
       end
 
-      it "removes the file from s3 with the right paths" do
-        s3_helper.expects(:s3_bucket).returns(s3_bucket).at_least_once
-        s3_object = stub
+      it "removes the optimized image from s3 with the right paths" do
+        bucket = prepare_fake_s3(upload_key, upload)
+        store_fake_s3_object(optimized_key, optimized_image)
 
-        s3_bucket.expects(:object).with("tombstone/#{image_path}").returns(s3_object)
-        expect_copy_from(s3_object, "s3-upload-bucket/#{image_path}")
-        s3_bucket.expects(:object).with("#{image_path}").returns(s3_object)
-        s3_object.expects(:delete)
+        expect(bucket.find_object(upload_key)).to be_present
+        expect(bucket.find_object(optimized_key)).to be_present
+        expect(bucket.find_object(tombstone_key)).to be_nil
 
         store.remove_optimized_image(optimized_image)
+
+        expect(bucket.find_object(upload_key)).to be_present
+        expect(bucket.find_object(optimized_key)).to be_nil
+        expect(bucket.find_object(tombstone_key)).to be_present
       end
 
       describe "when s3_upload_bucket includes folders path" do
@@ -291,29 +283,24 @@ describe FileStore::S3Store do
           SiteSetting.s3_upload_bucket = "s3-upload-bucket/discourse-uploads"
         end
 
-        before do
-          optimized_image.update!(
-            url: "//s3-upload-bucket.s3.dualstack.us-west-1.amazonaws.com/discourse-uploads/#{image_path}"
-          )
-        end
+        let(:image_path) { FileStore::BaseStore.new.get_path_for_optimized_image(optimized_image) }
+        let(:optimized_key) { "discourse-uploads/#{image_path}" }
+        let(:tombstone_key) { "discourse-uploads/tombstone/#{image_path}" }
+        let(:upload_key) { "discourse-uploads/#{Discourse.store.get_path_for_upload(upload)}" }
 
         it "removes the file from s3 with the right paths" do
-          s3_helper.expects(:s3_bucket).returns(s3_bucket).at_least_once
-          s3_object = stub
+          bucket = prepare_fake_s3(upload_key, upload)
+          store_fake_s3_object(optimized_key, optimized_image)
 
-          s3_bucket.expects(:object)
-            .with("discourse-uploads/tombstone/#{image_path}")
-            .returns(s3_object)
-
-          expect_copy_from(s3_object, "s3-upload-bucket/discourse-uploads/#{image_path}")
-
-          s3_bucket.expects(:object).with(
-            "discourse-uploads/#{image_path}"
-          ).returns(s3_object)
-
-          s3_object.expects(:delete)
+          expect(bucket.find_object(upload_key)).to be_present
+          expect(bucket.find_object(optimized_key)).to be_present
+          expect(bucket.find_object(tombstone_key)).to be_nil
 
           store.remove_optimized_image(optimized_image)
+
+          expect(bucket.find_object(upload_key)).to be_present
+          expect(bucket.find_object(optimized_key)).to be_nil
+          expect(bucket.find_object(tombstone_key)).to be_present
         end
       end
     end
@@ -484,11 +471,20 @@ describe FileStore::S3Store do
     end
   end
 
-  def expect_copy_from(s3_object, source)
-    s3_object.expects(:copy_from).with(
-      copy_source: source
-    ).returns(
-      stub(copy_object_result: stub(etag: '"etagtest"'))
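+  # Seeds a FakeS3 with the given upload already stored, so specs can make
+  # assertions about bucket contents instead of stubbing individual calls.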
+  def prepare_fake_s3(upload_key, upload)
+    @fake_s3 = FakeS3.create
+    @fake_s3_bucket = @fake_s3.bucket(SiteSetting.s3_upload_bucket)
+    store_fake_s3_object(upload_key, upload)
+    @fake_s3_bucket
+  end
+
+  def store_fake_s3_object(upload_key, upload)
+    @fake_s3_bucket.put_object(
+      key: upload_key,
+      size: upload.filesize,
+      last_modified: upload.created_at
     )
   end
 end
diff --git a/spec/multisite/s3_store_spec.rb b/spec/multisite/s3_store_spec.rb
index c0e66060e3..2429802424 100644
--- a/spec/multisite/s3_store_spec.rb
+++ b/spec/multisite/s3_store_spec.rb
@@ -99,61 +99,73 @@ RSpec.describe 'Multisite s3 uploads', type: :multisite do
   describe "#remove_upload" do
     let(:store) { FileStore::S3Store.new }
-    let(:client) { Aws::S3::Client.new(stub_responses: true) }
-    let(:resource) { Aws::S3::Resource.new(client: client) }
-    let(:s3_bucket) { resource.bucket(SiteSetting.s3_upload_bucket) }
-    let(:s3_helper) { store.s3_helper }
+
+    let(:upload) { build_upload }
+    let(:upload_key) { "#{upload_path}/original/1X/#{upload.sha1}.png" }
+
+    def prepare_fake_s3
+      @fake_s3 = FakeS3.create
+      bucket = @fake_s3.bucket(SiteSetting.s3_upload_bucket)
+      bucket.put_object(
+        key: upload_key,
+        size: upload.filesize,
+        last_modified: upload.created_at
+      )
+      bucket
+    end
 
     it "removes the file from s3 on multisite" do
       test_multisite_connection('default') do
-        upload = build_upload
-        s3_helper.expects(:s3_bucket).returns(s3_bucket).at_least_once
         upload.update!(url: "//s3-upload-bucket.s3.dualstack.us-west-1.amazonaws.com/#{upload_path}/original/1X/#{upload.sha1}.png")
-        s3_object = stub
+        tombstone_key = "uploads/tombstone/default/original/1X/#{upload.sha1}.png"
+        bucket = prepare_fake_s3
 
-        s3_bucket.expects(:object).with("uploads/tombstone/default/original/1X/#{upload.sha1}.png").returns(s3_object)
-        expect_copy_from(s3_object, "s3-upload-bucket/#{upload_path}/original/1X/#{upload.sha1}.png")
-        s3_bucket.expects(:object).with("#{upload_path}/original/1X/#{upload.sha1}.png").returns(s3_object)
-        s3_object.expects(:delete)
+        expect(bucket.find_object(upload_key)).to be_present
+        expect(bucket.find_object(tombstone_key)).to be_nil
 
         store.remove_upload(upload)
+
+        expect(bucket.find_object(upload_key)).to be_nil
+        expect(bucket.find_object(tombstone_key)).to be_present
       end
     end
 
     it "removes the file from s3 on another multisite db" do
       test_multisite_connection('second') do
-        upload = build_upload
-        s3_helper.expects(:s3_bucket).returns(s3_bucket).at_least_once
         upload.update!(url: "//s3-upload-bucket.s3.dualstack.us-west-1.amazonaws.com/#{upload_path}/original/1X/#{upload.sha1}.png")
-        s3_object = stub
+        tombstone_key = "uploads/tombstone/second/original/1X/#{upload.sha1}.png"
+        bucket = prepare_fake_s3
 
-        s3_bucket.expects(:object).with("uploads/tombstone/second/original/1X/#{upload.sha1}.png").returns(s3_object)
-        expect_copy_from(s3_object, "s3-upload-bucket/#{upload_path}/original/1X/#{upload.sha1}.png")
-        s3_bucket.expects(:object).with("#{upload_path}/original/1X/#{upload.sha1}.png").returns(s3_object)
-        s3_object.expects(:delete)
+        expect(bucket.find_object(upload_key)).to be_present
+        expect(bucket.find_object(tombstone_key)).to be_nil
 
         store.remove_upload(upload)
+
+        expect(bucket.find_object(upload_key)).to be_nil
+        expect(bucket.find_object(tombstone_key)).to be_present
      end
    end
 
     describe "when s3_upload_bucket includes folders path" do
+      let(:upload_key) { "discourse-uploads/#{upload_path}/original/1X/#{upload.sha1}.png" }
+
       before do
         SiteSetting.s3_upload_bucket = "s3-upload-bucket/discourse-uploads"
       end
 
       it "removes the file from s3 on multisite" do
         test_multisite_connection('default') do
-          upload = build_upload
-          s3_helper.expects(:s3_bucket).returns(s3_bucket).at_least_once
           upload.update!(url: "//s3-upload-bucket.s3.dualstack.us-west-1.amazonaws.com/discourse-uploads/#{upload_path}/original/1X/#{upload.sha1}.png")
-          s3_object = stub
+          tombstone_key = "discourse-uploads/uploads/tombstone/default/original/1X/#{upload.sha1}.png"
+          bucket = prepare_fake_s3
 
-          s3_bucket.expects(:object).with("discourse-uploads/uploads/tombstone/default/original/1X/#{upload.sha1}.png").returns(s3_object)
-          expect_copy_from(s3_object, "s3-upload-bucket/discourse-uploads/#{upload_path}/original/1X/#{upload.sha1}.png")
-          s3_bucket.expects(:object).with("discourse-uploads/#{upload_path}/original/1X/#{upload.sha1}.png").returns(s3_object)
-          s3_object.expects(:delete)
+          expect(bucket.find_object(upload_key)).to be_present
+          expect(bucket.find_object(tombstone_key)).to be_nil
 
           store.remove_upload(upload)
+
+          expect(bucket.find_object(upload_key)).to be_nil
+          expect(bucket.find_object(tombstone_key)).to be_present
         end
       end
     end
@@ -345,12 +357,4 @@ RSpec.describe 'Multisite s3 uploads', type: :multisite do
       end
     end
   end
-
-  def expect_copy_from(s3_object, source)
-    s3_object.expects(:copy_from).with(
-      copy_source: source
-    ).returns(
-      stub(copy_object_result: stub(etag: '"etagtest"'))
-    )
-  end
 end
diff --git a/spec/services/external_upload_manager_spec.rb b/spec/services/external_upload_manager_spec.rb
index aa9e31969c..6a0332ea18 100644
--- a/spec/services/external_upload_manager_spec.rb
+++ b/spec/services/external_upload_manager_spec.rb
@@ -2,7 +2,6 @@
 
 RSpec.describe ExternalUploadManager do
   fab!(:user) { Fabricate(:user) }
-  let(:type) { "card_background" }
   let!(:logo_file) { file_from_fixtures("logo.png") }
   let!(:pdf_file) { file_from_fixtures("large.pdf", "pdf") }
   let(:object_size) { 1.megabyte }
@@ -10,9 +9,9 @@ RSpec.describe ExternalUploadManager do
   let(:client_sha1) { Upload.generate_digest(object_file) }
   let(:sha1) { Upload.generate_digest(object_file) }
   let(:object_file) { logo_file }
-  let(:metadata_headers) { {} }
+  let(:external_upload_stub_metadata) { {} }
   let!(:external_upload_stub) { Fabricate(:image_external_upload_stub, created_by: user) }
-  let(:upload_base_url) { "https://#{SiteSetting.s3_upload_bucket}.s3.#{SiteSetting.s3_region}.amazonaws.com" }
+  let(:s3_bucket_name) { SiteSetting.s3_upload_bucket }
 
   subject do
     ExternalUploadManager.new(external_upload_stub)
@@ -27,10 +26,8 @@ RSpec.describe ExternalUploadManager do
     SiteSetting.s3_backup_bucket = "s3-backup-bucket"
     SiteSetting.backup_location = BackupLocationSiteSetting::S3
 
-    stub_head_object
+    prepare_fake_s3
     stub_download_object_filehelper
-    stub_copy_object
-    stub_delete_object
   end
 
   describe "#ban_user_from_external_uploads!" do
@@ -69,6 +66,7 @@ RSpec.describe ExternalUploadManager do
       before do
         external_upload_stub.update!(status: ExternalUploadStub.statuses[:uploaded])
       end
+
       it "raises an error" do
         expect { subject.transform! }.to raise_error(ExternalUploadManager::CannotPromoteError)
       end
@@ -77,14 +75,11 @@ RSpec.describe ExternalUploadManager do
       context "when the upload does not get changed in UploadCreator (resized etc.)" do
         it "copies the stubbed upload on S3 to its new destination and deletes it" do
           upload = subject.transform!
-          expect(WebMock).to have_requested(
-            :put,
-            "#{upload_base_url}/#{Discourse.store.get_path_for_upload(upload)}",
-          ).with(headers: { 'X-Amz-Copy-Source' => "#{SiteSetting.s3_upload_bucket}/#{external_upload_stub.key}" })
-          expect(WebMock).to have_requested(
-            :delete,
-            "#{upload_base_url}/#{external_upload_stub.key}"
-          )
+
+          bucket = @fake_s3.bucket(SiteSetting.s3_upload_bucket)
+          expect(@fake_s3.operation_called?(:copy_object)).to eq(true)
+          expect(bucket.find_object(Discourse.store.get_path_for_upload(upload))).to be_present
+          expect(bucket.find_object(external_upload_stub.key)).to be_nil
         end
 
         it "errors if the image upload is too big" do
@@ -105,22 +100,22 @@ RSpec.describe ExternalUploadManager do
       end
 
       context "when the upload does get changed by the UploadCreator" do
-        let(:file) { file_from_fixtures("should_be_jpeg.heic", "images") }
+        let(:object_file) { file_from_fixtures("should_be_jpeg.heic", "images") }
+        let(:object_size) { 1.megabyte }
+        let(:external_upload_stub) { Fabricate(:image_external_upload_stub, original_filename: "should_be_jpeg.heic", filesize: object_size) }
 
         it "creates a new upload in s3 (not copy) and deletes the original stubbed upload" do
           upload = subject.transform!
-          expect(WebMock).to have_requested(
-            :put,
-            "#{upload_base_url}/#{Discourse.store.get_path_for_upload(upload)}",
-          )
-          expect(WebMock).to have_requested(
-            :delete, "#{upload_base_url}/#{external_upload_stub.key}"
-          )
+
+          bucket = @fake_s3.bucket(SiteSetting.s3_upload_bucket)
+          expect(@fake_s3.operation_called?(:copy_object)).to eq(false)
+          expect(bucket.find_object(Discourse.store.get_path_for_upload(upload))).to be_present
+          expect(bucket.find_object(external_upload_stub.key)).to be_nil
         end
       end
 
       context "when the sha has been set on the s3 object metadata by the clientside JS" do
-        let(:metadata_headers) { { "x-amz-meta-sha1-checksum" => client_sha1 } }
+        let(:external_upload_stub_metadata) { { "sha1-checksum" => client_sha1 } }
 
         context "when the downloaded file sha1 does not match the client sha1" do
           let(:client_sha1) { "blahblah" }
@@ -128,6 +123,9 @@ RSpec.describe ExternalUploadManager do
           it "raises an error, deletes the stub" do
             expect { subject.transform! }.to raise_error(ExternalUploadManager::ChecksumMismatchError)
             expect(ExternalUploadStub.exists?(id: external_upload_stub.id)).to eq(false)
+
+            bucket = @fake_s3.bucket(SiteSetting.s3_upload_bucket)
+            expect(bucket.find_object(external_upload_stub.key)).to be_nil
           end
 
           it "does not delete the stub if enable_upload_debug_mode" do
@@ -135,6 +133,9 @@ RSpec.describe ExternalUploadManager do
            expect { subject.transform! 
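+# An in-memory fake of the S3 API: responses from a stubbed Aws::S3::Client
+# are served out of FakeS3Bucket objects, and every client operation is
+# recorded so specs can assert on bucket contents and on the calls made.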
+class FakeS3
+  attr_reader :s3_client
+
+  def self.create
+    s3 = self.new
+    s3.stub_bucket(SiteSetting.s3_upload_bucket) if SiteSetting.s3_upload_bucket.present?
+    s3.stub_bucket(File.join(SiteSetting.s3_backup_bucket, RailsMultisite::ConnectionManagement.current_db)) if SiteSetting.s3_backup_bucket.present?
+    s3.stub_s3_helper
+    s3
+  end
+
+  def initialize
+    @buckets = {}
+    @operations = []
+    @s3_client = Aws::S3::Client.new(stub_responses: true, region: SiteSetting.s3_region)
+
+    stub_methods
+  end
+
+  def bucket(bucket_name)
+    bucket_name, _prefix = bucket_name.split("/", 2)
+    @buckets[bucket_name]
+  end
+
+  def stub_bucket(full_bucket_name)
+    bucket_name, _prefix = full_bucket_name.split("/", 2)
+
+    s3_helper = S3Helper.new(
+      full_bucket_name,
+      Rails.configuration.multisite ? FileStore::S3Store.new.multisite_tombstone_prefix : FileStore::S3Store::TOMBSTONE_PREFIX,
+      client: @s3_client
+    )
+    @buckets[bucket_name] = FakeS3Bucket.new(full_bucket_name, s3_helper)
+  end
+
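+  # Ensures any S3Helper the application builds for one of the stubbed
+  # buckets is wired to the fake client instead of a real S3 endpoint.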
+  def stub_s3_helper
+    @buckets.each do |bucket_name, bucket|
+      S3Helper.stubs(:new)
+        .with { |b| b == bucket_name || b == bucket.name }
+        .returns(bucket.s3_helper)
+    end
+  end
+
+  def operation_called?(name)
+    @operations.any? do |operation|
+      operation[:name] == name && (block_given? ? yield(operation) : true)
+    end
+  end
+
+  private
+
+  def find_bucket(params)
+    bucket(params[:bucket])
+  end
+
+  def find_object(params)
+    bucket = find_bucket(params)
+    bucket&.find_object(params[:key])
+  end
+
+  def log_operation(context)
+    @operations << {
+      name: context.operation_name,
+      params: context.params.dup
+    }
+  end
+
+  def calculate_etag(context)
+    # simple, reproducible ETag calculation
+    Digest::MD5.hexdigest(context.params.to_json)
+  end
+
+  def stub_methods
+    @s3_client.stub_responses(:head_object, -> (context) do
+      log_operation(context)
+
+      if object = find_object(context.params)
+        { content_length: object[:size], last_modified: object[:last_modified], metadata: object[:metadata] }
+      else
+        { status_code: 404, headers: {}, body: "" }
+      end
+    end)
+
+    @s3_client.stub_responses(:get_object, -> (context) do
+      log_operation(context)
+
+      if object = find_object(context.params)
+        { content_length: object[:size], body: "" }
+      else
+        { status_code: 404, headers: {}, body: "" }
+      end
+    end)
+
+    @s3_client.stub_responses(:delete_object, -> (context) do
+      log_operation(context)
+
+      find_bucket(context.params)&.delete_object(context.params[:key])
+      nil
+    end)
+
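+    # Emulates server-side copy: the source object is looked up via
+    # copy_source and stored again under the destination bucket and key.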
+    @s3_client.stub_responses(:copy_object, -> (context) do
+      log_operation(context)
+
+      source_bucket_name, source_key = context.params[:copy_source].split("/", 2)
+      copy_source = { bucket: source_bucket_name, key: source_key }
+
+      if context.params[:metadata_directive] == "REPLACE"
+        attribute_overrides = context.params.except(:copy_source, :metadata_directive)
+      else
+        attribute_overrides = context.params.slice(:key, :bucket)
+      end
+
+      new_object = find_object(copy_source).dup.merge(attribute_overrides)
+      find_bucket(new_object).put_object(new_object)
+
+      { copy_object_result: { etag: calculate_etag(context) } }
+    end)
+
+    @s3_client.stub_responses(:create_multipart_upload, -> (context) do
+      log_operation(context)
+
+      find_bucket(context.params).put_object(context.params)
+      { upload_id: SecureRandom.hex }
+    end)
+
+    @s3_client.stub_responses(:put_object, -> (context) do
+      log_operation(context)
+
+      find_bucket(context.params).put_object(context.params)
+      { etag: calculate_etag(context) }
+    end)
+  end
+end
+
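+# A minimal in-memory bucket; objects are stored as plain attribute hashes
+# in a map keyed by object key.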
+class FakeS3Bucket
+  attr_reader :name, :s3_helper
+
+  def initialize(bucket_name, s3_helper)
+    @name = bucket_name
+    @s3_helper = s3_helper
+    @objects = {}
+  end
+
+  def put_object(obj)
+    @objects[obj[:key]] = obj
+  end
+
+  def delete_object(key)
+    @objects.delete(key)
+  end
+
+  def find_object(key)
+    @objects[key]
+  end
+end