FIX: Allow S3Helper.ensure_cors! to apply more than one rule
The ensure_cors! method needs to be able to add CORS rules for the S3 bucket for three things now:

* assets
* direct S3 backups
* direct S3 uploads

As it stands, only one rule can be applied — generally the assets rule, since it is applied first. This commit changes the ensure_cors! method so it can apply new rules alongside the existing ones. This commit also calls ensure_cors! when creating presigned PUT and multipart uploads for direct-to-S3 uploading, so the rules are guaranteed to exist and errors in the upload process are avoided.
This commit is contained in:
parent
dcf3733c13
commit
ac12948e8d
@@ -224,22 +224,15 @@ class UploadsController < ApplicationController
|
||||
)
|
||||
end
|
||||
|
||||
metadata = parse_allowed_metadata(params[:metadata])
|
||||
|
||||
url = Discourse.store.signed_url_for_temporary_upload(
|
||||
file_name, metadata: metadata
|
||||
)
|
||||
key = Discourse.store.path_from_url(url)
|
||||
|
||||
upload_stub = ExternalUploadStub.create!(
|
||||
key: key,
|
||||
created_by: current_user,
|
||||
original_filename: file_name,
|
||||
external_upload_data = ExternalUploadManager.create_direct_upload(
|
||||
current_user: current_user,
|
||||
file_name: file_name,
|
||||
file_size: file_size,
|
||||
upload_type: type,
|
||||
filesize: file_size
|
||||
metadata: parse_allowed_metadata(params[:metadata])
|
||||
)
|
||||
|
||||
render json: { url: url, key: key, unique_identifier: upload_stub.unique_identifier }
|
||||
render json: external_upload_data
|
||||
end
|
||||
|
||||
def complete_external_upload
|
||||
@@ -307,7 +300,6 @@ class UploadsController < ApplicationController
|
||||
file_name = params.require(:file_name)
|
||||
file_size = params.require(:file_size).to_i
|
||||
upload_type = params.require(:upload_type)
|
||||
content_type = MiniMime.lookup_by_filename(file_name)&.content_type
|
||||
|
||||
if file_size_too_big?(file_name, file_size)
|
||||
return render_json_error(
|
||||
@@ -316,11 +308,13 @@ class UploadsController < ApplicationController
|
||||
)
|
||||
end
|
||||
|
||||
metadata = parse_allowed_metadata(params[:metadata])
|
||||
|
||||
begin
|
||||
multipart_upload = Discourse.store.create_multipart(
|
||||
file_name, content_type, metadata: metadata
|
||||
external_upload_data = ExternalUploadManager.create_direct_multipart_upload(
|
||||
current_user: current_user,
|
||||
file_name: file_name,
|
||||
file_size: file_size,
|
||||
upload_type: upload_type,
|
||||
metadata: parse_allowed_metadata(params[:metadata])
|
||||
)
|
||||
rescue Aws::S3::Errors::ServiceError => err
|
||||
return render_json_error(
|
||||
@@ -329,21 +323,7 @@ class UploadsController < ApplicationController
|
||||
)
|
||||
end
|
||||
|
||||
upload_stub = ExternalUploadStub.create!(
|
||||
key: multipart_upload[:key],
|
||||
created_by: current_user,
|
||||
original_filename: file_name,
|
||||
upload_type: upload_type,
|
||||
external_upload_identifier: multipart_upload[:upload_id],
|
||||
multipart: true,
|
||||
filesize: file_size
|
||||
)
|
||||
|
||||
render json: {
|
||||
external_upload_identifier: upload_stub.external_upload_identifier,
|
||||
key: upload_stub.key,
|
||||
unique_identifier: upload_stub.unique_identifier
|
||||
}
|
||||
render json: external_upload_data
|
||||
end
|
||||
|
||||
def batch_presign_multipart_parts
|
||||
|
||||
@@ -20,6 +20,50 @@ class ExternalUploadManager
|
||||
Discourse.redis.get("#{BAN_USER_REDIS_PREFIX}#{user.id}") == "1"
|
||||
end
|
||||
|
||||
def self.create_direct_upload(current_user:, file_name:, file_size:, upload_type:, metadata: {})
|
||||
Discourse.store.s3_helper.ensure_cors!([S3CorsRulesets::DIRECT_UPLOAD])
|
||||
url = Discourse.store.signed_url_for_temporary_upload(
|
||||
file_name, metadata: metadata
|
||||
)
|
||||
key = Discourse.store.path_from_url(url)
|
||||
|
||||
upload_stub = ExternalUploadStub.create!(
|
||||
key: key,
|
||||
created_by: current_user,
|
||||
original_filename: file_name,
|
||||
upload_type: upload_type,
|
||||
filesize: file_size
|
||||
)
|
||||
|
||||
{ url: url, key: key, unique_identifier: upload_stub.unique_identifier }
|
||||
end
|
||||
|
||||
def self.create_direct_multipart_upload(
|
||||
current_user:, file_name:, file_size:, upload_type:, metadata: {}
|
||||
)
|
||||
Discourse.store.s3_helper.ensure_cors!([S3CorsRulesets::DIRECT_UPLOAD])
|
||||
content_type = MiniMime.lookup_by_filename(file_name)&.content_type
|
||||
multipart_upload = Discourse.store.create_multipart(
|
||||
file_name, content_type, metadata: metadata
|
||||
)
|
||||
|
||||
upload_stub = ExternalUploadStub.create!(
|
||||
key: multipart_upload[:key],
|
||||
created_by: current_user,
|
||||
original_filename: file_name,
|
||||
upload_type: upload_type,
|
||||
external_upload_identifier: multipart_upload[:upload_id],
|
||||
multipart: true,
|
||||
filesize: file_size
|
||||
)
|
||||
|
||||
{
|
||||
external_upload_identifier: upload_stub.external_upload_identifier,
|
||||
key: upload_stub.key,
|
||||
unique_identifier: upload_stub.unique_identifier
|
||||
}
|
||||
end
|
||||
|
||||
def initialize(external_upload_stub, upload_create_opts = {})
|
||||
@external_upload_stub = external_upload_stub
|
||||
@upload_create_opts = upload_create_opts
|
||||
|
||||
@@ -44,7 +44,7 @@ module BackupRestore
|
||||
obj = @s3_helper.object(filename)
|
||||
raise BackupFileExists.new if obj.exists?
|
||||
|
||||
ensure_cors!
|
||||
@s3_helper.ensure_cors!([S3CorsRulesets::BACKUP_DIRECT_UPLOAD])
|
||||
presigned_url(obj, :put, UPLOAD_URL_EXPIRES_AFTER_SECONDS)
|
||||
rescue Aws::Errors::ServiceError => e
|
||||
Rails.logger.warn("Failed to generate upload URL for S3: #{e.message.presence || e.class.name}")
|
||||
@@ -82,10 +82,6 @@ module BackupRestore
|
||||
obj.presigned_url(method, expires_in: expires_in_seconds)
|
||||
end
|
||||
|
||||
def ensure_cors!
|
||||
@s3_helper.ensure_cors!([S3CorsRulesets::BACKUP_DIRECT_UPLOAD])
|
||||
end
|
||||
|
||||
def cleanup_allowed?
|
||||
!SiteSetting.s3_disable_cleanup
|
||||
end
|
||||
|
||||
@@ -14,4 +14,12 @@ class S3CorsRulesets
|
||||
allowed_origins: [Discourse.base_url_no_prefix],
|
||||
max_age_seconds: 3000
|
||||
}.freeze
|
||||
|
||||
DIRECT_UPLOAD = {
|
||||
allowed_headers: ["Authorization", "Content-Disposition", "Content-Type"],
|
||||
expose_headers: ["ETag"],
|
||||
allowed_methods: ["GET", "HEAD", "PUT"],
|
||||
allowed_origins: ["*"],
|
||||
max_age_seconds: 3000
|
||||
}
|
||||
end
|
||||
|
||||
@@ -124,31 +124,42 @@ class S3Helper
|
||||
[destination, response.copy_object_result.etag.gsub('"', '')]
|
||||
end
|
||||
|
||||
# make sure we have a cors config for assets
|
||||
# otherwise we will have no fonts
|
||||
# Several places in the application need to ensure that certain CORS
|
||||
# rules exist inside an S3 bucket so requests to the bucket can be made
|
||||
# directly from the browser.
|
||||
#
|
||||
# 1. The S3CorsRulesets::ASSETS rule is added by default,
|
||||
# otherwise we will have no fonts. This is called by the
|
||||
# s3:upload_assets rake task.
|
||||
# 2. When uploading backups to S3, the S3CorsRulesets::BACKUP_DIRECT_UPLOAD
|
||||
# rule is added to allow us to upload to a presigned URL in the backup
|
||||
# bucket.
|
||||
# 3. When using direct and multipart S3 uploads for the composer and elsewhere
|
||||
# the S3CorsRulesets::DIRECT_UPLOAD rules are added to allow us to upload
|
||||
# to a presigned URL in the main upload bucket.
|
||||
def ensure_cors!(rules = nil)
|
||||
return unless SiteSetting.s3_install_cors_rule
|
||||
|
||||
rule = nil
|
||||
rules = [S3CorsRulesets::ASSETS] if rules.nil?
|
||||
rules = [rules] if !rules.is_a?(Array)
|
||||
existing_rules = []
|
||||
|
||||
begin
|
||||
rule = s3_resource.client.get_bucket_cors(
|
||||
existing_rules = s3_resource.client.get_bucket_cors(
|
||||
bucket: @s3_bucket_name
|
||||
).cors_rules&.first
|
||||
).cors_rules&.map(&:to_h) || []
|
||||
rescue Aws::S3::Errors::NoSuchCORSConfiguration
|
||||
# no rule
|
||||
end
|
||||
|
||||
unless rule
|
||||
rules = [S3CorsRulesets::ASSETS] if rules.nil?
|
||||
new_rules = rules - existing_rules
|
||||
return if new_rules.empty?
|
||||
|
||||
s3_resource.client.put_bucket_cors(
|
||||
bucket: @s3_bucket_name,
|
||||
cors_configuration: {
|
||||
cors_rules: rules
|
||||
}
|
||||
)
|
||||
end
|
||||
s3_resource.client.put_bucket_cors(
|
||||
bucket: @s3_bucket_name,
|
||||
cors_configuration: {
|
||||
cors_rules: existing_rules + new_rules
|
||||
}
|
||||
)
|
||||
end
|
||||
|
||||
def update_lifecycle(id, days, prefix: nil, tag: nil)
|
||||
|
||||
@@ -158,12 +158,40 @@ describe "S3Helper" do
|
||||
s3_helper.ensure_cors!
|
||||
end
|
||||
|
||||
it "does not apply the passed in rules if a different rule already exists" do
|
||||
it "applies the passed in rule alongside the existing rule" do
|
||||
s3_helper.s3_client.stub_responses(:get_bucket_cors, {
|
||||
cors_rules: [S3CorsRulesets::ASSETS]
|
||||
})
|
||||
s3_helper.s3_client.expects(:put_bucket_cors).never
|
||||
s3_helper.ensure_cors!([S3CorsRulesets::BACKUP_DIRECT_UPLOAD])
|
||||
s3_helper.s3_client.expects(:put_bucket_cors).with(
|
||||
bucket: s3_helper.s3_bucket_name,
|
||||
cors_configuration: {
|
||||
cors_rules: [
|
||||
S3CorsRulesets::ASSETS,
|
||||
S3CorsRulesets::BACKUP_DIRECT_UPLOAD
|
||||
]
|
||||
}
|
||||
)
|
||||
s3_helper.ensure_cors!([
|
||||
S3CorsRulesets::BACKUP_DIRECT_UPLOAD
|
||||
])
|
||||
end
|
||||
|
||||
it "applies the passed in rule if it is slightly different from the existing rule" do
|
||||
new_rule = S3CorsRulesets::ASSETS.deep_dup
|
||||
new_rule[:allowed_headers] = ["GET", "PUT", "POST", "DELETE"]
|
||||
s3_helper.s3_client.stub_responses(:get_bucket_cors, {
|
||||
cors_rules: [S3CorsRulesets::ASSETS]
|
||||
})
|
||||
s3_helper.s3_client.expects(:put_bucket_cors).with(
|
||||
bucket: s3_helper.s3_bucket_name,
|
||||
cors_configuration: {
|
||||
cors_rules: [
|
||||
S3CorsRulesets::ASSETS,
|
||||
new_rule
|
||||
]
|
||||
}
|
||||
)
|
||||
s3_helper.ensure_cors!([new_rule])
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
@@ -267,6 +267,12 @@ shared_examples "remote backup store" do
|
||||
expect(url).to match(upload_url_regex("default", filename, multisite: false))
|
||||
end
|
||||
|
||||
it "ensures the CORS rules for backup uploads exist" do
|
||||
store.instance_variable_get(:@s3_helper).expects(:ensure_cors!).with([S3CorsRulesets::BACKUP_DIRECT_UPLOAD])
|
||||
filename = "foo.tar.gz"
|
||||
url = store.generate_upload_url(filename)
|
||||
end
|
||||
|
||||
it "raises an exception when a file with same filename exists" do
|
||||
expect { store.generate_upload_url(backup1.filename) }
|
||||
.to raise_exception(BackupRestore::BackupStore::BackupFileExists)
|
||||
|
||||
@@ -183,14 +183,15 @@ RSpec.describe 'Multisite s3 uploads', type: :multisite do
|
||||
it "returns signed URL with correct path" do
|
||||
test_multisite_connection('default') do
|
||||
upload = Fabricate(:upload, original_filename: "small.pdf", extension: "pdf", secure: true)
|
||||
path = Discourse.store.get_path_for_upload(upload)
|
||||
|
||||
s3_helper.expects(:s3_bucket).returns(s3_bucket).at_least_once
|
||||
s3_bucket.expects(:object).with("#{upload_path}/original/1X/#{upload.sha1}.pdf").returns(s3_object).at_least_once
|
||||
s3_bucket.expects(:object).with("#{upload_path}/#{path}").returns(s3_object).at_least_once
|
||||
s3_object.expects(:presigned_url).with(:get, expires_in: S3Helper::DOWNLOAD_URL_EXPIRES_AFTER_SECONDS)
|
||||
|
||||
upload.url = store.store_upload(uploaded_file, upload)
|
||||
expect(upload.url).to eq(
|
||||
"//some-really-cool-bucket.s3.dualstack.us-west-1.amazonaws.com/#{upload_path}/original/1X/#{upload.sha1}.pdf"
|
||||
"//some-really-cool-bucket.s3.dualstack.us-west-1.amazonaws.com/#{upload_path}/#{path}"
|
||||
)
|
||||
|
||||
expect(store.url_for(upload)).not_to eq(upload.url)
|
||||
|
||||
@@ -711,6 +711,7 @@ describe UploadsController do
|
||||
sign_in(user)
|
||||
SiteSetting.enable_direct_s3_uploads = true
|
||||
setup_s3
|
||||
stub_bucket_cors_requests
|
||||
end
|
||||
|
||||
it "errors if the correct params are not provided" do
|
||||
@@ -792,6 +793,7 @@ describe UploadsController do
|
||||
sign_in(user)
|
||||
SiteSetting.enable_direct_s3_uploads = true
|
||||
setup_s3
|
||||
stub_bucket_cors_requests
|
||||
FileStore::S3Store.any_instance.stubs(:temporary_upload_path).returns(
|
||||
"uploads/default/test_0/temp/28fccf8259bbe75b873a2bd2564b778c/test.png"
|
||||
)
|
||||
@@ -1076,6 +1078,22 @@ describe UploadsController do
|
||||
end
|
||||
end
|
||||
|
||||
def stub_bucket_cors_requests
|
||||
cors_response = <<~BODY
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<CORSConfiguration>
|
||||
</CORSConfiguration>
|
||||
BODY
|
||||
stub_request(
|
||||
:get,
|
||||
"https://s3-upload-bucket.s3.us-west-1.amazonaws.com/?cors"
|
||||
).to_return({ status: 200, body: "" })
|
||||
stub_request(
|
||||
:put,
|
||||
"https://s3-upload-bucket.s3.us-west-1.amazonaws.com/?cors"
|
||||
).to_return({ status: 200 })
|
||||
end
|
||||
|
||||
describe "#complete_multipart" do
|
||||
let(:upload_base_url) { "https://#{SiteSetting.s3_upload_bucket}.s3.#{SiteSetting.s3_region}.amazonaws.com" }
|
||||
let(:mock_multipart_upload_id) { "ibZBv_75gd9r8lH_gqXatLdxMVpAlj6CFTR.OwyF3953YdwbcQnMA2BLGn8Lx12fQNICtMw5KyteFeHw.Sjng--" }
|
||||
|
||||
Reference in New Issue
Block a user