FEATURE: allow S3 ACLs to be disabled (#21769)

AWS recommends running buckets without ACLs and using resource policies to manage access control instead.
This is not a bad idea, because S3 ACLs are whack, and while resource policies are also whack, they're a more constrained form of whack.
Further, some compliance regimes get antsy if you don't go with the vendor's recommended settings, and arguing that you need to enable ACLs on a bucket just to store images in there is more hassle than it's worth.
The new site setting (s3_use_acls) cannot be disabled when secure uploads is enabled -- the latter relies on private ACLs for security at this point in time. We may want to reexamine this in future.
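
The mechanical shape of the change is the same at every call site: wherever an ACL was hard-coded, the code now consults the new setting and passes nil when ACLs are disabled. Below is a minimal sketch of that recurring pattern, assuming the surrounding call sites drop a nil ACL before it reaches the AWS SDK; the desired_acl helper and the Hash#compact step are illustrative only, since the commit itself inlines the ternary at each call site.

# Sketch only: `desired_acl` is a hypothetical helper, not part of this commit.
# The real call sites inline `SiteSetting.s3_use_acls ? ... : nil` directly.
def desired_acl(private_object: true)
  return nil unless SiteSetting.s3_use_acls
  private_object ? "private" : "public-read"
end

# Example shape of an upload call, pruning the nil option before it is handed
# to the S3 client (an assumption about nil handling, not behaviour shown in the diff).
options = {
  acl: desired_acl(private_object: false),
  cache_control: "max-age=31556952, public, immutable",
}.compact
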
Author: Matt Palmer
Date: 2023-06-06 15:47:40 +10:00
Committed by: GitHub
Parent: d2ef490e9a
Commit: a98d2a8086
11 changed files with 103 additions and 13 deletions

@@ -80,7 +80,7 @@ module BackupRestore
expires_in: expires_in,
opts: {
metadata: metadata,
acl: "private",
acl: SiteSetting.s3_use_acls ? "private" : nil,
},
)
end
@@ -115,7 +115,7 @@ module BackupRestore
existing_external_upload_key,
File.join(s3_helper.s3_bucket_folder_path, original_filename),
options: {
acl: "private",
acl: SiteSetting.s3_use_acls ? "private" : nil,
apply_metadata_to_destination: true,
},
)

@@ -87,7 +87,7 @@ module FileStore
# cache file locally when needed
cache_file(file, File.basename(path)) if opts[:cache_locally]
options = {
-acl: opts[:private_acl] ? "private" : "public-read",
+acl: SiteSetting.s3_use_acls ? (opts[:private_acl] ? "private" : "public-read") : nil,
cache_control: "max-age=31556952, public, immutable",
content_type:
opts[:content_type].presence || MiniMime.lookup_by_filename(filename)&.content_type,
@@ -262,7 +262,7 @@ module FileStore
expires_in: expires_in,
opts: {
metadata: metadata,
acl: "private",
acl: SiteSetting.s3_use_acls ? "private" : nil,
},
)
end
@@ -397,7 +397,9 @@ module FileStore
def update_ACL(key, secure)
begin
-object_from_path(key).acl.put(acl: secure ? "private" : "public-read")
+object_from_path(key).acl.put(
+  acl: SiteSetting.s3_use_acls ? (secure ? "private" : "public-read") : nil,
+)
rescue Aws::S3::Errors::NoSuchKey
Rails.logger.warn("Could not update ACL on upload with key: '#{key}'. Upload is missing.")
end

@@ -241,7 +241,7 @@ module FileStore
end
options = {
acl: "public-read",
acl: SiteSetting.s3_use_acls ? "public-read" : nil,
body: File.open(path, "rb"),
bucket: bucket,
content_type: MiniMime.lookup_by_filename(name)&.content_type,

@@ -293,7 +293,7 @@ class S3Helper
def create_multipart(key, content_type, metadata: {})
response =
s3_client.create_multipart_upload(
acl: "private",
acl: SiteSetting.s3_use_acls ? "private" : nil,
bucket: s3_bucket_name,
key: key,
content_type: content_type,

@@ -168,11 +168,15 @@ module SiteSettings::Validations
end
def validate_secure_uploads(new_val)
if new_val == "t" && !SiteSetting.Upload.enable_s3_uploads
if new_val == "t" && (!SiteSetting.Upload.enable_s3_uploads || !SiteSetting.s3_use_acls)
validate_error :secure_uploads_requirements
end
end
+def validate_s3_use_acls(new_val)
+  validate_error :s3_use_acls_requirements if new_val == "f" && SiteSetting.secure_uploads
+end
def validate_enable_page_publishing(new_val)
validate_error :page_publishing_requirements if new_val == "t" && SiteSetting.secure_uploads?
end
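
Taken together, the two validators above enforce one invariant: secure uploads can only be on while S3 uploads and ACLs are both enabled, and s3_use_acls cannot be switched off while secure uploads is still on. A hedged restatement of that invariant as a plain predicate follows; s3_settings_consistent? is illustrative only, since the real validators raise via validate_error rather than returning a boolean.

# Illustrative predicate mirroring the checks above; not Discourse code.
def s3_settings_consistent?(secure_uploads:, enable_s3_uploads:, s3_use_acls:)
  return true unless secure_uploads
  enable_s3_uploads && s3_use_acls
end

s3_settings_consistent?(secure_uploads: true, enable_s3_uploads: true, s3_use_acls: false)
# => false -- the same state the :secure_uploads_requirements and
#    :s3_use_acls_requirements errors guard against
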

@@ -28,7 +28,7 @@ def upload(path, remote_path, content_type, content_encoding = nil)
options = {
cache_control: "max-age=31556952, public, immutable",
content_type: content_type,
acl: "public-read",
acl: SiteSetting.s3_use_acls ? "public-read" : nil,
}
options[:content_encoding] = content_encoding if content_encoding
@@ -104,6 +104,11 @@ end
task "s3:correct_acl" => :environment do
ensure_s3_configured!
+if !SiteSetting.s3_use_acls
+  $stderr.puts "Not correcting ACLs as the site is configured to not use ACLs"
+  return
+end
puts "ensuring public-read is set on every upload and optimized image"
i = 0
@@ -158,7 +163,7 @@ task "s3:correct_cachecontrol" => :environment do
object = Discourse.store.s3_helper.object(key)
object.copy_from(
copy_source: "#{object.bucket_name}/#{object.key}",
acl: "public-read",
acl: SiteSetting.s3_use_acls ? "public-read" : nil,
cache_control: cache_control,
content_type: object.content_type,
content_disposition: object.content_disposition,

@@ -150,7 +150,13 @@ class UploadRecovery
old_key = key
key = key.sub(tombstone_prefix, "")
-Discourse.store.s3_helper.copy(old_key, key, options: { acl: "public-read" })
+Discourse.store.s3_helper.copy(
+  old_key,
+  key,
+  options: {
+    acl: SiteSetting.s3_use_acls ? "public-read" : nil,
+  },
+)
end
next if upload_exists