discourse/lib/s3_helper.rb


# frozen_string_literal: true

require "aws-sdk-s3"

class S3Helper

  class SettingMissing < StandardError; end

  attr_reader :s3_bucket_name, :s3_bucket_folder_path

  DOWNLOAD_URL_EXPIRES_AFTER_SECONDS ||= 15

  def initialize(s3_bucket_name, tombstone_prefix = '', options = {})
    @s3_client = options.delete(:client)
    @s3_options = default_s3_options.merge(options)

    @s3_bucket_name, @s3_bucket_folder_path = begin
      raise Discourse::InvalidParameters.new("s3_bucket_name") if s3_bucket_name.blank?
      self.class.get_bucket_and_folder_path(s3_bucket_name)
    end

    @tombstone_prefix =
      if @s3_bucket_folder_path
        File.join(@s3_bucket_folder_path, tombstone_prefix)
      else
        tombstone_prefix
      end
  end

  def self.get_bucket_and_folder_path(s3_bucket_name)
    s3_bucket_name.downcase.split("/".freeze, 2)
  end
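
  # Example (illustrative only, not in the original file): a bucket setting
  # may embed a folder path, which is split off at the first "/" after
  # downcasing:
  #
  #   S3Helper.get_bucket_and_folder_path("My-Bucket/site")
  #   # => ["my-bucket", "site"]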

  def upload(file, path, options = {})
    path = get_path_for_s3_upload(path)
    obj = s3_bucket.object(path)

    etag = begin
      if File.size(file.path) >= Aws::S3::FileUploader::FIFTEEN_MEGABYTES
        options[:multipart_threshold] = Aws::S3::FileUploader::FIFTEEN_MEGABYTES
        obj.upload_file(file, options)
        obj.load
        obj.etag
      else
        options[:body] = file
        obj.put(options).etag
      end
    end

    return path, etag.gsub('"', '')
  end
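
  # Usage sketch (hypothetical helper instance and file names): files at or
  # above the SDK's 15 MB threshold go through a multipart upload_file call,
  # smaller ones through a single put; both return the final key and the
  # ETag with its surrounding quotes stripped.
  #
  #   path, etag = helper.upload(File.open("avatar.png"), "original/avatar.png")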

  def remove(s3_filename, copy_to_tombstone = false)
    s3_filename = s3_filename.dup

    # copy the file to the tombstone
    if copy_to_tombstone && @tombstone_prefix.present?
      self.copy(
        get_path_for_s3_upload(s3_filename),
        File.join(@tombstone_prefix, s3_filename)
      )
    end

    # delete the file
    s3_filename.prepend(multisite_upload_path) if Rails.configuration.multisite
    s3_bucket.object(get_path_for_s3_upload(s3_filename)).delete
  rescue Aws::S3::Errors::NoSuchKey
  end

  def copy(source, destination, options: {})
    if !Rails.configuration.multisite
      options[:copy_source] = File.join(@s3_bucket_name, source)
    else
      if @s3_bucket_folder_path
        folder, filename = begin
          source.split("/".freeze, 2)
        end
        options[:copy_source] = File.join(@s3_bucket_name, folder, multisite_upload_path, filename)
      else
        options[:copy_source] = File.join(@s3_bucket_name, multisite_upload_path, source)
      end
    end

    s3_bucket
      .object(destination)
      .copy_from(options)
  end

  # make sure we have a CORS config for assets,
  # otherwise we will have no fonts
  def ensure_cors!(rules = nil)
    rule = nil

    begin
      rule = s3_resource.client.get_bucket_cors(
        bucket: @s3_bucket_name
      ).cors_rules&.first
    rescue Aws::S3::Errors::NoSuchCORSConfiguration
      # no rule
    end

    unless rule
      rules = [{
        allowed_headers: ["Authorization"],
        allowed_methods: ["GET", "HEAD"],
        allowed_origins: ["*"],
        max_age_seconds: 3000
      }] if rules.nil?

      s3_resource.client.put_bucket_cors(
        bucket: @s3_bucket_name,
        cors_configuration: {
          cors_rules: rules
        }
      )
    end
  end
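
  # Sketch of a custom rule set (hypothetical origin value): a passed-in
  # rules array only takes effect when the bucket has no CORS rule yet;
  # an existing rule is left untouched.
  #
  #   helper.ensure_cors!([{
  #     allowed_headers: ["*"],
  #     allowed_methods: ["GET"],
  #     allowed_origins: ["https://cdn.example.com"],
  #     max_age_seconds: 3000
  #   }])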

  def update_lifecycle(id, days, prefix: nil, tag: nil)
    filter = {}

    if prefix
      filter[:prefix] = prefix
    elsif tag
      filter[:tag] = tag
    end

    # cf. http://docs.aws.amazon.com/AmazonS3/latest/dev/object-lifecycle-mgmt.html
    rule = {
      id: id,
      status: "Enabled",
      expiration: { days: days },
      filter: filter
    }

    rules = []

    begin
      rules = s3_resource.client.get_bucket_lifecycle_configuration(bucket: @s3_bucket_name).rules
    rescue Aws::S3::Errors::NoSuchLifecycleConfiguration
      # skip trying to merge
    end

    # in the past we had a rule that was called purge-tombstone vs purge_tombstone
    # just go ahead and normalize for our bucket
    rules.delete_if do |r|
      r.id.gsub('_', '-') == id.gsub('_', '-')
    end

    rules << rule

    # normalize filter in rules, due to AWS library bug
    rules = rules.map do |r|
      r = r.to_h
      prefix = r.delete(:prefix)
      if prefix
        r[:filter] = { prefix: prefix }
      end
      r
    end

    s3_resource.client.put_bucket_lifecycle_configuration(
      bucket: @s3_bucket_name,
      lifecycle_configuration: {
        rules: rules
      })
  end
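
  # Hypothetical illustration (rule id and prefix are not from the original
  # file): expire anything under "tmp/" after 7 days.
  #
  #   helper.update_lifecycle("purge-tmp", 7, prefix: "tmp/")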

  def update_tombstone_lifecycle(grace_period)
    return if !SiteSetting.s3_configure_tombstone_policy
    return if @tombstone_prefix.blank?
    update_lifecycle("purge_tombstone", grace_period, prefix: @tombstone_prefix)
  end

  def list(prefix = "", marker = nil)
    options = { prefix: get_path_for_s3_upload(prefix) }
    options[:marker] = marker if marker.present?
    s3_bucket.objects(options)
  end

  def tag_file(key, tags)
    tag_array = []
    tags.each do |k, v|
      tag_array << { key: k.to_s, value: v.to_s }
    end

    s3_resource.client.put_object_tagging(
      bucket: @s3_bucket_name,
      key: key,
      tagging: {
        tag_set: tag_array
      }
    )
  end
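
  # Example (hypothetical key and tags): keys and values are stringified
  # into tag_set entries.
  #
  #   helper.tag_file("uploads/default/original/file.png", old: true)
  #   # tag_set: [{ key: "old", value: "true" }]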

  def object(path)
    s3_bucket.object(get_path_for_s3_upload(path))
  end

  def self.s3_options(obj)
    opts = {
      region: obj.s3_region
    }

    opts[:endpoint] = SiteSetting.s3_endpoint if SiteSetting.s3_endpoint.present?

    unless obj.s3_use_iam_profile
      opts[:access_key_id] = obj.s3_access_key_id
      opts[:secret_access_key] = obj.s3_secret_access_key
    end

    opts
  end

  def download_file(filename, destination_path, failure_message = nil)
    unless object(filename).download_file(destination_path)
      raise failure_message&.to_s || "Failed to download file"
    end
  end

  def s3_client
    @s3_client ||= Aws::S3::Client.new(@s3_options)
  end

  def s3_inventory_path(path = 'inventory')
    get_path_for_s3_upload(path)
  end

  private

  def default_s3_options
    if SiteSetting.enable_s3_uploads?
      options = self.class.s3_options(SiteSetting)
      check_missing_site_options
      options
    elsif GlobalSetting.use_s3?
      self.class.s3_options(GlobalSetting)
    else
      {}
    end
  end

  def get_path_for_s3_upload(path)
    path = File.join(@s3_bucket_folder_path, path) if @s3_bucket_folder_path && path !~ /^#{@s3_bucket_folder_path}\//
    path
  end

  def multisite_upload_path
    File.join("uploads", RailsMultisite::ConnectionManagement.current_db, "/")
  end

  def s3_resource
    Aws::S3::Resource.new(client: s3_client)
  end

  def s3_bucket
    @s3_bucket ||= begin
      bucket = s3_resource.bucket(@s3_bucket_name)
      bucket.create unless bucket.exists?
      bucket
    end
  end

  def check_missing_site_options
    unless SiteSetting.s3_use_iam_profile
      raise SettingMissing.new("access_key_id") if SiteSetting.s3_access_key_id.blank?
      raise SettingMissing.new("secret_access_key") if SiteSetting.s3_secret_access_key.blank?
    end
  end
end
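
# Putting it together: a minimal sketch, assuming a bucket named "my-bucket"
# with an "uploads" folder path and a tombstone prefix (all names here are
# illustrative, not from the original file):
#
#   helper = S3Helper.new("my-bucket/uploads", "tombstone/")
#   helper.ensure_cors!
#   path, etag = helper.upload(File.open("image.png"), "image.png")
#   helper.remove("image.png", true)   # copy to tombstone, then delete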