Merge branch 's3_refactor'

This commit is contained in:
Sam 2017-10-09 10:27:52 +11:00
commit 4ea87b5ab8
28 changed files with 405 additions and 159 deletions

View File

@ -52,15 +52,29 @@ module ApplicationHelper
end
end
def is_brotli_req?
ENV["COMPRESS_BROTLI"] == "1" &&
request.env["HTTP_ACCEPT_ENCODING"] =~ /br/
end
def preload_script(script)
path = asset_path("#{script}.js")
if GlobalSetting.cdn_url &&
GlobalSetting.cdn_url.start_with?("https") &&
ENV["COMPRESS_BROTLI"] == "1" &&
request.env["HTTP_ACCEPT_ENCODING"] =~ /br/
if GlobalSetting.use_s3? && GlobalSetting.s3_cdn_url
if GlobalSetting.cdn_url
path.gsub!(GlobalSetting.cdn_url, GlobalSetting.s3_cdn_url)
else
path = "#{GlobalSetting.s3_cdn_url}#{path}"
end
if is_brotli_req?
path.gsub!(/\.([^.]+)$/, '.br.\1')
end
elsif GlobalSetting.cdn_url&.start_with?("https") && is_brotli_req?
path.gsub!("#{GlobalSetting.cdn_url}/assets/", "#{GlobalSetting.cdn_url}/brotli_asset/")
end
"<link rel='preload' href='#{path}' as='script'/>
<script src='#{path}'></script>".html_safe
end

View File

@ -166,7 +166,7 @@ module Jobs
# we don't want to pull images hosted on the CDN (if we use one)
return false if Discourse.asset_host.present? && URI.parse(Discourse.asset_host).hostname == hostname
return false if SiteSetting.s3_cdn_url.present? && URI.parse(SiteSetting.s3_cdn_url).hostname == hostname
return false if SiteSetting.Upload.s3_cdn_url.present? && URI.parse(SiteSetting.Upload.s3_cdn_url).hostname == hostname
# we don't want to pull images hosted on the main domain
return false if URI.parse(Discourse.base_url_no_prefix).hostname == hostname
# check the domains blacklist

View File

@ -7,7 +7,7 @@ module Jobs
base_url = Discourse.store.internal? ? Discourse.store.relative_base_url : Discourse.store.absolute_base_url
s3_hostname = URI.parse(base_url).hostname
s3_cdn_hostname = URI.parse(SiteSetting.s3_cdn_url || "").hostname
s3_cdn_hostname = URI.parse(SiteSetting.Upload.s3_cdn_url || "").hostname
# Any URLs in site settings are fair game
ignore_urls = [

View File

@ -46,7 +46,7 @@ class Backup
def s3
require "s3_helper" unless defined? S3Helper
@s3_helper ||= S3Helper.new(s3_bucket)
@s3_helper ||= S3Helper.new(s3_bucket, '', S3Helper.s3_options(SiteSetting))
end
def upload_to_s3

View File

@ -75,6 +75,21 @@ class GlobalSetting
end
end
def self.use_s3?
(@use_s3 ||=
begin
s3_bucket &&
s3_region && (
s3_use_iam_profile || (s3_access_key_id && s3_secret_access_key)
) ? :true : :false
end) == :true
end
# for testing
def self.reset_s3_cache!
@use_s3 = nil
end
def self.database_config
hash = { "adapter" => "postgresql" }
%w{pool timeout socket host port username password replica_host replica_port}.each do |s|

View File

@ -118,6 +118,43 @@ class SiteSetting < ActiveRecord::Base
def self.attachment_filename_blacklist_regex
@attachment_filename_blacklist_regex ||= Regexp.union(SiteSetting.attachment_filename_blacklist.split("|"))
end
# helpers for getting s3 settings that fallback to global
class Upload
def self.s3_cdn_url
SiteSetting.enable_s3_uploads ? SiteSetting.s3_cdn_url : GlobalSetting.s3_cdn_url
end
def self.s3_region
SiteSetting.enable_s3_uploads ? SiteSetting.s3_region : GlobalSetting.s3_region
end
def self.s3_upload_bucket
SiteSetting.enable_s3_uploads ? SiteSetting.s3_upload_bucket : GlobalSetting.s3_bucket
end
def self.enable_s3_uploads
SiteSetting.enable_s3_uploads || GlobalSetting.use_s3?
end
def self.absolute_base_url
bucket = SiteSetting.enable_s3_uploads ? Discourse.store.s3_bucket_name : GlobalSetting.s3_bucket
# cf. http://docs.aws.amazon.com/general/latest/gr/rande.html#s3_region
if SiteSetting.Upload.s3_region == "us-east-1"
"//#{bucket}.s3.amazonaws.com"
elsif SiteSetting.Upload.s3_region == 'cn-north-1'
"//#{bucket}.s3.cn-north-1.amazonaws.com.cn"
else
"//#{bucket}.s3-#{SiteSetting.Upload.s3_region}.amazonaws.com"
end
end
end
def self.Upload
SiteSetting::Upload
end
end
# == Schema Information

View File

@ -44,8 +44,8 @@ class TopicLinkClick < ActiveRecord::Base
end
end
if SiteSetting.s3_cdn_url.present?
cdn_uri = URI.parse(SiteSetting.s3_cdn_url) rescue nil
if SiteSetting.Upload.s3_cdn_url.present?
cdn_uri = URI.parse(SiteSetting.Upload.s3_cdn_url) rescue nil
if cdn_uri && cdn_uri.hostname == uri.hostname && uri.path.starts_with?(cdn_uri.path)
is_cdn_link = true
path = uri.path[cdn_uri.path.length..-1]

View File

@ -79,7 +79,7 @@ class Upload < ActiveRecord::Base
# we store relative urls, so we need to remove any host/cdn
url = url.sub(Discourse.asset_host, "") if Discourse.asset_host.present?
# when using s3, we need to replace with the absolute base url
url = url.sub(SiteSetting.s3_cdn_url, Discourse.store.absolute_base_url) if SiteSetting.s3_cdn_url.present?
url = url.sub(SiteSetting.Upload.s3_cdn_url, Discourse.store.absolute_base_url) if SiteSetting.Upload.s3_cdn_url.present?
# always try to get the path
uri = URI(url) rescue nil

View File

@ -52,11 +52,11 @@
Discourse.Session.currentProp("safe_mode", <%= normalized_safe_mode.inspect.html_safe %>);
<%- end %>
Discourse.HighlightJSPath = <%= HighlightJs.path.inspect.html_safe %>;
<%- if SiteSetting.enable_s3_uploads %>
<%- if SiteSetting.s3_cdn_url.present? %>
Discourse.S3CDN = '<%= SiteSetting.s3_cdn_url %>';
<%- if SiteSetting.Upload.enable_s3_uploads %>
<%- if SiteSetting.Upload.s3_cdn_url.present? %>
Discourse.S3CDN = '<%= SiteSetting.Upload.s3_cdn_url %>';
<%- end %>
Discourse.S3BaseUrl = '<%= Discourse.store.absolute_base_url %>';
Discourse.S3BaseUrl = '<%= SiteSetting.Upload.absolute_base_url %>';
<%- end %>
})();
</script>

View File

@ -156,3 +156,12 @@ secret_key_base =
# in multi host setups this allows you to have old unicorn instances serve
# newly compiled assets
fallback_assets_path =
# S3 settings used for serving ALL public files
# be sure to configure a CDN as well per cdn_url
s3_bucket =
s3_region =
s3_access_key_id =
s3_secret_access_key =
s3_use_iam_profile = false
s3_cdn_url =

View File

@ -28,10 +28,10 @@ module Autospec
def self.reload(pattern); RELOADERS << pattern; end
def reloaders; RELOADERS; end
# We need to reload the whole app when changing any of these files
reload("spec/rails_helper.rb")
reload(%r{config/.+\.rb})
reload(%r{app/helpers/.+\.rb})
# we are using a simple runner at the moment; the whole idea of using a reloader is no longer needed
watch("spec/rails_helper.rb")
watch(%r{config/.+\.rb})
#reload(%r{app/helpers/.+\.rb})
def failed_specs
specs = []

View File

@ -337,7 +337,7 @@ class CookedPostProcessor
end
end
use_s3_cdn = SiteSetting.enable_s3_uploads && SiteSetting.s3_cdn_url.present?
use_s3_cdn = SiteSetting.Upload.enable_s3_uploads && SiteSetting.Upload.s3_cdn_url.present?
%w{href data-download-href}.each do |selector|
@doc.css("a[#{selector}]").each do |a|

View File

@ -391,7 +391,7 @@ module Discourse
end
def self.store
if SiteSetting.enable_s3_uploads?
if SiteSetting.Upload.enable_s3_uploads
@s3_store_loaded ||= require 'file_store/s3_store'
FileStore::S3Store.new
else

View File

@ -56,22 +56,17 @@ module FileStore
base_hostname = URI.parse(absolute_base_url).hostname
return true if url[base_hostname]
return false if SiteSetting.s3_cdn_url.blank?
cdn_hostname = URI.parse(SiteSetting.s3_cdn_url || "").hostname
return false if SiteSetting.Upload.s3_cdn_url.blank?
cdn_hostname = URI.parse(SiteSetting.Upload.s3_cdn_url || "").hostname
cdn_hostname.presence && url[cdn_hostname]
end
def absolute_base_url
bucket = @s3_helper.s3_bucket_name
def s3_bucket_name
@s3_helper.s3_bucket_name
end
# cf. http://docs.aws.amazon.com/general/latest/gr/rande.html#s3_region
@absolute_base_url ||= if SiteSetting.s3_region == "us-east-1"
"//#{bucket}.s3.amazonaws.com"
elsif SiteSetting.s3_region == 'cn-north-1'
"//#{bucket}.s3.cn-north-1.amazonaws.com.cn"
else
"//#{bucket}.s3-#{SiteSetting.s3_region}.amazonaws.com"
end
def absolute_base_url
@absolute_base_url ||= SiteSetting.Upload.absolute_base_url
end
def external?
@ -88,9 +83,9 @@ module FileStore
end
def cdn_url(url)
return url if SiteSetting.s3_cdn_url.blank?
return url if SiteSetting.Upload.s3_cdn_url.blank?
schema = url[/^(https?:)?\/\//, 1]
url.sub("#{schema}#{absolute_base_url}", SiteSetting.s3_cdn_url)
url.sub("#{schema}#{absolute_base_url}", SiteSetting.Upload.s3_cdn_url)
end
def cache_avatar(avatar, user_id)
@ -104,8 +99,8 @@ module FileStore
end
def s3_bucket
raise Discourse::SiteSettingMissing.new("s3_upload_bucket") if SiteSetting.s3_upload_bucket.blank?
SiteSetting.s3_upload_bucket.downcase
raise Discourse::SiteSettingMissing.new("s3_upload_bucket") if SiteSetting.Upload.s3_upload_bucket.blank?
SiteSetting.Upload.s3_upload_bucket.downcase
end
end
end

View File

@ -170,7 +170,7 @@ class FinalDestination
return false unless @uri && @uri.host
# Whitelisted hosts
return true if hostname_matches?(SiteSetting.s3_cdn_url) ||
return true if hostname_matches?(SiteSetting.Upload.s3_cdn_url) ||
hostname_matches?(GlobalSetting.try(:cdn_url)) ||
hostname_matches?(Discourse.base_url_no_prefix)

View File

@ -8,8 +8,8 @@ module GlobalPath
end
def upload_cdn_path(p)
if SiteSetting.s3_cdn_url.present?
p = p.sub(Discourse.store.absolute_base_url, SiteSetting.s3_cdn_url)
if SiteSetting.Upload.s3_cdn_url.present?
p = p.sub(Discourse.store.absolute_base_url, SiteSetting.Upload.s3_cdn_url)
end
p =~ /^http/ ? p : cdn_path(p)
end

View File

@ -141,9 +141,9 @@ module PrettyText
CDN: Rails.configuration.action_controller.asset_host,
}
if SiteSetting.enable_s3_uploads?
if SiteSetting.s3_cdn_url.present?
paths[:S3CDN] = SiteSetting.s3_cdn_url
if SiteSetting.Upload.enable_s3_uploads
if SiteSetting.Upload.s3_cdn_url.present?
paths[:S3CDN] = SiteSetting.Upload.s3_cdn_url
end
paths[:S3BaseUrl] = Discourse.store.absolute_base_url
end
@ -250,7 +250,7 @@ module PrettyText
add_rel_nofollow_to_user_content(doc)
end
if SiteSetting.enable_s3_uploads && SiteSetting.s3_cdn_url.present?
if SiteSetting.Upload.enable_s3_uploads && SiteSetting.Upload.s3_cdn_url.present?
add_s3_cdn(doc)
end

View File

@ -6,12 +6,12 @@ class S3Helper
attr_reader :s3_bucket_name
def initialize(s3_upload_bucket, tombstone_prefix = '', options = {})
def initialize(s3_bucket_name, tombstone_prefix = '', options = {})
@s3_options = default_s3_options.merge(options)
@s3_bucket_name, @s3_bucket_folder_path = begin
raise Discourse::InvalidParameters.new("s3_bucket") if s3_upload_bucket.blank?
s3_upload_bucket.downcase.split("/".freeze, 2)
raise Discourse::InvalidParameters.new("s3_bucket_name") if s3_bucket_name.blank?
s3_bucket_name.downcase.split("/".freeze, 2)
end
@tombstone_prefix =
@ -20,8 +20,6 @@ class S3Helper
else
tombstone_prefix
end
check_missing_options
end
def upload(file, path, options = {})
@ -46,31 +44,71 @@ class S3Helper
rescue Aws::S3::Errors::NoSuchKey
end
def update_lifecycle(id, days, prefix: nil)
# make sure we have a cors config for assets
# otherwise we will have no fonts
def ensure_cors!
rule = nil
begin
rule = s3_resource.client.get_bucket_cors(
bucket: @s3_bucket_name
).cors_rules&.first
rescue Aws::S3::Errors::NoSuchCORSConfiguration
# no rule
end
unless rule
puts "installing CORS rule"
s3_resource.client.put_bucket_cors(
bucket: @s3_bucket_name,
cors_configuration: {
cors_rules: [{
allowed_headers: ["Authorization"],
allowed_methods: ["GET", "HEAD"],
allowed_origins: ["*"],
max_age_seconds: 3000
}]
}
)
end
end
def update_lifecycle(id, days, prefix: nil, tag: nil)
filter = {}
if prefix
filter[:prefix] = prefix
elsif tag
filter[:tag] = tag
end
# cf. http://docs.aws.amazon.com/AmazonS3/latest/dev/object-lifecycle-mgmt.html
rule = {
id: id,
status: "Enabled",
expiration: { days: days }
expiration: { days: days },
filter: filter
}
if prefix
rule[:prefix] = prefix
end
rules = []
rules = s3_resource.client.get_bucket_lifecycle_configuration(bucket: @s3_bucket_name).rules
begin
rules = s3_resource.client.get_bucket_lifecycle_configuration(bucket: @s3_bucket_name).rules
rescue Aws::S3::Errors::NoSuchLifecycleConfiguration
# skip trying to merge
end
rules.delete_if do |r|
r.id == id
end
rules.map! { |r| r.to_h }
rules << rule
s3_resource.client.put_bucket_lifecycle(bucket: @s3_bucket_name,
lifecycle_configuration: {
s3_resource.client.put_bucket_lifecycle_configuration(
bucket: @s3_bucket_name,
lifecycle_configuration: {
rules: rules
})
end
@ -80,8 +118,8 @@ class S3Helper
update_lifecycle("purge_tombstone", grace_period, prefix: @tombstone_prefix)
end
def list
s3_bucket.objects(prefix: @s3_bucket_folder_path)
def list(prefix = "")
s3_bucket.objects(prefix: @s3_bucket_folder_path.to_s + prefix)
end
def tag_file(key, tags)
@ -99,24 +137,36 @@ class S3Helper
)
end
def self.s3_options(obj)
opts = { region: obj.s3_region }
unless obj.s3_use_iam_profile
opts[:access_key_id] = obj.s3_access_key_id
opts[:secret_access_key] = obj.s3_secret_access_key
end
opts
end
private
def default_s3_options
if SiteSetting.enable_s3_uploads?
options = self.class.s3_options(SiteSetting)
check_missing_site_options
options
elsif GlobalSetting.use_s3?
self.class.s3_options(GlobalSetting)
else
{}
end
end
def get_path_for_s3_upload(path)
path = File.join(@s3_bucket_folder_path, path) if @s3_bucket_folder_path
path
end
def default_s3_options
opts = { region: SiteSetting.s3_region }
unless SiteSetting.s3_use_iam_profile
opts[:access_key_id] = SiteSetting.s3_access_key_id
opts[:secret_access_key] = SiteSetting.s3_secret_access_key
end
opts
end
def s3_resource
Aws::S3::Resource.new(@s3_options)
end
@ -127,10 +177,10 @@ class S3Helper
bucket
end
def check_missing_options
def check_missing_site_options
unless SiteSetting.s3_use_iam_profile
raise SettingMissing.new("access_key_id") if @s3_options[:access_key_id].blank?
raise SettingMissing.new("secret_access_key") if @s3_options[:secret_access_key].blank?
raise SettingMissing.new("access_key_id") if SiteSetting.s3_access_key_id.blank?
raise SettingMissing.new("secret_access_key") if SiteSetting.s3_secret_access_key.blank?
end
end
end

View File

@ -11,15 +11,12 @@ def gzip_s3_path(path)
end
def should_skip?(path)
return true if ENV['FORCE_S3_UPLOADS']
@existing_assets ||= Set.new(helper.list.map(&:key))
@existing_assets.include?('assets/' + path)
return false if ENV['FORCE_S3_UPLOADS']
@existing_assets ||= Set.new(helper.list("assets/").map(&:key))
@existing_assets.include?(path)
end
def upload_asset(helper, path, recurse: true, content_type: nil, fullpath: nil, content_encoding: nil)
fullpath ||= (Rails.root + "public/assets/#{path}").to_s
content_type ||= MiniMime.lookup_by_filename(path).content_type
def upload(path, remote_path, content_type, content_encoding = nil)
options = {
cache_control: 'max-age=31556952, public, immutable',
@ -32,102 +29,91 @@ def upload_asset(helper, path, recurse: true, content_type: nil, fullpath: nil,
options[:content_encoding] = content_encoding
end
if should_skip?(path)
puts "Skipping: #{path}"
if should_skip?(remote_path)
puts "Skipping: #{remote_path}"
else
puts "Uploading: #{path}"
helper.upload(fullpath, path, options)
puts "Uploading: #{remote_path}"
helper.upload(path, remote_path, options)
end
end
if recurse
if File.exist?(fullpath + ".br")
brotli_path = brotli_s3_path(path)
upload_asset(helper, brotli_path,
fullpath: fullpath + ".br",
recurse: false,
content_type: content_type,
content_encoding: 'br'
)
end
if File.exist?(fullpath + ".gz")
gzip_path = gzip_s3_path(path)
upload_asset(helper, gzip_path,
fullpath: fullpath + ".gz",
recurse: false,
content_type: content_type,
content_encoding: 'gzip'
)
end
if File.exist?(fullpath + ".map")
upload_asset(helper, path + ".map", recurse: false, content_type: 'application/json')
end
end
def helper
@helper ||= S3Helper.new(GlobalSetting.s3_bucket.downcase, '', S3Helper.s3_options(GlobalSetting))
end
def assets
cached = Rails.application.assets&.cached
manifest = Sprockets::Manifest.new(cached, Rails.root + 'public/assets', Rails.application.config.assets.manifest)
raise Discourse::SiteSettingMissing.new("s3_upload_bucket") if SiteSetting.s3_upload_bucket.blank?
manifest.assets
end
results = []
def helper
@helper ||= S3Helper.new(SiteSetting.s3_upload_bucket.downcase + '/assets')
end
def in_manifest
found = []
assets.each do |_, path|
manifest.assets.each do |_, path|
fullpath = (Rails.root + "public/assets/#{path}").to_s
content_type = MiniMime.lookup_by_filename(fullpath).content_type
asset_path = "assets/#{path}"
found << asset_path
results << [fullpath, asset_path, content_type]
if File.exist?(fullpath + '.br')
found << brotli_s3_path(asset_path)
results << [fullpath + '.br', brotli_s3_path(asset_path), content_type, 'br']
end
if File.exist?(fullpath + '.gz')
found << gzip_s3_path(asset_path)
results << [fullpath + '.gz', gzip_s3_path(asset_path), content_type, 'gzip']
end
if File.exist?(fullpath + '.map')
found << asset_path + '.map'
results << [fullpath + '.map', asset_path + '.map', 'application/json']
end
end
Set.new(found)
results
end
def asset_paths
Set.new(assets.map { |_, asset_path| asset_path })
end
def ensure_s3_configured!
unless GlobalSetting.use_s3?
STDERR.puts "ERROR: Ensure S3 is configured in config/discourse.conf of environment vars"
exit 1
end
end
task 's3:upload_assets' => :environment do
assets.each do |name, fingerprint|
upload_asset(helper, fingerprint)
ensure_s3_configured!
helper.ensure_cors!
assets.each do |asset|
upload(*asset)
end
end
task 's3:expire_missing_assets' => :environment do
keep = in_manifest
ensure_s3_configured!
count = 0
keep = 0
in_manifest = asset_paths
puts "Ensuring AWS assets are tagged correctly for removal"
helper.list.each do |f|
if keep.include?(f.key)
helper.list('assets/').each do |f|
if !in_manifest.include?(f.key)
helper.tag_file(f.key, old: true)
count += 1
else
# ensure we do not delete this by mistake
helper.tag_file(f.key, {})
keep += 1
end
end
puts "#{count} assets were flagged for removal in 10 days"
puts "#{count} assets were flagged for removal in 10 days (#{keep} assets will be retained)"
puts "Ensuring AWS rule exists for purging old assets"
#helper.update_lifecycle("delete_old_assets", 10, prefix: 'old=true')
puts "Waiting on https://github.com/aws/aws-sdk-ruby/issues/1623"
helper.update_lifecycle("delete_old_assets", 10, tag: { key: 'old', value: 'true' })
end

View File

@ -114,13 +114,13 @@ def migrate_from_s3
require "file_store/s3_store"
# make sure S3 is disabled
if SiteSetting.enable_s3_uploads
if SiteSetting.Uploads.enable_s3_uploads
puts "You must disable S3 uploads before running that task."
return
end
# make sure S3 bucket is set
if SiteSetting.s3_upload_bucket.blank?
if SiteSetting.Upload.s3_upload_bucket.blank?
puts "The S3 upload bucket must be set before running that task."
return
end
@ -188,14 +188,14 @@ end
def migrate_to_s3
# make sure s3 is enabled
if !SiteSetting.enable_s3_uploads
if !SiteSetting.Upload.enable_s3_uploads
puts "You must enable s3 uploads before running that task"
return
end
db = RailsMultisite::ConnectionManagement.current_db
puts "Migrating uploads to S3 (#{SiteSetting.s3_upload_bucket}) for '#{db}'..."
puts "Migrating uploads to S3 (#{SiteSetting.Upload.s3_upload_bucket}) for '#{db}'..."
# will throw an exception if the bucket is missing
s3 = FileStore::S3Store.new

View File

@ -16,6 +16,7 @@ describe FileStore::S3Store do
SiteSetting.s3_upload_bucket = "s3-upload-bucket"
SiteSetting.s3_access_key_id = "s3-access-key-id"
SiteSetting.s3_secret_access_key = "s3-secret-access-key"
SiteSetting.enable_s3_uploads = true
end
shared_context 's3 helpers' do

View File

@ -292,6 +292,7 @@ describe FinalDestination do
end
it "returns true for the S3 CDN url" do
SiteSetting.enable_s3_uploads = true
SiteSetting.s3_cdn_url = "https://s3.example.com"
expect(fd("https://s3.example.com/some/thing").is_dest_valid?).to eq(true)
end

View File

@ -623,29 +623,56 @@ describe PrettyText do
expect(PrettyText.cook(raw)).to eq(html.strip)
end
it 'can substitute s3 cdn correctly' do
SiteSetting.enable_s3_uploads = true
SiteSetting.s3_access_key_id = "XXX"
SiteSetting.s3_secret_access_key = "XXX"
SiteSetting.s3_upload_bucket = "test"
SiteSetting.s3_cdn_url = "https://awesome.cdn"
describe 's3_cdn' do
# add extra img tag to ensure it does not blow up
raw = <<~HTML
<img>
<img src='https:#{Discourse.store.absolute_base_url}/original/9/9/99c9384b8b6d87f8509f8395571bc7512ca3cad1.jpg'>
<img src='http:#{Discourse.store.absolute_base_url}/original/9/9/99c9384b8b6d87f8509f8395571bc7512ca3cad1.jpg'>
<img src='#{Discourse.store.absolute_base_url}/original/9/9/99c9384b8b6d87f8509f8395571bc7512ca3cad1.jpg'>
HTML
def test_s3_cdn
# add extra img tag to ensure it does not blow up
raw = <<~HTML
<img>
<img src='https:#{Discourse.store.absolute_base_url}/original/9/9/99c9384b8b6d87f8509f8395571bc7512ca3cad1.jpg'>
<img src='http:#{Discourse.store.absolute_base_url}/original/9/9/99c9384b8b6d87f8509f8395571bc7512ca3cad1.jpg'>
<img src='#{Discourse.store.absolute_base_url}/original/9/9/99c9384b8b6d87f8509f8395571bc7512ca3cad1.jpg'>
HTML
html = <<~HTML
<p><img><br>
<img src="https://awesome.cdn/original/9/9/99c9384b8b6d87f8509f8395571bc7512ca3cad1.jpg"><br>
<img src="https://awesome.cdn/original/9/9/99c9384b8b6d87f8509f8395571bc7512ca3cad1.jpg"><br>
<img src="https://awesome.cdn/original/9/9/99c9384b8b6d87f8509f8395571bc7512ca3cad1.jpg"></p>
HTML
html = <<~HTML
<p><img><br>
<img src="https://awesome.cdn/original/9/9/99c9384b8b6d87f8509f8395571bc7512ca3cad1.jpg"><br>
<img src="https://awesome.cdn/original/9/9/99c9384b8b6d87f8509f8395571bc7512ca3cad1.jpg"><br>
<img src="https://awesome.cdn/original/9/9/99c9384b8b6d87f8509f8395571bc7512ca3cad1.jpg"></p>
HTML
expect(PrettyText.cook(raw)).to eq(html.strip)
expect(PrettyText.cook(raw)).to eq(html.strip)
end
before do
GlobalSetting.reset_s3_cache!
end
after do
GlobalSetting.reset_s3_cache!
end
it 'can substitute s3 cdn when added via global setting' do
global_setting :s3_access_key_id, 'XXX'
global_setting :s3_secret_access_key, 'XXX'
global_setting :s3_bucket, 'XXX'
global_setting :s3_region, 'XXX'
global_setting :s3_cdn_url, 'https://awesome.cdn'
test_s3_cdn
end
it 'can substitute s3 cdn correctly' do
SiteSetting.s3_access_key_id = "XXX"
SiteSetting.s3_secret_access_key = "XXX"
SiteSetting.s3_upload_bucket = "test"
SiteSetting.s3_cdn_url = "https://awesome.cdn"
SiteSetting.enable_s3_uploads = true
test_s3_cdn
end
end
describe "emoji" do

View File

@ -20,8 +20,13 @@ describe Stylesheet::Importer do
end
it "applies S3 CDN to background category images" do
SiteSetting.s3_use_iam_profile = true
SiteSetting.s3_upload_bucket = 'test'
SiteSetting.s3_region = 'ap-southeast-2'
SiteSetting.s3_cdn_url = "https://s3.cdn"
SiteSetting.enable_s3_uploads = true
background = Fabricate(:upload_s3)
category = Fabricate(:category, uploaded_background: background)

View File

@ -2,6 +2,50 @@ require 'rails_helper'
describe ApplicationHelper do
describe "preload_script" do
it "provides brotli links to brotli cdn" do
set_cdn_url "https://awesome.com"
set_env "COMPRESS_BROTLI", "1"
helper.request.env["HTTP_ACCEPT_ENCODING"] = 'br'
link = helper.preload_script('application')
expect(link).to eq("<link rel='preload' href='https://awesome.com/brotli_asset/application.js' as='script'/>\n<script src='https://awesome.com/brotli_asset/application.js'></script>")
end
context "with s3 CDN" do
before do
global_setting :s3_bucket, 'test_bucket'
global_setting :s3_region, 'ap-australia'
global_setting :s3_access_key_id, '123'
global_setting :s3_secret_access_key, '123'
global_setting :s3_cdn_url, 'https://s3cdn.com'
set_env "COMPRESS_BROTLI", "1"
end
it "returns magic brotli mangling for brotli requests" do
helper.request.env["HTTP_ACCEPT_ENCODING"] = 'br'
link = helper.preload_script('application')
expect(link).to eq("<link rel='preload' href='https://s3cdn.com/assets/application.br.js' as='script'/>\n<script src='https://s3cdn.com/assets/application.br.js'></script>")
end
it "gives s3 cdn if asset host is not set" do
link = helper.preload_script('application')
expect(link).to eq("<link rel='preload' href='https://s3cdn.com/assets/application.js' as='script'/>\n<script src='https://s3cdn.com/assets/application.js'></script>")
end
it "gives s3 cdn even if asset host is set" do
set_cdn_url "https://awesome.com"
link = helper.preload_script('application')
expect(link).to eq("<link rel='preload' href='https://s3cdn.com/assets/application.js' as='script'/>\n<script src='https://s3cdn.com/assets/application.js'></script>")
end
end
end
describe "escape_unicode" do
it "encodes tags" do
expect(helper.escape_unicode("<tag>")).to eq("\u003ctag>")

View File

@ -9,6 +9,21 @@ end
describe GlobalSetting do
describe '.use_s3_assets?' do
it 'returns false by default' do
expect(GlobalSetting.use_s3?).to eq(false)
end
it 'returns true once set' do
global_setting :s3_bucket, 'test_bucket'
global_setting :s3_region, 'ap-australia'
global_setting :s3_access_key_id, '123'
global_setting :s3_secret_access_key, '123'
expect(GlobalSetting.use_s3?).to eq(true)
end
end
describe '.safe_secret_key_base' do
it 'sets redis token if it is somehow flushed after 30 seconds' do

View File

@ -145,6 +145,9 @@ describe TopicLinkClick do
it "works with s3 urls" do
SiteSetting.s3_cdn_url = "https://discourse-s3-cdn.global.ssl.fastly.net"
SiteSetting.s3_access_key_id = 'X'
SiteSetting.s3_secret_access_key = 'X'
SiteSetting.enable_s3_uploads = true
post = Fabricate(:post, topic: @topic, raw: "[test](//test.localhost/uploads/default/my-test-link)")
TopicLink.extract_from(post)

View File

@ -124,6 +124,11 @@ RSpec.configure do |config|
Sidekiq::Worker.clear_all
I18n.locale = :en
if $test_cleanup_callbacks
$test_cleanup_callbacks.reverse_each(&:call)
$test_cleanup_callbacks = nil
end
end
class TestCurrentUserProvider < Auth::DefaultCurrentUserProvider
@ -146,6 +151,45 @@ class TrackTimeStub
end
end
def before_next_spec(&callback)
($test_cleanup_callbacks ||= []) << callback
end
def global_setting(name, value)
GlobalSetting.reset_s3_cache!
GlobalSetting.stubs(name).returns(value)
before_next_spec do
GlobalSetting.reset_s3_cache!
end
end
def set_env(var, value)
old = ENV.fetch var, :missing
ENV[var] = value
before_next_spec do
if old == :missing
ENV.delete var
else
ENV[var] = old
end
end
end
def set_cdn_url(cdn_url)
global_setting :cdn_url, cdn_url
Rails.configuration.action_controller.asset_host = cdn_url
ActionController::Base.asset_host = cdn_url
before_next_spec do
Rails.configuration.action_controller.asset_host = nil
ActionController::Base.asset_host = nil
end
end
def freeze_time(now = Time.now)
datetime = DateTime.parse(now.to_s)
time = Time.parse(now.to_s)