DEV: Apply syntax_tree formatting to lib/*
This commit is contained in:
parent b0fda61a8e
commit 6417173082
.streerc (1 line changed)
@@ -1,4 +1,3 @@
 --print-width=100
 --plugins=plugin/trailing_comma,disable_ternary
 --ignore-files=app/*
---ignore-files=lib/*
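With `--ignore-files=lib/*` removed, the `stree` CLI (which reads its flags from `.streerc`) now formats `lib/` with a 100-character print width plus the `plugin/trailing_comma` and `disable_ternary` plugins — which is exactly what the hunks below show: single quotes become double quotes, short multi-line literals collapse, and trailing commas appear. A minimal sketch of that rewrite using the gem's public `SyntaxTree.format` API (the sample hash is taken from the AdminConfirmation hunk below; the `.streerc` plugins are CLI-level options and are not loaded here):

require "syntax_tree"

# A multi-line hash like the one removed in the AdminConfirmation hunk.
source = <<~RUBY
  payload = {
    target_user_id: @target_user.id,
    performed_by: @performed_by.id
  }
RUBY

# SyntaxTree.format parses and re-prints the source at the given maximum
# width; at width 100 the short hash collapses onto a single line, matching
# the "+" side of the hunk below.
puts SyntaxTree.format(source, 100)
# payload = { target_user_id: @target_user.id, performed_by: @performed_by.id }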
@@ -9,9 +9,7 @@ class ActionDispatch::Session::DiscourseCookieStore < ActionDispatch::Session::C
 
   def set_cookie(request, session_id, cookie)
     if Hash === cookie
-      if SiteSetting.force_https
-        cookie[:secure] = true
-      end
+      cookie[:secure] = true if SiteSetting.force_https
       unless SiteSetting.same_site_cookies == "Disabled"
         cookie[:same_site] = SiteSetting.same_site_cookies
       end
@@ -17,10 +17,7 @@ class AdminConfirmation
     @token = SecureRandom.hex
     Discourse.redis.setex("admin-confirmation:#{@target_user.id}", 3.hours.to_i, @token)
 
-    payload = {
-      target_user_id: @target_user.id,
-      performed_by: @performed_by.id
-    }
+    payload = { target_user_id: @target_user.id, performed_by: @performed_by.id }
    Discourse.redis.setex("admin-confirmation-token:#{@token}", 3.hours.to_i, payload.to_json)
 
     Jobs.enqueue(
@@ -28,7 +25,7 @@ class AdminConfirmation
       to_address: @performed_by.email,
       target_email: @target_user.email,
       target_username: @target_user.username,
-      token: @token
+      token: @token,
     )
   end
 
@@ -51,8 +48,8 @@ class AdminConfirmation
     return nil unless json
 
     parsed = JSON.parse(json)
-    target_user = User.find(parsed['target_user_id'].to_i)
-    performed_by = User.find(parsed['performed_by'].to_i)
+    target_user = User.find(parsed["target_user_id"].to_i)
+    performed_by = User.find(parsed["performed_by"].to_i)
 
     ac = AdminConfirmation.new(target_user, performed_by)
     ac.token = token
@@ -1,7 +1,6 @@
 # frozen_string_literal: true
 
 class AdminConstraint
-
   def initialize(options = {})
     @require_master = options[:require_master]
   end
@@ -19,5 +18,4 @@ class AdminConstraint
   def custom_admin_check(request)
     true
   end
-
 end
@@ -1,7 +1,6 @@
 # frozen_string_literal: true
 
 class AdminUserIndexQuery
-
   def initialize(params = {}, klass = User, trust_levels = TrustLevel.levels)
     @params = params
     @query = initialize_query_with_order(klass)
@@ -11,24 +10,22 @@ class AdminUserIndexQuery
   attr_reader :params, :trust_levels
 
   SORTABLE_MAPPING = {
-    'created' => 'created_at',
-    'last_emailed' => "COALESCE(last_emailed_at, to_date('1970-01-01', 'YYYY-MM-DD'))",
-    'seen' => "COALESCE(last_seen_at, to_date('1970-01-01', 'YYYY-MM-DD'))",
-    'username' => 'username',
-    'email' => 'email',
-    'trust_level' => 'trust_level',
-    'days_visited' => 'user_stats.days_visited',
-    'posts_read' => 'user_stats.posts_read_count',
-    'topics_viewed' => 'user_stats.topics_entered',
-    'posts' => 'user_stats.post_count',
-    'read_time' => 'user_stats.time_read'
+    "created" => "created_at",
+    "last_emailed" => "COALESCE(last_emailed_at, to_date('1970-01-01', 'YYYY-MM-DD'))",
+    "seen" => "COALESCE(last_seen_at, to_date('1970-01-01', 'YYYY-MM-DD'))",
+    "username" => "username",
+    "email" => "email",
+    "trust_level" => "trust_level",
+    "days_visited" => "user_stats.days_visited",
+    "posts_read" => "user_stats.posts_read_count",
+    "topics_viewed" => "user_stats.topics_entered",
+    "posts" => "user_stats.post_count",
+    "read_time" => "user_stats.time_read",
   }
 
   def find_users(limit = 100)
     page = params[:page].to_i - 1
-    if page < 0
-      page = 0
-    end
+    page = 0 if page < 0
     find_users_query.limit(limit).offset(page * limit)
   end
 
@@ -37,7 +34,13 @@ class AdminUserIndexQuery
   end
 
   def custom_direction
-    Discourse.deprecate(":ascending is deprecated please use :asc instead", output_in_test: true, drop_from: '2.9.0') if params[:ascending]
+    if params[:ascending]
+      Discourse.deprecate(
+        ":ascending is deprecated please use :asc instead",
+        output_in_test: true,
+        drop_from: "2.9.0",
+      )
+    end
     asc = params[:asc] || params[:ascending]
     asc.present? && asc ? "ASC" : "DESC"
   end
@@ -47,7 +50,7 @@ class AdminUserIndexQuery
 
     custom_order = params[:order]
     if custom_order.present? &&
-         without_dir = SORTABLE_MAPPING[custom_order.downcase.sub(/ (asc|desc)$/, '')]
+         without_dir = SORTABLE_MAPPING[custom_order.downcase.sub(/ (asc|desc)$/, "")]
       order << "#{without_dir} #{custom_direction}"
     end
 
@@ -61,13 +64,9 @@ class AdminUserIndexQuery
       order << "users.username"
     end
 
-    query = klass
-      .includes(:totps)
-      .order(order.reject(&:blank?).join(","))
+    query = klass.includes(:totps).order(order.reject(&:blank?).join(","))
 
-    unless params[:stats].present? && params[:stats] == false
-      query = query.includes(:user_stat)
-    end
+    query = query.includes(:user_stat) unless params[:stats].present? && params[:stats] == false
 
     query = query.joins(:primary_email) if params[:show_emails] == "true"
 
@@ -77,32 +76,44 @@ class AdminUserIndexQuery
   def filter_by_trust
     levels = trust_levels.map { |key, _| key.to_s }
     if levels.include?(params[:query])
-      @query.where('trust_level = ?', trust_levels[params[:query].to_sym])
+      @query.where("trust_level = ?", trust_levels[params[:query].to_sym])
     end
   end
 
   def filter_by_query_classification
     case params[:query]
-    when 'staff' then @query.where("admin or moderator")
-    when 'admins' then @query.where(admin: true)
-    when 'moderators' then @query.where(moderator: true)
-    when 'silenced' then @query.silenced
-    when 'suspended' then @query.suspended
-    when 'pending' then @query.not_suspended.where(approved: false, active: true)
-    when 'staged' then @query.where(staged: true)
+    when "staff"
+      @query.where("admin or moderator")
+    when "admins"
+      @query.where(admin: true)
+    when "moderators"
+      @query.where(moderator: true)
+    when "silenced"
+      @query.silenced
+    when "suspended"
+      @query.suspended
+    when "pending"
+      @query.not_suspended.where(approved: false, active: true)
+    when "staged"
+      @query.where(staged: true)
    end
  end
 
   def filter_by_search
     if params[:email].present?
-      return @query.joins(:primary_email).where('user_emails.email = ?', params[:email].downcase)
+      return @query.joins(:primary_email).where("user_emails.email = ?", params[:email].downcase)
     end
 
     filter = params[:filter]
     if filter.present?
       filter = filter.strip
-      if ip = IPAddr.new(filter) rescue nil
-        @query.where('ip_address <<= :ip OR registration_ip_address <<= :ip', ip: ip.to_cidr_s)
+      if ip =
+           begin
+             IPAddr.new(filter)
+           rescue StandardError
+             nil
+           end
+        @query.where("ip_address <<= :ip OR registration_ip_address <<= :ip", ip: ip.to_cidr_s)
       else
         @query.filter_by_username_or_email(filter)
       end
@@ -111,14 +122,12 @@ class AdminUserIndexQuery
 
   def filter_by_ip
     if params[:ip].present?
-      @query.where('ip_address = :ip OR registration_ip_address = :ip', ip: params[:ip].strip)
+      @query.where("ip_address = :ip OR registration_ip_address = :ip", ip: params[:ip].strip)
     end
   end
 
   def filter_exclude
-    if params[:exclude].present?
-      @query.where('users.id != ?', params[:exclude])
-    end
+    @query.where("users.id != ?", params[:exclude]) if params[:exclude].present?
   end
 
   # this might not be needed in rails 4 ?
@@ -134,5 +143,4 @@ class AdminUserIndexQuery
     append filter_by_search
     @query
   end
-
 end
@@ -1,7 +1,6 @@
 # frozen_string_literal: true
 
 module AgeWords
-
   def self.age_words(secs)
     if secs.blank?
       "—"
@@ -10,5 +9,4 @@ module AgeWords
       FreedomPatches::Rails4.distance_of_time_in_words(now, now + secs)
     end
   end
-
 end
@@ -11,22 +11,19 @@ class Archetype
   end
 
   def attributes
-    {
-      id: @id,
-      options: @options
-    }
+    { id: @id, options: @options }
   end
 
   def self.default
-    'regular'
+    "regular"
   end
 
   def self.private_message
-    'private_message'
+    "private_message"
   end
 
   def self.banner
-    'banner'
+    "banner"
   end
 
   def self.list
@@ -40,8 +37,7 @@ class Archetype
   end
 
   # default archetypes
-  register 'regular'
-  register 'private_message'
-  register 'banner'
-
+  register "regular"
+  register "private_message"
+  register "banner"
 end
lib/auth.rb (21 lines changed)
@@ -1,13 +1,14 @@
 # frozen_string_literal: true
 
-module Auth; end
+module Auth
+end
 
-require 'auth/auth_provider'
-require 'auth/result'
-require 'auth/authenticator'
-require 'auth/managed_authenticator'
-require 'auth/facebook_authenticator'
-require 'auth/github_authenticator'
-require 'auth/twitter_authenticator'
-require 'auth/google_oauth2_authenticator'
-require 'auth/discord_authenticator'
+require "auth/auth_provider"
+require "auth/result"
+require "auth/authenticator"
+require "auth/managed_authenticator"
+require "auth/facebook_authenticator"
+require "auth/github_authenticator"
+require "auth/twitter_authenticator"
+require "auth/google_oauth2_authenticator"
+require "auth/discord_authenticator"
@@ -8,32 +8,60 @@ class Auth::AuthProvider
   end
 
   def self.auth_attributes
-    [:authenticator, :pretty_name, :title, :message, :frame_width, :frame_height,
-     :pretty_name_setting, :title_setting, :enabled_setting, :full_screen_login, :full_screen_login_setting,
-     :custom_url, :background_color, :icon]
+    %i[
+      authenticator
+      pretty_name
+      title
+      message
+      frame_width
+      frame_height
+      pretty_name_setting
+      title_setting
+      enabled_setting
+      full_screen_login
+      full_screen_login_setting
+      custom_url
+      background_color
+      icon
+    ]
   end
 
   attr_accessor(*auth_attributes)
 
   def enabled_setting=(val)
-    Discourse.deprecate("(#{authenticator.name}) enabled_setting is deprecated. Please define authenticator.enabled? instead", drop_from: '2.9.0')
+    Discourse.deprecate(
+      "(#{authenticator.name}) enabled_setting is deprecated. Please define authenticator.enabled? instead",
+      drop_from: "2.9.0",
+    )
     @enabled_setting = val
   end
 
   def background_color=(val)
-    Discourse.deprecate("(#{authenticator.name}) background_color is no longer functional. Please use CSS instead", drop_from: '2.9.0')
+    Discourse.deprecate(
+      "(#{authenticator.name}) background_color is no longer functional. Please use CSS instead",
+      drop_from: "2.9.0",
+    )
   end
 
   def full_screen_login=(val)
-    Discourse.deprecate("(#{authenticator.name}) full_screen_login is now forced. The full_screen_login parameter can be removed from the auth_provider.", drop_from: '2.9.0')
+    Discourse.deprecate(
+      "(#{authenticator.name}) full_screen_login is now forced. The full_screen_login parameter can be removed from the auth_provider.",
+      drop_from: "2.9.0",
+    )
   end
 
   def full_screen_login_setting=(val)
-    Discourse.deprecate("(#{authenticator.name}) full_screen_login is now forced. The full_screen_login_setting parameter can be removed from the auth_provider.", drop_from: '2.9.0')
+    Discourse.deprecate(
+      "(#{authenticator.name}) full_screen_login is now forced. The full_screen_login_setting parameter can be removed from the auth_provider.",
+      drop_from: "2.9.0",
+    )
   end
 
   def message=(val)
-    Discourse.deprecate("(#{authenticator.name}) message is no longer used because all logins are full screen. It should be removed from the auth_provider", drop_from: '2.9.0')
+    Discourse.deprecate(
+      "(#{authenticator.name}) message is no longer used because all logins are full screen. It should be removed from the auth_provider",
+      drop_from: "2.9.0",
+    )
   end
 
   def name
@@ -47,5 +75,4 @@ class Auth::AuthProvider
   def can_revoke
     authenticator.can_revoke?
   end
-
 end
@@ -1,8 +1,8 @@
 # frozen_string_literal: true
 
-module Auth; end
+module Auth
+end
 class Auth::CurrentUserProvider
-
   # do all current user initialization here
   def initialize(env)
     raise NotImplementedError
@@ -1,5 +1,5 @@
 # frozen_string_literal: true
-require_relative '../route_matcher'
+require_relative "../route_matcher"
 
 # You may have seen references to v0 and v1 of our auth cookie in the codebase
 # and you're not sure how they differ, so here is an explanation:
@@ -23,7 +23,6 @@ require_relative '../route_matcher'
 # We'll drop support for v0 after Discourse 2.9 is released.
 
 class Auth::DefaultCurrentUserProvider
-
   CURRENT_USER_KEY ||= "_DISCOURSE_CURRENT_USER"
   USER_TOKEN_KEY ||= "_DISCOURSE_USER_TOKEN"
   API_KEY ||= "api_key"
@@ -37,7 +36,7 @@ class Auth::DefaultCurrentUserProvider
   USER_API_CLIENT_ID ||= "HTTP_USER_API_CLIENT_ID"
   API_KEY_ENV ||= "_DISCOURSE_API"
   USER_API_KEY_ENV ||= "_DISCOURSE_USER_API"
-  TOKEN_COOKIE ||= ENV['DISCOURSE_TOKEN_COOKIE'] || "_t"
+  TOKEN_COOKIE ||= ENV["DISCOURSE_TOKEN_COOKIE"] || "_t"
   PATH_INFO ||= "PATH_INFO"
   COOKIE_ATTEMPTS_PER_MIN ||= 10
   BAD_TOKEN ||= "_DISCOURSE_BAD_TOKEN"
@@ -59,30 +58,20 @@ class Auth::DefaultCurrentUserProvider
         "badges#show",
         "tags#tag_feed",
         "tags#show",
-        *[:latest, :unread, :new, :read, :posted, :bookmarks].map { |f| "list##{f}_feed" },
-        *[:all, :yearly, :quarterly, :monthly, :weekly, :daily].map { |p| "list#top_#{p}_feed" },
-        *[:latest, :unread, :new, :read, :posted, :bookmarks].map { |f| "tags#show_#{f}" }
+        *%i[latest unread new read posted bookmarks].map { |f| "list##{f}_feed" },
+        *%i[all yearly quarterly monthly weekly daily].map { |p| "list#top_#{p}_feed" },
+        *%i[latest unread new read posted bookmarks].map { |f| "tags#show_#{f}" },
       ],
-      formats: :rss
-    ),
-    RouteMatcher.new(
-      methods: :get,
-      actions: "users#bookmarks",
-      formats: :ics
-    ),
-    RouteMatcher.new(
-      methods: :post,
-      actions: "admin/email#handle_mail",
-      formats: nil
+      formats: :rss,
     ),
+    RouteMatcher.new(methods: :get, actions: "users#bookmarks", formats: :ics),
+    RouteMatcher.new(methods: :post, actions: "admin/email#handle_mail", formats: nil),
   ]
 
   def self.find_v0_auth_cookie(request)
     cookie = request.cookies[TOKEN_COOKIE]
 
-    if cookie&.valid_encoding? && cookie.present? && cookie.size == TOKEN_SIZE
-      cookie
-    end
+    cookie if cookie&.valid_encoding? && cookie.present? && cookie.size == TOKEN_SIZE
   end
 
   def self.find_v1_auth_cookie(env)
@@ -111,12 +100,10 @@ class Auth::DefaultCurrentUserProvider
     return @env[CURRENT_USER_KEY] if @env.key?(CURRENT_USER_KEY)
 
     # bypass if we have the shared session header
-    if shared_key = @env['HTTP_X_SHARED_SESSION_KEY']
+    if shared_key = @env["HTTP_X_SHARED_SESSION_KEY"]
       uid = Discourse.redis.get("shared_session_key_#{shared_key}")
       user = nil
-      if uid
-        user = User.find_by(id: uid.to_i)
-      end
+      user = User.find_by(id: uid.to_i) if uid
       @env[CURRENT_USER_KEY] = user
       return user
     end
@@ -130,28 +117,27 @@ class Auth::DefaultCurrentUserProvider
       user_api_key ||= request[PARAMETER_USER_API_KEY]
     end
 
-    if !@env.blank? && request[API_KEY] && api_parameter_allowed?
-      api_key ||= request[API_KEY]
-    end
+    api_key ||= request[API_KEY] if !@env.blank? && request[API_KEY] && api_parameter_allowed?
 
     auth_token = find_auth_token
     current_user = nil
 
     if auth_token
-      limiter = RateLimiter.new(nil, "cookie_auth_#{request.ip}", COOKIE_ATTEMPTS_PER_MIN , 60)
+      limiter = RateLimiter.new(nil, "cookie_auth_#{request.ip}", COOKIE_ATTEMPTS_PER_MIN, 60)
 
       if limiter.can_perform?
-        @env[USER_TOKEN_KEY] = @user_token = begin
-          UserAuthToken.lookup(
-            auth_token,
-            seen: true,
-            user_agent: @env['HTTP_USER_AGENT'],
-            path: @env['REQUEST_PATH'],
-            client_ip: @request.ip
-          )
-        rescue ActiveRecord::ReadOnlyError
-          nil
-        end
+        @env[USER_TOKEN_KEY] = @user_token =
+          begin
+            UserAuthToken.lookup(
+              auth_token,
+              seen: true,
+              user_agent: @env["HTTP_USER_AGENT"],
+              path: @env["REQUEST_PATH"],
+              client_ip: @request.ip,
+            )
+          rescue ActiveRecord::ReadOnlyError
+            nil
          end
 
         current_user = @user_token.try(:user)
      end
@@ -161,14 +147,10 @@ class Auth::DefaultCurrentUserProvider
         begin
           limiter.performed!
         rescue RateLimiter::LimitExceeded
-          raise Discourse::InvalidAccess.new(
-            'Invalid Access',
-            nil,
-            delete_cookie: TOKEN_COOKIE
-          )
+          raise Discourse::InvalidAccess.new("Invalid Access", nil, delete_cookie: TOKEN_COOKIE)
         end
       end
-    elsif @env['HTTP_DISCOURSE_LOGGED_IN']
+    elsif @env["HTTP_DISCOURSE_LOGGED_IN"]
       @env[BAD_TOKEN] = true
     end
 
@@ -177,10 +159,10 @@ class Auth::DefaultCurrentUserProvider
       current_user = lookup_api_user(api_key, request)
       if !current_user
         raise Discourse::InvalidAccess.new(
-          I18n.t('invalid_api_credentials'),
-          nil,
-          custom_message: "invalid_api_credentials"
-        )
+                I18n.t("invalid_api_credentials"),
+                nil,
+                custom_message: "invalid_api_credentials",
+              )
       end
       raise Discourse::InvalidAccess if current_user.suspended? || !current_user.active
       admin_api_key_limiter.performed! if !Rails.env.profile?
@@ -191,12 +173,13 @@ class Auth::DefaultCurrentUserProvider
     if user_api_key
       @hashed_user_api_key = ApiKey.hash_key(user_api_key)
 
-      user_api_key_obj = UserApiKey
-        .active
-        .joins(:user)
-        .where(key_hash: @hashed_user_api_key)
-        .includes(:user, :scopes)
-        .first
+      user_api_key_obj =
+        UserApiKey
+          .active
+          .joins(:user)
+          .where(key_hash: @hashed_user_api_key)
+          .includes(:user, :scopes)
+          .first
 
       raise Discourse::InvalidAccess unless user_api_key_obj
 
@@ -208,18 +191,14 @@ class Auth::DefaultCurrentUserProvider
       current_user = user_api_key_obj.user
       raise Discourse::InvalidAccess if current_user.suspended? || !current_user.active
 
-      if can_write?
-        user_api_key_obj.update_last_used(@env[USER_API_CLIENT_ID])
-      end
+      user_api_key_obj.update_last_used(@env[USER_API_CLIENT_ID]) if can_write?
 
       @env[USER_API_KEY_ENV] = true
     end
 
     # keep this rule here as a safeguard
     # under no conditions to suspended or inactive accounts get current_user
-    if current_user && (current_user.suspended? || !current_user.active)
-      current_user = nil
-    end
+    current_user = nil if current_user && (current_user.suspended? || !current_user.active)
 
     if current_user && should_update_last_seen?
       ip = request.ip
@@ -247,31 +226,40 @@ class Auth::DefaultCurrentUserProvider
     if !is_user_api? && @user_token && @user_token.user == user
       rotated_at = @user_token.rotated_at
 
-      needs_rotation = @user_token.auth_token_seen ? rotated_at < UserAuthToken::ROTATE_TIME.ago : rotated_at < UserAuthToken::URGENT_ROTATE_TIME.ago
+      needs_rotation =
+        (
+          if @user_token.auth_token_seen
+            rotated_at < UserAuthToken::ROTATE_TIME.ago
+          else
+            rotated_at < UserAuthToken::URGENT_ROTATE_TIME.ago
+          end
+        )
 
       if needs_rotation
-        if @user_token.rotate!(user_agent: @env['HTTP_USER_AGENT'],
-                               client_ip: @request.ip,
-                               path: @env['REQUEST_PATH'])
+        if @user_token.rotate!(
+             user_agent: @env["HTTP_USER_AGENT"],
+             client_ip: @request.ip,
+             path: @env["REQUEST_PATH"],
+           )
          set_auth_cookie!(@user_token.unhashed_auth_token, user, cookie_jar)
          DiscourseEvent.trigger(:user_session_refreshed, user)
        end
      end
    end
 
-    if !user && cookie_jar.key?(TOKEN_COOKIE)
-      cookie_jar.delete(TOKEN_COOKIE)
-    end
+    cookie_jar.delete(TOKEN_COOKIE) if !user && cookie_jar.key?(TOKEN_COOKIE)
   end
 
   def log_on_user(user, session, cookie_jar, opts = {})
-    @env[USER_TOKEN_KEY] = @user_token = UserAuthToken.generate!(
-      user_id: user.id,
-      user_agent: @env['HTTP_USER_AGENT'],
-      path: @env['REQUEST_PATH'],
-      client_ip: @request.ip,
-      staff: user.staff?,
-      impersonate: opts[:impersonate])
+    @env[USER_TOKEN_KEY] = @user_token =
+      UserAuthToken.generate!(
+        user_id: user.id,
+        user_agent: @env["HTTP_USER_AGENT"],
+        path: @env["REQUEST_PATH"],
+        client_ip: @request.ip,
+        staff: user.staff?,
+        impersonate: opts[:impersonate],
+      )
 
     set_auth_cookie!(@user_token.unhashed_auth_token, user, cookie_jar)
     user.unstage!
@@ -288,23 +276,19 @@ class Auth::DefaultCurrentUserProvider
       token: unhashed_auth_token,
       user_id: user.id,
       trust_level: user.trust_level,
-      issued_at: Time.zone.now.to_i
+      issued_at: Time.zone.now.to_i,
     }
 
-    if SiteSetting.persistent_sessions
-      expires = SiteSetting.maximum_session_age.hours.from_now
-    end
+    expires = SiteSetting.maximum_session_age.hours.from_now if SiteSetting.persistent_sessions
 
-    if SiteSetting.same_site_cookies != "Disabled"
-      same_site = SiteSetting.same_site_cookies
-    end
+    same_site = SiteSetting.same_site_cookies if SiteSetting.same_site_cookies != "Disabled"
 
     cookie_jar.encrypted[TOKEN_COOKIE] = {
       value: data,
      httponly: true,
      secure: SiteSetting.force_https,
      expires: expires,
-      same_site: same_site
+      same_site: same_site,
    }
  end
 
@@ -313,10 +297,8 @@ class Auth::DefaultCurrentUserProvider
   # for signup flow, since all admin emails are stored in
   # DISCOURSE_DEVELOPER_EMAILS for self-hosters.
   def make_developer_admin(user)
-    if user.active? &&
-       !user.admin &&
-       Rails.configuration.respond_to?(:developer_emails) &&
-       Rails.configuration.developer_emails.include?(user.email)
+    if user.active? && !user.admin && Rails.configuration.respond_to?(:developer_emails) &&
+         Rails.configuration.developer_emails.include?(user.email)
       user.admin = true
      user.save
      Group.refresh_automatic_groups!(:staff, :admins)
@@ -347,7 +329,7 @@ class Auth::DefaultCurrentUserProvider
       @user_token.destroy
     end
 
-    cookie_jar.delete('authentication_data')
+    cookie_jar.delete("authentication_data")
     cookie_jar.delete(TOKEN_COOKIE)
   end
 
@@ -384,9 +366,7 @@ class Auth::DefaultCurrentUserProvider
     if api_key = ApiKey.active.with_key(api_key_value).includes(:user).first
       api_username = header_api_key? ? @env[HEADER_API_USERNAME] : request[API_USERNAME]
 
-      if !api_key.request_allowed?(@env)
-        return nil
-      end
+      return nil if !api_key.request_allowed?(@env)
 
       user =
         if api_key.user
@@ -395,7 +375,8 @@ class Auth::DefaultCurrentUserProvider
           User.find_by(username_lower: api_username.downcase)
         elsif user_id = header_api_key? ? @env[HEADER_API_USER_ID] : request["api_user_id"]
          User.find_by(id: user_id.to_i)
-        elsif external_id = header_api_key? ? @env[HEADER_API_USER_EXTERNAL_ID] : request["api_user_external_id"]
+        elsif external_id =
+              header_api_key? ? @env[HEADER_API_USER_EXTERNAL_ID] : request["api_user_external_id"]
          SingleSignOnRecord.find_by(external_id: external_id.to_s).try(:user)
        end
 
@@ -435,52 +416,48 @@ class Auth::DefaultCurrentUserProvider
 
     limit = GlobalSetting.max_admin_api_reqs_per_minute.to_i
     if GlobalSetting.respond_to?(:max_admin_api_reqs_per_key_per_minute)
-      Discourse.deprecate("DISCOURSE_MAX_ADMIN_API_REQS_PER_KEY_PER_MINUTE is deprecated. Please use DISCOURSE_MAX_ADMIN_API_REQS_PER_MINUTE", drop_from: '2.9.0')
-      limit = [
-        GlobalSetting.max_admin_api_reqs_per_key_per_minute.to_i,
-        limit
-      ].max
+      Discourse.deprecate(
+        "DISCOURSE_MAX_ADMIN_API_REQS_PER_KEY_PER_MINUTE is deprecated. Please use DISCOURSE_MAX_ADMIN_API_REQS_PER_MINUTE",
+        drop_from: "2.9.0",
+      )
+      limit = [GlobalSetting.max_admin_api_reqs_per_key_per_minute.to_i, limit].max
     end
-    @admin_api_key_limiter = RateLimiter.new(
-      nil,
-      "admin_api_min",
-      limit,
-      60,
-      error_code: "admin_api_key_rate_limit"
-    )
+    @admin_api_key_limiter =
+      RateLimiter.new(nil, "admin_api_min", limit, 60, error_code: "admin_api_key_rate_limit")
   end
 
   def user_api_key_limiter_60_secs
-    @user_api_key_limiter_60_secs ||= RateLimiter.new(
-      nil,
-      "user_api_min_#{@hashed_user_api_key}",
-      GlobalSetting.max_user_api_reqs_per_minute,
-      60,
-      error_code: "user_api_key_limiter_60_secs"
-    )
+    @user_api_key_limiter_60_secs ||=
+      RateLimiter.new(
+        nil,
+        "user_api_min_#{@hashed_user_api_key}",
+        GlobalSetting.max_user_api_reqs_per_minute,
+        60,
+        error_code: "user_api_key_limiter_60_secs",
+      )
   end
 
   def user_api_key_limiter_1_day
-    @user_api_key_limiter_1_day ||= RateLimiter.new(
-      nil,
-      "user_api_day_#{@hashed_user_api_key}",
-      GlobalSetting.max_user_api_reqs_per_day,
-      86400,
-      error_code: "user_api_key_limiter_1_day"
-    )
+    @user_api_key_limiter_1_day ||=
+      RateLimiter.new(
+        nil,
+        "user_api_day_#{@hashed_user_api_key}",
+        GlobalSetting.max_user_api_reqs_per_day,
+        86_400,
+        error_code: "user_api_key_limiter_1_day",
+      )
  end
 
   def find_auth_token
     return @auth_token if defined?(@auth_token)
 
-    @auth_token = begin
-      if v0 = self.class.find_v0_auth_cookie(@request)
-        v0
-      elsif v1 = self.class.find_v1_auth_cookie(@env)
-        if v1[:issued_at] >= SiteSetting.maximum_session_age.hours.ago.to_i
-          v1[:token]
+    @auth_token =
+      begin
+        if v0 = self.class.find_v0_auth_cookie(@request)
+          v0
+        elsif v1 = self.class.find_v1_auth_cookie(@env)
+          v1[:token] if v1[:issued_at] >= SiteSetting.maximum_session_age.hours.ago.to_i
        end
      end
-    end
  end
 end
@@ -2,35 +2,34 @@
 
 class Auth::DiscordAuthenticator < Auth::ManagedAuthenticator
   class DiscordStrategy < OmniAuth::Strategies::OAuth2
-    option :name, 'discord'
-    option :scope, 'identify email guilds'
+    option :name, "discord"
+    option :scope, "identify email guilds"
 
     option :client_options,
-           site: 'https://discord.com/api',
-           authorize_url: 'oauth2/authorize',
-           token_url: 'oauth2/token'
+           site: "https://discord.com/api",
+           authorize_url: "oauth2/authorize",
+           token_url: "oauth2/token"
 
     option :authorize_options, %i[scope permissions]
 
-    uid { raw_info['id'] }
+    uid { raw_info["id"] }
 
     info do
       {
-        name: raw_info['username'],
-        email: raw_info['verified'] ? raw_info['email'] : nil,
-        image: "https://cdn.discordapp.com/avatars/#{raw_info['id']}/#{raw_info['avatar']}"
+        name: raw_info["username"],
+        email: raw_info["verified"] ? raw_info["email"] : nil,
+        image: "https://cdn.discordapp.com/avatars/#{raw_info["id"]}/#{raw_info["avatar"]}",
       }
     end
 
-    extra do
-      {
-        'raw_info' => raw_info
-      }
-    end
+    extra { { "raw_info" => raw_info } }
 
     def raw_info
-      @raw_info ||= access_token.get('users/@me').parsed.
-        merge(guilds: access_token.get('users/@me/guilds').parsed)
+      @raw_info ||=
+        access_token
+          .get("users/@me")
+          .parsed
+          .merge(guilds: access_token.get("users/@me/guilds").parsed)
     end
 
     def callback_url
@@ -39,7 +38,7 @@ class Auth::DiscordAuthenticator < Auth::ManagedAuthenticator
   end
 
   def name
-    'discord'
+    "discord"
   end
 
   def enabled?
@@ -48,23 +47,26 @@ class Auth::DiscordAuthenticator < Auth::ManagedAuthenticator
 
   def register_middleware(omniauth)
     omniauth.provider DiscordStrategy,
-                      setup: lambda { |env|
-                        strategy = env["omniauth.strategy"]
-                        strategy.options[:client_id] = SiteSetting.discord_client_id
-                        strategy.options[:client_secret] = SiteSetting.discord_secret
-                      }
+                      setup:
+                        lambda { |env|
+                          strategy = env["omniauth.strategy"]
+                          strategy.options[:client_id] = SiteSetting.discord_client_id
+                          strategy.options[:client_secret] = SiteSetting.discord_secret
+                        }
   end
 
   def after_authenticate(auth_token, existing_account: nil)
     allowed_guild_ids = SiteSetting.discord_trusted_guilds.split("|")
 
     if allowed_guild_ids.length > 0
-      user_guild_ids = auth_token.extra[:raw_info][:guilds].map { |g| g['id'] }
+      user_guild_ids = auth_token.extra[:raw_info][:guilds].map { |g| g["id"] }
       if (user_guild_ids & allowed_guild_ids).empty? # User is not in any allowed guilds
-        return Auth::Result.new.tap do |auth_result|
-          auth_result.failed = true
-          auth_result.failed_reason = I18n.t("discord.not_in_allowed_guild")
-        end
+        return(
+          Auth::Result.new.tap do |auth_result|
+            auth_result.failed = true
+            auth_result.failed_reason = I18n.t("discord.not_in_allowed_guild")
+          end
+        )
       end
    end
 
@@ -1,7 +1,6 @@
 # frozen_string_literal: true
 
 class Auth::FacebookAuthenticator < Auth::ManagedAuthenticator
-
   AVATAR_SIZE ||= 480
 
   def name
@@ -14,15 +13,19 @@ class Auth::FacebookAuthenticator < Auth::ManagedAuthenticator
 
   def register_middleware(omniauth)
     omniauth.provider :facebook,
-                      setup: lambda { |env|
-                        strategy = env["omniauth.strategy"]
-                        strategy.options[:client_id] = SiteSetting.facebook_app_id
-                        strategy.options[:client_secret] = SiteSetting.facebook_app_secret
-                        strategy.options[:info_fields] = 'name,first_name,last_name,email'
-                        strategy.options[:image_size] = { width: AVATAR_SIZE, height: AVATAR_SIZE }
-                        strategy.options[:secure_image_url] = true
-                      },
+                      setup:
+                        lambda { |env|
+                          strategy = env["omniauth.strategy"]
+                          strategy.options[:client_id] = SiteSetting.facebook_app_id
+                          strategy.options[:client_secret] = SiteSetting.facebook_app_secret
+                          strategy.options[:info_fields] = "name,first_name,last_name,email"
+                          strategy.options[:image_size] = {
+                            width: AVATAR_SIZE,
+                            height: AVATAR_SIZE,
+                          }
+                          strategy.options[:secure_image_url] = true
+                        },
                       scope: "email"
   end
 
   # facebook doesn't return unverified email addresses so it's safe to assume
@@ -1,9 +1,8 @@
 # frozen_string_literal: true
 
-require 'has_errors'
+require "has_errors"
 
 class Auth::GithubAuthenticator < Auth::ManagedAuthenticator
-
   def name
     "github"
   end
@@ -50,12 +49,13 @@ class Auth::GithubAuthenticator < Auth::ManagedAuthenticator
 
   def register_middleware(omniauth)
     omniauth.provider :github,
-                      setup: lambda { |env|
-                        strategy = env["omniauth.strategy"]
-                        strategy.options[:client_id] = SiteSetting.github_client_id
-                        strategy.options[:client_secret] = SiteSetting.github_client_secret
-                      },
+                      setup:
+                        lambda { |env|
+                          strategy = env["omniauth.strategy"]
+                          strategy.options[:client_id] = SiteSetting.github_client_id
+                          strategy.options[:client_secret] = SiteSetting.github_client_secret
+                        },
                       scope: "user:email"
   end
 
   # the omniauth-github gem only picks up the primary email if it's verified:
@@ -22,47 +22,46 @@ class Auth::GoogleOAuth2Authenticator < Auth::ManagedAuthenticator
 
   def register_middleware(omniauth)
     options = {
-      setup: lambda { |env|
-        strategy = env["omniauth.strategy"]
-        strategy.options[:client_id] = SiteSetting.google_oauth2_client_id
-        strategy.options[:client_secret] = SiteSetting.google_oauth2_client_secret
+      setup:
+        lambda do |env|
+          strategy = env["omniauth.strategy"]
+          strategy.options[:client_id] = SiteSetting.google_oauth2_client_id
+          strategy.options[:client_secret] = SiteSetting.google_oauth2_client_secret
 
-        if (google_oauth2_hd = SiteSetting.google_oauth2_hd).present?
-          strategy.options[:hd] = google_oauth2_hd
-        end
+          if (google_oauth2_hd = SiteSetting.google_oauth2_hd).present?
+            strategy.options[:hd] = google_oauth2_hd
+          end
 
-        if (google_oauth2_prompt = SiteSetting.google_oauth2_prompt).present?
-          strategy.options[:prompt] = google_oauth2_prompt.gsub("|", " ")
-        end
+          if (google_oauth2_prompt = SiteSetting.google_oauth2_prompt).present?
+            strategy.options[:prompt] = google_oauth2_prompt.gsub("|", " ")
+          end
 
-        # All the data we need for the `info` and `credentials` auth hash
-        # are obtained via the user info API, not the JWT. Using and verifying
-        # the JWT can fail due to clock skew, so let's skip it completely.
-        # https://github.com/zquestz/omniauth-google-oauth2/pull/392
-        strategy.options[:skip_jwt] = true
-      }
+          # All the data we need for the `info` and `credentials` auth hash
+          # are obtained via the user info API, not the JWT. Using and verifying
+          # the JWT can fail due to clock skew, so let's skip it completely.
+          # https://github.com/zquestz/omniauth-google-oauth2/pull/392
+          strategy.options[:skip_jwt] = true
+        end,
     }
     omniauth.provider :google_oauth2, options
   end
 
   def after_authenticate(auth_token, existing_account: nil)
     groups = provides_groups? ? raw_groups(auth_token.uid) : nil
-    if groups
-      auth_token.extra[:raw_groups] = groups
-    end
+    auth_token.extra[:raw_groups] = groups if groups
 
     result = super
 
     if groups
-      result.associated_groups = groups.map { |group| group.with_indifferent_access.slice(:id, :name) }
+      result.associated_groups =
+        groups.map { |group| group.with_indifferent_access.slice(:id, :name) }
     end
 
     result
   end
 
   def provides_groups?
-    SiteSetting.google_oauth2_hd.present? &&
-      SiteSetting.google_oauth2_hd_groups &&
+    SiteSetting.google_oauth2_hd.present? && SiteSetting.google_oauth2_hd_groups &&
       SiteSetting.google_oauth2_hd_groups_service_account_admin_email.present? &&
       SiteSetting.google_oauth2_hd_groups_service_account_json.present?
   end
@@ -77,20 +76,20 @@ class Auth::GoogleOAuth2Authenticator < Auth::ManagedAuthenticator
     return if client.nil?
 
     loop do
-      params = {
-        userKey: uid
-      }
+      params = { userKey: uid }
       params[:pageToken] = page_token if page_token
 
       response = client.get(groups_url, params: params, raise_errors: false)
 
       if response.status == 200
         response = response.parsed
-        groups.push(*response['groups'])
-        page_token = response['nextPageToken']
+        groups.push(*response["groups"])
+        page_token = response["nextPageToken"]
         break if page_token.nil?
       else
-        Rails.logger.error("[Discourse Google OAuth2] failed to retrieve groups for #{uid} - status #{response.status}")
+        Rails.logger.error(
+          "[Discourse Google OAuth2] failed to retrieve groups for #{uid} - status #{response.status}",
+        )
         break
      end
    end
@@ -107,26 +106,35 @@ class Auth::GoogleOAuth2Authenticator < Auth::ManagedAuthenticator
       scope: GROUPS_SCOPE,
       iat: Time.now.to_i,
       exp: Time.now.to_i + 60,
-      sub: SiteSetting.google_oauth2_hd_groups_service_account_admin_email
+      sub: SiteSetting.google_oauth2_hd_groups_service_account_admin_email,
     }
     headers = { "alg" => "RS256", "typ" => "JWT" }
     key = OpenSSL::PKey::RSA.new(service_account_info["private_key"])
 
-    encoded_jwt = ::JWT.encode(payload, key, 'RS256', headers)
+    encoded_jwt = ::JWT.encode(payload, key, "RS256", headers)
 
-    client = OAuth2::Client.new(
-      SiteSetting.google_oauth2_client_id,
-      SiteSetting.google_oauth2_client_secret,
-      site: OAUTH2_BASE_URL
-    )
+    client =
+      OAuth2::Client.new(
+        SiteSetting.google_oauth2_client_id,
+        SiteSetting.google_oauth2_client_secret,
+        site: OAUTH2_BASE_URL,
+      )
 
-    token_response = client.request(:post, '/token', body: {
-      grant_type: "urn:ietf:params:oauth:grant-type:jwt-bearer",
-      assertion: encoded_jwt
-    }, raise_errors: false)
+    token_response =
+      client.request(
+        :post,
+        "/token",
+        body: {
+          grant_type: "urn:ietf:params:oauth:grant-type:jwt-bearer",
+          assertion: encoded_jwt,
+        },
+        raise_errors: false,
+      )
 
     if token_response.status != 200
-      Rails.logger.error("[Discourse Google OAuth2] failed to retrieve group fetch token - status #{token_response.status}")
+      Rails.logger.error(
+        "[Discourse Google OAuth2] failed to retrieve group fetch token - status #{token_response.status}",
+      )
      return
    end
 
@@ -56,28 +56,27 @@ class Auth::ManagedAuthenticator < Auth::Authenticator
 
   def after_authenticate(auth_token, existing_account: nil)
     # Try and find an association for this account
-    association = UserAssociatedAccount.find_or_initialize_by(provider_name: auth_token[:provider], provider_uid: auth_token[:uid])
+    association =
+      UserAssociatedAccount.find_or_initialize_by(
+        provider_name: auth_token[:provider],
+        provider_uid: auth_token[:uid],
+      )
 
     # Reconnecting to existing account
-    if can_connect_existing_user? && existing_account && (association.user.nil? || existing_account.id != association.user_id)
+    if can_connect_existing_user? && existing_account &&
+         (association.user.nil? || existing_account.id != association.user_id)
       association.user = existing_account
     end
 
     # Matching an account by email
-    if match_by_email &&
-       association.user.nil? &&
-       (user = find_user_by_email(auth_token))
-
+    if match_by_email && association.user.nil? && (user = find_user_by_email(auth_token))
       UserAssociatedAccount.where(user: user, provider_name: auth_token[:provider]).destroy_all # Destroy existing associations for the new user
       association.user = user
     end
 
     # Matching an account by username
-    if match_by_username &&
-       association.user.nil? &&
-       SiteSetting.username_change_period.zero? &&
-       (user = find_user_by_username(auth_token))
-
+    if match_by_username && association.user.nil? && SiteSetting.username_change_period.zero? &&
+         (user = find_user_by_username(auth_token))
      UserAssociatedAccount.where(user: user, provider_name: auth_token[:provider]).destroy_all # Destroy existing associations for the new user
      association.user = user
    end
@@ -100,7 +99,14 @@ class Auth::ManagedAuthenticator < Auth::Authenticator
     result = Auth::Result.new
     info = auth_token[:info]
     result.email = info[:email]
-    result.name = (info[:first_name] && info[:last_name]) ? "#{info[:first_name]} #{info[:last_name]}" : info[:name]
+    result.name =
+      (
+        if (info[:first_name] && info[:last_name])
+          "#{info[:first_name]} #{info[:last_name]}"
+        else
+          info[:name]
+        end
+      )
     if result.name.present? && result.name == result.email
       # Some IDPs send the email address in the name parameter (e.g. Auth0 with default configuration)
       # We add some generic protection here, so that users don't accidently make their email addresses public
@@ -109,10 +115,7 @@ class Auth::ManagedAuthenticator < Auth::Authenticator
     result.username = info[:nickname]
     result.email_valid = primary_email_verified?(auth_token) if result.email.present?
     result.overrides_email = always_update_user_email?
-    result.extra_data = {
-      provider: auth_token[:provider],
-      uid: auth_token[:uid]
-    }
+    result.extra_data = { provider: auth_token[:provider], uid: auth_token[:uid] }
     result.user = association.user
 
     result
@@ -120,7 +123,11 @@ class Auth::ManagedAuthenticator < Auth::Authenticator
 
   def after_create_account(user, auth_result)
     auth_token = auth_result[:extra_data]
-    association = UserAssociatedAccount.find_or_initialize_by(provider_name: auth_token[:provider], provider_uid: auth_token[:uid])
+    association =
+      UserAssociatedAccount.find_or_initialize_by(
+        provider_name: auth_token[:provider],
+        provider_uid: auth_token[:uid],
+      )
     association.user = user
     association.save!
 
@@ -132,16 +139,12 @@ class Auth::ManagedAuthenticator < Auth::Authenticator
 
   def find_user_by_email(auth_token)
     email = auth_token.dig(:info, :email)
-    if email && primary_email_verified?(auth_token)
-      User.find_by_email(email)
-    end
+    User.find_by_email(email) if email && primary_email_verified?(auth_token)
   end
 
   def find_user_by_username(auth_token)
     username = auth_token.dig(:info, :nickname)
-    if username
-      User.find_by_username(username)
-    end
+    User.find_by_username(username) if username
  end
 
   def retrieve_avatar(user, url)
@@ -158,7 +161,7 @@ class Auth::ManagedAuthenticator < Auth::Authenticator
 
     if bio || location
       profile = user.user_profile
-      profile.bio_raw = bio unless profile.bio_raw.present?
+      profile.bio_raw = bio unless profile.bio_raw.present?
      profile.location = location unless profile.location.present?
      profile.save
    end
@@ -1,48 +1,48 @@
 # frozen_string_literal: true
 
 class Auth::Result
-  ATTRIBUTES = [
-    :user,
-    :name,
-    :username,
-    :email,
-    :email_valid,
-    :extra_data,
-    :awaiting_activation,
-    :awaiting_approval,
-    :authenticated,
-    :authenticator_name,
-    :requires_invite,
-    :not_allowed_from_ip_address,
-    :admin_not_allowed_from_ip_address,
-    :skip_email_validation,
-    :destination_url,
-    :omniauth_disallow_totp,
-    :failed,
-    :failed_reason,
-    :failed_code,
-    :associated_groups,
-    :overrides_email,
-    :overrides_username,
-    :overrides_name,
+  ATTRIBUTES = %i[
+    user
+    name
+    username
+    email
+    email_valid
+    extra_data
+    awaiting_activation
+    awaiting_approval
+    authenticated
+    authenticator_name
+    requires_invite
+    not_allowed_from_ip_address
+    admin_not_allowed_from_ip_address
+    skip_email_validation
+    destination_url
+    omniauth_disallow_totp
+    failed
+    failed_reason
+    failed_code
+    associated_groups
+    overrides_email
+    overrides_username
+    overrides_name
   ]
 
   attr_accessor *ATTRIBUTES
 
   # These are stored in the session during
   # account creation. The user cannot read or modify them
-  SESSION_ATTRIBUTES = [
-    :email,
-    :username,
-    :email_valid,
-    :name,
-    :authenticator_name,
-    :extra_data,
-    :skip_email_validation,
-    :associated_groups,
-    :overrides_email,
-    :overrides_username,
-    :overrides_name,
+  SESSION_ATTRIBUTES = %i[
+    email
+    username
+    email_valid
+    name
+    authenticator_name
+    extra_data
+    skip_email_validation
+    associated_groups
+    overrides_email
+    overrides_username
+    overrides_name
   ]
 
   def [](key)
@@ -59,9 +59,7 @@ class Auth::Result
   end
 
   def email_valid=(val)
-    if !val.in? [true, false, nil]
-      raise ArgumentError, "email_valid should be boolean or nil"
-    end
+    raise ArgumentError, "email_valid should be boolean or nil" if !val.in? [true, false, nil]
     @email_valid = !!val
   end
 
@@ -83,14 +81,14 @@ class Auth::Result
 
   def apply_user_attributes!
     change_made = false
-    if (SiteSetting.auth_overrides_username? || overrides_username) && (resolved_username = resolve_username).present?
+    if (SiteSetting.auth_overrides_username? || overrides_username) &&
+         (resolved_username = resolve_username).present?
      change_made = UsernameChanger.override(user, resolved_username)
    end
 
-    if (SiteSetting.auth_overrides_email || overrides_email || user&.email&.ends_with?(".invalid")) &&
-       email_valid &&
-       email.present? &&
-       user.email != Email.downcase(email)
+    if (
+         SiteSetting.auth_overrides_email || overrides_email || user&.email&.ends_with?(".invalid")
+       ) && email_valid && email.present? && user.email != Email.downcase(email)
      user.email = email
      change_made = true
    end
@@ -109,11 +107,12 @@ class Auth::Result
 
     associated_groups.uniq.each do |associated_group|
       begin
-        associated_group = AssociatedGroup.find_or_create_by(
-          name: associated_group[:name],
-          provider_id: associated_group[:id],
-          provider_name: extra_data[:provider]
-        )
+        associated_group =
+          AssociatedGroup.find_or_create_by(
+            name: associated_group[:name],
+            provider_id: associated_group[:id],
+            provider_name: extra_data[:provider],
+          )
       rescue ActiveRecord::RecordNotUnique
         retry
       end
@@ -135,22 +134,12 @@ class Auth::Result
   end
 
   def to_client_hash
-    if requires_invite
-      return { requires_invite: true }
-    end
+    return { requires_invite: true } if requires_invite
 
-    if user&.suspended?
-      return {
-        suspended: true,
-        suspended_message: user.suspended_message
-      }
-    end
+    return { suspended: true, suspended_message: user.suspended_message } if user&.suspended?
 
     if omniauth_disallow_totp
-      return {
-        omniauth_disallow_totp: !!omniauth_disallow_totp,
-        email: email
-      }
+      return { omniauth_disallow_totp: !!omniauth_disallow_totp, email: email }
    end
 
    if user
@@ -159,7 +148,7 @@ class Auth::Result
         awaiting_activation: !!awaiting_activation,
         awaiting_approval: !!awaiting_approval,
         not_allowed_from_ip_address: !!not_allowed_from_ip_address,
-        admin_not_allowed_from_ip_address: !!admin_not_allowed_from_ip_address
+        admin_not_allowed_from_ip_address: !!admin_not_allowed_from_ip_address,
      }
 
      result[:destination_url] = destination_url if authenticated && destination_url.present?
@@ -173,7 +162,7 @@ class Auth::Result
         auth_provider: authenticator_name,
         email_valid: !!email_valid,
         can_edit_username: can_edit_username,
-        can_edit_name: can_edit_name
+        can_edit_name: can_edit_name,
      }
 
      result[:destination_url] = destination_url if destination_url.present?
@@ -190,9 +179,7 @@ class Auth::Result
 
   def staged_user
     return @staged_user if defined?(@staged_user)
-    if email.present? && email_valid
-      @staged_user = User.where(staged: true).find_by_email(email)
-    end
+    @staged_user = User.where(staged: true).find_by_email(email) if email.present? && email_valid
  end
 
  def username_suggester_attributes
|
@ -17,11 +17,12 @@ class Auth::TwitterAuthenticator < Auth::ManagedAuthenticator
|
||||
|
||||
def register_middleware(omniauth)
|
||||
omniauth.provider :twitter,
|
||||
setup: lambda { |env|
|
||||
strategy = env["omniauth.strategy"]
|
||||
strategy.options[:consumer_key] = SiteSetting.twitter_consumer_key
|
||||
strategy.options[:consumer_secret] = SiteSetting.twitter_consumer_secret
|
||||
}
|
||||
setup:
|
||||
lambda { |env|
|
||||
strategy = env["omniauth.strategy"]
|
||||
strategy.options[:consumer_key] = SiteSetting.twitter_consumer_key
|
||||
strategy.options[:consumer_secret] = SiteSetting.twitter_consumer_secret
|
||||
}
|
||||
end
|
||||
|
||||
# twitter doesn't return unverfied email addresses in the API
|
||||
|
@@ -1,9 +1,7 @@
 # frozen_string_literal: true
 
 module Autospec
-
   class BaseRunner
-
     # used when starting the runner - preloading happens here
     def start(opts = {})
     end
@@ -32,7 +30,5 @@ module Autospec
     # used to stop the runner
     def stop
     end
-
   end
-
 end
@@ -3,11 +3,15 @@
 require "rspec/core/formatters/base_text_formatter"
 require "parallel_tests/rspec/logger_base"
 
-module Autospec; end
+module Autospec
+end
 
 class Autospec::Formatter < RSpec::Core::Formatters::BaseTextFormatter
-
-  RSpec::Core::Formatters.register self, :example_passed, :example_pending, :example_failed, :start_dump
+  RSpec::Core::Formatters.register self,
+                                   :example_passed,
+                                   :example_pending,
+                                   :example_failed,
+                                   :start_dump
 
   RSPEC_RESULT = "./tmp/rspec_result"
 
@@ -19,15 +23,15 @@ class Autospec::Formatter < RSpec::Core::Formatters::BaseTextFormatter
   end
 
   def example_passed(_notification)
-    output.print RSpec::Core::Formatters::ConsoleCodes.wrap('.', :success)
+    output.print RSpec::Core::Formatters::ConsoleCodes.wrap(".", :success)
   end
 
   def example_pending(_notification)
-    output.print RSpec::Core::Formatters::ConsoleCodes.wrap('*', :pending)
+    output.print RSpec::Core::Formatters::ConsoleCodes.wrap("*", :pending)
   end
 
   def example_failed(notification)
-    output.print RSpec::Core::Formatters::ConsoleCodes.wrap('F', :failure)
+    output.print RSpec::Core::Formatters::ConsoleCodes.wrap("F", :failure)
     @fail_file.puts(notification.example.location + " ")
     @fail_file.flush
   end
@@ -40,5 +44,4 @@ class Autospec::Formatter < RSpec::Core::Formatters::BaseTextFormatter
     @fail_file.close
     super(filename)
   end
-
 end
@ -7,7 +7,8 @@ require "autospec/reload_css"
|
||||
require "autospec/base_runner"
|
||||
require "socket_server"
|
||||
|
||||
module Autospec; end
|
||||
module Autospec
|
||||
end
|
||||
|
||||
class Autospec::Manager
|
||||
def self.run(opts = {})
|
||||
@ -25,7 +26,10 @@ class Autospec::Manager
|
||||
end
|
||||
|
||||
def run
|
||||
Signal.trap("HUP") { stop_runners; exit }
|
||||
Signal.trap("HUP") do
|
||||
stop_runners
|
||||
exit
|
||||
end
|
||||
|
||||
Signal.trap("INT") do
|
||||
begin
|
||||
@ -47,7 +51,6 @@ class Autospec::Manager
|
||||
STDIN.gets
|
||||
process_queue
|
||||
end
|
||||
|
||||
rescue => e
|
||||
fail(e, "failed in run")
|
||||
ensure
|
||||
@ -71,16 +74,16 @@ class Autospec::Manager
|
||||
|
||||
@queue.reject! { |_, s, _| s == "spec" }
|
||||
|
||||
if current_runner
|
||||
@queue.concat [['spec', 'spec', current_runner]]
|
||||
end
|
||||
@queue.concat [["spec", "spec", current_runner]] if current_runner
|
||||
|
||||
@runners.each do |runner|
|
||||
@queue.concat [['spec', 'spec', runner]] unless @queue.any? { |_, s, r| s == "spec" && r == runner }
|
||||
unless @queue.any? { |_, s, r| s == "spec" && r == runner }
|
||||
@queue.concat [["spec", "spec", runner]]
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
[:start, :stop, :abort].each do |verb|
|
||||
%i[start stop abort].each do |verb|
|
||||
define_method("#{verb}_runners") do
|
||||
puts "@@@@@@@@@@@@ #{verb}_runners" if @debug
|
||||
@runners.each(&verb)
|
||||
@ -89,11 +92,7 @@ class Autospec::Manager
|
||||
|
||||
def start_service_queue
|
||||
puts "@@@@@@@@@@@@ start_service_queue" if @debug
|
||||
Thread.new do
|
||||
while true
|
||||
thread_loop
|
||||
end
|
||||
end
|
||||
Thread.new { thread_loop while true }
|
||||
end
|
||||
|
||||
# the main loop, will run the specs in the queue till one fails or the queue is empty
|
||||
@@ -176,9 +175,7 @@ class Autospec::Manager
Dir[root_path + "/plugins/*"].each do |f|
next if !File.directory? f
resolved = File.realpath(f)
if resolved != f
map[resolved] = f
end
map[resolved] = f if resolved != f
end
map
end

@@ -188,9 +185,7 @@ class Autospec::Manager
resolved = file
@reverse_map ||= reverse_symlink_map
@reverse_map.each do |location, discourse_location|
if file.start_with?(location)
resolved = discourse_location + file[location.length..-1]
end
resolved = discourse_location + file[location.length..-1] if file.start_with?(location)
end

resolved

@@ -199,9 +194,7 @@ class Autospec::Manager
def listen_for_changes
puts "@@@@@@@@@@@@ listen_for_changes" if @debug

options = {
ignore: /^lib\/autospec/,
}
options = { ignore: %r{^lib/autospec} }

if @opts[:force_polling]
options[:force_polling] = true

@@ -210,14 +203,14 @@ class Autospec::Manager

path = root_path

if ENV['VIM_AUTOSPEC']
if ENV["VIM_AUTOSPEC"]
STDERR.puts "Using VIM file listener"

socket_path = (Rails.root + "tmp/file_change.sock").to_s
FileUtils.rm_f(socket_path)
server = SocketServer.new(socket_path)
server.start do |line|
file, line = line.split(' ')
file, line = line.split(" ")
file = reverse_symlink(file)
file = file.sub(Rails.root.to_s + "/", "")
# process_change can acquire a mutex and block

@@ -235,20 +228,20 @@ class Autospec::Manager
end

# to speed up boot we use a thread
["spec", "lib", "app", "config", "test", "vendor", "plugins"].each do |watch|
%w[spec lib app config test vendor plugins].each do |watch|
puts "@@@@@@@@@ Listen to #{path}/#{watch} #{options}" if @debug
Thread.new do
begin
listener = Listen.to("#{path}/#{watch}", options) do |modified, added, _|
paths = [modified, added].flatten
paths.compact!
paths.map! do |long|
long = reverse_symlink(long)
long[(path.length + 1)..-1]
listener =
Listen.to("#{path}/#{watch}", options) do |modified, added, _|
paths = [modified, added].flatten
paths.compact!
paths.map! do |long|
long = reverse_symlink(long)
long[(path.length + 1)..-1]
end
process_change(paths)
end
process_change(paths)
end
listener.start
sleep
rescue => e

@@ -257,7 +250,6 @@ class Autospec::Manager
end
end
end

end
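# For context, the Listen-based watcher in the hunk above reduces to this
# minimal sketch (assumes the `listen` gem is installed; the watched
# directory and the puts callback are illustrative).
require "listen"

path = File.expand_path("spec")
listener =
  Listen.to(path) do |modified, added, _removed|
    paths = [modified, added].flatten.compact
    paths.map! { |long| long[(path.length + 1)..-1] } # make paths relative
    puts "changed: #{paths.inspect}"
  end
listener.start
sleep # Listen runs from a background thread; keep the process alive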
def process_change(files)

@@ -285,13 +277,9 @@ class Autospec::Manager
hit = true
spec = v ? (v.arity == 1 ? v.call(m) : v.call) : file
with_line = spec
if spec == file && line
with_line = spec + ":" << line.to_s
end
with_line = spec + ":" << line.to_s if spec == file && line
if File.exist?(spec) || Dir.exist?(spec)
if with_line != spec
specs << [file, spec, runner]
end
specs << [file, spec, runner] if with_line != spec
specs << [file, with_line, runner]
end
end

@@ -329,9 +317,7 @@ class Autospec::Manager
focus = @queue.shift
@queue.unshift([file, spec, runner])
unless spec.include?(":") && focus[1].include?(spec.split(":")[0])
if focus[1].include?(spec) || file != spec
@queue.unshift(focus)
end
@queue.unshift(focus) if focus[1].include?(spec) || file != spec
end
else
@queue.unshift([file, spec, runner])

@@ -1,9 +1,9 @@
# frozen_string_literal: true

module Autospec; end
module Autospec
end

class Autospec::ReloadCss

WATCHERS = {}
def self.watch(pattern, &blk)
WATCHERS[pattern] = blk

@@ -30,7 +30,7 @@ class Autospec::ReloadCss
if paths.any? { |p| p =~ /\.(css|s[ac]ss)/ }
# todo connect to dev instead?
ActiveRecord::Base.establish_connection
[:desktop, :mobile].each do |style|
%i[desktop mobile].each do |style|
s = DiscourseStylesheets.new(style)
s.compile
paths << "public" + s.stylesheet_relpath_no_digest

@@ -44,10 +44,9 @@ class Autospec::ReloadCss
p = p.sub(/\.sass\.erb/, "")
p = p.sub(/\.sass/, "")
p = p.sub(/\.scss/, "")
p = p.sub(/^app\/assets\/stylesheets/, "assets")
p = p.sub(%r{^app/assets/stylesheets}, "assets")
{ name: p, hash: hash || SecureRandom.hex }
end
message_bus.publish "/file-change", paths
end

end

@@ -1,9 +1,7 @@
# frozen_string_literal: true

module Autospec

class RspecRunner < BaseRunner

WATCHERS = {}
def self.watch(pattern, &blk)
WATCHERS[pattern] = blk

@@ -13,26 +11,28 @@ module Autospec
end

# Discourse specific
watch(%r{^lib/(.+)\.rb$}) { |m| "spec/components/#{m[1]}_spec.rb" }
watch(%r{^lib/(.+)\.rb$}) { |m| "spec/components/#{m[1]}_spec.rb" }

watch(%r{^app/(.+)\.rb$}) { |m| "spec/#{m[1]}_spec.rb" }
watch(%r{^app/(.+)(\.erb|\.haml)$}) { |m| "spec/#{m[1]}#{m[2]}_spec.rb" }
watch(%r{^app/(.+)\.rb$}) { |m| "spec/#{m[1]}_spec.rb" }
watch(%r{^app/(.+)(\.erb|\.haml)$}) { |m| "spec/#{m[1]}#{m[2]}_spec.rb" }
watch(%r{^spec/.+_spec\.rb$})
watch(%r{^spec/support/.+\.rb$}) { "spec" }
watch("app/controllers/application_controller.rb") { "spec/requests" }
watch(%r{^spec/support/.+\.rb$}) { "spec" }
watch("app/controllers/application_controller.rb") { "spec/requests" }

watch(%r{app/controllers/(.+).rb}) { |m| "spec/requests/#{m[1]}_spec.rb" }
watch(%r{app/controllers/(.+).rb}) { |m| "spec/requests/#{m[1]}_spec.rb" }

watch(%r{^app/views/(.+)/.+\.(erb|haml)$}) { |m| "spec/requests/#{m[1]}_spec.rb" }
watch(%r{^app/views/(.+)/.+\.(erb|haml)$}) { |m| "spec/requests/#{m[1]}_spec.rb" }

watch(%r{^spec/fabricators/.+_fabricator\.rb$}) { "spec" }
watch(%r{^spec/fabricators/.+_fabricator\.rb$}) { "spec" }

watch(%r{^app/assets/javascripts/pretty-text/.*\.js\.es6$}) { "spec/components/pretty_text_spec.rb" }
watch(%r{^app/assets/javascripts/pretty-text/.*\.js\.es6$}) do
"spec/components/pretty_text_spec.rb"
end
watch(%r{^plugins/.*/discourse-markdown/.*\.js\.es6$}) { "spec/components/pretty_text_spec.rb" }

watch(%r{^plugins/.*/spec/.*\.rb})
watch(%r{^(plugins/.*/)plugin\.rb}) { |m| "#{m[1]}spec" }
watch(%r{^(plugins/.*)/(lib|app)}) { |m| "#{m[1]}/spec/integration" }
watch(%r{^(plugins/.*/)plugin\.rb}) { |m| "#{m[1]}spec" }
watch(%r{^(plugins/.*)/(lib|app)}) { |m| "#{m[1]}/spec/integration" }
watch(%r{^(plugins/.*)/lib/(.*)\.rb}) { |m| "#{m[1]}/spec/lib/#{m[2]}_spec.rb" }

RELOADERS = Set.new
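# The watch table above maps a changed file to the spec that should run.
# A minimal sketch of that lookup with a hypothetical helper; the two
# patterns are taken from the hunk above.
WATCHERS = {
  %r{^lib/(.+)\.rb$} => ->(m) { "spec/components/#{m[1]}_spec.rb" },
  %r{^app/(.+)\.rb$} => ->(m) { "spec/#{m[1]}_spec.rb" },
}

def spec_for(file)
  WATCHERS.each do |pattern, blk|
    m = pattern.match(file)
    return blk.call(m) if m
  end
  nil
end

puts spec_for("lib/cache.rb") # => spec/components/cache_spec.rb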
@@ -50,11 +50,9 @@ module Autospec

def failed_specs
specs = []
path = './tmp/rspec_result'
path = "./tmp/rspec_result"
specs = File.readlines(path) if File.exist?(path)
specs
end

end

end

@@ -3,7 +3,6 @@
require "autospec/rspec_runner"

module Autospec

class SimpleRunner < RspecRunner
def initialize
@mutex = Mutex.new

@@ -12,36 +11,29 @@ module Autospec
def run(specs)
puts "Running Rspec: #{specs}"
# kill previous rspec instance
@mutex.synchronize do
self.abort
end
@mutex.synchronize { self.abort }
# we use our custom rspec formatter
args = [
"-r", "#{File.dirname(__FILE__)}/formatter.rb",
"-f", "Autospec::Formatter"
]
args = ["-r", "#{File.dirname(__FILE__)}/formatter.rb", "-f", "Autospec::Formatter"]

command = begin
line_specified = specs.split.any? { |s| s =~ /\:/ } # Parallel spec can't run specific line
multiple_files = specs.split.count > 1 || specs == "spec" # Only parallelize multiple files
if ENV["PARALLEL_SPEC"] == '1' && multiple_files && !line_specified
"bin/turbo_rspec #{args.join(" ")} #{specs.split.join(" ")}"
else
"bin/rspec #{args.join(" ")} #{specs.split.join(" ")}"
command =
begin
line_specified = specs.split.any? { |s| s =~ /\:/ } # Parallel spec can't run specific line
multiple_files = specs.split.count > 1 || specs == "spec" # Only parallelize multiple files
if ENV["PARALLEL_SPEC"] == "1" && multiple_files && !line_specified
"bin/turbo_rspec #{args.join(" ")} #{specs.split.join(" ")}"
else
"bin/rspec #{args.join(" ")} #{specs.split.join(" ")}"
end
end
end

# launch rspec
Dir.chdir(Rails.root) do # rubocop:disable Discourse/NoChdir because this is not part of the app
env = { "RAILS_ENV" => "test" }
if specs.split(' ').any? { |s| s =~ /^(.\/)?plugins/ }
if specs.split(" ").any? { |s| s =~ %r{^(./)?plugins} }
env["LOAD_PLUGINS"] = "1"
puts "Loading plugins while running specs"
end
pid =
@mutex.synchronize do
@pid = Process.spawn(env, command)
end
pid = @mutex.synchronize { @pid = Process.spawn(env, command) }

_, status = Process.wait2(pid)

@@ -51,7 +43,11 @@ module Autospec

def abort
if pid = @pid
Process.kill("TERM", pid) rescue nil
begin
Process.kill("TERM", pid)
rescue StandardError
nil
end
wait_for_done(pid)
pid = nil
end

@@ -66,16 +62,26 @@ module Autospec

def wait_for_done(pid)
i = 3000
while (i > 0 && Process.getpgid(pid) rescue nil)
while (
begin
i > 0 && Process.getpgid(pid)
rescue StandardError
nil
end
)
sleep 0.001
i -= 1
end
if (Process.getpgid(pid) rescue nil)
if (
begin
Process.getpgid(pid)
rescue StandardError
nil
end
)
STDERR.puts "Terminating rspec #{pid} by force cause it refused graceful termination"
Process.kill("KILL", pid)
end
end

end

end
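# The abort/wait_for_done pair above implements "TERM first, KILL if it
# lingers". A standalone sketch of that pattern on a POSIX system; the
# spawned command is just an example.
pid = Process.spawn("sleep 60")

begin
  Process.kill("TERM", pid) # polite request; may be trapped or ignored
rescue Errno::ESRCH
  # process already gone
end

deadline = Time.now + 3
while Time.now < deadline
  break if Process.waitpid(pid, Process::WNOHANG) # reaped: it exited
  sleep 0.01
end

begin
  Process.kill("KILL", pid) # escalate if it is still alive
  Process.waitpid(pid)
rescue Errno::ESRCH, Errno::ECHILD
  # exited (and was reaped) in time
end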
@@ -1,8 +1,8 @@
# frozen_string_literal: true

module BackupRestore

class OperationRunningError < RuntimeError; end
class OperationRunningError < RuntimeError
end

VERSION_PREFIX = "v"
DUMP_FILE = "dump.sql.gz"

@@ -22,9 +22,7 @@ module BackupRestore

def self.rollback!
raise BackupRestore::OperationRunningError if BackupRestore.is_operation_running?
if can_rollback?
move_tables_between_schemas("backup", "public")
end
move_tables_between_schemas("backup", "public") if can_rollback?
end

def self.cancel!

@@ -58,7 +56,7 @@ module BackupRestore
{
is_operation_running: is_operation_running?,
can_rollback: can_rollback?,
allow_restore: Rails.env.development? || SiteSetting.allow_restore
allow_restore: Rails.env.development? || SiteSetting.allow_restore,
}
end

@@ -133,7 +131,7 @@ module BackupRestore
config["backup_port"] || config["port"],
config["username"] || username || ENV["USER"] || "postgres",
config["password"] || password,
config["database"]
config["database"],
)
end

@@ -194,7 +192,11 @@ module BackupRestore
end

def self.backup_tables_count
DB.query_single("SELECT COUNT(*) AS count FROM information_schema.tables WHERE table_schema = 'backup'").first.to_i
DB
.query_single(
"SELECT COUNT(*) AS count FROM information_schema.tables WHERE table_schema = 'backup'",
)
.first
.to_i
end

end

@@ -69,15 +69,22 @@ module BackupRestore
path_transformation =
case tar_implementation
when :gnu
['--transform', 's|var/www/discourse/public/uploads/|uploads/|']
%w[--transform s|var/www/discourse/public/uploads/|uploads/|]
when :bsd
['-s', '|var/www/discourse/public/uploads/|uploads/|']
%w[-s |var/www/discourse/public/uploads/|uploads/|]
end

log "Unzipping archive, this may take a while..."
Discourse::Utils.execute_command(
'tar', '--extract', '--gzip', '--file', @archive_path, '--directory', @tmp_directory,
*path_transformation, failure_message: "Failed to decompress archive."
"tar",
"--extract",
"--gzip",
"--file",
@archive_path,
"--directory",
@tmp_directory,
*path_transformation,
failure_message: "Failed to decompress archive.",
)
end

@@ -86,15 +93,19 @@ module BackupRestore
if @is_archive
# for compatibility with backups from Discourse v1.5 and below
old_dump_path = File.join(@tmp_directory, OLD_DUMP_FILENAME)
File.exist?(old_dump_path) ? old_dump_path : File.join(@tmp_directory, BackupRestore::DUMP_FILE)
if File.exist?(old_dump_path)
old_dump_path
else
File.join(@tmp_directory, BackupRestore::DUMP_FILE)
end
else
File.join(@tmp_directory, @filename)
end

if File.extname(@db_dump_path) == '.gz'
if File.extname(@db_dump_path) == ".gz"
log "Extracting dump file..."
Compression::Gzip.new.decompress(@tmp_directory, @db_dump_path, available_size)
@db_dump_path.delete_suffix!('.gz')
@db_dump_path.delete_suffix!(".gz")
end

@db_dump_path

@@ -105,17 +116,18 @@ module BackupRestore
end

def tar_implementation
@tar_version ||= begin
tar_version = Discourse::Utils.execute_command('tar', '--version')
@tar_version ||=
begin
tar_version = Discourse::Utils.execute_command("tar", "--version")

if tar_version.include?("GNU tar")
:gnu
elsif tar_version.include?("bsdtar")
:bsd
else
raise "Unknown tar implementation: #{tar_version}"
if tar_version.include?("GNU tar")
:gnu
elsif tar_version.include?("bsdtar")
:bsd
else
raise "Unknown tar implementation: #{tar_version}"
end
end
end
end
end
end

@@ -37,9 +37,7 @@ module BackupRestore
return unless cleanup_allowed?
return if (backup_files = files).size <= SiteSetting.maximum_backups

backup_files[SiteSetting.maximum_backups..-1].each do |file|
delete_file(file.filename)
end
backup_files[SiteSetting.maximum_backups..-1].each { |file| delete_file(file.filename) }

reset_cache
end

@@ -74,7 +72,7 @@ module BackupRestore
used_bytes: used_bytes,
free_bytes: free_bytes,
count: files.size,
last_backup_taken_at: latest_file&.last_modified
last_backup_taken_at: latest_file&.last_modified,
}
end

@@ -4,7 +4,6 @@ require "mini_mime"
require "file_store/s3_store"

module BackupRestore

class Backuper
attr_reader :success

@@ -84,7 +83,11 @@ module BackupRestore
@dump_filename = File.join(@tmp_directory, BackupRestore::DUMP_FILE)
@archive_directory = BackupRestore::LocalBackupStore.base_directory(db: @current_db)
filename = @filename_override || "#{get_parameterized_title}-#{@timestamp}"
@archive_basename = File.join(@archive_directory, "#{filename}-#{BackupRestore::VERSION_PREFIX}#{BackupRestore.current_version}")
@archive_basename =
File.join(
@archive_directory,
"#{filename}-#{BackupRestore::VERSION_PREFIX}#{BackupRestore.current_version}",
)

@backup_filename =
if @with_uploads

@@ -119,9 +122,18 @@ module BackupRestore
BackupMetadata.delete_all
BackupMetadata.create!(name: "base_url", value: Discourse.base_url)
BackupMetadata.create!(name: "cdn_url", value: Discourse.asset_host)
BackupMetadata.create!(name: "s3_base_url", value: SiteSetting.Upload.enable_s3_uploads ? SiteSetting.Upload.s3_base_url : nil)
BackupMetadata.create!(name: "s3_cdn_url", value: SiteSetting.Upload.enable_s3_uploads ? SiteSetting.Upload.s3_cdn_url : nil)
BackupMetadata.create!(name: "db_name", value: RailsMultisite::ConnectionManagement.current_db)
BackupMetadata.create!(
name: "s3_base_url",
value: SiteSetting.Upload.enable_s3_uploads ? SiteSetting.Upload.s3_base_url : nil,
)
BackupMetadata.create!(
name: "s3_cdn_url",
value: SiteSetting.Upload.enable_s3_uploads ? SiteSetting.Upload.s3_cdn_url : nil,
)
BackupMetadata.create!(
name: "db_name",
value: RailsMultisite::ConnectionManagement.current_db,
)
BackupMetadata.create!(name: "multisite", value: Rails.configuration.multisite)
end

@@ -132,7 +144,7 @@ module BackupRestore
pg_dump_running = true

Thread.new do
RailsMultisite::ConnectionManagement::establish_connection(db: @current_db)
RailsMultisite::ConnectionManagement.establish_connection(db: @current_db)
while pg_dump_running
message = logs.pop.strip
log(message) unless message.blank?

@@ -159,23 +171,24 @@ module BackupRestore
db_conf = BackupRestore.database_configuration

password_argument = "PGPASSWORD='#{db_conf.password}'" if db_conf.password.present?
host_argument = "--host=#{db_conf.host}" if db_conf.host.present?
port_argument = "--port=#{db_conf.port}" if db_conf.port.present?
host_argument = "--host=#{db_conf.host}" if db_conf.host.present?
port_argument = "--port=#{db_conf.port}" if db_conf.port.present?
username_argument = "--username=#{db_conf.username}" if db_conf.username.present?

[ password_argument, # pass the password to pg_dump (if any)
"pg_dump", # the pg_dump command
"--schema=public", # only public schema
"-T public.pg_*", # exclude tables and views whose name starts with "pg_"
[
password_argument, # pass the password to pg_dump (if any)
"pg_dump", # the pg_dump command
"--schema=public", # only public schema
"-T public.pg_*", # exclude tables and views whose name starts with "pg_"
"--file='#{@dump_filename}'", # output to the dump.sql file
"--no-owner", # do not output commands to set ownership of objects
"--no-privileges", # prevent dumping of access privileges
"--verbose", # specifies verbose mode
"--compress=4", # Compression level of 4
host_argument, # the hostname to connect to (if any)
port_argument, # the port to connect to (if any)
username_argument, # the username to connect as (if any)
db_conf.database # the name of the database to dump
"--no-owner", # do not output commands to set ownership of objects
"--no-privileges", # prevent dumping of access privileges
"--verbose", # specifies verbose mode
"--compress=4", # Compression level of 4
host_argument, # the hostname to connect to (if any)
port_argument, # the port to connect to (if any)
username_argument, # the username to connect as (if any)
db_conf.database, # the name of the database to dump
].join(" ")
end
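# The hunk above assembles the pg_dump invocation as an array of annotated
# arguments joined into one string. The same assembly in miniature, with
# placeholder paths and connection values; nil arguments would need a
# .compact before join, as the psql builder later in this commit does.
dump_filename = "/tmp/dump.sql.gz"
host_argument = "--host=localhost"

command = [
  "pg_dump",         # the pg_dump command
  "--schema=public", # only the public schema
  "--file='#{dump_filename}'",
  "--compress=4",    # gzip compression level
  host_argument,
  "discourse_development",
].join(" ")

puts command
# pg_dump --schema=public --file='/tmp/dump.sql.gz' --compress=4 --host=localhost discourse_development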
@@ -185,8 +198,10 @@ module BackupRestore
archive_filename = File.join(@archive_directory, @backup_filename)

Discourse::Utils.execute_command(
'mv', @dump_filename, archive_filename,
failure_message: "Failed to move database dump file."
"mv",
@dump_filename,
archive_filename,
failure_message: "Failed to move database dump file.",
)

remove_tmp_directory

@@ -198,17 +213,29 @@ module BackupRestore
tar_filename = "#{@archive_basename}.tar"

log "Making sure archive does not already exist..."
Discourse::Utils.execute_command('rm', '-f', tar_filename)
Discourse::Utils.execute_command('rm', '-f', "#{tar_filename}.gz")
Discourse::Utils.execute_command("rm", "-f", tar_filename)
Discourse::Utils.execute_command("rm", "-f", "#{tar_filename}.gz")

log "Creating empty archive..."
Discourse::Utils.execute_command('tar', '--create', '--file', tar_filename, '--files-from', '/dev/null')
Discourse::Utils.execute_command(
"tar",
"--create",
"--file",
tar_filename,
"--files-from",
"/dev/null",
)

log "Archiving data dump..."
Discourse::Utils.execute_command(
'tar', '--append', '--dereference', '--file', tar_filename, File.basename(@dump_filename),
"tar",
"--append",
"--dereference",
"--file",
tar_filename,
File.basename(@dump_filename),
failure_message: "Failed to archive data dump.",
chdir: File.dirname(@dump_filename)
chdir: File.dirname(@dump_filename),
)

add_local_uploads_to_archive(tar_filename)

@@ -218,8 +245,10 @@ module BackupRestore

log "Gzipping archive, this may take a while..."
Discourse::Utils.execute_command(
'gzip', "-#{SiteSetting.backup_gzip_compression_level_for_uploads}", tar_filename,
failure_message: "Failed to gzip archive."
"gzip",
"-#{SiteSetting.backup_gzip_compression_level_for_uploads}",
tar_filename,
failure_message: "Failed to gzip archive.",
)
end

@@ -244,14 +273,21 @@ module BackupRestore
if SiteSetting.include_thumbnails_in_backups
exclude_optimized = ""
else
optimized_path = File.join(upload_directory, 'optimized')
optimized_path = File.join(upload_directory, "optimized")
exclude_optimized = "--exclude=#{optimized_path}"
end

Discourse::Utils.execute_command(
'tar', '--append', '--dereference', exclude_optimized, '--file', tar_filename, upload_directory,
failure_message: "Failed to archive uploads.", success_status_codes: [0, 1],
chdir: File.join(Rails.root, "public")
"tar",
"--append",
"--dereference",
exclude_optimized,
"--file",
tar_filename,
upload_directory,
failure_message: "Failed to archive uploads.",
success_status_codes: [0, 1],
chdir: File.join(Rails.root, "public"),
)
else
log "No local uploads found. Skipping archiving of local uploads..."

@@ -287,9 +323,14 @@ module BackupRestore

log "Appending uploads to archive..."
Discourse::Utils.execute_command(
'tar', '--append', '--file', tar_filename, upload_directory,
failure_message: "Failed to append uploads to archive.", success_status_codes: [0, 1],
chdir: @tmp_directory
"tar",
"--append",
"--file",
tar_filename,
upload_directory,
failure_message: "Failed to append uploads to archive.",
success_status_codes: [0, 1],
chdir: @tmp_directory,
)

log "No uploads found on S3. Skipping archiving of uploads stored on S3..." if count == 0

@@ -327,9 +368,7 @@ module BackupRestore
logs = Discourse::Utils.logs_markdown(@logs, user: @user)
post = SystemMessage.create_from_system_user(@user, status, logs: logs)

if @user.id == Discourse::SYSTEM_USER_ID
post.topic.invite_group(@user, Group[:admins])
end
post.topic.invite_group(@user, Group[:admins]) if @user.id == Discourse::SYSTEM_USER_ID
rescue => ex
log "Something went wrong while notifying user.", ex
end

@@ -399,7 +438,12 @@ module BackupRestore
def publish_log(message, timestamp)
return unless @publish_to_message_bus
data = { timestamp: timestamp, operation: "backup", message: message }
MessageBus.publish(BackupRestore::LOGS_CHANNEL, data, user_ids: [@user_id], client_ids: [@client_id])
MessageBus.publish(
BackupRestore::LOGS_CHANNEL,
data,
user_ids: [@user_id],
client_ids: [@client_id],
)
end

def save_log(message, timestamp)

@@ -416,5 +460,4 @@ module BackupRestore
end
end
end

end

@@ -46,9 +46,7 @@ module BackupRestore
end

def self.drop_backup_schema
if backup_schema_dropable?
ActiveRecord::Base.connection.drop_schema(BACKUP_SCHEMA)
end
ActiveRecord::Base.connection.drop_schema(BACKUP_SCHEMA) if backup_schema_dropable?
end

def self.core_migration_files

@@ -65,13 +63,14 @@ module BackupRestore
last_line = nil
psql_running = true

log_thread = Thread.new do
RailsMultisite::ConnectionManagement::establish_connection(db: @current_db)
while psql_running || !logs.empty?
message = logs.pop.strip
log(message) if message.present?
log_thread =
Thread.new do
RailsMultisite::ConnectionManagement.establish_connection(db: @current_db)
while psql_running || !logs.empty?
message = logs.pop.strip
log(message) if message.present?
end
end
end

IO.popen(restore_dump_command) do |pipe|
begin

@@ -89,7 +88,9 @@ module BackupRestore
logs << ""
log_thread.join

raise DatabaseRestoreError.new("psql failed: #{last_line}") if Process.last_status&.exitstatus != 0
if Process.last_status&.exitstatus != 0
raise DatabaseRestoreError.new("psql failed: #{last_line}")
end
end

# Removes unwanted SQL added by certain versions of pg_dump and modifies

@@ -99,7 +100,7 @@ module BackupRestore
"DROP SCHEMA", # Discourse <= v1.5
"CREATE SCHEMA", # PostgreSQL 11+
"COMMENT ON SCHEMA", # PostgreSQL 11+
"SET default_table_access_method" # PostgreSQL 12
"SET default_table_access_method", # PostgreSQL 12
].join("|")

command = "sed -E '/^(#{unwanted_sql})/d' #{@db_dump_path}"
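# The sed command above strips statements that pg_dump emits but the restore
# cannot replay. The same filter in plain Ruby so the intent is visible
# without a shell; the sample dump lines are invented.
unwanted_sql = ["DROP SCHEMA", "CREATE SCHEMA", "COMMENT ON SCHEMA"].join("|")
pattern = /^(#{unwanted_sql})/

dump_lines = [
  "CREATE SCHEMA public;",
  "CREATE TABLE posts (id integer);",
  "COMMENT ON SCHEMA public IS 'standard public schema';",
]

puts dump_lines.reject { |line| line.match?(pattern) }
# CREATE TABLE posts (id integer);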
@@ -117,18 +118,19 @@ module BackupRestore
db_conf = BackupRestore.database_configuration

password_argument = "PGPASSWORD='#{db_conf.password}'" if db_conf.password.present?
host_argument = "--host=#{db_conf.host}" if db_conf.host.present?
port_argument = "--port=#{db_conf.port}" if db_conf.port.present?
username_argument = "--username=#{db_conf.username}" if db_conf.username.present?
host_argument = "--host=#{db_conf.host}" if db_conf.host.present?
port_argument = "--port=#{db_conf.port}" if db_conf.port.present?
username_argument = "--username=#{db_conf.username}" if db_conf.username.present?

[ password_argument, # pass the password to psql (if any)
"psql", # the psql command
[
password_argument, # pass the password to psql (if any)
"psql", # the psql command
"--dbname='#{db_conf.database}'", # connect to database *dbname*
"--single-transaction", # all or nothing (also runs COPY commands faster)
"--variable=ON_ERROR_STOP=1", # stop on first error
host_argument, # the hostname to connect to (if any)
port_argument, # the port to connect to (if any)
username_argument # the username to connect as (if any)
"--single-transaction", # all or nothing (also runs COPY commands faster)
"--variable=ON_ERROR_STOP=1", # stop on first error
host_argument, # the hostname to connect to (if any)
port_argument, # the port to connect to (if any)
username_argument, # the username to connect as (if any)
].compact.join(" ")
end

@@ -136,21 +138,22 @@ module BackupRestore
log "Migrating the database..."

log Discourse::Utils.execute_command(
{
"SKIP_POST_DEPLOYMENT_MIGRATIONS" => "0",
"SKIP_OPTIMIZE_ICONS" => "1",
"DISABLE_TRANSLATION_OVERRIDES" => "1"
},
"rake", "db:migrate",
failure_message: "Failed to migrate database.",
chdir: Rails.root
)
{
"SKIP_POST_DEPLOYMENT_MIGRATIONS" => "0",
"SKIP_OPTIMIZE_ICONS" => "1",
"DISABLE_TRANSLATION_OVERRIDES" => "1",
},
"rake",
"db:migrate",
failure_message: "Failed to migrate database.",
chdir: Rails.root,
)
end

def reconnect_database
log "Reconnecting to the database..."
RailsMultisite::ConnectionManagement::reload if RailsMultisite::ConnectionManagement::instance
RailsMultisite::ConnectionManagement::establish_connection(db: @current_db)
RailsMultisite::ConnectionManagement.reload if RailsMultisite::ConnectionManagement.instance
RailsMultisite::ConnectionManagement.establish_connection(db: @current_db)
end

def create_missing_discourse_functions

@@ -179,10 +182,12 @@ module BackupRestore
end
end

existing_function_names = Migration::BaseDropper.existing_discourse_function_names.map { |name| "#{name}()" }
existing_function_names =
Migration::BaseDropper.existing_discourse_function_names.map { |name| "#{name}()" }

all_readonly_table_columns.each do |table_name, column_name|
function_name = Migration::BaseDropper.readonly_function_name(table_name, column_name, with_schema: false)
function_name =
Migration::BaseDropper.readonly_function_name(table_name, column_name, with_schema: false)

if !existing_function_names.include?(function_name)
Migration::BaseDropper.create_readonly_function(table_name, column_name)

@@ -12,7 +12,12 @@ module BackupRestore
end

def self.chunk_path(identifier, filename, chunk_number)
File.join(LocalBackupStore.base_directory, "tmp", identifier, "#{filename}.part#{chunk_number}")
File.join(
LocalBackupStore.base_directory,
"tmp",
identifier,
"#{filename}.part#{chunk_number}",
)
end

def initialize(opts = {})

@@ -39,7 +44,7 @@ module BackupRestore

def download_file(filename, destination, failure_message = "")
path = path_from_filename(filename)
Discourse::Utils.execute_command('cp', path, destination, failure_message: failure_message)
Discourse::Utils.execute_command("cp", path, destination, failure_message: failure_message)
end

private

@@ -59,7 +64,7 @@ module BackupRestore
filename: File.basename(path),
size: File.size(path),
last_modified: File.mtime(path).utc,
source: include_download_source ? path : nil
source: include_download_source ? path : nil,
)
end

@@ -32,7 +32,12 @@ module BackupRestore
def publish_log(message, timestamp)
return unless @publish_to_message_bus
data = { timestamp: timestamp, operation: "restore", message: message }
MessageBus.publish(BackupRestore::LOGS_CHANNEL, data, user_ids: [@user_id], client_ids: [@client_id])
MessageBus.publish(
BackupRestore::LOGS_CHANNEL,
data,
user_ids: [@user_id],
client_ids: [@client_id],
)
end

def save_log(message, timestamp)

@@ -28,8 +28,10 @@ module BackupRestore
log " Restored version: #{metadata[:version]}"

if metadata[:version] > @current_version
raise MigrationRequiredError.new("You're trying to restore a more recent version of the schema. " \
"You should migrate first!")
raise MigrationRequiredError.new(
"You're trying to restore a more recent version of the schema. " \
"You should migrate first!",
)
end

metadata

@@ -65,8 +65,8 @@ module BackupRestore

after_restore_hook
rescue Compression::Strategy::ExtractFailed
log 'ERROR: The uncompressed file is too big. Consider increasing the hidden ' \
'"decompressed_backup_max_file_size_mb" setting.'
log "ERROR: The uncompressed file is too big. Consider increasing the hidden " \
'"decompressed_backup_max_file_size_mb" setting.'
@database_restorer.rollback
rescue SystemExit
log "Restore process was cancelled!"

@@ -118,10 +118,10 @@ module BackupRestore

DiscourseEvent.trigger(:site_settings_restored)

if @disable_emails && SiteSetting.disable_emails == 'no'
if @disable_emails && SiteSetting.disable_emails == "no"
log "Disabling outgoing emails for non-staff users..."
user = User.find_by_email(@user_info[:email]) || Discourse.system_user
SiteSetting.set_and_log(:disable_emails, 'non-staff', user)
SiteSetting.set_and_log(:disable_emails, "non-staff", user)
end
end

@@ -152,7 +152,7 @@ module BackupRestore
post = SystemMessage.create_from_system_user(user, status, logs: logs)
else
log "Could not send notification to '#{@user_info[:username]}' " \
"(#{@user_info[:email]}), because the user does not exist."
"(#{@user_info[:email]}), because the user does not exist."
end
rescue => ex
log "Something went wrong while notifying user.", ex

@@ -4,8 +4,11 @@ module BackupRestore
class S3BackupStore < BackupStore
UPLOAD_URL_EXPIRES_AFTER_SECONDS ||= 6.hours.to_i

delegate :abort_multipart, :presign_multipart_part, :list_multipart_parts,
:complete_multipart, to: :s3_helper
delegate :abort_multipart,
:presign_multipart_part,
:list_multipart_parts,
:complete_multipart,
to: :s3_helper

def initialize(opts = {})
@s3_options = S3Helper.s3_options(SiteSetting)

@@ -13,7 +16,7 @@ module BackupRestore
end

def s3_helper
@s3_helper ||= S3Helper.new(s3_bucket_name_with_prefix, '', @s3_options.clone)
@s3_helper ||= S3Helper.new(s3_bucket_name_with_prefix, "", @s3_options.clone)
end

def remote?

@@ -57,11 +60,17 @@ module BackupRestore

presigned_url(obj, :put, UPLOAD_URL_EXPIRES_AFTER_SECONDS)
rescue Aws::Errors::ServiceError => e
Rails.logger.warn("Failed to generate upload URL for S3: #{e.message.presence || e.class.name}")
Rails.logger.warn(
"Failed to generate upload URL for S3: #{e.message.presence || e.class.name}",
)
raise StorageError.new(e.message.presence || e.class.name)
end

def signed_url_for_temporary_upload(file_name, expires_in: S3Helper::UPLOAD_URL_EXPIRES_AFTER_SECONDS, metadata: {})
def signed_url_for_temporary_upload(
file_name,
expires_in: S3Helper::UPLOAD_URL_EXPIRES_AFTER_SECONDS,
metadata: {}
)
obj = object_from_path(file_name)
raise BackupFileExists.new if obj.exists?
key = temporary_upload_path(file_name)

@@ -71,8 +80,8 @@ module BackupRestore
expires_in: expires_in,
opts: {
metadata: metadata,
acl: "private"
}
acl: "private",
},
)
end

@@ -84,7 +93,7 @@ module BackupRestore
folder_prefix = s3_helper.s3_bucket_folder_path.nil? ? "" : s3_helper.s3_bucket_folder_path

if Rails.env.test?
folder_prefix = File.join(folder_prefix, "test_#{ENV['TEST_ENV_NUMBER'].presence || '0'}")
folder_prefix = File.join(folder_prefix, "test_#{ENV["TEST_ENV_NUMBER"].presence || "0"}")
end

folder_prefix

@@ -105,7 +114,10 @@ module BackupRestore
s3_helper.copy(
existing_external_upload_key,
File.join(s3_helper.s3_bucket_folder_path, original_filename),
options: { acl: "private", apply_metadata_to_destination: true }
options: {
acl: "private",
apply_metadata_to_destination: true,
},
)
s3_helper.delete_object(existing_external_upload_key)
end

@@ -120,9 +132,7 @@ module BackupRestore
objects = []

s3_helper.list.each do |obj|
if obj.key.match?(file_regex)
objects << create_file_from_object(obj)
end
objects << create_file_from_object(obj) if obj.key.match?(file_regex)
end

objects

@@ -137,7 +147,7 @@ module BackupRestore
filename: File.basename(obj.key),
size: obj.size,
last_modified: obj.last_modified,
source: include_download_source ? presigned_url(obj, :get, expires) : nil
source: include_download_source ? presigned_url(obj, :get, expires) : nil,
)
end

@@ -154,16 +164,17 @@ module BackupRestore
end

def file_regex
@file_regex ||= begin
path = s3_helper.s3_bucket_folder_path || ""
@file_regex ||=
begin
path = s3_helper.s3_bucket_folder_path || ""

if path.present?
path = "#{path}/" unless path.end_with?("/")
path = Regexp.quote(path)
if path.present?
path = "#{path}/" unless path.end_with?("/")
path = Regexp.quote(path)
end

%r{^#{path}[^/]*\.t?gz$}i
end

/^#{path}[^\/]*\.t?gz$/i
end
end

def free_bytes
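# A quick worked example of the file_regex above, to show which keys match.
# A sketch only; the bucket folder path is invented.
path = Regexp.quote("backups/default/")
file_regex = %r{^#{path}[^/]*\.t?gz$}i

puts file_regex.match?("backups/default/site-2023.tar.gz") # true  (.tar.gz)
puts file_regex.match?("backups/default/site-2023.tgz")    # true  (.tgz)
puts file_regex.match?("backups/default/tmp/partial.tgz")  # false (nested path)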
@@ -98,9 +98,7 @@ module BackupRestore

def flush_redis
redis = Discourse.redis
redis.scan_each(match: "*") do |key|
redis.del(key) unless key == SidekiqPauser::PAUSED_KEY
end
redis.scan_each(match: "*") { |key| redis.del(key) unless key == SidekiqPauser::PAUSED_KEY }
end

def clear_sidekiq_queues

@@ -11,11 +11,12 @@ module BackupRestore
def self.s3_regex_string(s3_base_url)
clean_url = s3_base_url.sub(S3_ENDPOINT_REGEX, ".s3.amazonaws.com")

regex_string = clean_url
.split(".s3.amazonaws.com")
.map { |s| Regexp.escape(s) }
.insert(1, S3_ENDPOINT_REGEX.source)
.join("")
regex_string =
clean_url
.split(".s3.amazonaws.com")
.map { |s| Regexp.escape(s) }
.insert(1, S3_ENDPOINT_REGEX.source)
.join("")

[regex_string, clean_url]
end
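# To make s3_regex_string concrete, here is the same split/insert/join chain
# on a sample URL. A sketch with a simplified pattern standing in for
# S3_ENDPOINT_REGEX; the real constant covers more endpoint variants.
S3_ENDPOINT_REGEX = /\.s3(?:\.[\w-]+)?\.amazonaws\.com/

clean_url = "//bucket.s3.us-east-1.amazonaws.com".sub(S3_ENDPOINT_REGEX, ".s3.amazonaws.com")
# => "//bucket.s3.amazonaws.com"

regex_string =
  clean_url
    .split(".s3.amazonaws.com")
    .map { |s| Regexp.escape(s) }
    .insert(1, S3_ENDPOINT_REGEX.source)
    .join("")

puts regex_string
# //bucket\.s3(?:\.[\w-]+)?\.amazonaws\.com -- escaped host plus flexible endpoint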
@@ -25,12 +26,16 @@ module BackupRestore
end

def restore(tmp_directory)
upload_directories = Dir.glob(File.join(tmp_directory, "uploads", "*"))
.reject { |path| File.basename(path).start_with?("PaxHeaders") }
upload_directories =
Dir
.glob(File.join(tmp_directory, "uploads", "*"))
.reject { |path| File.basename(path).start_with?("PaxHeaders") }

if upload_directories.count > 1
raise UploadsRestoreError.new("Could not find uploads, because the uploads " \
"directory contains multiple folders.")
raise UploadsRestoreError.new(
"Could not find uploads, because the uploads " \
"directory contains multiple folders.",
)
end

@tmp_uploads_path = upload_directories.first

@@ -55,7 +60,9 @@ module BackupRestore
if !store.respond_to?(:copy_from)
# a FileStore implementation from a plugin might not support this method, so raise a helpful error
store_name = Discourse.store.class.name
raise UploadsRestoreError.new("The current file store (#{store_name}) does not support restoring uploads.")
raise UploadsRestoreError.new(
"The current file store (#{store_name}) does not support restoring uploads.",
)
end

log "Restoring uploads, this may take a while..."

@@ -89,13 +96,17 @@ module BackupRestore
remap(old_base_url, Discourse.base_url)
end

current_s3_base_url = SiteSetting::Upload.enable_s3_uploads ? SiteSetting::Upload.s3_base_url : nil
if (old_s3_base_url = BackupMetadata.value_for("s3_base_url")) && old_s3_base_url != current_s3_base_url
current_s3_base_url =
SiteSetting::Upload.enable_s3_uploads ? SiteSetting::Upload.s3_base_url : nil
if (old_s3_base_url = BackupMetadata.value_for("s3_base_url")) &&
old_s3_base_url != current_s3_base_url
remap_s3("#{old_s3_base_url}/", uploads_folder)
end

current_s3_cdn_url = SiteSetting::Upload.enable_s3_uploads ? SiteSetting::Upload.s3_cdn_url : nil
if (old_s3_cdn_url = BackupMetadata.value_for("s3_cdn_url")) && old_s3_cdn_url != current_s3_cdn_url
current_s3_cdn_url =
SiteSetting::Upload.enable_s3_uploads ? SiteSetting::Upload.s3_cdn_url : nil
if (old_s3_cdn_url = BackupMetadata.value_for("s3_cdn_url")) &&
old_s3_cdn_url != current_s3_cdn_url
base_url = current_s3_cdn_url || Discourse.base_url
remap("#{old_s3_cdn_url}/", UrlHelper.schemaless("#{base_url}#{uploads_folder}"))

@@ -113,10 +124,7 @@ module BackupRestore
remap(old_host, new_host) if old_host != new_host
end

if @previous_db_name != @current_db_name
remap("/uploads/#{@previous_db_name}/", upload_path)
end

remap("/uploads/#{@previous_db_name}/", upload_path) if @previous_db_name != @current_db_name
rescue => ex
log "Something went wrong while remapping uploads.", ex
end

@@ -130,7 +138,12 @@ module BackupRestore
if old_s3_base_url.include?("amazonaws.com")
from_regex, from_clean_url = self.class.s3_regex_string(old_s3_base_url)
log "Remapping with regex from '#{from_clean_url}' to '#{uploads_folder}'"
DbHelper.regexp_replace(from_regex, uploads_folder, verbose: true, excluded_tables: ["backup_metadata"])
DbHelper.regexp_replace(
from_regex,
uploads_folder,
verbose: true,
excluded_tables: ["backup_metadata"],
)
else
remap(old_s3_base_url, uploads_folder)
end

@@ -141,13 +154,15 @@ module BackupRestore
DB.exec("TRUNCATE TABLE optimized_images")
SiteIconManager.ensure_optimized!

User.where("uploaded_avatar_id IS NOT NULL").find_each do |user|
Jobs.enqueue(:create_avatar_thumbnails, upload_id: user.uploaded_avatar_id)
end
User
.where("uploaded_avatar_id IS NOT NULL")
.find_each do |user|
Jobs.enqueue(:create_avatar_thumbnails, upload_id: user.uploaded_avatar_id)
end
end

def rebake_posts_with_uploads
log 'Posts will be rebaked by a background job in sidekiq. You will see missing images until that has completed.'
log "Posts will be rebaked by a background job in sidekiq. You will see missing images until that has completed."
log 'You can expedite the process by manually running "rake posts:rebake_uncooked_posts"'

DB.exec(<<~SQL)

@@ -173,7 +173,7 @@ module BadgeQueries
<<~SQL
SELECT p.user_id, p.id post_id, current_timestamp granted_at
FROM badge_posts p
WHERE #{is_topic ? "p.post_number = 1" : "p.post_number > 1" } AND p.like_count >= #{count.to_i} AND
WHERE #{is_topic ? "p.post_number = 1" : "p.post_number > 1"} AND p.like_count >= #{count.to_i} AND
(:backfill OR p.id IN (:post_ids) )
SQL
end

@@ -271,5 +271,4 @@ module BadgeQueries
WHERE "rank" = 1
SQL
end

end

@@ -11,9 +11,7 @@ class BookmarkQuery

def self.preload(bookmarks, object)
preload_polymorphic_associations(bookmarks, object.guardian)
if @preload
@preload.each { |preload| preload.call(bookmarks, object) }
end
@preload.each { |preload| preload.call(bookmarks, object) } if @preload
end

# These polymorphic associations are loaded to make the UserBookmarkListSerializer's

@@ -42,24 +40,27 @@ class BookmarkQuery
ts_query = search_term.present? ? Search.ts_query(term: search_term) : nil
search_term_wildcard = search_term.present? ? "%#{search_term}%" : nil

queries = Bookmark.registered_bookmarkables.map do |bookmarkable|
interim_results = bookmarkable.perform_list_query(@user, @guardian)
queries =
Bookmark
.registered_bookmarkables
.map do |bookmarkable|
interim_results = bookmarkable.perform_list_query(@user, @guardian)

# this could occur if there is some security reason that the user cannot
# access the bookmarkables that they have bookmarked, e.g. if they had 1 bookmark
# on a topic and that topic was moved into a private category
next if interim_results.blank?
# this could occur if there is some security reason that the user cannot
# access the bookmarkables that they have bookmarked, e.g. if they had 1 bookmark
# on a topic and that topic was moved into a private category
next if interim_results.blank?

if search_term.present?
interim_results = bookmarkable.perform_search_query(
interim_results, search_term_wildcard, ts_query
)
end
if search_term.present?
interim_results =
bookmarkable.perform_search_query(interim_results, search_term_wildcard, ts_query)
end

# this is purely to make the query easy to read and debug, otherwise it's
# all mashed up into a massive ball in MiniProfiler :)
"---- #{bookmarkable.model.to_s} bookmarkable ---\n\n #{interim_results.to_sql}"
end.compact
# this is purely to make the query easy to read and debug, otherwise it's
# all mashed up into a massive ball in MiniProfiler :)
"---- #{bookmarkable.model.to_s} bookmarkable ---\n\n #{interim_results.to_sql}"
end
.compact

# same for interim results being blank, the user might have been locked out
# from all their various bookmarks, in which case they will see nothing and

@@ -68,17 +69,16 @@ class BookmarkQuery

union_sql = queries.join("\n\nUNION\n\n")
results = Bookmark.select("bookmarks.*").from("(\n\n#{union_sql}\n\n) as bookmarks")
results = results.order(
"(CASE WHEN bookmarks.pinned THEN 0 ELSE 1 END),
results =
results.order(
"(CASE WHEN bookmarks.pinned THEN 0 ELSE 1 END),
bookmarks.reminder_at ASC,
bookmarks.updated_at DESC"
)
bookmarks.updated_at DESC",
)

@count = results.count

if @page.positive?
results = results.offset(@page * @params[:per_page])
end
results = results.offset(@page * @params[:per_page]) if @page.positive?

if updated_results = blk&.call(results)
results = updated_results
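# The per-bookmarkable queries above are stitched together with SQL UNION and
# ordered once at the end. A minimal sketch of that shape, with made-up SQL
# fragments in place of real ActiveRecord relations.
queries = [
  "SELECT * FROM bookmarks WHERE bookmarkable_type = 'Post'",
  "SELECT * FROM bookmarks WHERE bookmarkable_type = 'Topic'",
]

union_sql = queries.join("\n\nUNION\n\n")

# the union becomes a derived table that a single ORDER BY can sort
final_sql = <<~SQL
  SELECT bookmarks.*
  FROM (
  #{union_sql}
  ) AS bookmarks
  ORDER BY (CASE WHEN bookmarks.pinned THEN 0 ELSE 1 END)
SQL

puts final_sql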
@@ -28,12 +28,10 @@ class BookmarkReminderNotificationHandler

def clear_reminder
Rails.logger.debug(
"Clearing bookmark reminder for bookmark_id #{bookmark.id}. reminder at: #{bookmark.reminder_at}"
"Clearing bookmark reminder for bookmark_id #{bookmark.id}. reminder at: #{bookmark.reminder_at}",
)

if bookmark.auto_clear_reminder_when_reminder_sent?
bookmark.reminder_at = nil
end
bookmark.reminder_at = nil if bookmark.auto_clear_reminder_when_reminder_sent?

bookmark.clear_reminder!
end

@@ -1,7 +1,6 @@
# frozen_string_literal: true

module BrowserDetection

def self.browser(user_agent)
case user_agent
when /Edg/i

@@ -66,5 +65,4 @@ module BrowserDetection
:unknown
end
end

end

lib/cache.rb

@@ -15,12 +15,11 @@
# this makes it harder to reason about the API

class Cache

# nothing is cached for longer than 1 day EVER
# there is no reason to have data older than this clogging redis
# it is dangerous cause if we rename keys we will be stuck with
# pointless data
MAX_CACHE_AGE = 1.day unless defined? MAX_CACHE_AGE
MAX_CACHE_AGE = 1.day unless defined?(MAX_CACHE_AGE)

attr_reader :namespace

@@ -47,9 +46,7 @@ class Cache
end

def clear
keys.each do |k|
redis.del(k)
end
keys.each { |k| redis.del(k) }
end

def normalize_key(key)

@@ -80,9 +77,7 @@ class Cache
key = normalize_key(name)
raw = nil

if !force
raw = redis.get(key)
end
raw = redis.get(key) if !force

if raw
begin

@@ -96,7 +91,8 @@ class Cache
val
end
elsif force
raise ArgumentError, "Missing block: Calling `Cache#fetch` with `force: true` requires a block."
raise ArgumentError,
"Missing block: Calling `Cache#fetch` with `force: true` requires a block."
else
read(name)
end

@@ -105,7 +101,7 @@ class Cache
protected

def log_first_exception(e)
if !defined? @logged_a_warning
if !defined?(@logged_a_warning)
@logged_a_warning = true
Discourse.warn_exception(e, "Corrupt cache... skipping entry for key #{key}")
end

@@ -129,5 +125,4 @@ class Cache
redis.setex(key, expiry, dumped)
true
end

end
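# The Cache class above reads through Redis with a hard one-day TTL cap. The
# same read-through-with-expiry idea sketched with a plain Hash so it runs
# anywhere; redis.setex in the real class plays the role of the store write.
MAX_CACHE_AGE = 24 * 60 * 60 # seconds, mirroring the 1.day cap

store = {} # { key => [value, expires_at] }

fetch =
  lambda do |key, &blk|
    value, expires_at = store[key]
    return value if expires_at && Time.now < expires_at # fresh hit
    value = blk.call                                    # miss: compute...
    store[key] = [value, Time.now + MAX_CACHE_AGE]      # ...write with TTL
    value
  end

puts fetch.call("slow_result") { 40 + 2 } # computes and caches: 42
puts fetch.call("slow_result") { raise }  # served from cache: 42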
@@ -2,7 +2,7 @@

module CanonicalURL
module ControllerExtensions
ALLOWED_CANONICAL_PARAMS = %w(page)
ALLOWED_CANONICAL_PARAMS = %w[page]

def canonical_url(url_for_options = {})
case url_for_options

@@ -14,14 +14,15 @@ module CanonicalURL
end

def default_canonical
@default_canonical ||= begin
canonical = +"#{Discourse.base_url_no_prefix}#{request.path}"
allowed_params = params.select { |key| ALLOWED_CANONICAL_PARAMS.include?(key) }
if allowed_params.present?
canonical << "?#{allowed_params.keys.zip(allowed_params.values).map { |key, value| "#{key}=#{value}" }.join("&")}"
@default_canonical ||=
begin
canonical = +"#{Discourse.base_url_no_prefix}#{request.path}"
allowed_params = params.select { |key| ALLOWED_CANONICAL_PARAMS.include?(key) }
if allowed_params.present?
canonical << "?#{allowed_params.keys.zip(allowed_params.values).map { |key, value| "#{key}=#{value}" }.join("&")}"
end
canonical
end
canonical
end
end

def self.included(base)

@@ -31,7 +32,7 @@ module CanonicalURL

module Helpers
def canonical_link_tag(url = nil)
tag('link', rel: 'canonical', href: url || @canonical_url || default_canonical)
tag("link", rel: "canonical", href: url || @canonical_url || default_canonical)
end
end
end
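# default_canonical above keeps only an allow-listed query parameter when
# building the canonical URL. The same filtering in isolation; the base URL
# and params are sample values.
ALLOWED_CANONICAL_PARAMS = %w[page]

base_url = "https://forum.example.com"
request_path = "/t/some-topic/123"
params = { "page" => "2", "utm_source" => "newsletter" }

canonical = +"#{base_url}#{request_path}"
allowed_params = params.select { |key, _| ALLOWED_CANONICAL_PARAMS.include?(key) }
unless allowed_params.empty?
  canonical << "?#{allowed_params.map { |key, value| "#{key}=#{value}" }.join("&")}"
end

puts canonical # https://forum.example.com/t/some-topic/123?page=2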
@@ -1,23 +1,26 @@
# frozen_string_literal: true

module CategoryBadge

def self.category_stripe(color, classes)
style = color ? "style='background-color: ##{color};'" : ''
style = color ? "style='background-color: ##{color};'" : ""
"<span class='#{classes}' #{style}></span>"
end

def self.inline_category_stripe(color, styles = '', insert_blank = false)
"<span style='background-color: ##{color};#{styles}'>#{insert_blank ? ' ' : ''}</span>"
def self.inline_category_stripe(color, styles = "", insert_blank = false)
"<span style='background-color: ##{color};#{styles}'>#{insert_blank ? " " : ""}</span>"
end

def self.inline_badge_wrapper_style(category)
style =
case (SiteSetting.category_style || :box).to_sym
when :bar then 'line-height: 1.25; margin-right: 5px;'
when :box then "background-color:##{category.color}; line-height: 1.5; margin-top: 5px; margin-right: 5px;"
when :bullet then 'line-height: 1; margin-right: 10px;'
when :none then ''
when :bar
"line-height: 1.25; margin-right: 5px;"
when :box
"background-color:##{category.color}; line-height: 1.5; margin-top: 5px; margin-right: 5px;"
when :bullet
"line-height: 1; margin-right: 10px;"
when :none
""
end

" style='font-size: 0.857em; white-space: nowrap; display: inline-block; position: relative; #{style}'"

@@ -34,73 +37,88 @@ module CategoryBadge

extra_classes = "#{opts[:extra_classes]} #{SiteSetting.category_style}"

result = +''
result = +""

# parent span
unless category.parent_category_id.nil? || opts[:hide_parent]
parent_category = Category.find_by(id: category.parent_category_id)
result <<
if opts[:inline_style]
case (SiteSetting.category_style || :box).to_sym
when :bar
inline_category_stripe(parent_category.color, 'display: inline-block; padding: 1px;', true)
when :box
inline_category_stripe(parent_category.color, 'display: inline-block; padding: 0 1px;', true)
when :bullet
inline_category_stripe(parent_category.color, 'display: inline-block; width: 5px; height: 10px; line-height: 1;')
when :none
''
end
else
category_stripe(parent_category.color, 'badge-category-parent-bg')
result << if opts[:inline_style]
case (SiteSetting.category_style || :box).to_sym
when :bar
inline_category_stripe(
parent_category.color,
"display: inline-block; padding: 1px;",
true,
)
when :box
inline_category_stripe(
parent_category.color,
"display: inline-block; padding: 0 1px;",
true,
)
when :bullet
inline_category_stripe(
parent_category.color,
"display: inline-block; width: 5px; height: 10px; line-height: 1;",
)
when :none
""
end
else
category_stripe(parent_category.color, "badge-category-parent-bg")
end
end

# sub parent or main category span
result <<
if opts[:inline_style]
case (SiteSetting.category_style || :box).to_sym
when :bar
inline_category_stripe(category.color, 'display: inline-block; padding: 1px;', true)
when :box
''
when :bullet
inline_category_stripe(category.color, "display: inline-block; width: #{category.parent_category_id.nil? ? 10 : 5}px; height: 10px;")
when :none
''
end
else
category_stripe(category.color, 'badge-category-bg')
result << if opts[:inline_style]
case (SiteSetting.category_style || :box).to_sym
when :bar
inline_category_stripe(category.color, "display: inline-block; padding: 1px;", true)
when :box
""
when :bullet
inline_category_stripe(
category.color,
"display: inline-block; width: #{category.parent_category_id.nil? ? 10 : 5}px; height: 10px;",
)
when :none
""
end
else
category_stripe(category.color, "badge-category-bg")
end

# category name
class_names = 'badge-category clear-badge'
description = category.description_text ? "title='#{category.description_text}'" : ''
category_url = opts[:absolute_url] ? "#{Discourse.base_url_no_prefix}#{category.url}" : category.url
class_names = "badge-category clear-badge"
description = category.description_text ? "title='#{category.description_text}'" : ""
category_url =
opts[:absolute_url] ? "#{Discourse.base_url_no_prefix}#{category.url}" : category.url

extra_span_classes =
if opts[:inline_style]
case (SiteSetting.category_style || :box).to_sym
when :bar
'color: #222222; padding: 3px; vertical-align: text-top; margin-top: -3px; display: inline-block;'
"color: #222222; padding: 3px; vertical-align: text-top; margin-top: -3px; display: inline-block;"
when :box
"color: ##{category.text_color}; padding: 0 5px;"
when :bullet
'color: #222222; vertical-align: text-top; line-height: 1; margin-left: 4px; padding-left: 2px; display: inline;'
"color: #222222; vertical-align: text-top; line-height: 1; margin-left: 4px; padding-left: 2px; display: inline;"
when :none
''
end + 'max-width: 150px; overflow: hidden; text-overflow: ellipsis;'
""
end + "max-width: 150px; overflow: hidden; text-overflow: ellipsis;"
elsif (SiteSetting.category_style).to_sym == :box
"color: ##{category.text_color}"
else
''
""
end
result << "<span style='#{extra_span_classes}' data-drop-close='true' class='#{class_names}'
#{description}>"

result << ERB::Util.html_escape(category.name) << '</span>'
result << ERB::Util.html_escape(category.name) << "</span>"

result = "<a class='badge-wrapper #{extra_classes}' href='#{category_url}'" + (opts[:inline_style] ? inline_badge_wrapper_style(category) : '') + ">#{result}</a>"
result =
"<a class='badge-wrapper #{extra_classes}' href='#{category_url}'" +
(opts[:inline_style] ? inline_badge_wrapper_style(category) : "") + ">#{result}</a>"

result.html_safe
end

@@ -3,13 +3,17 @@
require "rbconfig"

class ChromeInstalledChecker
class ChromeError < StandardError; end
class ChromeVersionError < ChromeError; end
class ChromeNotInstalled < ChromeError; end
class ChromeVersionTooLow < ChromeError; end
class ChromeError < StandardError
end
class ChromeVersionError < ChromeError
end
class ChromeNotInstalled < ChromeError
end
class ChromeVersionTooLow < ChromeError
end

def self.run
if RbConfig::CONFIG['host_os'][/darwin|mac os/]
if RbConfig::CONFIG["host_os"][/darwin|mac os/]
binary = "/Applications/Google\ Chrome.app/Contents/MacOS/Google\ Chrome"
elsif system("command -v google-chrome-stable >/dev/null;")
binary = "google-chrome-stable"

@@ -18,15 +22,15 @@ class ChromeInstalledChecker
binary ||= "chromium" if system("command -v chromium >/dev/null;")

if !binary
raise ChromeNotInstalled.new("Chrome is not installed. Download from https://www.google.com/chrome/browser/desktop/index.html")
raise ChromeNotInstalled.new(
"Chrome is not installed. Download from https://www.google.com/chrome/browser/desktop/index.html",
)
end

version = `\"#{binary}\" --version`
version_match = version.match(/[\d\.]+/)

if !version_match
raise ChromeError.new("Can't get the #{binary} version")
end
raise ChromeError.new("Can't get the #{binary} version") if !version_match

if Gem::Version.new(version_match[0]) < Gem::Version.new("59")
raise ChromeVersionTooLow.new("Chrome 59 or higher is required")
@ -28,24 +28,27 @@ class CommentMigration < ActiveRecord::Migration[4.2]
end

def down
replace_nils(comments_up).deep_merge(comments_down).each do |table|
table[1].each do |column|
table_name = table[0]
column_name = column[0]
comment = column[1]
replace_nils(comments_up)
.deep_merge(comments_down)
.each do |table|
table[1].each do |column|
table_name = table[0]
column_name = column[0]
comment = column[1]

if column_name == :_table
DB.exec "COMMENT ON TABLE #{table_name} IS ?", comment
puts " COMMENT ON TABLE #{table_name}"
else
DB.exec "COMMENT ON COLUMN #{table_name}.#{column_name} IS ?", comment
puts " COMMENT ON COLUMN #{table_name}.#{column_name}"
if column_name == :_table
DB.exec "COMMENT ON TABLE #{table_name} IS ?", comment
puts " COMMENT ON TABLE #{table_name}"
else
DB.exec "COMMENT ON COLUMN #{table_name}.#{column_name} IS ?", comment
puts " COMMENT ON COLUMN #{table_name}.#{column_name}"
end
end
end
end
end

private

def replace_nils(hash)
hash.each do |key, value|
if Hash === value
@ -12,9 +12,8 @@
# Discourse.redis.without_namespace.del CommonPasswords::LIST_KEY

class CommonPasswords

PASSWORD_FILE = File.join(Rails.root, 'lib', 'common_passwords', '10-char-common-passwords.txt')
LIST_KEY = 'discourse-common-passwords'
PASSWORD_FILE = File.join(Rails.root, "lib", "common_passwords", "10-char-common-passwords.txt")
LIST_KEY = "discourse-common-passwords"

@mutex = Mutex.new

@ -32,9 +31,7 @@ class CommonPasswords
end

def self.password_list
@mutex.synchronize do
load_passwords unless redis.scard(LIST_KEY) > 0
end
@mutex.synchronize { load_passwords unless redis.scard(LIST_KEY) > 0 }
RedisPasswordList.new
end

@ -49,5 +46,4 @@ class CommonPasswords
# tolerate this so we don't block signups
Rails.logger.error "Common passwords file #{PASSWORD_FILE} is not found! Common password checking is skipped."
end

end
@ -1,7 +1,6 @@
# frozen_string_literal: true

class ComposerMessagesFinder

def initialize(user, details)
@user = user
@details = details
@ -29,26 +28,30 @@ class ComposerMessagesFinder

if creating_topic?
count = @user.created_topic_count
education_key = 'education.new-topic'
education_key = "education.new-topic"
else
count = @user.post_count
education_key = 'education.new-reply'
education_key = "education.new-reply"
end

if count < SiteSetting.educate_until_posts
return {
id: 'education',
templateName: 'education',
wait_for_typing: true,
body: PrettyText.cook(
I18n.t(
education_key,
education_posts_text: I18n.t('education.until_posts', count: SiteSetting.educate_until_posts),
site_name: SiteSetting.title,
base_path: Discourse.base_path
)
)
}
return(
{
id: "education",
templateName: "education",
wait_for_typing: true,
body:
PrettyText.cook(
I18n.t(
education_key,
education_posts_text:
I18n.t("education.until_posts", count: SiteSetting.educate_until_posts),
site_name: SiteSetting.title,
base_path: Discourse.base_path,
),
),
}
)
end

nil
@ -59,35 +62,55 @@ class ComposerMessagesFinder
return unless replying? && @user.posted_too_much_in_topic?(@details[:topic_id])

{
id: 'too_many_replies',
templateName: 'education',
body: PrettyText.cook(I18n.t('education.too_many_replies', newuser_max_replies_per_topic: SiteSetting.newuser_max_replies_per_topic))
id: "too_many_replies",
templateName: "education",
body:
PrettyText.cook(
I18n.t(
"education.too_many_replies",
newuser_max_replies_per_topic: SiteSetting.newuser_max_replies_per_topic,
),
),
}
end

# Should a user be contacted to update their avatar?
def check_avatar_notification

# A user has to be basic at least to be considered for an avatar notification
return unless @user.has_trust_level?(TrustLevel[1])

# We don't notify users who have avatars or who have been notified already.
return if @user.uploaded_avatar_id || UserHistory.exists_for_user?(@user, :notified_about_avatar)
if @user.uploaded_avatar_id || UserHistory.exists_for_user?(@user, :notified_about_avatar)
return
end

# Do not notify user if any of the following is true:
# - "disable avatar education message" is enabled
# - "sso overrides avatar" is enabled
# - "allow uploaded avatars" is disabled
return if SiteSetting.disable_avatar_education_message || SiteSetting.discourse_connect_overrides_avatar || !TrustLevelAndStaffAndDisabledSetting.matches?(SiteSetting.allow_uploaded_avatars, @user)
if SiteSetting.disable_avatar_education_message ||
SiteSetting.discourse_connect_overrides_avatar ||
!TrustLevelAndStaffAndDisabledSetting.matches?(SiteSetting.allow_uploaded_avatars, @user)
return
end

# If we got this far, log that we've nagged them about the avatar
UserHistory.create!(action: UserHistory.actions[:notified_about_avatar], target_user_id: @user.id)
UserHistory.create!(
action: UserHistory.actions[:notified_about_avatar],
target_user_id: @user.id,
)

# Return the message
{
id: 'avatar',
templateName: 'education',
body: PrettyText.cook(I18n.t('education.avatar', profile_path: "/u/#{@user.username_lower}/preferences/account#profile-picture"))
id: "avatar",
templateName: "education",
body:
PrettyText.cook(
I18n.t(
"education.avatar",
profile_path: "/u/#{@user.username_lower}/preferences/account#profile-picture",
),
),
}
end
@ -96,39 +119,45 @@ class ComposerMessagesFinder
return unless educate_reply?(:notified_about_sequential_replies)

# Count the posts made by this user in the last day
recent_posts_user_ids = Post.where(topic_id: @details[:topic_id])
.where("created_at > ?", 1.day.ago)
.where(post_type: Post.types[:regular])
.order('created_at desc')
.limit(SiteSetting.sequential_replies_threshold)
.pluck(:user_id)
recent_posts_user_ids =
Post
.where(topic_id: @details[:topic_id])
.where("created_at > ?", 1.day.ago)
.where(post_type: Post.types[:regular])
.order("created_at desc")
.limit(SiteSetting.sequential_replies_threshold)
.pluck(:user_id)

# Did we get back as many posts as we asked for, and are they all by the current user?
return if recent_posts_user_ids.size != SiteSetting.sequential_replies_threshold ||
recent_posts_user_ids.detect { |u| u != @user.id }
if recent_posts_user_ids.size != SiteSetting.sequential_replies_threshold ||
recent_posts_user_ids.detect { |u| u != @user.id }
return
end

# If we got this far, log that we've nagged them about the sequential replies
UserHistory.create!(action: UserHistory.actions[:notified_about_sequential_replies],
target_user_id: @user.id,
topic_id: @details[:topic_id])
UserHistory.create!(
action: UserHistory.actions[:notified_about_sequential_replies],
target_user_id: @user.id,
topic_id: @details[:topic_id],
)

{
id: 'sequential_replies',
templateName: 'education',
id: "sequential_replies",
templateName: "education",
wait_for_typing: true,
extraClass: 'education-message',
extraClass: "education-message",
hide_if_whisper: true,
body: PrettyText.cook(I18n.t('education.sequential_replies'))
body: PrettyText.cook(I18n.t("education.sequential_replies")),
}
end

def check_dominating_topic
return unless educate_reply?(:notified_about_dominating_topic)

return if @topic.blank? ||
@topic.user_id == @user.id ||
@topic.posts_count < SiteSetting.summary_posts_required ||
@topic.private_message?
if @topic.blank? || @topic.user_id == @user.id ||
@topic.posts_count < SiteSetting.summary_posts_required || @topic.private_message?
return
end

posts_by_user = @user.posts.where(topic_id: @topic.id).count

@ -136,16 +165,18 @@ class ComposerMessagesFinder
return if ratio < (SiteSetting.dominating_topic_minimum_percent.to_f / 100.0)

# Log the topic notification
UserHistory.create!(action: UserHistory.actions[:notified_about_dominating_topic],
target_user_id: @user.id,
topic_id: @details[:topic_id])
UserHistory.create!(
action: UserHistory.actions[:notified_about_dominating_topic],
target_user_id: @user.id,
topic_id: @details[:topic_id],
)

{
id: 'dominating_topic',
templateName: 'dominating-topic',
id: "dominating_topic",
templateName: "dominating-topic",
wait_for_typing: true,
extraClass: 'education-message dominating-topic-message',
body: PrettyText.cook(I18n.t('education.dominating_topic'))
extraClass: "education-message dominating-topic-message",
body: PrettyText.cook(I18n.t("education.dominating_topic")),
}
end
@ -157,73 +188,85 @@ class ComposerMessagesFinder
reply_to_user_id = Post.where(id: @details[:post_id]).pluck(:user_id)[0]

# Users's last x posts in the topic
last_x_replies = @topic.
posts.
where(user_id: @user.id).
order('created_at desc').
limit(SiteSetting.get_a_room_threshold).
pluck(:reply_to_user_id).
find_all { |uid| uid != @user.id && uid == reply_to_user_id }
last_x_replies =
@topic
.posts
.where(user_id: @user.id)
.order("created_at desc")
.limit(SiteSetting.get_a_room_threshold)
.pluck(:reply_to_user_id)
.find_all { |uid| uid != @user.id && uid == reply_to_user_id }

return unless last_x_replies.size == SiteSetting.get_a_room_threshold
return unless @topic.posts.count('distinct user_id') >= min_users_posted
return unless @topic.posts.count("distinct user_id") >= min_users_posted

UserHistory.create!(action: UserHistory.actions[:notified_about_get_a_room],
target_user_id: @user.id,
topic_id: @details[:topic_id])
UserHistory.create!(
action: UserHistory.actions[:notified_about_get_a_room],
target_user_id: @user.id,
topic_id: @details[:topic_id],
)

reply_username = User.where(id: last_x_replies[0]).pluck_first(:username)

{
id: 'get_a_room',
templateName: 'get-a-room',
id: "get_a_room",
templateName: "get-a-room",
wait_for_typing: true,
reply_username: reply_username,
extraClass: 'education-message get-a-room',
body: PrettyText.cook(
I18n.t(
'education.get_a_room',
count: SiteSetting.get_a_room_threshold,
reply_username: reply_username,
base_path: Discourse.base_path
)
)
extraClass: "education-message get-a-room",
body:
PrettyText.cook(
I18n.t(
"education.get_a_room",
count: SiteSetting.get_a_room_threshold,
reply_username: reply_username,
base_path: Discourse.base_path,
),
),
}
end

def check_reviving_old_topic
return unless replying?
return if @topic.nil? ||
SiteSetting.warn_reviving_old_topic_age < 1 ||
@topic.last_posted_at.nil? ||
@topic.last_posted_at > SiteSetting.warn_reviving_old_topic_age.days.ago
if @topic.nil? || SiteSetting.warn_reviving_old_topic_age < 1 || @topic.last_posted_at.nil? ||
@topic.last_posted_at > SiteSetting.warn_reviving_old_topic_age.days.ago
return
end

{
id: 'reviving_old',
templateName: 'education',
id: "reviving_old",
templateName: "education",
wait_for_typing: false,
extraClass: 'education-message',
body: PrettyText.cook(
I18n.t(
'education.reviving_old_topic',
time_ago: FreedomPatches::Rails4.time_ago_in_words(@topic.last_posted_at, false, scope: :'datetime.distance_in_words_verbose')
)
)
extraClass: "education-message",
body:
PrettyText.cook(
I18n.t(
"education.reviving_old_topic",
time_ago:
FreedomPatches::Rails4.time_ago_in_words(
@topic.last_posted_at,
false,
scope: :"datetime.distance_in_words_verbose",
),
),
),
}
end

def self.user_not_seen_in_a_while(usernames)
User.where(username_lower: usernames).where("last_seen_at < ?", SiteSetting.pm_warn_user_last_seen_months_ago.months.ago).pluck(:username).sort
User
.where(username_lower: usernames)
.where("last_seen_at < ?", SiteSetting.pm_warn_user_last_seen_months_ago.months.ago)
.pluck(:username)
.sort
end

private

def educate_reply?(type)
replying? &&
@details[:topic_id] &&
(@topic.present? && !@topic.private_message?) &&
(@user.post_count >= SiteSetting.educate_until_posts) &&
!UserHistory.exists_for_user?(@user, type, topic_id: @details[:topic_id])
replying? && @details[:topic_id] && (@topic.present? && !@topic.private_message?) &&
(@user.post_count >= SiteSetting.educate_until_posts) &&
!UserHistory.exists_for_user?(@user, type, topic_id: @details[:topic_id])
end

def creating_topic?
@ -237,5 +280,4 @@ class ComposerMessagesFinder
def editing_post?
@details[:composer_action] == "edit"
end

end
@ -9,12 +9,13 @@ module Compression
Compression::Zip.new,
Compression::Pipeline.new([Compression::Tar.new, Compression::Gzip.new]),
Compression::Gzip.new,
Compression::Tar.new
Compression::Tar.new,
]
end

def self.engine_for(filename, strategies: default_strategies)
strategy = strategies.detect(-> { raise UnsupportedFileExtension }) { |e| e.can_handle?(filename) }
strategy =
strategies.detect(-> { raise UnsupportedFileExtension }) { |e| e.can_handle?(filename) }
new(strategy)
end
@ -3,12 +3,17 @@
module Compression
class Gzip < Strategy
def extension
'.gz'
".gz"
end

def compress(path, target_name)
gzip_target = sanitize_path("#{path}/#{target_name}")
Discourse::Utils.execute_command('gzip', '-5', gzip_target, failure_message: "Failed to gzip file.")
Discourse::Utils.execute_command(
"gzip",
"-5",
gzip_target,
failure_message: "Failed to gzip file.",
)

"#{gzip_target}.gz"
end
@ -23,7 +28,8 @@ module Compression
true
end

def extract_folder(_entry, _entry_path); end
def extract_folder(_entry, _entry_path)
end

def get_compressed_file_stream(compressed_file_path)
gzip = Zlib::GzipReader.open(compressed_file_path)
@ -32,7 +38,7 @@ module Compression

def build_entry_path(dest_path, _, compressed_file_path)
basename = File.basename(compressed_file_path)
basename.gsub!(/#{Regexp.escape(extension)}$/, '')
basename.gsub!(/#{Regexp.escape(extension)}$/, "")
File.join(dest_path, basename)
end

@ -44,12 +50,11 @@ module Compression
remaining_size = available_size

if ::File.exist?(entry_path)
raise ::Zip::DestinationFileExistsError,
"Destination '#{entry_path}' already exists"
raise ::Zip::DestinationFileExistsError, "Destination '#{entry_path}' already exists"
end # Change this later.

::File.open(entry_path, 'wb') do |os|
buf = ''.dup
::File.open(entry_path, "wb") do |os|
buf = "".dup
while (buf = entry.read(chunk_size))
remaining_size -= chunk_size
raise ExtractFailed if remaining_size.negative?
@ -7,25 +7,27 @@ module Compression
end

def extension
@strategies.reduce('') { |ext, strategy| ext += strategy.extension }
@strategies.reduce("") { |ext, strategy| ext += strategy.extension }
end

def compress(path, target_name)
current_target = target_name
@strategies.reduce('') do |compressed_path, strategy|
@strategies.reduce("") do |compressed_path, strategy|
compressed_path = strategy.compress(path, current_target)
current_target = compressed_path.split('/').last
current_target = compressed_path.split("/").last

compressed_path
end
end

def decompress(dest_path, compressed_file_path, max_size)
@strategies.reverse.reduce(compressed_file_path) do |to_decompress, strategy|
next_compressed_file = strategy.decompress(dest_path, to_decompress, max_size)
FileUtils.rm_rf(to_decompress)
next_compressed_file
end
@strategies
.reverse
.reduce(compressed_file_path) do |to_decompress, strategy|
next_compressed_file = strategy.decompress(dest_path, to_decompress, max_size)
FileUtils.rm_rf(to_decompress)
next_compressed_file
end
end
end
end
@ -18,9 +18,7 @@ module Compression

entries_of(compressed_file).each do |entry|
entry_path = build_entry_path(sanitized_dest_path, entry, sanitized_compressed_file_path)
if !is_safe_path_for_extraction?(entry_path, sanitized_dest_path)
next
end
next if !is_safe_path_for_extraction?(entry_path, sanitized_dest_path)

FileUtils.mkdir_p(File.dirname(entry_path))
if is_file?(entry)
@ -45,10 +43,10 @@ module Compression
filename.strip.tap do |name|
# NOTE: File.basename doesn't work right with Windows paths on Unix
# get only the filename, not the whole path
name.sub! /\A.*(\\|\/)/, ''
name.sub! %r{\A.*(\\|/)}, ""
# Finally, replace all non alphanumeric, underscore
# or periods with underscore
name.gsub! /[^\w\.\-]/, '_'
name.gsub! /[^\w\.\-]/, "_"
end
end

@ -75,7 +73,7 @@ module Compression
raise DestinationFileExistsError, "Destination '#{entry_path}' already exists"
end

::File.open(entry_path, 'wb') do |os|
::File.open(entry_path, "wb") do |os|
while (buf = entry.read(chunk_size))
remaining_size -= buf.size
raise ExtractFailed if remaining_size.negative?
@ -1,23 +1,31 @@
# frozen_string_literal: true

require 'rubygems/package'
require "rubygems/package"

module Compression
class Tar < Strategy
def extension
'.tar'
".tar"
end

def compress(path, target_name)
tar_filename = sanitize_filename("#{target_name}.tar")
Discourse::Utils.execute_command('tar', '--create', '--file', tar_filename, target_name, failure_message: "Failed to tar file.")
Discourse::Utils.execute_command(
"tar",
"--create",
"--file",
tar_filename,
target_name,
failure_message: "Failed to tar file.",
)

sanitize_path("#{path}/#{tar_filename}")
end

private

def extract_folder(_entry, _entry_path); end
def extract_folder(_entry, _entry_path)
end

def get_compressed_file_stream(compressed_file_path)
file_stream = IO.new(IO.sysopen(compressed_file_path))
@ -1,11 +1,11 @@
# frozen_string_literal: true

require 'zip'
require "zip"

module Compression
class Zip < Strategy
def extension
'.zip'
".zip"
end

def compress(path, target_name)
@ -15,7 +15,7 @@ module Compression
::Zip::File.open(zip_filename, ::Zip::File::CREATE) do |zipfile|
if File.directory?(absolute_path)
entries = Dir.entries(absolute_path) - %w[. ..]
write_entries(entries, absolute_path, '', zipfile)
write_entries(entries, absolute_path, "", zipfile)
else
put_into_archive(absolute_path, zipfile, target_name)
end
@ -47,15 +47,14 @@ module Compression
remaining_size = available_size

if ::File.exist?(entry_path)
raise ::Zip::DestinationFileExistsError,
"Destination '#{entry_path}' already exists"
raise ::Zip::DestinationFileExistsError, "Destination '#{entry_path}' already exists"
end

::File.open(entry_path, 'wb') do |os|
::File.open(entry_path, "wb") do |os|
entry.get_input_stream do |is|
entry.set_extra_attributes_on_path(entry_path)

buf = ''.dup
buf = "".dup
while (buf = is.sysread(chunk_size, buf))
remaining_size -= chunk_size
raise ExtractFailed if remaining_size.negative?
@ -70,7 +69,7 @@ module Compression
# A helper method to make the recursion work.
def write_entries(entries, base_path, path, zipfile)
entries.each do |e|
zipfile_path = path == '' ? e : File.join(path, e)
zipfile_path = path == "" ? e : File.join(path, e)
disk_file_path = File.join(base_path, zipfile_path)

if File.directory? disk_file_path
@ -1,7 +1,6 @@
# frozen_string_literal: true

module ConfigurableUrls

def faq_path
SiteSetting.faq_url.blank? ? "#{Discourse.base_path}/faq" : SiteSetting.faq_url
end
@ -11,7 +10,10 @@ module ConfigurableUrls
end

def privacy_path
SiteSetting.privacy_policy_url.blank? ? "#{Discourse.base_path}/privacy" : SiteSetting.privacy_policy_url
if SiteSetting.privacy_policy_url.blank?
"#{Discourse.base_path}/privacy"
else
SiteSetting.privacy_policy_url
end
end

end
@ -3,7 +3,6 @@
# this class is used to track changes to an arbitrary buffer

class ContentBuffer

def initialize(initial_content)
@initial_content = initial_content
@lines = @initial_content.split("\n")
@ -17,7 +16,6 @@ class ContentBuffer
text = transform[:text]

if transform[:operation] == :delete

# fix first line

l = @lines[start_row]
@ -32,16 +30,13 @@ class ContentBuffer
@lines[start_row] = l

# remove middle lines
(finish_row - start_row).times do
l = @lines.delete_at start_row + 1
end
(finish_row - start_row).times { l = @lines.delete_at start_row + 1 }

# fix last line
@lines[start_row] << @lines[finish_row][finish_col - 1..-1]
end

if transform[:operation] == :insert

@lines[start_row].insert(start_col, text)

split = @lines[start_row].split("\n")
@ -56,7 +51,6 @@ class ContentBuffer
@lines.insert(i, "") unless @lines.length > i
@lines[i] = split[-1] + @lines[i]
end

end
end
@ -1,6 +1,6 @@
# frozen_string_literal: true
require 'content_security_policy/builder'
require 'content_security_policy/extension'
require "content_security_policy/builder"
require "content_security_policy/extension"

class ContentSecurityPolicy
class << self
@ -1,5 +1,5 @@
# frozen_string_literal: true
require 'content_security_policy/default'
require "content_security_policy/default"

class ContentSecurityPolicy
class Builder
@ -33,7 +33,9 @@ class ContentSecurityPolicy
def <<(extension)
return unless valid_extension?(extension)

extension.each { |directive, sources| extend_directive(normalize_directive(directive), sources) }
extension.each do |directive, sources|
extend_directive(normalize_directive(directive), sources)
end
end

def build
@ -53,7 +55,7 @@ class ContentSecurityPolicy
private

def normalize_directive(directive)
directive.to_s.gsub('-', '_').to_sym
directive.to_s.gsub("-", "_").to_sym
end

def normalize_source(source)
@ -1,5 +1,5 @@
# frozen_string_literal: true
require 'content_security_policy'
require "content_security_policy"

class ContentSecurityPolicy
class Default
@ -7,16 +7,19 @@ class ContentSecurityPolicy

def initialize(base_url:)
@base_url = base_url
@directives = {}.tap do |directives|
directives[:upgrade_insecure_requests] = [] if SiteSetting.force_https
directives[:base_uri] = [:self]
directives[:object_src] = [:none]
directives[:script_src] = script_src
directives[:worker_src] = worker_src
directives[:report_uri] = report_uri if SiteSetting.content_security_policy_collect_reports
directives[:frame_ancestors] = frame_ancestors if restrict_embed?
directives[:manifest_src] = ["'self'"]
end
@directives =
{}.tap do |directives|
directives[:upgrade_insecure_requests] = [] if SiteSetting.force_https
directives[:base_uri] = [:self]
directives[:object_src] = [:none]
directives[:script_src] = script_src
directives[:worker_src] = worker_src
directives[
:report_uri
] = report_uri if SiteSetting.content_security_policy_collect_reports
directives[:frame_ancestors] = frame_ancestors if restrict_embed?
directives[:manifest_src] = ["'self'"]
end
end

private
@ -27,27 +30,34 @@ class ContentSecurityPolicy

SCRIPT_ASSET_DIRECTORIES = [
# [dir, can_use_s3_cdn, can_use_cdn, for_worker]
['/assets/', true, true, true],
['/brotli_asset/', true, true, true],
['/extra-locales/', false, false, false],
['/highlight-js/', false, true, false],
['/javascripts/', false, true, true],
['/plugins/', false, true, true],
['/theme-javascripts/', false, true, false],
['/svg-sprite/', false, true, false],
["/assets/", true, true, true],
["/brotli_asset/", true, true, true],
["/extra-locales/", false, false, false],
["/highlight-js/", false, true, false],
["/javascripts/", false, true, true],
["/plugins/", false, true, true],
["/theme-javascripts/", false, true, false],
["/svg-sprite/", false, true, false],
]

def script_assets(base = base_url, s3_cdn = GlobalSetting.s3_asset_cdn_url.presence || GlobalSetting.s3_cdn_url, cdn = GlobalSetting.cdn_url, worker: false)
SCRIPT_ASSET_DIRECTORIES.map do |dir, can_use_s3_cdn, can_use_cdn, for_worker|
next if worker && !for_worker
if can_use_s3_cdn && s3_cdn
s3_cdn + dir
elsif can_use_cdn && cdn
cdn + Discourse.base_path + dir
else
base + dir
def script_assets(
base = base_url,
s3_cdn = GlobalSetting.s3_asset_cdn_url.presence || GlobalSetting.s3_cdn_url,
cdn = GlobalSetting.cdn_url,
worker: false
)
SCRIPT_ASSET_DIRECTORIES
.map do |dir, can_use_s3_cdn, can_use_cdn, for_worker|
next if worker && !for_worker
if can_use_s3_cdn && s3_cdn
s3_cdn + dir
elsif can_use_cdn && cdn
cdn + Discourse.base_path + dir
else
base + dir
end
end
end.compact
.compact
end

def script_src
@ -55,7 +65,7 @@ class ContentSecurityPolicy
"#{base_url}/logs/",
"#{base_url}/sidekiq/",
"#{base_url}/mini-profiler-resources/",
*script_assets
*script_assets,
].tap do |sources|
sources << :report_sample if SiteSetting.content_security_policy_collect_reports
sources << :unsafe_eval if Rails.env.development? # TODO remove this once we have proper source maps in dev
@ -67,23 +77,25 @@ class ContentSecurityPolicy
end

# we need analytics.js still as gtag/js is a script wrapper for it
sources << 'https://www.google-analytics.com/analytics.js' if SiteSetting.ga_universal_tracking_code.present?
sources << 'https://www.googletagmanager.com/gtag/js' if SiteSetting.ga_universal_tracking_code.present? && SiteSetting.ga_version == "v4_gtag"
if SiteSetting.ga_universal_tracking_code.present?
sources << "https://www.google-analytics.com/analytics.js"
end
if SiteSetting.ga_universal_tracking_code.present? && SiteSetting.ga_version == "v4_gtag"
sources << "https://www.googletagmanager.com/gtag/js"
end
if SiteSetting.gtm_container_id.present?
sources << 'https://www.googletagmanager.com/gtm.js'
sources << "https://www.googletagmanager.com/gtm.js"
sources << "'nonce-#{ApplicationHelper.google_tag_manager_nonce}'"
end

if SiteSetting.splash_screen
sources << "'#{SplashScreenHelper.fingerprint}'"
end
sources << "'#{SplashScreenHelper.fingerprint}'" if SiteSetting.splash_screen
end
end

def worker_src
[
"'self'", # For service worker
*script_assets(worker: true)
*script_assets(worker: true),
]
end

@ -92,15 +104,11 @@ class ContentSecurityPolicy
end

def frame_ancestors
[
"'self'",
*EmbeddableHost.pluck(:host).map { |host| "https://#{host}" }
]
["'self'", *EmbeddableHost.pluck(:host).map { |host| "https://#{host}" }]
end

def restrict_embed?
SiteSetting.content_security_policy_frame_ancestors &&
!SiteSetting.embed_any_origin
SiteSetting.content_security_policy_frame_ancestors && !SiteSetting.embed_any_origin
end
end
end
@ -4,12 +4,12 @@ class ContentSecurityPolicy
extend self

def site_setting_extension
{ script_src: SiteSetting.content_security_policy_script_src.split('|') }
{ script_src: SiteSetting.content_security_policy_script_src.split("|") }
end

def path_specific_extension(path_info)
{}.tap do |obj|
for_qunit_route = !Rails.env.production? && ["/qunit", "/wizard/qunit"].include?(path_info)
for_qunit_route = !Rails.env.production? && %w[/qunit /wizard/qunit].include?(path_info)
for_qunit_route ||= "/theme-qunit" == path_info
obj[:script_src] = :unsafe_eval if for_qunit_route
end
@ -23,7 +23,7 @@ class ContentSecurityPolicy
end
end

THEME_SETTING = 'extend_content_security_policy'
THEME_SETTING = "extend_content_security_policy"

def theme_extensions(theme_id)
key = "theme_extensions_#{theme_id}"
@ -37,47 +37,55 @@ class ContentSecurityPolicy
private

def cache
@cache ||= DistributedCache.new('csp_extensions')
@cache ||= DistributedCache.new("csp_extensions")
end

def find_theme_extensions(theme_id)
extensions = []
theme_ids = Theme.transform_ids(theme_id)

Theme.where(id: theme_ids).find_each do |theme|
theme.cached_settings.each do |setting, value|
extensions << build_theme_extension(value.split("|")) if setting.to_s == THEME_SETTING
Theme
.where(id: theme_ids)
.find_each do |theme|
theme.cached_settings.each do |setting, value|
extensions << build_theme_extension(value.split("|")) if setting.to_s == THEME_SETTING
end
end
end

extensions << build_theme_extension(ThemeModifierHelper.new(theme_ids: theme_ids).csp_extensions)

html_fields = ThemeField.where(
theme_id: theme_ids,
target_id: ThemeField.basic_targets.map { |target| Theme.targets[target.to_sym] },
name: ThemeField.html_fields
extensions << build_theme_extension(
ThemeModifierHelper.new(theme_ids: theme_ids).csp_extensions,
)

html_fields =
ThemeField.where(
theme_id: theme_ids,
target_id: ThemeField.basic_targets.map { |target| Theme.targets[target.to_sym] },
name: ThemeField.html_fields,
)

auto_script_src_extension = { script_src: [] }
html_fields.each(&:ensure_baked!)
doc = html_fields.map(&:value_baked).join("\n")

Nokogiri::HTML5.fragment(doc).css('script[src]').each do |node|
src = node['src']
uri = URI(src)
Nokogiri::HTML5
.fragment(doc)
.css("script[src]")
.each do |node|
src = node["src"]
uri = URI(src)

next if GlobalSetting.cdn_url && src.starts_with?(GlobalSetting.cdn_url) # Ignore CDN urls (theme-javascripts)
next if uri.host.nil? # Ignore same-domain scripts (theme-javascripts)
next if uri.path.nil? # Ignore raw hosts
next if GlobalSetting.cdn_url && src.starts_with?(GlobalSetting.cdn_url) # Ignore CDN urls (theme-javascripts)
next if uri.host.nil? # Ignore same-domain scripts (theme-javascripts)
next if uri.path.nil? # Ignore raw hosts

uri.query = nil # CSP should not include query part of url
uri.query = nil # CSP should not include query part of url

uri_string = uri.to_s.sub(/^\/\//, '') # Protocol-less CSP should not have // at beginning of URL
uri_string = uri.to_s.sub(%r{^//}, "") # Protocol-less CSP should not have // at beginning of URL

auto_script_src_extension[:script_src] << uri_string
rescue URI::Error
# Ignore invalid URI
end
auto_script_src_extension[:script_src] << uri_string
rescue URI::Error
# Ignore invalid URI
end

extensions << auto_script_src_extension

@ -87,7 +95,7 @@ class ContentSecurityPolicy
def build_theme_extension(entries)
{}.tap do |extension|
entries.each do |entry|
directive, source = entry.split(':', 2).map(&:strip)
directive, source = entry.split(":", 2).map(&:strip)

extension[directive] ||= []
extension[directive] << source
@ -1,5 +1,5 @@
# frozen_string_literal: true
require 'content_security_policy'
require "content_security_policy"

class ContentSecurityPolicy
class Middleware
@ -19,8 +19,16 @@ class ContentSecurityPolicy

theme_id = env[:resolved_theme_id]

headers['Content-Security-Policy'] = policy(theme_id, base_url: base_url, path_info: env["PATH_INFO"]) if SiteSetting.content_security_policy
headers['Content-Security-Policy-Report-Only'] = policy(theme_id, base_url: base_url, path_info: env["PATH_INFO"]) if SiteSetting.content_security_policy_report_only
headers["Content-Security-Policy"] = policy(
theme_id,
base_url: base_url,
path_info: env["PATH_INFO"],
) if SiteSetting.content_security_policy
headers["Content-Security-Policy-Report-Only"] = policy(
theme_id,
base_url: base_url,
path_info: env["PATH_INFO"],
) if SiteSetting.content_security_policy_report_only

response
end
@ -30,7 +38,7 @@ class ContentSecurityPolicy
delegate :policy, to: :ContentSecurityPolicy

def html_response?(headers)
headers['Content-Type'] && headers['Content-Type'] =~ /html/
headers["Content-Type"] && headers["Content-Type"] =~ /html/
end
end
end
@ -7,7 +7,7 @@ class CookedPostProcessor
include CookedProcessorMixin

LIGHTBOX_WRAPPER_CSS_CLASS = "lightbox-wrapper"
GIF_SOURCES_REGEXP = /(giphy|tenor)\.com\//
GIF_SOURCES_REGEXP = %r{(giphy|tenor)\.com/}

attr_reader :cooking_options, :doc

@ -61,25 +61,27 @@ class CookedPostProcessor
return if @post.user.blank? || !Guardian.new.can_see?(@post)

BadgeGranter.grant(Badge.find(Badge::FirstEmoji), @post.user, post_id: @post.id) if has_emoji?
BadgeGranter.grant(Badge.find(Badge::FirstOnebox), @post.user, post_id: @post.id) if @has_oneboxes
BadgeGranter.grant(Badge.find(Badge::FirstReplyByEmail), @post.user, post_id: @post.id) if @post.is_reply_by_email?
if @has_oneboxes
BadgeGranter.grant(Badge.find(Badge::FirstOnebox), @post.user, post_id: @post.id)
end
if @post.is_reply_by_email?
BadgeGranter.grant(Badge.find(Badge::FirstReplyByEmail), @post.user, post_id: @post.id)
end
end

def post_process_quotes
@doc.css("aside.quote").each do |q|
post_number = q['data-post']
topic_id = q['data-topic']
if topic_id && post_number
comparer = QuoteComparer.new(
topic_id.to_i,
post_number.to_i,
q.css('blockquote').text
)
@doc
.css("aside.quote")
.each do |q|
post_number = q["data-post"]
topic_id = q["data-topic"]
if topic_id && post_number
comparer = QuoteComparer.new(topic_id.to_i, post_number.to_i, q.css("blockquote").text)

q['class'] = ((q['class'] || '') + " quote-post-not-found").strip if comparer.missing?
q['class'] = ((q['class'] || '') + " quote-modified").strip if comparer.modified?
q["class"] = ((q["class"] || "") + " quote-post-not-found").strip if comparer.missing?
q["class"] = ((q["class"] || "") + " quote-modified").strip if comparer.modified?
end
end
end
end

def remove_full_quote_on_direct_reply
@ -87,66 +89,68 @@ class CookedPostProcessor
return if @post.post_number == 1
return if @doc.xpath("aside[contains(@class, 'quote')]").size != 1

previous = Post
.where("post_number < ? AND topic_id = ? AND post_type = ? AND NOT hidden", @post.post_number, @post.topic_id, Post.types[:regular])
.order("post_number DESC")
.limit(1)
.pluck(:cooked)
.first
previous =
Post
.where(
"post_number < ? AND topic_id = ? AND post_type = ? AND NOT hidden",
@post.post_number,
@post.topic_id,
Post.types[:regular],
)
.order("post_number DESC")
.limit(1)
.pluck(:cooked)
.first

return if previous.blank?

previous_text = Nokogiri::HTML5::fragment(previous).text.strip
previous_text = Nokogiri::HTML5.fragment(previous).text.strip
quoted_text = @doc.css("aside.quote:first-child blockquote").first&.text&.strip || ""

return if previous_text.gsub(/(\s){2,}/, '\1') != quoted_text.gsub(/(\s){2,}/, '\1')

quote_regexp = /\A\s*\[quote.+\[\/quote\]/im
quote_regexp = %r{\A\s*\[quote.+\[/quote\]}im
quoteless_raw = @post.raw.sub(quote_regexp, "").strip

return if @post.raw.strip == quoteless_raw

PostRevisor.new(@post).revise!(
Discourse.system_user,
{
raw: quoteless_raw,
edit_reason: I18n.t(:removed_direct_reply_full_quotes)
},
{ raw: quoteless_raw, edit_reason: I18n.t(:removed_direct_reply_full_quotes) },
skip_validations: true,
bypass_bump: true
bypass_bump: true,
)
end

def extract_images
# all images with a src attribute
@doc.css("img[src], img[#{PrettyText::BLOCKED_HOTLINKED_SRC_ATTR}]") -
# minus data images
@doc.css("img[src^='data']") -
# minus emojis
@doc.css("img.emoji")
# minus data images
@doc.css("img[src^='data']") -
# minus emojis
@doc.css("img.emoji")
end

def extract_images_for_post
# all images with a src attribute
@doc.css("img[src]") -
# minus emojis
@doc.css("img.emoji") -
# minus images inside quotes
@doc.css(".quote img") -
# minus onebox site icons
@doc.css("img.site-icon") -
# minus onebox avatars
@doc.css("img.onebox-avatar") -
@doc.css("img.onebox-avatar-inline") -
# minus github onebox profile images
@doc.css(".onebox.githubfolder img")
# minus emojis
@doc.css("img.emoji") -
# minus images inside quotes
@doc.css(".quote img") -
# minus onebox site icons
@doc.css("img.site-icon") -
# minus onebox avatars
@doc.css("img.onebox-avatar") - @doc.css("img.onebox-avatar-inline") -
# minus github onebox profile images
@doc.css(".onebox.githubfolder img")
end

def convert_to_link!(img)
w, h = img["width"].to_i, img["height"].to_i
user_width, user_height = (w > 0 && h > 0 && [w, h]) ||
get_size_from_attributes(img) ||
get_size_from_image_sizes(img["src"], @opts[:image_sizes])
user_width, user_height =
(w > 0 && h > 0 && [w, h]) || get_size_from_attributes(img) ||
get_size_from_image_sizes(img["src"], @opts[:image_sizes])

limit_size!(img)

@ -155,7 +159,7 @@ class CookedPostProcessor

upload = Upload.get_from_url(src)

original_width, original_height = nil
original_width, original_height = nil

if (upload.present?)
original_width = upload.width || 0
@ -172,12 +176,17 @@ class CookedPostProcessor
img.add_class("animated")
end

return if original_width <= SiteSetting.max_image_width && original_height <= SiteSetting.max_image_height
if original_width <= SiteSetting.max_image_width &&
original_height <= SiteSetting.max_image_height
return
end

user_width, user_height = [original_width, original_height] if user_width.to_i <= 0 && user_height.to_i <= 0
user_width, user_height = [original_width, original_height] if user_width.to_i <= 0 &&
user_height.to_i <= 0
width, height = user_width, user_height

crop = SiteSetting.min_ratio_to_crop > 0 && width.to_f / height.to_f < SiteSetting.min_ratio_to_crop
crop =
SiteSetting.min_ratio_to_crop > 0 && width.to_f / height.to_f < SiteSetting.min_ratio_to_crop

if crop
width, height = ImageSizer.crop(width, height)
@ -200,7 +209,7 @@ class CookedPostProcessor

return if upload.animated?

if img.ancestors('.onebox, .onebox-body, .quote').blank? && !img.classes.include?("onebox")
if img.ancestors(".onebox, .onebox-body, .quote").blank? && !img.classes.include?("onebox")
add_lightbox!(img, original_width, original_height, upload, cropped: crop)
end

@ -211,7 +220,7 @@ class CookedPostProcessor
def each_responsive_ratio
SiteSetting
.responsive_post_image_sizes
.split('|')
.split("|")
.map(&:to_f)
.sort
.each { |r| yield r if r > 1 }
@ -239,13 +248,16 @@ class CookedPostProcessor
srcset << ", #{cooked_url} #{ratio.to_s.sub(/\.0$/, "")}x"
end

img["srcset"] = "#{UrlHelper.cook_url(img["src"], secure: @post.with_secure_uploads?)}#{srcset}" if srcset.present?
img[
"srcset"
] = "#{UrlHelper.cook_url(img["src"], secure: @post.with_secure_uploads?)}#{srcset}" if srcset.present?
end
else
img["src"] = upload.url
end

if !@disable_dominant_color && (color = upload.dominant_color(calculate_if_missing: true).presence)
if !@disable_dominant_color &&
(color = upload.dominant_color(calculate_if_missing: true).presence)
img["data-dominant-color"] = color
end
end
@ -261,9 +273,7 @@ class CookedPostProcessor
a = create_link_node("lightbox", src)
img.add_next_sibling(a)

if upload
a["data-download-href"] = Discourse.store.download_url(upload)
end
a["data-download-href"] = Discourse.store.download_url(upload) if upload

a.add_child(img)

@ -309,48 +319,55 @@ class CookedPostProcessor
@post.update_column(:image_upload_id, upload.id) # post
if @post.is_first_post? # topic
@post.topic.update_column(:image_upload_id, upload.id)
extra_sizes = ThemeModifierHelper.new(theme_ids: Theme.user_selectable.pluck(:id)).topic_thumbnail_sizes
extra_sizes =
ThemeModifierHelper.new(theme_ids: Theme.user_selectable.pluck(:id)).topic_thumbnail_sizes
@post.topic.generate_thumbnails!(extra_sizes: extra_sizes)
end
else
@post.update_column(:image_upload_id, nil) if @post.image_upload_id
@post.topic.update_column(:image_upload_id, nil) if @post.topic.image_upload_id && @post.is_first_post?
if @post.topic.image_upload_id && @post.is_first_post?
@post.topic.update_column(:image_upload_id, nil)
end
nil
end
end

def optimize_urls
%w{href data-download-href}.each do |selector|
@doc.css("a[#{selector}]").each do |a|
a[selector] = UrlHelper.cook_url(a[selector].to_s)
end
%w[href data-download-href].each do |selector|
@doc.css("a[#{selector}]").each { |a| a[selector] = UrlHelper.cook_url(a[selector].to_s) }
end

%w{src}.each do |selector|
@doc.css("img[#{selector}]").each do |img|
custom_emoji = img["class"]&.include?("emoji-custom") && Emoji.custom?(img["title"])
img[selector] = UrlHelper.cook_url(
img[selector].to_s, secure: @post.with_secure_uploads? && !custom_emoji
)
end
%w[src].each do |selector|
@doc
.css("img[#{selector}]")
.each do |img|
custom_emoji = img["class"]&.include?("emoji-custom") && Emoji.custom?(img["title"])
img[selector] = UrlHelper.cook_url(
img[selector].to_s,
secure: @post.with_secure_uploads? && !custom_emoji,
)
end
end
end

def remove_user_ids
@doc.css("a[href]").each do |a|
uri = begin
URI(a["href"])
rescue URI::Error
next
@doc
.css("a[href]")
.each do |a|
uri =
begin
URI(a["href"])
rescue URI::Error
next
end
next if uri.hostname != Discourse.current_hostname

query = Rack::Utils.parse_nested_query(uri.query)
next if !query.delete("u")

uri.query = query.map { |k, v| "#{k}=#{v}" }.join("&").presence
a["href"] = uri.to_s
end
next if uri.hostname != Discourse.current_hostname

query = Rack::Utils.parse_nested_query(uri.query)
next if !query.delete("u")

uri.query = query.map { |k, v| "#{k}=#{v}" }.join("&").presence
a["href"] = uri.to_s
end
end

def enforce_nofollow
@ -369,13 +386,14 @@ class CookedPostProcessor

def process_hotlinked_image(img)
@hotlinked_map ||= @post.post_hotlinked_media.preload(:upload).map { |r| [r.url, r] }.to_h
normalized_src = PostHotlinkedMedia.normalize_src(img["src"] || img[PrettyText::BLOCKED_HOTLINKED_SRC_ATTR])
normalized_src =
PostHotlinkedMedia.normalize_src(img["src"] || img[PrettyText::BLOCKED_HOTLINKED_SRC_ATTR])
info = @hotlinked_map[normalized_src]

still_an_image = true

if info&.too_large?
if img.ancestors('.onebox, .onebox-body').blank?
if img.ancestors(".onebox, .onebox-body").blank?
add_large_image_placeholder!(img)
else
img.remove
@ -383,7 +401,7 @@ class CookedPostProcessor

still_an_image = false
elsif info&.download_failed?
if img.ancestors('.onebox, .onebox-body').blank?
if img.ancestors(".onebox, .onebox-body").blank?
add_broken_image_placeholder!(img)
else
img.remove
@ -399,28 +417,29 @@ class CookedPostProcessor
end

def add_blocked_hotlinked_media_placeholders
@doc.css([
"[#{PrettyText::BLOCKED_HOTLINKED_SRC_ATTR}]",
"[#{PrettyText::BLOCKED_HOTLINKED_SRCSET_ATTR}]",
].join(',')).each do |el|
src = el[PrettyText::BLOCKED_HOTLINKED_SRC_ATTR] ||
el[PrettyText::BLOCKED_HOTLINKED_SRCSET_ATTR]&.split(',')&.first&.split(' ')&.first
@doc
.css(
[
"[#{PrettyText::BLOCKED_HOTLINKED_SRC_ATTR}]",
"[#{PrettyText::BLOCKED_HOTLINKED_SRCSET_ATTR}]",
].join(","),
)
.each do |el|
src =
el[PrettyText::BLOCKED_HOTLINKED_SRC_ATTR] ||
el[PrettyText::BLOCKED_HOTLINKED_SRCSET_ATTR]&.split(",")&.first&.split(" ")&.first

if el.name == "img"
add_blocked_hotlinked_image_placeholder!(el)
next
if el.name == "img"
add_blocked_hotlinked_image_placeholder!(el)
next
end

el = el.parent if %w[video audio].include?(el.parent.name)

el = el.parent if el.parent.classes.include?("video-container")

add_blocked_hotlinked_media_placeholder!(el, src)
end

if ["video", "audio"].include?(el.parent.name)
el = el.parent
end

if el.parent.classes.include?("video-container")
el = el.parent
end

add_blocked_hotlinked_media_placeholder!(el, src)
end
end

def is_svg?(img)
@ -431,6 +450,6 @@ class CookedPostProcessor
nil
end

File.extname(path) == '.svg' if path
File.extname(path) == ".svg" if path
end
end
@ -1,7 +1,6 @@
# frozen_string_literal: true

module CookedProcessorMixin

def post_process_oneboxes
limit = SiteSetting.max_oneboxes_per_post - @doc.css("aside.onebox, a.inline-onebox").size
oneboxes = {}
@ -14,7 +13,7 @@ module CookedProcessorMixin

if skip_onebox
if is_onebox
element.remove_class('onebox')
element.remove_class("onebox")
else
remove_inline_onebox_loading_class(element)
end
@ -26,11 +25,13 @@ module CookedProcessorMixin
map[url] = true

if is_onebox
onebox = Oneboxer.onebox(url,
invalidate_oneboxes: !!@opts[:invalidate_oneboxes],
user_id: @model&.user_id,
category_id: @category_id
)
onebox =
Oneboxer.onebox(
url,
invalidate_oneboxes: !!@opts[:invalidate_oneboxes],
user_id: @model&.user_id,
category_id: @category_id,
)

@has_oneboxes = true if onebox.present?
onebox
@ -56,7 +57,7 @@ module CookedProcessorMixin
# and wrap in a div
limit_size!(img)

next if img["class"]&.include?('onebox-avatar')
next if img["class"]&.include?("onebox-avatar")

parent = parent&.parent if parent&.name == "a"
parent_class = parent && parent["class"]
@ -84,12 +85,18 @@ module CookedProcessorMixin
if width < 64 && height < 64
img["class"] = img["class"].to_s + " onebox-full-image"
else
img.delete('width')
img.delete('height')
new_parent = img.add_next_sibling("<div class='aspect-image' style='--aspect-ratio:#{width}/#{height};'/>")
img.delete("width")
img.delete("height")
new_parent =
img.add_next_sibling(
"<div class='aspect-image' style='--aspect-ratio:#{width}/#{height};'/>",
)
new_parent.first.add_child(img)
end
elsif (parent_class&.include?("instagram-images") || parent_class&.include?("tweet-images") || parent_class&.include?("scale-images")) && width > 0 && height > 0
elsif (
parent_class&.include?("instagram-images") || parent_class&.include?("tweet-images") ||
parent_class&.include?("scale-images")
) && width > 0 && height > 0
img.remove_attribute("width")
img.remove_attribute("height")
parent["class"] = "aspect-image-full-size"
@ -98,16 +105,18 @@ module CookedProcessorMixin
end

if @omit_nofollow || !SiteSetting.add_rel_nofollow_to_user_content
@doc.css(".onebox-body a[rel], .onebox a[rel]").each do |a|
rel_values = a['rel'].split(' ').map(&:downcase)
rel_values.delete('nofollow')
rel_values.delete('ugc')
if rel_values.blank?
a.remove_attribute("rel")
else
a["rel"] = rel_values.join(' ')
@doc
.css(".onebox-body a[rel], .onebox a[rel]")
.each do |a|
rel_values = a["rel"].split(" ").map(&:downcase)
rel_values.delete("nofollow")
rel_values.delete("ugc")
if rel_values.blank?
a.remove_attribute("rel")
else
a["rel"] = rel_values.join(" ")
end
end
end
end
end

@ -116,9 +125,9 @@ module CookedProcessorMixin
# 1) the width/height attributes
# 2) the dimension from the preview (image_sizes)
# 3) the dimension of the original image (HTTP request)
w, h = get_size_from_attributes(img) ||
get_size_from_image_sizes(img["src"], @opts[:image_sizes]) ||
get_size(img["src"])
w, h =
get_size_from_attributes(img) || get_size_from_image_sizes(img["src"], @opts[:image_sizes]) ||
get_size(img["src"])

# limit the size of the thumbnail
img["width"], img["height"] = ImageSizer.resize(w, h)
@ -126,7 +135,7 @@ module CookedProcessorMixin

def get_size_from_attributes(img)
w, h = img["width"].to_i, img["height"].to_i
return [w, h] unless w <= 0 || h <= 0
return w, h unless w <= 0 || h <= 0
# if only width or height are specified attempt to scale image
if w > 0 || h > 0
w = w.to_f
@ -149,9 +158,9 @@ module CookedProcessorMixin
return unless image_sizes.present?
image_sizes.each do |image_size|
url, size = image_size[0], image_size[1]
if url && src && url.include?(src) &&
size && size["width"].to_i > 0 && size["height"].to_i > 0
return [size["width"], size["height"]]
if url && src && url.include?(src) && size && size["width"].to_i > 0 &&
size["height"].to_i > 0
return size["width"], size["height"]
end
end
nil
@ -165,7 +174,7 @@ module CookedProcessorMixin
return @size_cache[url] if @size_cache.has_key?(url)

absolute_url = url
absolute_url = Discourse.base_url_no_prefix + absolute_url if absolute_url =~ /^\/[^\/]/
absolute_url = Discourse.base_url_no_prefix + absolute_url if absolute_url =~ %r{^/[^/]}

return unless absolute_url

@ -186,14 +195,13 @@ module CookedProcessorMixin
else
@size_cache[url] = FastImage.size(absolute_url)
end

rescue Zlib::BufError, URI::Error, OpenSSL::SSL::SSLError
# FastImage.size raises BufError for some gifs, leave it.
end

def is_valid_image_url?(url)
uri = URI.parse(url)
%w(http https).include? uri.scheme
%w[http https].include? uri.scheme
rescue URI::Error
end

@ -217,9 +225,12 @@ module CookedProcessorMixin
"help",
I18n.t(
"upload.placeholders.too_large_humanized",
max_size: ActiveSupport::NumberHelper.number_to_human_size(SiteSetting.max_image_size_kb.kilobytes)
)
)
max_size:
ActiveSupport::NumberHelper.number_to_human_size(
SiteSetting.max_image_size_kb.kilobytes,
),
),
),
)

# Only if the image is already linked
@ -227,7 +238,7 @@ module CookedProcessorMixin
parent = placeholder.parent
parent.add_next_sibling(placeholder)

if parent.name == 'a' && parent["href"].present?
if parent.name == "a" && parent["href"].present?
if url == parent["href"]
parent.remove
else
@ -295,12 +306,13 @@ module CookedProcessorMixin
end

def process_inline_onebox(element)
inline_onebox = InlineOneboxer.lookup(
element.attributes["href"].value,
invalidate: !!@opts[:invalidate_oneboxes],
user_id: @model&.user_id,
category_id: @category_id
)
inline_onebox =
InlineOneboxer.lookup(
element.attributes["href"].value,
invalidate: !!@opts[:invalidate_oneboxes],
user_id: @model&.user_id,
category_id: @category_id,
)

if title = inline_onebox&.dig(:title)
element.children = CGI.escapeHTML(title)
@ -4,7 +4,7 @@ module CrawlerDetection
WAYBACK_MACHINE_URL = "archive.org"

def self.to_matcher(string, type: nil)
escaped = string.split('|').map { |agent| Regexp.escape(agent) }.join('|')
escaped = string.split("|").map { |agent| Regexp.escape(agent) }.join("|")

if type == :real && Rails.env == "test"
# we need this bypass so we properly render views
@ -15,18 +15,33 @@ module CrawlerDetection
end

def self.crawler?(user_agent, via_header = nil)
return true if user_agent.nil? || user_agent&.include?(WAYBACK_MACHINE_URL) || via_header&.include?(WAYBACK_MACHINE_URL)
if user_agent.nil? || user_agent&.include?(WAYBACK_MACHINE_URL) ||
via_header&.include?(WAYBACK_MACHINE_URL)
return true
end

# this is done to avoid regenerating regexes
@non_crawler_matchers ||= {}
@matchers ||= {}

possibly_real = (@non_crawler_matchers[SiteSetting.non_crawler_user_agents] ||= to_matcher(SiteSetting.non_crawler_user_agents, type: :real))
possibly_real =
(
@non_crawler_matchers[SiteSetting.non_crawler_user_agents] ||= to_matcher(
SiteSetting.non_crawler_user_agents,
type: :real,
)
)

if user_agent.match?(possibly_real)
known_bots = (@matchers[SiteSetting.crawler_user_agents] ||= to_matcher(SiteSetting.crawler_user_agents))
known_bots =
(@matchers[SiteSetting.crawler_user_agents] ||= to_matcher(SiteSetting.crawler_user_agents))
if user_agent.match?(known_bots)
bypass = (@matchers[SiteSetting.crawler_check_bypass_agents] ||= to_matcher(SiteSetting.crawler_check_bypass_agents))
bypass =
(
@matchers[SiteSetting.crawler_check_bypass_agents] ||= to_matcher(
SiteSetting.crawler_check_bypass_agents,
)
)
!user_agent.match?(bypass)
else
false
@ -34,30 +49,40 @@ module CrawlerDetection
else
true
end

end

def self.show_browser_update?(user_agent)
return false if SiteSetting.browser_update_user_agents.blank?

@browser_update_matchers ||= {}
matcher = @browser_update_matchers[SiteSetting.browser_update_user_agents] ||= to_matcher(SiteSetting.browser_update_user_agents)
matcher =
@browser_update_matchers[SiteSetting.browser_update_user_agents] ||= to_matcher(
SiteSetting.browser_update_user_agents,
)
user_agent.match?(matcher)
end

# Given a user_agent that returns true from crawler?, should its request be allowed?
def self.allow_crawler?(user_agent)
return true if SiteSetting.allowed_crawler_user_agents.blank? &&
SiteSetting.blocked_crawler_user_agents.blank?
if SiteSetting.allowed_crawler_user_agents.blank? &&
SiteSetting.blocked_crawler_user_agents.blank?
return true
end

@allowlisted_matchers ||= {}
@blocklisted_matchers ||= {}

if SiteSetting.allowed_crawler_user_agents.present?
allowlisted = @allowlisted_matchers[SiteSetting.allowed_crawler_user_agents] ||= to_matcher(SiteSetting.allowed_crawler_user_agents)
allowlisted =
@allowlisted_matchers[SiteSetting.allowed_crawler_user_agents] ||= to_matcher(
SiteSetting.allowed_crawler_user_agents,
)
!user_agent.nil? && user_agent.match?(allowlisted)
else
blocklisted = @blocklisted_matchers[SiteSetting.blocked_crawler_user_agents] ||= to_matcher(SiteSetting.blocked_crawler_user_agents)
blocklisted =
@blocklisted_matchers[SiteSetting.blocked_crawler_user_agents] ||= to_matcher(
SiteSetting.blocked_crawler_user_agents,
)
user_agent.nil? || !user_agent.match?(blocklisted)
end
end
@ -2,7 +2,8 @@

# Provides a way to check a CSRF token outside of a controller
class CSRFTokenVerifier
class InvalidCSRFToken < StandardError; end
class InvalidCSRFToken < StandardError
end

include ActiveSupport::Configurable
include ActionController::RequestForgeryProtection
@ -18,9 +19,7 @@ class CSRFTokenVerifier
def call(env)
@request = ActionDispatch::Request.new(env.dup)

unless verified_request?
raise InvalidCSRFToken
end
raise InvalidCSRFToken unless verified_request?
end

public :form_authenticity_token
@ -1,7 +1,6 @@
# frozen_string_literal: true

module CurrentUser

def self.has_auth_cookie?(env)
Discourse.current_user_provider.new(env).has_auth_cookie?
end
@ -45,5 +44,4 @@ module CurrentUser
def current_user_provider
@current_user_provider ||= Discourse.current_user_provider.new(request.env)
end

end
108
lib/db_helper.rb
@ -3,7 +3,6 @@

require "migration/base_dropper"

class DbHelper

REMAP_SQL ||= <<~SQL
SELECT table_name::text, column_name::text, character_maximum_length
FROM information_schema.columns
@ -19,24 +18,33 @@ class DbHelper
WHERE trigger_name LIKE '%_readonly'
SQL

TRUNCATABLE_COLUMNS ||= [
'topic_links.url'
]
TRUNCATABLE_COLUMNS ||= ["topic_links.url"]

def self.remap(from, to, anchor_left: false, anchor_right: false, excluded_tables: [], verbose: false)
like = "#{anchor_left ? '' : "%"}#{from}#{anchor_right ? '' : "%"}"
def self.remap(
from,
to,
anchor_left: false,
anchor_right: false,
excluded_tables: [],
verbose: false
)
like = "#{anchor_left ? "" : "%"}#{from}#{anchor_right ? "" : "%"}"
text_columns = find_text_columns(excluded_tables)

text_columns.each do |table, columns|
set = columns.map do |column|
replace = "REPLACE(\"#{column[:name]}\", :from, :to)"
replace = truncate(replace, table, column)
"\"#{column[:name]}\" = #{replace}"
end.join(", ")
set =
columns
.map do |column|
replace = "REPLACE(\"#{column[:name]}\", :from, :to)"
replace = truncate(replace, table, column)
"\"#{column[:name]}\" = #{replace}"
end
.join(", ")

where = columns.map do |column|
"\"#{column[:name]}\" IS NOT NULL AND \"#{column[:name]}\" LIKE :like"
end.join(" OR ")
where =
columns
.map { |column| "\"#{column[:name]}\" IS NOT NULL AND \"#{column[:name]}\" LIKE :like" }
.join(" OR ")

rows = DB.exec(<<~SQL, from: from, to: to, like: like)
UPDATE \"#{table}\"
@ -50,19 +58,32 @@ class DbHelper
finish!
end

def self.regexp_replace(pattern, replacement, flags: "gi", match: "~*", excluded_tables: [], verbose: false)
def self.regexp_replace(
pattern,
replacement,
flags: "gi",
match: "~*",
excluded_tables: [],
verbose: false
)
text_columns = find_text_columns(excluded_tables)

text_columns.each do |table, columns|
set = columns.map do |column|
replace = "REGEXP_REPLACE(\"#{column[:name]}\", :pattern, :replacement, :flags)"
replace = truncate(replace, table, column)
"\"#{column[:name]}\" = #{replace}"
end.join(", ")
set =
columns
.map do |column|
replace = "REGEXP_REPLACE(\"#{column[:name]}\", :pattern, :replacement, :flags)"
replace = truncate(replace, table, column)
"\"#{column[:name]}\" = #{replace}"
end
.join(", ")

where = columns.map do |column|
"\"#{column[:name]}\" IS NOT NULL AND \"#{column[:name]}\" #{match} :pattern"
end.join(" OR ")
where =
columns
.map do |column|
"\"#{column[:name]}\" IS NOT NULL AND \"#{column[:name]}\" #{match} :pattern"
end
.join(" OR ")

rows = DB.exec(<<~SQL, pattern: pattern, replacement: replacement, flags: flags, match: match)
UPDATE \"#{table}\"
@ -78,23 +99,25 @@ class DbHelper

def self.find(needle, anchor_left: false, anchor_right: false, excluded_tables: [])
found = {}
like = "#{anchor_left ? '' : "%"}#{needle}#{anchor_right ? '' : "%"}"
like = "#{anchor_left ? "" : "%"}#{needle}#{anchor_right ? "" : "%"}"

DB.query(REMAP_SQL).each do |r|
next if excluded_tables.include?(r.table_name)
DB
.query(REMAP_SQL)
.each do |r|
next if excluded_tables.include?(r.table_name)

rows = DB.query(<<~SQL, like: like)
rows = DB.query(<<~SQL, like: like)
SELECT \"#{r.column_name}\"
FROM \"#{r.table_name}\"
WHERE \"#{r.column_name}\" LIKE :like
SQL

if rows.size > 0
found["#{r.table_name}.#{r.column_name}"] = rows.map do |row|
row.public_send(r.column_name)
if rows.size > 0
found["#{r.table_name}.#{r.column_name}"] = rows.map do |row|
row.public_send(r.column_name)
end
end
end
end

found
end
@ -112,16 +135,21 @@ class DbHelper
triggers = DB.query(TRIGGERS_SQL).map(&:trigger_name).to_set
text_columns = Hash.new { |h, k| h[k] = [] }

DB.query(REMAP_SQL).each do |r|
next if excluded_tables.include?(r.table_name) ||
triggers.include?(Migration::BaseDropper.readonly_trigger_name(r.table_name, r.column_name)) ||
triggers.include?(Migration::BaseDropper.readonly_trigger_name(r.table_name))
DB
.query(REMAP_SQL)
.each do |r|
if excluded_tables.include?(r.table_name) ||
triggers.include?(
Migration::BaseDropper.readonly_trigger_name(r.table_name, r.column_name),
) || triggers.include?(Migration::BaseDropper.readonly_trigger_name(r.table_name))
next
end

text_columns[r.table_name] << {
name: r.column_name,
max_length: r.character_maximum_length
}
end
text_columns[r.table_name] << {
name: r.column_name,
max_length: r.character_maximum_length,
}
end

text_columns
end
@ -1,26 +1,22 @@
# frozen_string_literal: true

module Demon; end
module Demon
end

# intelligent fork based demonizer
class Demon::Base

def self.demons
@demons
end

def self.start(count = 1, verbose: false)
@demons ||= {}
count.times do |i|
(@demons["#{prefix}_#{i}"] ||= new(i, verbose: verbose)).start
end
count.times { |i| (@demons["#{prefix}_#{i}"] ||= new(i, verbose: verbose)).start }
end

def self.stop
return unless @demons
@demons.values.each do |demon|
demon.stop
end
@demons.values.each { |demon| demon.stop }
end

def self.restart
@ -32,16 +28,12 @@ class Demon::Base
end

def self.ensure_running
@demons.values.each do |demon|
demon.ensure_running
end
@demons.values.each { |demon| demon.ensure_running }
end

def self.kill(signal)
return unless @demons
@demons.values.each do |demon|
demon.kill(signal)
end
@demons.values.each { |demon| demon.kill(signal) }
end

attr_reader :pid, :parent_pid, :started, :index
@ -83,18 +75,27 @@ class Demon::Base
if @pid
Process.kill(stop_signal, @pid)

wait_for_stop = lambda {
timeout = @stop_timeout
wait_for_stop =
lambda do
timeout = @stop_timeout

while alive? && timeout > 0
timeout -= (@stop_timeout / 10.0)
sleep(@stop_timeout / 10.0)
Process.waitpid(@pid, Process::WNOHANG) rescue -1
while alive? && timeout > 0
timeout -= (@stop_timeout / 10.0)
sleep(@stop_timeout / 10.0)
begin
Process.waitpid(@pid, Process::WNOHANG)
rescue StandardError
-1
end
end

begin
Process.waitpid(@pid, Process::WNOHANG)
rescue StandardError
-1
end
end

Process.waitpid(@pid, Process::WNOHANG) rescue -1
}

wait_for_stop.call

if alive?
@ -118,7 +119,12 @@ class Demon::Base
return
end

dead = Process.waitpid(@pid, Process::WNOHANG) rescue -1
dead =
begin
Process.waitpid(@pid, Process::WNOHANG)
rescue StandardError
-1
end
if dead
STDERR.puts "Detected dead worker #{@pid}, restarting..."
@pid = nil
@ -141,21 +147,20 @@ class Demon::Base
end

def run
@pid = fork do
Process.setproctitle("discourse #{self.class.prefix}")
monitor_parent
establish_app
after_fork
end
@pid =
fork do
Process.setproctitle("discourse #{self.class.prefix}")
monitor_parent
establish_app
after_fork
end
write_pid_file
end

def already_running?
if File.exist? pid_file
pid = File.read(pid_file).to_i
if Demon::Base.alive?(pid)
return pid
end
return pid if Demon::Base.alive?(pid)
end

nil
@ -164,24 +169,20 @@ class Demon::Base
def self.alive?(pid)
Process.kill(0, pid)
true
rescue
rescue StandardError
false
end

private

def verbose(msg)
if @verbose
puts msg
end
puts msg if @verbose
end

def write_pid_file
verbose("writing pid file #{pid_file} for #{@pid}")
FileUtils.mkdir_p(@rails_root + "tmp/pids")
File.open(pid_file, 'w') do |f|
f.write(@pid)
end
File.open(pid_file, "w") { |f| f.write(@pid) }
end

def delete_pid_file
@ -36,15 +36,20 @@ class Demon::EmailSync < ::Demon::Base
status = nil
idle = false

while @running && group.reload.imap_mailbox_name.present? do
while @running && group.reload.imap_mailbox_name.present?
ImapSyncLog.debug("Processing mailbox for group #{group.name} in db #{db}", group)
status = syncer.process(
idle: syncer.can_idle? && status && status[:remaining] == 0,
old_emails_limit: status && status[:remaining] > 0 ? 0 : nil,
)
status =
syncer.process(
idle: syncer.can_idle? && status && status[:remaining] == 0,
old_emails_limit: status && status[:remaining] > 0 ? 0 : nil,
)

if !syncer.can_idle? && status[:remaining] == 0
ImapSyncLog.debug("Going to sleep for group #{group.name} in db #{db} to wait for new emails", group, db: false)
ImapSyncLog.debug(
"Going to sleep for group #{group.name} in db #{db} to wait for new emails",
group,
db: false,
)

# Thread goes into sleep for a bit so it is better to return any
# connection back to the pool.
@ -66,11 +71,7 @@ class Demon::EmailSync < ::Demon::Base
# synchronization primitives available anyway).
@running = false

@sync_data.each do |db, sync_data|
sync_data.each do |_, data|
kill_and_disconnect!(data)
end
end
@sync_data.each { |db, sync_data| sync_data.each { |_, data| kill_and_disconnect!(data) } }

exit 0
end
@ -89,9 +90,9 @@ class Demon::EmailSync < ::Demon::Base
@sync_data = {}
@sync_lock = Mutex.new

trap('INT') { kill_threads }
trap('TERM') { kill_threads }
trap('HUP') { kill_threads }
trap("INT") { kill_threads }
trap("TERM") { kill_threads }
trap("HUP") { kill_threads }

while @running
Discourse.redis.set(HEARTBEAT_KEY, Time.now.to_i, ex: HEARTBEAT_INTERVAL)
@ -101,9 +102,7 @@ class Demon::EmailSync < ::Demon::Base
@sync_data.filter! do |db, sync_data|
next true if all_dbs.include?(db)

sync_data.each do |_, data|
kill_and_disconnect!(data)
end
sync_data.each { |_, data| kill_and_disconnect!(data) }

false
end
@ -121,7 +120,10 @@ class Demon::EmailSync < ::Demon::Base
next true if groups[group_id] && data[:thread]&.alive? && !data[:syncer]&.disconnected?

if !groups[group_id]
ImapSyncLog.warn("Killing thread for group because mailbox is no longer synced", group_id)
ImapSyncLog.warn(
"Killing thread for group because mailbox is no longer synced",
group_id,
)
else
ImapSyncLog.warn("Thread for group is dead", group_id)
end
@ -133,12 +135,13 @@ class Demon::EmailSync < ::Demon::Base
# Spawn new threads for groups that are now synchronized.
groups.each do |group_id, group|
if !@sync_data[db][group_id]
ImapSyncLog.debug("Starting thread for group #{group.name} mailbox #{group.imap_mailbox_name}", group, db: false)
ImapSyncLog.debug(
"Starting thread for group #{group.name} mailbox #{group.imap_mailbox_name}",
group,
db: false,
)

@sync_data[db][group_id] = {
thread: start_thread(db, group),
syncer: nil
}
@sync_data[db][group_id] = { thread: start_thread(db, group), syncer: nil }
end
end
end
@ -3,7 +3,6 @@

require "demon/base"

class Demon::RailsAutospec < Demon::Base

def self.prefix
"rails-autospec"
end
@ -17,15 +16,10 @@ class Demon::RailsAutospec < Demon::Base
def after_fork
require "rack"
ENV["RAILS_ENV"] = "test"
Rack::Server.start(
config: "config.ru",
AccessLog: [],
Port: ENV["TEST_SERVER_PORT"] || 60099,
)
Rack::Server.start(config: "config.ru", AccessLog: [], Port: ENV["TEST_SERVER_PORT"] || 60_099)
rescue => e
STDERR.puts e.message
STDERR.puts e.backtrace.join("\n")
exit 1
end

end
@ -3,7 +3,6 @@

require "demon/base"

class Demon::Sidekiq < ::Demon::Base

def self.prefix
"sidekiq"
end
@ -26,7 +25,7 @@ class Demon::Sidekiq < ::Demon::Base
Demon::Sidekiq.after_fork&.call

puts "Loading Sidekiq in process id #{Process.pid}"
require 'sidekiq/cli'
require "sidekiq/cli"
cli = Sidekiq::CLI.instance

# Unicorn uses USR1 to indicate that log files have been rotated
@ -38,10 +37,10 @@ class Demon::Sidekiq < ::Demon::Base

options = ["-c", GlobalSetting.sidekiq_workers.to_s]

[['critical', 8], ['default', 4], ['low', 2], ['ultra_low', 1]].each do |queue_name, weight|
[["critical", 8], ["default", 4], ["low", 2], ["ultra_low", 1]].each do |queue_name, weight|
custom_queue_hostname = ENV["UNICORN_SIDEKIQ_#{queue_name.upcase}_QUEUE_HOSTNAME"]

if !custom_queue_hostname || custom_queue_hostname.split(',').include?(Discourse.os_hostname)
if !custom_queue_hostname || custom_queue_hostname.split(",").include?(Discourse.os_hostname)
options << "-q"
options << "#{queue_name},#{weight}"
end
@ -49,7 +48,7 @@ class Demon::Sidekiq < ::Demon::Base

# Sidekiq not as high priority as web, in this environment it is forked so a web is very
# likely running
Discourse::Utils.execute_command('renice', '-n', '5', '-p', Process.pid.to_s)
Discourse::Utils.execute_command("renice", "-n", "5", "-p", Process.pid.to_s)

cli.parse(options)
load Rails.root + "config/initializers/100-sidekiq.rb"
@ -59,5 +58,4 @@ class Demon::Sidekiq < ::Demon::Base
STDERR.puts e.backtrace.join("\n")
exit 1
end

end
@ -1,24 +1,23 @@
# frozen_string_literal: true

module DirectoryHelper

def tmp_directory(prefix)
directory_cache[prefix] ||= begin
f = File.join(Rails.root, 'tmp', Time.now.strftime("#{prefix}%Y%m%d%H%M%S"))
f = File.join(Rails.root, "tmp", Time.now.strftime("#{prefix}%Y%m%d%H%M%S"))
FileUtils.mkdir_p(f) unless Dir[f].present?
f
end
end

def remove_tmp_directory(prefix)
tmp_directory_name = directory_cache[prefix] || ''
tmp_directory_name = directory_cache[prefix] || ""
directory_cache.delete(prefix)
FileUtils.rm_rf(tmp_directory_name) if Dir[tmp_directory_name].present?
end

private

def directory_cache
@directory_cache ||= {}
end

end
452
lib/discourse.rb
@ -1,16 +1,16 @@
# frozen_string_literal: true

require 'cache'
require 'open3'
require 'plugin/instance'
require 'version'
require "cache"
require "open3"
require "plugin/instance"
require "version"

module Discourse
DB_POST_MIGRATE_PATH ||= "db/post_migrate"
REQUESTED_HOSTNAME ||= "REQUESTED_HOSTNAME"

class Utils
URI_REGEXP ||= URI.regexp(%w{http https})
URI_REGEXP ||= URI.regexp(%w[http https])

# Usage:
# Discourse::Utils.execute_command("pwd", chdir: 'mydirectory')
@ -22,7 +22,9 @@ module Discourse
runner = CommandRunner.new(**args)

if block_given?
raise RuntimeError.new("Cannot pass command and block to execute_command") if command.present?
if command.present?
raise RuntimeError.new("Cannot pass command and block to execute_command")
end
yield runner
else
runner.exec(*command)
@ -33,33 +35,32 @@ module Discourse
logs.join("\n")
end

def self.logs_markdown(logs, user:, filename: 'log.txt')
def self.logs_markdown(logs, user:, filename: "log.txt")
# Reserve 250 characters for the rest of the text
max_logs_length = SiteSetting.max_post_length - 250
pretty_logs = Discourse::Utils.pretty_logs(logs)

# If logs are short, try to inline them
if pretty_logs.size < max_logs_length
return <<~TEXT
return <<~TEXT if pretty_logs.size < max_logs_length
```text
#{pretty_logs}
```
TEXT
end

# Try to create an upload for the logs
upload = Dir.mktmpdir do |dir|
File.write(File.join(dir, filename), pretty_logs)
zipfile = Compression::Zip.new.compress(dir, filename)
File.open(zipfile) do |file|
UploadCreator.new(
file,
File.basename(zipfile),
type: 'backup_logs',
for_export: 'true'
).create_for(user.id)
upload =
Dir.mktmpdir do |dir|
File.write(File.join(dir, filename), pretty_logs)
zipfile = Compression::Zip.new.compress(dir, filename)
File.open(zipfile) do |file|
UploadCreator.new(
file,
File.basename(zipfile),
type: "backup_logs",
for_export: "true",
).create_for(user.id)
end
end
end

if upload.persisted?
return UploadMarkdown.new(upload).attachment_markdown
@ -82,8 +83,8 @@ module Discourse
rescue Errno::ENOENT
end

FileUtils.mkdir_p(File.join(Rails.root, 'tmp'))
temp_destination = File.join(Rails.root, 'tmp', SecureRandom.hex)
FileUtils.mkdir_p(File.join(Rails.root, "tmp"))
temp_destination = File.join(Rails.root, "tmp", SecureRandom.hex)

File.open(temp_destination, "w") do |fd|
fd.write(contents)
@ -101,9 +102,9 @@ module Discourse
rescue Errno::ENOENT, Errno::EINVAL
end

FileUtils.mkdir_p(File.join(Rails.root, 'tmp'))
temp_destination = File.join(Rails.root, 'tmp', SecureRandom.hex)
execute_command('ln', '-s', source, temp_destination)
FileUtils.mkdir_p(File.join(Rails.root, "tmp"))
temp_destination = File.join(Rails.root, "tmp", SecureRandom.hex)
execute_command("ln", "-s", source, temp_destination)
FileUtils.mv(temp_destination, destination)

nil
@ -127,13 +128,22 @@ module Discourse
end

def exec(*command, **exec_params)
raise RuntimeError.new("Cannot specify same parameters at block and command level") if (@init_params.keys & exec_params.keys).present?
if (@init_params.keys & exec_params.keys).present?
raise RuntimeError.new("Cannot specify same parameters at block and command level")
end
execute_command(*command, **@init_params.merge(exec_params))
end

private

def execute_command(*command, timeout: nil, failure_message: "", success_status_codes: [0], chdir: ".", unsafe_shell: false)
def execute_command(
*command,
timeout: nil,
failure_message: "",
success_status_codes: [0],
chdir: ".",
unsafe_shell: false
)
env = nil
env = command.shift if command[0].is_a?(Hash)

@ -156,11 +166,11 @@ module Discourse
if !status.exited? || !success_status_codes.include?(status.exitstatus)
failure_message = "#{failure_message}\n" if !failure_message.blank?
raise CommandError.new(
"#{caller[0]}: #{failure_message}#{stderr}",
stdout: stdout,
stderr: stderr,
status: status
)
"#{caller[0]}: #{failure_message}#{stderr}",
stdout: stdout,
stderr: stderr,
status: status,
)
end

stdout
@ -195,33 +205,32 @@ module Discourse
# mini_scheduler direct reporting
if Hash === job
job_class = job["class"]
if job_class
job_exception_stats[job_class] += 1
end
job_exception_stats[job_class] += 1 if job_class
end

# internal reporting
if job.class == Class && ::Jobs::Base > job
job_exception_stats[job] += 1
end
job_exception_stats[job] += 1 if job.class == Class && ::Jobs::Base > job

cm = RailsMultisite::ConnectionManagement
parent_logger.handle_exception(ex, {
current_db: cm.current_db,
current_hostname: cm.current_hostname
}.merge(context))
parent_logger.handle_exception(
ex,
{ current_db: cm.current_db, current_hostname: cm.current_hostname }.merge(context),
)

raise ex if Rails.env.test?
end

# Expected less matches than what we got in a find
class TooManyMatches < StandardError; end
class TooManyMatches < StandardError
end

# When they try to do something they should be logged in for
class NotLoggedIn < StandardError; end
class NotLoggedIn < StandardError
end

# When the input is somehow bad
class InvalidParameters < StandardError; end
class InvalidParameters < StandardError
end

# When they don't have permission to do something
class InvalidAccess < StandardError
@ -249,7 +258,13 @@ module Discourse
attr_reader :original_path
attr_reader :custom_message

def initialize(msg = nil, status: 404, check_permalinks: false, original_path: nil, custom_message: nil)
def initialize(
msg = nil,
status: 404,
check_permalinks: false,
original_path: nil,
custom_message: nil
)
super(msg)

@status = status
@ -260,27 +275,33 @@ module Discourse
end

# When a setting is missing
class SiteSettingMissing < StandardError; end
class SiteSettingMissing < StandardError
end

# When ImageMagick is missing
class ImageMagickMissing < StandardError; end
class ImageMagickMissing < StandardError
end

# When read-only mode is enabled
class ReadOnly < StandardError; end
class ReadOnly < StandardError
end

# Cross site request forgery
class CSRF < StandardError; end
class CSRF < StandardError
end

class Deprecation < StandardError; end
class Deprecation < StandardError
end

class ScssError < StandardError; end
class ScssError < StandardError
end

def self.filters
@filters ||= [:latest, :unread, :new, :unseen, :top, :read, :posted, :bookmarks]
@filters ||= %i[latest unread new unseen top read posted bookmarks]
end

def self.anonymous_filters
@anonymous_filters ||= [:latest, :top, :categories]
@anonymous_filters ||= %i[latest top categories]
end

def self.top_menu_items
@ -288,7 +309,7 @@ module Discourse
end

def self.anonymous_top_menu_items
@anonymous_top_menu_items ||= Discourse.anonymous_filters + [:categories, :top]
@anonymous_top_menu_items ||= Discourse.anonymous_filters + %i[categories top]
end

PIXEL_RATIOS ||= [1, 1.5, 2, 3]
@ -297,26 +318,28 @@ module Discourse
# TODO: should cache these when we get a notification system for site settings
set = Set.new

SiteSetting.avatar_sizes.split("|").map(&:to_i).each do |size|
PIXEL_RATIOS.each do |pixel_ratio|
set << (size * pixel_ratio).to_i
end
end
SiteSetting
.avatar_sizes
.split("|")
.map(&:to_i)
.each { |size| PIXEL_RATIOS.each { |pixel_ratio| set << (size * pixel_ratio).to_i } }

set
end

def self.activate_plugins!
@plugins = []
Plugin::Instance.find_all("#{Rails.root}/plugins").each do |p|
v = p.metadata.required_version || Discourse::VERSION::STRING
if Discourse.has_needed_version?(Discourse::VERSION::STRING, v)
p.activate!
@plugins << p
else
STDERR.puts "Could not activate #{p.metadata.name}, discourse does not meet required version (#{v})"
Plugin::Instance
.find_all("#{Rails.root}/plugins")
.each do |p|
v = p.metadata.required_version || Discourse::VERSION::STRING
if Discourse.has_needed_version?(Discourse::VERSION::STRING, v)
p.activate!
@plugins << p
else
STDERR.puts "Could not activate #{p.metadata.name}, discourse does not meet required version (#{v})"
end
end
end
DiscourseEvent.trigger(:after_plugin_activation)
end

@ -360,9 +383,7 @@ module Discourse

def self.apply_asset_filters(plugins, type, request)
filter_opts = asset_filter_options(type, request)
plugins.select do |plugin|
plugin.asset_filters.all? { |b| b.call(type, request, filter_opts) }
end
plugins.select { |plugin| plugin.asset_filters.all? { |b| b.call(type, request, filter_opts) } }
end

def self.asset_filter_options(type, request)
@ -385,20 +406,24 @@ module Discourse
targets << :desktop if args[:desktop_view]

targets.each do |target|
assets += plugins.find_all do |plugin|
plugin.css_asset_exists?(target)
end.map do |plugin|
target.nil? ? plugin.directory_name : "#{plugin.directory_name}_#{target}"
end
assets +=
plugins
.find_all { |plugin| plugin.css_asset_exists?(target) }
.map do |plugin|
target.nil? ? plugin.directory_name : "#{plugin.directory_name}_#{target}"
end
end

assets
end

def self.find_plugin_js_assets(args)
plugins = self.find_plugins(args).select do |plugin|
plugin.js_asset_exists? || plugin.extra_js_asset_exists? || plugin.admin_js_asset_exists?
end
plugins =
self
.find_plugins(args)
.select do |plugin|
plugin.js_asset_exists? || plugin.extra_js_asset_exists? || plugin.admin_js_asset_exists?
end

plugins = apply_asset_filters(plugins, :js, args[:request])

@ -413,25 +438,33 @@ module Discourse
end

def self.assets_digest
@assets_digest ||= begin
digest = Digest::MD5.hexdigest(ActionView::Base.assets_manifest.assets.values.sort.join)
@assets_digest ||=
begin
digest = Digest::MD5.hexdigest(ActionView::Base.assets_manifest.assets.values.sort.join)

channel = "/global/asset-version"
message = MessageBus.last_message(channel)
channel = "/global/asset-version"
message = MessageBus.last_message(channel)

unless message && message.data == digest
MessageBus.publish channel, digest
MessageBus.publish channel, digest unless message && message.data == digest
digest
end
digest
end
end

BUILTIN_AUTH ||= [
Auth::AuthProvider.new(authenticator: Auth::FacebookAuthenticator.new, frame_width: 580, frame_height: 400, icon: "fab-facebook"),
Auth::AuthProvider.new(authenticator: Auth::GoogleOAuth2Authenticator.new, frame_width: 850, frame_height: 500), # Custom icon implemented in client
Auth::AuthProvider.new(
authenticator: Auth::FacebookAuthenticator.new,
frame_width: 580,
frame_height: 400,
icon: "fab-facebook",
),
Auth::AuthProvider.new(
authenticator: Auth::GoogleOAuth2Authenticator.new,
frame_width: 850,
frame_height: 500,
), # Custom icon implemented in client
Auth::AuthProvider.new(authenticator: Auth::GithubAuthenticator.new, icon: "fab-github"),
Auth::AuthProvider.new(authenticator: Auth::TwitterAuthenticator.new, icon: "fab-twitter"),
Auth::AuthProvider.new(authenticator: Auth::DiscordAuthenticator.new, icon: "fab-discord")
Auth::AuthProvider.new(authenticator: Auth::DiscordAuthenticator.new, icon: "fab-discord"),
]

def self.auth_providers
@ -439,7 +472,7 @@ module Discourse
end

def self.enabled_auth_providers
auth_providers.select { |provider| provider.authenticator.enabled? }
auth_providers.select { |provider| provider.authenticator.enabled? }
end

def self.authenticators
@ -449,17 +482,18 @@ module Discourse
end

def self.enabled_authenticators
authenticators.select { |authenticator| authenticator.enabled? }
authenticators.select { |authenticator| authenticator.enabled? }
end

def self.cache
@cache ||= begin
if GlobalSetting.skip_redis?
ActiveSupport::Cache::MemoryStore.new
else
Cache.new
@cache ||=
begin
if GlobalSetting.skip_redis?
ActiveSupport::Cache::MemoryStore.new
else
Cache.new
end
end
end
end

# hostname of the server, operating system level
@ -467,15 +501,15 @@ module Discourse
def self.os_hostname
@os_hostname ||=
begin
require 'socket'
require "socket"
Socket.gethostname
rescue => e
warn_exception(e, message: 'Socket.gethostname is not working')
warn_exception(e, message: "Socket.gethostname is not working")
begin
`hostname`.strip
rescue => e
warn_exception(e, message: 'hostname command is not working')
'unknown_host'
warn_exception(e, message: "hostname command is not working")
"unknown_host"
end
end
end
@ -501,12 +535,12 @@ module Discourse
def self.current_hostname_with_port
default_port = SiteSetting.force_https? ? 443 : 80
result = +"#{current_hostname}"
result << ":#{SiteSetting.port}" if SiteSetting.port.to_i > 0 && SiteSetting.port.to_i != default_port

if Rails.env.development? && SiteSetting.port.blank?
result << ":#{ENV["UNICORN_PORT"] || 3000}"
if SiteSetting.port.to_i > 0 && SiteSetting.port.to_i != default_port
result << ":#{SiteSetting.port}"
end

result << ":#{ENV["UNICORN_PORT"] || 3000}" if Rails.env.development? && SiteSetting.port.blank?

result
end

@ -520,16 +554,18 @@ module Discourse

def self.route_for(uri)
unless uri.is_a?(URI)
uri = begin
URI(uri)
rescue ArgumentError, URI::Error
end
uri =
begin
URI(uri)
rescue ArgumentError, URI::Error
end
end

return unless uri

path = +(uri.path || "")
if !uri.host || (uri.host == Discourse.current_hostname && path.start_with?(Discourse.base_path))
if !uri.host ||
(uri.host == Discourse.current_hostname && path.start_with?(Discourse.base_path))
path.slice!(Discourse.base_path)
return Rails.application.routes.recognize_path(path)
end
@ -543,21 +579,21 @@ module Discourse
alias_method :base_url_no_path, :base_url_no_prefix
end

READONLY_MODE_KEY_TTL ||= 60
READONLY_MODE_KEY ||= 'readonly_mode'
PG_READONLY_MODE_KEY ||= 'readonly_mode:postgres'
PG_READONLY_MODE_KEY_TTL ||= 300
USER_READONLY_MODE_KEY ||= 'readonly_mode:user'
PG_FORCE_READONLY_MODE_KEY ||= 'readonly_mode:postgres_force'
READONLY_MODE_KEY_TTL ||= 60
READONLY_MODE_KEY ||= "readonly_mode"
PG_READONLY_MODE_KEY ||= "readonly_mode:postgres"
PG_READONLY_MODE_KEY_TTL ||= 300
USER_READONLY_MODE_KEY ||= "readonly_mode:user"
PG_FORCE_READONLY_MODE_KEY ||= "readonly_mode:postgres_force"

# Psuedo readonly mode, where staff can still write
STAFF_WRITES_ONLY_MODE_KEY ||= 'readonly_mode:staff_writes_only'
STAFF_WRITES_ONLY_MODE_KEY ||= "readonly_mode:staff_writes_only"

READONLY_KEYS ||= [
READONLY_MODE_KEY,
PG_READONLY_MODE_KEY,
USER_READONLY_MODE_KEY,
PG_FORCE_READONLY_MODE_KEY
PG_FORCE_READONLY_MODE_KEY,
]

def self.enable_readonly_mode(key = READONLY_MODE_KEY)
@ -565,7 +601,9 @@ module Discourse
Sidekiq.pause!("pg_failover") if !Sidekiq.paused?
end

if [USER_READONLY_MODE_KEY, PG_FORCE_READONLY_MODE_KEY, STAFF_WRITES_ONLY_MODE_KEY].include?(key)
if [USER_READONLY_MODE_KEY, PG_FORCE_READONLY_MODE_KEY, STAFF_WRITES_ONLY_MODE_KEY].include?(
key,
)
Discourse.redis.set(key, 1)
else
ttl =
@ -595,15 +633,13 @@ module Discourse

unless @threads[key]&.alive?
@threads[key] = Thread.new do
while @dbs.size > 0 do
while @dbs.size > 0
sleep ttl / 2

@mutex.synchronize do
@dbs.each do |db|
RailsMultisite::ConnectionManagement.with_connection(db) do
if !Discourse.redis.expire(key, ttl)
@dbs.delete(db)
end
@dbs.delete(db) if !Discourse.redis.expire(key, ttl)
end
end
end
@ -653,7 +689,7 @@ module Discourse

# Shared between processes
def self.postgres_last_read_only
@postgres_last_read_only ||= DistributedCache.new('postgres_last_read_only', namespace: false)
@postgres_last_read_only ||= DistributedCache.new("postgres_last_read_only", namespace: false)
end

# Per-process
@ -698,39 +734,43 @@ module Discourse
# This is better than `MessageBus.publish "/file-change", ["refresh"]` because
# it spreads the refreshes out over a time period
if user_ids
MessageBus.publish("/refresh_client", 'clobber', user_ids: user_ids)
MessageBus.publish("/refresh_client", "clobber", user_ids: user_ids)
else
MessageBus.publish('/global/asset-version', 'clobber')
MessageBus.publish("/global/asset-version", "clobber")
end
end

def self.git_version
@git_version ||= begin
git_cmd = 'git rev-parse HEAD'
self.try_git(git_cmd, Discourse::VERSION::STRING)
end
@git_version ||=
begin
git_cmd = "git rev-parse HEAD"
self.try_git(git_cmd, Discourse::VERSION::STRING)
end
end

def self.git_branch
@git_branch ||= begin
git_cmd = 'git rev-parse --abbrev-ref HEAD'
self.try_git(git_cmd, 'unknown')
end
@git_branch ||=
begin
git_cmd = "git rev-parse --abbrev-ref HEAD"
self.try_git(git_cmd, "unknown")
end
end

def self.full_version
@full_version ||= begin
git_cmd = 'git describe --dirty --match "v[0-9]*" 2> /dev/null'
self.try_git(git_cmd, 'unknown')
end
@full_version ||=
begin
git_cmd = 'git describe --dirty --match "v[0-9]*" 2> /dev/null'
self.try_git(git_cmd, "unknown")
end
end

def self.last_commit_date
@last_commit_date ||= begin
git_cmd = 'git log -1 --format="%ct"'
seconds = self.try_git(git_cmd, nil)
seconds.nil? ? nil : DateTime.strptime(seconds, '%s')
end
@last_commit_date ||=
begin
git_cmd = 'git log -1 --format="%ct"'
seconds = self.try_git(git_cmd, nil)
seconds.nil? ? nil : DateTime.strptime(seconds, "%s")
end
end

def self.try_git(git_cmd, default_value)
@ -738,20 +778,21 @@ module Discourse

begin
version_value = `#{git_cmd}`.strip
rescue
rescue StandardError
version_value = default_value
end

if version_value.empty?
version_value = default_value
end
version_value = default_value if version_value.empty?

version_value
end

# Either returns the site_contact_username user or the first admin.
def self.site_contact_user
user = User.find_by(username_lower: SiteSetting.site_contact_username.downcase) if SiteSetting.site_contact_username.present?
user =
User.find_by(
username_lower: SiteSetting.site_contact_username.downcase,
) if SiteSetting.site_contact_username.present?
user ||= (system_user || User.admins.real.order(:id).first)
end

@ -765,10 +806,10 @@ module Discourse

def self.store
if SiteSetting.Upload.enable_s3_uploads
@s3_store_loaded ||= require 'file_store/s3_store'
@s3_store_loaded ||= require "file_store/s3_store"
FileStore::S3Store.new
else
@local_store_loaded ||= require 'file_store/local_store'
@local_store_loaded ||= require "file_store/local_store"
FileStore::LocalStore.new
end
end
@ -805,15 +846,15 @@ module Discourse
Discourse.cache.reconnect
Logster.store.redis.reconnect
# shuts down all connections in the pool
Sidekiq.redis_pool.shutdown { |conn| conn.disconnect! }
Sidekiq.redis_pool.shutdown { |conn| conn.disconnect! }
# re-establish
Sidekiq.redis = sidekiq_redis_config

# in case v8 was initialized we want to make sure it is nil
PrettyText.reset_context

DiscourseJsProcessor::Transpiler.reset_context if defined? DiscourseJsProcessor::Transpiler
JsLocaleHelper.reset_context if defined? JsLocaleHelper
DiscourseJsProcessor::Transpiler.reset_context if defined?(DiscourseJsProcessor::Transpiler)
JsLocaleHelper.reset_context if defined?(JsLocaleHelper)

# warm up v8 after fork, that way we do not fork a v8 context
# it may cause issues if bg threads in a v8 isolate randomly stop
@ -831,7 +872,7 @@ module Discourse
# you can use Discourse.warn when you want to report custom environment
# with the error, this helps with grouping
def self.warn(message, env = nil)
append = env ? (+" ") << env.map { |k, v|"#{k}: #{v}" }.join(" ") : ""
append = env ? (+" ") << env.map { |k, v| "#{k}: #{v}" }.join(" ") : ""

if !(Logster::Logger === Rails.logger)
Rails.logger.warn("#{message}#{append}")
@ -839,9 +880,7 @@ module Discourse
end

loggers = [Rails.logger]
if Rails.logger.chained
loggers.concat(Rails.logger.chained)
end
loggers.concat(Rails.logger.chained) if Rails.logger.chained

logster_env = env

@ -849,9 +888,7 @@ module Discourse
logster_env = Logster::Message.populate_from_env(old_env)

# a bit awkward by try to keep the new params
env.each do |k, v|
logster_env[k] = v
end
env.each { |k, v| logster_env[k] = v }
end

loggers.each do |logger|
@ -860,12 +897,7 @@ module Discourse
next
end

logger.store.report(
::Logger::Severity::WARN,
"discourse",
message,
env: logster_env
)
logger.store.report(::Logger::Severity::WARN, "discourse", message, env: logster_env)
end

if old_env
@ -881,7 +913,6 @@ module Discourse
# report a warning maintaining backtrack for logster
def self.warn_exception(e, message: "", env: nil)
if Rails.logger.respond_to? :add_with_opts

env ||= {}
env[:current_db] ||= RailsMultisite::ConnectionManagement.current_db

@ -891,13 +922,13 @@ module Discourse
"#{message} : #{e.class.name} : #{e}",
"discourse-exception",
backtrace: e.backtrace.join("\n"),
env: env
env: env,
)
else
# no logster ... fallback
Rails.logger.warn("#{message} #{e}\n#{e.backtrace.join("\n")}")
end
rescue
rescue StandardError
STDERR.puts "Failed to report exception #{e} #{message}"
end

@ -909,17 +940,11 @@ module Discourse
warning << "\nAt #{location}"
warning = warning.join(" ")

if raise_error
raise Deprecation.new(warning)
end
raise Deprecation.new(warning) if raise_error

if Rails.env == "development"
STDERR.puts(warning)
end
STDERR.puts(warning) if Rails.env == "development"

if output_in_test && Rails.env == "test"
STDERR.puts(warning)
end
STDERR.puts(warning) if output_in_test && Rails.env == "test"

digest = Digest::MD5.hexdigest(warning)
redis_key = "deprecate-notice-#{digest}"
@ -935,7 +960,7 @@ module Discourse
warning
end

SIDEKIQ_NAMESPACE ||= 'sidekiq'
SIDEKIQ_NAMESPACE ||= "sidekiq"

def self.sidekiq_redis_config
conf = GlobalSetting.redis_config.dup
@ -951,7 +976,8 @@ module Discourse

def self.reset_active_record_cache_if_needed(e)
last_cache_reset = Discourse.last_ar_cache_reset
if e && e.message =~ /UndefinedColumn/ && (last_cache_reset.nil? || last_cache_reset < 30.seconds.ago)
if e && e.message =~ /UndefinedColumn/ &&
(last_cache_reset.nil? || last_cache_reset < 30.seconds.ago)
Rails.logger.warn "Clearing Active Record cache, this can happen if schema changed while site is running or in a multisite various databases are running different schemas. Consider running rake multisite:migrate."
Discourse.last_ar_cache_reset = Time.zone.now
Discourse.reset_active_record_cache
@ -961,7 +987,11 @@ module Discourse
def self.reset_active_record_cache
ActiveRecord::Base.connection.query_cache.clear
(ActiveRecord::Base.connection.tables - %w[schema_migrations versions]).each do |table|
table.classify.constantize.reset_column_information rescue nil
begin
table.classify.constantize.reset_column_information
rescue StandardError
nil
end
end
nil
end
@ -971,7 +1001,7 @@ module Discourse
end

def self.skip_post_deployment_migrations?
['1', 'true'].include?(ENV["SKIP_POST_DEPLOYMENT_MIGRATIONS"]&.to_s)
%w[1 true].include?(ENV["SKIP_POST_DEPLOYMENT_MIGRATIONS"]&.to_s)
end

# this is used to preload as much stuff as possible prior to forking
@ -985,7 +1015,11 @@ module Discourse

# load up all models and schema
(ActiveRecord::Base.connection.tables - %w[schema_migrations versions]).each do |table|
table.classify.constantize.first rescue nil
begin
table.classify.constantize.first
rescue StandardError
nil
end
end

# ensure we have a full schema cache in case we missed something above
@ -1024,29 +1058,27 @@ module Discourse
end

[
Thread.new {
Thread.new do
# router warm up
Rails.application.routes.recognize_path('abc') rescue nil
},
Thread.new {
begin
Rails.application.routes.recognize_path("abc")
rescue StandardError
nil
end
end,
Thread.new do
# preload discourse version
Discourse.git_version
Discourse.git_branch
Discourse.full_version
},
Thread.new {
require 'actionview_precompiler'
end,
Thread.new do
require "actionview_precompiler"
ActionviewPrecompiler.precompile
},
Thread.new {
LetterAvatar.image_magick_version
},
Thread.new {
SvgSprite.core_svgs
},
Thread.new {
EmberCli.script_chunks
}
end,
Thread.new { LetterAvatar.image_magick_version },
Thread.new { SvgSprite.core_svgs },
Thread.new { EmberCli.script_chunks },
].each(&:join)
ensure
@preloaded_rails = true
@ -1055,10 +1087,10 @@ module Discourse
mattr_accessor :redis

def self.is_parallel_test?
ENV['RAILS_ENV'] == "test" && ENV['TEST_ENV_NUMBER']
ENV["RAILS_ENV"] == "test" && ENV["TEST_ENV_NUMBER"]
end

CDN_REQUEST_METHODS ||= ["GET", "HEAD", "OPTIONS"]
CDN_REQUEST_METHODS ||= %w[GET HEAD OPTIONS]

def self.is_cdn_request?(env, request_method)
return unless CDN_REQUEST_METHODS.include?(request_method)
@ -1071,8 +1103,8 @@ module Discourse
end

def self.apply_cdn_headers(headers)
headers['Access-Control-Allow-Origin'] = '*'
headers['Access-Control-Allow-Methods'] = CDN_REQUEST_METHODS.join(", ")
headers["Access-Control-Allow-Origin"] = "*"
headers["Access-Control-Allow-Methods"] = CDN_REQUEST_METHODS.join(", ")
headers
end

@ -1091,8 +1123,12 @@ module Discourse
end

def self.anonymous_locale(request)
locale = HttpLanguageParser.parse(request.cookies["locale"]) if SiteSetting.set_locale_from_cookie
locale ||= HttpLanguageParser.parse(request.env["HTTP_ACCEPT_LANGUAGE"]) if SiteSetting.set_locale_from_accept_language_header
locale =
HttpLanguageParser.parse(request.cookies["locale"]) if SiteSetting.set_locale_from_cookie
locale ||=
HttpLanguageParser.parse(
request.env["HTTP_ACCEPT_LANGUAGE"],
) if SiteSetting.set_locale_from_accept_language_header
locale
end
end
@ -1,12 +1,13 @@
# frozen_string_literal: true

class DiscourseConnectBase
class ParseError < RuntimeError
end

class ParseError < RuntimeError; end

ACCESSORS = %i{
ACCESSORS = %i[
add_groups
admin moderator
admin
moderator
avatar_force_update
avatar_url
bio
@ -31,11 +32,11 @@ class DiscourseConnectBase
title
username
website
}
]

FIXNUMS = []

BOOLS = %i{
BOOLS = %i[
admin
avatar_force_update
confirmed_2fa
@ -46,7 +47,7 @@ class DiscourseConnectBase
require_2fa
require_activation
suppress_welcome_message
}
]

def self.nonce_expiry_time
@nonce_expiry_time ||= 10.minutes
@ -80,9 +81,11 @@ class DiscourseConnectBase
decoded_hash = Rack::Utils.parse_query(decoded)

if sso.sign(parsed["sso"]) != parsed["sig"]
diags = "\n\nsso: #{parsed["sso"]}\n\nsig: #{parsed["sig"]}\n\nexpected sig: #{sso.sign(parsed["sso"])}"
if parsed["sso"] =~ /[^a-zA-Z0-9=\r\n\/+]/m
raise ParseError, "The SSO field should be Base64 encoded, using only A-Z, a-z, 0-9, +, /, and = characters. Your input contains characters we don't understand as Base64, see http://en.wikipedia.org/wiki/Base64 #{diags}"
diags =
"\n\nsso: #{parsed["sso"]}\n\nsig: #{parsed["sig"]}\n\nexpected sig: #{sso.sign(parsed["sso"])}"
if parsed["sso"] =~ %r{[^a-zA-Z0-9=\r\n/+]}m
raise ParseError,
"The SSO field should be Base64 encoded, using only A-Z, a-z, 0-9, +, /, and = characters. Your input contains characters we don't understand as Base64, see http://en.wikipedia.org/wiki/Base64 #{diags}"
else
raise ParseError, "Bad signature for payload #{diags}"
end
@ -91,9 +94,7 @@ class DiscourseConnectBase
ACCESSORS.each do |k|
val = decoded_hash[k.to_s]
val = val.to_i if FIXNUMS.include? k
if BOOLS.include? k
val = ["true", "false"].include?(val) ? val == "true" : nil
end
val = %w[true false].include?(val) ? val == "true" : nil if BOOLS.include? k
sso.public_send("#{k}=", val)
end

@ -137,12 +138,12 @@ class DiscourseConnectBase

def to_url(base_url = nil)
base = "#{base_url || sso_url}"
"#{base}#{base.include?('?') ? '&' : '?'}#{payload}"
"#{base}#{base.include?("?") ? "&" : "?"}#{payload}"
end

def payload(secret = nil)
payload = Base64.strict_encode64(unsigned_payload)
"sso=#{CGI::escape(payload)}&sig=#{sign(payload, secret)}"
"sso=#{CGI.escape(payload)}&sig=#{sign(payload, secret)}"
end

def unsigned_payload
@ -157,9 +158,7 @@ class DiscourseConnectBase
payload[k] = val
end

@custom_fields&.each do |k, v|
payload["custom.#{k}"] = v.to_s
end
@custom_fields&.each { |k, v| payload["custom.#{k}"] = v.to_s }

payload
end
@ -1,8 +1,10 @@
# frozen_string_literal: true

class DiscourseConnectProvider < DiscourseConnectBase
class BlankSecret < RuntimeError; end
class BlankReturnUrl < RuntimeError; end
class BlankSecret < RuntimeError
end
class BlankReturnUrl < RuntimeError
end

def self.parse(payload, sso_secret = nil, **init_kwargs)
parsed_payload = Rack::Utils.parse_query(payload)
@ -15,11 +17,16 @@ class DiscourseConnectProvider < DiscourseConnectBase
if sso_secret.blank?
begin
host = URI.parse(return_sso_url).host
Rails.logger.warn("SSO failed; website #{host} is not in the `discourse_connect_provider_secrets` site settings")
Rails.logger.warn(
"SSO failed; website #{host} is not in the `discourse_connect_provider_secrets` site settings",
)
rescue StandardError => e
# going for StandardError cause URI::Error may not be enough, eg it parses to something not
# responding to host
Discourse.warn_exception(e, message: "SSO failed; invalid or missing return_sso_url in SSO payload")
Discourse.warn_exception(
e,
message: "SSO failed; invalid or missing return_sso_url in SSO payload",
)
end

raise BlankSecret
@ -31,7 +38,7 @@ class DiscourseConnectProvider < DiscourseConnectBase
def self.lookup_return_sso_url(parsed_payload)
decoded = Base64.decode64(parsed_payload["sso"])
decoded_hash = Rack::Utils.parse_query(decoded)
decoded_hash['return_sso_url']
decoded_hash["return_sso_url"]
end

def self.lookup_sso_secret(return_sso_url, parsed_payload)
@ -39,21 +46,23 @@ class DiscourseConnectProvider < DiscourseConnectBase

return_url_host = URI.parse(return_sso_url).host

provider_secrets = SiteSetting
.discourse_connect_provider_secrets
.split("\n")
.map { |row| row.split("|", 2) }
.sort_by { |k, _| k }
.reverse
provider_secrets =
SiteSetting
.discourse_connect_provider_secrets
.split("\n")
.map { |row| row.split("|", 2) }
.sort_by { |k, _| k }
.reverse

first_domain_match = nil

pair = provider_secrets.find do |domain, configured_secret|
if WildcardDomainChecker.check_domain(domain, return_url_host)
first_domain_match ||= configured_secret
sign(parsed_payload["sso"], configured_secret) == parsed_payload["sig"]
pair =
provider_secrets.find do |domain, configured_secret|
if WildcardDomainChecker.check_domain(domain, return_url_host)
first_domain_match ||= configured_secret
sign(parsed_payload["sso"], configured_secret) == parsed_payload["sig"]
end
end
end

# falls back to a secret which will fail to validate in DiscourseConnectBase
# this ensures error flow is correct

@ -1,12 +1,11 @@
# frozen_string_literal: true

require 'discourse_dev/record'
require 'rails'
require 'faker'
require "discourse_dev/record"
require "rails"
require "faker"

module DiscourseDev
class Category < Record

def initialize
super(::Category, DiscourseDev.config.category[:count])
@parent_category_ids = ::Category.where(parent_category_id: nil).pluck(:id)
@ -29,7 +28,7 @@ module DiscourseDev
description: Faker::Lorem.paragraph,
user_id: ::Discourse::SYSTEM_USER_ID,
color: Faker::Color.hex_color.last(6),
parent_category_id: parent_category_id
parent_category_id: parent_category_id,
}
end

@ -1,7 +1,7 @@
# frozen_string_literal: true

require 'rails'
require 'highline/import'
require "rails"
require "highline/import"

module DiscourseDev
class Config
@ -63,10 +63,11 @@ module DiscourseDev
if settings.present?
email = settings[:email] || "new_user@example.com"

new_user = ::User.create!(
email: email,
username: settings[:username] || UserNameSuggester.suggest(email)
)
new_user =
::User.create!(
email: email,
username: settings[:username] || UserNameSuggester.suggest(email),
)
new_user.email_tokens.update_all confirmed: true
new_user.activate
end
@ -88,15 +89,14 @@ module DiscourseDev
def create_admin_user_from_settings(settings)
email = settings[:email]

admin = ::User.with_email(email).first_or_create!(
email: email,
username: settings[:username] || UserNameSuggester.suggest(email),
password: settings[:password]
)
admin =
::User.with_email(email).first_or_create!(
email: email,
username: settings[:username] || UserNameSuggester.suggest(email),
password: settings[:password],
)
admin.grant_admin!
if admin.trust_level < 1
admin.change_trust_level!(1)
end
admin.change_trust_level!(1) if admin.trust_level < 1
admin.email_tokens.update_all confirmed: true
admin.activate
end
@ -107,10 +107,7 @@ module DiscourseDev
password = ask("Password (optional, press ENTER to skip): ")
username = UserNameSuggester.suggest(email)

admin = ::User.new(
email: email,
username: username
)
admin = ::User.new(email: email, username: username)

if password.present?
admin.password = password
@ -122,7 +119,7 @@ module DiscourseDev
saved = admin.save

if saved
File.open(file_path, 'a') do | file|
File.open(file_path, "a") do |file|
file.puts("admin:")
file.puts(" username: #{admin.username}")
file.puts(" email: #{admin.email}")
@ -137,9 +134,7 @@ module DiscourseDev
admin.save

admin.grant_admin!
if admin.trust_level < 1
admin.change_trust_level!(1)
end
admin.change_trust_level!(1) if admin.trust_level < 1
admin.email_tokens.update_all confirmed: true
admin.activate

@ -1,12 +1,11 @@
# frozen_string_literal: true

require 'discourse_dev/record'
require 'rails'
require 'faker'
require "discourse_dev/record"
require "rails"
require "faker"

module DiscourseDev
class Group < Record

def initialize
super(::Group, DiscourseDev.config.group[:count])
end

@ -1,11 +1,10 @@
# frozen_string_literal: true

require 'discourse_dev/record'
require 'faker'
require "discourse_dev/record"
require "faker"

module DiscourseDev
class Post < Record

attr_reader :topic

def initialize(topic, count)
@ -28,7 +27,7 @@ module DiscourseDev
raw: Faker::DiscourseMarkdown.sandwich(sentences: 5),
created_at: Faker::Time.between(from: topic.last_posted_at, to: DateTime.now),
skip_validations: true,
skip_guardian: true
skip_guardian: true,
}
end

@ -44,13 +43,20 @@ module DiscourseDev
def generate_likes(post)
user_ids = [post.user_id]

Faker::Number.between(from: 0, to: @max_likes_count).times do
user = self.user
next if user_ids.include?(user.id)
Faker::Number
.between(from: 0, to: @max_likes_count)
.times do
user = self.user
next if user_ids.include?(user.id)

PostActionCreator.new(user, post, PostActionType.types[:like], created_at: Faker::Time.between(from: post.created_at, to: DateTime.now)).perform
user_ids << user.id
end
PostActionCreator.new(
user,
post,
PostActionType.types[:like],
created_at: Faker::Time.between(from: post.created_at, to: DateTime.now),
).perform
user_ids << user.id
end
end

def user
@ -90,13 +96,14 @@ module DiscourseDev
count.times do |i|
begin
user = User.random
reply = Faker::DiscourseMarkdown.with_user(user.id) do
{
topic_id: topic.id,
raw: Faker::DiscourseMarkdown.sandwich(sentences: 5),
skip_validations: true
}
end
reply =
Faker::DiscourseMarkdown.with_user(user.id) do
{
topic_id: topic.id,
raw: Faker::DiscourseMarkdown.sandwich(sentences: 5),
skip_validations: true,
}
end
PostCreator.new(user, reply).create!
rescue ActiveRecord::RecordNotSaved => e
puts e
@ -109,6 +116,5 @@ module DiscourseDev
def self.random
super(::Post)
end

end
end

@ -1,11 +1,10 @@
# frozen_string_literal: true

require 'discourse_dev/record'
require 'faker'
require "discourse_dev/record"
require "faker"

module DiscourseDev
class PostRevision < Record

def initialize
super(::PostRevision, DiscourseDev.config.post_revisions[:count])
end

@ -1,8 +1,8 @@
# frozen_string_literal: true

require 'discourse_dev'
require 'rails'
require 'faker'
require "discourse_dev"
require "rails"
require "faker"

module DiscourseDev
class Record
@ -12,11 +12,12 @@ module DiscourseDev
attr_reader :model, :type

def initialize(model, count = DEFAULT_COUNT)
@@initialized ||= begin
Faker::Discourse.unique.clear
RateLimiter.disable
true
end
@@initialized ||=
begin
Faker::Discourse.unique.clear
RateLimiter.disable
true
end

@model = model
@type = model.to_s.downcase.to_sym
@ -40,11 +41,9 @@ module DiscourseDev
if current_count >= @count
puts "Already have #{current_count} #{type} records"

Rake.application.top_level_tasks.each do |task_name|
Rake::Task[task_name].reenable
end
Rake.application.top_level_tasks.each { |task_name| Rake::Task[task_name].reenable }

Rake::Task['dev:repopulate'].invoke
Rake::Task["dev:repopulate"].invoke
return
elsif current_count > 0
@count -= current_count
@ -74,7 +73,9 @@ module DiscourseDev
end

def self.random(model, use_existing_records: true)
model.joins(:_custom_fields).where("#{:type}_custom_fields.name = '#{AUTO_POPULATED}'") if !use_existing_records && model.new.respond_to?(:custom_fields)
if !use_existing_records && model.new.respond_to?(:custom_fields)
model.joins(:_custom_fields).where("#{:type}_custom_fields.name = '#{AUTO_POPULATED}'")
end
count = model.count
raise "#{:type} records are not yet populated" if count == 0

@ -1,12 +1,11 @@
# frozen_string_literal: true

require 'discourse_dev/record'
require 'rails'
require 'faker'
require "discourse_dev/record"
require "rails"
require "faker"

module DiscourseDev
class Tag < Record

def initialize
super(::Tag, DiscourseDev.config.tag[:count])
end
@ -24,9 +23,7 @@ module DiscourseDev
end

def data
{
name: Faker::Discourse.unique.tag,
}
{ name: Faker::Discourse.unique.tag }
end
end
end

@ -1,11 +1,10 @@
# frozen_string_literal: true

require 'discourse_dev/record'
require 'faker'
require "discourse_dev/record"
require "faker"

module DiscourseDev
class Topic < Record

def initialize(private_messages: false, recipient: nil, ignore_current_count: false)
@settings = DiscourseDev.config.topic
@private_messages = private_messages
@ -33,15 +32,9 @@ module DiscourseDev
end

if @category
merge_attributes = {
category: @category.id,
tags: tags
}
merge_attributes = { category: @category.id, tags: tags }
else
merge_attributes = {
archetype: "private_message",
target_usernames: [@recipient]
}
merge_attributes = { archetype: "private_message", target_usernames: [@recipient] }
end

{
@ -51,9 +44,11 @@ module DiscourseDev
topic_opts: {
import_mode: true,
views: Faker::Number.between(from: 1, to: max_views),
custom_fields: { dev_sample: true }
custom_fields: {
dev_sample: true,
},
},
skip_validations: true
skip_validations: true,
}.merge(merge_attributes)
end

@ -61,7 +56,10 @@ module DiscourseDev
if current_count < I18n.t("faker.discourse.topics").count
Faker::Discourse.unique.topic
else
Faker::Lorem.unique.sentence(word_count: 5, supplemental: true, random_words_to_add: 4).chomp(".")
Faker::Lorem
.unique
.sentence(word_count: 5, supplemental: true, random_words_to_add: 4)
.chomp(".")
end
end

@ -70,9 +68,9 @@ module DiscourseDev

@tags = []

Faker::Number.between(from: @settings.dig(:tags, :min), to: @settings.dig(:tags, :max)).times do
@tags << Faker::Discourse.tag
end
Faker::Number
.between(from: @settings.dig(:tags, :min), to: @settings.dig(:tags, :max))
.times { @tags << Faker::Discourse.tag }

@tags.uniq
end
@ -92,7 +90,11 @@ module DiscourseDev
if override = @settings.dig(:replies, :overrides).find { |o| o[:title] == topic_data[:title] }
reply_count = override[:count]
else
reply_count = Faker::Number.between(from: @settings.dig(:replies, :min), to: @settings.dig(:replies, :max))
reply_count =
Faker::Number.between(
from: @settings.dig(:replies, :min),
to: @settings.dig(:replies, :max),
)
end

topic = post.topic
@ -123,9 +125,7 @@ module DiscourseDev
end

def delete_unwanted_sidekiq_jobs
Sidekiq::ScheduledSet.new.each do |job|
job.delete if job.item["class"] == "Jobs::UserEmail"
end
Sidekiq::ScheduledSet.new.each { |job| job.delete if job.item["class"] == "Jobs::UserEmail" }
end
end
end

@ -1,7 +1,6 @@
# frozen_string_literal: true

class DiscourseDiff

MAX_DIFFERENCE = 200

def initialize(before, after)
@ -9,8 +8,8 @@ class DiscourseDiff
@after = after
before_html = tokenize_html_blocks(@before)
after_html = tokenize_html_blocks(@after)
before_markdown = tokenize_line(CGI::escapeHTML(@before))
after_markdown = tokenize_line(CGI::escapeHTML(@after))
before_markdown = tokenize_line(CGI.escapeHTML(@before))
after_markdown = tokenize_line(CGI.escapeHTML(@after))

@block_by_block_diff = ONPDiff.new(before_html, after_html).paragraph_diff
@line_by_line_diff = ONPDiff.new(before_markdown, after_markdown).short_diff
@ -21,7 +20,8 @@ class DiscourseDiff
inline = []
while i < @block_by_block_diff.size
op_code = @block_by_block_diff[i][1]
if op_code == :common then inline << @block_by_block_diff[i][0]
if op_code == :common
inline << @block_by_block_diff[i][0]
else
if op_code == :delete
opposite_op_code = :add
@ -36,7 +36,11 @@ class DiscourseDiff
end

if i + 1 < @block_by_block_diff.size && @block_by_block_diff[i + 1][1] == opposite_op_code
diff = ONPDiff.new(tokenize_html(@block_by_block_diff[first][0]), tokenize_html(@block_by_block_diff[second][0])).diff
diff =
ONPDiff.new(
tokenize_html(@block_by_block_diff[first][0]),
tokenize_html(@block_by_block_diff[second][0]),
).diff
inline << generate_inline_html(diff)
i += 1
else
@ -73,7 +77,11 @@ class DiscourseDiff
end

if i + 1 < @block_by_block_diff.size && @block_by_block_diff[i + 1][1] == opposite_op_code
diff = ONPDiff.new(tokenize_html(@block_by_block_diff[first][0]), tokenize_html(@block_by_block_diff[second][0])).diff
diff =
ONPDiff.new(
tokenize_html(@block_by_block_diff[first][0]),
tokenize_html(@block_by_block_diff[second][0]),
).diff
deleted, inserted = generate_side_by_side_html(diff)
left << deleted
right << inserted
@ -109,9 +117,13 @@ class DiscourseDiff
end

if i + 1 < @line_by_line_diff.size && @line_by_line_diff[i + 1][1] == opposite_op_code
before_tokens, after_tokens = tokenize_markdown(@line_by_line_diff[first][0]), tokenize_markdown(@line_by_line_diff[second][0])
before_tokens, after_tokens =
tokenize_markdown(@line_by_line_diff[first][0]),
tokenize_markdown(@line_by_line_diff[second][0])
if (before_tokens.size - after_tokens.size).abs > MAX_DIFFERENCE
before_tokens, after_tokens = tokenize_line(@line_by_line_diff[first][0]), tokenize_line(@line_by_line_diff[second][0])
before_tokens, after_tokens =
tokenize_line(@line_by_line_diff[first][0]),
tokenize_line(@line_by_line_diff[second][0])
end
diff = ONPDiff.new(before_tokens, after_tokens).short_diff
deleted, inserted = generate_side_by_side_markdown(diff)
@ -178,7 +190,7 @@ class DiscourseDiff
def add_class_or_wrap_in_tags(html_or_text, klass)
result = html_or_text.dup
index_of_next_chevron = result.index(">")
if result.size > 0 && result[0] == '<' && index_of_next_chevron
if result.size > 0 && result[0] == "<" && index_of_next_chevron
index_of_class = result.index("class=")
if index_of_class.nil? || index_of_class > index_of_next_chevron
# we do not have a class for the current tag
@ -202,9 +214,12 @@ class DiscourseDiff
inline = []
diff.each do |d|
case d[1]
when :common then inline << d[0]
when :delete then inline << add_class_or_wrap_in_tags(d[0], "del")
when :add then inline << add_class_or_wrap_in_tags(d[0], "ins")
when :common
inline << d[0]
when :delete
inline << add_class_or_wrap_in_tags(d[0], "del")
when :add
inline << add_class_or_wrap_in_tags(d[0], "ins")
end
end
inline
@ -217,8 +232,10 @@ class DiscourseDiff
when :common
deleted << d[0]
inserted << d[0]
when :delete then deleted << add_class_or_wrap_in_tags(d[0], "del")
when :add then inserted << add_class_or_wrap_in_tags(d[0], "ins")
when :delete
deleted << add_class_or_wrap_in_tags(d[0], "del")
when :add
inserted << add_class_or_wrap_in_tags(d[0], "ins")
end
end
[deleted, inserted]
@ -231,15 +248,16 @@ class DiscourseDiff
when :common
deleted << d[0]
inserted << d[0]
when :delete then deleted << "<del>#{d[0]}</del>"
when :add then inserted << "<ins>#{d[0]}</ins>"
when :delete
deleted << "<del>#{d[0]}</del>"
when :add
inserted << "<ins>#{d[0]}</ins>"
end
end
[deleted, inserted]
end

class HtmlTokenizer < Nokogiri::XML::SAX::Document

attr_accessor :tokens

def initialize
@ -253,23 +271,21 @@ class DiscourseDiff
me.tokens
end

USELESS_TAGS = %w{html body}
USELESS_TAGS = %w[html body]
def start_element(name, attributes = [])
return if USELESS_TAGS.include?(name)
attrs = attributes.map { |a| " #{a[0]}=\"#{CGI::escapeHTML(a[1])}\"" }.join
attrs = attributes.map { |a| " #{a[0]}=\"#{CGI.escapeHTML(a[1])}\"" }.join
@tokens << "<#{name}#{attrs}>"
end

AUTOCLOSING_TAGS = %w{area base br col embed hr img input meta}
AUTOCLOSING_TAGS = %w[area base br col embed hr img input meta]
def end_element(name)
return if USELESS_TAGS.include?(name) || AUTOCLOSING_TAGS.include?(name)
@tokens << "</#{name}>"
end

def characters(string)
@tokens.concat string.scan(/\W|\w+[ \t]*/).map { |x| CGI::escapeHTML(x) }
@tokens.concat string.scan(/\W|\w+[ \t]*/).map { |x| CGI.escapeHTML(x) }
end

end

end

@ -3,21 +3,23 @@
# This is meant to be used by plugins to trigger and listen to events
# So we can execute code when things happen.
class DiscourseEvent

# Defaults to a hash where default values are empty sets.
def self.events
@events ||= Hash.new { |hash, key| hash[key] = Set.new }
end

def self.trigger(event_name, *args, **kwargs)
events[event_name].each do |event|
event.call(*args, **kwargs)
end
events[event_name].each { |event| event.call(*args, **kwargs) }
end

def self.on(event_name, &block)
if event_name == :site_setting_saved
Discourse.deprecate("The :site_setting_saved event is deprecated. Please use :site_setting_changed instead", since: "2.3.0beta8", drop_from: "2.4", raise_error: true)
Discourse.deprecate(
"The :site_setting_saved event is deprecated. Please use :site_setting_changed instead",
since: "2.3.0beta8",
drop_from: "2.4",
raise_error: true,
)
end
events[event_name] << block
end

@ -1,16 +1,17 @@
# frozen_string_literal: true

module DiscourseHub

STATS_FETCHED_AT_KEY = "stats_fetched_at"

def self.version_check_payload
default_payload = { installed_version: Discourse::VERSION::STRING }.merge!(Discourse.git_branch == "unknown" ? {} : { branch: Discourse.git_branch })
default_payload = { installed_version: Discourse::VERSION::STRING }.merge!(
Discourse.git_branch == "unknown" ? {} : { branch: Discourse.git_branch },
)
default_payload.merge!(get_payload)
end

def self.discourse_version_check
get('/version_check', version_check_payload)
get("/version_check", version_check_payload)
end

def self.stats_fetched_at=(time_with_zone)
@ -18,7 +19,11 @@ module DiscourseHub
end

def self.get_payload
SiteSetting.share_anonymized_statistics && stats_fetched_at < 7.days.ago ? About.fetch_cached_stats.symbolize_keys : {}
if SiteSetting.share_anonymized_statistics && stats_fetched_at < 7.days.ago
About.fetch_cached_stats.symbolize_keys
else
{}
end
end

def self.get(rel_url, params = {})
@ -40,27 +45,39 @@ module DiscourseHub
def self.singular_action(action, rel_url, params = {})
connect_opts = connect_opts(params)

JSON.parse(Excon.public_send(action,
"#{hub_base_url}#{rel_url}",
{
headers: { 'Referer' => referer, 'Accept' => accepts.join(', ') },
query: params,
omit_default_port: true
}.merge(connect_opts)
).body)
JSON.parse(
Excon.public_send(
action,
"#{hub_base_url}#{rel_url}",
{
headers: {
"Referer" => referer,
"Accept" => accepts.join(", "),
},
query: params,
omit_default_port: true,
}.merge(connect_opts),
).body,
)
end

def self.collection_action(action, rel_url, params = {})
connect_opts = connect_opts(params)

response = Excon.public_send(action,
"#{hub_base_url}#{rel_url}",
{
body: JSON[params],
headers: { 'Referer' => referer, 'Accept' => accepts.join(', '), "Content-Type" => "application/json" },
omit_default_port: true
}.merge(connect_opts)
)
response =
Excon.public_send(
action,
"#{hub_base_url}#{rel_url}",
{
body: JSON[params],
headers: {
"Referer" => referer,
"Accept" => accepts.join(", "),
"Content-Type" => "application/json",
},
omit_default_port: true,
}.merge(connect_opts),
)

if (status = response.status) != 200
Rails.logger.warn(response_status_log_message(rel_url, status))
@ -87,14 +104,14 @@ module DiscourseHub

def self.hub_base_url
if Rails.env.production?
ENV['HUB_BASE_URL'] || 'https://api.discourse.org/api'
ENV["HUB_BASE_URL"] || "https://api.discourse.org/api"
else
ENV['HUB_BASE_URL'] || 'http://local.hub:3000/api'
ENV["HUB_BASE_URL"] || "http://local.hub:3000/api"
end
end

def self.accepts
['application/json', 'application/vnd.discoursehub.v1']
%w[application/json application/vnd.discoursehub.v1]
end

def self.referer
@ -105,5 +122,4 @@ module DiscourseHub
t = Discourse.redis.get(STATS_FETCHED_AT_KEY)
t ? Time.zone.at(t.to_i) : 1.year.ago
end

end

@ -1,7 +1,7 @@
# frozen_string_literal: true

require 'maxminddb'
require 'resolv'
require "maxminddb"
require "resolv"

class DiscourseIpInfo
include Singleton
@ -11,13 +11,13 @@ class DiscourseIpInfo
end

def open_db(path)
@loc_mmdb = mmdb_load(File.join(path, 'GeoLite2-City.mmdb'))
@asn_mmdb = mmdb_load(File.join(path, 'GeoLite2-ASN.mmdb'))
@loc_mmdb = mmdb_load(File.join(path, "GeoLite2-City.mmdb"))
@asn_mmdb = mmdb_load(File.join(path, "GeoLite2-ASN.mmdb"))
@cache = LruRedux::ThreadSafeCache.new(2000)
end

def self.path
@path ||= File.join(Rails.root, 'vendor', 'data')
@path ||= File.join(Rails.root, "vendor", "data")
end

def self.mmdb_path(name)
@ -25,7 +25,6 @@ class DiscourseIpInfo
end

def self.mmdb_download(name)

if GlobalSetting.maxmind_license_key.blank?
STDERR.puts "MaxMind IP database updates require a license"
STDERR.puts "Please set DISCOURSE_MAXMIND_LICENSE_KEY to one you generated at https://www.maxmind.com"
@ -34,41 +33,29 @@ class DiscourseIpInfo

FileUtils.mkdir_p(path)

url = "https://download.maxmind.com/app/geoip_download?license_key=#{GlobalSetting.maxmind_license_key}&edition_id=#{name}&suffix=tar.gz"
url =
"https://download.maxmind.com/app/geoip_download?license_key=#{GlobalSetting.maxmind_license_key}&edition_id=#{name}&suffix=tar.gz"

gz_file = FileHelper.download(
url,
max_file_size: 100.megabytes,
tmp_file_name: "#{name}.gz",
validate_uri: false,
follow_redirect: false
)
gz_file =
FileHelper.download(
url,
max_file_size: 100.megabytes,
tmp_file_name: "#{name}.gz",
validate_uri: false,
follow_redirect: false,
)

filename = File.basename(gz_file.path)

dir = "#{Dir.tmpdir}/#{SecureRandom.hex}"

Discourse::Utils.execute_command(
"mkdir", "-p", dir
)
Discourse::Utils.execute_command("mkdir", "-p", dir)

Discourse::Utils.execute_command(
"cp",
gz_file.path,
"#{dir}/#{filename}"
)
Discourse::Utils.execute_command("cp", gz_file.path, "#{dir}/#{filename}")

Discourse::Utils.execute_command(
"tar",
"-xzvf",
"#{dir}/#{filename}",
chdir: dir
)

Dir["#{dir}/**/*.mmdb"].each do |f|
FileUtils.mv(f, mmdb_path(name))
end
Discourse::Utils.execute_command("tar", "-xzvf", "#{dir}/#{filename}", chdir: dir)

Dir["#{dir}/**/*.mmdb"].each { |f| FileUtils.mv(f, mmdb_path(name)) }
ensure
FileUtils.rm_r(dir, force: true) if dir
gz_file&.close!
@ -96,7 +83,8 @@ class DiscourseIpInfo
if result&.found?
ret[:country] = result.country.name(locale) || result.country.name
ret[:country_code] = result.country.iso_code
ret[:region] = result.subdivisions.most_specific.name(locale) || result.subdivisions.most_specific.name
ret[:region] = result.subdivisions.most_specific.name(locale) ||
result.subdivisions.most_specific.name
ret[:city] = result.city.name(locale) || result.city.name
ret[:latitude] = result.location.latitude
ret[:longitude] = result.location.longitude
@ -104,13 +92,18 @@ class DiscourseIpInfo

# used by plugins or API to locate users more accurately
ret[:geoname_ids] = [
result.continent.geoname_id, result.country.geoname_id, result.city.geoname_id,
*result.subdivisions.map(&:geoname_id)
result.continent.geoname_id,
result.country.geoname_id,
result.city.geoname_id,
*result.subdivisions.map(&:geoname_id),
]
ret[:geoname_ids].compact!
end
rescue => e
Discourse.warn_exception(e, message: "IP #{ip} could not be looked up in MaxMind GeoLite2-City database.")
Discourse.warn_exception(
e,
message: "IP #{ip} could not be looked up in MaxMind GeoLite2-City database.",
)
end
end

@ -123,7 +116,10 @@ class DiscourseIpInfo
ret[:organization] = result["autonomous_system_organization"]
end
rescue => e
Discourse.warn_exception(e, message: "IP #{ip} could not be looked up in MaxMind GeoLite2-ASN database.")
Discourse.warn_exception(
e,
message: "IP #{ip} could not be looked up in MaxMind GeoLite2-ASN database.",
)
end
end

@ -142,10 +138,13 @@ class DiscourseIpInfo

def get(ip, locale: :en, resolve_hostname: false)
ip = ip.to_s
locale = locale.to_s.sub('_', '-')
locale = locale.to_s.sub("_", "-")

@cache["#{ip}-#{locale}-#{resolve_hostname}"] ||=
lookup(ip, locale: locale, resolve_hostname: resolve_hostname)
@cache["#{ip}-#{locale}-#{resolve_hostname}"] ||= lookup(
ip,
locale: locale,
resolve_hostname: resolve_hostname,
)
end

def self.open_db(path)

@ -1,27 +1,28 @@
# frozen_string_literal: true
require 'execjs'
require 'mini_racer'
require "execjs"
require "mini_racer"

class DiscourseJsProcessor
class TranspileError < StandardError; end
class TranspileError < StandardError
end

DISCOURSE_COMMON_BABEL_PLUGINS = [
'proposal-optional-chaining',
['proposal-decorators', { legacy: true } ],
'transform-template-literals',
'proposal-class-properties',
'proposal-class-static-block',
'proposal-private-property-in-object',
'proposal-private-methods',
'proposal-numeric-separator',
'proposal-logical-assignment-operators',
'proposal-nullish-coalescing-operator',
'proposal-json-strings',
'proposal-optional-catch-binding',
'transform-parameters',
'proposal-async-generator-functions',
'proposal-object-rest-spread',
'proposal-export-namespace-from',
"proposal-optional-chaining",
["proposal-decorators", { legacy: true }],
"transform-template-literals",
"proposal-class-properties",
"proposal-class-static-block",
"proposal-private-property-in-object",
"proposal-private-methods",
"proposal-numeric-separator",
"proposal-logical-assignment-operators",
"proposal-nullish-coalescing-operator",
"proposal-json-strings",
"proposal-optional-catch-binding",
"transform-parameters",
"proposal-async-generator-functions",
"proposal-object-rest-spread",
"proposal-export-namespace-from",
]

def self.plugin_transpile_paths
@ -33,22 +34,22 @@ class DiscourseJsProcessor
end

def self.call(input)
root_path = input[:load_path] || ''
logical_path = (input[:filename] || '').sub(root_path, '').gsub(/\.(js|es6).*$/, '').sub(/^\//, '')
root_path = input[:load_path] || ""
logical_path =
(input[:filename] || "").sub(root_path, "").gsub(/\.(js|es6).*$/, "").sub(%r{^/}, "")
data = input[:data]

if should_transpile?(input[:filename])
data = transpile(data, root_path, logical_path)
end
data = transpile(data, root_path, logical_path) if should_transpile?(input[:filename])

# add sourceURL until we can do proper source maps
if !Rails.env.production? && !ember_cli?(input[:filename])
plugin_name = root_path[/\/plugins\/([\w-]+)\/assets/, 1]
source_url = if plugin_name
"plugins/#{plugin_name}/assets/javascripts/#{logical_path}"
else
logical_path
end
plugin_name = root_path[%r{/plugins/([\w-]+)/assets}, 1]
source_url =
if plugin_name
"plugins/#{plugin_name}/assets/javascripts/#{logical_path}"
else
logical_path
end

data = "eval(#{data.inspect} + \"\\n//# sourceURL=#{source_url}\");\n"
end
@ -62,7 +63,7 @@ class DiscourseJsProcessor
end

def self.should_transpile?(filename)
filename ||= ''
filename ||= ""

# skip ember cli
return false if ember_cli?(filename)
@ -73,7 +74,7 @@ class DiscourseJsProcessor
# For .js check the path...
return false unless filename.end_with?(".js") || filename.end_with?(".js.erb")

relative_path = filename.sub(Rails.root.to_s, '').sub(/^\/*/, '')
relative_path = filename.sub(Rails.root.to_s, "").sub(%r{^/*}, "")

js_root = "app/assets/javascripts"
test_root = "test/javascripts"
@ -81,26 +82,27 @@ class DiscourseJsProcessor
return false if relative_path.start_with?("#{js_root}/locales/")
return false if relative_path.start_with?("#{js_root}/plugins/")

return true if %w(
start-discourse
onpopstate-handler
google-tag-manager
google-universal-analytics-v3
google-universal-analytics-v4
activate-account
auto-redirect
embed-application
app-boot
).any? { |f| relative_path == "#{js_root}/#{f}.js" }
if %w[
start-discourse
onpopstate-handler
google-tag-manager
google-universal-analytics-v3
google-universal-analytics-v4
activate-account
auto-redirect
embed-application
app-boot
].any? { |f| relative_path == "#{js_root}/#{f}.js" }
return true
end

return true if plugin_transpile_paths.any? { |prefix| relative_path.start_with?(prefix) }

!!(relative_path =~ /^#{js_root}\/[^\/]+\// ||
relative_path =~ /^#{test_root}\/[^\/]+\//)
!!(relative_path =~ %r{^#{js_root}/[^/]+/} || relative_path =~ %r{^#{test_root}/[^/]+/})
end

def self.skip_module?(data)
!!(data.present? && data =~ /^\/\/ discourse-skip-module$/)
!!(data.present? && data =~ %r{^// discourse-skip-module$})
end

class Transpiler
@ -113,19 +115,17 @@ class DiscourseJsProcessor

def self.load_file_in_context(ctx, path, wrap_in_module: nil)
contents = File.read("#{Rails.root}/app/assets/javascripts/#{path}")
if wrap_in_module
contents = <<~JS
contents = <<~JS if wrap_in_module
define(#{wrap_in_module.to_json}, ["exports", "require", "module"], function(exports, require, module){
#{contents}
});
JS
end
ctx.eval(contents, filename: path)
end

def self.create_new_context
# timeout any eval that takes longer than 15 seconds
ctx = MiniRacer::Context.new(timeout: 15000, ensure_gc_after_idle: 2000)
ctx = MiniRacer::Context.new(timeout: 15_000, ensure_gc_after_idle: 2000)

# General shims
ctx.attach("rails.logger.info", proc { |err| Rails.logger.info(err.to_s) })
@ -158,10 +158,26 @@ class DiscourseJsProcessor

# Template Compiler
load_file_in_context(ctx, "node_modules/ember-source/dist/ember-template-compiler.js")
load_file_in_context(ctx, "node_modules/babel-plugin-ember-template-compilation/src/plugin.js", wrap_in_module: "babel-plugin-ember-template-compilation/index")
load_file_in_context(ctx, "node_modules/babel-plugin-ember-template-compilation/src/expression-parser.js", wrap_in_module: "babel-plugin-ember-template-compilation/expression-parser")
load_file_in_context(ctx, "node_modules/babel-import-util/src/index.js", wrap_in_module: "babel-import-util")
load_file_in_context(ctx, "node_modules/ember-cli-htmlbars/lib/colocated-babel-plugin.js", wrap_in_module: "colocated-babel-plugin")
load_file_in_context(
ctx,
"node_modules/babel-plugin-ember-template-compilation/src/plugin.js",
wrap_in_module: "babel-plugin-ember-template-compilation/index",
)
load_file_in_context(
ctx,
"node_modules/babel-plugin-ember-template-compilation/src/expression-parser.js",
wrap_in_module: "babel-plugin-ember-template-compilation/expression-parser",
)
load_file_in_context(
ctx,
"node_modules/babel-import-util/src/index.js",
wrap_in_module: "babel-import-util",
)
load_file_in_context(
ctx,
"node_modules/ember-cli-htmlbars/lib/colocated-babel-plugin.js",
wrap_in_module: "colocated-babel-plugin",
)

# Widget HBS compiler
widget_hbs_compiler_source = File.read("#{Rails.root}/lib/javascripts/widget-hbs-compiler.js")
@ -170,32 +186,44 @@ class DiscourseJsProcessor
#{widget_hbs_compiler_source}
});
JS
widget_hbs_compiler_transpiled = ctx.call("rawBabelTransform", widget_hbs_compiler_source, {
ast: false,
moduleId: 'widget-hbs-compiler',
plugins: DISCOURSE_COMMON_BABEL_PLUGINS
})
widget_hbs_compiler_transpiled =
ctx.call(
"rawBabelTransform",
widget_hbs_compiler_source,
{ ast: false, moduleId: "widget-hbs-compiler", plugins: DISCOURSE_COMMON_BABEL_PLUGINS },
)
ctx.eval(widget_hbs_compiler_transpiled, filename: "widget-hbs-compiler.js")

# Raw HBS compiler
load_file_in_context(ctx, "node_modules/handlebars/dist/handlebars.js", wrap_in_module: "handlebars")

raw_hbs_transpiled = ctx.call(
"rawBabelTransform",
File.read("#{Rails.root}/app/assets/javascripts/discourse-common/addon/lib/raw-handlebars.js"),
{
ast: false,
moduleId: "raw-handlebars",
plugins: [
['transform-modules-amd', { noInterop: true }],
*DISCOURSE_COMMON_BABEL_PLUGINS
]
}
load_file_in_context(
ctx,
"node_modules/handlebars/dist/handlebars.js",
wrap_in_module: "handlebars",
)

raw_hbs_transpiled =
ctx.call(
"rawBabelTransform",
File.read(
"#{Rails.root}/app/assets/javascripts/discourse-common/addon/lib/raw-handlebars.js",
),
{
ast: false,
moduleId: "raw-handlebars",
plugins: [
["transform-modules-amd", { noInterop: true }],
*DISCOURSE_COMMON_BABEL_PLUGINS,
],
},
)
ctx.eval(raw_hbs_transpiled, filename: "raw-handlebars.js")

# Theme template AST transformation plugins
load_file_in_context(ctx, "discourse-js-processor.js", wrap_in_module: "discourse-js-processor")
load_file_in_context(
ctx,
"discourse-js-processor.js",
wrap_in_module: "discourse-js-processor",
)

# Make interfaces available via `v8.call`
ctx.eval <<~JS
@ -262,10 +290,10 @@ class DiscourseJsProcessor
{
skip_module: @skip_module,
moduleId: module_name(root_path, logical_path),
filename: logical_path || 'unknown',
filename: logical_path || "unknown",
themeId: theme_id,
commonPlugins: DISCOURSE_COMMON_BABEL_PLUGINS
}
commonPlugins: DISCOURSE_COMMON_BABEL_PLUGINS,
},
)
end

@ -274,15 +302,16 @@ class DiscourseJsProcessor

root_base = File.basename(Rails.root)
# If the resource is a plugin, use the plugin name as a prefix
if root_path =~ /(.*\/#{root_base}\/plugins\/[^\/]+)\//
if root_path =~ %r{(.*/#{root_base}/plugins/[^/]+)/}
plugin_path = "#{Regexp.last_match[1]}/plugin.rb"

plugin = Discourse.plugins.find { |p| p.path == plugin_path }
path = "discourse/plugins/#{plugin.name}/#{logical_path.sub(/javascripts\//, '')}" if plugin
path =
"discourse/plugins/#{plugin.name}/#{logical_path.sub(%r{javascripts/}, "")}" if plugin
end

# We need to strip the app subdirectory to replicate how ember-cli works.
path || logical_path&.gsub('app/', '')&.gsub('addon/', '')&.gsub('admin/addon', 'admin')
path || logical_path&.gsub("app/", "")&.gsub("addon/", "")&.gsub("admin/addon", "admin")
end

def compile_raw_template(source, theme_id: nil)

@ -1,27 +1,28 @@
# frozen_string_literal: true

require 'logstash-logger'
require "logstash-logger"

class DiscourseLogstashLogger
def self.logger(uri:, type:)
# See Discourse.os_hostname
hostname = begin
require 'socket'
Socket.gethostname
rescue => e
`hostname`.chomp
end
hostname =
begin
require "socket"
Socket.gethostname
rescue => e
`hostname`.chomp
end

LogStashLogger.new(
uri: uri,
sync: true,
customize_event: ->(event) {
event['hostname'] = hostname
event['severity_name'] = event['severity']
event['severity'] = Object.const_get("Logger::Severity::#{event['severity']}")
event['type'] = type
event['pid'] = Process.pid
},
customize_event: ->(event) do
event["hostname"] = hostname
event["severity_name"] = event["severity"]
event["severity"] = Object.const_get("Logger::Severity::#{event["severity"]}")
event["type"] = type
event["pid"] = Process.pid
end,
)
end
end

@ -4,7 +4,6 @@
# A class that handles interaction between a plugin and the Discourse App.
#
class DiscoursePluginRegistry

# Plugins often need to be able to register additional handlers, data, or
# classes that will be used by core classes. This should be used if you
# need to control which type the registry is, and if it doesn't need to
@ -24,9 +23,7 @@ class DiscoursePluginRegistry
instance_variable_set(:"@#{register_name}", type.new)
end

define_method(register_name) do
self.class.public_send(register_name)
end
define_method(register_name) { self.class.public_send(register_name) }
end

# Plugins often need to add values to a list, and we need to filter those
@ -45,10 +42,7 @@ class DiscoursePluginRegistry

define_singleton_method(register_name) do
unfiltered = public_send(:"_raw_#{register_name}")
unfiltered
.filter { |v| v[:plugin].enabled? }
.map { |v| v[:value] }
.uniq
unfiltered.filter { |v| v[:plugin].enabled? }.map { |v| v[:value] }.uniq
end

define_singleton_method("register_#{register_name.to_s.singularize}") do |value, plugin|
@ -158,9 +152,7 @@ class DiscoursePluginRegistry
next if each_options[:admin]
end

Dir.glob("#{root}/**/*.#{ext}") do |f|
yield f
end
Dir.glob("#{root}/**/*.#{ext}") { |f| yield f }
end
end

@ -227,7 +219,7 @@ class DiscoursePluginRegistry

def self.seed_paths
result = SeedFu.fixture_paths.dup
unless Rails.env.test? && ENV['LOAD_PLUGINS'] != "1"
unless Rails.env.test? && ENV["LOAD_PLUGINS"] != "1"
seed_path_builders.each { |b| result += b.call }
end
result.uniq
@ -239,7 +231,7 @@ class DiscoursePluginRegistry

VENDORED_CORE_PRETTY_TEXT_MAP = {
"moment.js" => "vendor/assets/javascripts/moment.js",
"moment-timezone.js" => "vendor/assets/javascripts/moment-timezone-with-data.js"
"moment-timezone.js" => "vendor/assets/javascripts/moment-timezone-with-data.js",
}
def self.core_asset_for_name(name)
asset = VENDORED_CORE_PRETTY_TEXT_MAP[name]
@ -248,16 +240,12 @@ class DiscoursePluginRegistry
end

def self.reset!
@@register_names.each do |name|
instance_variable_set(:"@#{name}", nil)
end
@@register_names.each { |name| instance_variable_set(:"@#{name}", nil) }
end

def self.reset_register!(register_name)
found_register = @@register_names.detect { |name| name == register_name }

if found_register
instance_variable_set(:"@#{found_register}", nil)
end
instance_variable_set(:"@#{found_register}", nil) if found_register
end
end

@ -46,15 +46,103 @@ class DiscourseRedis
end

# Proxy key methods through, but prefix the keys with the namespace
[:append, :blpop, :brpop, :brpoplpush, :decr, :decrby, :expire, :expireat, :get, :getbit, :getrange, :getset,
:hdel, :hexists, :hget, :hgetall, :hincrby, :hincrbyfloat, :hkeys, :hlen, :hmget, :hmset, :hset, :hsetnx, :hvals, :incr,
:incrby, :incrbyfloat, :lindex, :linsert, :llen, :lpop, :lpush, :lpushx, :lrange, :lrem, :lset, :ltrim,
:mapped_hmset, :mapped_hmget, :mapped_mget, :mapped_mset, :mapped_msetnx, :move, :mset,
:msetnx, :persist, :pexpire, :pexpireat, :psetex, :pttl, :rename, :renamenx, :rpop, :rpoplpush, :rpush, :rpushx, :sadd, :sadd?, :scard,
:sdiff, :set, :setbit, :setex, :setnx, :setrange, :sinter, :sismember, :smembers, :sort, :spop, :srandmember, :srem, :srem?, :strlen,
:sunion, :ttl, :type, :watch, :zadd, :zcard, :zcount, :zincrby, :zrange, :zrangebyscore, :zrank, :zrem, :zremrangebyrank,
:zremrangebyscore, :zrevrange, :zrevrangebyscore, :zrevrank, :zrangebyscore,
:dump, :restore].each do |m|
%i[
append
blpop
brpop
brpoplpush
decr
decrby
expire
expireat
get
getbit
getrange
getset
hdel
hexists
hget
hgetall
hincrby
hincrbyfloat
hkeys
hlen
hmget
hmset
hset
hsetnx
hvals
incr
incrby
incrbyfloat
lindex
linsert
llen
lpop
lpush
lpushx
lrange
lrem
lset
ltrim
mapped_hmset
mapped_hmget
mapped_mget
mapped_mset
mapped_msetnx
move
mset
msetnx
persist
pexpire
pexpireat
psetex
pttl
rename
renamenx
rpop
rpoplpush
rpush
rpushx
sadd
sadd?
scard
sdiff
set
setbit
setex
setnx
setrange
sinter
sismember
smembers
sort
spop
srandmember
srem
srem?
strlen
sunion
ttl
type
watch
zadd
zcard
zcount
zincrby
zrange
zrangebyscore
zrank
zrem
zremrangebyrank
zremrangebyscore
zrevrange
zrevrangebyscore
zrevrank
zrangebyscore
dump
restore
].each do |m|
define_method m do |*args, **kwargs|
args[0] = "#{namespace}:#{args[0]}" if @namespace
DiscourseRedis.ignore_readonly { @redis.public_send(m, *args, **kwargs) }
@ -72,7 +160,7 @@ class DiscourseRedis
end

def mget(*args)
args.map! { |a| "#{namespace}:#{a}" } if @namespace
args.map! { |a| "#{namespace}:#{a}" } if @namespace
DiscourseRedis.ignore_readonly { @redis.mget(*args) }
end

@ -86,14 +174,13 @@ class DiscourseRedis

def scan_each(options = {}, &block)
DiscourseRedis.ignore_readonly do
match = options[:match].presence || '*'
match = options[:match].presence || "*"

options[:match] =
if @namespace
"#{namespace}:#{match}"
else
match
end
options[:match] = if @namespace
"#{namespace}:#{match}"
else
match
end

if block
@redis.scan_each(**options) do |key|
@ -101,17 +188,19 @@ class DiscourseRedis
block.call(key)
end
else
@redis.scan_each(**options).map do |key|
key = remove_namespace(key) if @namespace
key
end
@redis
.scan_each(**options)
.map do |key|
key = remove_namespace(key) if @namespace
key
end
end
end
end

def keys(pattern = nil)
DiscourseRedis.ignore_readonly do
pattern = pattern || '*'
pattern = pattern || "*"
pattern = "#{namespace}:#{pattern}" if @namespace
keys = @redis.keys(pattern)

@ -125,9 +214,7 @@ class DiscourseRedis
end

def delete_prefixed(prefix)
DiscourseRedis.ignore_readonly do
keys("#{prefix}*").each { |k| Discourse.redis.del(k) }
end
DiscourseRedis.ignore_readonly { keys("#{prefix}*").each { |k| Discourse.redis.del(k) } }
end

def reconnect

@ -6,7 +6,8 @@
class DiscourseSourcemappingUrlProcessor < Sprockets::Rails::SourcemappingUrlProcessor
def self.sourcemap_asset_path(sourcemap_logical_path, context:)
result = super(sourcemap_logical_path, context: context)
if (File.basename(sourcemap_logical_path) === sourcemap_logical_path) || sourcemap_logical_path.start_with?("plugins/")
if (File.basename(sourcemap_logical_path) === sourcemap_logical_path) ||
sourcemap_logical_path.start_with?("plugins/")
# If the original sourcemap reference is relative, keep it relative
result = File.basename(result)
end

@ -1,7 +1,6 @@
|
||||
# frozen_string_literal: true
|
||||
|
||||
module DiscourseTagging
|
||||
|
||||
TAGS_FIELD_NAME ||= "tags"
|
||||
TAGS_FILTER_REGEXP ||= /[\/\?#\[\]@!\$&'\(\)\*\+,;=\.%\\`^\s|\{\}"<>]+/ # /?#[]@!$&'()*+,;=.%\`^|{}"<>
|
||||
TAGS_STAFF_CACHE_KEY ||= "staff_tag_names"
|
||||
@ -22,9 +21,11 @@ module DiscourseTagging
|
||||
tag_names = DiscourseTagging.tags_for_saving(tag_names_arg, guardian) || []
|
||||
|
||||
if !tag_names.empty?
|
||||
Tag.where_name(tag_names).joins(:target_tag).includes(:target_tag).each do |tag|
|
||||
tag_names[tag_names.index(tag.name)] = tag.target_tag.name
|
||||
end
|
||||
Tag
|
||||
.where_name(tag_names)
|
||||
.joins(:target_tag)
|
||||
.includes(:target_tag)
|
||||
.each { |tag| tag_names[tag_names.index(tag.name)] = tag.target_tag.name }
|
||||
end
|
||||
|
||||
# tags currently on the topic
|
||||
@ -45,9 +46,7 @@ module DiscourseTagging
|
||||
# If this user has explicit permission to use certain tags,
|
||||
# we need to ensure those tags are removed from the list of
|
||||
# restricted tags
|
||||
if permitted_tags.present?
|
||||
readonly_tags = readonly_tags - permitted_tags
|
||||
end
|
||||
readonly_tags = readonly_tags - permitted_tags if permitted_tags.present?
|
||||
|
||||
# visible, but not usable, tags this user is trying to use
|
||||
disallowed_tags = new_tag_names & readonly_tags
|
||||
@ -55,13 +54,19 @@ module DiscourseTagging
|
||||
disallowed_tags += new_tag_names & hidden_tags
|
||||
|
||||
if disallowed_tags.present?
|
||||
topic.errors.add(:base, I18n.t("tags.restricted_tag_disallowed", tag: disallowed_tags.join(" ")))
|
||||
topic.errors.add(
|
||||
:base,
|
||||
I18n.t("tags.restricted_tag_disallowed", tag: disallowed_tags.join(" ")),
|
||||
)
|
||||
return false
|
||||
end
|
||||
|
||||
removed_readonly_tags = removed_tag_names & readonly_tags
|
||||
if removed_readonly_tags.present?
|
||||
topic.errors.add(:base, I18n.t("tags.restricted_tag_remove_disallowed", tag: removed_readonly_tags.join(" ")))
|
||||
topic.errors.add(
|
||||
:base,
|
||||
I18n.t("tags.restricted_tag_remove_disallowed", tag: removed_readonly_tags.join(" ")),
|
||||
)
|
||||
return false
|
||||
end
|
||||
|
||||
@ -73,50 +78,61 @@ module DiscourseTagging
|
||||
if tag_names.present?
|
||||
# guardian is explicitly nil cause we don't want to strip all
|
||||
# staff tags that already passed validation
|
||||
tags = filter_allowed_tags(
|
||||
nil, # guardian
|
||||
for_topic: true,
|
||||
category: category,
|
||||
selected_tags: tag_names,
|
||||
only_tag_names: tag_names
|
||||
)
|
||||
tags =
|
||||
filter_allowed_tags(
|
||||
nil, # guardian
|
||||
for_topic: true,
|
||||
category: category,
|
||||
selected_tags: tag_names,
|
||||
only_tag_names: tag_names,
|
||||
)
|
||||
|
||||
# keep existent tags that current user cannot use
|
||||
tags += Tag.where(name: old_tag_names & tag_names)
|
||||
|
||||
tags = Tag.where(id: tags.map(&:id)).all.to_a if tags.size > 0
|
||||
|
||||
if tags.size < tag_names.size && (category.nil? || category.allow_global_tags || (category.tags.count == 0 && category.tag_groups.count == 0))
|
||||
if tags.size < tag_names.size &&
|
||||
(
|
||||
category.nil? || category.allow_global_tags ||
|
||||
(category.tags.count == 0 && category.tag_groups.count == 0)
|
||||
)
|
||||
tag_names.each do |name|
|
||||
unless Tag.where_name(name).exists?
|
||||
tags << Tag.create(name: name)
|
||||
end
|
||||
tags << Tag.create(name: name) unless Tag.where_name(name).exists?
|
||||
end
|
||||
end
|
||||
|
||||
# add missing mandatory parent tags
|
||||
tag_ids = tags.map(&:id)
|
||||
|
||||
parent_tags_map = DB.query("
|
||||
parent_tags_map =
|
||||
DB
|
||||
.query(
|
||||
"
|
||||
SELECT tgm.tag_id, tg.parent_tag_id
|
||||
FROM tag_groups tg
|
||||
INNER JOIN tag_group_memberships tgm
|
||||
ON tgm.tag_group_id = tg.id
|
||||
WHERE tg.parent_tag_id IS NOT NULL
|
||||
AND tgm.tag_id IN (?)
|
||||
", tag_ids).inject({}) do |h, v|
|
||||
h[v.tag_id] ||= []
|
||||
h[v.tag_id] << v.parent_tag_id
|
||||
h
|
||||
end
|
||||
",
|
||||
tag_ids,
|
||||
)
|
||||
.inject({}) do |h, v|
|
||||
h[v.tag_id] ||= []
|
||||
h[v.tag_id] << v.parent_tag_id
|
||||
h
|
||||
end
|
||||
|
||||
missing_parent_tag_ids = parent_tags_map.map do |_, parent_tag_ids|
|
||||
(tag_ids & parent_tag_ids).size == 0 ? parent_tag_ids.first : nil
|
||||
end.compact.uniq
|
||||
missing_parent_tag_ids =
|
||||
parent_tags_map
|
||||
.map do |_, parent_tag_ids|
|
||||
(tag_ids & parent_tag_ids).size == 0 ? parent_tag_ids.first : nil
|
||||
end
|
||||
.compact
|
||||
.uniq
|
||||
|
||||
unless missing_parent_tag_ids.empty?
|
||||
tags = tags + Tag.where(id: missing_parent_tag_ids).all
|
||||
end
|
||||
tags = tags + Tag.where(id: missing_parent_tag_ids).all unless missing_parent_tag_ids.empty?

return false unless validate_min_required_tags_for_category(guardian, topic, category, tags)
return false unless validate_required_tags_from_group(guardian, topic, category, tags)
@ -137,7 +153,9 @@ module DiscourseTagging

DiscourseEvent.trigger(
:topic_tags_changed,
topic, old_tag_names: old_tag_names, new_tag_names: topic.tags.map(&:name)
topic,
old_tag_names: old_tag_names,
new_tag_names: topic.tags.map(&:name),
)

return true
@ -146,12 +164,12 @@ module DiscourseTagging
end

def self.validate_min_required_tags_for_category(guardian, model, category, tags = [])
if !guardian.is_staff? &&
category &&
category.minimum_required_tags > 0 &&
tags.length < category.minimum_required_tags

model.errors.add(:base, I18n.t("tags.minimum_required_tags", count: category.minimum_required_tags))
if !guardian.is_staff? && category && category.minimum_required_tags > 0 &&
tags.length < category.minimum_required_tags
model.errors.add(
:base,
I18n.t("tags.minimum_required_tags", count: category.minimum_required_tags),
)
false
else
true
@ -164,17 +182,17 @@ module DiscourseTagging
success = true
category.category_required_tag_groups.each do |crtg|
if tags.length < crtg.min_count ||
crtg.tag_group.tags.where("tags.id in (?)", tags.map(&:id)).count < crtg.min_count

crtg.tag_group.tags.where("tags.id in (?)", tags.map(&:id)).count < crtg.min_count
success = false

model.errors.add(:base,
model.errors.add(
:base,
I18n.t(
"tags.required_tags_from_group",
count: crtg.min_count,
tag_group_name: crtg.tag_group.name,
tags: crtg.tag_group.tags.order(:id).pluck(:name).join(", ")
)
tags: crtg.tag_group.tags.order(:id).pluck(:name).join(", "),
),
)
end
end
@ -189,24 +207,28 @@ module DiscourseTagging
tags_restricted_to_categories = Hash.new { |h, k| h[k] = Set.new }

query = Tag.where(name: tags)
query.joins(tag_groups: :categories).pluck(:name, 'categories.id').each do |(tag, cat_id)|
tags_restricted_to_categories[tag] << cat_id
end
query.joins(:categories).pluck(:name, 'categories.id').each do |(tag, cat_id)|
tags_restricted_to_categories[tag] << cat_id
end
query
.joins(tag_groups: :categories)
.pluck(:name, "categories.id")
.each { |(tag, cat_id)| tags_restricted_to_categories[tag] << cat_id }
query
.joins(:categories)
.pluck(:name, "categories.id")
.each { |(tag, cat_id)| tags_restricted_to_categories[tag] << cat_id }

unallowed_tags = tags_restricted_to_categories.keys.select do |tag|
!tags_restricted_to_categories[tag].include?(category.id)
end
unallowed_tags =
tags_restricted_to_categories.keys.select do |tag|
!tags_restricted_to_categories[tag].include?(category.id)
end

if unallowed_tags.present?
msg = I18n.t(
"tags.forbidden.restricted_tags_cannot_be_used_in_category",
count: unallowed_tags.size,
tags: unallowed_tags.sort.join(", "),
category: category.name
)
msg =
I18n.t(
"tags.forbidden.restricted_tags_cannot_be_used_in_category",
count: unallowed_tags.size,
tags: unallowed_tags.sort.join(", "),
category: category.name,
)
model.errors.add(:base, msg)
return false
end
@ -214,12 +236,13 @@ module DiscourseTagging
if !category.allow_global_tags && category.has_restricted_tags?
unrestricted_tags = tags - tags_restricted_to_categories.keys
if unrestricted_tags.present?
msg = I18n.t(
"tags.forbidden.category_does_not_allow_tags",
count: unrestricted_tags.size,
tags: unrestricted_tags.sort.join(", "),
category: category.name
)
msg =
I18n.t(
"tags.forbidden.category_does_not_allow_tags",
count: unrestricted_tags.size,
tags: unrestricted_tags.sort.join(", "),
category: category.name,
)
model.errors.add(:base, msg)
return false
end
@ -280,7 +303,8 @@ module DiscourseTagging
def self.filter_allowed_tags(guardian, opts = {})
selected_tag_ids = opts[:selected_tags] ? Tag.where_name(opts[:selected_tags]).pluck(:id) : []
category = opts[:category]
category_has_restricted_tags = category ? (category.tags.count > 0 || category.tag_groups.count > 0) : false
category_has_restricted_tags =
category ? (category.tags.count > 0 || category.tag_groups.count > 0) : false

# If guardian is nil, it means the caller doesn't want tags to be filtered
# based on guardian rules. Use the same rules as for staff users.
@ -288,9 +312,7 @@ module DiscourseTagging

builder_params = {}

unless selected_tag_ids.empty?
builder_params[:selected_tag_ids] = selected_tag_ids
end
builder_params[:selected_tag_ids] = selected_tag_ids unless selected_tag_ids.empty?

sql = +"WITH #{TAG_GROUP_RESTRICTIONS_SQL}, #{CATEGORY_RESTRICTIONS_SQL}"
if (opts[:for_input] || opts[:for_topic]) && filter_for_non_staff
@ -301,13 +323,14 @@ module DiscourseTagging

outer_join = category.nil? || category.allow_global_tags || !category_has_restricted_tags

distinct_clause = if opts[:order_popularity]
"DISTINCT ON (topic_count, name)"
elsif opts[:order_search_results] && opts[:term].present?
"DISTINCT ON (lower(name) = lower(:cleaned_term), topic_count, name)"
else
""
end
distinct_clause =
if opts[:order_popularity]
"DISTINCT ON (topic_count, name)"
elsif opts[:order_search_results] && opts[:term].present?
"DISTINCT ON (lower(name) = lower(:cleaned_term), topic_count, name)"
else
""
end

sql << <<~SQL
SELECT #{distinct_clause} t.id, t.name, t.topic_count, t.pm_topic_count, t.description,
@ -336,16 +359,20 @@ module DiscourseTagging
# parent tag requirements
if opts[:for_input]
builder.where(
builder_params[:selected_tag_ids] ?
"tgm_id IS NULL OR parent_tag_id IS NULL OR parent_tag_id IN (:selected_tag_ids)" :
"tgm_id IS NULL OR parent_tag_id IS NULL"
(
if builder_params[:selected_tag_ids]
"tgm_id IS NULL OR parent_tag_id IS NULL OR parent_tag_id IN (:selected_tag_ids)"
else
"tgm_id IS NULL OR parent_tag_id IS NULL"
end
),
)
end

if category && category_has_restricted_tags
builder.where(
category.allow_global_tags ? "category_id = ? OR category_id IS NULL" : "category_id = ?",
category.id
category.id,
)
elsif category || opts[:for_input] || opts[:for_topic]
# tags not restricted to any categories
@ -354,7 +381,9 @@ module DiscourseTagging

if filter_for_non_staff && (opts[:for_input] || opts[:for_topic])
# exclude staff-only tag groups
builder.where("tag_group_id IS NULL OR tag_group_id IN (SELECT tag_group_id FROM permitted_tag_groups)")
builder.where(
"tag_group_id IS NULL OR tag_group_id IN (SELECT tag_group_id FROM permitted_tag_groups)",
)
end

term = opts[:term]
@ -380,7 +409,8 @@ module DiscourseTagging
# - and no search term has been included
required_tag_ids = nil
required_category_tag_group = nil
if opts[:for_input] && category&.category_required_tag_groups.present? && (filter_for_non_staff || term.blank?)
if opts[:for_input] && category&.category_required_tag_groups.present? &&
(filter_for_non_staff || term.blank?)
category.category_required_tag_groups.each do |crtg|
group_tags = crtg.tag_group.tags.pluck(:id)
next if (group_tags & selected_tag_ids).size >= crtg.min_count
@ -426,22 +456,18 @@ module DiscourseTagging
if !one_tag_per_group_ids.empty?
builder.where(
"tag_group_id IS NULL OR tag_group_id NOT IN (?) OR id IN (:selected_tag_ids)",
one_tag_per_group_ids
one_tag_per_group_ids,
)
end
end

if opts[:exclude_synonyms]
builder.where("target_tag_id IS NULL")
end
builder.where("target_tag_id IS NULL") if opts[:exclude_synonyms]

if opts[:exclude_has_synonyms]
builder.where("id NOT IN (SELECT target_tag_id FROM tags WHERE target_tag_id IS NOT NULL)")
end

if opts[:excluded_tag_names]&.any?
builder.where("name NOT IN (?)", opts[:excluded_tag_names])
end
builder.where("name NOT IN (?)", opts[:excluded_tag_names]) if opts[:excluded_tag_names]&.any?

if opts[:limit]
if required_tag_ids && term.blank?
@ -465,7 +491,7 @@ module DiscourseTagging
if required_category_tag_group
context[:required_tag_group] = {
name: required_category_tag_group.tag_group.name,
min_count: required_category_tag_group.min_count
min_count: required_category_tag_group.min_count,
}
end
[result, context]
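A hedged note on the return shape here: result is the filtered tag collection the builder produced, and context is a plain Hash that only carries the key shown above when a required tag group applies (values below are invented):

result  # => the filtered Tag records
context # => { required_tag_group: { name: "release-channel", min_count: 1 } }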
@ -480,21 +506,15 @@ module DiscourseTagging
else
# Visible tags either have no permissions or have allowable permissions
Tag
.where.not(
id:
TagGroupMembership
.joins(tag_group: :tag_group_permissions)
.select(:tag_id)
)
.where.not(id: TagGroupMembership.joins(tag_group: :tag_group_permissions).select(:tag_id))
.or(
Tag
.where(
id:
TagGroupPermission
.joins(tag_group: :tag_group_memberships)
.where(group_id: permitted_group_ids_query(guardian))
.select('tag_group_memberships.tag_id'),
)
Tag.where(
id:
TagGroupPermission
.joins(tag_group: :tag_group_memberships)
.where(group_id: permitted_group_ids_query(guardian))
.select("tag_group_memberships.tag_id"),
),
)
end
end
@ -509,21 +529,18 @@ module DiscourseTagging

def self.permitted_group_ids_query(guardian = nil)
if guardian&.authenticated?
Group
.from(
Group.sanitize_sql(
["(SELECT ? AS id UNION #{guardian.user.groups.select(:id).to_sql}) as groups", Group::AUTO_GROUPS[:everyone]]
)
)
.select(:id)
Group.from(
Group.sanitize_sql(
[
"(SELECT ? AS id UNION #{guardian.user.groups.select(:id).to_sql}) as groups",
Group::AUTO_GROUPS[:everyone],
],
),
).select(:id)
else
Group
.from(
Group.sanitize_sql(
["(SELECT ? AS id) AS groups", Group::AUTO_GROUPS[:everyone]]
)
)
.select(:id)
Group.from(
Group.sanitize_sql(["(SELECT ? AS id) AS groups", Group::AUTO_GROUPS[:everyone]]),
).select(:id)
end
end
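For orientation, a minimal sketch of what the sanitized FROM clause reduces to for an anonymous guardian, assuming Group::AUTO_GROUPS[:everyone] is 0 as in Discourse's auto-group table:

Group.sanitize_sql(["(SELECT ? AS id) AS groups", 0])
# => "(SELECT 0 AS id) AS groups"
# so the whole query reads: SELECT id FROM (SELECT 0 AS id) AS groups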

@ -535,9 +552,11 @@ module DiscourseTagging
def self.readonly_tag_names(guardian = nil)
return [] if guardian&.is_staff?

query = Tag.joins(tag_groups: :tag_group_permissions)
.where('tag_group_permissions.permission_type = ?',
TagGroupPermission.permission_types[:readonly])
query =
Tag.joins(tag_groups: :tag_group_permissions).where(
"tag_group_permissions.permission_type = ?",
TagGroupPermission.permission_types[:readonly],
)

query.pluck(:name)
end
@ -545,14 +564,12 @@ module DiscourseTagging
# explicit permissions to use these tags
def self.permitted_tag_names(guardian = nil)
query =
Tag
.joins(tag_groups: :tag_group_permissions)
.where(
tag_group_permissions: {
group_id: permitted_group_ids(guardian),
permission_type: TagGroupPermission.permission_types[:full],
},
)
Tag.joins(tag_groups: :tag_group_permissions).where(
tag_group_permissions: {
group_id: permitted_group_ids(guardian),
permission_type: TagGroupPermission.permission_types[:full],
},
)

query.pluck(:name).uniq
end
@ -586,15 +603,14 @@ module DiscourseTagging
tag = tag.dup
tag.downcase! if SiteSetting.force_lowercase_tags
tag.strip!
tag.gsub!(/[[:space:]]+/, '-')
tag.gsub!(/[^[:word:][:punct:]]+/, '')
tag.squeeze!('-')
tag.gsub!(TAGS_FILTER_REGEXP, '')
tag.gsub!(/[[:space:]]+/, "-")
tag.gsub!(/[^[:word:][:punct:]]+/, "")
tag.squeeze!("-")
tag.gsub!(TAGS_FILTER_REGEXP, "")
tag[0...SiteSetting.max_tag_length]
end
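A worked trace of the normalization chain, assuming force_lowercase_tags is off, TAGS_FILTER_REGEXP matches none of these characters, and max_tag_length is large enough (all assumptions):

tag = "  Ruby   on Rails  ".dup
tag.strip!                             # "Ruby   on Rails"
tag.gsub!(/[[:space:]]+/, "-")         # "Ruby-on-Rails"
tag.gsub!(/[^[:word:][:punct:]]+/, "") # no-op here; would drop emoji etc.
tag.squeeze!("-")                      # no-op here; collapses runs of "-"
tag[0...20]                            # => "Ruby-on-Rails"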

def self.tags_for_saving(tags_arg, guardian, opts = {})

return [] unless guardian.can_tag_topics? && tags_arg.present?

tag_names = Tag.where_name(tags_arg).pluck(:name)
@ -609,21 +625,23 @@ module DiscourseTagging
end

def self.add_or_create_tags_by_name(taggable, tag_names_arg, opts = {})
tag_names = DiscourseTagging.tags_for_saving(tag_names_arg, Guardian.new(Discourse.system_user), opts) || []
tag_names =
DiscourseTagging.tags_for_saving(tag_names_arg, Guardian.new(Discourse.system_user), opts) ||
[]
if taggable.tags.pluck(:name).sort != tag_names.sort
taggable.tags = Tag.where_name(tag_names).all
new_tag_names = taggable.tags.size < tag_names.size ? tag_names - taggable.tags.map(&:name) : []
new_tag_names =
taggable.tags.size < tag_names.size ? tag_names - taggable.tags.map(&:name) : []
taggable.tags << Tag.where(target_tag_id: taggable.tags.map(&:id)).all
new_tag_names.each do |name|
taggable.tags << Tag.create(name: name)
end
new_tag_names.each { |name| taggable.tags << Tag.create(name: name) }
end
end

# Returns true if all were added successfully, or an Array of the
# tags that failed to be added, with errors on each Tag.
def self.add_or_create_synonyms_by_name(target_tag, synonym_names)
tag_names = DiscourseTagging.tags_for_saving(synonym_names, Guardian.new(Discourse.system_user)) || []
tag_names =
DiscourseTagging.tags_for_saving(synonym_names, Guardian.new(Discourse.system_user)) || []
tag_names -= [target_tag.name]
existing = Tag.where_name(tag_names).all
target_tag.synonyms << existing
@ -642,6 +660,6 @@ module DiscourseTagging

def self.muted_tags(user)
return [] unless user
TagUser.lookup(user, :muted).joins(:tag).pluck('tags.name')
TagUser.lookup(user, :muted).joins(:tag).pluck("tags.name")
end
end

@ -1,13 +1,11 @@
# frozen_string_literal: true

module DiscourseUpdates

class << self

def check_version
attrs = {
installed_version: Discourse::VERSION::STRING,
installed_sha: (Discourse.git_version == 'unknown' ? nil : Discourse.git_version),
installed_sha: (Discourse.git_version == "unknown" ? nil : Discourse.git_version),
installed_describe: Discourse.full_version,
git_branch: Discourse.git_branch,
updated_at: updated_at,
@ -17,7 +15,7 @@ module DiscourseUpdates
attrs.merge!(
latest_version: latest_version,
critical_updates: critical_updates_available?,
missing_versions_count: missing_versions_count
missing_versions_count: missing_versions_count,
)
end

@ -25,19 +23,24 @@ module DiscourseUpdates

# replace -commit_count with +commit_count
if version_info.installed_describe =~ /-(\d+)-/
version_info.installed_describe = version_info.installed_describe.gsub(/-(\d+)-.*/, " +#{$1}")
version_info.installed_describe =
version_info.installed_describe.gsub(/-(\d+)-.*/, " +#{$1}")
end

if SiteSetting.version_checks?
is_stale_data =
(version_info.missing_versions_count == 0 && version_info.latest_version != version_info.installed_version) ||
(version_info.missing_versions_count != 0 && version_info.latest_version == version_info.installed_version)
(
version_info.missing_versions_count == 0 &&
version_info.latest_version != version_info.installed_version
) ||
(
version_info.missing_versions_count != 0 &&
version_info.latest_version == version_info.installed_version
)

# Handle cases when version check data is old so we report something that makes sense
if version_info.updated_at.nil? || # never performed a version check
last_installed_version != Discourse::VERSION::STRING || # upgraded since the last version check
is_stale_data

if version_info.updated_at.nil? || last_installed_version != Discourse::VERSION::STRING || # never performed a version check # upgraded since the last version check
is_stale_data
Jobs.enqueue(:version_check, all_sites: true)
version_info.version_check_pending = true

@ -48,9 +51,8 @@ module DiscourseUpdates
end

version_info.stale_data =
version_info.version_check_pending ||
(updated_at && updated_at < 48.hours.ago) ||
is_stale_data
version_info.version_check_pending || (updated_at && updated_at < 48.hours.ago) ||
is_stale_data
end

version_info
@ -82,7 +84,7 @@ module DiscourseUpdates
end

def critical_updates_available?
(Discourse.redis.get(critical_updates_available_key) || false) == 'true'
(Discourse.redis.get(critical_updates_available_key) || false) == "true"
end

def critical_updates_available=(arg)
@ -110,7 +112,7 @@ module DiscourseUpdates
# store the list in redis
version_keys = []
versions[0, 5].each do |v|
key = "#{missing_versions_key_prefix}:#{v['version']}"
key = "#{missing_versions_key_prefix}:#{v["version"]}"
Discourse.redis.mapped_hmset key, v
version_keys << key
end
@ -140,11 +142,21 @@ module DiscourseUpdates
end

def new_features
entries = JSON.parse(Discourse.redis.get(new_features_key)) rescue nil
entries =
begin
JSON.parse(Discourse.redis.get(new_features_key))
rescue StandardError
nil
end
return nil if entries.nil?

entries.select! do |item|
item["discourse_version"].nil? || Discourse.has_needed_version?(current_version, item["discourse_version"]) rescue nil
begin
item["discourse_version"].nil? ||
Discourse.has_needed_version?(current_version, item["discourse_version"])
rescue StandardError
nil
end
end

entries.sort_by { |item| Time.zone.parse(item["created_at"]).to_i }.reverse
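The move away from the inline `rescue nil` modifier is formatting only: the modifier form already rescues StandardError, which the expanded begin/rescue simply makes explicit. A self-contained illustration:

require "json"

value = (JSON.parse("not json") rescue nil)
# is equivalent to:
value =
  begin
    JSON.parse("not json")
  rescue StandardError
    nil
  end
# both leave value == nil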
@ -170,7 +182,12 @@ module DiscourseUpdates
end

def mark_new_features_as_seen(user_id)
entries = JSON.parse(Discourse.redis.get(new_features_key)) rescue nil
entries =
begin
JSON.parse(Discourse.redis.get(new_features_key))
rescue StandardError
nil
end
return nil if entries.nil?
last_seen = entries.max_by { |x| x["created_at"] }
Discourse.redis.set(new_features_last_seen_key(user_id), last_seen["created_at"])
@ -204,39 +221,39 @@ module DiscourseUpdates
private

def last_installed_version_key
'last_installed_version'
"last_installed_version"
end

def latest_version_key
'discourse_latest_version'
"discourse_latest_version"
end

def critical_updates_available_key
'critical_updates_available'
"critical_updates_available"
end

def missing_versions_count_key
'missing_versions_count'
"missing_versions_count"
end

def updated_at_key
'last_version_check_at'
"last_version_check_at"
end

def missing_versions_list_key
'missing_versions'
"missing_versions"
end

def missing_versions_key_prefix
'missing_version'
"missing_version"
end

def new_features_endpoint
'https://meta.discourse.org/new-features.json'
"https://meta.discourse.org/new-features.json"
end

def new_features_key
'new_features'
"new_features"
end

def new_features_last_seen_key(user_id)

@ -18,13 +18,13 @@ class DiskSpace
end

def self.free(path)
output = Discourse::Utils.execute_command('df', '-Pk', path)
output = Discourse::Utils.execute_command("df", "-Pk", path)
size_line = output.split("\n")[1]
size_line.split(/\s+/)[3].to_i * 1024
end

def self.percent_free(path)
output = Discourse::Utils.execute_command('df', '-P', path)
output = Discourse::Utils.execute_command("df", "-P", path)
size_line = output.split("\n")[1]
size_line.split(/\s+/)[4].to_i
end
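To see what the split indices pick out, here is the same parse applied to a typical `df -Pk` output (values invented):

output = <<~DF
  Filesystem 1024-blocks     Used Available Capacity Mounted on
  /dev/sda1     41152812 20576406  18458220      53% /
DF
size_line = output.split("\n")[1]
size_line.split(/\s+/)[3].to_i * 1024 # => 18901217280 (Available column, in bytes)
size_line.split(/\s+/)[4].to_i        # => 53 (Capacity column, as percent_free reads it)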

@ -1,23 +1,16 @@
# frozen_string_literal: true

require 'message_bus/distributed_cache'
require "message_bus/distributed_cache"

class DistributedCache < MessageBus::DistributedCache
def initialize(key, manager: nil, namespace: true)
super(
key,
manager: manager,
namespace: namespace,
app_version: Discourse.git_version
)
super(key, manager: manager, namespace: namespace, app_version: Discourse.git_version)
end

# Defer setting of the key in the cache for performance critical path to avoid
# waiting on MessageBus to publish the message which involves writing to Redis.
def defer_set(k, v)
Scheduler::Defer.later("#{@key}_set") do
self[k] = v
end
Scheduler::Defer.later("#{@key}_set") { self[k] = v }
end
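A minimal usage sketch (the cache name and values are made up): defer_set returns immediately, and the actual write is published later from the Defer queue, as the comment above describes.

cache = DistributedCache.new("category_slugs")
cache.defer_set("support", 42) # hot path continues without waiting on Redis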

def defer_get_set(k, &block)

@ -31,11 +31,7 @@ class DistributedMutex
LUA

def self.synchronize(key, redis: nil, validity: DEFAULT_VALIDITY, &blk)
self.new(
key,
redis: redis,
validity: validity
).synchronize(&blk)
self.new(key, redis: redis, validity: validity).synchronize(&blk)
end
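Call-site usage is unchanged by the reformat; a hedged sketch with an invented key:

DistributedMutex.synchronize("backfill:user_stats") do
  # critical section; the cross-process Redis lock is held for at most
  # DEFAULT_VALIDITY seconds unless a custom validity is passed
end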

def initialize(key, redis: nil, validity: DEFAULT_VALIDITY)
@ -58,7 +54,9 @@ class DistributedMutex
ensure
current_time = redis.time[0]
if current_time > expire_time
warn("held for too long, expected max: #{@validity} secs, took an extra #{current_time - expire_time} secs")
warn(
"held for too long, expected max: #{@validity} secs, took an extra #{current_time - expire_time} secs",
)
end

unlocked = UNLOCK_SCRIPT.eval(redis, [prefixed_key], [expire_time.to_s])

@ -1,6 +1,6 @@
# frozen_string_literal: true

require 'rate_limiter'
require "rate_limiter"
class EditRateLimiter < RateLimiter
def initialize(user)
limit = SiteSetting.max_edits_per_day

10
lib/email.rb
@ -1,6 +1,6 @@
# frozen_string_literal: true

require 'mail'
require "mail"

module Email
# See https://www.iana.org/assignments/smtp-enhanced-status-codes/smtp-enhanced-status-codes.xhtml#smtp-enhanced-status-codes-1
@ -21,19 +21,19 @@ module Email
def self.obfuscate(email)
return email if !Email.is_valid?(email)

first, _, last = email.rpartition('@')
first, _, last = email.rpartition("@")

# Obfuscate each last part, except tld
last = last.split('.')
last = last.split(".")
tld = last.pop
last.map! { |part| obfuscate_part(part) }
last << tld

"#{obfuscate_part(first)}@#{last.join('.')}"
"#{obfuscate_part(first)}@#{last.join(".")}"
end
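The address decomposition the method performs, traced on a sample address (obfuscate_part is defined elsewhere in the file, so its masking is only hinted at here):

first, _, last = "jane.doe@mail.example.com".rpartition("@")
# first => "jane.doe", last => "mail.example.com"
parts = last.split(".") # => ["mail", "example", "com"]
tld = parts.pop         # "com" is kept verbatim
# "jane.doe" and each remaining part go through obfuscate_part before rejoining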

def self.cleanup_alias(name)
name ? name.gsub(/[:<>,"]/, '') : name
name ? name.gsub(/[:<>,"]/, "") : name
end

def self.extract_parts(raw)

@ -2,12 +2,7 @@

module Email
class AuthenticationResults
VERDICT = Enum.new(
:gray,
:pass,
:fail,
start: 0,
)
VERDICT = Enum.new(:gray, :pass, :fail, start: 0)

def initialize(headers)
@authserv_id = SiteSetting.email_in_authserv_id
@ -16,11 +11,10 @@ module Email
end

def results
@results ||= Array(@headers).map do |header|
parse_header(header.to_s)
end.filter do |result|
@authserv_id.blank? || @authserv_id == result[:authserv_id]
end
@results ||=
Array(@headers)
.map { |header| parse_header(header.to_s) }
.filter { |result| @authserv_id.blank? || @authserv_id == result[:authserv_id] }
end

def action
@ -55,7 +49,8 @@ module Email
end
end
end
verdict = VERDICT[:gray] if SiteSetting.email_in_authserv_id.blank? && verdict == VERDICT[:pass]
verdict = VERDICT[:gray] if SiteSetting.email_in_authserv_id.blank? &&
verdict == VERDICT[:pass]
verdict
end

@ -67,10 +62,11 @@ module Email
authres_version = /\d+#{cfws}?/
no_result = /#{cfws}?;#{cfws}?none/
keyword = /([a-zA-Z0-9-]*[a-zA-Z0-9])/
authres_payload = /\A#{cfws}?#{authserv_id}(?:#{cfws}#{authres_version})?(?:#{no_result}|([\S\s]*))/
authres_payload =
/\A#{cfws}?#{authserv_id}(?:#{cfws}#{authres_version})?(?:#{no_result}|([\S\s]*))/

method_version = authres_version
method = /#{keyword}\s*(?:#{cfws}?\/#{cfws}?#{method_version})?/
method = %r{#{keyword}\s*(?:#{cfws}?/#{cfws}?#{method_version})?}
result = keyword
methodspec = /#{cfws}?#{method}#{cfws}?=#{cfws}?#{result}/
reasonspec = /reason#{cfws}?=#{cfws}?#{value}/
@ -87,27 +83,21 @@ module Email

if resinfo_val
resinfo_scan = resinfo_val.scan(resinfo)
parsed_resinfo = resinfo_scan.map do |x|
{
method: x[2],
result: x[8],
reason: x[12] || x[13],
props: x[-1].scan(propspec).map do |y|
{
ptype: y[0],
property: y[4],
pvalue: y[8] || y[9]
}
end
}
end
parsed_resinfo =
resinfo_scan.map do |x|
{
method: x[2],
result: x[8],
reason: x[12] || x[13],
props:
x[-1]
.scan(propspec)
.map { |y| { ptype: y[0], property: y[4], pvalue: y[8] || y[9] } },
}
end
end

{
authserv_id: parsed_authserv_id,
resinfo: parsed_resinfo
}
{ authserv_id: parsed_authserv_id, resinfo: parsed_resinfo }
end

end
end

@ -5,11 +5,11 @@ module Email
def build_email(*builder_args)
builder = Email::MessageBuilder.new(*builder_args)
headers(builder.header_args) if builder.header_args.present?
mail(builder.build_args).tap { |message|
mail(builder.build_args).tap do |message|
if message && h = builder.html_part
message.html_part = h
end
}
end
end
end
end