# frozen_string_literal: true
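
# Heuristics for telling crawler/bot requests apart from real browsers, based
# on the User-Agent (and Via) headers and the crawler-related site settings.
#
# Illustrative sketch only; the Googlebot result assumes the default
# crawler_user_agents site setting (which lists googlebot) is unchanged:
#
#   CrawlerDetection.crawler?("Mozilla/5.0 (compatible; Googlebot/2.1)") # => true
#   CrawlerDetection.is_blocked_crawler?(request.user_agent)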
module CrawlerDetection
  WAYBACK_MACHINE_URL = "archive.org"
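
  # Builds a single case-insensitive Regexp out of a pipe-delimited user agent
  # list (as stored in site settings), escaping each fragment first,
  # e.g. "Googlebot|bingbot" becomes /Googlebot|bingbot/i.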
  def self.to_matcher(string, type: nil)
    escaped = string.split("|").map { |agent| Regexp.escape(agent) }.join("|")

    if type == :real && Rails.env == "test"
      # we need this bypass so we can properly render views in the test environment
      escaped << "|Rails Testing"
    end

    Regexp.new(escaped, Regexp::IGNORECASE)
  end
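
  # Returns true when the request looks like it comes from a crawler: Wayback
  # Machine requests (archive.org in the user agent or Via header), missing
  # user agents, user agents that do not match the "real browser" patterns in
  # SiteSetting.non_crawler_user_agents, and user agents that match
  # SiteSetting.crawler_user_agents unless they also match
  # SiteSetting.crawler_check_bypass_agents.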
def self.crawler?(user_agent, via_header = nil)
|
2020-05-14 06:10:07 -05:00
|
|
|
if user_agent.nil? || user_agent&.include?(WAYBACK_MACHINE_URL) ||
|
|
|
|
via_header&.include?(WAYBACK_MACHINE_URL)
|
|
|
|
return true
|
2023-01-09 06:10:19 -06:00
|
|
|
end
|
2018-01-15 23:28:11 -06:00
|
|
|
|
2017-09-28 21:31:50 -05:00
|
|
|
# this is done to avoid regenerating regexes
|
2018-01-15 22:41:13 -06:00
|
|
|
@non_crawler_matchers ||= {}
|
2017-09-28 21:31:50 -05:00
|
|
|
@matchers ||= {}
|
2018-01-15 22:41:13 -06:00
|
|
|
|
2018-01-15 23:28:11 -06:00
|
|
|
possibly_real =
|
|
|
|
(
|
|
|
|
@non_crawler_matchers[SiteSetting.non_crawler_user_agents] ||= to_matcher(
|
|
|
|
SiteSetting.non_crawler_user_agents,
|
|
|
|
type: :real,
|
|
|
|
)
|
2023-01-09 06:10:19 -06:00
|
|
|
)
|
2018-01-15 22:41:13 -06:00
|
|
|
|
|
|
|
if user_agent.match?(possibly_real)
|
|
|
|
known_bots =
|
|
|
|
(@matchers[SiteSetting.crawler_user_agents] ||= to_matcher(SiteSetting.crawler_user_agents))
|
2018-06-20 19:56:46 -05:00
|
|
|
if user_agent.match?(known_bots)
|
|
|
|
bypass =
|
|
|
|
(
|
|
|
|
@matchers[SiteSetting.crawler_check_bypass_agents] ||= to_matcher(
|
|
|
|
SiteSetting.crawler_check_bypass_agents,
|
|
|
|
)
|
2023-01-09 06:10:19 -06:00
|
|
|
)
|
2018-06-20 19:56:46 -05:00
|
|
|
!user_agent.match?(bypass)
|
|
|
|
else
|
|
|
|
false
|
|
|
|
end
|
2018-01-15 22:41:13 -06:00
|
|
|
else
|
|
|
|
true
|
|
|
|
end
|
2014-02-14 16:10:08 -06:00
|
|
|
end
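
  # Returns true when the user agent matches one of the patterns in
  # SiteSetting.browser_update_user_agents (false when that setting is blank);
  # the compiled matcher is memoized per setting value.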
  def self.show_browser_update?(user_agent)
    return false if SiteSetting.browser_update_user_agents.blank?

    @browser_update_matchers ||= {}
    matcher =
      @browser_update_matchers[SiteSetting.browser_update_user_agents] ||= to_matcher(
        SiteSetting.browser_update_user_agents,
      )
    user_agent.match?(matcher)
  end

  # Given a user_agent that returns true from crawler?, should its request be allowed?
  def self.allow_crawler?(user_agent)
    if SiteSetting.allowed_crawler_user_agents.blank? &&
         SiteSetting.blocked_crawler_user_agents.blank?
      return true
    end

    @allowlisted_matchers ||= {}
    @blocklisted_matchers ||= {}

    if SiteSetting.allowed_crawler_user_agents.present?
      allowlisted =
        @allowlisted_matchers[SiteSetting.allowed_crawler_user_agents] ||= to_matcher(
          SiteSetting.allowed_crawler_user_agents,
        )
      !user_agent.nil? && user_agent.match?(allowlisted)
    else
      blocklisted =
        @blocklisted_matchers[SiteSetting.blocked_crawler_user_agents] ||= to_matcher(
          SiteSetting.blocked_crawler_user_agents,
        )
      user_agent.nil? || !user_agent.match?(blocklisted)
    end
  end
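
  # A crawler that is not explicitly permitted by allow_crawler? should have
  # its request blocked.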
  def self.is_blocked_crawler?(user_agent)
    crawler?(user_agent) && !allow_crawler?(user_agent)
  end
end