# frozen_string_literal: true
RSpec.describe Jobs::ExportCsvFile do
describe "#execute" do
  let(:other_user) { Fabricate(:user) }
  let(:admin) { Fabricate(:admin) }
  # Seeds one staff-action row so the staff_action export has something to emit.
  let(:action_log) { StaffActionLogger.new(admin).log_revoke_moderation(other_user) }

  it "raises an error when the entity is missing" do
    # `entity:` is a required argument for the job.
    expect { Jobs::ExportCsvFile.new.execute(user_id: admin.id) }.to raise_error(
      Discourse::InvalidParameters,
    )
  end

  it "works" do
    action_log

    begin
      # The export should create exactly one Upload (the zipped CSV).
      expect do
        Jobs::ExportCsvFile.new.execute(user_id: admin.id, entity: "staff_action")
      end.to change { Upload.count }.by(1)

      # The job notifies the requester via a system PM.
      system_message = admin.topics_allowed.last

      expect(system_message.title).to eq(
        I18n.t(
          "system_messages.csv_export_succeeded.subject_template",
          export_title: "Staff Action",
        ),
      )

      upload = system_message.first_post.uploads.first
      expect(system_message.first_post.raw).to eq(
        I18n.t(
          "system_messages.csv_export_succeeded.text_body_template",
          download_link:
            "[#{upload.original_filename}|attachment](#{upload.short_url}) (#{upload.filesize} Bytes)",
        ).chomp,
      )

      # The PM is linked to the UserExport record and closed to replies.
      expect(system_message.id).to eq(UserExport.last.topic_id)
      expect(system_message.closed).to eq(true)

      # The zip archive should contain a single CSV entry.
      files = []
      Zip::File.open(Discourse.store.path_for(upload)) do |zip_file|
        zip_file.each { |entry| files << entry.name }
      end

      expect(files.size).to eq(1)
    ensure
      # Clean up the generated upload even if an expectation failed.
      admin.uploads.each(&:destroy!)
    end
  end

  it "generates csv export failed message if upload is too large" do
    action_log
    # A 0 KB cap guarantees the generated archive exceeds the limit.
    SiteSetting.max_export_file_size_kb = 0

    begin
      Jobs::ExportCsvFile.new.execute(user_id: admin.id, entity: "staff_action")

      system_message = admin.topics_allowed.last
      expect(system_message.title).to eq(
        I18n.t(
          "system_messages.csv_export_failed.subject_template",
          export_title: "Data export failed",
        ),
      )
    ensure
      admin.uploads.each(&:destroy!)
    end
  end
end
describe ".report_export" do
  let(:user) { Fabricate(:admin) }

  # A pre-configured exporter pointed at the 2010-01-01..2011-01-01 window;
  # individual examples set `extra["name"]` (and filters) before exporting.
  let(:exporter) do
    exporter = Jobs::ExportCsvFile.new
    exporter.entity = "report"
    exporter.extra =
      HashWithIndifferentAccess.new(start_date: "2010-01-01", end_date: "2011-01-01")
    exporter.current_user = User.find_by(id: user.id)
    exporter
  end

  it "does not throw an error when the dates are invalid" do
    # Unparseable dates should be handled gracefully, not bubble up.
    expect {
      Jobs::ExportCsvFile.new.execute(
        entity: "report",
        user_id: user.id,
        args: {
          start_date: "asdfasdf",
          end_date: "not-a-date",
          name: "dau_by_mau",
        },
      )
    }.not_to raise_error
  end

  it "works with single-column reports" do
    user.user_visits.create!(visited_at: "2010-01-01", posts_read: 42)
    Fabricate(:user).user_visits.create!(visited_at: "2010-01-03", posts_read: 420)

    exporter.extra["name"] = "dau_by_mau"
    report = export_report

    expect(report.first).to contain_exactly("Day", "Percent")
    expect(report.second).to contain_exactly("2010-01-01", "100.0")
    expect(report.third).to contain_exactly("2010-01-03", "50.0")
  end

  it "works with filters" do
    user.user_visits.create!(visited_at: "2010-01-01", posts_read: 42)

    group = Fabricate(:group)
    user1 = Fabricate(:user)
    Fabricate(:group_user, group: group, user: user1)
    user1.user_visits.create!(visited_at: "2010-01-03", posts_read: 420)

    exporter.extra["name"] = "visits"
    exporter.extra["group"] = group.id
    report = export_report

    # Only the group member's visit should survive the group filter.
    expect(report.length).to eq(2)
    expect(report.first).to contain_exactly("Day", "Count")
    expect(report.second).to contain_exactly("2010-01-03", "1")
  end

  it "works with single-column reports with default label" do
    user.user_visits.create!(visited_at: "2010-01-01")
    Fabricate(:user).user_visits.create!(visited_at: "2010-01-03")

    exporter.extra["name"] = "visits"
    report = export_report

    expect(report.first).to contain_exactly("Day", "Count")
    expect(report.second).to contain_exactly("2010-01-01", "1")
    expect(report.third).to contain_exactly("2010-01-03", "1")
  end

  it "works with multi-columns reports" do
    DiscourseIpInfo.stubs(:get).with("1.1.1.1").returns(location: "Earth")

    user.user_auth_token_logs.create!(
      action: "login",
      client_ip: "1.1.1.1",
      created_at: "2010-01-01",
    )

    exporter.extra["name"] = "staff_logins"
    report = export_report

    expect(report.first).to contain_exactly("User", "Location", "Login at")
    expect(report.second).to contain_exactly(user.username, "Earth", "2010-01-01 00:00:00 UTC")
  end

  it "works with topic reports" do
    freeze_time DateTime.parse("2010-01-01 6:00")

    exporter.extra["name"] = "top_referred_topics"
    post1 = Fabricate(:post)
    Fabricate(:post)
    IncomingLink.add(
      host: "a.com",
      referer: "http://twitter.com",
      post_id: post1.id,
      ip_address: "1.1.1.1",
    )

    report = export_report

    expect(report.first).to contain_exactly("Topic", "Clicks")
    expect(report.second).to contain_exactly(post1.topic.id.to_s, "1")
  end

  it "works with stacked_chart reports" do
    ApplicationRequest.create!(date: "2010-01-01", req_type: "page_view_logged_in", count: 1)
    ApplicationRequest.create!(date: "2010-01-02", req_type: "page_view_logged_in", count: 2)
    ApplicationRequest.create!(date: "2010-01-03", req_type: "page_view_logged_in", count: 3)

    ApplicationRequest.create!(date: "2010-01-01", req_type: "page_view_anon", count: 4)
    ApplicationRequest.create!(date: "2010-01-02", req_type: "page_view_anon", count: 5)
    ApplicationRequest.create!(date: "2010-01-03", req_type: "page_view_anon", count: 6)

    ApplicationRequest.create!(date: "2010-01-01", req_type: "page_view_crawler", count: 7)
    ApplicationRequest.create!(date: "2010-01-02", req_type: "page_view_crawler", count: 8)
    ApplicationRequest.create!(date: "2010-01-03", req_type: "page_view_crawler", count: 9)

    exporter.extra["name"] = "consolidated_page_views"
    report = export_report

    # Each stacked series becomes a column, one row per day.
    expect(report[0]).to contain_exactly("Day", "Logged in users", "Anonymous users", "Crawlers")
    expect(report[1]).to contain_exactly("2010-01-01", "1", "4", "7")
    expect(report[2]).to contain_exactly("2010-01-02", "2", "5", "8")
    expect(report[3]).to contain_exactly("2010-01-03", "3", "6", "9")
  end

  it "works with posts reports and filters" do
    category = Fabricate(:category)
    subcategory = Fabricate(:category, parent_category: category)

    Fabricate(
      :post,
      topic: Fabricate(:topic, category: category),
      created_at: "2010-01-01 12:00:00 UTC",
    )
    Fabricate(
      :post,
      topic: Fabricate(:topic, category: subcategory),
      created_at: "2010-01-01 12:00:00 UTC",
    )

    exporter.extra["name"] = "posts"
    exporter.extra["category"] = category.id

    # Without include_subcategories only the parent category's post counts.
    report = export_report
    expect(report[0]).to contain_exactly("Count", "Day")
    expect(report[1]).to contain_exactly("1", "2010-01-01")

    exporter.extra["include_subcategories"] = true

    report = export_report
    expect(report[0]).to contain_exactly("Count", "Day")
    expect(report[1]).to contain_exactly("2", "2010-01-01")
  end

  # Collects every CSV row yielded by the exporter into an array.
  def export_report
    report = []
    exporter.report_export { |entry| report << entry }
    report
  end
end
# Column order of the user-list CSV; `to_hash` relies on this matching the
# exporter's output ordering exactly.
let(:user_list_header) do
  %w[
    id
    name
    username
    email
    title
    created_at
    last_seen_at
    last_posted_at
    last_emailed_at
    trust_level
    approved
    suspended_at
    suspended_till
    blocked
    active
    admin
    moderator
    ip_address
    staged
    secondary_emails
    topics_entered
    posts_read_count
    time_read
    topic_count
    post_count
    likes_given
    likes_received
    location
    website
    views
    external_id
    external_email
    external_username
    external_name
    external_avatar_url
  ]
end
# Runs the user-list export once and captures every yielded CSV row.
let(:user_list_export) do
  [].tap { |rows| Jobs::ExportCsvFile.new.user_list_export { |row| rows << row } }
end
# Converts a raw CSV row into a header-keyed hash for readable assertions.
def to_hash(row)
  # Pairs each header with its column value (modern form of Hash[*zip.flatten]).
  user_list_header.zip(row).to_h
end
it "exports secondary emails" do
  user = Fabricate(:user)
  Fabricate(:secondary_email, user: user, primary: false)
  secondary_emails = user.secondary_emails

  # Locate this user's row by id and index it by header name.
  user = to_hash(user_list_export.find { |u| u[0].to_i == user.id })

  # Multiple secondary emails are serialized into one semicolon-separated cell.
  expect(user["secondary_emails"].split(";")).to match_array(secondary_emails)
end
it "exports sso data" do
  # DiscourseConnect (formerly "Discourse SSO") must be enabled for the
  # external_* columns to be populated.
  SiteSetting.discourse_connect_url = "https://www.example.com/sso"
  SiteSetting.enable_discourse_connect = true
  user = Fabricate(:user)
  user.user_profile.update_column(:location, "La,La Land")
  user.create_single_sign_on_record(
    external_id: "123",
    last_payload: "xxx",
    external_email: "test@test.com",
  )

  user = to_hash(user_list_export.find { |u| u[0].to_i == user.id })

  # A value containing a comma is CSV-quoted in the exported cell.
  expect(user["location"]).to eq('"La,La Land"')
  expect(user["external_id"]).to eq("123")
  expect(user["external_email"]).to eq("test@test.com")
end
end