Mirror of https://github.com/discourse/discourse.git
Add rubocop to our build. (#5004)
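The hunks below are mechanical style fixes of the kind RuboCop enforces: hash rockets become shorthand symbol keys, block and hash braces get inner spaces, default arguments and operators get surrounding spaces, and "and"/"or" become "&&"/"||". A minimal before/after sketch of those patterns, using a hypothetical method that is not part of the commit:

# Before (old style seen on the removed lines):
def import_ids_old(rows, opts={})
  ids = rows.map {|r| r["id"].to_i} if rows and !rows.empty?
  { :ids => ids }
end

# After (style enforced by this commit):
def import_ids_new(rows, opts = {})
  ids = rows.map { |r| r["id"].to_i } if rows && !rows.empty?
  { ids: ids }
end

p import_ids_new([{ "id" => "42" }])   # => {:ids=>[42]}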
@@ -24,11 +24,11 @@ class ImportScripts::MyAskBot < ImportScripts::Base
@tagmap = []
@td = PG::TextDecoder::TimestampWithTimeZone.new
@client = PG.connect(
:dbname => DB_NAME,
:host => DB_HOST,
:port => DB_PORT,
:user => DB_USER,
:password => DB_PASS
dbname: DB_NAME,
host: DB_HOST,
port: DB_PORT,
user: DB_USER,
password: DB_PASS
)
end

@@ -79,7 +79,7 @@ class ImportScripts::MyAskBot < ImportScripts::Base
tid = tag["thread_id"].to_i
tnm = tag["name"].downcase
if @tagmap[tid]
@tagmap[tid].push( tnm )
@tagmap[tid].push(tnm)
else
@tagmap[tid] = [ tnm ]
end
@@ -110,7 +110,7 @@ class ImportScripts::MyAskBot < ImportScripts::Base

break if users.ntuples() < 1

next if all_records_exist? :users, users.map {|u| u["id"].to_i}
next if all_records_exist? :users, users.map { |u| u["id"].to_i }

create_users(users, total: total_count, offset: offset) do |user|
{
@@ -155,7 +155,7 @@ class ImportScripts::MyAskBot < ImportScripts::Base

break if posts.ntuples() < 1

next if all_records_exist? :posts, posts.map {|p| p["id"].to_i}
next if all_records_exist? :posts, posts.map { |p| p["id"].to_i }

create_posts(posts, total: post_count, offset: offset) do |post|
pid = post["id"]
@@ -174,7 +174,7 @@ class ImportScripts::MyAskBot < ImportScripts::Base
id: pid,
title: post["title"],
category: cat,
custom_fields: {import_id: pid, import_thread_id: tid, import_tags: tags},
custom_fields: { import_id: pid, import_thread_id: tid, import_tags: tags },
user_id: user_id_from_imported_user_id(post["author_id"]) || Discourse::SYSTEM_USER_ID,
created_at: Time.zone.at(@td.decode(post["added_at"])),
raw: post["text"],
@@ -210,7 +210,7 @@ class ImportScripts::MyAskBot < ImportScripts::Base

break if posts.ntuples() < 1

next if all_records_exist? :posts, posts.map {|p| p["id"].to_i}
next if all_records_exist? :posts, posts.map { |p| p["id"].to_i }

create_posts(posts, total: post_count, offset: offset) do |post|
tid = post["thread_id"].to_i
@@ -220,7 +220,7 @@ class ImportScripts::MyAskBot < ImportScripts::Base
{
id: pid,
topic_id: parent[:topic_id],
custom_fields: {import_id: pid},
custom_fields: { import_id: pid },
user_id: user_id_from_imported_user_id(post["author_id"]) || Discourse::SYSTEM_USER_ID,
created_at: Time.zone.at(@td.decode(post["added_at"])),
raw: post["text"]
@@ -230,47 +230,48 @@ class ImportScripts::MyAskBot < ImportScripts::Base
end

def post_process_posts
puts "", "Postprocessing posts..."
current = 0
max = Post.count
# Rewrite internal links; e.g.
# ask.cvxr.com/question/(\d+)/[^'"}]*
# I am sure this is incomplete, but we didn't make heavy use of internal
# links on our site.
tmp = Regexp.quote("http://" << OLD_SITE)
r1 = /"(#{tmp})?\/question\/(\d+)\/[a-zA-Z-]*\/?"/
r2 = /\((#{tmp})?\/question\/(\d+)\/[a-zA-Z-]*\/?\)/
r3 = /<?#tmp\/question\/(\d+)\/[a-zA-Z-]*\/?>?/
Post.find_each do |post|
raw = post.raw.gsub(r1) do
if topic = topic_lookup_from_imported_post_id($2)
"\"#{topic[:url]}\""
else
$&
end
end
raw = raw.gsub(r2) do
if topic = topic_lookup_from_imported_post_id($2)
"(#{topic[:url]})"
else
$&
end
end
raw = raw.gsub(r3) do
if topic = topic_lookup_from_imported_post_id($1)
trec = Topic.find_by(id: topic[:topic_id])
"[#{trec.title}](#{topic[:url]})"
else
$&
end
end

if raw != post.raw
post.raw = raw
post.save
end
print_status(current += 1, max)
end
end
end

ImportScripts::MyAskBot.new.perform

@@ -30,7 +30,7 @@ class ImportScripts::Base
@bbcode_to_md = true if use_bbcode_to_md?
@site_settings_during_import = {}
@old_site_settings = {}
@start_times = {import: Time.now}
@start_times = { import: Time.now }
end

def preload_i18n
@@ -56,7 +56,7 @@ class ImportScripts::Base
reset_topic_counters

elapsed = Time.now - @start_times[:import]
puts '', '', 'Done (%02dh %02dmin %02dsec)' % [elapsed/3600, elapsed/60%60, elapsed%60]
puts '', '', 'Done (%02dh %02dmin %02dsec)' % [elapsed / 3600, elapsed / 60 % 60, elapsed % 60]

ensure
reset_site_settings
@@ -118,7 +118,7 @@ class ImportScripts::Base
delegate method_name, to: :@lookup
end

def create_admin(opts={})
def create_admin(opts = {})
admin = User.new
admin.email = opts[:email] || "sam.saffron@gmail.com"
admin.username = opts[:username] || "sam"
@@ -140,7 +140,7 @@ class ImportScripts::Base
# Required fields are :id and :name, where :id is the id of the
# group in the original datasource. The given id will not be used
# to create the Discourse group record.
def create_groups(results, opts={})
def create_groups(results, opts = {})
created = 0
skipped = 0
failed = 0
@@ -171,12 +171,12 @@ class ImportScripts::Base
end

def create_group(opts, import_id)
opts = opts.dup.tap {|o| o.delete(:id) }
opts = opts.dup.tap { |o| o.delete(:id) }
import_name = opts[:name]
opts[:name] = UserNameSuggester.suggest(import_name)

existing = Group.where(name: opts[:name]).first
return existing if existing and existing.custom_fields["import_id"].to_i == import_id.to_i
return existing if existing && existing.custom_fields["import_id"].to_i == (import_id.to_i)
g = existing || Group.new(opts)
g.custom_fields["import_id"] = import_id
g.custom_fields["import_name"] = import_name
@@ -196,8 +196,8 @@ class ImportScripts::Base

existing = "#{type.to_s.classify}CustomField".constantize
existing = existing.where(name: 'import_id')
.joins('JOIN import_ids ON val = value')
.count
if existing == import_ids.length
puts "Skipping #{import_ids.length} already imported #{type}"
return true
@@ -216,7 +216,7 @@ class ImportScripts::Base
# Required fields are :id and :email, where :id is the id of the
# user in the original datasource. The given id will not be used to
# create the Discourse user record.
def create_users(results, opts={})
def create_users(results, opts = {})
created = 0
skipped = 0
failed = 0
@@ -422,7 +422,7 @@ class ImportScripts::Base
# Attributes will be passed to the PostCreator.
# Topics should give attributes title and category.
# Replies should provide topic_id. Use topic_lookup_from_imported_post_id to find the topic.
def create_posts(results, opts={})
def create_posts(results, opts = {})
skipped = 0
created = 0
total = opts[:total] || results.size
@@ -502,7 +502,7 @@ class ImportScripts::Base
# Block should return a hash with the attributes for the bookmark.
# Required fields are :user_id and :post_id, where both ids are
# the values in the original datasource.
def create_bookmarks(results, opts={})
def create_bookmarks(results, opts = {})
created = 0
skipped = 0
total = opts[:total] || results.size
@@ -539,7 +539,7 @@ class ImportScripts::Base
[created, skipped]
end

def close_inactive_topics(opts={})
def close_inactive_topics(opts = {})
num_days = opts[:days] || 30
puts '', "Closing topics that have been inactive for more than #{num_days} days."

@@ -775,7 +775,7 @@ class ImportScripts::Base
end

def get_start_time(key)
@start_times.fetch(key) {|k| @start_times[k] = Time.now}
@start_times.fetch(key) { |k| @start_times[k] = Time.now }
end

def batches(batch_size)

@@ -10,7 +10,7 @@ module ImportScripts
end

def initialize(cols)
cols.each_with_index do |col,idx|
cols.each_with_index do |col, idx|
self.class.send(:define_method, col.downcase.gsub(/[\W]/, '_').squeeze('_')) do
@row[idx]
end
@@ -72,4 +72,4 @@ module ImportScripts
end
end
end
end
end

@@ -1,7 +1,6 @@
require 'mysql2'
require File.expand_path(File.dirname(__FILE__) + "/base.rb")

# Before running this script, paste these lines into your shell,
# then use arrow keys to edit the values
=begin
@@ -125,7 +124,7 @@ class ImportScripts::Bbpress < ImportScripts::Base
end

# gather every existent username
anon_posts.each do |id,post|
anon_posts.each do |id, post|
anon_names[post['name']] = Hash.new if not anon_names[post['name']]
# overwriting email address, one user can only use one email address
anon_names[post['name']]['email'] = post['email']
@@ -133,7 +132,7 @@ class ImportScripts::Bbpress < ImportScripts::Base
end

# make sure every user name has a unique email address
anon_names.each do |k,name|
anon_names.each do |k, name|
if not emails.include? name['email']
emails.push ( name['email'])
else
@@ -141,7 +140,6 @@ class ImportScripts::Bbpress < ImportScripts::Base
end
end

create_users(anon_names) do |k, n|
{
id: k,

@@ -43,7 +43,7 @@ class ImportScripts::Bespoke < ImportScripts::Base
|
||||
end
|
||||
|
||||
def initialize(cols)
|
||||
cols.each_with_index do |col,idx|
|
||||
cols.each_with_index do |col, idx|
|
||||
self.class.send(:define_method, col) do
|
||||
@row[idx]
|
||||
end
|
||||
@@ -71,7 +71,7 @@ class ImportScripts::Bespoke < ImportScripts::Base
|
||||
File.open(filename).each_line do |line|
|
||||
|
||||
# escaping is mental here
|
||||
line.gsub!(/\\(.{1})/){|m| m[-1] == '"'? '""': m[-1]}
|
||||
line.gsub!(/\\(.{1})/) { |m| m[-1] == '"' ? '""' : m[-1] }
|
||||
line.strip!
|
||||
|
||||
current_row << "\n" unless current_row.empty?
|
||||
@@ -119,7 +119,7 @@ class ImportScripts::Bespoke < ImportScripts::Base
|
||||
end
|
||||
|
||||
def total_rows(table)
|
||||
File.foreach("#{@path}/#{table}.csv").inject(0) {|c, line| c+1} - 1
|
||||
File.foreach("#{@path}/#{table}.csv").inject(0) { |c, line| c + 1 } - 1
|
||||
end
|
||||
|
||||
def import_users
|
||||
@@ -169,7 +169,7 @@ class ImportScripts::Bespoke < ImportScripts::Base
|
||||
def import_categories
|
||||
rows = []
|
||||
csv_parse("categories") do |row|
|
||||
rows << {id: row.id, name: row.name, description: row.description}
|
||||
rows << { id: row.id, name: row.name, description: row.description }
|
||||
end
|
||||
|
||||
create_categories(rows) do |row|
|
||||
@@ -181,46 +181,46 @@ class ImportScripts::Bespoke < ImportScripts::Base
|
||||
# purple and #1223f3
|
||||
raw.gsub!(/\[color=[#a-z0-9]+\]/i, "")
|
||||
raw.gsub!(/\[\/color\]/i, "")
|
||||
raw.gsub!(/\[signature\].+\[\/signature\]/im,"")
|
||||
raw.gsub!(/\[signature\].+\[\/signature\]/im, "")
|
||||
raw
|
||||
end
|
||||
|
||||
def import_post_batch!(posts, topics, offset, total)
|
||||
create_posts(posts, total: total, offset: offset) do |post|
|
||||
create_posts(posts, total: total, offset: offset) do |post|
|
||||
|
||||
mapped = {}
|
||||
mapped = {}
|
||||
|
||||
mapped[:id] = post[:id]
|
||||
mapped[:user_id] = user_id_from_imported_user_id(post[:user_id]) || -1
|
||||
mapped[:raw] = post[:body]
|
||||
mapped[:created_at] = post[:created_at]
|
||||
mapped[:id] = post[:id]
|
||||
mapped[:user_id] = user_id_from_imported_user_id(post[:user_id]) || -1
|
||||
mapped[:raw] = post[:body]
|
||||
mapped[:created_at] = post[:created_at]
|
||||
|
||||
topic = topics[post[:topic_id]]
|
||||
topic = topics[post[:topic_id]]
|
||||
|
||||
unless topic[:post_id]
|
||||
mapped[:category] = category_id_from_imported_category_id(topic[:category_id])
|
||||
mapped[:title] = post[:title]
|
||||
topic[:post_id] = post[:id]
|
||||
else
|
||||
parent = topic_lookup_from_imported_post_id(topic[:post_id])
|
||||
next unless parent
|
||||
unless topic[:post_id]
|
||||
mapped[:category] = category_id_from_imported_category_id(topic[:category_id])
|
||||
mapped[:title] = post[:title]
|
||||
topic[:post_id] = post[:id]
|
||||
else
|
||||
parent = topic_lookup_from_imported_post_id(topic[:post_id])
|
||||
next unless parent
|
||||
|
||||
mapped[:topic_id] = parent[:topic_id]
|
||||
mapped[:topic_id] = parent[:topic_id]
|
||||
|
||||
reply_to_post_id = post_id_from_imported_post_id(post[:reply_id])
|
||||
if reply_to_post_id
|
||||
reply_to_post_number = @post_number_map[reply_to_post_id]
|
||||
if reply_to_post_number && reply_to_post_number > 1
|
||||
mapped[:reply_to_post_number] = reply_to_post_number
|
||||
end
|
||||
reply_to_post_id = post_id_from_imported_post_id(post[:reply_id])
|
||||
if reply_to_post_id
|
||||
reply_to_post_number = @post_number_map[reply_to_post_id]
|
||||
if reply_to_post_number && reply_to_post_number > 1
|
||||
mapped[:reply_to_post_number] = reply_to_post_number
|
||||
end
|
||||
end
|
||||
|
||||
next if topic[:deleted] or post[:deleted]
|
||||
|
||||
mapped
|
||||
end
|
||||
|
||||
next if topic[:deleted] || post[:deleted]
|
||||
|
||||
mapped
|
||||
end
|
||||
|
||||
posts.clear
|
||||
end
|
||||
|
||||
@@ -262,7 +262,7 @@ class ImportScripts::Bespoke < ImportScripts::Base
|
||||
created_at: DateTime.parse(row.dcreate)
|
||||
}
|
||||
posts << row
|
||||
count+=1
|
||||
count += 1
|
||||
|
||||
if posts.length > 0 && posts.length % BATCH_SIZE == 0
|
||||
import_post_batch!(posts, topic_map, count - posts.length, total)
|
||||
@@ -274,7 +274,6 @@ class ImportScripts::Bespoke < ImportScripts::Base
|
||||
exit
|
||||
end
|
||||
|
||||
|
||||
end
|
||||
|
||||
unless ARGV[0] && Dir.exist?(ARGV[0])
|
||||
|
||||
@@ -176,11 +176,11 @@ class ImportScripts::DiscuzX < ImportScripts::Base
|
||||
last_posted_at: user['last_posted_at'],
|
||||
moderator: @moderator_group_id.include?(user['group_id']),
|
||||
admin: @admin_group_id.include?(user['group_id']),
|
||||
website: (user['website'] and user['website'].include?('.')) ? user['website'].strip : ( user['qq'] and user['qq'].strip == user['qq'].strip.to_i and user['qq'].strip.to_i > 10000 ) ? 'http://user.qzone.qq.com/' + user['qq'].strip : nil,
|
||||
bio_raw: first_exists((user['bio'] and CGI.unescapeHTML(user['bio'])), user['sightml'], user['spacenote']).strip[0,3000],
|
||||
location: first_exists(user['address'], (!user['resideprovince'].blank? ? [user['resideprovince'], user['residecity'], user['residedist'], user['residecommunity']] : [user['birthprovince'], user['birthcity'], user['birthdist'], user['birthcommunity']]).reject{|location|location.blank?}.join(' ')),
|
||||
website: (user['website'] && user['website'].include?('.')) ? user['website'].strip : (user['qq'] && user['qq'].strip == (user['qq'].strip.to_i) && user['qq'].strip.to_i > (10000)) ? 'http://user.qzone.qq.com/' + user['qq'].strip : nil,
|
||||
bio_raw: first_exists((user['bio'] && CGI.unescapeHTML(user['bio'])), user['sightml'], user['spacenote']).strip[0, 3000],
|
||||
location: first_exists(user['address'], (!user['resideprovince'].blank? ? [user['resideprovince'], user['residecity'], user['residedist'], user['residecommunity']] : [user['birthprovince'], user['birthcity'], user['birthdist'], user['birthcommunity']]).reject { |location|location.blank? }.join(' ')),
|
||||
post_create_action: lambda do |newmember|
|
||||
if user['avatar_exists'] == 1 and newmember.uploaded_avatar_id.blank?
|
||||
if user['avatar_exists'] == (1) && newmember.uploaded_avatar_id.blank?
|
||||
path, filename = discuzx_avatar_fullpath(user['id'])
|
||||
if path
|
||||
begin
|
||||
@@ -199,7 +199,7 @@ class ImportScripts::DiscuzX < ImportScripts::Base
|
||||
end
|
||||
end
|
||||
end
|
||||
if !user['spacecss'].blank? and newmember.user_profile.profile_background.blank?
|
||||
if !user['spacecss'].blank? && newmember.user_profile.profile_background.blank?
|
||||
# profile background
|
||||
if matched = user['spacecss'].match(/body\s*{[^}]*url\('?(.+?)'?\)/i)
|
||||
body_background = matched[1].split(ORIGINAL_SITE_PREFIX, 2).last
|
||||
@@ -234,7 +234,7 @@ class ImportScripts::DiscuzX < ImportScripts::Base
|
||||
|
||||
# we don't send email to the unconfirmed user
|
||||
newmember.update(email_digests: user['email_confirmed'] == 1) if newmember.email_digests
|
||||
newmember.update(name: '') if !newmember.name.blank? and newmember.name == newmember.username
|
||||
newmember.update(name: '') if !newmember.name.blank? && newmember.name == (newmember.username)
|
||||
end
|
||||
}
|
||||
end
|
||||
@@ -259,10 +259,10 @@ class ImportScripts::DiscuzX < ImportScripts::Base
|
||||
|
||||
max_position = Category.all.max_by(&:position).position
|
||||
create_categories(results) do |row|
|
||||
next if row['type'] == 'group' or row['status'] == 2 # or row['status'].to_i == 3 # 如果不想导入群组,取消注释
|
||||
next if row['type'] == ('group') || row['status'] == (2) # or row['status'].to_i == 3 # 如果不想导入群组,取消注释
|
||||
extra = PHP.unserialize(row['extra']) if !row['extra'].blank?
|
||||
if extra and !extra["namecolor"].blank?
|
||||
color = extra["namecolor"][1,6]
|
||||
if extra && !extra["namecolor"].blank?
|
||||
color = extra["namecolor"][1, 6]
|
||||
end
|
||||
|
||||
Category.all.max_by(&:position).position
|
||||
@@ -273,7 +273,7 @@ class ImportScripts::DiscuzX < ImportScripts::Base
|
||||
description: row['description'],
|
||||
position: row['position'].to_i + max_position,
|
||||
color: color,
|
||||
suppress_from_homepage: (row['status'] == 0 or row['status'] == 3),
|
||||
suppress_from_homepage: (row['status'] == (0) || row['status'] == (3)),
|
||||
post_create_action: lambda do |category|
|
||||
if slug = @category_slug[row['id']]
|
||||
category.update(slug: slug)
|
||||
@@ -289,7 +289,7 @@ class ImportScripts::DiscuzX < ImportScripts::Base
|
||||
if upload
|
||||
category.logo_url = upload.url
|
||||
# FIXME: I don't know how to get '/shared' by script. May change to Rails.root
|
||||
category.color = Miro::DominantColors.new(File.join('/shared', category.logo_url)).to_hex.first[1,6] if !color
|
||||
category.color = Miro::DominantColors.new(File.join('/shared', category.logo_url)).to_hex.first[1, 6] if !color
|
||||
category.save!
|
||||
end
|
||||
end
|
||||
@@ -332,10 +332,10 @@ class ImportScripts::DiscuzX < ImportScripts::Base
|
||||
LIMIT #{BATCH_SIZE}
|
||||
OFFSET #{offset};
|
||||
")
|
||||
# u.status != -1 AND u.groupid != 4 AND u.groupid != 5 用户未被锁定、禁访或禁言。在现实中的 Discuz 论坛,禁止的用户通常是广告机或驱逐的用户,这些不需要导入。
|
||||
# u.status != -1 AND u.groupid != 4 AND u.groupid != 5 用户未被锁定、禁访或禁言。在现实中的 Discuz 论坛,禁止的用户通常是广告机或驱逐的用户,这些不需要导入。
|
||||
break if results.size < 1
|
||||
|
||||
next if all_records_exist? :posts, results.map {|p| p["id"].to_i}
|
||||
next if all_records_exist? :posts, results.map { |p| p["id"].to_i }
|
||||
|
||||
create_posts(results, total: total_count, offset: offset) do |m|
|
||||
skip = false
|
||||
@@ -364,7 +364,7 @@ class ImportScripts::DiscuzX < ImportScripts::Base
|
||||
if results.empty?
|
||||
puts "WARNING: can't find poll options for topic #{m['topic_id']}, skip poll"
|
||||
else
|
||||
mapped[:raw].prepend "[poll#{poll['multiple'] ? ' type=multiple' : ''}#{poll['maxchoices'] > 0 ? " max=#{poll['maxchoices']}" : ''}]\n#{results.map{|option|'- ' + option['polloption']}.join("\n")}\n[/poll]\n"
|
||||
mapped[:raw].prepend "[poll#{poll['multiple'] ? ' type=multiple' : ''}#{poll['maxchoices'] > 0 ? " max=#{poll['maxchoices']}" : ''}]\n#{results.map { |option|'- ' + option['polloption'] }.join("\n")}\n[/poll]\n"
|
||||
end
|
||||
end
|
||||
else
|
||||
@@ -398,7 +398,7 @@ class ImportScripts::DiscuzX < ImportScripts::Base
|
||||
end
|
||||
elsif (m['status'] & 2) >> 1 == 1 # waiting for approve
|
||||
mapped[:post_create_action] = lambda do |post|
|
||||
PostAction.act(Discourse.system_user, post, 6, {take_action: false})
|
||||
PostAction.act(Discourse.system_user, post, 6, take_action: false)
|
||||
end
|
||||
end
|
||||
skip ? nil : mapped
|
||||
@@ -423,7 +423,7 @@ class ImportScripts::DiscuzX < ImportScripts::Base
|
||||
|
||||
break if results.size < 1
|
||||
|
||||
# next if all_records_exist?
|
||||
# next if all_records_exist?
|
||||
|
||||
create_bookmarks(results, total: total_count, offset: offset) do |row|
|
||||
{
|
||||
@@ -434,7 +434,6 @@ class ImportScripts::DiscuzX < ImportScripts::Base
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
def import_private_messages
|
||||
puts '', 'creating private messages'
|
||||
|
||||
@@ -494,7 +493,7 @@ class ImportScripts::DiscuzX < ImportScripts::Base
|
||||
SELECT plid thread_id, uid user_id
|
||||
FROM #{table_name 'ucenter_pm_members'}
|
||||
WHERE plid = #{m['thread_id']};
|
||||
").map {|r| r['user_id']}.uniq
|
||||
").map { |r| r['user_id'] }.uniq
|
||||
|
||||
mapped[:target_usernames] = import_user_ids.map! do |import_user_id|
|
||||
import_user_id.to_s == m['user_id'].to_s ? nil : User.find_by(id: user_id_from_imported_user_id(import_user_id)).try(:username)
|
||||
@@ -587,7 +586,7 @@ class ImportScripts::DiscuzX < ImportScripts::Base
|
||||
s.gsub!(/\[img[^\]]*\]https?:\/\/#{ORIGINAL_SITE_PREFIX}\/(.*)\[\/img\]/i, '[x-attach]\1[/x-attach]') # dont convert attachment
|
||||
s.gsub!(/<img[^>]*src="https?:\/\/#{ORIGINAL_SITE_PREFIX}\/(.*)".*?>/i, '[x-attach]\1[/x-attach]') # dont convert attachment
|
||||
s.gsub!(/\[img[^\]]*\]https?:\/\/www\.touhou\.cc\/blog\/(.*)\[\/img\]/i, '[x-attach]../blog/\1[/x-attach]') # 私货
|
||||
s.gsub!(/\[img[^\]]*\]https?:\/\/www\.touhou\.cc\/ucenter\/avatar.php\?uid=(\d+)[^\]]*\[\/img\]/i) { "[x-attach]#{discuzx_avatar_fullpath($1,false)[0]}[/x-attach]" } # 私货
|
||||
s.gsub!(/\[img[^\]]*\]https?:\/\/www\.touhou\.cc\/ucenter\/avatar.php\?uid=(\d+)[^\]]*\[\/img\]/i) { "[x-attach]#{discuzx_avatar_fullpath($1, false)[0]}[/x-attach]" } # 私货
|
||||
s.gsub!(/\[img=(\d+),(\d+)\]([^\]]*)\[\/img\]/i, '<img width="\1" height="\2" src="\3">')
|
||||
s.gsub!(/\[img\]([^\]]*)\[\/img\]/i, '<img src="\1">')
|
||||
|
||||
@@ -671,7 +670,7 @@ class ImportScripts::DiscuzX < ImportScripts::Base
|
||||
# @someone without the url
|
||||
s.gsub!(/@\[url=[^\[\]]*?\](\S*)\[\/url\]/i, '@\1')
|
||||
|
||||
s.scan(/http(?:s)?:\/\/#{ORIGINAL_SITE_PREFIX.gsub('.', '\.')}\/[^\[\]\s]*/) {|link|puts "WARNING: post #{import_id} can't replace internal url #{link}"}
|
||||
s.scan(/http(?:s)?:\/\/#{ORIGINAL_SITE_PREFIX.gsub('.', '\.')}\/[^\[\]\s]*/) { |link|puts "WARNING: post #{import_id} can't replace internal url #{link}" }
|
||||
|
||||
s.strip
|
||||
end
|
||||
@@ -785,7 +784,7 @@ class ImportScripts::DiscuzX < ImportScripts::Base
|
||||
FROM #{table_name 'forum_attachment'}
|
||||
WHERE pid = #{post.custom_fields['import_id']}"
|
||||
if !inline_attachments.empty?
|
||||
sql << " AND aid NOT IN (#{inline_attachments.join(',')})"
|
||||
sql << " AND aid NOT IN (#{inline_attachments.join(',')})"
|
||||
end
|
||||
|
||||
results = mysql_query(sql)
|
||||
@@ -805,7 +804,7 @@ class ImportScripts::DiscuzX < ImportScripts::Base
|
||||
end
|
||||
|
||||
if new_raw != post.raw
|
||||
PostRevisor.new(post).revise!(post.user, { raw: new_raw }, { bypass_bump: true, edit_reason: '从 Discuz 中导入附件' })
|
||||
PostRevisor.new(post).revise!(post.user, { raw: new_raw }, bypass_bump: true, edit_reason: '从 Discuz 中导入附件')
|
||||
end
|
||||
|
||||
success_count += 1
|
||||
@@ -818,7 +817,7 @@ class ImportScripts::DiscuzX < ImportScripts::Base
|
||||
end
|
||||
|
||||
# Create the full path to the discuz avatar specified from user id
|
||||
def discuzx_avatar_fullpath(user_id, absolute=true)
|
||||
def discuzx_avatar_fullpath(user_id, absolute = true)
|
||||
padded_id = user_id.to_s.rjust(9, '0')
|
||||
|
||||
part_1 = padded_id[0..2]
|
||||
@@ -945,7 +944,7 @@ class ImportScripts::DiscuzX < ImportScripts::Base
|
||||
end
|
||||
|
||||
def first_exists(*items)
|
||||
items.find{|item|!item.blank?} || ''
|
||||
items.find { |item|!item.blank? } || ''
|
||||
end
|
||||
|
||||
def mysql_query(sql)
|
||||
|
||||
@@ -11,7 +11,7 @@ class ImportScripts::Disqus < ImportScripts::Base
|
||||
def initialize
|
||||
abort("File '#{IMPORT_FILE}' not found") if !File.exist?(IMPORT_FILE)
|
||||
|
||||
@category = Category.where(name: IMPORT_CATEGORY).first
|
||||
@category = Category.where(name: IMPORT_CATEGORY).first
|
||||
abort("Category #{IMPORT_CATEGORY} not found") if @category.blank?
|
||||
|
||||
@parser = DisqusSAX.new
|
||||
@@ -135,7 +135,7 @@ class DisqusSAX < Nokogiri::XML::SAX::Document
|
||||
thread = @threads[id]
|
||||
thread[:posts] << @post
|
||||
else
|
||||
@thread = {id: id, posts: []}
|
||||
@thread = { id: id, posts: [] }
|
||||
end
|
||||
when 'parent'
|
||||
if @post
|
||||
@@ -194,7 +194,7 @@ class DisqusSAX < Nokogiri::XML::SAX::Document
|
||||
end
|
||||
|
||||
def inside?(*params)
|
||||
return !params.find{|p| !@inside[p]}
|
||||
return !params.find { |p| !@inside[p] }
|
||||
end
|
||||
|
||||
def normalize
|
||||
@@ -203,7 +203,7 @@ class DisqusSAX < Nokogiri::XML::SAX::Document
|
||||
# Remove any threads that have no posts
|
||||
@threads.delete(id)
|
||||
else
|
||||
t[:posts].delete_if {|p| p[:is_spam] == 'true' || p[:is_deleted] == 'true'}
|
||||
t[:posts].delete_if { |p| p[:is_spam] == 'true' || p[:is_deleted] == 'true' }
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
@@ -5,7 +5,7 @@ class ImportScripts::Drupal < ImportScripts::Base
|
||||
|
||||
DRUPAL_DB = ENV['DRUPAL_DB'] || "newsite3"
|
||||
VID = ENV['DRUPAL_VID'] || 1
|
||||
|
||||
|
||||
def initialize
|
||||
super
|
||||
|
||||
@@ -23,7 +23,7 @@ class ImportScripts::Drupal < ImportScripts::Base
|
||||
|
||||
def execute
|
||||
create_users(@client.query("SELECT uid id, name, mail email, created FROM users;")) do |row|
|
||||
{id: row['id'], username: row['name'], email: row['email'], created_at: Time.zone.at(row['created'])}
|
||||
{ id: row['id'], username: row['name'], email: row['email'], created_at: Time.zone.at(row['created']) }
|
||||
end
|
||||
|
||||
# You'll need to edit the following query for your Drupal install:
|
||||
@@ -32,7 +32,7 @@ class ImportScripts::Drupal < ImportScripts::Base
|
||||
# * Table name may be term_data.
|
||||
# * May need to select a vid other than 1.
|
||||
create_categories(categories_query) do |c|
|
||||
{id: c['tid'], name: c['name'], description: c['description']}
|
||||
{ id: c['tid'], name: c['name'], description: c['description'] }
|
||||
end
|
||||
|
||||
# "Nodes" in Drupal are divided into types. Here we import two types,
|
||||
@@ -65,8 +65,8 @@ class ImportScripts::Drupal < ImportScripts::Base
|
||||
|
||||
results = @client.query("
|
||||
SELECT n.nid nid,
|
||||
n.title title,
|
||||
n.uid uid,
|
||||
n.title title,
|
||||
n.uid uid,
|
||||
n.created created,
|
||||
n.sticky sticky,
|
||||
nr.body body
|
||||
@@ -85,7 +85,7 @@ class ImportScripts::Drupal < ImportScripts::Base
|
||||
created_at: Time.zone.at(row['created']),
|
||||
pinned_at: row['sticky'].to_i == 1 ? Time.zone.at(row['created']) : nil,
|
||||
title: row['title'].try(:strip),
|
||||
custom_fields: {import_id: "nid:#{row['nid']}"}
|
||||
custom_fields: { import_id: "nid:#{row['nid']}" }
|
||||
}
|
||||
end
|
||||
end
|
||||
@@ -123,7 +123,7 @@ class ImportScripts::Drupal < ImportScripts::Base
|
||||
|
||||
break if results.size < 1
|
||||
|
||||
next if all_records_exist? :posts, results.map {|p| "nid:#{p['nid']}"}
|
||||
next if all_records_exist? :posts, results.map { |p| "nid:#{p['nid']}" }
|
||||
|
||||
create_posts(results, total: total_count, offset: offset) do |row|
|
||||
{
|
||||
@@ -141,7 +141,7 @@ class ImportScripts::Drupal < ImportScripts::Base
|
||||
|
||||
def create_replies
|
||||
puts '', "creating replies in topics"
|
||||
|
||||
|
||||
if ENV['DRUPAL_IMPORT_BLOG']
|
||||
node_types = "('forum','blog')"
|
||||
else
|
||||
@@ -149,7 +149,7 @@ class ImportScripts::Drupal < ImportScripts::Base
|
||||
end
|
||||
|
||||
total_count = @client.query("
|
||||
SELECT COUNT(*) count
|
||||
SELECT COUNT(*) count
|
||||
FROM comments c
|
||||
LEFT JOIN node n ON n.nid=c.nid
|
||||
WHERE node.type IN #{node_types}
|
||||
@@ -167,18 +167,18 @@ class ImportScripts::Drupal < ImportScripts::Base
|
||||
c.uid,
|
||||
c.timestamp,
|
||||
c.comment body
|
||||
FROM comments c
|
||||
FROM comments c
|
||||
LEFT JOIN node n ON n.nid=c.nid
|
||||
WHERE n.type IN #{node_types}
|
||||
AND n.status = 1
|
||||
AND c.status=0
|
||||
AND c.status=0
|
||||
LIMIT #{batch_size}
|
||||
OFFSET #{offset};
|
||||
", cache_rows: false)
|
||||
|
||||
break if results.size < 1
|
||||
|
||||
next if all_records_exist? :posts, results.map {|p| "cid:#{p['cid']}"}
|
||||
next if all_records_exist? :posts, results.map { |p| "cid:#{p['cid']}" }
|
||||
|
||||
create_posts(results, total: total_count, offset: offset) do |row|
|
||||
topic_mapping = topic_lookup_from_imported_post_id("nid:#{row['nid']}")
|
||||
@@ -192,7 +192,7 @@ class ImportScripts::Drupal < ImportScripts::Base
|
||||
}
|
||||
if row['pid']
|
||||
parent = topic_lookup_from_imported_post_id("cid:#{row['pid']}")
|
||||
h[:reply_to_post_number] = parent[:post_number] if parent and parent[:post_number] > 1
|
||||
h[:reply_to_post_number] = parent[:post_number] if parent && parent[:post_number] > (1)
|
||||
end
|
||||
h
|
||||
else
|
||||
@@ -205,6 +205,6 @@ class ImportScripts::Drupal < ImportScripts::Base
|
||||
|
||||
end
|
||||
|
||||
if __FILE__==$0
|
||||
if __FILE__ == $0
|
||||
ImportScripts::Drupal.new.perform
|
||||
end
|
||||
|
||||
@@ -23,7 +23,7 @@ class ImportScripts::Drupal < ImportScripts::Base
|
||||
|
||||
def execute
|
||||
create_users(@client.query("SELECT uid id, name, mail email, created FROM users;")) do |row|
|
||||
{id: row['id'], username: row['name'], email: row['email'], created_at: Time.zone.at(row['created'])}
|
||||
{ id: row['id'], username: row['name'], email: row['email'], created_at: Time.zone.at(row['created']) }
|
||||
end
|
||||
|
||||
# You'll need to edit the following query for your Drupal install:
|
||||
@@ -32,7 +32,7 @@ class ImportScripts::Drupal < ImportScripts::Base
|
||||
# * Table name may be term_data.
|
||||
# * May need to select a vid other than 1.
|
||||
create_categories(categories_query) do |c|
|
||||
{id: c['tid'], name: c['name'], description: c['description']}
|
||||
{ id: c['tid'], name: c['name'], description: c['description'] }
|
||||
end
|
||||
|
||||
# "Nodes" in Drupal are divided into types. Here we import two types,
|
||||
@@ -82,7 +82,7 @@ class ImportScripts::Drupal < ImportScripts::Base
|
||||
created_at: Time.zone.at(row['created']),
|
||||
pinned_at: row['sticky'].to_i == 1 ? Time.zone.at(row['created']) : nil,
|
||||
title: row['title'].try(:strip),
|
||||
custom_fields: {import_id: "nid:#{row['nid']}"}
|
||||
custom_fields: { import_id: "nid:#{row['nid']}" }
|
||||
}
|
||||
end
|
||||
end
|
||||
@@ -121,7 +121,7 @@ class ImportScripts::Drupal < ImportScripts::Base
|
||||
|
||||
break if results.size < 1
|
||||
|
||||
next if all_records_exist? :posts, results.map {|p| "nid:#{p['nid']}"}
|
||||
next if all_records_exist? :posts, results.map { |p| "nid:#{p['nid']}" }
|
||||
|
||||
create_posts(results, total: total_count, offset: offset) do |row|
|
||||
{
|
||||
@@ -169,7 +169,7 @@ class ImportScripts::Drupal < ImportScripts::Base
|
||||
|
||||
break if results.size < 1
|
||||
|
||||
next if all_records_exist? :posts, results.map {|p| "cid:#{p['cid']}"}
|
||||
next if all_records_exist? :posts, results.map { |p| "cid:#{p['cid']}" }
|
||||
|
||||
create_posts(results, total: total_count, offset: offset) do |row|
|
||||
topic_mapping = topic_lookup_from_imported_post_id("nid:#{row['nid']}")
|
||||
@@ -183,7 +183,7 @@ class ImportScripts::Drupal < ImportScripts::Base
|
||||
}
|
||||
if row['pid']
|
||||
parent = topic_lookup_from_imported_post_id("cid:#{row['pid']}")
|
||||
h[:reply_to_post_number] = parent[:post_number] if parent and parent[:post_number] > 1
|
||||
h[:reply_to_post_number] = parent[:post_number] if parent && parent[:post_number] > (1)
|
||||
end
|
||||
h
|
||||
else
|
||||
@@ -196,6 +196,6 @@ class ImportScripts::Drupal < ImportScripts::Base
|
||||
|
||||
end
|
||||
|
||||
if __FILE__==$0
|
||||
if __FILE__ == $0
|
||||
ImportScripts::Drupal.new.perform
|
||||
end
|
||||
|
||||
@@ -8,7 +8,7 @@ class ImportScripts::DrupalJson < ImportScripts::Base
|
||||
|
||||
def initialize
|
||||
super
|
||||
@users_json = load_json("formatted_users.json")
|
||||
@users_json = load_json("formatted_users.json")
|
||||
end
|
||||
|
||||
def execute
|
||||
@@ -40,6 +40,6 @@ class ImportScripts::DrupalJson < ImportScripts::Base
|
||||
end
|
||||
end
|
||||
|
||||
if __FILE__==$0
|
||||
if __FILE__ == $0
|
||||
ImportScripts::DrupalJson.new.perform
|
||||
end
|
||||
|
||||
@@ -56,7 +56,7 @@ class ImportScripts::DrupalQA < ImportScripts::Drupal
|
||||
|
||||
break if results.size < 1
|
||||
|
||||
next if all_records_exist? :posts, results.map {|p| "nid:#{p['nid']}"}
|
||||
next if all_records_exist? :posts, results.map { |p| "nid:#{p['nid']}" }
|
||||
|
||||
create_posts(results, total: total_count, offset: offset) do |row|
|
||||
{
|
||||
@@ -102,7 +102,7 @@ class ImportScripts::DrupalQA < ImportScripts::Drupal
|
||||
|
||||
break if results.size < 1
|
||||
|
||||
next if all_records_exist? :posts, results.map {|p| "cid:#{p['cid']}"}
|
||||
next if all_records_exist? :posts, results.map { |p| "cid:#{p['cid']}" }
|
||||
|
||||
create_posts(results, total: total_count, offset: offset) do |row|
|
||||
topic_mapping = topic_lookup_from_imported_post_id("nid:#{row['nid']}")
|
||||
@@ -155,7 +155,7 @@ class ImportScripts::DrupalQA < ImportScripts::Drupal
|
||||
|
||||
break if results.size < 1
|
||||
|
||||
next if all_records_exist? :posts, results.map {|p| "cid:#{p['cid']}"}
|
||||
next if all_records_exist? :posts, results.map { |p| "cid:#{p['cid']}" }
|
||||
|
||||
create_posts(results, total: total_count, offset: offset) do |row|
|
||||
topic_mapping = topic_lookup_from_imported_post_id("nid:#{row['nid']}")
|
||||
@@ -207,7 +207,7 @@ class ImportScripts::DrupalQA < ImportScripts::Drupal
|
||||
|
||||
break if results.size < 1
|
||||
|
||||
next if all_records_exist? :posts, results.map {|p| "cid:#{p['cid']}"}
|
||||
next if all_records_exist? :posts, results.map { |p| "cid:#{p['cid']}" }
|
||||
|
||||
create_posts(results, total: total_count, offset: offset) do |row|
|
||||
topic_mapping = topic_lookup_from_imported_post_id("nid:#{row['nid']}")
|
||||
@@ -235,6 +235,6 @@ class ImportScripts::DrupalQA < ImportScripts::Drupal
|
||||
|
||||
end
|
||||
|
||||
if __FILE__==$0
|
||||
if __FILE__ == $0
|
||||
ImportScripts::DrupalQA.new.perform
|
||||
end
|
||||
|
||||
@@ -74,7 +74,7 @@ class ImportScripts::FluxBB < ImportScripts::Base
|
||||
|
||||
break if results.size < 1
|
||||
|
||||
next if all_records_exist? :users, results.map {|u| u["id"].to_i}
|
||||
next if all_records_exist? :users, results.map { |u| u["id"].to_i }
|
||||
|
||||
create_users(results, total: total_count, offset: offset) do |user|
|
||||
{ id: user['id'],
|
||||
@@ -91,7 +91,7 @@ class ImportScripts::FluxBB < ImportScripts::Base
|
||||
admin: user['group_id'] == 1 }
|
||||
end
|
||||
|
||||
groupusers = results.select{ |user| user['group_id'] > 2 }
|
||||
groupusers = results.select { |user| user['group_id'] > 2 }
|
||||
|
||||
groupusers.each do |user|
|
||||
if user['group_id']
|
||||
@@ -164,7 +164,7 @@ class ImportScripts::FluxBB < ImportScripts::Base
|
||||
").to_a
|
||||
|
||||
break if results.size < 1
|
||||
next if all_records_exist? :posts, results.map {|m| m['id'].to_i}
|
||||
next if all_records_exist? :posts, results.map { |m| m['id'].to_i }
|
||||
|
||||
create_posts(results, total: total_count, offset: offset) do |m|
|
||||
skip = false
|
||||
|
||||
@@ -37,7 +37,6 @@ class ImportScripts::GetSatisfaction < ImportScripts::Base
|
||||
super
|
||||
end
|
||||
|
||||
|
||||
def execute
|
||||
c = Category.find_by(name: 'Old Forum') ||
|
||||
Category.create!(name: 'Old Forum', user: Discourse.system_user)
|
||||
@@ -61,7 +60,7 @@ class ImportScripts::GetSatisfaction < ImportScripts::Base
|
||||
end
|
||||
|
||||
def initialize(cols)
|
||||
cols.each_with_index do |col,idx|
|
||||
cols.each_with_index do |col, idx|
|
||||
self.class.send(:define_method, col) do
|
||||
@row[idx]
|
||||
end
|
||||
@@ -134,7 +133,7 @@ class ImportScripts::GetSatisfaction < ImportScripts::Base
|
||||
def total_rows(table)
|
||||
# In case of Excel export file, I converted it to CSV and used:
|
||||
# CSV.foreach("#{@path}/#{table}.csv", encoding:'iso-8859-1:utf-8').inject(0) {|c, line| c+1} - 1
|
||||
File.foreach("#{@path}/#{table}.csv").inject(0) {|c, line| c+1} - 1
|
||||
File.foreach("#{@path}/#{table}.csv").inject(0) { |c, line| c + 1 } - 1
|
||||
end
|
||||
|
||||
def import_users
|
||||
@@ -191,7 +190,7 @@ class ImportScripts::GetSatisfaction < ImportScripts::Base
|
||||
def import_categories
|
||||
rows = []
|
||||
csv_parse("categories") do |row|
|
||||
rows << {id: row.id, name: row.name, description: row.description}
|
||||
rows << { id: row.id, name: row.name, description: row.description }
|
||||
end
|
||||
|
||||
create_categories(rows) do |row|
|
||||
@@ -209,7 +208,7 @@ class ImportScripts::GetSatisfaction < ImportScripts::Base
|
||||
code = $2
|
||||
hoist = SecureRandom.hex
|
||||
# tidy code, wow, this is impressively crazy
|
||||
code.gsub!(/ (\s*)/,"\n\\1")
|
||||
code.gsub!(/ (\s*)/, "\n\\1")
|
||||
code.gsub!(/^\s*\n$/, "\n")
|
||||
code.gsub!(/\n+/m, "\n")
|
||||
code.strip!
|
||||
@@ -231,48 +230,47 @@ class ImportScripts::GetSatisfaction < ImportScripts::Base
|
||||
end
|
||||
|
||||
def import_post_batch!(posts, topics, offset, total)
|
||||
create_posts(posts, total: total, offset: offset) do |post|
|
||||
create_posts(posts, total: total, offset: offset) do |post|
|
||||
|
||||
mapped = {}
|
||||
mapped = {}
|
||||
|
||||
mapped[:id] = post[:id]
|
||||
mapped[:user_id] = user_id_from_imported_user_id(post[:user_id]) || -1
|
||||
mapped[:raw] = post[:body]
|
||||
mapped[:created_at] = post[:created_at]
|
||||
mapped[:id] = post[:id]
|
||||
mapped[:user_id] = user_id_from_imported_user_id(post[:user_id]) || -1
|
||||
mapped[:raw] = post[:body]
|
||||
mapped[:created_at] = post[:created_at]
|
||||
|
||||
topic = topics[post[:topic_id]]
|
||||
topic = topics[post[:topic_id]]
|
||||
|
||||
unless topic
|
||||
p "MISSING TOPIC #{post[:topic_id]}"
|
||||
p post
|
||||
next
|
||||
end
|
||||
unless topic
|
||||
p "MISSING TOPIC #{post[:topic_id]}"
|
||||
p post
|
||||
next
|
||||
end
|
||||
|
||||
unless topic[:post_id]
|
||||
mapped[:title] = post[:title] || "Topic title missing"
|
||||
topic[:post_id] = post[:id]
|
||||
mapped[:category] = post[:category]
|
||||
else
|
||||
parent = topic_lookup_from_imported_post_id(topic[:post_id])
|
||||
next unless parent
|
||||
|
||||
unless topic[:post_id]
|
||||
mapped[:title] = post[:title] || "Topic title missing"
|
||||
topic[:post_id] = post[:id]
|
||||
mapped[:category] = post[:category]
|
||||
else
|
||||
parent = topic_lookup_from_imported_post_id(topic[:post_id])
|
||||
next unless parent
|
||||
mapped[:topic_id] = parent[:topic_id]
|
||||
|
||||
mapped[:topic_id] = parent[:topic_id]
|
||||
|
||||
reply_to_post_id = post_id_from_imported_post_id(post[:reply_id])
|
||||
if reply_to_post_id
|
||||
reply_to_post_number = @post_number_map[reply_to_post_id]
|
||||
if reply_to_post_number && reply_to_post_number > 1
|
||||
mapped[:reply_to_post_number] = reply_to_post_number
|
||||
end
|
||||
reply_to_post_id = post_id_from_imported_post_id(post[:reply_id])
|
||||
if reply_to_post_id
|
||||
reply_to_post_number = @post_number_map[reply_to_post_id]
|
||||
if reply_to_post_number && reply_to_post_number > 1
|
||||
mapped[:reply_to_post_number] = reply_to_post_number
|
||||
end
|
||||
end
|
||||
|
||||
next if topic[:deleted] or post[:deleted]
|
||||
|
||||
mapped
|
||||
end
|
||||
|
||||
next if topic[:deleted] || post[:deleted]
|
||||
|
||||
mapped
|
||||
end
|
||||
|
||||
posts.clear
|
||||
end
|
||||
|
||||
@@ -324,7 +322,7 @@ class ImportScripts::GetSatisfaction < ImportScripts::Base
|
||||
created_at: DateTime.parse(row.created_at)
|
||||
}
|
||||
posts << row
|
||||
count+=1
|
||||
count += 1
|
||||
|
||||
if posts.length > 0 && posts.length % BATCH_SIZE == 0
|
||||
import_post_batch!(posts, topic_map, count - posts.length, total)
|
||||
|
||||
@@ -6,7 +6,7 @@ require File.expand_path(File.dirname(__FILE__) + "/base.rb")
|
||||
class ImportScripts::Jive < ImportScripts::Base
|
||||
|
||||
BATCH_SIZE = 1000
|
||||
CATEGORY_IDS = [2023,2003,2004,2042,2036,2029] # categories that should be imported
|
||||
CATEGORY_IDS = [2023, 2003, 2004, 2042, 2036, 2029] # categories that should be imported
|
||||
|
||||
def initialize(path)
|
||||
@path = path
|
||||
@@ -45,7 +45,7 @@ class ImportScripts::Jive < ImportScripts::Base
|
||||
end
|
||||
|
||||
def initialize(cols)
|
||||
cols.each_with_index do |col,idx|
|
||||
cols.each_with_index do |col, idx|
|
||||
self.class.send(:define_method, col) do
|
||||
@row[idx]
|
||||
end
|
||||
@@ -72,7 +72,7 @@ class ImportScripts::Jive < ImportScripts::Base
|
||||
|
||||
File.open(filename).each_line do |line|
|
||||
|
||||
line.gsub!(/\\(.{1})/){|m| m[-1] == '"'? '""': m[-1]}
|
||||
line.gsub!(/\\(.{1})/) { |m| m[-1] == '"' ? '""' : m[-1] }
|
||||
line.strip!
|
||||
|
||||
current_row << "\n" unless current_row.empty?
|
||||
@@ -120,7 +120,7 @@ class ImportScripts::Jive < ImportScripts::Base
|
||||
end
|
||||
|
||||
def total_rows(table)
|
||||
File.foreach("#{@path}/#{table}.csv").inject(0) {|c, line| c+1} - 1
|
||||
File.foreach("#{@path}/#{table}.csv").inject(0) { |c, line| c + 1 } - 1
|
||||
end
|
||||
|
||||
def import_groups
|
||||
@@ -128,7 +128,7 @@ class ImportScripts::Jive < ImportScripts::Base
|
||||
|
||||
rows = []
|
||||
csv_parse("groups") do |row|
|
||||
rows << {id: row.groupid, name: row.name}
|
||||
rows << { id: row.groupid, name: row.name }
|
||||
end
|
||||
|
||||
create_groups(rows) do |row|
|
||||
@@ -204,7 +204,7 @@ class ImportScripts::Jive < ImportScripts::Base
|
||||
|
||||
csv_parse("communities") do |row|
|
||||
next unless CATEGORY_IDS.include?(row.communityid.to_i)
|
||||
rows << {id: row.communityid, name: "#{row.name} (#{row.communityid})"}
|
||||
rows << { id: row.communityid, name: "#{row.name} (#{row.communityid})" }
|
||||
end
|
||||
|
||||
create_categories(rows) do |row|
|
||||
@@ -228,47 +228,47 @@ class ImportScripts::Jive < ImportScripts::Base
|
||||
end
|
||||
|
||||
def import_post_batch!(posts, topics, offset, total)
|
||||
create_posts(posts, total: total, offset: offset) do |post|
|
||||
create_posts(posts, total: total, offset: offset) do |post|
|
||||
|
||||
mapped = {}
|
||||
mapped = {}
|
||||
|
||||
mapped[:id] = post[:id]
|
||||
mapped[:user_id] = user_id_from_imported_user_id(post[:user_id]) || -1
|
||||
mapped[:raw] = post[:body]
|
||||
mapped[:created_at] = post[:created_at]
|
||||
mapped[:id] = post[:id]
|
||||
mapped[:user_id] = user_id_from_imported_user_id(post[:user_id]) || -1
|
||||
mapped[:raw] = post[:body]
|
||||
mapped[:created_at] = post[:created_at]
|
||||
|
||||
topic = topics[post[:topic_id]]
|
||||
topic = topics[post[:topic_id]]
|
||||
|
||||
unless topic
|
||||
p "MISSING TOPIC #{post[:topic_id]}"
|
||||
p post
|
||||
next
|
||||
end
|
||||
unless topic
|
||||
p "MISSING TOPIC #{post[:topic_id]}"
|
||||
p post
|
||||
next
|
||||
end
|
||||
|
||||
unless topic[:post_id]
|
||||
mapped[:category] = category_id_from_imported_category_id(topic[:category_id])
|
||||
mapped[:title] = post[:title]
|
||||
topic[:post_id] = post[:id]
|
||||
else
|
||||
parent = topic_lookup_from_imported_post_id(topic[:post_id])
|
||||
next unless parent
|
||||
unless topic[:post_id]
|
||||
mapped[:category] = category_id_from_imported_category_id(topic[:category_id])
|
||||
mapped[:title] = post[:title]
|
||||
topic[:post_id] = post[:id]
|
||||
else
|
||||
parent = topic_lookup_from_imported_post_id(topic[:post_id])
|
||||
next unless parent
|
||||
|
||||
mapped[:topic_id] = parent[:topic_id]
|
||||
mapped[:topic_id] = parent[:topic_id]
|
||||
|
||||
reply_to_post_id = post_id_from_imported_post_id(post[:reply_id])
|
||||
if reply_to_post_id
|
||||
reply_to_post_number = @post_number_map[reply_to_post_id]
|
||||
if reply_to_post_number && reply_to_post_number > 1
|
||||
mapped[:reply_to_post_number] = reply_to_post_number
|
||||
end
|
||||
reply_to_post_id = post_id_from_imported_post_id(post[:reply_id])
|
||||
if reply_to_post_id
|
||||
reply_to_post_number = @post_number_map[reply_to_post_id]
|
||||
if reply_to_post_number && reply_to_post_number > 1
|
||||
mapped[:reply_to_post_number] = reply_to_post_number
|
||||
end
|
||||
end
|
||||
|
||||
next if topic[:deleted] or post[:deleted]
|
||||
|
||||
mapped
|
||||
end
|
||||
|
||||
next if topic[:deleted] || post[:deleted]
|
||||
|
||||
mapped
|
||||
end
|
||||
|
||||
posts.clear
|
||||
end
|
||||
|
||||
@@ -290,14 +290,14 @@ class ImportScripts::Jive < ImportScripts::Base
|
||||
#IMAGE UPLOADER
|
||||
if thread.imagecount
|
||||
Dir.foreach("/var/www/discourse/script/import_scripts/jive/img/#{thread.messageid}") do |item|
|
||||
next if item == '.' or item == '..' or item == '.DS_Store'
|
||||
next if item == ('.') || item == ('..') || item == ('.DS_Store')
|
||||
photo_path = "/var/www/discourse/script/import_scripts/jive/img/#{thread.messageid}/#{item}"
|
||||
upload = create_upload(thread.userid, photo_path, File.basename(photo_path))
|
||||
if upload.persisted?
|
||||
puts "Image upload is successful for #{photo_path}, new path is #{upload.url}!"
|
||||
thread.body.gsub!(item,upload.url)
|
||||
puts "Image upload is successful for #{photo_path}, new path is #{upload.url}!"
|
||||
thread.body.gsub!(item, upload.url)
|
||||
else
|
||||
puts "Error: Image upload is not successful for #{photo_path}!"
|
||||
puts "Error: Image upload is not successful for #{photo_path}!"
|
||||
end
|
||||
end
|
||||
end
|
||||
@@ -305,15 +305,15 @@ class ImportScripts::Jive < ImportScripts::Base
|
||||
#ATTACHMENT UPLOADER
|
||||
if thread.attachmentcount
|
||||
Dir.foreach("/var/www/discourse/script/import_scripts/jive/attach/#{thread.messageid}") do |item|
|
||||
next if item == '.' or item == '..' or item == '.DS_Store'
|
||||
next if item == ('.') || item == ('..') || item == ('.DS_Store')
|
||||
attach_path = "/var/www/discourse/script/import_scripts/jive/attach/#{thread.messageid}/#{item}"
|
||||
upload = create_upload(thread.userid, attach_path, File.basename(attach_path))
|
||||
if upload.persisted?
|
||||
puts "Attachment upload is successful for #{attach_path}, new path is #{upload.url}!"
|
||||
thread.body.gsub!(item,upload.url)
|
||||
thread.body << "<br/><br/> #{attachment_html(upload,item)}"
|
||||
puts "Attachment upload is successful for #{attach_path}, new path is #{upload.url}!"
|
||||
thread.body.gsub!(item, upload.url)
|
||||
thread.body << "<br/><br/> #{attachment_html(upload, item)}"
|
||||
else
|
||||
puts "Error: Attachment upload is not successful for #{attach_path}!"
|
||||
puts "Error: Attachment upload is not successful for #{attach_path}!"
|
||||
end
|
||||
end
|
||||
end
|
||||
@@ -337,7 +337,7 @@ class ImportScripts::Jive < ImportScripts::Base
|
||||
|
||||
topic_map.each do |_, topic|
|
||||
posts << topic if topic[:body]
|
||||
count+=1
|
||||
count += 1
|
||||
end
|
||||
|
||||
csv_parse("messages") do |thread|
|
||||
@@ -350,14 +350,14 @@ class ImportScripts::Jive < ImportScripts::Base
|
||||
#IMAGE UPLOADER
|
||||
if thread.imagecount
|
||||
Dir.foreach("/var/www/discourse/script/import_scripts/jive/img/#{thread.messageid}") do |item|
|
||||
next if item == '.' or item == '..' or item == '.DS_Store'
|
||||
next if item == ('.') || item == ('..') || item == ('.DS_Store')
|
||||
photo_path = "/var/www/discourse/script/import_scripts/jive/img/#{thread.messageid}/#{item}"
|
||||
upload = create_upload(thread.userid, photo_path, File.basename(photo_path))
|
||||
if upload.persisted?
|
||||
puts "Image upload is successful for #{photo_path}, new path is #{upload.url}!"
|
||||
thread.body.gsub!(item,upload.url)
|
||||
puts "Image upload is successful for #{photo_path}, new path is #{upload.url}!"
|
||||
thread.body.gsub!(item, upload.url)
|
||||
else
|
||||
puts "Error: Image upload is not successful for #{photo_path}!"
|
||||
puts "Error: Image upload is not successful for #{photo_path}!"
|
||||
end
|
||||
end
|
||||
end
|
||||
@@ -365,15 +365,15 @@ class ImportScripts::Jive < ImportScripts::Base
|
||||
#ATTACHMENT UPLOADER
|
||||
if thread.attachmentcount
|
||||
Dir.foreach("/var/www/discourse/script/import_scripts/jive/attach/#{thread.messageid}") do |item|
|
||||
next if item == '.' or item == '..' or item == '.DS_Store'
|
||||
next if item == ('.') || item == ('..') || item == ('.DS_Store')
|
||||
attach_path = "/var/www/discourse/script/import_scripts/jive/attach/#{thread.messageid}/#{item}"
|
||||
upload = create_upload(thread.userid, attach_path, File.basename(attach_path))
|
||||
if upload.persisted?
|
||||
puts "Attachment upload is successful for #{attach_path}, new path is #{upload.url}!"
|
||||
thread.body.gsub!(item,upload.url)
|
||||
thread.body << "<br/><br/> #{attachment_html(upload,item)}"
|
||||
puts "Attachment upload is successful for #{attach_path}, new path is #{upload.url}!"
|
||||
thread.body.gsub!(item, upload.url)
|
||||
thread.body << "<br/><br/> #{attachment_html(upload, item)}"
|
||||
else
|
||||
puts "Error: Attachment upload is not successful for #{attach_path}!"
|
||||
puts "Error: Attachment upload is not successful for #{attach_path}!"
|
||||
end
|
||||
end
|
||||
end
|
||||
@@ -387,7 +387,7 @@ class ImportScripts::Jive < ImportScripts::Base
|
||||
created_at: DateTime.parse(thread.creationdate)
|
||||
}
|
||||
posts << row
|
||||
count+=1
|
||||
count += 1
|
||||
|
||||
if posts.length > 0 && posts.length % BATCH_SIZE == 0
|
||||
import_post_batch!(posts, topic_map, count - posts.length, total)
|
||||
|
||||
@@ -334,7 +334,7 @@ class ImportScripts::JiveApi < ImportScripts::Base
|
||||
SQL
|
||||
end
|
||||
|
||||
def get(url_or_path, authenticated=false)
|
||||
def get(url_or_path, authenticated = false)
|
||||
tries ||= 3
|
||||
|
||||
command = ["curl", "--silent"]
|
||||
|
||||
@@ -6,7 +6,7 @@ require File.expand_path(File.dirname(__FILE__) + "/base.rb")
|
||||
class ImportScripts::JsonGeneric < ImportScripts::Base
|
||||
|
||||
JSON_FILE_PATH = ENV['JSON_FILE']
|
||||
BATCH_SIZE ||= 1000
|
||||
BATCH_SIZE ||= 1000
|
||||
|
||||
def initialize
|
||||
super
|
||||
@@ -59,7 +59,6 @@ class ImportScripts::JsonGeneric < ImportScripts::Base
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
def import_discussions
|
||||
puts "", "Importing discussions"
|
||||
|
||||
@@ -103,6 +102,6 @@ class ImportScripts::JsonGeneric < ImportScripts::Base
|
||||
end
|
||||
end
|
||||
|
||||
if __FILE__==$0
|
||||
if __FILE__ == $0
|
||||
ImportScripts::JsonGeneric.new.perform
|
||||
end
|
||||
|
||||
@@ -3,7 +3,7 @@ require File.expand_path(File.dirname(__FILE__) + "/base.rb")
|
||||
|
||||
class ImportScripts::Kunena < ImportScripts::Base
|
||||
|
||||
KUNENA_DB = "kunena"
|
||||
KUNENA_DB = "kunena"
|
||||
|
||||
def initialize
|
||||
super
|
||||
@@ -38,7 +38,7 @@ class ImportScripts::Kunena < ImportScripts::Base
|
||||
@users = nil
|
||||
|
||||
create_categories(@client.query("SELECT id, parent, name, description, ordering FROM jos_kunena_categories ORDER BY parent, id;")) do |c|
|
||||
h = {id: c['id'], name: c['name'], description: c['description'], position: c['ordering'].to_i}
|
||||
h = { id: c['id'], name: c['name'], description: c['description'], position: c['ordering'].to_i }
|
||||
if c['parent'].to_i > 0
|
||||
h[:parent_category_id] = category_id_from_imported_category_id(c['parent'])
|
||||
end
|
||||
@@ -61,12 +61,12 @@ class ImportScripts::Kunena < ImportScripts::Base
|
||||
puts "fetching Joomla users data from mysql"
|
||||
results = @client.query("SELECT id, username, email, registerDate FROM jos_users;", cache_rows: false)
|
||||
results.each do |u|
|
||||
next unless u['id'].to_i > 0 and u['username'].present? and u['email'].present?
|
||||
username = u['username'].gsub(' ', '_').gsub(/[^A-Za-z0-9_]/, '')[0,User.username_length.end]
|
||||
next unless u['id'].to_i > (0) && u['username'].present? && u['email'].present?
|
||||
username = u['username'].gsub(' ', '_').gsub(/[^A-Za-z0-9_]/, '')[0, User.username_length.end]
|
||||
if username.length < User.username_length.first
|
||||
username = username * User.username_length.first
|
||||
end
|
||||
@users[u['id'].to_i] = {id: u['id'].to_i, username: username, email: u['email'], created_at: u['registerDate']}
|
||||
@users[u['id'].to_i] = { id: u['id'].to_i, username: username, email: u['email'], created_at: u['registerDate'] }
|
||||
end
|
||||
|
||||
puts "fetching Kunena user data from mysql"
|
||||
@@ -109,7 +109,7 @@ class ImportScripts::Kunena < ImportScripts::Base
|
||||
|
||||
break if results.size < 1
|
||||
|
||||
next if all_records_exist? :posts, results.map {|p| p['id'].to_i}
|
||||
next if all_records_exist? :posts, results.map { |p| p['id'].to_i }
|
||||
|
||||
create_posts(results, total: total_count, offset: offset) do |m|
|
||||
skip = false
|
||||
|
||||
@@ -60,7 +60,7 @@ class ImportScripts::Kunena < ImportScripts::Base
|
||||
@users = nil
|
||||
|
||||
create_categories(@client.query("SELECT id, #{PARENT_FIELD} as parent_id, name, description, ordering FROM #{KUNENA_PREFIX}kunena_categories ORDER BY #{PARENT_FIELD}, id;")) do |c|
|
||||
h = {id: c['id'], name: c['name'], description: c['description'], position: c['ordering'].to_i}
|
||||
h = { id: c['id'], name: c['name'], description: c['description'], position: c['ordering'].to_i }
|
||||
if c['parent_id'].to_i > 0
|
||||
h[:parent_category_id] = category_id_from_imported_category_id(c['parent_id'])
|
||||
end
|
||||
@@ -83,12 +83,12 @@ class ImportScripts::Kunena < ImportScripts::Base
puts "fetching Joomla users data from mysql"
results = @client.query("SELECT id, username, email, registerDate FROM #{KUNENA_PREFIX}users;", cache_rows: false)
results.each do |u|
next unless u['id'].to_i > 0 and u['username'].present? and u['email'].present?
username = u['username'].gsub(' ', '_').gsub(/[^A-Za-z0-9_]/, '')[0,User.username_length.end]
next unless u['id'].to_i > (0) && u['username'].present? && u['email'].present?
username = u['username'].gsub(' ', '_').gsub(/[^A-Za-z0-9_]/, '')[0, User.username_length.end]
if username.length < User.username_length.first
username = username * User.username_length.first
end
@users[u['id'].to_i] = {id: u['id'].to_i, username: username, email: u['email'], created_at: u['registerDate']}
@users[u['id'].to_i] = { id: u['id'].to_i, username: username, email: u['email'], created_at: u['registerDate'] }
end

puts "fetching Kunena user data from mysql"
@@ -131,7 +131,7 @@ class ImportScripts::Kunena < ImportScripts::Base

break if results.size < 1

next if all_records_exist? :posts, results.map {|p| p['id'].to_i}
next if all_records_exist? :posts, results.map { |p| p['id'].to_i }

create_posts(results, total: total_count, offset: offset) do |m|
skip = false

@@ -10,8 +10,6 @@
# that was done using import_scripts/support/convert_mysql_xml_to_mysql.rb
#


require 'mysql2'
require 'csv'
require 'reverse_markdown'
@@ -19,7 +17,7 @@ require File.expand_path(File.dirname(__FILE__) + "/base.rb")
require 'htmlentities'

# remove table conversion
[:table,:td,:tr,:th,:thead,:tbody].each do |tag|
[:table, :td, :tr, :th, :thead, :tbody].each do |tag|
ReverseMarkdown::Converters.unregister(tag)
end

@@ -101,7 +99,7 @@ class ImportScripts::Lithium < ImportScripts::Base

break if users.size < 1

next if all_records_exist? :users, users.map {|u| u["id"].to_i}
next if all_records_exist? :users, users.map { |u| u["id"].to_i }

create_users(users, total: user_count, offset: offset) do |user|

@@ -123,7 +121,7 @@ class ImportScripts::Lithium < ImportScripts::Base
end

def unix_time(t)
Time.at(t/1000.0)
Time.at(t / 1000.0)
end

def import_profile_picture(old_user, imported_user)
@@ -191,7 +189,6 @@ class ImportScripts::Lithium < ImportScripts::Base
|
||||
top_level_ids = Set.new
|
||||
child_ids = Set.new
|
||||
|
||||
|
||||
parent = nil
|
||||
CSV.foreach(CATEGORY_CSV) do |row|
|
||||
display_id = row[2].strip
|
||||
@@ -216,7 +213,6 @@ class ImportScripts::Lithium < ImportScripts::Base
|
||||
|
||||
top_level_categories = categories.select { |c| top_level_ids.include? c["display_id"] }
|
||||
|
||||
|
||||
create_categories(top_level_categories) do |category|
|
||||
info = category_info[category["display_id"]]
|
||||
info[:id] = category["node_id"]
|
||||
@@ -228,7 +224,6 @@ class ImportScripts::Lithium < ImportScripts::Base
|
||||
}
|
||||
end
|
||||
|
||||
|
||||
puts "", "importing children categories..."
|
||||
|
||||
children_categories = categories.select { |c| child_ids.include? c["display_id"] }
|
||||
@@ -246,7 +241,7 @@ class ImportScripts::Lithium < ImportScripts::Base
|
||||
end
|
||||
|
||||
puts "", "securing categories"
|
||||
category_info.each do |_,info|
|
||||
category_info.each do |_, info|
|
||||
if info[:secure]
|
||||
id = category_id_from_imported_category_id(info[:id])
|
||||
if id
|
||||
@@ -278,7 +273,7 @@ class ImportScripts::Lithium < ImportScripts::Base
|
||||
|
||||
break if topics.size < 1
|
||||
|
||||
next if all_records_exist? :posts, topics.map {|topic| "#{topic["node_id"]} #{topic["id"]}"}
|
||||
next if all_records_exist? :posts, topics.map { |topic| "#{topic["node_id"]} #{topic["id"]}" }
|
||||
|
||||
create_posts(topics, total: topic_count, offset: offset) do |topic|
|
||||
|
||||
@@ -295,7 +290,7 @@ class ImportScripts::Lithium < ImportScripts::Base
|
||||
raw: raw,
|
||||
created_at: unix_time(topic["post_date"]),
|
||||
views: topic["views"],
|
||||
custom_fields: {import_unique_id: topic["unique_id"]},
|
||||
custom_fields: { import_unique_id: topic["unique_id"] },
|
||||
import_mode: true
|
||||
}
|
||||
else
|
||||
@@ -326,7 +321,7 @@ class ImportScripts::Lithium < ImportScripts::Base
|
||||
|
||||
break if posts.size < 1
|
||||
|
||||
next if all_records_exist? :posts, posts.map {|post| "#{post["node_id"]} #{post["root_id"]} #{post["id"]}"}
|
||||
next if all_records_exist? :posts, posts.map { |post| "#{post["node_id"]} #{post["root_id"]} #{post["id"]}" }
|
||||
|
||||
create_posts(posts, total: post_count, offset: offset) do |post|
|
||||
raw = post["raw"]
|
||||
@@ -340,7 +335,7 @@ class ImportScripts::Lithium < ImportScripts::Base
|
||||
topic_id: topic[:topic_id],
|
||||
raw: raw,
|
||||
created_at: unix_time(post["post_date"]),
|
||||
custom_fields: {import_unique_id: post["unique_id"]},
|
||||
custom_fields: { import_unique_id: post["unique_id"] },
|
||||
import_mode: true
|
||||
}
|
||||
|
||||
@@ -365,7 +360,7 @@ class ImportScripts::Lithium < ImportScripts::Base
|
||||
"smileysurprised" => "dizzy_face",
|
||||
"smileytongue" => "stuck_out_tongue",
|
||||
"smileyvery-happy" => "grin",
|
||||
"smileywink" => "wink",
|
||||
"smileywink" => "wink",
|
||||
"smileyfrustrated" => "confounded",
|
||||
"smileyembarrassed" => "flushed",
|
||||
"smileylol" => "laughing",
|
||||
@@ -382,7 +377,6 @@ class ImportScripts::Lithium < ImportScripts::Base
|
||||
"catlol" => "joy_cat"
|
||||
}
|
||||
|
||||
|
||||
def import_likes
|
||||
puts "\nimporting likes..."
|
||||
|
||||
@@ -395,8 +389,6 @@ class ImportScripts::Lithium < ImportScripts::Base
|
||||
existing_map[import_id] = post_id
|
||||
end
|
||||
|
||||
|
||||
|
||||
puts "loading data into temp table"
|
||||
PostAction.exec_sql("create temp table like_data(user_id int, post_id int, created_at timestamp without time zone)")
|
||||
PostAction.transaction do
|
||||
@@ -436,7 +428,6 @@ class ImportScripts::Lithium < ImportScripts::Base
|
||||
WHERE ua.id IS NULL AND pa.post_action_type_id = 2
|
||||
SQL
|
||||
|
||||
|
||||
# reverse action
|
||||
UserAction.exec_sql <<-SQL
|
||||
INSERT INTO user_actions (user_id, action_type, target_topic_id, target_post_id, acting_user_id, created_at, updated_at)
|
||||
@@ -490,7 +481,6 @@ class ImportScripts::Lithium < ImportScripts::Base
|
||||
existing_map[import_id] = post_id
|
||||
end
|
||||
|
||||
|
||||
puts "loading data into temp table"
|
||||
PostAction.exec_sql("create temp table accepted_data(post_id int primary key)")
|
||||
PostAction.transaction do
|
||||
@@ -507,7 +497,6 @@ class ImportScripts::Lithium < ImportScripts::Base
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
puts "deleting dupe answers"
|
||||
PostAction.exec_sql <<-SQL
|
||||
DELETE FROM accepted_data WHERE post_id NOT IN (
|
||||
@@ -553,7 +542,7 @@ class ImportScripts::Lithium < ImportScripts::Base
|
||||
|
||||
users = {}
|
||||
|
||||
[inbox,outbox].each do |r|
|
||||
[inbox, outbox].each do |r|
|
||||
r.each do |row|
|
||||
ary = (users[row["note_id"]] ||= Set.new)
|
||||
user_id = user_id_from_imported_user_id(row["user_id"])
|
||||
@@ -567,7 +556,7 @@ class ImportScripts::Lithium < ImportScripts::Base
|
||||
subject_to_first_note = {}
|
||||
|
||||
mysql_query("SELECT note_id, subject, sender_user_id FROM tblia_notes_content order by note_id").each do |row|
|
||||
user_id = user_id_from_imported_user_id(row["sender_user_id"])
|
||||
user_id = user_id_from_imported_user_id(row["sender_user_id"])
|
||||
ary = (users[row["note_id"]] ||= Set.new)
|
||||
if user_id
|
||||
ary << user_id
|
||||
@@ -581,7 +570,7 @@ class ImportScripts::Lithium < ImportScripts::Base
|
||||
|
||||
puts "Loading user_id to username map"
|
||||
user_map = {}
|
||||
User.pluck(:id, :username).each do |id,username|
|
||||
User.pluck(:id, :username).each do |id, username|
|
||||
user_map[id] = username
|
||||
end
|
||||
|
||||
@@ -596,17 +585,16 @@ class ImportScripts::Lithium < ImportScripts::Base
|
||||
OFFSET #{offset}
|
||||
SQL
|
||||
|
||||
|
||||
break if topics.size < 1
|
||||
|
||||
next if all_records_exist? :posts, topics.map {|topic| "pm_#{topic["note_id"]}"}
|
||||
next if all_records_exist? :posts, topics.map { |topic| "pm_#{topic["note_id"]}" }
|
||||
|
||||
create_posts(topics, total: topic_count, offset: offset) do |topic|
|
||||
|
||||
user_id = user_id_from_imported_user_id(topic["sender_user_id"]) || Discourse::SYSTEM_USER_ID
|
||||
participants = users[topic["note_id"]]
|
||||
|
||||
usernames = (participants - [user_id]).map{|id| user_map[id]}
|
||||
usernames = (participants - [user_id]).map { |id| user_map[id] }
|
||||
|
||||
subject = topic["subject"]
|
||||
topic_id = nil
|
||||
@@ -646,7 +634,6 @@ class ImportScripts::Lithium < ImportScripts::Base
|
||||
|
||||
def close_topics
|
||||
|
||||
|
||||
puts "\nclosing closed topics..."
|
||||
|
||||
sql = "select unique_id post_id from message2 where root_id = id AND (attributes & 0x0002 ) != 0;"
|
||||
@@ -658,8 +645,8 @@ class ImportScripts::Lithium < ImportScripts::Base
|
||||
existing_map[import_id.to_i] = post_id.to_i
|
||||
end
|
||||
|
||||
results.map{|r| r["post_id"]}.each_slice(500) do |ids|
|
||||
mapped = ids.map{|id| existing_map[id]}.compact
|
||||
results.map { |r| r["post_id"] }.each_slice(500) do |ids|
|
||||
mapped = ids.map { |id| existing_map[id] }.compact
|
||||
Topic.exec_sql("
|
||||
UPDATE topics SET closed = true
|
||||
WHERE id IN (SELECT topic_id FROM posts where id in (:ids))
|
||||
@@ -668,7 +655,6 @@ class ImportScripts::Lithium < ImportScripts::Base
|
||||
|
||||
end
|
||||
|
||||
|
||||
def create_permalinks
|
||||
puts "Creating permalinks"
|
||||
|
||||
@@ -739,7 +725,6 @@ SQL
|
||||
def post_process_posts
|
||||
puts "", "Postprocessing posts..."
|
||||
|
||||
|
||||
current = 0
|
||||
max = Post.count
|
||||
|
||||
@@ -766,7 +751,6 @@ SQL
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
def postprocess_post_raw(raw, user_id)
|
||||
|
||||
doc = Nokogiri::HTML.fragment(raw)
|
||||
@@ -827,7 +811,7 @@ SQL
|
||||
":#{SMILEY_SUBS[$1] || $1}:"
|
||||
end
|
||||
# nbsp central
|
||||
raw.gsub!(/([a-zA-Z0-9]) ([a-zA-Z0-9])/,"\\1 \\2")
|
||||
raw.gsub!(/([a-zA-Z0-9]) ([a-zA-Z0-9])/, "\\1 \\2")
|
||||
raw
|
||||
end
|
||||
|
||||
|
||||
@@ -14,4 +14,3 @@ module ImportScripts
|
||||
Importer.new(@settings).perform
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
@@ -46,7 +46,7 @@ class ImportScripts::Mbox < ImportScripts::Base
|
||||
exit
|
||||
end
|
||||
|
||||
validates_format_of :email, :with => /\A([^@\s]+)@((?:[-a-z0-9]+\.)+[a-z]{2,})\Z/i, :on => :create
|
||||
validates_format_of :email, with: /\A([^@\s]+)@((?:[-a-z0-9]+\.)+[a-z]{2,})\Z/i, on: :create
|
||||
|
||||
def execute
|
||||
import_categories
|
||||
@@ -63,7 +63,7 @@ class ImportScripts::Mbox < ImportScripts::Base
|
||||
mappings = CATEGORY_MAPPINGS.values - ['uncategorized']
|
||||
|
||||
create_categories(mappings) do |c|
|
||||
{id: c, name: c}
|
||||
{ id: c, name: c }
|
||||
end
|
||||
end
|
||||
|
||||
@@ -109,7 +109,7 @@ class ImportScripts::Mbox < ImportScripts::Base
|
||||
each_line(f) do |line|
|
||||
line = line.scrub
|
||||
if line =~ SPLIT_AT
|
||||
p message_count += 1
|
||||
p message_count += 1
|
||||
if !msg.empty?
|
||||
mail = Mail.read_from_string(msg)
|
||||
yield mail, f
|
||||
@@ -163,7 +163,7 @@ p message_count += 1
|
||||
|
||||
puts "#{not_found.size} records couldn't be associated with parents"
|
||||
if not_found.present?
|
||||
db.execute "UPDATE emails SET reply_to = NULL WHERE msg_id IN (#{not_found.map {|nf| "'#{nf}'"}.join(',')})"
|
||||
db.execute "UPDATE emails SET reply_to = NULL WHERE msg_id IN (#{not_found.map { |nf| "'#{nf}'" }.join(',')})"
|
||||
end
|
||||
|
||||
dupe_titles = db.execute "SELECT title, COUNT(*) FROM emails GROUP BY title HAVING count(*) > 1"
|
||||
@@ -197,7 +197,7 @@ p message_count += 1
|
||||
from_email.gsub!(/ /, '')
|
||||
end
|
||||
end
|
||||
p end
|
||||
end
|
||||
|
||||
display_names = from.try(:display_names)
|
||||
if display_names.present?
|
||||
@@ -308,7 +308,7 @@ p end
|
||||
title.strip
|
||||
|
||||
#In case of mixed localized prefixes there could be many of them if the mail client didn't strip the localized ones
|
||||
if original_length > title.length
|
||||
if original_length > title.length
|
||||
clean_title(title)
|
||||
else
|
||||
title
|
||||
@@ -331,9 +331,9 @@ p end
|
||||
total_count = all_users.size
|
||||
|
||||
batches(BATCH_SIZE) do |offset|
|
||||
users = all_users[offset..offset+BATCH_SIZE-1]
|
||||
users = all_users[offset..offset + BATCH_SIZE - 1]
|
||||
break if users.nil?
|
||||
next if all_records_exist? :users, users.map {|u| u[1]}
|
||||
next if all_records_exist? :users, users.map { |u| u[1] }
|
||||
|
||||
create_users(users, total: total_count, offset: offset) do |u|
|
||||
{
|
||||
@@ -374,7 +374,7 @@ p end
|
||||
new_raw = p.raw.dup
|
||||
new_raw = new_raw.gsub!(/#{Regexp.escape(find)}/i, replace) || new_raw
|
||||
if new_raw != p.raw
|
||||
p.revise(Discourse.system_user, { raw: new_raw }, { bypass_bump: true })
|
||||
p.revise(Discourse.system_user, { raw: new_raw }, bypass_bump: true)
|
||||
print_warning "\nReplaced #{find} with #{replace} in topic #{p.topic_id}"
|
||||
end
|
||||
end
|
||||
@@ -411,10 +411,10 @@ p end
|
||||
topic_count = all_topics.size
|
||||
|
||||
batches(BATCH_SIZE) do |offset|
|
||||
topics = all_topics[offset..offset+BATCH_SIZE-1]
|
||||
topics = all_topics[offset..offset + BATCH_SIZE - 1]
|
||||
break if topics.nil?
|
||||
|
||||
next if all_records_exist? :posts, topics.map {|t| t[0]}
|
||||
next if all_records_exist? :posts, topics.map { |t| t[0] }
|
||||
|
||||
create_posts(topics, total: topic_count, offset: offset) do |t|
|
||||
raw_email = t[5]
|
||||
@@ -454,7 +454,7 @@ p end
|
||||
raw = clean_raw(raw)
|
||||
raw = raw.dup.to_s
|
||||
raw.gsub!(/#{from_email}/, "@#{username}")
|
||||
cleaned_email = from_email.dup.sub(/@/,' at ')
|
||||
cleaned_email = from_email.dup.sub(/@/, ' at ')
|
||||
raw.gsub!(/#{cleaned_email}/, "@#{username}")
|
||||
{ id: t[0],
|
||||
title: clean_title(title),
|
||||
@@ -490,11 +490,11 @@ p end
|
||||
puts "Replies: #{post_count}"
|
||||
|
||||
batches(BATCH_SIZE) do |offset|
|
||||
posts = replies[offset..offset+BATCH_SIZE-1]
|
||||
posts = replies[offset..offset + BATCH_SIZE - 1]
|
||||
break if posts.nil?
|
||||
break if posts.count < 1
|
||||
|
||||
next if all_records_exist? :posts, posts.map {|p| p[0]}
|
||||
next if all_records_exist? :posts, posts.map { |p| p[0] }
|
||||
|
||||
create_posts(posts, total: post_count, offset: offset) do |p|
|
||||
parent_id = p[6]
|
||||
@@ -521,7 +521,7 @@ p end
|
||||
user_id = user_id_from_imported_user_id(from_email) || Discourse::SYSTEM_USER_ID
|
||||
raw = clean_raw(raw).to_s
|
||||
raw.gsub!(/#{from_email}/, "@#{username}")
|
||||
cleaned_email = from_email.dup.sub(/@/,' at ')
|
||||
cleaned_email = from_email.dup.sub(/@/, ' at ')
|
||||
raw.gsub!(/#{cleaned_email}/, "@#{username}")
|
||||
# import the attachments
|
||||
mail.attachments.each do |attachment|
|
||||
|
||||
@@ -59,7 +59,6 @@ class ImportScripts::Muut < ImportScripts::Base
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
def import_categories
|
||||
puts "", "Importing categories"
|
||||
|
||||
@@ -72,7 +71,6 @@ class ImportScripts::Muut < ImportScripts::Base
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
def import_discussions
|
||||
puts "", "Importing discussions"
|
||||
|
||||
@@ -81,7 +79,6 @@ class ImportScripts::Muut < ImportScripts::Base
|
||||
|
||||
@imported_json['categories'].each do |category|
|
||||
|
||||
|
||||
@imported_json['threads'][category['path']].each do |thread|
|
||||
|
||||
next if thread["seed"]["key"] == "skip-this-topic"
|
||||
@@ -96,7 +93,7 @@ class ImportScripts::Muut < ImportScripts::Base
|
||||
end
|
||||
|
||||
# update user display name
|
||||
if thread["seed"]["author"] && thread["seed"]["author"]["displayname"] != "" && mapped[:user_id] != -1
|
||||
if thread["seed"]["author"] && thread["seed"]["author"]["displayname"] != "" && mapped[:user_id] != -1
|
||||
user = User.find_by(id: mapped[:user_id])
|
||||
if user
|
||||
user.name = thread["seed"]["author"]["displayname"]
|
||||
@@ -181,6 +178,6 @@ class ImportScripts::Muut < ImportScripts::Base
|
||||
|
||||
end
|
||||
|
||||
if __FILE__==$0
|
||||
if __FILE__ == $0
|
||||
ImportScripts::Muut.new.perform
|
||||
end
|
||||
|
||||
@@ -37,7 +37,7 @@ class ImportScripts::MyBB < ImportScripts::Base
|
||||
end
|
||||
|
||||
def execute
|
||||
SiteSetting.disable_emails=true
|
||||
SiteSetting.disable_emails = true
|
||||
import_users
|
||||
import_categories
|
||||
import_posts
|
||||
@@ -66,7 +66,7 @@ class ImportScripts::MyBB < ImportScripts::Base
|
||||
|
||||
break if results.size < 1
|
||||
|
||||
next if all_records_exist? :users, results.map {|u| u["id"].to_i}
|
||||
next if all_records_exist? :users, results.map { |u| u["id"].to_i }
|
||||
|
||||
create_users(results, total: total_count, offset: offset) do |user|
|
||||
{ id: user['id'],
|
||||
@@ -87,7 +87,7 @@ class ImportScripts::MyBB < ImportScripts::Base
|
||||
")
|
||||
|
||||
create_categories(results) do |row|
|
||||
h = {id: row['id'], name: CGI.unescapeHTML(row['name']), description: CGI.unescapeHTML(row['description'])}
|
||||
h = { id: row['id'], name: CGI.unescapeHTML(row['name']), description: CGI.unescapeHTML(row['description']) }
|
||||
if row['parent_id'].to_i > 0
|
||||
h[:parent_category_id] = category_id_from_imported_category_id(row['parent_id'])
|
||||
end
|
||||
@@ -120,7 +120,7 @@ class ImportScripts::MyBB < ImportScripts::Base
|
||||
|
||||
break if results.size < 1
|
||||
|
||||
next if all_records_exist? :posts, results.map {|m| m['id'].to_i}
|
||||
next if all_records_exist? :posts, results.map { |m| m['id'].to_i }
|
||||
|
||||
create_posts(results, total: total_count, offset: offset) do |m|
|
||||
skip = false
|
||||
@@ -235,7 +235,7 @@ class ImportScripts::MyBB < ImportScripts::Base
|
||||
def create_permalinks
|
||||
puts '', 'Creating redirects...', ''
|
||||
|
||||
SiteSetting.permalink_normalizations= '/(\\w+)-(\\d+)[-.].*/\\1-\\2.html'
|
||||
SiteSetting.permalink_normalizations = '/(\\w+)-(\\d+)[-.].*/\\1-\\2.html'
|
||||
puts '', 'Users...', ''
|
||||
total_users = User.count
|
||||
start_time = Time.now
|
||||
@@ -244,7 +244,7 @@ class ImportScripts::MyBB < ImportScripts::Base
|
||||
ucf = u.custom_fields
|
||||
count += 1
|
||||
if ucf && ucf["import_id"] && ucf["import_username"]
|
||||
Permalink.create(url: "#{BASE}/user-#{ucf['import_id']}.html", external_url: "/u/#{u.username}" ) rescue nil
|
||||
Permalink.create(url: "#{BASE}/user-#{ucf['import_id']}.html", external_url: "/u/#{u.username}") rescue nil
|
||||
end
|
||||
print_status(count, total_users, start_time)
|
||||
end
|
||||
@@ -260,7 +260,7 @@ class ImportScripts::MyBB < ImportScripts::Base
|
||||
unless QUIET
|
||||
puts ("forum-#{id}.html --> /c/#{cat.id}")
|
||||
end
|
||||
Permalink.create( url: "#{BASE}/forum-#{id}.html", category_id: cat.id ) rescue nil
|
||||
Permalink.create(url: "#{BASE}/forum-#{id}.html", category_id: cat.id) rescue nil
|
||||
print_status(count, total_categories, start_time)
|
||||
end
|
||||
|
||||
@@ -286,7 +286,7 @@ class ImportScripts::MyBB < ImportScripts::Base
|
||||
count += 1
|
||||
if topic = topic_lookup_from_imported_post_id(post['id'])
|
||||
id = post['topic_id']
|
||||
Permalink.create( url: "#{BASE}/thread-#{id}.html", topic_id: topic[:topic_id] ) rescue nil
|
||||
Permalink.create(url: "#{BASE}/thread-#{id}.html", topic_id: topic[:topic_id]) rescue nil
|
||||
unless QUIET
|
||||
puts ("#{BASE}/thread-#{id}.html --> http://localhost:3000/t/#{topic[:topic_id]}")
|
||||
end
|
||||
@@ -296,8 +296,6 @@ class ImportScripts::MyBB < ImportScripts::Base
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
|
||||
def mysql_query(sql)
|
||||
@client.query(sql, cache_rows: false)
|
||||
end
|
||||
|
||||
@@ -2,7 +2,6 @@ require "mysql2"
|
||||
require File.expand_path(File.dirname(__FILE__) + "/base.rb")
|
||||
require 'htmlentities'
|
||||
|
||||
|
||||
# Before running this script, paste these lines into your shell,
|
||||
# then use arrow keys to edit the values
|
||||
=begin
|
||||
@@ -16,7 +15,6 @@ export IMAGE_BASE="http://www.example.com/forum"
|
||||
export BASE="forum"
|
||||
=end
|
||||
|
||||
|
||||
class ImportScripts::MylittleforumSQL < ImportScripts::Base
|
||||
|
||||
DB_HOST ||= ENV['DB_HOST'] || "localhost"
|
||||
@@ -37,10 +35,9 @@ class ImportScripts::MylittleforumSQL < ImportScripts::Base
|
||||
# Site settings
|
||||
SiteSetting.disable_emails = true
|
||||
if FORCE_HOSTNAME
|
||||
SiteSetting.force_hostname=FORCE_HOSTNAME
|
||||
SiteSetting.force_hostname = FORCE_HOSTNAME
|
||||
end
|
||||
|
||||
|
||||
def initialize
|
||||
|
||||
if IMPORT_AFTER > "1970-01-01"
|
||||
@@ -57,7 +54,7 @@ class ImportScripts::MylittleforumSQL < ImportScripts::Base
|
||||
database: DB_NAME
|
||||
)
|
||||
rescue Exception => e
|
||||
puts '='*50
|
||||
puts '=' * 50
|
||||
puts e.message
|
||||
puts <<EOM
|
||||
Cannot log in to database.
|
||||
@@ -123,7 +120,7 @@ EOM
|
||||
|
||||
break if results.size < 1
|
||||
|
||||
next if all_records_exist? :users, results.map {|u| u['UserID'].to_i}
|
||||
next if all_records_exist? :users, results.map { |u| u['UserID'].to_i }
|
||||
|
||||
create_users(results, total: total_count, offset: offset) do |user|
|
||||
next if user['Email'].blank?
|
||||
@@ -151,15 +148,15 @@ EOM
|
||||
|
||||
def fix_username(username)
|
||||
olduser = username.dup
|
||||
username.gsub!(/Dr\. /,"Dr") # no &
|
||||
username.gsub!(/[ +!\/,*()?]/,"_") # can't have these
|
||||
username.gsub!(/&/,"_and_") # no &
|
||||
username.gsub!(/@/,"_at_") # no @
|
||||
username.gsub!(/#/,"_hash_") # no &
|
||||
username.gsub!(/\'/,"") # seriously?
|
||||
username.gsub!(/[._]+/,"_") # can't have 2 special in a row
|
||||
username.gsub!(/_+/,"_") # could result in dupes, but wtf?
|
||||
username.gsub!(/_$/,"") # could result in dupes, but wtf?
|
||||
username.gsub!(/Dr\. /, "Dr") # no &
|
||||
username.gsub!(/[ +!\/,*()?]/, "_") # can't have these
|
||||
username.gsub!(/&/, "_and_") # no &
|
||||
username.gsub!(/@/, "_at_") # no @
|
||||
username.gsub!(/#/, "_hash_") # no &
|
||||
username.gsub!(/\'/, "") # seriously?
|
||||
username.gsub!(/[._]+/, "_") # can't have 2 special in a row
|
||||
username.gsub!(/_+/, "_") # could result in dupes, but wtf?
|
||||
username.gsub!(/_$/, "") # could result in dupes, but wtf?
|
||||
if olduser != username
|
||||
print_warning ("#{olduser} --> #{username}")
|
||||
end
|
||||
@@ -210,7 +207,7 @@ EOM
|
||||
OFFSET #{offset};")
|
||||
|
||||
break if discussions.size < 1
|
||||
next if all_records_exist? :posts, discussions.map {|t| "discussion#" + t['DiscussionID'].to_s}
|
||||
next if all_records_exist? :posts, discussions.map { |t| "discussion#" + t['DiscussionID'].to_s }
|
||||
|
||||
create_posts(discussions, total: total_count, offset: offset) do |discussion|
|
||||
|
||||
@@ -226,7 +223,7 @@ EOM
|
||||
{
|
||||
id: "discussion#" + discussion['DiscussionID'].to_s,
|
||||
user_id: user_id_from_imported_user_id(discussion['InsertUserID']) || Discourse::SYSTEM_USER_ID,
|
||||
title: discussion['Name'].gsub('\\"','"'),
|
||||
title: discussion['Name'].gsub('\\"', '"'),
|
||||
category: category_id_from_imported_category_id(discussion['CategoryID']),
|
||||
raw: raw,
|
||||
created_at: Time.zone.at(discussion['DateInserted']),
|
||||
@@ -260,7 +257,7 @@ EOM
|
||||
OFFSET #{offset};")
|
||||
|
||||
break if comments.size < 1
|
||||
next if all_records_exist? :posts, comments.map {|comment| "comment#" + comment['CommentID'].to_s}
|
||||
next if all_records_exist? :posts, comments.map { |comment| "comment#" + comment['CommentID'].to_s }
|
||||
|
||||
create_posts(comments, total: total_count, offset: offset) do |comment|
|
||||
next unless t = topic_lookup_from_imported_post_id("discussion#" + comment['DiscussionID'].to_s)
|
||||
@@ -286,9 +283,9 @@ EOM
|
||||
youtube_cooked = clean_up(youtube_raw.dup.to_s)
|
||||
# get just src from <iframe> and put on a line by itself
|
||||
re = /<iframe.+?src="(\S+?)".+?<\/iframe>/mix
|
||||
youtube_cooked.gsub!(re) {"\n#{$1}\n"}
|
||||
youtube_cooked.gsub!(re) { "\n#{$1}\n" }
|
||||
re = /<object.+?src="(\S+?)".+?<\/object>/mix
|
||||
youtube_cooked.gsub!(re) {"\n#{$1}\n"}
|
||||
youtube_cooked.gsub!(re) { "\n#{$1}\n" }
|
||||
youtube_cooked.gsub!(/^\/\//, "https://") # make sure it has a protocol
|
||||
unless /http/.match(youtube_cooked) # handle case of only youtube object number
|
||||
if youtube_cooked.length < 8 || /[<>=]/.match(youtube_cooked)
|
||||
@@ -298,7 +295,7 @@ EOM
|
||||
youtube_cooked = 'https://www.youtube.com/watch?v=' + youtube_cooked
|
||||
end
|
||||
end
|
||||
print_warning("#{'-'*40}\nBefore: #{youtube_raw}\nAfter: #{youtube_cooked}") unless QUIET
|
||||
print_warning("#{'-' * 40}\nBefore: #{youtube_raw}\nAfter: #{youtube_cooked}") unless QUIET
|
||||
|
||||
youtube_cooked
|
||||
end
|
||||
@@ -310,8 +307,8 @@ EOM
|
||||
raw = @htmlentities.decode(raw)
|
||||
|
||||
# don't \ quotes
|
||||
raw = raw.gsub('\\"','"')
|
||||
raw = raw.gsub("\\'","'")
|
||||
raw = raw.gsub('\\"', '"')
|
||||
raw = raw.gsub("\\'", "'")
|
||||
|
||||
raw = raw.gsub(/\[b\]/i, "<strong>")
|
||||
raw = raw.gsub(/\[\/b\]/i, "</strong>")
|
||||
@@ -322,12 +319,12 @@ EOM
|
||||
raw = raw.gsub(/\[u\]/i, "<em>")
|
||||
raw = raw.gsub(/\[\/u\]/i, "</em>")
|
||||
|
||||
raw = raw.gsub(/\[url\](\S+)\[\/url\]/im) { "#{$1}"}
|
||||
raw = raw.gsub(/\[link\](\S+)\[\/link\]/im) { "#{$1}"}
|
||||
raw = raw.gsub(/\[url\](\S+)\[\/url\]/im) { "#{$1}" }
|
||||
raw = raw.gsub(/\[link\](\S+)\[\/link\]/im) { "#{$1}" }
|
||||
|
||||
# URL & LINK with text
|
||||
raw = raw.gsub(/\[url=(\S+?)\](.*?)\[\/url\]/im) { "<a href=\"#{$1}\">#{$2}</a>"}
|
||||
raw = raw.gsub(/\[link=(\S+?)\](.*?)\[\/link\]/im) { "<a href=\"#{$1}\">#{$2}</a>"}
|
||||
raw = raw.gsub(/\[url=(\S+?)\](.*?)\[\/url\]/im) { "<a href=\"#{$1}\">#{$2}</a>" }
|
||||
raw = raw.gsub(/\[link=(\S+?)\](.*?)\[\/link\]/im) { "<a href=\"#{$1}\">#{$2}</a>" }
|
||||
|
||||
# remote images
|
||||
raw = raw.gsub(/\[img\](https?:.+?)\[\/img\]/im) { "<img src=\"#{$1}\">" }
|
||||
@@ -347,22 +344,22 @@ EOM
|
||||
raw.gsub(/\[postedby\](.+?)\[b\](.+?)\[\/b\]\[\/postedby\]/i) { "#{$1}@#{$2}" }
|
||||
|
||||
# CODE (not tested)
|
||||
raw = raw.gsub(/\[code\](\S+)\[\/code\]/im) { "```\n#{$1}\n```"}
|
||||
raw = raw.gsub(/\[pre\](\S+)\[\/pre\]/im) { "```\n#{$1}\n```"}
|
||||
raw = raw.gsub(/\[code\](\S+)\[\/code\]/im) { "```\n#{$1}\n```" }
|
||||
raw = raw.gsub(/\[pre\](\S+)\[\/pre\]/im) { "```\n#{$1}\n```" }
|
||||
|
||||
raw = raw.gsub(/(https:\/\/youtu\S+)/i) { "\n#{$1}\n" } #youtube links on line by themselves
|
||||
|
||||
# no center
|
||||
raw = raw.gsub(/\[\/?center\]/i,"")
|
||||
raw = raw.gsub(/\[\/?center\]/i, "")
|
||||
|
||||
# no size
|
||||
raw = raw.gsub(/\[\/?size.*?\]/i,"")
|
||||
raw = raw.gsub(/\[\/?size.*?\]/i, "")
|
||||
|
||||
### FROM VANILLA:
|
||||
|
||||
# fix whitespaces
|
||||
raw = raw.gsub(/(\\r)?\\n/, "\n")
|
||||
.gsub("\\t", "\t")
|
||||
.gsub("\\t", "\t")
|
||||
|
||||
unless CONVERT_HTML
|
||||
# replace all chevrons with HTML entities
|
||||
@@ -370,12 +367,12 @@ EOM
|
||||
# - AFTER all the "code" processing
|
||||
# - BEFORE the "quote" processing
|
||||
raw = raw.gsub(/`([^`]+)`/im) { "`" + $1.gsub("<", "\u2603") + "`" }
|
||||
.gsub("<", "<")
|
||||
.gsub("\u2603", "<")
|
||||
.gsub("<", "<")
|
||||
.gsub("\u2603", "<")
|
||||
|
||||
raw = raw.gsub(/`([^`]+)`/im) { "`" + $1.gsub(">", "\u2603") + "`" }
|
||||
.gsub(">", ">")
|
||||
.gsub("\u2603", ">")
|
||||
.gsub(">", ">")
|
||||
.gsub("\u2603", ">")
|
||||
end
|
||||
|
||||
# Remove the color tag
|
||||
@@ -402,7 +399,7 @@ EOM
|
||||
User.find_each do |u|
|
||||
ucf = u.custom_fields
|
||||
if ucf && ucf["import_id"] && ucf["import_username"]
|
||||
Permalink.create( url: "#{BASE}/user-id-#{ucf['import_id']}.html", external_url: "/u/#{u.username}" ) rescue nil
|
||||
Permalink.create(url: "#{BASE}/user-id-#{ucf['import_id']}.html", external_url: "/u/#{u.username}") rescue nil
|
||||
print '.'
|
||||
end
|
||||
end
|
||||
@@ -414,12 +411,12 @@ EOM
|
||||
topic = post.topic
|
||||
id = pcf["import_id"].split('#').last
|
||||
if post.post_number == 1
|
||||
Permalink.create( url: "#{BASE}/forum_entry-id-#{id}.html", topic_id: topic.id ) rescue nil
|
||||
Permalink.create(url: "#{BASE}/forum_entry-id-#{id}.html", topic_id: topic.id) rescue nil
|
||||
unless QUIET
|
||||
print_warning("forum_entry-id-#{id}.html --> http://localhost:3000/t/#{topic.id}")
|
||||
end
|
||||
else
|
||||
Permalink.create( url: "#{BASE}/forum_entry-id-#{id}.html", post_id: post.id ) rescue nil
|
||||
Permalink.create(url: "#{BASE}/forum_entry-id-#{id}.html", post_id: post.id) rescue nil
|
||||
unless QUIET
|
||||
print_warning("forum_entry-id-#{id}.html --> http://localhost:3000/t/#{topic.id}/#{post.id}")
|
||||
end
|
||||
@@ -435,7 +432,7 @@ EOM
|
||||
unless QUIET
|
||||
print_warning("forum-category-#{id}.html --> /t/#{cat.id}")
|
||||
end
|
||||
Permalink.create( url: "#{BASE}/forum-category-#{id}.html", category_id: cat.id ) rescue nil
|
||||
Permalink.create(url: "#{BASE}/forum-category-#{id}.html", category_id: cat.id) rescue nil
|
||||
print '.'
|
||||
end
|
||||
end
|
||||
@@ -444,8 +441,6 @@ EOM
|
||||
$stderr.puts "#{message}"
|
||||
end
|
||||
|
||||
|
||||
|
||||
end
|
||||
|
||||
ImportScripts::MylittleforumSQL.new.perform
|
||||
|
||||
@@ -3,12 +3,12 @@ require 'pg'
|
||||
require_relative 'base/uploader'
|
||||
|
||||
=begin
|
||||
if you want to create mock users for posts made by anonymous participants,
|
||||
if you want to create mock users for posts made by anonymous participants,
|
||||
run the following SQL prior to importing.
|
||||
|
||||
-- first attribute any anonymous posts to existing users (if any)
|
||||
|
||||
UPDATE node
|
||||
UPDATE node
|
||||
SET owner_id = p.user_id, anonymous_name = NULL
|
||||
FROM ( SELECT lower(name) AS name, user_id FROM user_ ) p
|
||||
WHERE p.name = lower(node.anonymous_name)
|
||||
@@ -25,7 +25,7 @@ INSERT INTO user_ (email, name, joined, registered)
|
||||
-- then move these posts to the new users
|
||||
-- (yes, this is the same query as the first one indeed)
|
||||
|
||||
UPDATE node
|
||||
UPDATE node
|
||||
SET owner_id = p.user_id, anonymous_name = NULL
|
||||
FROM ( SELECT lower(name) AS name, user_id FROM user_ ) p
|
||||
WHERE p.name = lower(node.anonymous_name)
|
||||
@@ -33,7 +33,6 @@ WHERE p.name = lower(node.anonymous_name)
|
||||
|
||||
=end
|
||||
|
||||
|
||||
class ImportScripts::Nabble < ImportScripts::Base
|
||||
# CHANGE THESE BEFORE RUNNING THE IMPORTER
|
||||
|
||||
@@ -74,7 +73,7 @@ class ImportScripts::Nabble < ImportScripts::Base
|
||||
|
||||
break if users.ntuples() < 1
|
||||
|
||||
next if all_records_exist? :users, users.map {|u| u["user_id"].to_i}
|
||||
next if all_records_exist? :users, users.map { |u| u["user_id"].to_i }
|
||||
|
||||
create_users(users, total: total_count, offset: offset) do |row|
|
||||
{
|
||||
@@ -144,7 +143,7 @@ class ImportScripts::Nabble < ImportScripts::Base
|
||||
|
||||
break if topics.ntuples() < 1
|
||||
|
||||
next if all_records_exist? :posts, topics.map {|t| t['node_id'].to_i}
|
||||
next if all_records_exist? :posts, topics.map { |t| t['node_id'].to_i }
|
||||
|
||||
create_posts(topics, total: topic_count, offset: offset) do |t|
|
||||
raw = body_from(t)
|
||||
@@ -173,7 +172,7 @@ class ImportScripts::Nabble < ImportScripts::Base
|
||||
txt.gsub! /\<quote author="(.*?)"\>/, '[quote="\1"]'
|
||||
txt.gsub! /\<\/quote\>/, '[/quote]'
|
||||
txt.gsub!(/\<raw\>(.*?)\<\/raw\>/m) do |match|
|
||||
c = Regexp.last_match[1].indent(4);
|
||||
c = Regexp.last_match[1].indent(4);
|
||||
"\n#{c}\n"
|
||||
end
|
||||
|
||||
@@ -246,7 +245,7 @@ class ImportScripts::Nabble < ImportScripts::Base
|
||||
|
||||
break if posts.ntuples() < 1
|
||||
|
||||
next if all_records_exist? :posts, posts.map {|p| p['node_id'].to_i}
|
||||
next if all_records_exist? :posts, posts.map { |p| p['node_id'].to_i }
|
||||
|
||||
create_posts(posts, total: post_count, offset: offset) do |p|
|
||||
parent_id = p['parent_id']
|
||||
@@ -289,5 +288,4 @@ class String
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
ImportScripts::Nabble.new.perform
|
||||
|
||||
@@ -164,7 +164,6 @@ class ImportScripts::Ning < ImportScripts::Base
|
||||
puts "", "Marked #{suspended} users as suspended."
|
||||
end
|
||||
|
||||
|
||||
def import_categories
|
||||
puts "", "Importing categories"
|
||||
create_categories((["Blog", "Pages", "Photos"] + @discussions_json.map { |d| d["category"] }).uniq.compact) do |name|
|
||||
@@ -179,7 +178,6 @@ class ImportScripts::Ning < ImportScripts::Base
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
def import_discussions
|
||||
puts "", "Importing discussions"
|
||||
import_topics(@discussions_json)
|
||||
@@ -200,7 +198,7 @@ class ImportScripts::Ning < ImportScripts::Base
|
||||
import_topics(@pages_json, "Pages")
|
||||
end
|
||||
|
||||
def import_topics(topics_json, default_category=nil)
|
||||
def import_topics(topics_json, default_category = nil)
|
||||
topics = 0
|
||||
posts = 0
|
||||
total = topics_json.size # number of topics. posts are embedded in the topic json, so we can't get total post count quickly.
|
||||
@@ -398,6 +396,6 @@ class ImportScripts::Ning < ImportScripts::Base
|
||||
end
|
||||
end
|
||||
|
||||
if __FILE__==$0
|
||||
if __FILE__ == $0
|
||||
ImportScripts::Ning.new.perform
|
||||
end
|
||||
|
||||
@@ -47,15 +47,15 @@ module ImportScripts::PhpBB3
|
||||
|
||||
def get_avatar_path(avatar_type, filename)
|
||||
case avatar_type
|
||||
when Constants::AVATAR_TYPE_UPLOADED then
|
||||
filename.gsub!(/_[0-9]+\./, '.') # we need 1337.jpg, not 1337_2983745.jpg
|
||||
when Constants::AVATAR_TYPE_UPLOADED then
|
||||
filename.gsub!(/_[0-9]+\./, '.') # we need 1337.jpg, not 1337_2983745.jpg
|
||||
get_uploaded_path(filename)
|
||||
when Constants::AVATAR_TYPE_GALLERY then
|
||||
get_gallery_path(filename)
|
||||
when Constants::AVATAR_TYPE_REMOTE then
|
||||
download_avatar(filename)
|
||||
when Constants::AVATAR_TYPE_GALLERY then
|
||||
get_gallery_path(filename)
|
||||
when Constants::AVATAR_TYPE_REMOTE then
|
||||
download_avatar(filename)
|
||||
else
|
||||
Rails.logger.error("Invalid avatar type #{avatar_type}. Skipping...")
|
||||
Rails.logger.error("Invalid avatar type #{avatar_type}. Skipping...")
|
||||
nil
|
||||
end
|
||||
end
|
||||
@@ -97,14 +97,14 @@ module ImportScripts::PhpBB3
|
||||
|
||||
def is_allowed_avatar_type?(avatar_type)
|
||||
case avatar_type
|
||||
when Constants::AVATAR_TYPE_UPLOADED then
|
||||
@settings.import_uploaded_avatars
|
||||
when Constants::AVATAR_TYPE_REMOTE then
|
||||
@settings.import_remote_avatars
|
||||
when Constants::AVATAR_TYPE_GALLERY then
|
||||
@settings.import_gallery_avatars
|
||||
when Constants::AVATAR_TYPE_UPLOADED then
|
||||
@settings.import_uploaded_avatars
|
||||
when Constants::AVATAR_TYPE_REMOTE then
|
||||
@settings.import_remote_avatars
|
||||
when Constants::AVATAR_TYPE_GALLERY then
|
||||
@settings.import_gallery_avatars
|
||||
else
|
||||
false
|
||||
false
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
@@ -41,8 +41,8 @@ module ImportScripts::PhpBB3
|
||||
end
|
||||
|
||||
if row[:forum_desc].present?
|
||||
changes = {raw: @text_processor.process_raw_text(row[:forum_desc])}
|
||||
opts = {revised_at: post.created_at, bypass_bump: true}
|
||||
changes = { raw: @text_processor.process_raw_text(row[:forum_desc]) }
|
||||
opts = { revised_at: post.created_at, bypass_bump: true }
|
||||
post.revise(Discourse.system_user, changes, opts)
|
||||
end
|
||||
end
|
||||
|
||||
@@ -17,7 +17,6 @@ module ImportScripts::PhpBB3
|
||||
rows.map { |row| get_import_id(row[:msg_id]) }
|
||||
end
|
||||
|
||||
|
||||
def map_message(row)
|
||||
user_id = @lookup.user_id_from_imported_user_id(row[:author_id]) || Discourse.system_user.id
|
||||
attachments = import_attachments(row, user_id)
|
||||
@@ -54,7 +53,7 @@ module ImportScripts::PhpBB3
|
||||
mapped[:title] = get_topic_title(row)
|
||||
mapped[:archetype] = Archetype.private_message
|
||||
mapped[:target_usernames] = get_recipient_usernames(row)
|
||||
mapped[:custom_fields] = {import_user_ids: current_user_ids.join(',')}
|
||||
mapped[:custom_fields] = { import_user_ids: current_user_ids.join(',') }
|
||||
|
||||
if mapped[:target_usernames].empty?
|
||||
puts "Private message without recipients. Skipping #{row[:msg_id]}: #{row[:message_subject][0..40]}"
|
||||
@@ -127,7 +126,7 @@ module ImportScripts::PhpBB3
|
||||
.joins(:topic)
|
||||
.joins(:_custom_fields)
|
||||
.where(["LOWER(topics.title) IN (:titles) AND post_custom_fields.name = 'import_user_ids' AND post_custom_fields.value = :user_ids",
|
||||
{titles: topic_titles, user_ids: current_user_ids.join(',')}])
|
||||
{ titles: topic_titles, user_ids: current_user_ids.join(',') }])
|
||||
.order('topics.created_at DESC')
|
||||
.first.try(:topic_id)
|
||||
end
|
||||
|
||||
@@ -35,7 +35,7 @@ module ImportScripts::PhpBB3
|
||||
|
||||
def get_poll_options(topic_id)
|
||||
rows = @database.fetch_poll_options(topic_id)
|
||||
options_by_text = Hash.new { |h, k| h[k] = {ids: [], total_votes: 0, anonymous_votes: 0} }
|
||||
options_by_text = Hash.new { |h, k| h[k] = { ids: [], total_votes: 0, anonymous_votes: 0 } }
|
||||
|
||||
rows.each do |row|
|
||||
option_text = @text_processor.process_raw_text(row[:poll_option_text]).delete("\n")
|
||||
@@ -100,7 +100,7 @@ module ImportScripts::PhpBB3
|
||||
|
||||
# @param custom_fields [Hash]
|
||||
def add_poll_to_custom_fields(custom_fields, extracted_poll)
|
||||
custom_fields[DiscoursePoll::POLLS_CUSTOM_FIELD] = {DiscoursePoll::DEFAULT_POLL_NAME => extracted_poll}
|
||||
custom_fields[DiscoursePoll::POLLS_CUSTOM_FIELD] = { DiscoursePoll::DEFAULT_POLL_NAME => extracted_poll }
|
||||
end
|
||||
|
||||
# @param custom_fields [Hash]
|
||||
|
||||
@@ -44,7 +44,7 @@ module ImportScripts::PhpBB3
|
||||
@import_remote_avatars = avatar_settings['remote']
|
||||
@import_gallery_avatars = avatar_settings['gallery']
|
||||
|
||||
@use_bbcode_to_md =import_settings['use_bbcode_to_md']
|
||||
@use_bbcode_to_md = import_settings['use_bbcode_to_md']
|
||||
|
||||
@original_site_prefix = import_settings['site_prefix']['original']
|
||||
@new_site_prefix = import_settings['site_prefix']['new']
|
||||
|
||||
@@ -43,7 +43,7 @@ class ImportScripts::PunBB < ImportScripts::Base
|
||||
|
||||
break if results.size < 1
|
||||
|
||||
next if all_records_exist? :users, results.map {|u| u["id"].to_i}
|
||||
next if all_records_exist? :users, results.map { |u| u["id"].to_i }
|
||||
|
||||
create_users(results, total: total_count, offset: offset) do |user|
|
||||
{ id: user['id'],
|
||||
@@ -120,7 +120,7 @@ class ImportScripts::PunBB < ImportScripts::Base
|
||||
").to_a
|
||||
|
||||
break if results.size < 1
|
||||
next if all_records_exist? :posts, results.map {|m| m['id'].to_i}
|
||||
next if all_records_exist? :posts, results.map { |m| m['id'].to_i }
|
||||
|
||||
create_posts(results, total: total_count, offset: offset) do |m|
|
||||
skip = false
|
||||
|
||||
@@ -1,8 +1,8 @@
require 'yaml'
require_relative 'quandora_api'

def load_config file
config = YAML::load_file(File.join(__dir__, file))
def load_config(file)
config = YAML::load_file(File.join(__dir__, file))
@domain = config['domain']
@username = config['username']
@password = config['password']
@@ -27,4 +27,3 @@ end

load_config ARGV.shift
export

@@ -10,7 +10,7 @@ class ImportScripts::Quandora < ImportScripts::Base
|
||||
@system_user = Discourse.system_user
|
||||
@questions = []
|
||||
Dir.foreach(JSON_FILES_DIR) do |filename|
|
||||
next if filename == '.' or filename == '..'
|
||||
next if filename == ('.') || filename == ('..')
|
||||
question = File.read JSON_FILES_DIR + '/' + filename
|
||||
@questions << question
|
||||
end
|
||||
@@ -23,7 +23,7 @@ class ImportScripts::Quandora < ImportScripts::Base
|
||||
puts "", "Done"
|
||||
end
|
||||
|
||||
def import_questions questions
|
||||
def import_questions(questions)
|
||||
topics = 0
|
||||
total = questions.size
|
||||
|
||||
@@ -40,13 +40,13 @@ class ImportScripts::Quandora < ImportScripts::Base
|
||||
puts "", "Imported #{topics} topics."
|
||||
end
|
||||
|
||||
def import_users users
|
||||
def import_users(users)
|
||||
users.each do |user|
|
||||
create_user user, user[:id]
|
||||
end
|
||||
end
|
||||
|
||||
def import_topic topic
|
||||
def import_topic(topic)
|
||||
post = nil
|
||||
if post_id = post_id_from_imported_post_id(topic[:id])
|
||||
post = Post.find(post_id) # already imported this topic
|
||||
@@ -65,13 +65,13 @@ class ImportScripts::Quandora < ImportScripts::Base
|
||||
post
|
||||
end
|
||||
|
||||
def import_posts posts, topic_id
|
||||
def import_posts(posts, topic_id)
|
||||
posts.each do |post|
|
||||
import_post post, topic_id
|
||||
end
|
||||
end
|
||||
|
||||
def import_post post, topic_id
|
||||
def import_post(post, topic_id)
|
||||
if post_id_from_imported_post_id(post[:id])
|
||||
return # already imported
|
||||
end
|
||||
@@ -89,6 +89,6 @@ class ImportScripts::Quandora < ImportScripts::Base
|
||||
end
|
||||
end
|
||||
|
||||
if __FILE__==$0
|
||||
if __FILE__ == $0
|
||||
ImportScripts::Quandora.new.perform
|
||||
end
|
||||
|
||||
@@ -5,26 +5,26 @@ class QuandoraApi
|
||||
|
||||
attr_accessor :domain, :username, :password
|
||||
|
||||
def initialize domain, username, password
|
||||
def initialize(domain, username, password)
|
||||
@domain = domain
|
||||
@username = username
|
||||
@password = password
|
||||
end
|
||||
|
||||
def base_url domain
|
||||
def base_url(domain)
|
||||
"https://#{domain}.quandora.com/m/json"
|
||||
end
|
||||
|
||||
def auth_header username, password
|
||||
def auth_header(username, password)
|
||||
encoded = Base64.encode64 "#{username}:#{password}"
|
||||
{:Authorization => "Basic #{encoded.strip!}"}
|
||||
{ Authorization: "Basic #{encoded.strip!}" }
|
||||
end
|
||||
|
||||
def list_bases_url
|
||||
"#{base_url @domain}/kb"
|
||||
end
|
||||
|
||||
def list_questions_url kb_id, limit
|
||||
def list_questions_url(kb_id, limit)
|
||||
url = "#{base_url @domain}/kb/#{kb_id}/list"
|
||||
url = "#{url}?l=#{limit}" if limit
|
||||
url
|
||||
@@ -39,13 +39,13 @@ class QuandoraApi
|
||||
response['data']
|
||||
end
|
||||
|
||||
def list_questions kb_id, limit = nil
|
||||
def list_questions(kb_id, limit = nil)
|
||||
url = list_questions_url(kb_id, limit)
|
||||
response = request url
|
||||
response['data']['result']
|
||||
end
|
||||
|
||||
def get_question question_id
|
||||
def get_question(question_id)
|
||||
url = "#{base_url @domain}/q/#{question_id}"
|
||||
response = request url
|
||||
response['data']
|
||||
|
||||
@@ -4,9 +4,9 @@ require 'time'
|
||||
|
||||
class QuandoraQuestion
|
||||
|
||||
def initialize question_json
|
||||
@question = JSON.parse question_json
|
||||
end
|
||||
def initialize(question_json)
|
||||
@question = JSON.parse question_json
|
||||
end
|
||||
|
||||
def topic
|
||||
topic = {}
|
||||
@@ -24,19 +24,19 @@ class QuandoraQuestion
|
||||
users[user[:id]] = user
|
||||
replies.each do |reply|
|
||||
user = user_from_author reply[:author]
|
||||
users[user[:id]] = user
|
||||
users[user[:id]] = user
|
||||
end
|
||||
users.values.to_a
|
||||
end
|
||||
|
||||
def user_from_author author
|
||||
def user_from_author(author)
|
||||
email = author['email']
|
||||
email = "#{author['uid']}@noemail.com" unless email
|
||||
|
||||
user = {}
|
||||
user[:id] = author['uid']
|
||||
user[:name] = "#{author['firstName']} #{author['lastName']}"
|
||||
user[:email] = email
|
||||
user[:email] = email
|
||||
user[:staged] = true
|
||||
user
|
||||
end
|
||||
@@ -55,10 +55,10 @@ class QuandoraQuestion
|
||||
posts << post_from_comment(comment, i, answer)
|
||||
end
|
||||
end
|
||||
order_replies posts
|
||||
order_replies posts
|
||||
end
|
||||
|
||||
def order_replies posts
|
||||
def order_replies(posts)
|
||||
posts = posts.sort_by { |p| p[:created_at] }
|
||||
posts.each_with_index do |p, i|
|
||||
p[:post_number] = i + 2
|
||||
@@ -70,7 +70,7 @@ class QuandoraQuestion
|
||||
posts
|
||||
end
|
||||
|
||||
def post_from_answer answer
|
||||
def post_from_answer(answer)
|
||||
post = {}
|
||||
post[:id] = answer['uid']
|
||||
post[:parent_id] = @question['uid']
|
||||
@@ -81,17 +81,17 @@ class QuandoraQuestion
|
||||
post
|
||||
end
|
||||
|
||||
def post_from_comment comment, index, parent
|
||||
def post_from_comment(comment, index, parent)
|
||||
if comment['created']
|
||||
created_at = Time.parse comment['created']
|
||||
else
|
||||
created_at = Time.parse parent['created']
|
||||
end
|
||||
parent_id = parent['uid']
|
||||
parent_id = "#{parent['uid']}-#{index-1}" if index > 0
|
||||
parent_id = "#{parent['uid']}-#{index - 1}" if index > 0
|
||||
post = {}
|
||||
id = "#{parent['uid']}-#{index}"
|
||||
post[:id] = id
|
||||
post[:id] = id
|
||||
post[:parent_id] = parent_id
|
||||
post[:author] = comment['author']
|
||||
post[:author_id] = comment['author']['uid']
|
||||
@@ -102,7 +102,7 @@ class QuandoraQuestion
|
||||
|
||||
private
|
||||
|
||||
def unescape html
|
||||
def unescape(html)
|
||||
return nil unless html
|
||||
CGI.unescapeHTML html
|
||||
end
|
||||
|
||||
@@ -7,7 +7,7 @@ class TestQuandoraApi < Minitest::Test
|
||||
|
||||
DEBUG = false
|
||||
|
||||
def initialize args
|
||||
def initialize(args)
|
||||
config = YAML::load_file(File.join(__dir__, 'config.yml'))
|
||||
@domain = config['domain']
|
||||
@username = config['username']
|
||||
@@ -52,7 +52,7 @@ class TestQuandoraApi < Minitest::Test
|
||||
end
|
||||
|
||||
def test_get_question_has_expected_structure
|
||||
question = @quandora.get_question @question_id
|
||||
question = @quandora.get_question @question_id
|
||||
expected = JSON.parse(QUESTION)['data']
|
||||
check_keys expected, question
|
||||
|
||||
@@ -71,14 +71,14 @@ class TestQuandoraApi < Minitest::Test
|
||||
|
||||
private
|
||||
|
||||
def check_keys expected, actual
|
||||
def check_keys(expected, actual)
|
||||
msg = "### caller[0]:\nKey not found in actual keys: #{actual.keys}\n"
|
||||
expected.keys.each do |k|
|
||||
assert (actual.keys.include? k), "#{k}"
|
||||
end
|
||||
end
|
||||
|
||||
def debug message, show=false
|
||||
def debug(message, show = false)
|
||||
if show || DEBUG
|
||||
puts '### ' + caller[0]
|
||||
puts ''
|
||||
|
||||
@@ -67,9 +67,9 @@ class TestQuandoraQuestion < Minitest::Test
|
||||
answer['uid'] = 'uid'
|
||||
answer['content'] = 'content'
|
||||
answer['created'] = '2013-01-06T18:24:54.62Z'
|
||||
answer['author'] = {'uid' => 'auid'}
|
||||
answer['author'] = { 'uid' => 'auid' }
|
||||
|
||||
post = @question.post_from_answer answer
|
||||
post = @question.post_from_answer answer
|
||||
|
||||
assert_equal 'uid', post[:id]
|
||||
assert_equal @question.topic[:id], post[:parent_id]
|
||||
@@ -83,8 +83,8 @@ class TestQuandoraQuestion < Minitest::Test
|
||||
comment = {}
|
||||
comment['text'] = 'text'
|
||||
comment['created'] = '2013-01-06T18:24:54.62Z'
|
||||
comment['author'] = {'uid' => 'auid'}
|
||||
parent = {'uid' => 'parent-uid'}
|
||||
comment['author'] = { 'uid' => 'auid' }
|
||||
parent = { 'uid' => 'parent-uid' }
|
||||
|
||||
post = @question.post_from_comment comment, 0, parent
|
||||
|
||||
@@ -98,8 +98,8 @@ class TestQuandoraQuestion < Minitest::Test
|
||||
|
||||
def test_post_from_comment_uses_parent_created_if_necessary
|
||||
comment = {}
|
||||
comment['author'] = {'uid' => 'auid'}
|
||||
parent = {'created' => '2013-01-06T18:24:54.62Z'}
|
||||
comment['author'] = { 'uid' => 'auid' }
|
||||
parent = { 'created' => '2013-01-06T18:24:54.62Z' }
|
||||
|
||||
post = @question.post_from_comment comment, 0, parent
|
||||
|
||||
@@ -108,8 +108,8 @@ class TestQuandoraQuestion < Minitest::Test
|
||||
|
||||
def test_post_from_comment_uses_previous_comment_as_parent
|
||||
comment = {}
|
||||
comment['author'] = {'uid' => 'auid'}
|
||||
parent = {'uid' => 'parent-uid', 'created' => '2013-01-06T18:24:54.62Z'}
|
||||
comment['author'] = { 'uid' => 'auid' }
|
||||
parent = { 'uid' => 'parent-uid', 'created' => '2013-01-06T18:24:54.62Z' }
|
||||
|
||||
post = @question.post_from_comment comment, 1, parent
|
||||
|
||||
@@ -117,7 +117,7 @@ class TestQuandoraQuestion < Minitest::Test
|
||||
assert_equal 'parent-uid-0', post[:parent_id]
|
||||
assert_equal Time.parse('2013-01-06T18:24:54.62Z'), post[:created_at]
|
||||
end
|
||||
|
||||
|
||||
def test_users
|
||||
users = @question.users
|
||||
assert_equal 5, users.size
|
||||
@@ -127,10 +127,10 @@ class TestQuandoraQuestion < Minitest::Test
|
||||
assert_equal 'Greta Greatful', users[3][:name]
|
||||
assert_equal 'Eddy Excited', users[4][:name]
|
||||
end
|
||||
|
||||
|
||||
private
|
||||
|
||||
def unescape html
|
||||
def unescape(html)
|
||||
CGI.unescapeHTML html
|
||||
end
|
||||
end
|
||||
|
||||
@@ -77,7 +77,7 @@ class ImportScripts::Sfn < ImportScripts::Base
|
||||
SQL
|
||||
|
||||
break if users.size < 1
|
||||
next if all_records_exist? :users, users.map {|u| u["id"].to_i}
|
||||
next if all_records_exist? :users, users.map { |u| u["id"].to_i }
|
||||
|
||||
create_users(users, total: user_count, offset: offset) do |user|
|
||||
external_user = @external_users[user["id"]]
|
||||
@@ -232,7 +232,7 @@ class ImportScripts::Sfn < ImportScripts::Base
|
||||
SQL
|
||||
|
||||
break if topics.size < 1
|
||||
next if all_records_exist? :posts, topics.map {|t| t['id'].to_i}
|
||||
next if all_records_exist? :posts, topics.map { |t| t['id'].to_i }
|
||||
|
||||
create_posts(topics, total: topic_count, offset: offset) do |topic|
|
||||
next unless category_id = CATEGORY_MAPPING[topic["category_id"]]
|
||||
@@ -284,7 +284,7 @@ class ImportScripts::Sfn < ImportScripts::Base
|
||||
|
||||
break if posts.size < 1
|
||||
|
||||
next if all_records_exist? :posts, posts.map {|p| p['id'].to_i}
|
||||
next if all_records_exist? :posts, posts.map { |p| p['id'].to_i }
|
||||
|
||||
create_posts(posts, total: posts_count, offset: offset) do |post|
|
||||
next unless parent = topic_lookup_from_imported_post_id(post["topic_id"])
|
||||
|
||||
@@ -5,7 +5,7 @@ class ImportScripts::SimplePress < ImportScripts::Base
|
||||
|
||||
SIMPLE_PRESS_DB ||= ENV['SIMPLEPRESS_DB'] || "simplepress"
|
||||
TABLE_PREFIX = "wp_sf"
|
||||
BATCH_SIZE ||= 1000
|
||||
BATCH_SIZE ||= 1000
|
||||
|
||||
def initialize
|
||||
super
|
||||
@@ -120,7 +120,7 @@ class ImportScripts::SimplePress < ImportScripts::Base
|
||||
|
||||
break if results.size < 1
|
||||
|
||||
next if all_records_exist? :posts, results.map {|m| m['id'].to_i}
|
||||
next if all_records_exist? :posts, results.map { |m| m['id'].to_i }
|
||||
|
||||
create_posts(results, total: total_count, offset: offset) do |m|
|
||||
skip = false
|
||||
|
||||
@@ -53,7 +53,7 @@ class ImportScripts::Smf2 < ImportScripts::Base
|
||||
if options.password == :ask
|
||||
require 'highline'
|
||||
$stderr.print "Enter password for MySQL database `#{options.database}`: "
|
||||
options.password = HighLine.new.ask('') {|q| q.echo = false }
|
||||
options.password = HighLine.new.ask('') { |q| q.echo = false }
|
||||
end
|
||||
|
||||
@default_db_connection = create_db_connection
|
||||
@@ -76,7 +76,7 @@ class ImportScripts::Smf2 < ImportScripts::Base
|
||||
WHERE min_posts = -1 AND group_type IN (1, 2)
|
||||
SQL
|
||||
|
||||
create_groups(query(<<-SQL), total: total) {|group| group }
|
||||
create_groups(query(<<-SQL), total: total) { |group| group }
|
||||
SELECT id_group AS id, group_name AS name
|
||||
FROM {prefix}membergroups
|
||||
WHERE min_posts = -1 AND group_type IN (1, 2)
|
||||
@@ -124,12 +124,12 @@ class ImportScripts::Smf2 < ImportScripts::Base
|
||||
user.save
|
||||
GroupUser.transaction do
|
||||
group_ids.each do |gid|
|
||||
group_id = group_id_from_imported_group_id(gid) and
|
||||
(group_id = group_id_from_imported_group_id(gid)) &&
|
||||
GroupUser.find_or_create_by(user: user, group_id: group_id)
|
||||
end
|
||||
end
|
||||
if options.smfroot and member[:id_attach].present? and user.uploaded_avatar_id.blank?
|
||||
path = find_smf_attachment_path(member[:id_attach], member[:file_hash], member[:filename]) and begin
|
||||
if options.smfroot && member[:id_attach].present? && user.uploaded_avatar_id.blank?
|
||||
(path = find_smf_attachment_path(member[:id_attach], member[:file_hash], member[:filename])) && begin
|
||||
upload = create_upload(user.id, path, member[:filename])
|
||||
if upload.persisted?
|
||||
user.update(uploaded_avatar_id: upload.id)
|
||||
@@ -160,7 +160,7 @@ class ImportScripts::Smf2 < ImportScripts::Base
|
||||
post_create_action: restricted && proc do |category|
|
||||
category.update(read_restricted: true)
|
||||
groups.each do |imported_group_id|
|
||||
group_id = group_id_from_imported_group_id(imported_group_id) and
|
||||
(group_id = group_id_from_imported_group_id(imported_group_id)) &&
|
||||
CategoryGroup.find_or_create_by(category: category, group_id: group_id) do |cg|
|
||||
cg.permission_type = CategoryGroup.permission_types[:full]
|
||||
end
|
||||
@@ -184,11 +184,10 @@ class ImportScripts::Smf2 < ImportScripts::Base
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
db2 = create_db_connection
|
||||
|
||||
create_posts(query(<<-SQL), total: total) do |message|
|
||||
SELECT m.id_msg, m.id_topic, m.id_member, m.poster_time, m.body,
|
||||
SELECT m.id_msg, m.id_topic, m.id_member, m.poster_time, m.body,
|
||||
m.subject, t.id_board, t.id_first_msg, COUNT(a.id_attach) AS attachment_count
|
||||
FROM {prefix}messages AS m
|
||||
LEFT JOIN {prefix}topics AS t ON t.id_topic = m.id_topic
|
||||
@@ -226,7 +225,7 @@ class ImportScripts::Smf2 < ImportScripts::Base
|
||||
WHERE attachment_type = 0 AND id_msg = #{message[:id_msg]}
|
||||
ORDER BY id_attach ASC
|
||||
SQL
|
||||
attachments.map! {|a| import_attachment(post, a) rescue (puts $! ; nil) }
|
||||
attachments.map! { |a| import_attachment(post, a) rescue (puts $! ; nil) }
|
||||
post[:raw] = convert_message_body(message[:body], attachments, ignore_quotes: ignore_quotes)
|
||||
next post
|
||||
end
|
||||
@@ -265,7 +264,7 @@ class ImportScripts::Smf2 < ImportScripts::Base
|
||||
|
||||
def create_db_connection
|
||||
Mysql2::Client.new(host: options.host, username: options.username,
|
||||
password: options.password, database: options.database)
|
||||
password: options.password, database: options.database)
|
||||
end
|
||||
|
||||
def query(sql, **opts, &block)
|
||||
@@ -278,7 +277,7 @@ class ImportScripts::Smf2 < ImportScripts::Base
|
||||
|
||||
def __query(db, sql, **opts)
|
||||
db.query(sql.gsub('{prefix}', options.prefix),
|
||||
{symbolize_keys: true, cache_rows: false}.merge(opts))
|
||||
{ symbolize_keys: true, cache_rows: false }.merge(opts))
|
||||
end
|
||||
|
||||
TRTR_TABLE = begin
|
||||
@@ -289,14 +288,14 @@ class ImportScripts::Smf2 < ImportScripts::Base
|
||||
|
||||
def find_smf_attachment_path(attachment_id, file_hash, filename)
|
||||
cleaned_name = filename.dup
|
||||
TRTR_TABLE.each {|from,to| cleaned_name.gsub!(from, to) }
|
||||
TRTR_TABLE.each { |from, to| cleaned_name.gsub!(from, to) }
|
||||
cleaned_name.gsub!(/\s/, '_')
|
||||
cleaned_name.gsub!(/[^\w_\.\-]/, '')
|
||||
legacy_name = "#{attachment_id}_#{cleaned_name.gsub('.', '_')}#{Digest::MD5.hexdigest(cleaned_name)}"
|
||||
|
||||
[ filename, "#{attachment_id}_#{file_hash}", legacy_name ]
|
||||
.map {|name| File.join(options.smfroot, 'attachments', name) }
|
||||
.detect {|file| File.exists?(file) }
|
||||
.map { |name| File.join(options.smfroot, 'attachments', name) }
|
||||
.detect { |file| File.exists?(file) }
|
||||
end
|
||||
|
||||
def decode_entities(*args)
|
||||
@@ -313,7 +312,7 @@ class ImportScripts::Smf2 < ImportScripts::Base
|
||||
end
|
||||
body.gsub!(XListPattern) do |s|
|
||||
r = "\n[ul]"
|
||||
s.lines.each {|l| r << '[li]' << l.strip.sub(/^\[x\]\s*/, '') << '[/li]' }
|
||||
s.lines.each { |l| r << '[li]' << l.strip.sub(/^\[x\]\s*/, '') << '[/li]' }
|
||||
r << "[/ul]\n"
|
||||
end
|
||||
|
||||
@@ -331,8 +330,8 @@ class ImportScripts::Smf2 < ImportScripts::Base
if use_count.keys.length < attachments.select(&:present?).length
body << "\n\n---"
attachments.each_with_index do |upload, num|
if upload.present? and use_count[num] == 0
body << ( "\n\n" + get_upload_markdown(upload) )
if upload.present? && use_count[num] == (0)
body << ("\n\n" + get_upload_markdown(upload))
end
end
end
@@ -378,7 +377,7 @@ class ImportScripts::Smf2 < ImportScripts::Base
# => {'param1' => 'value1=still1 value1', 'param2' => 'value2 ...'}
def parse_tag_params(params)
params.to_s.strip.scan(/(?<param>\w+)=(?<value>(?:(?>\S+)|\s+(?!\w+=))*)/).
inject({}) {|h,e| h[e[0]] = e[1]; h }
inject({}) { |h, e| h[e[0]] = e[1]; h }
end

class << self
@@ -391,7 +390,7 @@ class ImportScripts::Smf2 < ImportScripts::Base
# => match[:params] == 'param=value param2=value2'
# match[:inner] == "\n text\n [tag nested=true]text[/tag]\n"
def build_nested_tag_regex(ltag, rtag = nil)
rtag ||= '/'+ltag
rtag ||= '/' + ltag
%r{
\[#{ltag}(?-x:[ =](?<params>[^\]]*))?\] # consume open tag, followed by...
(?<inner>(?:
@@ -422,11 +421,10 @@ class ImportScripts::Smf2 < ImportScripts::Base
ColorPattern = build_nested_tag_regex('color')
ListPattern = build_nested_tag_regex('list')
AttachmentPatterns = [
[/^\[attach(?:|img|url|mini)=(?<num>\d+)\]$/, ->(u) { "\n"+get_upload_markdown(u)+"\n" }],
[/^\[attach(?:|img|url|mini)=(?<num>\d+)\]$/, ->(u) { "\n" + get_upload_markdown(u) + "\n" }],
[/\[attach(?:|img|url|mini)=(?<num>\d+)\]/, ->(u) { get_upload_markdown(u) }]
]


# Provides command line options and parses the SMF settings file.
class Options

@@ -494,18 +492,17 @@ class ImportScripts::Smf2 < ImportScripts::Base
@parser ||= OptionParser.new(nil, 12) do |o|
o.banner = "Usage:\t#{File.basename($0)} <SMFROOT> [options]\n"
o.banner << "\t#{File.basename($0)} -d <DATABASE> [options]"
o.on('-h HOST', :REQUIRED, "MySQL server hostname [\"#{self.host}\"]") {|s| self.host = s }
o.on('-u USER', :REQUIRED, "MySQL username [\"#{self.username}\"]") {|s| self.username = s }
o.on('-p [PASS]', :OPTIONAL, 'MySQL password. Without argument, reads password from STDIN.') {|s| self.password = s || :ask }
o.on('-d DBNAME', :REQUIRED, 'Name of SMF database') {|s| self.database = s }
o.on('-f PREFIX', :REQUIRED, "Table names prefix [\"#{self.prefix}\"]") {|s| self.prefix = s }
o.on('-t TIMEZONE', :REQUIRED, 'Timezone used by SMF2 [auto-detected from PHP]') {|s| self.timezone = s }
o.on('-h HOST', :REQUIRED, "MySQL server hostname [\"#{self.host}\"]") { |s| self.host = s }
o.on('-u USER', :REQUIRED, "MySQL username [\"#{self.username}\"]") { |s| self.username = s }
o.on('-p [PASS]', :OPTIONAL, 'MySQL password. Without argument, reads password from STDIN.') { |s| self.password = s || :ask }
o.on('-d DBNAME', :REQUIRED, 'Name of SMF database') { |s| self.database = s }
o.on('-f PREFIX', :REQUIRED, "Table names prefix [\"#{self.prefix}\"]") { |s| self.prefix = s }
o.on('-t TIMEZONE', :REQUIRED, 'Timezone used by SMF2 [auto-detected from PHP]') { |s| self.timezone = s }
end
end

end #Options


# Framework around TSort, used to build a dependency graph over messages
# to find and solve cyclic quotations.
class MessageDependencyGraph
@@ -532,10 +529,9 @@ class ImportScripts::Smf2 < ImportScripts::Base
end

def cycles
strongly_connected_components.select {|c| c.length > 1 }.to_a
strongly_connected_components.select { |c| c.length > 1 }.to_a
end


class Node
attr_reader :id

@@ -551,7 +547,7 @@ class ImportScripts::Smf2 < ImportScripts::Base
end

def quoted
@quoted.map {|id| @graph[id] }.reject(&:nil?)
@quoted.map { |id| @graph[id] }.reject(&:nil?)
end

def ignore_quotes?

@@ -34,7 +34,7 @@ class CreateTitle
end

def self.first_long_line(text)
lines = text.split("\n").select {|t| t.strip.size >= 20}
lines = text.split("\n").select { |t| t.strip.size >= 20 }
return if lines.empty?
lines[0].strip
end

@@ -2,8 +2,8 @@ require 'yaml'
require 'fileutils'
require_relative 'socialcast_api'

def load_config file
config = YAML::load_file(File.join(__dir__, file))
def load_config(file)
config = YAML::load_file(File.join(__dir__, file))
@domain = config['domain']
@username = config['username']
@password = config['password']
@@ -17,8 +17,8 @@ def export
export_messages
end

def export_users(page=1)
users = @api.list_users({page: page})
def export_users(page = 1)
users = @api.list_users(page: page)
return if users.empty?
users.each do |user|
File.open("output/users/#{user['id']}.json", 'w') do |f|
@@ -30,8 +30,8 @@ def export_users(page=1)
export_users page + 1
end

def export_messages(page=1)
messages = @api.list_messages({page: page})
def export_messages(page = 1)
messages = @api.list_messages(page: page)
return if messages.empty?
messages.each do |message|
File.open("output/messages/#{message['id']}.json", 'w') do |f|

@@ -27,7 +27,7 @@ class ImportScripts::Socialcast < ImportScripts::Base
imported = 0
total = count_files(MESSAGES_DIR)
Dir.foreach(MESSAGES_DIR) do |filename|
next if filename == '.' or filename == '..'
next if filename == ('.') || filename == ('..')
topics += 1
message_json = File.read MESSAGES_DIR + '/' + filename
message = SocialcastMessage.new(message_json)
@@ -46,7 +46,7 @@ class ImportScripts::Socialcast < ImportScripts::Base
users = 0
total = count_files(USERS_DIR)
Dir.foreach(USERS_DIR) do |filename|
next if filename == '.' or filename == '..'
next if filename == ('.') || filename == ('..')
user_json = File.read USERS_DIR + '/' + filename
user = SocialcastUser.new(user_json).user
create_user user, user[:id]
@@ -56,10 +56,10 @@ class ImportScripts::Socialcast < ImportScripts::Base
end

def count_files(path)
Dir.foreach(path).select {|f| f != '.' && f != '..'}.count
Dir.foreach(path).select { |f| f != '.' && f != '..' }.count
end

def import_topic topic
def import_topic(topic)
post = nil
if post_id = post_id_from_imported_post_id(topic[:id])
post = Post.find(post_id) # already imported this topic
@@ -77,13 +77,13 @@ class ImportScripts::Socialcast < ImportScripts::Base
post
end

def import_posts posts, topic_id
def import_posts(posts, topic_id)
posts.each do |post|
import_post post, topic_id
end
end

def import_post post, topic_id
def import_post(post, topic_id)
return if post_id_from_imported_post_id(post[:id]) # already imported
post[:topic_id] = topic_id
post[:user_id] = user_id_from_imported_user_id(post[:author_id]) || -1
@@ -96,6 +96,6 @@ class ImportScripts::Socialcast < ImportScripts::Base

end

if __FILE__==$0
if __FILE__ == $0
ImportScripts::Socialcast.new.perform
end

@@ -5,7 +5,7 @@ class SocialcastApi

attr_accessor :domain, :username, :password

def initialize domain, username, password
def initialize(domain, username, password)
@domain = domain
@username = username
@password = password
@@ -17,22 +17,22 @@ class SocialcastApi

def headers
encoded = Base64.encode64 "#{@username}:#{@password}"
{:Authorization => "Basic #{encoded.strip!}", :Accept => "application/json"}
{ Authorization: "Basic #{encoded.strip!}", Accept: "application/json" }
end

def request url
JSON.parse(Excon.get(url, headers: headers)
def request(url)
JSON.parse(Excon.get(url, headers: headers))
end

def list_users(opts={})
def list_users(opts = {})
page = opts[:page] ? opts[:page] : 1
response = request "#{base_url}/users?page=#{page}"
response['users'].sort {|u| u['id']}
response['users'].sort { |u| u['id'] }
end

def list_messages(opts={})
def list_messages(opts = {})
page = opts[:page] ? opts[:page] : 1
response = request "#{base_url}/messages?page=#{page}"
response['messages'].sort {|m| m['id']}
response['messages'].sort { |m| m['id'] }
end
end

@@ -18,7 +18,7 @@ class SocialcastMessage
}
}

def initialize message_json
def initialize(message_json)
@parsed_json = JSON.parse message_json
end

@@ -51,7 +51,6 @@ class SocialcastMessage
tags
end


def category
category = DEFAULT_CATEGORY
if group && TAGS_AND_CATEGORIES[group]
@@ -92,7 +91,7 @@ class SocialcastMessage

private

def unescape html
def unescape(html)
return nil unless html
CGI.unescapeHTML html
end

@@ -4,9 +4,9 @@ require 'time'

class SocialcastUser

def initialize user_json
@parsed_json = JSON.parse user_json
end
def initialize(user_json)
@parsed_json = JSON.parse user_json
end

def user
email = @parsed_json['contact_info']['email']

@@ -7,7 +7,7 @@ class TestSocialcastApi < Minitest::Test

DEBUG = false

def initialize args
def initialize(args)
config = YAML::load_file(File.join(__dir__, 'config.ex.yml'))
@domain = config['domain']
@username = config['username']
@@ -39,40 +39,40 @@ class TestSocialcastApi < Minitest::Test

def test_list_users
users = @socialcast.list_users
expected = JSON.parse(USERS)['users'].sort {|u| u['id']}
expected = JSON.parse(USERS)['users'].sort { |u| u['id'] }
assert_equal 15, users.size
assert_equal expected[0], users[0]
end

def test_list_users_next_page
users = @socialcast.list_users({page: 2})
users = @socialcast.list_users(page: 2)
assert_equal 0, users.size
end

def test_list_messages
messages = @socialcast.list_messages
expected = JSON.parse(MESSAGES)['messages'].sort {|m| m['id']}
expected = JSON.parse(MESSAGES)['messages'].sort { |m| m['id'] }
assert_equal 20, messages.size
check_keys expected[0], messages[0]
end

def test_messages_next_page
messages = @socialcast.list_messages({page: 2})
expected = JSON.parse(MESSAGES_PG_2)['messages'].sort {|m| m['id']}
messages = @socialcast.list_messages(page: 2)
expected = JSON.parse(MESSAGES_PG_2)['messages'].sort { |m| m['id'] }
assert_equal 20, messages.size
check_keys expected[0], messages[0]
end

private

def check_keys expected, actual
def check_keys(expected, actual)
msg = "### caller[0]:\nKey not found in actual keys: #{actual.keys}\n"
expected.keys.each do |k|
assert (actual.keys.include? k), "#{k}"
end
end

def debug message, show=false
def debug(message, show = false)
if show || DEBUG
puts '### ' + caller[0]
puts ''

@@ -9,7 +9,7 @@ def titles
topics = 0
total = count_files(MESSAGES_DIR)
Dir.foreach(MESSAGES_DIR) do |filename|
next if filename == '.' or filename == '..'
next if filename == ('.') || filename == ('..')
message_json = File.read MESSAGES_DIR + '/' + filename
message = SocialcastMessage.new(message_json)
next unless message.title
@@ -21,7 +21,7 @@ def titles
end

def count_files(path)
Dir.foreach(path).select {|f| f != '.' && f != '..'}.count
Dir.foreach(path).select { |f| f != '.' && f != '..' }.count
end

titles

@@ -34,7 +34,7 @@ class ImportScripts::Sourceforge < ImportScripts::Base
end

def load_json
@json = MultiJson.load(File.read(JSON_FILE), :symbolize_keys => true)
@json = MultiJson.load(File.read(JSON_FILE), symbolize_keys: true)
end

def import_categories
@@ -45,8 +45,8 @@ class ImportScripts::Sourceforge < ImportScripts::Base
id: forum[:shortname],
name: forum[:name],
post_create_action: proc do |category|
changes = {raw: forum[:description]}
opts = {revised_at: Time.now, bypass_bump: true}
changes = { raw: forum[:description] }
opts = { revised_at: Time.now, bypass_bump: true }

post = category.topic.first_post
post.revise(@system_user, changes, opts)

@@ -11,7 +11,7 @@ class Saxy < Ox::Sax
end

def start_element(name)
@stack << {elem: name}
@stack << { elem: name }
end

def end_element(name)
@@ -40,7 +40,6 @@ class Convert < Saxy
super()
end


def end_element(name)
old = @stack.pop
cur = @stack[-1]
@@ -71,10 +70,9 @@ class Convert < Saxy

row = data[:row_data]
col_names = row.keys.join(",")
vals = row.values.map{|v| "'#{v.gsub("'", "''").gsub('\\','\\\\\\')}'"}.join(",")
vals = row.values.map { |v| "'#{v.gsub("'", "''").gsub('\\', '\\\\\\')}'" }.join(",")
puts "INSERT INTO #{name} (#{col_names}) VALUES (#{vals});"
end
end

Ox.sax_parse(Convert.new(skip_data: ['metrics2', 'user_log']), File.open(ARGV[0]))

@@ -69,10 +69,10 @@ class ImportScripts::Vanilla < ImportScripts::Base
def read_file
puts "reading file..."
string = File.read(@vanilla_file).gsub("\\N", "")
.gsub(/\\$\n/m, "\\n")
.gsub("\\,", ",")
.gsub(/(?<!\\)\\"/, '""')
.gsub(/\\\\\\"/, '\\""')
.gsub(/\\$\n/m, "\\n")
.gsub("\\,", ",")
.gsub(/(?<!\\)\\"/, '""')
.gsub(/\\\\\\"/, '\\""')
StringIO.new(string)
end

@@ -192,10 +192,10 @@ class ImportScripts::Vanilla < ImportScripts::Base

# list all other user ids in the conversation
user_ids_in_conversation = @user_conversations.select { |uc| uc[:conversation_id] == conversation[:conversation_id] && uc[:user_id] != conversation[:insert_user_id] }
.map { |uc| uc[:user_id] }
.map { |uc| uc[:user_id] }
# retrieve their emails
user_emails_in_conversation = @users.select { |u| user_ids_in_conversation.include?(u[:user_id]) }
.map { |u| u[:email] }
.map { |u| u[:email] }
# retrieve their usernames from the database
target_usernames = User.where("email IN (?)", user_emails_in_conversation).pluck(:username).to_a

@@ -243,10 +243,10 @@ class ImportScripts::Vanilla < ImportScripts::Base
def clean_up(raw)
return "" if raw.blank?
raw.gsub("\\n", "\n")
.gsub(/<\/?pre\s*>/i, "\n```\n")
.gsub(/<\/?code\s*>/i, "`")
.gsub("&lt;", "<")
.gsub("&gt;", ">")
.gsub(/<\/?pre\s*>/i, "\n```\n")
.gsub(/<\/?code\s*>/i, "`")
.gsub("&lt;", "<")
.gsub("&gt;", ">")
end

end

@@ -54,7 +54,7 @@ class ImportScripts::VanillaSQL < ImportScripts::Base
username = nil
@last_user_id = -1
total_count = mysql_query("SELECT count(*) count FROM #{TABLE_PREFIX}User;").first['count']

batches(BATCH_SIZE) do |offset|
results = mysql_query(
"SELECT UserID, Name, Title, Location, About, Email,
@@ -66,7 +66,7 @@ class ImportScripts::VanillaSQL < ImportScripts::Base

break if results.size < 1
@last_user_id = results.to_a.last['UserID']
next if all_records_exist? :users, results.map {|u| u['UserID'].to_i}
next if all_records_exist? :users, results.map { |u| u['UserID'].to_i }

create_users(results, total: total_count, offset: offset) do |user|
next if user['Email'].blank?
@@ -197,9 +197,9 @@ class ImportScripts::VanillaSQL < ImportScripts::Base
tag_names_sql = "select t.name as tag_name from GDN_Tag t, GDN_TagDiscussion td where t.tagid = td.tagid and td.discussionid = {discussionid} and t.name != '';"

total_count = mysql_query("SELECT count(*) count FROM #{TABLE_PREFIX}Discussion;").first['count']

@last_topic_id = -1

batches(BATCH_SIZE) do |offset|
discussions = mysql_query(
"SELECT DiscussionID, CategoryID, Name, Body,
@@ -211,7 +211,7 @@ class ImportScripts::VanillaSQL < ImportScripts::Base

break if discussions.size < 1
@last_topic_id = discussions.to_a.last['DiscussionID']
next if all_records_exist? :posts, discussions.map {|t| "discussion#" + t['DiscussionID'].to_s}
next if all_records_exist? :posts, discussions.map { |t| "discussion#" + t['DiscussionID'].to_s }

create_posts(discussions, total: total_count, offset: offset) do |discussion|
{
@@ -223,7 +223,7 @@ class ImportScripts::VanillaSQL < ImportScripts::Base
created_at: Time.zone.at(discussion['DateInserted']),
post_create_action: proc do |post|
if @import_tags
tag_names = @client.query(tag_names_sql.gsub('{discussionid}', discussion['DiscussionID'].to_s)).map {|row| row['tag_name']}
tag_names = @client.query(tag_names_sql.gsub('{discussionid}', discussion['DiscussionID'].to_s)).map { |row| row['tag_name'] }
DiscourseTagging.tag_topic_by_names(post.topic, staff_guardian, tag_names)
end
end
@@ -248,7 +248,7 @@ class ImportScripts::VanillaSQL < ImportScripts::Base

break if comments.size < 1
@last_post_id = comments.to_a.last['CommentID']
next if all_records_exist? :posts, comments.map {|comment| "comment#" + comment['CommentID'].to_s}
next if all_records_exist? :posts, comments.map { |comment| "comment#" + comment['CommentID'].to_s }

create_posts(comments, total: total_count, offset: offset) do |comment|
next unless t = topic_lookup_from_imported_post_id("discussion#" + comment['DiscussionID'].to_s)
@@ -272,15 +272,15 @@ class ImportScripts::VanillaSQL < ImportScripts::Base

# fix whitespaces
raw = raw.gsub(/(\\r)?\\n/, "\n")
.gsub("\\t", "\t")
.gsub("\\t", "\t")

# [HTML]...[/HTML]
raw = raw.gsub(/\[html\]/i, "\n```html\n")
.gsub(/\[\/html\]/i, "\n```\n")
.gsub(/\[\/html\]/i, "\n```\n")

# [PHP]...[/PHP]
raw = raw.gsub(/\[php\]/i, "\n```php\n")
.gsub(/\[\/php\]/i, "\n```\n")
.gsub(/\[\/php\]/i, "\n```\n")

# [HIGHLIGHT="..."]
raw = raw.gsub(/\[highlight="?(\w+)"?\]/i) { "\n```#{$1.downcase}\n" }
@@ -288,7 +288,7 @@ class ImportScripts::VanillaSQL < ImportScripts::Base
# [CODE]...[/CODE]
# [HIGHLIGHT]...[/HIGHLIGHT]
raw = raw.gsub(/\[\/?code\]/i, "\n```\n")
.gsub(/\[\/?highlight\]/i, "\n```\n")
.gsub(/\[\/?highlight\]/i, "\n```\n")

# [SAMP]...[/SAMP]
raw.gsub!(/\[\/?samp\]/i, "`")
@@ -299,12 +299,12 @@ class ImportScripts::VanillaSQL < ImportScripts::Base
# - AFTER all the "code" processing
# - BEFORE the "quote" processing
raw = raw.gsub(/`([^`]+)`/im) { "`" + $1.gsub("<", "\u2603") + "`" }
.gsub("&lt;", "<")
.gsub("\u2603", "<")
.gsub("&lt;", "<")
.gsub("\u2603", "<")

raw = raw.gsub(/`([^`]+)`/im) { "`" + $1.gsub(">", "\u2603") + "`" }
.gsub("&gt;", ">")
.gsub("\u2603", ">")
.gsub("&gt;", ">")
.gsub("\u2603", ">")
end

# [URL=...]...[/URL]
@@ -316,7 +316,7 @@ class ImportScripts::VanillaSQL < ImportScripts::Base
# [URL]...[/URL]
# [MP3]...[/MP3]
raw = raw.gsub(/\[\/?url\]/i, "")
.gsub(/\[\/?mp3\]/i, "")
.gsub(/\[\/?mp3\]/i, "")

# [QUOTE]...[/QUOTE]
raw.gsub!(/\[quote\](.+?)\[\/quote\]/im) { "\n> #{$1}\n" }
@@ -366,7 +366,7 @@ class ImportScripts::VanillaSQL < ImportScripts::Base
User.find_each do |u|
ucf = u.custom_fields
if ucf && ucf["import_id"] && ucf["import_username"]
Permalink.create( url: "profile/#{ucf['import_id']}/#{ucf['import_username']}", external_url: "/users/#{u.username}" ) rescue nil
Permalink.create(url: "profile/#{ucf['import_id']}/#{ucf['import_username']}", external_url: "/users/#{u.username}") rescue nil
print '.'
end
end
@@ -378,9 +378,9 @@ class ImportScripts::VanillaSQL < ImportScripts::Base
id = pcf["import_id"].split('#').last
if post.post_number == 1
slug = Slug.for(topic.title) # probably matches what vanilla would do...
Permalink.create( url: "discussion/#{id}/#{slug}", topic_id: topic.id ) rescue nil
Permalink.create(url: "discussion/#{id}/#{slug}", topic_id: topic.id) rescue nil
else
Permalink.create( url: "discussion/comment/#{id}", post_id: post.id ) rescue nil
Permalink.create(url: "discussion/comment/#{id}", post_id: post.id) rescue nil
end
print '.'
end
@@ -389,5 +389,4 @@ class ImportScripts::VanillaSQL < ImportScripts::Base

end

ImportScripts::VanillaSQL.new.perform

@@ -47,7 +47,7 @@ class ImportScripts::VBulletin < ImportScripts::Base
database: DB_NAME
)
rescue Exception => e
puts '='*50
puts '=' * 50
puts e.message
puts <<EOM
Cannot connect in to database.
@@ -71,7 +71,6 @@ EOM
exit
end

def execute
mysql_query("CREATE INDEX firstpostid_index ON #{TABLE_PREFIX}thread (firstpostid)") rescue nil

@@ -422,7 +421,6 @@ EOM
puts sql
end

def import_private_messages
puts "", "importing private messages..."

@@ -536,7 +534,6 @@ EOM
end
end

def import_attachments
puts '', 'importing attachments...'

@@ -574,7 +571,7 @@ EOM
end

if new_raw != post.raw
PostRevisor.new(post).revise!(post.user, { raw: new_raw }, { bypass_bump: true, edit_reason: 'Import attachments from vBulletin' })
PostRevisor.new(post).revise!(post.user, { raw: new_raw }, bypass_bump: true, edit_reason: 'Import attachments from vBulletin')
end

success_count += 1
@@ -828,7 +825,6 @@ EOM
raw
end

def create_permalink_file
puts '', 'Creating Permalink File...', ''

@@ -858,7 +854,6 @@ EOM

end

def suspend_users
puts '', "updating banned users"

@@ -5,7 +5,7 @@ require 'htmlentities'
class ImportScripts::VBulletin < ImportScripts::Base
BATCH_SIZE = 1000
DBPREFIX = "vb_"
ROOT_NODE=2
ROOT_NODE = 2

# CHANGE THESE BEFORE RUNNING THE IMPORTER
DATABASE = "yourforum"
@@ -394,7 +394,7 @@ class ImportScripts::VBulletin < ImportScripts::Base
end

if new_raw != post.raw
PostRevisor.new(post).revise!(post.user, { raw: new_raw }, { bypass_bump: true, edit_reason: 'Import attachments from vBulletin' })
PostRevisor.new(post).revise!(post.user, { raw: new_raw }, bypass_bump: true, edit_reason: 'Import attachments from vBulletin')
end

success_count += 1
@@ -450,15 +450,15 @@ class ImportScripts::VBulletin < ImportScripts::Base

# fix whitespaces
raw = raw.gsub(/(\\r)?\\n/, "\n")
.gsub("\\t", "\t")
.gsub("\\t", "\t")

# [HTML]...[/HTML]
raw = raw.gsub(/\[html\]/i, "\n```html\n")
.gsub(/\[\/html\]/i, "\n```\n")
.gsub(/\[\/html\]/i, "\n```\n")

# [PHP]...[/PHP]
raw = raw.gsub(/\[php\]/i, "\n```php\n")
.gsub(/\[\/php\]/i, "\n```\n")
.gsub(/\[\/php\]/i, "\n```\n")

# [HIGHLIGHT="..."]
raw = raw.gsub(/\[highlight="?(\w+)"?\]/i) { "\n```#{$1.downcase}\n" }
@@ -466,7 +466,7 @@ class ImportScripts::VBulletin < ImportScripts::Base
# [CODE]...[/CODE]
# [HIGHLIGHT]...[/HIGHLIGHT]
raw = raw.gsub(/\[\/?code\]/i, "\n```\n")
.gsub(/\[\/?highlight\]/i, "\n```\n")
.gsub(/\[\/?highlight\]/i, "\n```\n")

# [SAMP]...[/SAMP]
raw = raw.gsub(/\[\/?samp\]/i, "`")
@@ -476,12 +476,12 @@ class ImportScripts::VBulletin < ImportScripts::Base
# - AFTER all the "code" processing
# - BEFORE the "quote" processing
raw = raw.gsub(/`([^`]+)`/im) { "`" + $1.gsub("<", "\u2603") + "`" }
.gsub("&lt;", "<")
.gsub("\u2603", "<")
.gsub("&lt;", "<")
.gsub("\u2603", "<")

raw = raw.gsub(/`([^`]+)`/im) { "`" + $1.gsub(">", "\u2603") + "`" }
.gsub("&gt;", ">")
.gsub("\u2603", ">")
.gsub("&gt;", ">")
.gsub("\u2603", ">")

# [URL=...]...[/URL]
raw.gsub!(/\[url="?(.+?)"?\](.+?)\[\/url\]/i) { "<a href=\"#{$1}\">#{$2}</a>" }
@@ -489,7 +489,7 @@ class ImportScripts::VBulletin < ImportScripts::Base
# [URL]...[/URL]
# [MP3]...[/MP3]
raw = raw.gsub(/\[\/?url\]/i, "")
.gsub(/\[\/?mp3\]/i, "")
.gsub(/\[\/?mp3\]/i, "")

# [MENTION]<username>[/MENTION]
raw = raw.gsub(/\[mention\](.+?)\[\/mention\]/i) do

@@ -42,7 +42,7 @@ class ImportScripts::XenForo < ImportScripts::Base

break if results.size < 1

next if all_records_exist? :users, results.map {|u| u["id"].to_i}
next if all_records_exist? :users, results.map { |u| u["id"].to_i }

create_users(results, total: total_count, offset: offset) do |user|
next if user['username'].blank?
@@ -100,7 +100,7 @@ class ImportScripts::XenForo < ImportScripts::Base
").to_a

break if results.size < 1
next if all_records_exist? :posts, results.map {|p| p['id'] }
next if all_records_exist? :posts, results.map { |p| p['id'] }

create_posts(results, total: total_count, offset: offset) do |m|
skip = false

@@ -1,6 +1,4 @@


###
###
### The output of this importer is bad.
@@ -12,14 +10,6 @@
###
###


# Import from Zoho.
# Be sure to get the posts CSV file, AND the user list csv file with people's email addresses.
# You may need to contact Zoho support for the user list.
@@ -64,7 +54,7 @@ class ImportScripts::Zoho < ImportScripts::Base

def import_users
puts "", "Importing users"
create_users( CSV.parse(File.read(File.join(@path, 'users.csv'))) ) do |u|
create_users(CSV.parse(File.read(File.join(@path, 'users.csv')))) do |u|
username = cleanup_zoho_username(u[0])
{
id: username,
@@ -105,10 +95,10 @@ class ImportScripts::Zoho < ImportScripts::Base

# Create categories
@categories.each do |parent, subcats|
c = create_category({name: parent}, parent)
c = create_category({ name: parent }, parent)
subcats.each do |subcat|
next if subcat == "Uncategorized" || subcat == "Uncategorised"
create_category({name: subcat, parent_category_id: c.id}, "#{parent}:#{subcat}")
create_category({ name: subcat, parent_category_id: c.id }, "#{parent}:#{subcat}")
end
end

@@ -217,7 +207,6 @@ class ImportScripts::Zoho < ImportScripts::Base
CGI.unescapeHTML(x)
end

def import_post_id(row)
# Try to make up a unique id based on the data Zoho gives us.
# The posted_time seems to be the same for all posts in a topic, so we can't use that.