universal_table/lib/tasks/universal_table_tasks.rake

# desc "Explaining what the task does"
# task :universal_table do
# # Task goes here
# end
require 'rubyXL'
namespace :universal_table_tasks do
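  # Example invocation (illustrative values; pass a real UTable id and, when the site
  # has no root_url configured, the host to use in generated links):
  #   bundle exec rake "universal_table_tasks:prepare_download[UTABLE_ID,www.example.org]"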
  desc "Export a UTable to an XLSX file under public/uploads/utable_export"
  task :prepare_download, [:utable_id, :url] => :environment do |task, args|
    id = args.utable_id
    I18n.locale = :zh_tw
    table = UTable.find(id)
    ac = ActionController::Base.new
    host_url = Site.first.root_url
    # Fall back to the host passed as the second task argument when the site
    # has no root_url configured.
    if host_url == "http://"
      host_url = "http://#{args.url}"
    end
    xlsx = ac.render_to_string handlers: [:axlsx], formats: [:xlsx], template: "utable_export/export", locals: { table: table, site_in_use_locales: Site.first.in_use_locales, url: host_url }
    dirname = "public/uploads/utable_export/#{id}"
    FileUtils.mkdir_p(dirname) unless File.exist?(dirname)
    # Strip characters that are unsafe in file names from the table title.
    f = "#{dirname}/#{table.title.gsub(/[ "'*@\#$%^&()+=;:.,?>|\\\/<~_!:,、。!?;「」〈〉【】]/, '')}.xlsx"
    File.delete(f) if File.exist?(f)
    # Write the rendered spreadsheet in binary mode and close the handle when done.
    xlsx.force_encoding("utf-8")
    File.open(f, "wb") { |file| file.write(xlsx) }
  end
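  # Example invocation (illustrative values; locales are colon-separated and should
  # match the site's in-use locales):
  #   bundle exec rake "universal_table_tasks:import[/tmp/entries.xlsx,TABLE_ID,en:zh_tw]"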
desc "Import entries from XLSX file"
task :import, [:file_path, :table_id, :site_locales] => :environment do |t, args|
def create_get_table_tags(entry,new_tags=nil,module_app)
updated_tags = []
new_tags.each do |tag_str|
tag_str = tag_str.gsub(/^#+/, '').downcase.strip
hashtag = module_app.hashtags.where(name: tag_str).first rescue nil
if hashtag.nil?
hashtag = Hashtag.new
hashtag.name = tag_str
hashtag.module_app_ids << module_app.id.to_s
hashtag.save
updated_tags << hashtag.id
else
entry.hashtags.include?(hashtag) || updated_tags << hashtag.id
end
end
entry.hashtags= updated_tags
end
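    # Expected spreadsheet layout, as read by the import loop below:
    #   row 1  - column titles (informational only)
    #   row 2  - column keys, matched against table.table_columns by :key
    #   row 3  - column types (text, editor, integer, image, file, date, period)
    #   row 4+ - one entry per row: column 1 holds the entry uid; text/editor
    #            values and file titles repeat once per site locale; the
    #            second-to-last column holds ";"-separated tags and the last
    #            column ";"-separated related entry uids.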
    file_path = args[:file_path]
    table_id = args[:table_id]
    # Locales arrive as a colon-separated list, e.g. "en:zh_tw".
    site_locales = args[:site_locales].to_s.split(":").map(&:to_sym)
    if !file_path || !File.exist?(file_path)
      puts "Invalid or missing file: #{file_path}"
      exit(1)
    end
    workbook = RubyXL::Parser.parse(file_path)
    sheet = workbook[0]
    # 3 header rows + at most 500 data rows per file.
    if sheet.count > 503
      puts "More than 500 entries. Please split the entries into separate files."
      exit(1)
    end
    table = UTable.find_by(id: table_id)
    unless table
      puts "Table not found with ID #{table_id}"
      exit(1)
    end
    column_titles = sheet[0].cells.map { |c| c.value.to_s.strip }
    column_keys = sheet[1].cells.map { |c| c.value.to_s.strip }
    column_types = sheet[2].cells.map { |c| c.value.to_s.strip }
    # Map each logical column position to its TableColumn record.
    columns = column_keys.uniq.map.with_index do |key, i|
      tc = table.table_columns.where(key: key).first
      [i, tc]
    end.to_h
    sheet.each_with_index do |row, i|
      # Blank data rows still advance the current_xlsx_value counter, then are skipped.
      if i >= 3 && (row.nil? || row.cells.all? { |c| c.nil? || c.value.to_s.strip.blank? })
        table.inc(current_xlsx_value: 1)
        next
      end
      next if i < 3 # Skip the three header rows.
      uid_val = row[0].value.to_s.strip rescue nil
      te = uid_val.present? ? TableEntry.where(uid: uid_val, u_table_id: table.id).first_or_initialize : TableEntry.new
      te.u_table = table
      skip = 0
      tc_idx = 0
      row.cells.each_with_index do |cell, col_idx|
        # Multi-cell columns set `skip` so the extra physical cells they have
        # already consumed are not treated as new logical columns.
        next if skip > 0 && (skip -= 1) >= 0
        val = cell && cell.value
        tc = columns[tc_idx]
        tc_idx += 1
        next if tc.nil?
        ce = te.column_entries.where(table_column_id: tc.id).first || ColumnEntry.new(table_column_id: tc.id)
        case tc.type
        when "text", "editor"
          # One cell per site locale, starting at the current column.
          v = {}
          site_locales.each_with_index do |locale, offset|
            v[locale.to_s] = row[col_idx + offset].value.to_s rescue ""
          end
          skip = site_locales.size - 1
          if tc.type == "text"
            ce.text_translations = v
          else
            ce.content_translations = v
          end
        when "integer"
          ce.number = val.present? ? val.to_i : nil
        when "image"
          ce.remote_image_url = val if val.present?
        when "file"
          # A cell of ";"-separated file URLs, followed by one cell of ";"-separated titles per locale.
          file_urls = val.to_s.split(";").map(&:strip)
          file_titles = {}
          skip = site_locales.size
          site_locales.each_with_index do |locale, locale_idx|
            file_titles[locale.to_s] = (row[col_idx + locale_idx + 1].value.to_s.split(";").map(&:strip) rescue [])
          end
          # Remember the files already attached so they can be removed once the new ones are in place.
          cef_ids = ce.column_entry_files.pluck(:id)
          file_urls.each_with_index do |remote_url, file_idx|
            next if remote_url.blank?
            file = ColumnEntryFile.new
            file.remote_file_url = remote_url
            titles = {}
            site_locales.each_with_index do |locale, locale_idx|
              titles[locale.to_s] = file_titles[locale.to_s][file_idx] rescue file.file.file.filename
            end
            file.file_title_translations = titles
            file.save!
            ce.column_entry_files << file
          end
          ColumnEntryFile.where(:id.in => cef_ids).destroy_all
        when "date"
          ce.date = val
        when "period"
          # Two cells: period start and period end.
          skip = 1
          ce.period_from = val
          ce.period_to = row[col_idx + 1].value rescue nil
        end
        ce.save!
        te.column_entries << ce
      end
      # The second-to-last cell holds ";"-separated tags, the last cell ";"-separated related entry uids.
      tags_text = row.cells[-2].value.to_s rescue ""
      module_app = ModuleApp.find_by_key("universal_table")
      create_get_table_tags(te, tags_text.split(";"), module_app) if row.cells.count >= 2
      related_uids = (row.cells[-1].value.to_s rescue "").split(";").map(&:strip)
      related_ids = TableEntry.where(:uid.in => related_uids).pluck(:id)
      te.related_entries = related_ids.join(",") if row.cells.count >= 1
      te.save!
      te.fix_have_data
      te.uid = uid_val if uid_val.present?
      te.save!
      table.inc(current_xlsx_value: 1)
    end
    # Cleanup: remove the uploaded file once the import is done.
    File.delete(file_path) if File.exist?(file_path)
    puts "Import finished. Total entries in table: #{table.table_entries.count}"
  end
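  # Example invocation (no arguments; migrates every TableTag to the hashtag system):
  #   bundle exec rake universal_table_tasks:migrate_hashtags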
desc "Migrate old tags to new tags"
task :migrate_hashtags => :environment do
module_app = ModuleApp.find_by_key("universal_table")
dataset = {}
TableTag.all.each do |tt|
next if tt.title.blank? # Skip if tag name is blank
next if tt.table_entries.empty? # Skip if no entries are associated with the tag
puts "Processing tag: #{tt.title}"
module_app.reload
tag_str = tt.title.gsub(/^#+/, '').downcase.strip
hashtag = module_app.hashtags.where(name: tag_str).first_or_initialize
hashtag.module_app_ids << module_app.id.to_s unless hashtag.module_app_ids.include?(module_app.id.to_s)
puts "New record #{hashtag.new_record?}"
hashtag.save if hashtag.new_record?
puts "Hashtag ID: #{hashtag.id} - #{hashtag.name}"
tt.table_entries.each do |te|
unless te.hashtags.include?(hashtag)
dataset[te] ||= []
dataset[te] << hashtag.id
end
end
end
dataset.each do |te, tag_ids|
puts "Updating TableEntry #{te.id} with tags: #{tag_ids.join(', ')}"
te.hashtags= tag_ids
te.save
end
end
end