# archive/app/models/archive_file.rb

# encoding: utf-8
class ArchiveFile
include Mongoid::Document
include Mongoid::Timestamps
include OrbitCategory::Categorizable
include OrbitModel::Status
include OrbitTag::Taggable
include Slug
require 'archive_model/cache'
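# If the require above did not define ArchiveModel (presumably a stale entry in
# $LOADED_FEATURES left over from code reloading), drop any leftover constant,
# forget the cached require and load the file again.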
unless defined?(ArchiveModel)
Object.send(:remove_const, 'ArchiveModel') rescue nil
$LOADED_FEATURES.select!{|p| !p.include? 'archive_model'}
require 'archive_model/cache'
end
include ::ArchiveModel::Cache
SubPart.class_eval { include ::ArchiveModel::Cache }
Page.class_eval { include ::ArchiveModel::Cache }
index({tmp_sort_number: 1})
scope :sorted, ->{order(tmp_sort_number: :asc)}
attr_accessor :org_tag_ids,:org_category_id
def tags=(ids)
self.org_tag_ids = self.tag_ids
super(ids)
end
def category=(cat)
self.org_category_id = self.category_id
super(cat)
end
def tag_ids=(ids)
self.org_tag_ids = self.tag_ids
super(ids)
end
def category_id=(cat_id)
self.org_category_id = self.category_id
super(cat_id)
end
def []=(index,value)
if index.to_s=='tags' || index.to_s=='tag_ids'
self.org_tag_ids = self.tag_ids
elsif index.to_s=='category' || index.to_s=='category_id'
self.org_category_id = self.category_id
end
super(index,value)
end
# include Tire::Model::Search
# include Tire::Model::Callbacks
# BelongsToCategory = :archive_file_category
# PAYMENT_TYPES = @site_valid_locales
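# Ensure an ArchiveCategory record exists for this file's category so the
# category carries its own sort_number.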
before_save do
unless @skip_callback
cat = self.category rescue nil
if cat && ArchiveCategory.where(:category_id => cat.id).count==0
ArchiveCategory.create(category_id: cat.id.to_s,sort_number: cat.sort_number)
end
end
end
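# Recompute tmp_sort_number (the cached display position used by the :sorted
# scope) whenever sort_number changes or has not been assigned yet.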
after_save do
unless @skip_callback
if self.sort_number_changed?
self.class.recalc_sort_number
elsif self.tmp_sort_number.nil?
if self.is_top
self.tmp_sort_number = (ArchiveSortOrder.first.max_sort_number.to_i + 1 rescue 1)
@skip_callback = true
self.save
@skip_callback = false
else
self.class.recalc_sort_number
end
end
end
end
field :title, as: :slug_title, localize: true
field :description, localize: true
field :urls, localize: true, type: Array
field :url, localize: true #Old field
field :url_texts, localize: true, type: Array, default: []
field :create_user_id
field :update_user_id
field :postdate , :type => DateTime, :default => Time.now
field :deadline , :type => DateTime
field :uid, type: String
field :sort_number, type: Integer
field :tmp_sort_number, type: Integer
field :rss2_sn
field :feed_uid
field :site_feed_id
field :feed_data, :type => Hash, :default => {}
# scope :can_display,where(is_hidden: false)
scope :local_data, ->{where(:site_feed_id=>nil)}
scope :can_display, ->{where(is_hidden: false,:title.nin=>["",nil])}
scope :can_display_and_sorted, ->{can_display.sorted}
# belongs_to :archive_file_category
has_many :archive_file_multiples, :autosave => true, :dependent => :destroy
accepts_nested_attributes_for :archive_file_multiples, :allow_destroy => true
# validates :title, :at_least_one => true
after_save :save_archive_file_multiples, :update_tmp_sort_number
before_save :add_http
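# Migrate the legacy single `url` field into the `urls` array the first time an
# old record is loaded.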
after_initialize do
unless self.new_record?
if self.urls.nil? && self.url.present?
self.urls_translations = self.url_translations.map{|k,v| [k,[v]]}.to_h
self.save
end
end
end
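# Recalculate positions only when ArchiveSortOrder has flagged that an update is needed.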
def self.check_sort_number
archive_sort_order = ArchiveSortOrder.first
if archive_sort_order && archive_sort_order.need_update_sort
self.recalc_sort_number(archive_sort_order.sort_order)
end
end
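# Rebuild tmp_sort_number for every record: top items first, then by sort_number
# in the configured direction (ascending when sort_order is truthy), newest first
# as a tie-breaker. Also refreshes the min/max sort numbers on ArchiveSortOrder.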
def self.recalc_sort_number(sort_order=nil)
if sort_order.nil?
sort_order = (ArchiveSortOrder.first['sort_order'] rescue false) ? 1 : -1
end
sorted_archives = self.all.to_a.sort_by{|k| [(k["is_top"] ? 0 : 1) ,(k["sort_number"].nil? ? Float::INFINITY : sort_order * k["sort_number"].to_i),-k["created_at"].to_i]}
sorted_archives.each_with_index do |a, i|
a.instance_variable_set(:@skip_callback, true)
self.where(:id=>a.id).update_all(:tmp_sort_number => i)
end
min_sort_number, max_sort_number = (sorted_archives.count != 0) ? (sorted_archives.map{|a| a.sort_number.to_i}.minmax) : [0, 0]
ArchiveSortOrder.update_all(:min_sort_number=>min_sort_number,:max_sort_number=>max_sort_number,:need_update_sort=>false)
self.create_indexes
return max_sort_number
end
def update_tmp_sort_number
return if @skip_callback
if self.new_record? || self.sort_number_changed?
self.class.recalc_sort_number
end
end
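# Returns the link text for the idx-th URL, falling back to the title unless org is true.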
def get_url_text(idx=0,org=false)
url_text = self.url_texts[idx] rescue nil
if org
url_text
else
url_text.present? ? url_text : self.title
end
end
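# Builds a plain hash of this record (author, title, description, tags, category,
# attached files, show_url) for feed output and cross-site syncing.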
def get_data(more_url=nil, base_url=nil, cat_ids=nil, tag_ids=nil, locales=nil)
if locales.nil?
locales = Site.first.in_use_locales rescue I18n.available_locales
end
locale = I18n.locale # no locale argument is passed in; default to the current locale
base_url = Site.first.root_url if base_url.nil?
user = User.find(self.create_user_id) rescue nil
a = {}
if !user.nil?
author_translations = user.member_name_translations
locales.each do |locale|
if author_translations[locale].blank?
author_translations[locale] = user.user_name
end
end
else
author_translations = {}
end
a["author_translations"] = author_translations
if more_url.nil?
if cat_ids.nil?
cat_ids = [self.category_id]
end
if tag_ids.nil?
tag_ids = self.tag_ids
end
basic_query = {:module => 'archive',:enabled_for=>locale}
if !cat_ids.blank?
query = basic_query.merge({:categories.all => cat_ids})
else
query = basic_query.clone
end
if !tag_ids.blank?
query = query.merge({:tags.all => tag_ids})
end
page = Page.where(query).first || Page.where(basic_query).first
more_url = page ? page.get_url : nil
end
if more_url
a['show_url'] = "#{more_url}/#{self.to_param}"
end
a["id"] = self.uid
a["title_translations"] = self.title_translations
a["description_translations"] = {}
description_translations = self.description_translations
locales.each do |locale|
locale = locale.to_s
a["description_translations"][locale] = self.class.smart_convertor(description_translations[locale],base_url) if !description_translations[locale].blank?
end
a["created_at"] = self.created_at
a["url_translations"] = self.url_translations
a["tags"] = []
a["category"] = {}
a["files"] = []
a["params"] = self.to_param
self.tags.each do |tag|
if !tag_ids.include?(tag.id.to_s)
tag_ids << tag.id.to_s
end
a["tags"] << {"name_translations" => tag.name_translations}
end
cat = self.category
a["sort_number"] = nil
a["category"] = {"title_translations" => (cat.title_translations rescue {})}
self.archive_file_multiples.order_by(:sort_number=>'desc').each do |file|
if file.choose_lang.include?(I18n.locale.to_s)
title_translations = {}
locales.each do |locale|
title_translations[locale] = (file.file_title_translations[locale].blank? ? File.basename(file.file.path) : file.file_title_translations[locale]) rescue ""
end
extension = file.file.file.extension.downcase rescue ""
a["files"] << {
"file-name_translations" => title_translations,
"file-type" => extension,
"file-url" => (file.file.present? ? "#{base_url}/xhr/archive/download?file=#{file.id}" : "javascript:void(0)")
}
end
end
return a
end
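# Feeds whose category and tag filters match this record.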
def get_related_feeds
ArchiveFileFeed.where({:category_ids=>self.category_id.to_s, :tag_ids.in=>self.tag_ids.map(&:to_s)}).to_a
end
def update_feed_statuses(statuses) #ex: statuses = {:is_top=>1}
self.update(statuses)
end
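# Stores data received from a remote feed: feed-specific keys are moved into
# feed_data and the remote id becomes feed_uid; callbacks are skipped while saving.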
def update_feed_data(data)
data = data.clone
feed_data_keys = ["author_translations", "tags", "category", "files", "params", "show_url"]
data["feed_data"] = {}
data["feed_uid"] = data["id"]
data.delete("id")
feed_data_keys.each do |k|
data["feed_data"][k] = data[k]
data.delete(k)
end
@skip_callback = true
# ArchiveSortOrder.update_all(:need_update_sort=>true)
self.update(data)
@skip_callback = nil
end
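# Called when a feed sync finishes. For non-destroy actions it upserts one record
# per remote entry (when action is "update_all"), removes local copies that
# disappeared from the feed, and rebuilds sort numbers; the archive cache is
# always cleared.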
def self.feeds_finish_callback(action="update", args={})
if action != "destroy"
if action == "update_all"
data = args["data"]
if data
site_feed_id = args["feed_id"]
category_id = args["category_id"]
data.each do |a|
a["category_id"] = category_id if category_id
archive_file = self.where(:feed_uid=> a["id"], :site_feed_id=>site_feed_id).first
if archive_file.nil?
archive_file = self.new(:feed_uid=> a["id"], :site_feed_id=>site_feed_id)
archive_file.instance_variable_set(:@skip_callback, true)
archive_file.save
end
archive_file.update_feed_data(a)
end
self.where(:feed_uid.nin=>data.map{|a| a["id"]}, :site_feed_id=>site_feed_id).destroy
end
end
self.recalc_sort_number
end
ArchiveCache.destroy_all
end
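# Collects the downloadable files (or feed-supplied file data) plus any extra URLs
# as a list of hashes for the templates; serial_number keeps numbering continuous
# across records.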
def get_files(locale=nil, serial_number=0)
if locale.nil?
locale = I18n.locale.to_s
end
files = []
if self.feed_uid
files = self.feed_data["files"].map do |h|
serial_number += 1
h = h.clone
h["file-name"] = h["file-name_translations"][locale]
h.delete("file-name_translations")
h["serial_number"] = serial_number
h
end
else
self.archive_file_multiples.order_by(:sort_number=>'desc').each do |file|
if file.choose_lang.include?(locale)
title = (file.file_title.blank? ? File.basename(file.file.path) : file.file_title) rescue ""
extension = file.file.file.extension.downcase rescue ""
serial_number += 1
files << {
"file-name" => title,
"file-type" => extension,
"file-url" => (file.file.present? ? "/xhr/archive/download?file=#{file.id}" : 'javascript:void(0)'),
"target" => "_blank",
"serial_number" => serial_number
}
end
end
if self.urls.present?
self.urls.each_with_index do |url,i|
serial_number += 1
target = (url.match(/\/[^\/]/) ? '_self' : '_blank')
files << {
"file-name" => self.get_url_text(i),
"file-type" => 'link',
"file-url" => url + "\" data-target=\"#{target}",
"target" => target,
"serial_number" => serial_number
}
end
end
end
[files, serial_number]
end
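# Data hash consumed by the archive widget templates.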
def get_widget_data(locale=nil, serial_number=0, idx=0, show_tags=false, more_url=nil)
created_at_int = self.created_at.strftime('%Y%m%d').to_i
statuses = self.statuses_with_classname.collect do |status|
{
"status" => status["name"] || "",
"status-class" => "status-#{status['classname']}"
}
end
files, serial_number = self.get_files(locale, serial_number)
if more_url.nil?
more_url = OrbitHelper.widget_more_url
end
archive_url = self.get_archive_url(locale, more_url)
data = {
"archive-file-url" => (files.count != 0 ? files[0]["file-url"] : "javascript:void"),
"archive-title" => self.title,
"description" => self.description,
"created_at" => created_at_int,
"archive-url" => self.url,
"archive_url" => archive_url,
"statuses" => statuses,
"files" => files,
"idx" => (idx + 1)
}
if show_tags
data["tags"] = self.tags.map do |tag|
{"name"=>tag.name, "url"=>more_url.to_s + "?tags[]=#{tag.id}"}
end
end
return [data, serial_number, idx]
end
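# Data hash for the frontend index templates; like get_widget_data but keyed on
# updated_at and including the index_url.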
def get_frontend_data(locale=nil, serial_number=0, idx=0, show_tags=false, more_url=nil)
updated_at_int = self.updated_at.strftime('%Y%m%d').to_i
statuses = self.statuses_with_classname.collect do |status|
{
"status" => status["name"] || "",
"status-class" => "status-#{status['classname']}"
}
end
files, serial_number = self.get_files(locale, serial_number)
data = {
"archive-file-url" => (files.count != 0 ? files[0]["file-url"] : "javascript:void"),
"archive-title" => self.title,
"description" => self.description,
"created_at" => updated_at_int,
"archive-url" => self.url,
"url" => self.url,
"statuses" => statuses,
"files" => files,
"idx" => (idx + 1)
}
if show_tags
data["tags"] = self.tags.map do |tag|
{"name"=>tag.name, "url"=>more_url.to_s + "?tags[]=#{tag.id}"}
end
end
if more_url
data["index_url"] = more_url
end
return [data, serial_number, idx]
end
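# Single link for a record: the one file/URL when there is exactly one, the show
# page (more_url?title=...) when there are several, or javascript:void when none.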
def get_archive_url(locale, more_url)
archive_url = ""
if self.feed_uid
files = self.feed_data["files"]
if files.count == 0
tmp_urls = self.urls
archive_url = ((tmp_urls.nil? || tmp_urls.count == 0) ? 'javascript:void' : (tmp_urls.count > 1 ? (more_url + '?title=' + self.title.to_s) : tmp_urls[0]))
else
archive_url = files.count > 1 ? (more_url + '?title=' + self.title.to_s) : files[0]["file-url"]
end
else
if self.archive_file_multiples.count==0
tmp_urls = self.urls
archive_url = ((tmp_urls.nil? || tmp_urls.count == 0) ? 'javascript:void' : (tmp_urls.count > 1 ? (more_url + '?title=' + self.title.to_s) : tmp_urls[0]))
else
archive_url = self.archive_file_multiples.count > 1 ? (more_url + '?title=' + self.title.to_s) : "/xhr/archive/download?file=#{self.archive_file_multiples.first.id}"
end
end
archive_url
end
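# Pushes this record's data to every remote site subscribed through a related feed.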
def notify_feed(type="create")
related_feeds = self.get_related_feeds.select{|feed| feed.remote_urls.count != 0}
if related_feeds.count != 0
archive_data = self.get_data
if type == "destroy"
tmp_data = {'type'=>'destroy', 'data'=>[self.uid]}
else
tmp_data = {'type'=>type, 'data'=>[archive_data.to_json]}
end
request = Net::HTTP::Post.new('/xhr/feeds/notify_change', 'Content-Type' => 'application/json')
related_feeds.each do |feed|
tmp_data['uid'] = feed.uid
request.body = tmp_data.to_json
feed.remote_urls.each do |remote_url|
uri = URI(remote_url)
http_req = Net::HTTP.new(uri.host, uri.port)
if remote_url.include?('https')
http_req.use_ssl = true
end
response = self.class.http_request( http_req , request )
end
end
end
end
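# Notifies every remote subscriber that the given record uids were destroyed,
# limited to the ids each feed actually published.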
def self.notify_feed_delete(ids)
all_feeds = ArchiveFileFeed.all.select{|feed| feed.remote_urls.count != 0}
if all_feeds.count != 0
tmp_data = {'type'=>'destroy'}
request = Net::HTTP::Post.new('/xhr/feeds/notify_change', 'Content-Type' => 'application/json')
all_feeds.each do |feed|
feed_uid = feed.uid
feed_cache = ArchiveFileFeedCache.where(:uid=>feed_uid).first
if feed_cache
tmp_data['uid'] = feed_uid
tmp_data['data'] = ids & JSON.parse(feed_cache.content)["archives"].map{|a| a["id"]}
request.body = tmp_data.to_json
if tmp_data['data'].count != 0
feed.remote_urls.each do |remote_url|
uri = URI(remote_url)
http_req = Net::HTTP.new(uri.host, uri.port)
if remote_url.include?('https')
http_req.use_ssl = true
end
response = self.http_request( http_req , request )
end
end
end
end
end
end
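# Performs the request and recursively follows 301/302 redirects, switching to
# SSL when the new location is https.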
def self.http_request(http, request)
response = http.request(request)
if response.code.to_i == 301 || response.code.to_i == 302
location = response["location"]
new_uri = URI(location)
http = Net::HTTP.new(new_uri.host, new_uri.port)
if location.include?('https')
http.use_ssl = true
http.verify_mode = OpenSSL::SSL::VERIFY_PEER
end
request.instance_variable_set(:@path, new_uri.path)
response = self.http_request(http, request)
end
response
end
protected
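# before_save: prefix bare URLs with http:// for every in-use locale.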
def add_http
return if @skip_callback
in_use_locales = Site.first.in_use_locales
temp_urls = {}
in_use_locales.each do |locale|
locale = locale.to_s
temp_urls[locale] = []
self.urls_translations[locale].each do |tmp_url|
tmp = tmp_url
if tmp.present?
# prepend a scheme unless the URL already has one or is a mailto: link
unless /^(http|https|ftp):\/\/[\S]+/.match(tmp_url) || tmp_url.include?("mailto:")
tmp = 'http://' + tmp_url
end
temp_urls[locale] << tmp
end
end rescue nil
end
self.urls_translations = temp_urls
end
# def to_indexed_json
# self.to_json
# end
# search_in :title
# searchable do
# text :titles do
# title_translations.to_a.collect{|t| t[1]}
# end
# boolean :frontend_search do
# !is_hidden
# end
# end
# def self.search( category_id = nil )
# if category_id.to_s.size > 0
# find(:all, :conditions => {archive_file_category_id: category_id}).desc( :is_top, :title )
# else
# find(:all).desc( :is_top, :title)
# end
# end
def self.find_by_param(input)
self.find_by(uid: input)
end
def self.widget_datas
where( :is_hidden => false ).desc(:is_top, :title)
end
def get_file_icon( file_data )
file_icon = "<span class=\"o-archives-file-type\">#{file_data.split('.')[-1]}</span>".html_safe
end
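# after_save: remove attached files that were presumably flagged for deletion in the form.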
def save_archive_file_multiples
return if @skip_callback
self.archive_file_multiples.each do |t|
if t.should_destroy
t.destroy
end
end
end
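# Rewrites relative href/src attributes in the given HTML into absolute URLs based
# on url, then returns the body's inner HTML.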
def self.smart_convertor(text,url)
doc = Nokogiri.HTML(text)
doc.search('a[href]').each do |link|
if link['href'].nil?
link.delete 'href'
elsif link['href'].start_with?('/')
link['href'] = url + link['href']
elsif link['href'].start_with?('../')
link['href'] = url + link['href'][3..-1]
end
end
doc.search('img[src]').each do |link|
if link['src'].nil?
link.delete 'src'
elsif link['src'].start_with?('/')
link['src'] = url + link['src']
elsif link['src'].start_with?('../')
link['src'] = url + link['src'][3..-1]
end
end
return doc.css('body').inner_html
end
end