107 lines
3.7 KiB
Ruby
107 lines
3.7 KiB
Ruby
# Mongoid model describing a feed of archive files selected by tag and/or
# category criteria. Rendered feed output is cached in ArchiveFileFeedCache
# documents keyed by this feed's uid.
class ArchiveFileFeed
  include Mongoid::Document
  include Mongoid::Timestamps
  include Slug

  # Localized feed title; also acts as the slug source via the Slug concern.
  field :title, as: :slug_title, type: String, localize: true
  # String ids of the Tags whose archive files belong to this feed.
  field :tag_ids, type: Array, default: []
  # String ids of the Categories whose archive files belong to this feed.
  field :category_ids, type: Array, default: []
  field :remote_urls, type: Array, default: []

  # Rebuild any cached output for this feed whenever the feed is saved, so
  # caches never serve stale tag/category criteria.
  # NOTE(review): `uid` is not defined in this file — presumably provided by
  # the Slug concern; confirm.
  before_save do
    ArchiveFileFeedCache.where(uid: self.uid).each do |cache|
      cache.regenerate
    end
  end
|
|
# Regenerate the feed caches affected by a change, in a background thread.
#
# Invoked with either argument (the other left nil):
# - +archive_file+: regenerate caches of every feed whose tag_ids intersect
#   the file's tags.
# - +archive_file_feed+: regenerate only that feed's caches.
#
# Fire-and-forget: errors inside the thread are not propagated to the caller.
#
# @param archive_file      [ArchiveFile, nil]
# @param archive_file_feed [ArchiveFileFeed, nil]
# @return [Thread] the worker thread (callers may join it if they wish)
def self.create_feed_cache(archive_file = nil, archive_file_feed = nil)
  Thread.new do
    if !archive_file.nil?
      tag_id_strings = Array(archive_file.tag_ids).map(&:to_s)
      # NOTE: the block variable is named `feed` (the original shadowed the
      # method parameter `archive_file_feed` here).
      ArchiveFileFeed.where(:tag_ids.in => tag_id_strings).each do |feed|
        regenerate_caches_for(feed.uid)
      end
    elsif !archive_file_feed.nil?
      regenerate_caches_for(archive_file_feed.uid)
    end
  end
end

# Regenerate every cache document stored under +uid+.
def self.regenerate_caches_for(uid)
  ArchiveFileFeedCache.where(uid: uid).each(&:regenerate)
end
private_class_method :regenerate_caches_for
|
|
# Generate this feed's cache content, optionally bounded by +timeout+ seconds.
#
# With +timeout+ nil the cache is generated inline. Otherwise the work runs
# in a worker thread that is waited on with Thread#join(timeout); on expiry
# the worker is killed and nil is returned. (The original wrapped a manual
# sleep(1) polling loop in Timeout::timeout — a redundant double timeout —
# and leaked the still-running worker thread when the deadline fired.)
#
# @param base_url [String, nil] forwarded to #generate_one_cache
# @param timeout  [Numeric, nil] seconds to wait; nil means unbounded
# @return [String, nil] the generated cache JSON, or nil on error/timeout
def generate_one_cache_timeout(base_url: nil, timeout: nil)
  return generate_one_cache(base_url: base_url) if timeout.nil?

  worker = Thread.new { generate_one_cache(base_url: base_url) }
  if worker.join(timeout)
    worker.value # re-raises any error from the thread; handled below
  else
    worker.kill # don't leak a running worker past the deadline
    nil
  end
rescue => e
  # Preserve the original contract: never raise, log and return nil.
  puts [e, e.backtrace]
  nil
end
|
|
# Build the JSON payload for this feed, replace any existing cache rows for
# its uid, and return the JSON string.
#
# The payload holds the feed's displayable archives (each serialized via
# ArchiveFile#get_data), plus the tag name / category title translations
# needed to render them.
#
# Removed from the original: the always-true `aff = self; if !aff.nil?`
# guard, an unused `category_ids = []` local, a `tag_ids = []` local that
# was immediately overwritten, an unused each_with_index index, and the
# `paginate=false` assignment-as-argument.
#
# @param base_url [String, nil] site root; defaults to Site.first.root_url
# @return [String] the JSON cache content
def generate_one_cache(base_url: nil)
  base_url ||= Site.first.root_url
  uid = self.uid
  # Snapshot the feed's criteria fields under non-shadowing names; the
  # original reused `tag_ids` as a local later, shadowing the field.
  feed_tags = tag_ids
  feed_categories = category_ids

  if feed_categories.empty? && feed_tags.empty?
    # A feed with neither tags nor categories matches nothing.
    archives = []
  else
    scope = ArchiveFile.can_display.local_data
    scope = scope.filter_by_tags(feed_tags) unless feed_tags.empty?
    scope = scope.filter_by_categories(feed_categories, false) unless feed_categories.empty?
    # Exclude records whose title is blank (empty or missing) in every
    # available locale, by AND-ing an $or clause into the raw selector.
    title_present = {
      "$or" => I18n.available_locales.map { |loc| { "title.#{loc}" => { "$nin" => ["", nil] } } }
    }
    scope.selector = { "$and" => [scope.selector, title_present] }
    archives = scope.sorted.to_a
  end

  # Distinct category/tag ids actually present in the matched archives.
  archive_cat_ids = archives.map { |a| a.category_id.blank? ? nil : a.category_id.to_s }.compact.uniq
  archive_tag_ids = archives.flat_map { |a| a.tag_ids.map { |v| v.blank? ? nil : v.to_s } }.compact.uniq

  # Inline rescues preserve the original best-effort lookups: ids whose
  # Tag/Category no longer exists are silently skipped.
  tag_names = archive_tag_ids.map { |id| Tag.find(id).name_translations rescue nil }.compact
  category_titles = archive_cat_ids.map { |id| Category.find(id).title_translations rescue nil }.compact

  # Find the archive Page best matching this feed's criteria for the
  # "more" link; fall back to any enabled archive page.
  basic_query = { :module => 'archive', :enabled_for => I18n.locale }
  query = basic_query.clone
  query = query.merge(:categories.all => archive_cat_ids) unless archive_cat_ids.blank?
  query = query.merge(:tags.all => archive_tag_ids) unless archive_tag_ids.blank?
  page = Page.where(query).first || Page.where(basic_query).first
  more_url = page ? page.get_url : nil

  locales = Site.first.in_use_locales rescue I18n.available_locales

  all_archives = archives.map do |archive|
    archive.get_data(more_url, base_url, archive_cat_ids, archive_tag_ids, locales)
  end

  content = {
    "archives" => all_archives,
    "tags" => tag_names,
    "categories" => category_titles
  }.to_json

  # Replace stale cache rows: destroy everything under this uid, then
  # create a single fresh cache document.
  ArchiveFileFeedCache.where(uid: uid).destroy
  ArchiveFileFeedCache.create(uid: uid, content: content)

  content
end
|
|
end |