event_news/app/models/event_news_feed.rb

require 'timeout' # stdlib; used by #generate_one_cache_timeout

class EventNewsFeed
  include Mongoid::Document
  include Mongoid::Timestamps
  include Slug

  field :title, as: :slug_title, type: String, localize: true
  field :tag_ids, type: Array, default: []
  field :category_ids, type: Array, default: []
  field :remote_urls, type: Array, default: []

  # Whenever the feed definition changes, rebuild every cache row that
  # shares this feed's uid so stale JSON is never served.
  before_save do
    EventNewsFeedCache.where(uid: self.uid).each(&:regenerate)
  end
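
  # A minimal sketch (assumed behavior, not taken from this repo): saving a
  # feed after editing its tag/category lists is enough to refresh its cached
  # JSON, because the callback above touches every EventNewsFeedCache row
  # that carries the same uid.
  #
  #   feed = EventNewsFeed.find_by(uid: some_uid)  # `some_uid` is illustrative
  #   feed.category_ids << new_category_id.to_s
  #   feed.save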
  # Regenerates feed caches in a background thread. Given an EventNews
  # record, every feed that shares one of its tags is refreshed; given a
  # single EventNewsFeed, only that feed's caches are refreshed.
  def self.create_feed_cache(event_news = nil, event_news_feed = nil)
    Thread.new do
      if !event_news.nil?
        self.where(:tag_ids.in => Array(event_news.tag_ids).map(&:to_s)).each do |feed|
          EventNewsFeedCache.where(uid: feed.uid).each(&:regenerate)
        end
      elsif !event_news_feed.nil?
        EventNewsFeedCache.where(uid: event_news_feed.uid).each(&:regenerate)
      end
    end
  end
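
  # Usage sketch (call sites assumed, not verified against the codebase):
  # pass an EventNews record to refresh every feed sharing its tags, or
  # pass a feed as the second positional argument to refresh just that feed.
  #
  #   EventNewsFeed.create_feed_cache(news_record)        # by shared tags
  #   EventNewsFeed.create_feed_cache(nil, feed_record)   # one feed only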
  # Builds the cache under a hard time limit (in seconds). Returns the
  # cached JSON content, or an empty string on timeout or error.
  def generate_one_cache_timeout(startdt: nil, enddt: nil, dt: nil, base_url: nil, timeout: nil)
    timeout = 100_000 if timeout.nil?
    Timeout.timeout(timeout) do
      feed_cache = nil
      Thread.new do
        feed_cache = self.generate_one_cache(startdt: startdt, enddt: enddt, dt: dt, base_url: base_url)
      end
      # Poll once per second until the worker thread has produced a result
      # or the timeout window elapses.
      (1..(timeout.to_i + 1)).each do
        sleep(1)
        break unless feed_cache.nil?
      end
      feed_cache ? feed_cache.content : ""
    end
  rescue => e
    puts [e, e.backtrace]
    ""
  end
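
  # Example (hypothetical values; date strings must be parseable by
  # DateTime.parse): render one day's feed, giving up after 30 seconds.
  #
  #   json = feed.generate_one_cache_timeout(dt: "2024-01-01", timeout: 30)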
  # Assembles the feed payload for this feed's tags/categories within the
  # requested date window and stores it as an EventNewsFeedCache row.
  def generate_one_cache(startdt: nil, enddt: nil, dt: nil, base_url: nil)
    base_url = Site.first.root_url if base_url.nil?
    uid = self.uid
    tags = self.tag_ids
    categories = self.category_ids
    # A feed with neither tags nor categories matches nothing; bail out.
    return if categories.empty? && tags.empty?

    # Start from approved, visible news that have a title in at least one locale.
    anns_before_filter = EventNews.any_of(I18n.available_locales.map { |v| { "title.#{v}" => { "$nin" => ["", nil] } } }).is_approved_and_show

    # Narrow the postdate window: a single day (dt), an open-ended range
    # (startdt only), or an explicit range (startdt..enddt, end inclusive).
    if !dt.nil?
      dt = DateTime.parse(dt)
      dtt = dt + 1.day
      anns_before_filter = anns_before_filter.where(:postdate.gt => dt, :postdate.lt => dtt)
    elsif !startdt.blank? && enddt.blank?
      startdt = DateTime.parse(startdt)
      enddt = DateTime.now
      anns_before_filter = anns_before_filter.where(:postdate.gt => startdt, :postdate.lt => enddt)
    elsif !startdt.blank? && !enddt.blank?
      startdt = DateTime.parse(startdt)
      enddt = DateTime.parse(enddt) + 1.day
      anns_before_filter = anns_before_filter.where(:postdate.gt => startdt, :postdate.lt => enddt)
    end

    anns_before_filter = anns_before_filter.filter_by_tags(tags) if !tags.empty?
    anns_before_filter = anns_before_filter.filter_by_categories(categories) if !categories.empty?
    announcements = anns_before_filter.can_display_and_sorted

    all_anns = []
    first_postdate = anns_before_filter.open_in_future.limit(1).pluck(:postdate)[0]
    first_deadline = nil

    # Collect the distinct category/tag ids actually present in the result
    # set, then resolve their localized names for the payload. `rescue nil`
    # skips ids whose Tag/Category record has been deleted.
    cat_ids = announcements.map { |a| a.category_id.blank? ? nil : a.category_id.to_s }.compact.uniq
    tag_ids = announcements.map { |a| a.tag_ids.map { |v| v.blank? ? nil : v.to_s } }.flatten.compact.uniq
    tag_names = tag_ids.map { |tag_id| Tag.find(tag_id).name_translations rescue nil }.compact
    category_titles = cat_ids.map { |cat_id| Category.find(cat_id).title_translations rescue nil }.compact

    # Find the page that best matches these categories/tags to build the
    # "more" link; fall back to any page that hosts the module.
    basic_query = { :module => 'event_news_mod', :enabled_for => I18n.locale }
    query = cat_ids.blank? ? basic_query.clone : basic_query.merge(:categories.all => cat_ids)
    query = query.merge(:tags.all => tag_ids) if !tag_ids.blank?
    page = Page.where(query).first || Page.where(basic_query).first
    more_url = page ? page.get_url : nil

    announcements.each do |anns|
      deadline = anns.deadline
      if !deadline.blank? && (first_deadline.nil? || first_deadline > deadline)
        first_deadline = deadline
      end
      all_anns << anns.get_data(more_url, base_url, cat_ids, tag_ids)
    end

    # The cache becomes stale at the earliest future event: the next
    # scheduled post or the nearest deadline.
    invalid_date = [first_postdate, first_deadline].compact.min
    anns = {
      "event_news_mods" => all_anns,
      "tags" => tag_names,
      "categories" => category_titles
    }.to_json
    EventNewsFeedCache.create(uid: uid, content: anns, start: startdt, end: enddt, date: dt, invalid_date: invalid_date)
  end
end
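
# A consumer sketch (hypothetical controller; the route, names, and the
# slug_title lookup are assumptions, not part of this file): serve the
# cached JSON when a cache row exists, otherwise build it on demand under
# the timeout guard.
#
#   class EventNewsFeedsController < ApplicationController
#     def show
#       feed = EventNewsFeed.find_by(slug_title: params[:slug])
#       cache = EventNewsFeedCache.where(uid: feed.uid).first
#       json = cache ? cache.content : feed.generate_one_cache_timeout(timeout: 30)
#       render json: json
#     end
#   end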