# feeds/app/models/site_feed.rb

# Mongoid model for a remote announcement feed this site subscribes to.
#
# Responsibilities visible in this file:
# * caching fetched feed data as JSON files under public/site_feeds/<id>/
# * installing a crontab entry that rebuilds feed caches every 5 minutes
# * notifying the remote site when notification-capable (non-RSS) feeds
#   are created or destroyed (add_notify / remove_notify)
# * converting an RSS channel into the internal announcement JSON shape
class SiteFeed
  include Mongoid::Document
  include Mongoid::Timestamps

  # Field names copied from a module-app registration when syncing.
  ModuleAppSyncFields = ["feeds_model", "feeds_uid_field", "feeds_update_callback", "feeds_time_field", "feeds_finish_callback", "feeds_update_statuses_callback"]

  field :remote_site_url
  field :merge_with_category
  field :channel_name
  field :channel_title, :localize => true
  field :channel_key
  field :feed_name, localize: true
  field :disabled, type: Boolean, default: false
  field :feed_url
  field :feed_uid
  field :feeds_model
  field :feeds_uid_field
  field :feeds_update_callback
  field :feeds_time_field, type: String, default: "postdate"
  field :feeds_finish_callback
  field :feeds_update_statuses_callback
  field :enable_notify, type: Boolean, default: false
  field :is_rss, type: Boolean, default: false

  require 'feed_model/cache'
  require 'fileutils'
  include FeedModel::Cache
  # NOTE(review): monkey-patches Category as a load-time side effect of
  # this model — consider including the mixin in Category itself.
  Category.send(:include,FeedModel::Cache)

  after_create do
    # Non-RSS feeds use a push-notification handshake with the remote site.
    if !self.is_rss
      self.add_notify
    end
    # Ensure a crontab entry exists that rebuilds feed caches every
    # 5 minutes for this site root.  `crontab -l` exits non-zero when no
    # crontab is installed yet, hence the rescue to an empty list.
    crontab_list = `crontab -l`.split("\n") rescue []
    site_root = Rails.root.to_s
    if crontab_list.select{|s| s.include?(site_root) && s.include?("feeds_module:make_cache")} == []
      `(crontab -l ; echo "*/5 * * * * /bin/bash -l -c 'cd #{site_root} && bundle exec rake feeds_module:make_cache > /dev/null'") | crontab`
    end
  end

  before_destroy do
    # Tell the remote site to stop notifying us before the record goes away.
    if !self.is_rss
      self.remove_notify
    end
    # Drop the cached JSON directory and any synced announcement snapshot.
    tmp = SiteFeedAnnc.where(feed_id: self.id)
    main_directory = File.join("#{Rails.root}","public","site_feeds")
    feed_directory = File.join(main_directory.to_s, self.id.to_s)
    FileUtils.rm_rf(feed_directory.to_s)
    if tmp.count!=0
      tmp.destroy
    end
  end

  after_save do
    # NOTE(review): changes are only propagated to the snapshot when
    # @skip_callback is truthy — the name suggests the opposite polarity;
    # verify against whatever code sets @skip_callback / @skip_fix_data.
    if @skip_callback
      unless @skip_fix_data
        update_url = self.remote_site_url_changed?
        if self.channel_title_changed?
          self.update_channel_title(update_url)
        elsif update_url
          self.update_remote_site_url
        end
      end
    end
    @site_feed_annc = nil # invalidate the memoized SiteFeedAnnc lookup
  end

  scope :enabled, ->{where(:disabled => false)}

  # Memoized lookup of the synced announcement snapshot for this feed.
  def site_feed_annc
    @site_feed_annc ||= SiteFeedAnnc.where(:feed_id=>self.id).first
  end

  # update_url=true will also fix remote_site_url in the snapshot data.
  def update_channel_title(update_url=false)
    site_feed_annc.update_channel_title(update_url) if site_feed_annc
  end

  def update_remote_site_url
    site_feed_annc.update_remote_site_url if site_feed_annc
  end

  # Copies this feed's denormalized fields onto its SiteFeedAnnc
  # snapshot(s) and saves them.  When site_feed_annc is nil, every
  # snapshot with this feed_id is updated.
  def sync_data_to_annc(site_feed_annc=nil)
    category_title = self.category.title_translations rescue {}
    tmp_channel_title = self.channel_title_for_cache
    clone_fields =["channel_key", "merge_with_category", "remote_site_url", "feeds_model", "feeds_uid_field", "feeds_update_callback", "feeds_time_field", "feeds_update_statuses_callback"]
    (site_feed_annc ? [site_feed_annc] : SiteFeedAnnc.where(:feed_id=>self.id)).each do |tmp|
      clone_fields.each do |f|
        tmp.send("#{f}=", self.send(f))
      end
      tmp[:feed_name] = self.feed_name_translations
      tmp.category_title = category_title
      tmp.channel_title = tmp_channel_title
      tmp.save
    end
  end

  # Returns the cached announcements array for this feed, refetching from
  # feed_url (and rewriting the cache file) when force_refresh is true or
  # no cache directory exists yet.  Returns [] on any parse/fetch failure.
  def get_annc(force_refresh=false)
    # NOTE(review): nested def — this (re)defines to_I18n as an instance
    # method on every call; kept as-is for behavior compatibility.
    # Maps a single value to a {locale => value} hash for all locales,
    # optionally wrapped under `key`.
    def to_I18n(title, have_key=false, key="title_translations")
      value = I18n.available_locales.map{|v| [v,title]}.to_h
      if have_key
        {
          key => value
        }
      else
        value
      end
    end
    main_directory = File.join("#{Rails.root}","public","site_feeds")
    feed_directory = File.join(main_directory.to_s, self.id.to_s)
    # FIX: File.exists? was removed in Ruby 3.2; File.exist? is equivalent.
    if !force_refresh && File.exist?(feed_directory)
      anns = JSON.parse(File.read(File.join(feed_directory.to_s, self.feed_uid + ".json")))[self.channel_key.pluralize] rescue []
    else
      uri = URI(self.feed_url)
      res = get_response_body(uri) rescue ''
      if self.is_rss
        # Convert the RSS channel into the internal announcement JSON shape.
        rss_data = Hash.from_xml(res)
        category_title = rss_data['rss']['channel']['title']
        category_title_trans = to_I18n(category_title, true)
        res_to_json = rss_data['rss']['channel']["item"].collect do |item|
          tag_titles = Array(item['類別']).collect{|title| to_I18n(title, true, "name_translations")}
          id = item['link'].split("&s=")[-1]
          title = Array(item['title'])[0]
          text = Array(item['description'])[0]
          files = []
          # '附件' ("attachments"): scrape the linked page for file links.
          if text.include?('附件')
            files = Nokogiri::HTML(get_response_body(URI(item['link']))).search('#RelData1_liFile ol a').collect do |v|
              file_title = v.attr('title')
              {
                "description_translations" => to_I18n(file_title),
                "title_translations" => to_I18n(file_title.sub(/^\[(?:(?!\]).)*\]/,'').rpartition(".")[0]),
                "url" => v.attr('href')
              }
            end
          end
          {
            "show_url" => item['link'],
            "org_is_top" => 0,
            "id" => id,
            "title_translations" => to_I18n(title),
            "subtitle_translations" => {},
            "text_translations" => to_I18n(text),
            "postdate" => DateTime.parse(item['pubDate']).to_s,
            "image_description_translations" => {},
            "image" => {},
            "display_img" => false,
            "tags" => tag_titles,
            "category" => category_title_trans,
            "author" => item['author'],
            "params" => "#{title}-#{id}",
            "bulletin_links" => [],
            "bulletin_files" => files,
            "bulletin_carousel_images" => [],
            "external_link" => nil
          }
        end
        res = JSON.dump({"announcements" => res_to_json})
      end
      # FIX: File.exists? was removed in Ruby 3.2; File.exist? is equivalent.
      FileUtils.mkdir_p(feed_directory) if !File.exist?(feed_directory)
      File.open(File.join(feed_directory.to_s,self.feed_uid + ".json"),"w") do |file|
        res.force_encoding("utf-8")
        file.write(res)
      end
      anns = JSON.parse(res)[self.channel_key.pluralize] rescue []
    end
    anns
  end

  # GETs the given URI (TLS verification disabled) and returns the body,
  # following a single 301/302 redirect and forwarding any cookie the
  # redirect set.  Returns '' when the redirected request itself fails.
  def get_response_body(uri)
    res = Net::HTTP.start(uri.host, uri.port,
                          :use_ssl => uri.scheme == 'https',
                          open_timeout: 60,read_timeout: 60,
                          verify_mode: OpenSSL::SSL::VERIFY_NONE) do |http|
      req = Net::HTTP::Get.new(uri)
      req['User-Agent'] = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/105.0.0.0 Safari/537.36'
      http.request(req)
    end
    if res.code == "302" || res.code == "301"
      location = res['Location']
      cookie = res['Set-Cookie']
      # A leading "/" means the redirect target is relative to the host.
      if location[0] == "/"
        uri = URI.parse("#{uri.scheme}://#{uri.host}#{location}")
      else
        uri = URI.parse(location)
      end
      begin
        res = Net::HTTP.start(uri.host, uri.port,
                              :use_ssl => uri.scheme == 'https',
                              open_timeout: 60,read_timeout: 60,
                              verify_mode: OpenSSL::SSL::VERIFY_NONE) do |http|
          req = Net::HTTP::Get.new(uri)
          req['Cookie'] = cookie
          http.request(req)
        end
        res_body = res.body
      rescue => e
        res_body = ''
        puts e
        puts e.backtrace
      end
      return res_body
    else
      return res.body
    end
  end

  # The Category this feed merges into, or nil when it no longer exists.
  def category
    Category.find(self.merge_with_category) rescue nil
  end

  # empty? must for hash — always return a Hash, never nil/"".
  def channel_title_for_cache
    !self.channel_title_translations.blank? ? self.channel_title_translations : {}#I18n.available_locales.collect{|v| [v,I18n.t("feed.source")]}.to_h
  end

  # Issues the request, recursively following 301/302 redirects; the
  # request object's @path is rewritten in place to the redirect target.
  def http_request(http, request)
    response = http.request(request)
    if response.code.to_i == 301 || response.code.to_i == 302
      location = response["location"]
      new_uri = URI(location)
      http = Net::HTTP.new(new_uri.host, new_uri.port)
      if location.include?('https')
        http.use_ssl = true
        http.verify_mode = OpenSSL::SSL::VERIFY_NONE
      end
      request.instance_variable_set(:@path, new_uri.path)
      response = http_request(http, request)
    end
    response
  end

  # Asks the remote site to start pushing updates for this feed; sets
  # enable_notify on HTTP 200.  No-op when notifications are already on
  # or the local site has no root_url.
  def add_notify
    unless self.enable_notify
      root_url = Site.first.root_url rescue ""
      if root_url.present?
        uri = URI(self.remote_site_url)
        http_req = Net::HTTP.new(uri.host, uri.port)
        if self.remote_site_url.include?('https')
          http_req.use_ssl = true
          http_req.verify_mode = OpenSSL::SSL::VERIFY_NONE
        end
        http_req.open_timeout = 10
        request = Net::HTTP::Post.new("/xhr/#{self.channel_key.pluralize}/feed_add_remote/#{self.feed_uid}", 'Content-Type' => 'application/json')
        request.body = {"url"=>root_url}.to_json
        response = http_request( http_req , request )
        if response.code.to_i == 200
          self.update(:enable_notify=>true)
        end
      end
    end
  end

  # Counterpart of add_notify: asks the remote site to stop pushing
  # updates and clears enable_notify on HTTP 200.
  def remove_notify
    if self.enable_notify
      root_url = Site.first.root_url rescue ""
      if root_url.present?
        uri = URI(self.remote_site_url)
        http_req = Net::HTTP.new(uri.host, uri.port)
        if self.remote_site_url.include?('https')
          http_req.use_ssl = true
          http_req.verify_mode = OpenSSL::SSL::VERIFY_NONE
        end
        http_req.open_timeout = 10
        request = Net::HTTP::Post.new("/xhr/#{self.channel_key.pluralize}/feed_remove_remote/#{self.feed_uid}", 'Content-Type' => 'application/json')
        request.body = {"url"=>root_url}.to_json
        response = http_request( http_req , request )
        if response.code.to_i == 200
          self.update(:enable_notify=>false)
        end
      end
    end
  end
end