Fix bugs (including HTTPS support and feed sources other than announcements).

Add a module_name select tag to the /admin/feeds/announcements page.
Automatically add a crontab entry to update feed contents every 5 minutes when a new feed is added.
This commit is contained in:
BoHung Chiu 2020-07-31 14:50:17 +08:00
parent 46955a4024
commit 522946bf30
5 changed files with 182 additions and 77 deletions

View File

@ -63,13 +63,18 @@ class Admin::FeedsController < OrbitAdminController
end end
def announcements def announcements
@filter_fields = {} @filter_fields = {}
@filter_fields['source'] = SiteFeed.all.pluck(:channel_title,:remote_site_url).collect do |a,b| @filter_fields['feed.source'] = SiteFeed.all.pluck(:channel_title,:remote_site_url).collect do |a,b|
tp = (a.blank? || a[I18n.locale].blank?) ? b.gsub(/http:\/\/|https:\/\//,'').gsub(/\./,'-') : a[I18n.locale] tp = (a.blank? || a[I18n.locale].blank?) ? b.gsub(/http:\/\/|https:\/\//,'').gsub(/\./,'-') : a[I18n.locale]
{:title => tp,:id => tp} {:title => tp,:id => tp}
end end
@all_feed_annc = SiteFeedAnnc.all.order(created_at: 1).to_a rescue [] @filter_fields['feed.module_name'] = SiteFeed.all.map{|s| s.channel_key}.uniq.map do |key|
@source = params[:filters][:source] rescue [] {:title => I18n.t("module_name.#{key}"),:id => key}
@keywords = params[:keywords] rescue nil end
@module_name = params[:filters][:module_name] rescue []
@module_name = SiteFeed.all.map{|s| s.channel_key}.uniq if @module_name == []
@all_feed_annc = SiteFeedAnnc.where(:channel_key.in => @module_name).order(created_at: 1).to_a rescue []
@source = params[:filters][:source] rescue []
@keywords = params[:keywords] rescue nil
if request.xhr? if request.xhr?
render :partial => "announcements" render :partial => "announcements"
end end
@ -90,36 +95,45 @@ class Admin::FeedsController < OrbitAdminController
end end
# Proxy endpoint: fetches the remote site's channel list and relays it as JSON.
# Expects params['url'] to be the (possibly percent-encoded) remote site root.
# Any fetch/parse failure degrades to an empty JSON object rather than a 500.
def get_channel_list
  # NOTE(review): URI.decode is deprecated (removed in Ruby 3.0);
  # consider CGI.unescape when the Ruby version is upgraded.
  url = URI.decode(params['url']).chomp("/") + "/feeds/channel_lists"
  # BUG FIX: the url was previously URI.decode'd a second time here,
  # which corrupted double-encoded URLs; parse the built URL directly.
  uri = URI.parse(url.encode)
  begin
    # Hard 10s cap so a slow remote site cannot hang this request.
    res = Timeout.timeout(10) { Net::HTTP.get_response(uri).body }
  rescue
    res = nil
  end
  data = JSON.parse(res) rescue {}
  render :json => data.to_json
end
# Proxy endpoint: fetches the remote site's feed list and annotates each
# feed with whether this site already subscribes to it (matched by feed uid).
# Renders {"feeds" => [...]} as JSON; failures degrade to an empty list.
def get_feed_list
  # Guard against a missing feed_list_url param (nil would raise on +).
  url = params['url'].chomp("/") + params[:feed_list_url].to_s
  uri = URI.parse(url)
  begin
    # Hard 10s cap so a slow remote site cannot hang this request.
    res = Timeout.timeout(10) { Net::HTTP.get_response(uri).body }
  rescue
    res = nil
  end
  data = JSON.parse(res) rescue {}
  data_to_send = { "feeds" => [] }
  Array(data["feeds"]).each do |feed|
    # Subscribed when a SiteFeed with the same uid already exists locally.
    sf = SiteFeed.find_by(:feed_uid => feed["uid"]) rescue nil
    feed["subscribed"] = !sf.nil?
    data_to_send["feeds"] << feed
  end
  render :json => data_to_send.to_json
end
def channel_title def channel_title

View File

@ -11,10 +11,21 @@ class SiteFeed
field :feed_url field :feed_url
field :feed_uid field :feed_uid
require 'feed_model/cache' require 'feed_model/cache'
require 'fileutils'
include FeedModel::Cache include FeedModel::Cache
Category.send(:include,FeedModel::Cache) Category.send(:include,FeedModel::Cache)
# After subscribing to a new feed, ensure a cron job exists that refreshes
# the cached feed contents every 5 minutes for this site. The entry is
# matched on the site path plus the rake task name so it is installed
# at most once per site.
after_create do
  crontab_list = `crontab -l`.split("\n") rescue []
  site_root = Rails.root.to_s
  already_installed = crontab_list.any? do |line|
    line.include?(site_root) && line.include?("feeds_module:make_cache")
  end
  unless already_installed
    # Append to the existing crontab (crontab -l errors harmlessly when
    # no crontab exists yet; the echo line still gets installed).
    `(crontab -l ; echo "*/5 * * * * /bin/bash -l -c 'cd #{site_root} && bundle exec rake feeds_module:make_cache > /dev/null'") | crontab`
  end
end
before_destroy do before_destroy do
tmp = SiteFeedAnnc.where(feed_id: self.id).first tmp = SiteFeedAnnc.where(feed_id: self.id).first
main_directory = File.join("#{Rails.root}","public","site_feeds")
feed_directory = File.join(main_directory.to_s, self.id.to_s)
FileUtils.rm_rf(feed_directory.to_s)
if !tmp.nil? if !tmp.nil?
tmp.destroy tmp.destroy
end end
@ -24,22 +35,38 @@ class SiteFeed
main_directory = File.join("#{Rails.root}","public","site_feeds") main_directory = File.join("#{Rails.root}","public","site_feeds")
feed_directory = File.join(main_directory.to_s, self.id.to_s) feed_directory = File.join(main_directory.to_s, self.id.to_s)
if File.exists?(feed_directory) if File.exists?(feed_directory)
anns = JSON.parse(File.read(File.join(feed_directory.to_s, self.feed_uid + ".json")))['announcements'] rescue [] anns = JSON.parse(File.read(File.join(feed_directory.to_s, self.feed_uid + ".json")))[self.channel_key.pluralize] rescue []
else else
uri = URI(self.feed_url) uri = URI(self.feed_url)
res = Net::HTTP.start(uri.host, uri.port,:use_ssl => uri.scheme == 'https',open_timeout: 20,read_timeout: 20) do |http| res = get_response_body(uri) rescue ''
req = Net::HTTP::Get.new(uri)
http.request(req).body rescue ''
end
FileUtils.mkdir_p(feed_directory) if !File.exists?(feed_directory) FileUtils.mkdir_p(feed_directory) if !File.exists?(feed_directory)
File.open(File.join(feed_directory.to_s,self.feed_uid + ".json"),"w") do |file| File.open(File.join(feed_directory.to_s,self.feed_uid + ".json"),"w") do |file|
res.force_encoding("utf-8") res.force_encoding("utf-8")
file.write(res) file.write(res)
end end
anns = JSON.parse(res)['announcements'] rescue [] anns = JSON.parse(res)[self.channel_key.pluralize] rescue []
end end
anns anns
end end
# Fetches +uri+ and returns the response body, following a single
# 301/302 redirect. Any Set-Cookie from the first hop is forwarded to
# the redirected request (some sites require the session cookie).
#
# @param uri [URI] the URI to fetch
# @return [String] the (possibly redirected) response body
def get_response_body(uri)
  res = Net::HTTP.get_response(uri)
  return res.body unless res.code == "301" || res.code == "302"
  location = res['Location']
  cookie = res['Set-Cookie']
  # A relative Location header is resolved against the original host.
  uri = if location[0] == "/"
          URI.parse("#{uri.scheme}://#{uri.host}#{location}")
        else
          URI.parse(location)
        end
  # BUG FIX: Net::HTTP.get_response(uri, nil, headers) silently drops the
  # headers argument, so the cookie was never actually sent. Issue the
  # redirected request explicitly instead.
  res = Net::HTTP.start(uri.host, uri.port, :use_ssl => uri.scheme == 'https') do |http|
    req = Net::HTTP::Get.new(uri)
    req['Cookie'] = cookie if cookie
    http.request(req)
  end
  res.body
end
def category def category
Category.find(self.merge_with_category) rescue nil Category.find(self.merge_with_category) rescue nil
end end

View File

@ -20,53 +20,115 @@ class SiteFeedAnnc
Array(self[:all_contents_for_feed]).collect do |v| Array(self[:all_contents_for_feed]).collect do |v|
tmp = v tmp = v
if hidden_annc.exclude?(v['id']) && !tmp["title_translations"][locale].blank? if hidden_annc.exclude?(v['id']) && !tmp["title_translations"][locale].blank?
tmp['statuses'] = [] if self.channel_key == "announcement"
if self[:top_list].count == 0 || self[:top_list].exclude?(tmp['id']) tmp['statuses'] = []
tmp[:is_top] = false if self[:top_list].count == 0 || self[:top_list].exclude?(tmp['id'])
else tmp[:is_top] = false
tmp[:is_top] = true else
tmp[:is_top] = true
tmp['statuses'] << {
"status" => I18n.t(:top),
"status-class" => "status-top"
}
end
if self[:hot_list].count == 0 || self[:top_list].exclude?(tmp['id'])
tmp[:is_hot] = false
else
tmp[:is_hot] = true
tmp['statuses'] << {
"status" => I18n.t(:hot),
"status-class" => "status-hot"
}
end
tmp["category"] = cat
tmp["source-site"] = self.remote_site_url
tmp["source-site-title"] = (self[:channel_title][locale] rescue "")
tmp["params"] = tmp["params"].to_s + "_" + self.feed_id.to_s + "h"
next if !site_source.nil? && site_source != tmp["source-site-title"]
tmp['statuses'] << { tmp['statuses'] << {
"status" => I18n.t(:top), "status" => "<a href='#{tmp["source-site"]}' target='_blank' class='feed-source'>#{tmp["source-site-title"]}</a>",
"status-class" => "status-top" "status-class" => "status-source"
} }
end
if self[:hot_list].count == 0 || self[:top_list].exclude?(tmp['id'])
tmp[:is_hot] = false
else
tmp[:is_hot] = true
tmp['statuses'] << {
"status" => I18n.t(:hot),
"status-class" => "status-hot"
}
end
tmp["category"] = cat
tmp["source-site"] = self.remote_site_url
tmp["source-site-title"] = self[:channel_title][locale]
tmp["params"] = tmp["params"].to_s + "_" + self.feed_id.to_s + "h"
next if !site_source.nil? && site_source != fa["source-site-title"]
tmp['statuses'] << {
"status" => "<a href='#{tmp["source-site"]}' target='_blank' class='feed-source'>#{tmp["source-site-title"]}</a>",
"status-class" => "status-source"
}
files = tmp["bulletin_files"].collect{|bf| { "file_url" => bf["url"], "file_title" => (fa["title_translations"][locale].blank? ? File.basename(fa["url"]) : fa["title_translations"][locale] rescue '') }} rescue [] files = tmp["bulletin_files"].collect{|bf| { "file_url" => bf["url"], "file_title" => (bf["title_translations"][locale].blank? ? File.basename(bf["url"]) : bf["title_translations"][locale] rescue '') }} rescue []
links = tmp["bulletin_links"].map{|link| { "link_url" => link["url"], "link_title" => (link["title_translations"][locale].blank? ? link["url"] : link["title_translations"][locale]) } } rescue [] links = tmp["bulletin_links"].map{|link| { "link_url" => link["url"], "link_title" => (link["title_translations"][locale].blank? ? link["url"] : link["title_translations"][locale]) } } rescue []
tmp["bulletin_links"] = links tmp["bulletin_links"] = links
tmp["bulletin_files"] = files tmp["bulletin_files"] = files
tmp["title"] = tmp["title_translations"][locale] tmp["title"] = tmp["title_translations"][locale]
tmp["subtitle"] = tmp["subtitle_translations"][locale] tmp["subtitle"] = tmp["subtitle_translations"][locale]
tmp["source-site-link"] = tmp["source-site"] tmp["source-site-link"] = tmp["source-site"]
tmp["source-site"] = "<a href='#{tmp["source-site"]}' target='_blank' class='feed-source'>#{tmp["source-site-title"]}</a>" tmp["source-site"] = "<a href='#{tmp["source-site"]}' target='_blank' class='feed-source'>#{tmp["source-site-title"]}</a>"
if !is_widget if !is_widget
tmp["link_to_show"] = OrbitHelper.url_to_show(tmp["params"]) rescue '' tmp["link_to_show"] = OrbitHelper.url_to_show(tmp["params"]) rescue ''
else
tmp["link_to_show"] = OrbitHelper.widget_item_url(tmp["params"]) rescue ''
end
tmp["target"] = "_self"
tmp["img_src"] = tmp["image"]["thumb"] || "/assets/announcement-default.jpg"
tmp["img_description"] = tmp["image_description_translations"][locale]
tmp["more"] = I18n.t(:more_plus)
tmp["view_count"] = ""
else else
tmp["link_to_show"] = OrbitHelper.widget_item_url(tmp["params"]) rescue '' tmp['statuses'] = []
if self[:top_list].count == 0 || self[:top_list].exclude?(tmp['id'])
tmp[:is_top] = false
else
tmp[:is_top] = true
tmp['statuses'] << {
"status" => I18n.t(:top),
"status-class" => "status-top"
}
end
if self[:hot_list].count == 0 || self[:top_list].exclude?(tmp['id'])
tmp[:is_hot] = false
else
tmp[:is_hot] = true
tmp['statuses'] << {
"status" => I18n.t(:hot),
"status-class" => "status-hot"
}
end
tmp["category"] = cat
tmp["source-site"] = self.remote_site_url
tmp["source-site-title"] = (self[:channel_title][locale] rescue "")
tmp["params"] = tmp["params"].to_s + "_" + self.feed_id.to_s + "h"
next if !site_source.nil? && site_source != tmp["source-site-title"]
tmp['statuses'] << {
"status" => "<a href='#{tmp["source-site"]}' target='_blank' class='feed-source'>#{tmp["source-site-title"]}</a>",
"status-class" => "status-source"
}
tmp["source-site-link"] = tmp["source-site"]
tmp["source-site"] = "<a href='#{tmp["source-site"]}' target='_blank' class='feed-source'>#{tmp["source-site-title"]}</a>"
if !is_widget
tmp["link_to_show"] = OrbitHelper.url_to_show(tmp["params"]) rescue ''
else
tmp["link_to_show"] = OrbitHelper.widget_item_url(tmp["params"]) rescue ''
end
tmp["target"] = "_self"
tmp["more"] = I18n.t(:more_plus)
tmp["view_count"] = ""
new_tmp = {}
tmp.each do |key,value|
if key.include? "_translations"
new_tmp[key.sub("_translations","")] = value[locale].to_s rescue ""
elsif key.include?("date") || key.include?("Date")
new_tmp[key] = DateTime.parse(value) rescue nil
else
if value.class == Hash
value.each do |sub_k,sub_v|
if sub_k.include? "_translations"
new_tmp[key][sub_k.sub("_translations","")] = sub_v[locale].to_s rescue ""
else
new_tmp[key][sub_k] = sub_v
end
end
else
new_tmp[key] = value
end
end
end
tmp = BSON::Document.new(new_tmp)
end end
tmp["target"] = "_self"
tmp["img_src"] = tmp["image"]["thumb"] || "/assets/announcement-default.jpg"
tmp["img_description"] = tmp["image_description_translations"][locale]
tmp["more"] = I18n.t(:more_plus)
tmp["view_count"] = ""
else else
tmp = nil tmp = nil
end end

View File

@ -1,5 +1,6 @@
en: en:
feed: feed:
module_name: Module name
new_: "Susbscribe to channel" new_: "Susbscribe to channel"
feed: Feed feed: Feed
all_feeds: All Feeds all_feeds: All Feeds

View File

@ -1,5 +1,6 @@
zh_tw: zh_tw:
feed: feed:
module_name: 模組名稱
new_: "訂閱頻道" new_: "訂閱頻道"
feed: Feed feed: Feed
all_feeds: 'Feeds 列表' all_feeds: 'Feeds 列表'