Fix bugs (including HTTPS support and feed sources other than announcements).
Add a module_name select tag to the /admin/feeds/announcements page. Automatically add a crontab entry that refreshes feed contents every five minutes when a new feed is added.
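As background for the crontab change: the sketch below mirrors the after_create hook added to SiteFeed in the diff, pulled out into a standalone helper. The helper name, its arguments, and the example path are hypothetical; the feeds_module:make_cache rake task and the */5 schedule are taken from the diff itself.

# Hypothetical helper illustrating the crontab-registration approach used in this commit.
def ensure_feed_cache_cron(site_root, task = "feeds_module:make_cache")
  # Read the current user's crontab; treat a missing crontab as an empty list.
  crontab_list = `crontab -l 2>/dev/null`.split("\n") rescue []
  # Skip if an entry for this site and rake task is already installed.
  return if crontab_list.any? { |line| line.include?(site_root) && line.include?(task) }
  # Append an entry that runs the cache task every five minutes.
  entry = "*/5 * * * * /bin/bash -l -c 'cd #{site_root} && bundle exec rake #{task} > /dev/null'"
  `(crontab -l 2>/dev/null ; echo "#{entry}") | crontab`
end

# Inside the Rails app this would be called with Rails.root.to_s, for example:
# ensure_feed_cache_cron("/var/www/orbit_site")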
This commit is contained in:
parent 46955a4024
commit 522946bf30
@@ -63,11 +63,16 @@ class Admin::FeedsController < OrbitAdminController
   end
   def announcements
     @filter_fields = {}
-    @filter_fields['source'] = SiteFeed.all.pluck(:channel_title,:remote_site_url).collect do |a,b|
+    @filter_fields['feed.source'] = SiteFeed.all.pluck(:channel_title,:remote_site_url).collect do |a,b|
       tp = (a.blank? || a[I18n.locale].blank?) ? b.gsub(/http:\/\/|https:\/\//,'').gsub(/\./,'-') : a[I18n.locale]
       {:title => tp,:id => tp}
     end
-    @all_feed_annc = SiteFeedAnnc.all.order(created_at: 1).to_a rescue []
+    @filter_fields['feed.module_name'] = SiteFeed.all.map{|s| s.channel_key}.uniq.map do |key|
+      {:title => I18n.t("module_name.#{key}"),:id => key}
+    end
+    @module_name = params[:filters][:module_name] rescue []
+    @module_name = SiteFeed.all.map{|s| s.channel_key}.uniq if @module_name == []
+    @all_feed_annc = SiteFeedAnnc.where(:channel_key.in => @module_name).order(created_at: 1).to_a rescue []
     @source = params[:filters][:source] rescue []
     @keywords = params[:keywords] rescue nil
     if request.xhr?
@@ -90,22 +95,31 @@ class Admin::FeedsController < OrbitAdminController
   end

   def get_channel_list
-    url = params['url'].chomp("/") + "/feeds/channel_lists"
-    uri = URI.parse(url)
-    res = Net::HTTP.start(uri.host, uri.port,:use_ssl => uri.scheme == 'https',open_timeout: 60,read_timeout: 60) do |http|
-      req = Net::HTTP::Get.new(uri)
-      http.request(req).body rescue nil
+    puts params['url']
+    url = URI.decode(params['url']).chomp("/") + "/feeds/channel_lists"
+    puts url
+    uri = URI.parse(URI.decode(url).encode)
+    begin
+      res = Timeout.timeout(10){Net::HTTP.get_response(uri).body}
+    rescue
+      res = nil
+      puts uri.to_s
+      puts uri.hostname
     end
     data = JSON.parse(res) rescue {}
+    print data
     render :json => data.to_json
   end

   def get_feed_list
     url = params['url'].chomp("/") + params[:feed_list_url]
     uri = URI.parse(url)
-    res = Net::HTTP.start(uri.host, uri.port,:use_ssl => uri.scheme == 'https',open_timeout: 60,read_timeout: 60) do |http|
-      req = Net::HTTP::Get.new(uri)
-      http.request(req).body rescue nil
+    begin
+      res = Timeout.timeout(10){Net::HTTP.get_response(uri).body}
+    rescue
+      res = nil
+      puts uri.to_s
+      puts uri.hostname
     end
     data = JSON.parse(res) rescue {}
     data_to_send = {}
@@ -11,10 +11,21 @@ class SiteFeed
   field :feed_url
   field :feed_uid
   require 'feed_model/cache'
+  require 'fileutils'
   include FeedModel::Cache
   Category.send(:include,FeedModel::Cache)
+  after_create do
+    crontab_list = `crontab -l`.split("\n") rescue []
+    site_root = Rails.root.to_s
+    if crontab_list.select{|s| s.include?(site_root) && s.include?("feeds_module:make_cache")} == []
+      `(crontab -l ; echo "*/5 * * * * /bin/bash -l -c 'cd #{site_root} && bundle exec rake feeds_module:make_cache > /dev/null'") | crontab`
+    end
+  end
   before_destroy do
     tmp = SiteFeedAnnc.where(feed_id: self.id).first
+    main_directory = File.join("#{Rails.root}","public","site_feeds")
+    feed_directory = File.join(main_directory.to_s, self.id.to_s)
+    FileUtils.rm_rf(feed_directory.to_s)
     if !tmp.nil?
       tmp.destroy
     end
@@ -24,22 +35,38 @@ class SiteFeed
     main_directory = File.join("#{Rails.root}","public","site_feeds")
     feed_directory = File.join(main_directory.to_s, self.id.to_s)
     if File.exists?(feed_directory)
-      anns = JSON.parse(File.read(File.join(feed_directory.to_s, self.feed_uid + ".json")))['announcements'] rescue []
+      anns = JSON.parse(File.read(File.join(feed_directory.to_s, self.feed_uid + ".json")))[self.channel_key.pluralize] rescue []
     else
       uri = URI(self.feed_url)
-      res = Net::HTTP.start(uri.host, uri.port,:use_ssl => uri.scheme == 'https',open_timeout: 20,read_timeout: 20) do |http|
-        req = Net::HTTP::Get.new(uri)
-        http.request(req).body rescue ''
-      end
+      res = get_response_body(uri) rescue ''
       FileUtils.mkdir_p(feed_directory) if !File.exists?(feed_directory)
       File.open(File.join(feed_directory.to_s,self.feed_uid + ".json"),"w") do |file|
         res.force_encoding("utf-8")
         file.write(res)
       end
-      anns = JSON.parse(res)['announcements'] rescue []
+      anns = JSON.parse(res)[self.channel_key.pluralize] rescue []
     end
     anns
   end
+  def get_response_body(uri)
+    res = Net::HTTP.get_response(uri)
+    if res.code == "302" || res.code == "301"
+      location = res['Location']
+      cookie = res['Set-Cookie']
+      headers = {
+        'Cookie' => cookie,
+      }
+      if location[0] == "/"
+        uri = URI.parse("#{uri.scheme}://#{uri.host}#{location}")
+      else
+        uri = URI.parse(location)
+      end
+      res = Net::HTTP.get_response(uri,nil,headers)
+      return res.body
+    else
+      return res.body
+    end
+  end
   def category
     Category.find(self.merge_with_category) rescue nil
   end
@@ -20,6 +20,7 @@ class SiteFeedAnnc
     Array(self[:all_contents_for_feed]).collect do |v|
       tmp = v
       if hidden_annc.exclude?(v['id']) && !tmp["title_translations"][locale].blank?
+        if self.channel_key == "announcement"
         tmp['statuses'] = []
         if self[:top_list].count == 0 || self[:top_list].exclude?(tmp['id'])
           tmp[:is_top] = false
@@ -41,15 +42,15 @@ class SiteFeedAnnc
         end
         tmp["category"] = cat
         tmp["source-site"] = self.remote_site_url
-        tmp["source-site-title"] = self[:channel_title][locale]
+        tmp["source-site-title"] = (self[:channel_title][locale] rescue "")
         tmp["params"] = tmp["params"].to_s + "_" + self.feed_id.to_s + "h"
-        next if !site_source.nil? && site_source != fa["source-site-title"]
+        next if !site_source.nil? && site_source != tmp["source-site-title"]
         tmp['statuses'] << {
           "status" => "<a href='#{tmp["source-site"]}' target='_blank' class='feed-source'>#{tmp["source-site-title"]}</a>",
           "status-class" => "status-source"
         }

-        files = tmp["bulletin_files"].collect{|bf| { "file_url" => bf["url"], "file_title" => (fa["title_translations"][locale].blank? ? File.basename(fa["url"]) : fa["title_translations"][locale] rescue '') }} rescue []
+        files = tmp["bulletin_files"].collect{|bf| { "file_url" => bf["url"], "file_title" => (bf["title_translations"][locale].blank? ? File.basename(bf["url"]) : bf["title_translations"][locale] rescue '') }} rescue []
         links = tmp["bulletin_links"].map{|link| { "link_url" => link["url"], "link_title" => (link["title_translations"][locale].blank? ? link["url"] : link["title_translations"][locale]) } } rescue []
         tmp["bulletin_links"] = links
         tmp["bulletin_files"] = files
@@ -67,6 +68,67 @@ class SiteFeedAnnc
         tmp["img_description"] = tmp["image_description_translations"][locale]
         tmp["more"] = I18n.t(:more_plus)
         tmp["view_count"] = ""
+        else
+          tmp['statuses'] = []
+          if self[:top_list].count == 0 || self[:top_list].exclude?(tmp['id'])
+            tmp[:is_top] = false
+          else
+            tmp[:is_top] = true
+            tmp['statuses'] << {
+              "status" => I18n.t(:top),
+              "status-class" => "status-top"
+            }
+          end
+          if self[:hot_list].count == 0 || self[:top_list].exclude?(tmp['id'])
+            tmp[:is_hot] = false
+          else
+            tmp[:is_hot] = true
+            tmp['statuses'] << {
+              "status" => I18n.t(:hot),
+              "status-class" => "status-hot"
+            }
+          end
+          tmp["category"] = cat
+          tmp["source-site"] = self.remote_site_url
+          tmp["source-site-title"] = (self[:channel_title][locale] rescue "")
+          tmp["params"] = tmp["params"].to_s + "_" + self.feed_id.to_s + "h"
+          next if !site_source.nil? && site_source != tmp["source-site-title"]
+          tmp['statuses'] << {
+            "status" => "<a href='#{tmp["source-site"]}' target='_blank' class='feed-source'>#{tmp["source-site-title"]}</a>",
+            "status-class" => "status-source"
+          }
+          tmp["source-site-link"] = tmp["source-site"]
+          tmp["source-site"] = "<a href='#{tmp["source-site"]}' target='_blank' class='feed-source'>#{tmp["source-site-title"]}</a>"
+          if !is_widget
+            tmp["link_to_show"] = OrbitHelper.url_to_show(tmp["params"]) rescue ''
+          else
+            tmp["link_to_show"] = OrbitHelper.widget_item_url(tmp["params"]) rescue ''
+          end
+          tmp["target"] = "_self"
+          tmp["more"] = I18n.t(:more_plus)
+          tmp["view_count"] = ""
+          new_tmp = {}
+          tmp.each do |key,value|
+            if key.include? "_translations"
+              new_tmp[key.sub("_translations","")] = value[locale].to_s rescue ""
+            elsif key.include?("date") || key.include?("Date")
+              new_tmp[key] = DateTime.parse(value) rescue nil
+            else
+              if value.class == Hash
+                value.each do |sub_k,sub_v|
+                  if sub_k.include? "_translations"
+                    new_tmp[key][sub_k.sub("_translations","")] = sub_v[locale].to_s rescue ""
+                  else
+                    new_tmp[key][sub_k] = sub_v
+                  end
+                end
+              else
+                new_tmp[key] = value
+              end
+            end
+          end
+          tmp = BSON::Document.new(new_tmp)
+        end
       else
         tmp = nil
       end
@@ -1,5 +1,6 @@
 en:
   feed:
+    module_name: Module name
     new_: "Susbscribe to channel"
     feed: Feed
     all_feeds: All Feeds
@@ -1,5 +1,6 @@
 zh_tw:
   feed:
+    module_name: 模組名稱
     new_: "訂閱頻道"
     feed: Feed
     all_feeds: 'Feeds 列表'