diff --git a/Gemfile b/Gemfile
index 4fa81c9d..22a4e0c1 100644
--- a/Gemfile
+++ b/Gemfile
@@ -37,9 +37,8 @@ gem 'therubyracer' if RUBY_PLATFORM.downcase.include?("linux")
gem "impressionist", :require => "impressionist", :path => "vendor/impressionist"
-# gem "tire"
-gem 'sunspot_rails', "~> 1.3.2"
-gem 'sunspot_solr'
+gem "tire"
+
# Gems used only for assets and not required
# in production environments by default.
diff --git a/Gemfile.lock b/Gemfile.lock
index b6525487..5d74a545 100644
--- a/Gemfile.lock
+++ b/Gemfile.lock
@@ -96,6 +96,7 @@ GEM
railties (>= 3.0.0)
fastercsv (1.5.4)
haml (3.1.4)
+ hashr (0.0.21)
hike (1.2.1)
hoe (2.16.1)
rake (~> 0.8)
@@ -185,6 +186,8 @@ GEM
redis (>= 2.0.1)
resque (>= 1.8.0)
rufus-scheduler
+ rest-client (1.6.7)
+ mime-types (>= 1.16)
rsolr (1.0.8)
builder (>= 2.1.2)
rspec (2.8.0)
@@ -253,13 +256,18 @@ GEM
sunspot_rails (1.3.3)
nokogiri
sunspot (= 1.3.3)
- sunspot_solr (1.3.3)
therubyracer (0.9.9)
libv8 (~> 3.3.10)
thor (0.14.6)
tilt (1.3.3)
tinymce-rails (3.4.8)
railties (>= 3.1)
+ tire (0.4.2)
+ activemodel (>= 3.0)
+ hashr (~> 0.0.19)
+ multi_json (~> 1.0)
+ rake
+ rest-client (~> 1.6)
transaction-simple (1.4.0)
hoe (>= 1.1.7)
treetop (1.4.10)
@@ -327,9 +335,8 @@ DEPENDENCIES
spork
sprockets
sunspot-rails-tester
- sunspot_rails (~> 1.3.2)
- sunspot_solr
therubyracer
tinymce-rails
+ tire
uglifier
watchr
diff --git a/app/models/mongoid/base_model.rb b/app/models/mongoid/base_model.rb
deleted file mode 100644
index 6f6a3937..00000000
--- a/app/models/mongoid/base_model.rb
+++ /dev/null
@@ -1,46 +0,0 @@
-# coding: utf-8
-# 基本 Model,加入一些通用功能
-module Mongoid
- module BaseModel
- extend ActiveSupport::Concern
-
- included do
- scope :recent, desc(:_id)
- scope :exclude_ids, Proc.new { |ids| where(:_id.nin => ids.map(&:to_i)) }
- scope :by_week, where(:created_at.gte => 7.days.ago.utc)
- end
-
- module ClassMethods
- # like ActiveRecord find_by_id
- def find_by_id(id)
- if id.is_a?(Integer) or id.is_a?(String)
- where(:_id => id.to_i).first
- else
- nil
- end
- end
-
- def find_in_batches(opts = {})
- batch_size = opts[:batch_size] || 1000
- start = opts.delete(:start).to_i || 0
- objects = self.limit(batch_size).skip(start)
- t = Time.new
- while objects.any?
- yield objects
- start += batch_size
- # Rails.logger.debug("processed #{start} records in #{Time.new - t} seconds") if Rails.logger.debug?
- break if objects.size < batch_size
- objects = self.limit(batch_size).skip(start)
- end
- end
-
- def delay
- Sidekiq::Extensions::Proxy.new(DelayedDocument, self)
- end
- end
-
- def delay
- Sidekiq::Extensions::Proxy.new(DelayedDocument, self)
- end
- end
-end
diff --git a/app/models/sunspot/mongoid.rb b/app/models/sunspot/mongoid.rb
deleted file mode 100644
index 0f207c5a..00000000
--- a/app/models/sunspot/mongoid.rb
+++ /dev/null
@@ -1,46 +0,0 @@
-# coding: utf-8
-# this is from : https://github.com/jugyo/sunspot_mongoid
-# this file is special for mongoid_auto_increment_id
-require 'sunspot'
-require 'mongoid'
-require 'sunspot/rails'
-
-# == Examples:
-#
-# class Post
-# include Mongoid::Document
-# field :title
-#
-# include Sunspot::Mongoid
-# searchable do
-# text :title
-# end
-# end
-#
-module Sunspot
- module Mongoid
- def self.included(base)
- base.class_eval do
- extend Sunspot::Rails::Searchable::ActsAsMethods
- Sunspot::Adapters::DataAccessor.register(DataAccessor, base)
- Sunspot::Adapters::InstanceAdapter.register(InstanceAdapter, base)
- end
- end
-
- class InstanceAdapter < Sunspot::Adapters::InstanceAdapter
- def id
- @instance.id
- end
- end
-
- class DataAccessor < Sunspot::Adapters::DataAccessor
- def load(id)
- @clazz.where(:_id => id).first
- end
-
- def load_all(ids)
- @clazz.where(:_id.in => ids.collect { |id| id.to_i })
- end
- end
- end
-end
diff --git a/config/resque.god b/config/resque.god
index 6f331dd8..767cc47f 100644
--- a/config/resque.god
+++ b/config/resque.god
@@ -5,6 +5,7 @@ num_workers = rails_env == 'production' ? 5 : 2
num_workers.times do |num|
God.watch do |w|
w.dir = "#{rails_root}"
+ w.log = "#{rails_root}/log/myprocess.log"
w.name = "resque-#{num}"
w.group = 'resque'
w.interval = 30.seconds
diff --git a/public/static/kernel.js b/public/static/kernel.js
index c5b9a8a6..9511b847 100644
--- a/public/static/kernel.js
+++ b/public/static/kernel.js
@@ -14,7 +14,7 @@ $(document).ready(function() {
function ajax_load_proc(wapper,url){
wapper.load(url,function(respText,textSta,XML){
if(textSta == 'error')
- wapper.html("Loading Failed");
+ wapper.html("Loading Failed<br/>" +
+   "Go See");
});
}
diff --git a/solr/conf/admin-extra.html b/solr/conf/admin-extra.html
deleted file mode 100644
index aa739da8..00000000
--- a/solr/conf/admin-extra.html
+++ /dev/null
@@ -1,31 +0,0 @@
-
-
-
diff --git a/solr/conf/elevate.xml b/solr/conf/elevate.xml
deleted file mode 100644
index 7630ebe2..00000000
--- a/solr/conf/elevate.xml
+++ /dev/null
@@ -1,36 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/solr/conf/mapping-ISOLatin1Accent.txt b/solr/conf/mapping-ISOLatin1Accent.txt
deleted file mode 100644
index ede77425..00000000
--- a/solr/conf/mapping-ISOLatin1Accent.txt
+++ /dev/null
@@ -1,246 +0,0 @@
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Syntax:
-# "source" => "target"
-# "source".length() > 0 (source cannot be empty.)
-# "target".length() >= 0 (target can be empty.)
-
-# example:
-# "À" => "A"
-# "\u00C0" => "A"
-# "\u00C0" => "\u0041"
-# "ß" => "ss"
-# "\t" => " "
-# "\n" => ""
-
-# À => A
-"\u00C0" => "A"
-
-# Á => A
-"\u00C1" => "A"
-
-# Â => A
-"\u00C2" => "A"
-
-# Ã => A
-"\u00C3" => "A"
-
-# Ä => A
-"\u00C4" => "A"
-
-# Å => A
-"\u00C5" => "A"
-
-# Æ => AE
-"\u00C6" => "AE"
-
-# Ç => C
-"\u00C7" => "C"
-
-# È => E
-"\u00C8" => "E"
-
-# É => E
-"\u00C9" => "E"
-
-# Ê => E
-"\u00CA" => "E"
-
-# Ë => E
-"\u00CB" => "E"
-
-# Ì => I
-"\u00CC" => "I"
-
-# Í => I
-"\u00CD" => "I"
-
-# Î => I
-"\u00CE" => "I"
-
-# Ï => I
-"\u00CF" => "I"
-
-# IJ => IJ
-"\u0132" => "IJ"
-
-# Ð => D
-"\u00D0" => "D"
-
-# Ñ => N
-"\u00D1" => "N"
-
-# Ò => O
-"\u00D2" => "O"
-
-# Ó => O
-"\u00D3" => "O"
-
-# Ô => O
-"\u00D4" => "O"
-
-# Õ => O
-"\u00D5" => "O"
-
-# Ö => O
-"\u00D6" => "O"
-
-# Ø => O
-"\u00D8" => "O"
-
-# Œ => OE
-"\u0152" => "OE"
-
-# Þ
-"\u00DE" => "TH"
-
-# Ù => U
-"\u00D9" => "U"
-
-# Ú => U
-"\u00DA" => "U"
-
-# Û => U
-"\u00DB" => "U"
-
-# Ü => U
-"\u00DC" => "U"
-
-# Ý => Y
-"\u00DD" => "Y"
-
-# Ÿ => Y
-"\u0178" => "Y"
-
-# à => a
-"\u00E0" => "a"
-
-# á => a
-"\u00E1" => "a"
-
-# â => a
-"\u00E2" => "a"
-
-# ã => a
-"\u00E3" => "a"
-
-# ä => a
-"\u00E4" => "a"
-
-# å => a
-"\u00E5" => "a"
-
-# æ => ae
-"\u00E6" => "ae"
-
-# ç => c
-"\u00E7" => "c"
-
-# è => e
-"\u00E8" => "e"
-
-# é => e
-"\u00E9" => "e"
-
-# ê => e
-"\u00EA" => "e"
-
-# ë => e
-"\u00EB" => "e"
-
-# ì => i
-"\u00EC" => "i"
-
-# í => i
-"\u00ED" => "i"
-
-# î => i
-"\u00EE" => "i"
-
-# ï => i
-"\u00EF" => "i"
-
-# ij => ij
-"\u0133" => "ij"
-
-# ð => d
-"\u00F0" => "d"
-
-# ñ => n
-"\u00F1" => "n"
-
-# ò => o
-"\u00F2" => "o"
-
-# ó => o
-"\u00F3" => "o"
-
-# ô => o
-"\u00F4" => "o"
-
-# õ => o
-"\u00F5" => "o"
-
-# ö => o
-"\u00F6" => "o"
-
-# ø => o
-"\u00F8" => "o"
-
-# œ => oe
-"\u0153" => "oe"
-
-# ß => ss
-"\u00DF" => "ss"
-
-# þ => th
-"\u00FE" => "th"
-
-# ù => u
-"\u00F9" => "u"
-
-# ú => u
-"\u00FA" => "u"
-
-# û => u
-"\u00FB" => "u"
-
-# ü => u
-"\u00FC" => "u"
-
-# ý => y
-"\u00FD" => "y"
-
-# ÿ => y
-"\u00FF" => "y"
-
-# ff => ff
-"\uFB00" => "ff"
-
-# fi => fi
-"\uFB01" => "fi"
-
-# fl => fl
-"\uFB02" => "fl"
-
-# ffi => ffi
-"\uFB03" => "ffi"
-
-# ffl => ffl
-"\uFB04" => "ffl"
-
-# ſt => ft
-"\uFB05" => "ft"
-
-# st => st
-"\uFB06" => "st"
diff --git a/solr/conf/protwords.txt b/solr/conf/protwords.txt
deleted file mode 100644
index 1dfc0abe..00000000
--- a/solr/conf/protwords.txt
+++ /dev/null
@@ -1,21 +0,0 @@
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-#-----------------------------------------------------------------------
-# Use a protected word file to protect against the stemmer reducing two
-# unrelated words to the same base word.
-
-# Some non-words that normally won't be encountered,
-# just to test that they won't be stemmed.
-dontstems
-zwhacky
-
diff --git a/solr/conf/schema.xml b/solr/conf/schema.xml
deleted file mode 100644
index 01b9c8c8..00000000
--- a/solr/conf/schema.xml
+++ /dev/null
@@ -1,245 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- id
-
- text
-
-
-
-
diff --git a/solr/conf/scripts.conf b/solr/conf/scripts.conf
deleted file mode 100644
index f58b262a..00000000
--- a/solr/conf/scripts.conf
+++ /dev/null
@@ -1,24 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-user=
-solr_hostname=localhost
-solr_port=8983
-rsyncd_port=18983
-data_dir=
-webapp_name=solr
-master_host=
-master_data_dir=
-master_status_dir=
diff --git a/solr/conf/solrconfig.xml b/solr/conf/solrconfig.xml
deleted file mode 100644
index 809e9a56..00000000
--- a/solr/conf/solrconfig.xml
+++ /dev/null
@@ -1,938 +0,0 @@
-
-
-
-
-
- ${solr.abortOnConfigurationError:true}
-
-
-
-
-
-
-
-
-
-
-
-
- ${solr.data.dir:./solr/data}
-
-
-
- false
- 10
-
-
-
- 32
-
- 10000
- 1000
- 10000
-
-
-
-
-
-
-
- native
-
-
-
-
-
- false
- 32
- 10
-
-
-
-
-
- false
-
- true
-
-
-
-
-
- 1
-
- 0
-
-
-
- false
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- 1024
-
-
-
-
-
-
-
-
-
- true
-
-
-
-
- 20
-
- 200
-
-
-
-
-
-
-
-
-
-
-
- solr rocks
- 0
- 10
-
-
- static firstSearcher warming query from solrconfig.xml
-
-
-
-
- false
-
- 2
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- explicit
-
-
-
-
-
-
-
-
-
-
- dismax
- explicit
- 0.01
-
- text^0.5 features^1.0 name^1.2 sku^1.5 id^10.0 manu^1.1 cat^1.4
-
-
- text^0.2 features^1.1 name^1.5 manu^1.4 manu_exact^1.9
-
-
- popularity^0.5 recip(price,1,1000,1000)^0.3
-
-
- id,name,price,score
-
-
- 2<-1 5<-2 6<90%
-
- 100
- *:*
-
- text features name
-
- 0
-
- name
- regex
-
-
-
-
-
-
- dismax
- explicit
- text^0.5 features^1.0 name^1.2 sku^1.5 id^10.0
- 2<-1 5<-2 6<90%
-
- incubationdate_dt:[* TO NOW/DAY-1MONTH]^2.2
-
-
-
- inStock:true
-
-
-
- cat
- manu_exact
- price:[* TO 500]
- price:[500 TO *]
-
-
-
-
-
-
-
-
-
- textSpell
-
- default
- name
- ./spellchecker
-
-
-
-
-
-
-
-
- false
-
- false
-
- 1
-
-
- spellcheck
-
-
-
-
-
-
- true
-
-
- tvComponent
-
-
-
-
-
-
-
- default
-
- org.carrot2.clustering.lingo.LingoClusteringAlgorithm
-
- 20
-
-
- stc
- org.carrot2.clustering.stc.STCClusteringAlgorithm
-
-
-
-
- true
- default
- true
-
- name
- id
-
- features
-
- true
-
-
-
- false
-
-
- clusteringComponent
-
-
-
-
-
-
- text
- true
- ignored_
-
- true
- links
- ignored_
-
-
-
-
-
-
- true
-
-
- termsComponent
-
-
-
-
-
- string
- elevate.xml
-
-
-
-
- explicit
-
-
- elevator
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- standard
- solrpingquery
- all
-
-
-
-
-
- explicit
-
- true
-
-
-
-
-
-
-
- 100
-
-
-
-
-
-
- 70
-
- 0.5
-
- [-\w ,/\n\"']{20,200}
-
-
-
-
-
- ]]>
- ]]>
-
-
-
-
-
-
-
-
- 5
-
-
-
-
-
- solr
-
-
-
-
- 1
- 2
-
-
-
diff --git a/solr/conf/spellings.txt b/solr/conf/spellings.txt
deleted file mode 100644
index d7ede6f5..00000000
--- a/solr/conf/spellings.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-pizza
-history
\ No newline at end of file
diff --git a/solr/conf/stopwords.txt b/solr/conf/stopwords.txt
deleted file mode 100644
index b5824da3..00000000
--- a/solr/conf/stopwords.txt
+++ /dev/null
@@ -1,58 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-#-----------------------------------------------------------------------
-# a couple of test stopwords to test that the words are really being
-# configured from this file:
-stopworda
-stopwordb
-
-#Standard english stop words taken from Lucene's StopAnalyzer
-a
-an
-and
-are
-as
-at
-be
-but
-by
-for
-if
-in
-into
-is
-it
-no
-not
-of
-on
-or
-s
-such
-t
-that
-the
-their
-then
-there
-these
-they
-this
-to
-was
-will
-with
-
diff --git a/solr/conf/synonyms.txt b/solr/conf/synonyms.txt
deleted file mode 100644
index b0e31cb7..00000000
--- a/solr/conf/synonyms.txt
+++ /dev/null
@@ -1,31 +0,0 @@
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-#-----------------------------------------------------------------------
-#some test synonym mappings unlikely to appear in real input text
-aaa => aaaa
-bbb => bbbb1 bbbb2
-ccc => cccc1,cccc2
-a\=>a => b\=>b
-a\,a => b\,b
-fooaaa,baraaa,bazaaa
-
-# Some synonym groups specific to this example
-GB,gib,gigabyte,gigabytes
-MB,mib,megabyte,megabytes
-Television, Televisions, TV, TVs
-#notice we use "gib" instead of "GiB" so any WordDelimiterFilter coming
-#after us won't split it into two words.
-
-# Synonym mappings can be used for spelling correction too
-pixima => pixma
-
diff --git a/log/sunspot-solr-development.log.lck b/tmp/elasticsearch.log
similarity index 100%
rename from log/sunspot-solr-development.log.lck
rename to tmp/elasticsearch.log
diff --git a/tmp/tire-dsl.rb b/tmp/tire-dsl.rb
new file mode 100644
index 00000000..5b8117ba
--- /dev/null
+++ b/tmp/tire-dsl.rb
@@ -0,0 +1,885 @@
+# encoding: UTF-8
+#
+# **Tire** provides rich and comfortable Ruby API for the
+# [_ElasticSearch_](http://www.elasticsearch.org/) search engine/database.
+#
+# _ElasticSearch_ is a scalable, distributed, cloud-ready, highly-available
+# full-text search engine and database, communicating by JSON over RESTful HTTP,
+# based on [Lucene](http://lucene.apache.org/), written in Java.
+#
+#
+# _Tire_ is open source, and you can download or clone the source code
+# from <http://github.com/karmi/tire>.
+#
+# By following these instructions you should have the search running
+# on a sane operating system in less than 10 minutes.
+
+# Note, that this file can be executed directly:
+#
+# ruby -I lib examples/tire-dsl.rb
+#
+
+
+#### Installation
+
+# Install _Tire_ with _Rubygems_:
+
+#
+# gem install tire
+#
+require 'rubygems'
+require 'colorize'
+
+# _Tire_ uses the [_multi_json_](https://github.com/intridea/multi_json) gem as a generic JSON library.
+# We want to use the [_yajl-ruby_](https://github.com/brianmario/yajl-ruby) gem in its full on mode here.
+#
+require 'yajl/json_gem'
+
+# Now, let's require the _Tire_ gem itself, and we're ready to go.
+#
+require 'tire'
+
+#### Prerequisites
+
+# We'll need a working and running _ElasticSearch_ server, of course. Thankfully, that's easy.
+( puts <<-"INSTALL" ; exit(1) ) unless (RestClient.get('http://localhost:9200') rescue false)
+
+ [ERROR] You don’t appear to have ElasticSearch installed. Please install and launch it with the following commands:
+
+ curl -k -L -o elasticsearch-0.19.0.tar.gz http://github.com/downloads/elasticsearch/elasticsearch/elasticsearch-0.19.0.tar.gz
+ tar -zxvf elasticsearch-0.19.0.tar.gz
+ ./elasticsearch-0.19.0/bin/elasticsearch -f
+INSTALL
+
+### Storing and indexing documents
+
+# Let's initialize an index named “articles”.
+#
+Tire.index 'articles' do
+ # To make sure it's fresh, let's delete any existing index with the same name.
+ #
+ delete
+ # And then, let's create it.
+ #
+ create
+
+ # We want to store and index some articles with `title`, `tags` and `published_on` properties.
+ # Simple Hashes are OK. The default type is „document”.
+ #
+ store :title => '復興「校球」 政大男足決戰UFA足球聯賽', :tags => ['足球'], :published_on => '2011-01-01'
+ store :title => '社科院舉辦碩博士班畢業生撥穗典禮', :tags => ['博士班', '畢業'], :published_on => '2011-01-02'
+
+ # We usually want to set a specific _type_ for the document in _ElasticSearch_.
+ # Simply setting a `type` property is OK.
+ #
+ store :type => 'article',
+ :title => '支持政大學子 羅家倫之女設立獎學金',
+ :tags => ['獎學金'],
+ :published_on => '2011-01-02'
+
+ # We may want to wrap your data in a Ruby class, and use it when storing data.
+ # The contract required of such a class is very simple.
+ #
+ class Article
+
+ #
+ attr_reader :title, :tags, :published_on
+ def initialize(attributes={})
+ @attributes = attributes
+ @attributes.each_pair { |name,value| instance_variable_set :"@#{name}", value }
+ end
+
+ # It must provide a `type`, `_type` or `document_type` method for proper mapping.
+ #
+ def type
+ 'article'
+ end
+
+ # And it must provide a `to_indexed_json` method for conversion to JSON.
+ #
+ def to_indexed_json
+ @attributes.to_json
+ end
+ end
+
+ # Note: Since our class takes a Hash of attributes on initialization, we may even
+ # wrap the results in instances of this class; we'll see how to do that further below.
+ #
+ article = Article.new :title => '親身感受臺灣特色 日本田野研究團政大學習',
+ :tags => ['臺灣特色', '日本'],
+ :published_on => '2011-01-03'
+
+ # Let's store the `article`, now.
+ #
+ store article
+
+ # And let's „force refresh“ the index, so we can query it immediately.
+ #
+ refresh
+end
+
+# We may want to define a specific [mapping](http://www.elasticsearch.org/guide/reference/api/admin-indices-create-index.html)
+# for the index.
+
+Tire.index 'articles' do
+ # To do so, let's just pass a Hash containing the specified mapping to the `Index#create` method.
+ #
+ create :mappings => {
+
+ # Let's specify for which _type_ of documents this mapping should be used:
+ # „article”, in our case.
+ #
+ :article => {
+ :properties => {
+
+ # Let's specify the type of the field, whether it should be analyzed, ...
+ #
+ :id => { :type => 'string', :index => 'not_analyzed', :include_in_all => false },
+
+ # ... set the boost or analyzer settings for the field, etc. The _ElasticSearch_ guide
+ # has [more information](http://elasticsearch.org/guide/reference/mapping/index.html).
+ # Don't forget, that proper mapping is key to efficient and effective search.
+ # But don't fret about getting the mapping right the first time, you won't.
+ # In most cases, the default, dynamic mapping is just fine for prototyping.
+ #
+ :title => { :type => 'string', :analyzer => 'cjk', :boost => 2.0 },
+ :tags => { :type => 'string', :analyzer => 'keyword' },
+ :content => { :type => 'string', :analyzer => 'cjk' }
+ }
+ }
+ }
+end
+
+#### Bulk Indexing
+
+# Of course, we may have large amounts of data, and adding them to the index one by one really isn't the best idea.
+# We can use _ElasticSearch's_ [bulk API](http://www.elasticsearch.org/guide/reference/api/bulk.html)
+# for importing the data.
+
+# So, for demonstration purposes, let's suppose we have a simple collection of hashes to store.
+#
+articles = [
+
+ # Notice that such objects must have an `id` property!
+ #
+ { :id => '1', :type => 'article', :title => '復興「校球」 政大男足決戰UFA足球聯賽', :tags => ['足球'], :published_on => '2011-01-01' },
+
+ # And, of course, they should contain the `type` property for the mapping to work!
+ #
+ { :id => '2', :type => 'article', :title => '社科院舉辦碩博士班畢業生撥穗典禮', :tags => ['博士班', '畢業','社科院'], :published_on => '2011-01-02' },
+ { :id => '3', :type => 'article', :title => '支持政大學子 羅家倫之女設立獎學金', :tags => ['獎學金'], :published_on => '2011-01-02' },
+ { :id => '4', :type => 'article', :title => '親身感受臺灣特色 日本田野研究團政大學習', :tags => ['臺灣特色', '日本'], :published_on => '2011-01-03' }
+]
+
+# We can just push them into the index in one go.
+#
+Tire.index 'articles' do
+ import articles
+end
+
+# Of course, we can easily manipulate the documents before storing them in the index.
+#
+Tire.index 'articles' do
+ delete
+
+ # ... by passing a block to the `import` method. The collection will
+ # be available in the block argument.
+ #
+ import articles do |documents|
+
+ # We will capitalize every _title_ and return the manipulated collection
+ # back to the `import` method.
+ #
+ documents.map { |document| document.update(:title => document[:title].capitalize) }
+ end
+
+ refresh
+end
+
+### Searching
+
+# With the documents indexed and stored in the _ElasticSearch_ database, we can search them, finally.
+#
+# _Tire_ exposes the search interface via simple domain-specific language.
+
+#### Simple Query String Searches
+
+# We can do simple searches, like searching for articles containing “One” in their title.
+#
+s = Tire.search('news_bulletins') do
+ query do
+ string "title:政大"
+ end
+end
+
+# The results:
+# * One [tags: ruby]
+#
+s.results.each do |document|
+ puts "Test1==============================Has results: #{s.results.count}".yellow
+ puts "* #{ document.title } [tags: ]"
+end
+
+# Or, we can search for articles published between January, 1st and January, 2nd.
+#
+puts "Test2==Or, we can search for articles published between January, 1st and January, 2nd.=Has results: #{s.results.count}".yellow
+s = Tire.search('articles') do
+ query do
+ string "published_on:[2011-01-01 TO 2011-01-02]"
+ end
+end
+
+# The results:
+# * One [published: 2011-01-01]
+# * Two [published: 2011-01-02]
+# * Three [published: 2011-01-02]
+#
+s.results.each do |document|
+ puts "* #{ document.title } [published: #{document.published_on}]"
+end
+
+# Notice, that we can access local variables from the _enclosing scope_.
+# (Of course, we may write the blocks in shorter notation.)
+
+# We will define the query in a local variable named `q`...
+#
+q = "title:T*"
+# ... and we can use it inside the `query` block.
+#
+s = Tire.search('articles') { query { string q } }
+
+# The results:
+# * Two [tags: ruby, python]
+# * Three [tags: java]
+#
+puts "Test3==and we can use it inside the `query` block..[ #{q} ]=Has results: #{s.results.count}".yellow
+s.results.each do |document|
+ puts "* #{ document.title } [tags:]"
+end
+
+# Often, we need to access variables or methods defined in the _outer scope_.
+# To do that, we have to use a slight variation of the DSL.
+#
+
+# Let's assume we have a plain Ruby class, named `Article`.
+#
+class Article
+
+ # We will define the query in a class method...
+ #
+ def self.q
+ "title:T*"
+ end
+
+ # ... and wrap the _Tire_ search method in another one.
+ def self.search
+
+ # Notice how we pass the `search` object around as a block argument.
+ #
+ Tire.search('articles') do |search|
+
+ # And we pass the query object in a similar matter.
+ #
+ search.query do |query|
+
+ # Which means we can access the `q` class method.
+ #
+ query.string self.q
+ end
+ end.results
+ end
+end
+
+# We may use any valid [Lucene query syntax](http://lucene.apache.org/java/3_0_3/queryparsersyntax.html)
+# for the `query_string` queries.
+
+# For debugging our queries, we can display the JSON which is being sent to _ElasticSearch_.
+#
+# {"query":{"query_string":{"query":"title:T*"}}}
+#
+puts "", "Query:", "-"*80
+puts s.to_json.green
+
+# Or better yet, we may display a complete `curl` command to recreate the request in terminal,
+# so we can see the naked response, tweak request parameters and meditate on problems.
+#
+# curl -X POST "http://localhost:9200/articles/_search?pretty=true" \
+# -d '{"query":{"query_string":{"query":"title:T*"}}}'
+#
+puts "", "Try the query in Curl:", "-"*80
+puts s.to_curl.green
+
+
+### Logging
+
+# For debugging more complex situations, we can enable logging, so requests and responses
+# will be logged using this `curl`-friendly format.
+
+Tire.configure do
+
+ # By default, at the _info_ level, only the `curl`-format of request and
+ # basic information about the response will be logged:
+ #
+ # # 2011-04-24 11:34:01:150 [CREATE] ("articles")
+ # #
+ # curl -X POST "http://localhost:9200/articles"
+ #
+ # # 2011-04-24 11:34:01:152 [200]
+ #
+ logger 'elasticsearch.log'
+
+ # For debugging, we can switch to the _debug_ level, which will log the complete JSON responses.
+ #
+ # That's very convenient if we want to post a recreation of some problem or solution
+ # to the mailing list, IRC channel, etc.
+ #
+ logger 'elasticsearch.log', :level => 'debug'
+
+ # Note that we can pass any [`IO`](http://www.ruby-doc.org/core/classes/IO.html)-compatible Ruby object as a logging device.
+ #
+ logger STDERR
+end
+
+### Configuration
+
+# As we have just seen with logging, we can configure various parts of _Tire_.
+#
+Tire.configure do
+
+ # First of all, we can configure the URL for _ElasticSearch_.
+ #
+ url "http://search.example.com"
+
+ # Second, we may want to wrap the result items in our own class, for instance
+ # the `Article` class set above.
+ #
+ wrapper Article
+
+ # Finally, we can reset one or all configuration settings to their defaults.
+ #
+ reset :url
+ reset
+
+end
+
+
+### Complex Searching
+
+# Query strings are convenient for simple searches, but we may want to define our queries more expressively,
+# using the _ElasticSearch_ [Query DSL](http://www.elasticsearch.org/guide/reference/query-dsl/index.html).
+#
+s = Tire.search('articles') do
+
+ # Let's suppose we want to search for articles with specific _tags_, in our case “ruby” _or_ “python”.
+ #
+ query do
+
+ # That's a great excuse to use a [_terms_](http://elasticsearch.org/guide/reference/query-dsl/terms-query.html)
+ # query.
+ #
+ terms :tags, ['ruby', 'python']
+ end
+end
+
+# The search, as expected, returns three articles, all tagged “ruby” — among other tags:
+#
+# * Two [tags: ruby, python]
+# * One [tags: ruby]
+# * Four [tags: ruby, php]
+#
+puts "Test4==The search, as expected, returns three articles, all tagged “STHs” — among other tags.Has results: #{s.results.count}".yellow
+s.results.each do |document|
+ puts "* #{ document.title } [tags: ]"
+end
+
+# What if we wanted to search for articles tagged both “ruby” _and_ “python”?
+#
+s = Tire.search('articles') do
+ query do
+
+ # That's a great excuse to specify `minimum_match` for the query.
+ #
+ terms :tags, ['ruby', 'python'], :minimum_match => 2
+ end
+end
+
+# The search, as expected, returns one article, tagged with _both_ “ruby” and “python”:
+#
+# * Two [tags: ruby, python]
+#
+puts "Test5==The search, as expected, returns one article, tagged with _both_ 'ruby' and 'python'.Has results: #{s.results.count}".yellow
+s.results.each do |document|
+ puts "* #{ document.title } [tags: ]"
+end
+
+#### Boolean Queries
+
+# Quite often, we need complex queries with boolean logic.
+# Instead of composing long query strings such as `tags:ruby OR tags:java AND NOT tags:python`,
+# we can use the [_bool_](http://www.elasticsearch.org/guide/reference/query-dsl/bool-query.html)
+# query.
+
+s = Tire.search('news_bulletins') do
+ query do
+
+ # In _Tire_, we can build `bool` queries declaratively, as usual.
+ boolean do
+
+ # Let's define a `should` (`OR`) query for _ruby_,
+ #
+ should { string 'title:政大' }
+
+ # as well as for _java_,
+ must_not { string 'title:復興' }
+
+ # while defining a `must_not` (`AND NOT`) query for _python_.
+ # must_not { string 'tags:python' }
+ end
+ end
+end
+
+# The search returns these documents:
+#
+# * One [tags: ruby]
+# * Three [tags: java]
+# * Four [tags: ruby, php]
+puts "Test6==Boolean Queries.Has results: #{s.results.count}".yellow
+s.results.each do |document|
+ puts "* #{ document.title } [tags: ]"
+end
+
+puts "Test7== mix and reuse Boolean Queries: #{s.results.count}".yellow
+# The best thing about `boolean` queries is that we can very easily save these partial queries as Ruby blocks,
+# to mix and reuse them later, since we can call the `boolean` method multiple times.
+#
+
+# Let's define the query for the _tags_ property,
+#
+tags_query = lambda do |boolean|
+ boolean.should { string 'tags:ruby' }
+ boolean.should { string 'tags:java' }
+end
+
+# ... and a query for the _published_on_ property.
+published_on_query = lambda do |boolean|
+ boolean.must { string 'published_on:[2011-01-01 TO 2011-01-02]' }
+end
+
+# Now, we can use the `tags_query` on its own.
+#
+Tire.search('articles') { query { boolean &tags_query } }
+
+# Or, we can combine it with the `published_on` query.
+#
+Tire.search('articles') do
+ query do
+ boolean &tags_query
+ boolean &published_on_query
+ end
+end
+
+# _ElasticSearch_ supports many types of [queries](http://www.elasticsearch.org/guide/reference/query-dsl/).
+#
+# Eventually, _Tire_ will support all of them. So far, only these are supported:
+#
+# * [string](http://www.elasticsearch.org/guide/reference/query-dsl/query-string-query.html)
+# * [text](http://www.elasticsearch.org/guide/reference/query-dsl/text-query.html)
+# * [term](http://elasticsearch.org/guide/reference/query-dsl/term-query.html)
+# * [terms](http://elasticsearch.org/guide/reference/query-dsl/terms-query.html)
+# * [bool](http://www.elasticsearch.org/guide/reference/query-dsl/bool-query.html)
+# * [custom_score](http://www.elasticsearch.org/guide/reference/query-dsl/custom-score-query.html)
+# * [fuzzy](http://www.elasticsearch.org/guide/reference/query-dsl/fuzzy-query.html)
+# * [all](http://www.elasticsearch.org/guide/reference/query-dsl/match-all-query.html)
+# * [ids](http://www.elasticsearch.org/guide/reference/query-dsl/ids-query.html)
+
+puts "Topic#### Faceted Search ==> SKIP".yellow
+
+
+
+# # _ElasticSearch_ makes it trivial to retrieve complex aggregated data from our index/database,
+# # so called [_facets_](http://www.elasticsearch.org/guide/reference/api/search/facets/index.html).
+
+# # Let's say we want to display article counts for every tag in the database.
+# # For that, we'll use a _terms_ facet.
+
+# #
+# s = Tire.search 'articles' do
+
+# # We will search for articles whose title begins with letter “T”,
+# #
+# query { string 'title:T*' }
+
+# # and retrieve the counts “bucketed” by `tags`.
+# #
+# facet 'tags' do
+# terms :tags
+# end
+# end
+
+# # As we see, our query has found two articles, and if you recall our articles from above,
+# # _Two_ is tagged with “ruby” and “python”, while _Three_ is tagged with “java”.
+# #
+# # Found 2 articles: Three, Two
+# #
+# # The counts shouldn't surprise us:
+# #
+# # Counts by tag:
+# # -------------------------
+# # ruby 1
+# # python 1
+# # java 1
+# #
+# puts "Found #{s.results.count} articles: #{s.results.map(&:title).join(', ')}"
+# puts "Counts by tag:", "-"*25
+# s.results.facets['tags']['terms'].each do |f|
+# puts "#{f['term'].ljust(10)} #{f['count']}"
+# end
+
+# # These counts are based on the scope of our current query.
+# # What if we wanted to display aggregated counts by `tags` across the whole database?
+
+# #
+# s = Tire.search 'articles' do
+
+# # Let's repeat the search for “T”...
+# #
+# query { string 'title:T*' }
+
+# facet 'global-tags', :global => true do
+
+# # ...but set the `global` scope for the facet in this case.
+# #
+# terms :tags
+# end
+
+# # We can even _combine_ facets scoped to the current query
+# # with globally scoped facets — we'll just use a different name.
+# #
+# facet 'current-tags' do
+# terms :tags
+# end
+# end
+
+# # Aggregated results for the current query are the same as previously:
+# #
+# # Current query facets:
+# # -------------------------
+# # ruby 1
+# # python 1
+# # java 1
+# #
+# puts "Current query facets:", "-"*25
+# s.results.facets['current-tags']['terms'].each do |f|
+# puts "#{f['term'].ljust(10)} #{f['count']}"
+# end
+
+# # On the other hand, aggregated results for the global scope include also
+# # tags for articles not matched by the query, such as “java” or “php”:
+# #
+# # Global facets:
+# # -------------------------
+# # ruby 3
+# # python 1
+# # php 1
+# # java 1
+# #
+# puts "Global facets:", "-"*25
+# s.results.facets['global-tags']['terms'].each do |f|
+# puts "#{f['term'].ljust(10)} #{f['count']}"
+# end
+
+# # _ElasticSearch_ supports many advanced types of facets, such as those for computing statistics or geographical distance.
+# #
+# # Eventually, _Tire_ will support all of them. So far, only these are supported:
+# #
+# # * [terms](http://www.elasticsearch.org/guide/reference/api/search/facets/terms-facet.html)
+# # * [date](http://www.elasticsearch.org/guide/reference/api/search/facets/date-histogram-facet.html)
+# # * [range](http://www.elasticsearch.org/guide/reference/api/search/facets/range-facet.html)
+# # * [histogram](http://www.elasticsearch.org/guide/reference/api/search/facets/histogram-facet.html)
+# # * [statistical](http://www.elasticsearch.org/guide/reference/api/search/facets/statistical-facet.html)
+# # * [terms_stats](http://www.elasticsearch.org/guide/reference/api/search/facets/terms-stats-facet.html)
+# # * [query](http://www.elasticsearch.org/guide/reference/api/search/facets/query-facet.html)
+
+# # We have seen that _ElasticSearch_ facets enable us to fetch complex aggregations from our data.
+# #
+# # They are frequently used for another feature, „faceted navigation“.
+# # We can combine queries and facets with
+# # [filters](http://elasticsearch.org/guide/reference/api/search/filter.html),
+# # so the returned documents are restricted by certain criteria — for example to a specific category —,
+# # but the aggregation calculations are still based on the original query.
+
+
+# #### Filtered Search
+
+# # So, let's make our search a bit more complex. Let's search for articles whose titles begin
+# # with letter “T”, again, but filter the results, so only the articles tagged “ruby”
+# # are returned.
+# #
+# s = Tire.search 'articles' do
+
+# # We will use just the same **query** as before.
+# #
+# query { string 'title:T*' }
+
+# # But we will add a _terms_ **filter** based on tags.
+# #
+# filter :terms, :tags => ['ruby']
+
+# # And, of course, our facet definition.
+# #
+# facet('tags') { terms :tags }
+
+# end
+
+# # We see that only the article _Two_ (tagged “ruby” and “python”) is returned,
+# # _not_ the article _Three_ (tagged “java”):
+# #
+# # * Two [tags: ruby, python]
+# #
+# s.results.each do |document|
+# puts "* #{ document.title } [tags: ]"
+# end
+
+# # The _count_ for article _Three_'s tags, “java”, on the other hand, _is_ in fact included:
+# #
+# # Counts by tag:
+# # -------------------------
+# # ruby 1
+# # python 1
+# # java 1
+# #
+# puts "Counts by tag:", "-"*25
+# s.results.facets['tags']['terms'].each do |f|
+# puts "#{f['term'].ljust(10)} #{f['count']}"
+# end
+
+# #### Sorting
+
+# # By default, the results are sorted according to their relevancy.
+# #
+# s = Tire.search('articles') { query { string 'tags:ruby' } }
+
+# s.results.each do |document|
+# puts "* #{ document.title } " +
+# "[tags: ; " +
+
+# # The score is available as the `_score` property.
+# #
+# "score: #{document._score}]"
+# end
+
+# # The results:
+# #
+# # * One [tags: ruby; score: 0.30685282]
+# # * Four [tags: ruby, php; score: 0.19178301]
+# # * Two [tags: ruby, python; score: 0.19178301]
+
+# # But, what if we want to sort the results based on some other criteria,
+# # such as published date or product price? We can do that.
+# #
+# s = Tire.search 'articles' do
+
+# # We will search for articles tagged “ruby”, again, ...
+# #
+# query { string 'tags:ruby' }
+
+# # ... but will sort them by their `title`, in descending order.
+# #
+# sort { by :title, 'desc' }
+# end
+
+# # The results:
+# #
+# # * Two
+# # * One
+# # * Four
+# #
+# s.results.each do |document|
+# puts "* #{ document.title }"
+# end
+
+# # Of course, it's possible to combine more fields in the sorting definition.
+
+# s = Tire.search 'articles' do
+
+# # We will just get all articles in this case.
+# #
+# query { all }
+
+# sort do
+
+# # We will sort the results by their `published_on` property in _ascending_ order (the default),
+# #
+# by :published_on
+
+# # and by their `title` property, in _descending_ order.
+# #
+# by :title, 'desc'
+# end
+# end
+
+# # The results:
+# # * One (Published on: 2011-01-01)
+# # * Two (Published on: 2011-01-02)
+# # * Three (Published on: 2011-01-02)
+# # * Four (Published on: 2011-01-03)
+# #
+# s.results.each do |document|
+# puts "* #{ document.title.ljust(10) } (Published on: #{ document.published_on })"
+# end
+
+# #### Highlighting
+
+# # Often, we want to highlight the snippets matching our query in the displayed results.
+# # _ElasticSearch_ provides rich
+# # [highlighting](http://www.elasticsearch.org/guide/reference/api/search/highlighting.html)
+# # features, and _Tire_ makes them trivial to use.
+# #
+# s = Tire.search 'articles' do
+
+# # Let's search for documents containing word “Two” in their titles,
+# query { string 'title:Two' }
+
+# # and instruct _ElasticSearch_ to highlight relevant snippets.
+# #
+# highlight :title
+# end
+
+# # The results:
+# # Title: Two; Highlighted: Two
+# #
+# s.results.each do |document|
+# puts "Title: #{ document.title }; Highlighted: #{document.highlight.title}"
+# end
+
+# # We can configure many options for highlighting, such as:
+# #
+# s = Tire.search 'articles' do
+# query { string 'title:Two' }
+
+# # • specify the fields to highlight
+# #
+# highlight :title, :body
+
+# # • specify their individual options
+# #
+# highlight :title, :body => { :number_of_fragments => 0 }
+
+# # • or specify global highlighting options, such as the wrapper tag
+# #
+# highlight :title, :body, :options => { :tag => '' }
+# end
+
+# #### Percolation
+
+# # _ElasticSearch_ comes with one very interesting, and rather unique feature:
+# # [_percolation_](http://www.elasticsearch.org/guide/reference/api/percolate.html).
+
+# # It works in a „reverse search“ manner, the reverse of the regular search workflow of adding
+# # documents to the index and then querying them.
+# # Percolation allows us to register a query, and ask if a specific document
+# # matches it, either on demand, or immediately as the document is being indexed.
+
+# # Let's review an example for an index named _weather_.
+# # We will register three queries for percolation against this index.
+# #
+# index = Tire.index('weather') do
+# delete
+# create
+
+# # First, a query named _warning_,
+# #
+# register_percolator_query('warning', :tags => ['warning']) { string 'warning OR severe OR extreme' }
+
+# # a query named _tsunami_,
+# #
+# register_percolator_query('tsunami', :tags => ['tsunami']) { string 'tsunami' }
+
+# # and a query named _floods_.
+# #
+# register_percolator_query('floods', :tags => ['floods']) { string 'flood*' }
+
+# end
+
+# # Notice, that we have added a _tags_ field to the query document, because it behaves
+# # just like any other document in _ElasticSearch_.
+
+# # We will refresh the `_percolator` index for immediate access.
+# #
+# Tire.index('_percolator').refresh
+
+# # Now, let's _percolate_ a document containing some trigger words against all registered queries.
+# #
+# matches = index.percolate(:message => '[Warning] Extreme flooding expected after tsunami wave.')
+
+# # The result will contain, unsurprisingly, names of all the three registered queries:
+# #
+# # Matching queries: ["floods", "tsunami", "warning"]
+# #
+# puts "Matching queries: " + matches.inspect
+
+# # We can filter the executed queries with a regular _ElasticSearch_ query passed as a block to
+# # the `percolate` method.
+# #
+# matches = index.percolate(:message => '[Warning] Extreme flooding expected after tsunami wave.') do
+# # Let's use a _terms_ query against the `tags` field.
+# term :tags, 'tsunami'
+# end
+
+# # In this case, the result will contain only the name of the “tsunami” query.
+# #
+# # Matching queries: ["tsunami"]
+# #
+# puts "Matching queries: " + matches.inspect
+
+# # What if we percolate another document, without the “tsunami” trigger word?
+# #
+# matches = index.percolate(:message => '[Warning] Extreme temperatures expected.') { term :tags, 'tsunami' }
+
+# # As expected, we will get an empty array:
+# #
+# # Matching queries: []
+# #
+# puts "Matching queries: " + matches.inspect
+
+# # Well, that's of course immensely useful for real-time search systems. But, there's more.
+# # We can _percolate_ a document _at the same time_ it is being stored in the index,
+# # getting back a list of matching queries.
+
+# # Let's store a document with some trigger words in the index, and mark it for percolation.
+# #
+# response = index.store :message => '[Warning] Severe floods expected after tsunami wave.', :percolate => true
+
+# # We will get the names of all matching queries in response.
+# #
+# # Matching queries: ["floods", "tsunami", "warning"]
+# #
+# puts "Matching queries: " + response['matches'].inspect
+
+# # As with the _percolate_ example, we can filter the executed queries.
+# #
+# response = index.store :message => '[Warning] Severe floods expected after tsunami wave.',
+# # Let's use a simple string query for the “tsunami” tag.
+# :percolate => 'tags:tsunami'
+
+# # Unsurprisingly, the response will contain just the name of the “tsunami” query.
+# #
+# # Matching queries: ["tsunami"]
+# #
+# puts "Matching queries: " + response['matches'].inspect
+
+# ### ActiveModel Integration
+
+# # As you can see, [_Tire_](https://github.com/karmi/tire) supports the
+# # main features of _ElasticSearch_ in Ruby.
+# #
+# # It allows you to create and delete indices, add documents, search them, retrieve the facets, highlight the results,
+# # and comes with a usable logging facility.
+# #
+# # Of course, the holy grail of any search library is easy, painless integration with your Ruby classes, and,
+# # most importantly, with ActiveRecord/ActiveModel classes.
+# #
+# # Please, check out the [README](https://github.com/karmi/tire/tree/master#readme) file for instructions
+# # how to include _Tire_-based search in your models.
+# #
+# # Send any feedback via Github issues, or ask questions in the [#elasticsearch](irc://irc.freenode.net/#elasticsearch) IRC channel.
\ No newline at end of file
diff --git a/vendor/built_in_modules/news/app/controllers/panel/news/front_end/news_bulletins_controller.rb b/vendor/built_in_modules/news/app/controllers/panel/news/front_end/news_bulletins_controller.rb
index a8479ee9..0b757d06 100644
--- a/vendor/built_in_modules/news/app/controllers/panel/news/front_end/news_bulletins_controller.rb
+++ b/vendor/built_in_modules/news/app/controllers/panel/news/front_end/news_bulletins_controller.rb
@@ -10,12 +10,22 @@ class Panel::News::FrontEnd::NewsBulletinsController < OrbitWidgetController
def index
if !params[:search_query].blank?
- @news_bulletins = NewsBulletin.solr_search do
- fulltext params[:search_query] do
- fields(:text,:title=>2.0)
- end
- end.each_hit_with_result
+ search_query =params[:search_query].gsub(/"/,"").split(" ")
+ words_query = lambda do |boolean|
+ search_query.each do |word|
+ boolean.should { string "title:#{word}" }
+ #boolean.should { string "sub_title:#{word}" }
+ #boolean.should { string "text:#{word}" }
+ end
+ end
+
+ search_result=Tire.search('news_bulletins') do
+ query {boolean &words_query }
+ #raise to_curl
+ end.results.collect{|t| t.id}
+
+ @news_bulletins = NewsBulletin.can_display.any_in(_id:search_result).page( params[:page_main]).per(10)
else
date_now = Time.now
if !params[:category_id].blank?
diff --git a/vendor/built_in_modules/news/app/models/news_bulletin.rb b/vendor/built_in_modules/news/app/models/news_bulletin.rb
index 9e8d0a2b..b2a8191f 100644
--- a/vendor/built_in_modules/news/app/models/news_bulletin.rb
+++ b/vendor/built_in_modules/news/app/models/news_bulletin.rb
@@ -5,16 +5,13 @@ class NewsBulletin
include Mongoid::Timestamps
include Mongoid::MultiParameterAttributes
include Impressionist::Impressionable
- # include OrbitBasis::BaseModel
- # include OrbitSearchLib::ObjectSearchable
- # include NccuSearch
+ include Tire::Model::Search
+ include Tire::Model::Callbacks
# include Redis::Objects
- include Mongoid::BaseModel
-
- include Sunspot::Mongoid
- # scope :searchable,where(:is_checked=>true,:is_hidden=>false,:is_pending=>false)
+
+ scope :searchable,where(:is_checked=>true,:is_hidden=>false,:is_pending=>false)
is_impressionable :counter_cache => { :column_name => :view_count }
@@ -64,61 +61,43 @@ class NewsBulletin
after_save :save_news_bulletin_links
after_save :save_news_bulletin_files
-# include OrbitBasis::ClassMethods
-
- searchable do
- text :title do
- titles = ""
- titles << (self.title.zh_tw.nil? ? '':self.title.zh_tw)
- titles << (self.title.en.nil? ? '':self.title.en)
- #(titles << self.title.en )unless self.title.nil?
- doc = Nokogiri::HTML(titles)
- doc.text
- #VALID_LOCALES
- end
-
- text :text do
- texts = ""
- texts << (self.text.zh_tw.nil? ? '':self.text.zh_tw)
- texts << (self.text.en.nil? ? '':self.text.en)
- #texts << self.text.en )unless self.text.en.nil?
- doc = Nokogiri::HTML(texts)
- doc.text
- #VALID_LOCALES
- end
- # text :sub_titles do
- # self.subtitle.zh_tw
- # #VALID_LOCALES
- # end
- # text :text do
- # self.text.zh_tw
- # #VALID_LOCALES
- # end
- # text :content, :publish_month
- # text :comments do
- # comments.map(&:content)
- # end
- # time :published_at
- # string :publish_month
- end
+ # Tire.index 'news_bulletin' do
+ # delete
+ # create :mappings =>{
+ # :news_bulletin_by_title => {
+ # :properties=> {
+ # :title => { :type => 'string', :index => 'not_analyzed', :include_in_all => false },
+ # :body => {}
+ # }
+ # }
+ # :news_bulletin_by_body => {
+ # :properties=> {
+ # :body => { :type => 'string', :index => 'not_analyzed', :include_in_all => false },
+ # }
+ # }
+ # }
+ # end
# mapping do
# indexes :title, :analyzer => 'cjk', :boost => 10
# indexes :sub_titles, :analyzer => 'cjk', :boost => 5
# indexes :text, :analyzer => 'cjk'
- # end
-
- # def to_indexed_json
- # titles = title.zh_tw + title.en
- # sub_titles = subtitle.zh_tw + subtitle.en
- # texts = text.zh_tw + text.en
- # {
- # :id => id,
- # :title => titles,
- # :sub_titles => sub_titles,
- # :text => texts
- # }.to_json
# end
+ def type
+ "news_bulletin"
+ end
+
+ def to_indexed_json
+ titles = title.zh_tw #+ title.en
+ sub_titles = subtitle.zh_tw #+ subtitle.en
+ texts = text.zh_tw #+ text.en
+ {
+ :_id => _id,
+ :title => Nokogiri::HTML(titles).text,
+ :sub_title => Nokogiri::HTML(sub_titles).text,
+ :text => Nokogiri::HTML(texts).text
+ }.to_json
+ end
def self.search( search = nil, category_id = nil )
if category_id.to_s.size > 0 and search.to_s.size > 0