class Table < ApplicationRecord
  include SearchableModel

  auto_strip_attributes :name, nullify: false
  validates :name,
            length: { maximum: Constants::NAME_MAX_LENGTH }
  validates :contents,
            presence: true,
            length: { maximum: Constants::TABLE_JSON_MAX_SIZE_MB.megabytes }

  belongs_to :created_by,
             foreign_key: 'created_by_id',
             class_name: 'User',
             optional: true
  belongs_to :last_modified_by,
             foreign_key: 'last_modified_by_id',
             class_name: 'User',
             optional: true
  belongs_to :team, optional: true
  has_one :step_table, inverse_of: :table
  has_one :step, through: :step_table

  has_one :result_table, inverse_of: :table
  has_one :result, through: :result_table
  has_many :report_elements, inverse_of: :table, dependent: :destroy

  after_save :update_ts_index
  # accepts_nested_attributes_for :table

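  # Full-text search over tables attached to protocol steps or results that
  # are visible to the given user. `query` is matched against the table name
  # and against the data_vector tsvector column maintained by
  # update_ts_index; `page` paginates the results unless
  # Constants::SEARCH_NO_LIMIT is passed.
  # Illustrative call (arguments assumed, not taken from this file):
  #   Table.search(current_user, false, 'sample buffer', 1)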
  def self.search(user,
                  include_archived,
                  query = nil,
                  page = 1,
                  _current_team = nil,
                  options = {})
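    # Gather join-table IDs reachable through steps and results the user is
    # allowed to see; visibility filtering is delegated to Step.search and
    # Result.search.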
    step_ids =
      Step
      .search(user, include_archived, nil, Constants::SEARCH_NO_LIMIT)
      .joins(:step_tables)
      .distinct
      .pluck('step_tables.id')

    result_ids =
      Result
      .search(user, include_archived, nil, Constants::SEARCH_NO_LIMIT)
      .joins(:result_table)
      .distinct
      .pluck('result_tables.id')

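    # Base relation: tables linked to any of those step/result join rows.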
    table_query =
      Table
      .distinct
      .joins('LEFT OUTER JOIN step_tables ON step_tables.table_id = tables.id')
      .joins('LEFT OUTER JOIN result_tables ON ' \
             'result_tables.table_id = tables.id')
      .joins('LEFT OUTER JOIN results ON result_tables.result_id = results.id')
      .where('step_tables.id IN (?) OR result_tables.id IN (?)',
             step_ids, result_ids)

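    # Two matching modes: whole-word / whole-phrase search uses PostgreSQL
    # regular-expression operators (~ is case-sensitive, ~* is not) with \y
    # word boundaries, while the default mode falls back to LIKE / ILIKE
    # substring matching. In both modes the table contents are also checked
    # against the data_vector column via to_tsquery. trim_html_tags is
    # presumably a database-side helper function defined elsewhere in the
    # schema (it is not defined in this file).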
    if options[:whole_word].to_s == 'true' ||
       options[:whole_phrase].to_s == 'true'
      like = options[:match_case].to_s == 'true' ? '~' : '~*'
      s_query = query.gsub(/[!()&|:]/, ' ')
                     .strip
                     .split(/\s+/)
                     .map { |t| t + ':*' }
      if options[:whole_word].to_s == 'true'
        a_query = query.split
                       .map { |a| Regexp.escape(a) }
                       .join('|')
        s_query = s_query.join('|')
      else
        a_query = Regexp.escape(query)
        s_query = s_query.join('&')
      end
      a_query = '\\y(' + a_query + ')\\y'
      s_query = s_query.tr('\'', '"')

      new_query = table_query.where(
        "(trim_html_tags(tables.name) #{like} ? " \
        "OR tables.data_vector @@ to_tsquery(?))",
        a_query,
        s_query
      )
    else
      like = options[:match_case].to_s == 'true' ? 'LIKE' : 'ILIKE'
      a_query = query.split.map { |a| "%#{sanitize_sql_like(a)}%" }

      # Strip tsquery special characters, split the query on whitespace,
      # append a prefix-match wildcard to each term and join the terms with
      # the OR operator. For example, the search term 'demo project' becomes
      # 'demo:*|project:*', which matches rows containing any word starting
      # with either term.
      s_query = query.gsub(/[!()&|:]/, ' ')
                     .strip
                     .split(/\s+/)
                     .map { |t| t + ':*' }
                     .join('|')
                     .tr('\'', '"')
      new_query = table_query.where(
        "(trim_html_tags(tables.name) #{like} ANY (array[?]) " \
        "OR tables.data_vector @@ to_tsquery(?))",
        a_query,
        s_query
      )
    end

    # Show all results if needed
    if page == Constants::SEARCH_NO_LIMIT
      new_query
    else
      new_query
        .limit(Constants::SEARCH_LIMIT)
        .offset((page - 1) * Constants::SEARCH_LIMIT)
    end
  end

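  # `contents` is stored in a binary column; re-tag the raw bytes as UTF-8 so
  # callers can work with the table JSON as text. Returns nil when empty.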
  def contents_utf_8
    contents.present? ? contents.force_encoding(Encoding::UTF_8) : nil
  end

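  # Rebuild the PostgreSQL full-text index column after save. The contents
  # bytea is decoded back to text, and substring(..., 9) drops the first
  # 8 characters, presumably the leading '{"data":' of the serialized table
  # JSON, before the remainder is fed to to_tsvector.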
  def update_ts_index
    if contents_changed?
      sql = "UPDATE tables " +
            "SET data_vector = " +
            "to_tsvector(substring(encode(contents::bytea, 'escape'), 9)) " +
            "WHERE id = " + Integer(id).to_s
      Table.connection.execute(sql)
    end
  end
end