# frozen_string_literal: true

require 'csv'

module RepositoryZipExport
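  # Builds a CSV export of the selected repository rows and hands it to
  # ZipExport for packaging. Illustrative call (params shape inferred from the
  # keys read below, not from a verified caller):
  #
  #   RepositoryZipExport.generate_zip(
  #     { header_ids: %w(-3 -4 5), row_ids: %w(12 34) },
  #     repository,
  #     current_user
  #   )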
  def self.generate_zip(params, repository, current_user)
    # Fetch rows in the same order as in the currently viewed datatable
    if params[:my_module_id]
      # Snapshot repositories are exported whole; live repositories are
      # narrowed down to the rows assigned to the given task
      rows = if repository.is_a?(RepositorySnapshot)
               repository.repository_rows
             else
               repository.repository_rows
                         .joins(:my_module_repository_rows)
                         .where(my_module_repository_rows: { my_module_id: params[:my_module_id] })
             end
    else
      ordered_row_ids = params[:row_ids]
      id_row_map = RepositoryRow.where(id: ordered_row_ids,
                                       repository: repository)
                                .index_by(&:id)
      rows = ordered_row_ids.collect { |id| id_row_map[id.to_i] }
    end

    zip = ZipExport.create(user: current_user)
    zip.generate_exportable_zip(
      current_user,
      to_csv(rows, params[:header_ids], current_user, repository.team),
      :repositories
    )
  end
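
  # Serializes the given rows into a CSV string. Negative column ids select
  # built-in columns (-3 id, -4 name, -5 added by, -6 added on, -7 archived by,
  # -8 archived on; -1 and -2 are skipped), while positive ids reference custom
  # RepositoryColumn records.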
  def self.to_csv(rows, column_ids, user, team, handle_file_name_func = nil)
    # Parse column names
    csv_header = []
    column_ids.each do |c_id|
      csv_header << case c_id.to_i
                    when -1, -2
                      next
                    when -3
                      I18n.t('repositories.table.id')
                    when -4
                      I18n.t('repositories.table.row_name')
                    when -5
                      I18n.t('repositories.table.added_by')
                    when -6
                      I18n.t('repositories.table.added_on')
                    when -7
                      I18n.t('repositories.table.archived_by')
                    when -8
                      I18n.t('repositories.table.archived_on')
                    else
                      column = RepositoryColumn.find_by_id(c_id)
                      column ? column.name : nil
                    end
    end

    CSV.generate do |csv|
      csv << csv_header
      rows.each do |row|
        csv_row = []
        column_ids.each do |c_id|
          csv_row << case c_id.to_i
                     when -1, -2
                       next
                     when -3
                       row.id
                     when -4
                       row.name
                     when -5
                       row.created_by.full_name
                     when -6
                       I18n.l(row.created_at, format: :full)
                     when -7
                       row.archived_by.full_name
                     when -8
                       I18n.l(row.archived_on, format: :full)
                     else
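                       # Custom column: look up the cell; asset (file) cells go
                       # through handle_file_name_func when one is supplied,
                       # presumably so the caller controls the exported file
                       # reference, while all other cells are exported as text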
                       cell = row.repository_cells
                                 .find_by(repository_column_id: c_id)
                       if cell
                         if cell.value_type == 'RepositoryAssetValue' &&
                            handle_file_name_func
                           handle_file_name_func.call(cell.value.asset)
                         else
                           SmartAnnotations::TagToText.new(
                             user, team, cell.value.export_formatted
                           ).text
                         end
                       end
                     end
        end
        csv << csv_row
      end
    end
  end
end