# frozen_string_literal: true
|
|
|
|
|
|
|
|
# Background job that builds a ZIP export of repository (inventory) rows.
# Inherits the archive/cleanup/notification plumbing from ZipExportJob and
# only supplies the CSV payload and the failure-notification title.
class RepositoryZipExportJob < ZipExportJob
  private

  # Override
  # Writes the exported rows as +dir+/export.csv.
  #
  # dir    - working directory the parent job zips up afterwards
  # params - expects :repository_id and :header_ids, plus either
  #          :my_module_id (task-scoped export) or :row_ids
  #          (explicit selection, in the datatable's display order)
  def fill_content(dir, params)
    repository = RepositoryBase.find(params[:repository_id])
    # Fetch rows in the same order as in the currently viewed datatable
    if params[:my_module_id]
      rows = if repository.is_a?(RepositorySnapshot)
               # A snapshot already contains only the task's rows
               repository.repository_rows
             else
               repository.repository_rows
                         .joins(:my_module_repository_rows)
                         .where(my_module_repository_rows: { my_module_id: params[:my_module_id] })
             end
      if repository.has_stock_management?
        # Expose per-task stock consumption alongside the row columns
        rows = rows.left_joins(my_module_repository_rows: :repository_stock_unit_item)
                   .select(
                     'repository_rows.*',
                     'my_module_repository_rows.stock_consumption'
                   )
      end
    else
      ordered_row_ids = params[:row_ids]
      id_row_map = RepositoryRow.where(id: ordered_row_ids,
                                       repository: repository)
                                .index_by(&:id)
      # Rebuild the user's ordering. Compact away ids whose rows were
      # deleted (or belong to another repository) since the export was
      # queued, instead of passing nils downstream and failing the job.
      rows = ordered_row_ids.collect { |id| id_row_map[id.to_i] }.compact
    end
    data = RepositoryZipExport.to_csv(rows,
                                      params[:header_ids].map(&:to_i),
                                      @user,
                                      repository,
                                      nil,
                                      params[:my_module_id].present?)
    # Replace invalid/unmappable bytes so the file is always valid UTF-8
    File.binwrite("#{dir}/export.csv", data.encode('UTF-8', invalid: :replace, undef: :replace))
  end

  # Override
  # Title for the notification shown to the user when this export fails.
  def failed_notification_title
    I18n.t('activejob.failure_notifiable_job.item_notification_title',
           item: I18n.t('activejob.failure_notifiable_job.items.repository_item'))
  end
end
|