# frozen_string_literal: true
class RepositoryZipExportJob < ZipExportJob
  private

  # Override
  #
  # Builds the repository export file and writes it into +dir+.
  #
  # params:
  #   :repository_id - id of the repository (or snapshot) to export
  #   :header_ids    - column ids to include, coerced to integers
  #   :my_module_id  - when present, export only rows assigned to that module
  #   :row_ids       - when no module id is given, the explicit, ordered set
  #                    of row ids to export (order is forwarded to the
  #                    export service via +ordered_row_ids+)
  #
  # Writes "#{dir}/export.#{@file_type}" in binary mode.
  def fill_content(dir, params)
    repository = RepositoryBase.find(params[:repository_id])
    header_ids = params[:header_ids].map(&:to_i)

    # Fetch rows in the same order as in the currently viewed datatable
    service =
      if params[:my_module_id]
        RepositoryExportService.new(@file_type, module_rows(repository, params), header_ids,
                                    repository, in_module: true)
      else
        ordered_row_ids = params[:row_ids].map(&:to_i)
        rows = repository.repository_rows.where(id: ordered_row_ids)
        RepositoryExportService.new(@file_type, rows, header_ids,
                                    repository, in_module: false, ordered_row_ids: ordered_row_ids)
      end

    File.binwrite("#{dir}/export.#{@file_type}", service.export!)
  end

  # Rows scoped to the module given in params[:my_module_id].
  #
  # Snapshots already contain only the module's rows, so no join is needed;
  # live repositories are narrowed through my_module_repository_rows. When
  # the repository tracks stock, the per-module stock consumption column is
  # selected alongside the row attributes.
  def module_rows(repository, params)
    rows = if repository.is_a?(RepositorySnapshot)
             repository.repository_rows
           else
             repository.repository_rows
                       .joins(:my_module_repository_rows)
                       .where(my_module_repository_rows: { my_module_id: params[:my_module_id] })
           end
    return rows unless repository.has_stock_management?

    rows.left_joins(my_module_repository_rows: :repository_stock_unit_item)
        .select(
          'repository_rows.*',
          'my_module_repository_rows.stock_consumption'
        )
  end

  # Override — title used by the failure notification for this job type.
  def failed_notification_title
    I18n.t('activejob.failure_notifiable_job.item_notification_title',
           item: I18n.t('activejob.failure_notifiable_job.items.repository_item'))
  end
end
|