Refactor old export async methods to ActiveJobs [SCI-9092] (#6170)

Alex Kriuchykhin 2023-09-08 11:35:16 +02:00 committed by GitHub
parent 4abc2288b3
commit d8965e8ef9
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
17 changed files with 475 additions and 488 deletions
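
In short, the old Delayed::Job-based export paths (ZipExport#generate_exportable_zip, TeamZipExport#generate_exportable_zip, RepositoryZipExport.generate_zip and RepositoryStockLedgerZipExport.generate_zip, all run via handle_asynchronously) are replaced by ActiveJob classes built on a new ZipExportJob base class; controllers now only enqueue a job with a user_id and a small params hash. A condensed sketch of the pattern the diffs below implement (method bodies abbreviated for illustration):

# --- Illustrative sketch, not part of the commit; bodies abbreviated ---
class ZipExportJob < ApplicationJob
  def perform(user_id:, params: {})
    @user = User.find(user_id)
    # create temp dirs, call fill_content(dir, params), zip the result,
    # attach it to a ZipExport record and notify the user
  end

  private

  # Each concrete export job overrides this to write its files into dir.
  def fill_content(dir, params)
    raise NotImplementedError
  end
end

# Controller side: enqueue a concrete subclass instead of calling the old
# synchronous/Delayed::Job export method.
RepositoryZipExportJob.perform_later(
  user_id: current_user.id,
  params: { repository_id: @repository.id, header_ids: params[:header_ids] }
)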


@@ -152,7 +152,14 @@ class MyModuleRepositoriesController < ApplicationController
def export_repository
if params[:header_ids]
RepositoryZipExport.generate_zip(params, @repository, current_user)
RepositoryZipExportJob.perform_later(
user_id: current_user.id,
params: {
repository_id: @repository.id,
my_module_id: @my_module.id,
header_ids: params[:header_ids]
}
)
Activities::CreateActivityService.call(
activity_type: :export_inventory_items_assigned_to_task,


@@ -103,7 +103,14 @@ class MyModuleRepositorySnapshotsController < ApplicationController
def export_repository_snapshot
if params[:header_ids]
RepositoryZipExport.generate_zip(params, @repository_snapshot, current_user)
RepositoryZipExportJob.perform_later(
user_id: current_user.id,
params: {
repository_id: @repository_snapshot.id,
my_module_id: @my_module.id,
header_ids: params[:header_ids]
}
)
Activities::CreateActivityService.call(
activity_type: :export_inventory_snapshot_items_assigned_to_task,


@@ -337,7 +337,14 @@ class RepositoriesController < ApplicationController
def export_repository
if params[:row_ids] && params[:header_ids]
RepositoryZipExport.generate_zip(params, @repository, current_user)
RepositoryZipExportJob.perform_later(
user_id: current_user.id,
params: {
repository_id: @repository.id,
row_ids: params[:row_ids],
header_ids: params[:header_ids]
}
)
log_activity(:export_inventory_items)
render json: { message: t('zip_export.export_request_success') }
else
@@ -359,7 +366,12 @@ class RepositoriesController < ApplicationController
def export_repository_stock_items
row_ids = @repository.repository_rows.where(id: params[:row_ids]).pluck(:id)
if row_ids.any?
RepositoryStockLedgerZipExport.generate_zip(row_ids, current_user.id)
RepositoryStockZipExportJob.perform_later(
user_id: current_user.id,
params: {
repository_row_ids: row_ids
}
)
render json: { message: t('zip_export.export_request_success') }
else
render json: { message: t('zip_export.export_error') }, status: :unprocessable_entity


@@ -25,16 +25,18 @@ class TeamsController < ApplicationController
def export_projects
if current_user.has_available_exports?
current_user.increase_daily_exports_counter!
generate_export_projects_zip
TeamZipExportJob.perform_later(
user_id: current_user.id,
params: {
team_id: @team.id,
project_ids: @exp_projects.collect(&:id)
}
)
log_activity(:export_projects,
team: @team.id,
projects: @exp_projects.map(&:name).join(', '))
render json: {
flash: t('projects.export_projects.success_flash')
}, status: :ok
render json: { flash: t('projects.export_projects.success_flash') }
end
end
@@ -147,20 +149,6 @@ class TeamsController < ApplicationController
end
end
def generate_export_projects_zip
ids = @exp_projects.index_by(&:id)
options = { team: @team }
zip = TeamZipExport.create(user: current_user)
zip.generate_exportable_zip(
current_user.id,
ids,
:teams,
options
)
ids
end
def log_activity(type_of, message_items = {})
Activities::CreateActivityService
.call(activity_type: type_of,


@@ -144,8 +144,7 @@ module Protocols
"href='#{Rails.application.routes.url_helpers.protocol_path(@protocol)}'>" \
"#{@protocol.name}</a>"
)
UserNotification.create!(notification: notification, user: @user)
notification.create_user_notification(@user)
end
# Overrides method from FailedDeliveryNotifiableJob concern


@@ -94,7 +94,7 @@ class RepositoriesExportJob < ApplicationJob
.zip_exports_download_export_all_path(@zip_export)}'>" \
"#{@zip_export.zip_file_name}</a>"
)
UserNotification.create!(notification: notification, user: @user)
notification.create_user_notification(@user)
end
# Overrides method from FailedDeliveryNotifiableJob concern


@@ -0,0 +1,11 @@
# frozen_string_literal: true
class RepositoryStockZipExportJob < ZipExportJob
private
# Override
def fill_content(dir, params)
data = RepositoryStockLedgerZipExport.to_csv(params[:repository_row_ids])
File.binwrite("#{dir}/export.csv", data)
end
end
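
The job above is enqueued from RepositoriesController#export_repository_stock_items (see the controller hunk earlier). For reference, an equivalent synchronous invocation from a console or test might look like this (the records used are placeholders):

# --- Illustrative invocation, placeholder records ---
RepositoryStockZipExportJob.perform_now(
  user_id: User.first.id,
  params: { repository_row_ids: RepositoryRow.limit(3).ids }
)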


@@ -0,0 +1,40 @@
# frozen_string_literal: true
class RepositoryZipExportJob < ZipExportJob
private
# Override
def fill_content(dir, params)
repository = Repository.find(params[:repository_id])
# Fetch rows in the same order as in the currently viewed datatable
if params[:my_module_id]
rows = if repository.is_a?(RepositorySnapshot)
repository.repository_rows
else
repository.repository_rows
.joins(:my_module_repository_rows)
.where(my_module_repository_rows: { my_module_id: params[:my_module_id] })
end
if repository.has_stock_management?
rows = rows.left_joins(my_module_repository_rows: :repository_stock_unit_item)
.select(
'repository_rows.*',
'my_module_repository_rows.stock_consumption'
)
end
else
ordered_row_ids = params[:row_ids]
id_row_map = RepositoryRow.where(id: ordered_row_ids,
repository: repository)
.index_by(&:id)
rows = ordered_row_ids.collect { |id| id_row_map[id.to_i] }
end
data = RepositoryZipExport.to_csv(rows,
params[:header_ids],
@user,
repository,
nil,
params[:my_module_id].present?)
File.binwrite("#{dir}/export.csv", data)
end
end
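
Note that the row_ids branch deliberately keeps the order requested by the datatable: rows are indexed by id and then re-collected in the original id order. A standalone illustration of that idiom (sample ids are placeholders):

# --- Illustrative sketch of the ordering idiom used above ---
ordered_row_ids = ['12', '7', '30'] # ids as sent by the datatable, as strings
id_row_map = RepositoryRow.where(id: ordered_row_ids).index_by(&:id)
rows = ordered_row_ids.collect { |id| id_row_map[id.to_i] } # same order as requested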


@@ -0,0 +1,281 @@
# frozen_string_literal: true
require 'fileutils'
require 'csv'
class TeamZipExportJob < ZipExportJob
include StringUtility
private
# Override
def zip_name
"projects_export_#{Time.now.utc.strftime('%F_%H-%M-%S_UTC')}.zip"
end
# Override
def fill_content(dir, params)
# Create team folder
@team = Team.find(params[:team_id])
projects = @team.projects.where(id: params[:project_ids])
team_path = "#{dir}/#{to_filesystem_name(@team.name)}"
FileUtils.mkdir_p(team_path)
# Iterate through every project
p_idx = p_archive_idx = 0
projects.each do |project|
idx = project.archived ? (p_archive_idx += 1) : (p_idx += 1)
project_path = make_model_dir(team_path, project, idx)
project_name = project_path.split('/')[-1]
obj_filenames = { repositories: {}, assets: {}, tables: {} }
# Change current dir for correct generation of relative links
Dir.chdir(project_path)
project_path = '.'
inventories = "#{project_path}/Inventories"
FileUtils.mkdir_p(inventories)
repositories = project.assigned_repositories_and_snapshots
# Iterate through every inventory repo and save it to CSV
repositories.each_with_index do |repo, repo_idx|
next if obj_filenames[:repositories][repo.id].present?
obj_filenames[:repositories][repo.id] = {
file: save_inventories_to_csv(inventories, repo, repo_idx)
}
end
# Include all experiments
ex_idx = ex_archive_idx = 0
project.experiments.each do |experiment|
idx = experiment.archived ? (ex_archive_idx += 1) : (ex_idx += 1)
experiment_path = make_model_dir(project_path, experiment, idx)
# Include all modules
mod_pos = mod_archive_pos = 0
experiment.my_modules.order(:workflow_order).each do |my_module|
pos = my_module.archived ? (mod_archive_pos += 1) : (mod_pos += 1)
my_module_path = make_model_dir(experiment_path, my_module, pos)
# Create upper directories for both elements
protocol_path = "#{my_module_path}/Protocol attachments"
result_path = "#{my_module_path}/Result attachments"
FileUtils.mkdir_p(protocol_path)
FileUtils.mkdir_p(result_path)
# Export protocols
steps = my_module.protocols.map(&:steps).flatten
obj_filenames[:assets].merge!(
export_assets(StepAsset.where(step: steps), :step, protocol_path)
)
obj_filenames[:tables].merge!(
export_tables(StepTable.where(step: steps), :step, protocol_path)
)
# Export results
[false, true].each do |archived|
obj_filenames[:assets].merge!(
export_assets(
ResultAsset.where(result: my_module.results.where(archived: archived)),
:result,
result_path,
archived
)
)
end
[false, true].each do |archived|
obj_filenames[:tables].merge!(
export_tables(
ResultTable.where(result: my_module.results.where(archived: archived)),
:result,
result_path,
archived
)
)
end
end
end
# Generate and export whole project report HTML
html_name = "#{project_name} Report.html"
project_report_pdf = project.generate_teams_export_report_html(
@user, @team, html_name, obj_filenames
)
File.binwrite("#{project_path}/#{html_name}", project_report_pdf)
end
ensure
# Change current dir outside dir, since it will be deleted
Dir.chdir(Rails.root)
end
# Create directory for project, experiment, or module
def make_model_dir(parent_path, model, index)
# For MyModule, the index indicates its position in project sidebar
if model.instance_of?(MyModule)
class_name = 'module'
model_format = '(%<idx>s) %<name>s'
else
class_name = model.class.to_s.downcase.pluralize
model_format = '%<name>s (%<idx>s)'
end
model_name =
format(model_format, idx: index, name: to_filesystem_name(model.name))
model_path = parent_path
if model.archived
model_path += "/Archived #{class_name}"
FileUtils.mkdir_p(model_path)
end
model_path += "/#{model_name}"
FileUtils.mkdir_p(model_path)
model_path
end
# Appends given suffix to file_name and then adds original extension
def append_file_suffix(file_name, suffix)
ext = File.extname(file_name)
File.basename(file_name, ext) + suffix + ext
end
def create_archived_results_folder(result_path)
path = "#{result_path}/Archived attachments"
FileUtils.mkdir_p(path) unless File.directory?(path)
path
end
# Helper method to extract given assets to the directory
def export_assets(elements, type, directory, archived = false)
directory = create_archived_results_folder(directory) if archived && elements.present?
asset_indexes = {}
elements.each_with_index do |element, i|
asset = element.asset
preview = prepare_preview(asset)
if type == :step
name = "#{directory}/" \
"#{append_file_suffix(asset.file_name, "_#{i}_Step#{element.step.position_plus_one}")}"
if preview
preview_name = "#{directory}/" \
"#{append_file_suffix(preview[:file_name], "_#{i}_Step#{element.step.position_plus_one}_preview")}"
end
elsif type == :result
name = "#{directory}/#{append_file_suffix(asset.file_name, "_#{i}")}"
preview_name = "#{directory}/#{append_file_suffix(preview[:file_name], "_#{i}_preview")}" if preview
end
if asset.file.attached?
begin
File.binwrite(name, asset.file.download)
File.binwrite(preview_name, preview[:file_data]) if preview
rescue ActiveStorage::FileNotFoundError
next
end
end
asset_indexes[asset.id] = {
file: name,
preview: preview_name
}
end
asset_indexes
end
def prepare_preview(asset)
if asset.previewable? && !asset.list?
preview = asset.inline? ? asset.large_preview : asset.medium_preview
if preview.is_a?(ActiveStorage::Preview)
return unless preview.image.attached?
file_name = preview.image.filename.to_s
file_data = preview.image.download
else
file_name = preview.blob.filename.to_s
begin
file_data = preview.processed.service.download(preview.key)
# handle files not processable by Vips (no preview available) or missing
rescue Vips::Error, ActiveStorage::FileNotFoundError
return nil
end
end
{
file_name: file_name,
file_data: file_data
}
end
end
# Helper method to extract given tables to the directory
def export_tables(elements, type, directory, archived = false)
directory = create_archived_results_folder(directory) if archived && elements.present?
table_indexes = {}
elements.each_with_index do |element, i|
table = element.table
table_name = table.name.presence || 'Table'
table_name += i.to_s
if type == :step
name = "#{directory}/#{to_filesystem_name(table_name)}" \
"_#{i}_Step#{element.step.position_plus_one}.csv"
elsif type == :result
name = "#{directory}/#{to_filesystem_name(table_name)}.csv"
end
File.binwrite(name, table.to_csv)
table_indexes[table.id] = {
file: name
}
end
table_indexes
end
# Helper method for saving inventories to CSV
def save_inventories_to_csv(path, repo, idx)
repo_name = "#{to_filesystem_name(repo.name)} (#{idx})"
# Attachment folder
rel_attach_path = "#{repo_name} attachments"
attach_path = "#{path}/#{rel_attach_path}"
FileUtils.mkdir_p(attach_path)
# CSV file
csv_file_path = "#{path}/#{repo_name}.csv"
# Define headers and columns IDs
col_ids = [-3, -4, -5, -6] + repo.repository_columns.map(&:id)
# Define callback function for file name
assets = {}
asset_counter = 0
handle_name_func = lambda do |asset|
file_name = append_file_suffix(asset.file_name, "_#{asset_counter}").to_s
# Save pair for downloading it later
assets[asset] = "#{attach_path}/#{file_name}"
asset_counter += 1
rel_path = "#{rel_attach_path}/#{file_name}"
return "=HYPERLINK(\"#{rel_path}\", \"#{rel_path}\")"
end
# Generate CSV
csv_data = RepositoryZipExport.to_csv(repo.repository_rows, col_ids, @user, repo, handle_name_func)
File.binwrite(csv_file_path, csv_data)
# Save all attachments (this cannot be done directly in the callback function)
assets.each do |asset, asset_path|
asset.file.open do |file|
FileUtils.cp(file.path, asset_path)
end
rescue ActiveStorage::FileNotFoundError
next
end
csv_file_path
end
end


@@ -0,0 +1,50 @@
# frozen_string_literal: true
class ZipExportJob < ApplicationJob
include FailedDeliveryNotifiableJob
def perform(user_id:, params: {})
@user = User.find(user_id)
I18n.backend.date_format = @user.settings[:date_format] || Constants::DEFAULT_DATE_FORMAT
ZipExport.transaction do
@zip_export = ZipExport.create!(user: @user)
zip_input_dir = FileUtils.mkdir_p(Rails.root.join("tmp/temp_zip_#{Time.now.to_i}").to_s).first
zip_dir = FileUtils.mkdir_p(Rails.root.join('tmp/zip-ready').to_s).first
full_zip_name = File.join(zip_dir, zip_name)
fill_content(zip_input_dir, params)
@zip_export.zip!(zip_input_dir, full_zip_name)
@zip_export.zip_file.attach(io: File.open(full_zip_name), filename: zip_name)
generate_notification!
ensure
FileUtils.rm_rf([zip_input_dir, full_zip_name], secure: true)
end
ensure
I18n.backend.date_format = nil
end
private
def zip_name
"export_#{Time.now.utc.strftime('%F %H-%M-%S_UTC')}.zip"
end
def fill_content(dir, params)
raise NotImplementedError
end
def generate_notification!
notification = Notification.create!(
type_of: :deliver,
title: I18n.t('zip_export.notification_title'),
message: "<a data-id='#{@zip_export.id}' " \
"data-turbolinks='false' " \
"href='#{Rails.application
.routes
.url_helpers
.zip_exports_download_path(@zip_export)}'>" \
"#{@zip_export.zip_file_name}</a>"
)
notification.create_user_notification(@user)
end
end
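
New export types can follow the same template: subclass ZipExportJob and override fill_content (and, optionally, zip_name). A hypothetical example, for illustration only (the class name and exported content are not part of this commit):

# --- Hypothetical subclass, illustration only ---
class ProjectListZipExportJob < ZipExportJob
  private

  # Override: write whatever files should end up inside the zip into dir.
  def fill_content(dir, params)
    data = Project.where(id: params[:project_ids]).pluck(:name).join("\n")
    File.binwrite("#{dir}/projects.txt", data)
  end

  # Optional override: customize the generated zip file name.
  def zip_name
    "project_list_#{Time.now.utc.strftime('%F_%H-%M-%S_UTC')}.zip"
  end
end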


@@ -4,22 +4,24 @@ module PrefixedIdModel
extend ActiveSupport::Concern
included do
begin
indexdef = "CREATE INDEX index_#{table_name}_on_#{name.underscore}_code"\
" ON public.#{table_name} USING gin ((('#{self::ID_PREFIX}'::text || id)) gin_trgm_ops)"
unless Rails.env.production?
begin
indexdef = "CREATE INDEX index_#{table_name}_on_#{name.underscore}_code " \
"ON public.#{table_name} USING gin ((('#{self::ID_PREFIX}'::text || id)) gin_trgm_ops)"
index_exists = ActiveRecord::Base.connection.execute(
"SELECT indexdef FROM pg_indexes WHERE tablename NOT LIKE 'pg%';"
).to_a.map(&:values).flatten.include?(indexdef)
index_exists = ActiveRecord::Base.connection.execute(
"SELECT indexdef FROM pg_indexes WHERE tablename NOT LIKE 'pg%';"
).to_a.map(&:values).flatten.include?(indexdef)
# rubocop:disable Rails/Output
puts("\nWARNING missing index\n#{indexdef}\nfor prefixed id model #{name}!\n\n") unless index_exists
# rubocop:enable Rails/Output
rescue ActiveRecord::NoDatabaseError, ActiveRecord::ConnectionNotEstablished
# only applicable during build and when setting up project
# rubocop:disable Rails/Output
puts("\nWARNING missing index\n#{indexdef}\nfor prefixed id model #{name}!\n\n") unless index_exists
# rubocop:enable Rails/Output
rescue ActiveRecord::NoDatabaseError, ActiveRecord::ConnectionNotEstablished
# only applicable during build and when setting up project
end
end
self::PREFIXED_ID_SQL = "('#{self::ID_PREFIX}' || #{table_name}.id)"
self::PREFIXED_ID_SQL = "('#{self::ID_PREFIX}' || #{table_name}.id)".freeze
def code
"#{self.class::ID_PREFIX}#{id}"


@@ -14,6 +14,7 @@ class MyModuleRepositoryRow < ApplicationRecord
touch: true,
inverse_of: :my_module_repository_rows
belongs_to :repository_stock_unit_item, optional: true
has_many :repository_ledger_records, as: :reference, dependent: :nullify
validates :repository_row, uniqueness: { scope: :my_module }


@@ -6,4 +6,15 @@ class RepositoryLedgerRecord < ApplicationRecord
belongs_to :repository_stock_value
belongs_to :reference, polymorphic: true
belongs_to :user
belongs_to :repository,
(lambda do |repository_ledger_record|
repository_ledger_record.reference_type == 'RepositoryBase' ? self : none
end),
optional: true, foreign_key: :reference_id, inverse_of: :repository_ledger_records
belongs_to :my_module_repository_row,
(lambda do |repository_ledger_record|
repository_ledger_record.reference_type == 'MyModuleRepositoryRow' ? self : none
end),
optional: true, foreign_key: :reference_id, inverse_of: :repository_ledger_records
has_one :repository_row, through: :repository_stock_value
end
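
Because reference is polymorphic, the two new conditional belongs_to associations resolve against the same reference_id column and only return a record when reference_type matches. A short sketch of the intended access pattern, based on the association definitions above:

# --- Illustrative access pattern ---
record = RepositoryLedgerRecord.last
if record.reference_type == 'MyModuleRepositoryRow'
  my_module = record.my_module_repository_row.my_module # stock consumed on a task
elsif record.reference_type == 'RepositoryBase'
  repository = record.repository # stock changed directly on the inventory
end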


@@ -1,311 +1,7 @@
# frozen_string_literal: true
require 'fileutils'
require 'csv'
class TeamZipExport < ZipExport
include StringUtility
def generate_exportable_zip(user_id, data, type, options = {})
@user = User.find(user_id)
zip_input_dir = FileUtils.mkdir_p(
File.join(Rails.root, "tmp/temp_zip_#{Time.now.to_i}")
).first
zip_dir = FileUtils.mkdir_p(File.join(Rails.root, 'tmp/zip-ready')).first
zip_name = "projects_export_#{Time.now.utc.strftime('%F_%H-%M-%S_UTC')}.zip"
full_zip_name = File.join(zip_dir, zip_name)
fill_content(zip_input_dir, data, type, options)
zip!(zip_input_dir, full_zip_name)
zip_file.attach(io: File.open(full_zip_name), filename: zip_name)
generate_notification(user) if save
ensure
FileUtils.rm_rf([zip_input_dir, full_zip_name], secure: true)
end
handle_asynchronously :generate_exportable_zip,
queue: :team_zip_export
def self.exports_limit
(Rails.application.secrets.export_all_limit_24h || 3).to_i
end
private
# Export all functionality
def generate_teams_zip(tmp_dir, data, options = {})
# Create team folder
@team = options[:team]
team_path = "#{tmp_dir}/#{to_filesystem_name(@team.name)}"
FileUtils.mkdir_p(team_path)
# Iterate through every project
p_idx = p_archive_idx = 0
data.each do |(_, p)|
idx = p.archived ? (p_archive_idx += 1) : (p_idx += 1)
project_path = make_model_dir(team_path, p, idx)
project_name = project_path.split('/')[-1]
obj_filenames = { repositories: {}, assets: {}, tables: {} }
# Change current dir for correct generation of relative links
Dir.chdir(project_path)
project_path = '.'
inventories = "#{project_path}/Inventories"
FileUtils.mkdir_p(inventories)
repositories = p.assigned_repositories_and_snapshots
# Iterate through every inventory repo and save it to CSV
repositories.each_with_index do |repo, repo_idx|
next if obj_filenames[:repositories][repo.id].present?
obj_filenames[:repositories][repo.id] = {
file: save_inventories_to_csv(inventories, repo, repo_idx)
}
end
# Include all experiments
ex_idx = ex_archive_idx = 0
p.experiments.each do |ex|
idx = ex.archived ? (ex_archive_idx += 1) : (ex_idx += 1)
experiment_path = make_model_dir(project_path, ex, idx)
# Include all modules
mod_pos = mod_archive_pos = 0
ex.my_modules.order(:workflow_order).each do |my_module|
pos = my_module.archived ? (mod_archive_pos += 1) : (mod_pos += 1)
my_module_path = make_model_dir(experiment_path, my_module, pos)
# Create upper directories for both elements
protocol_path = "#{my_module_path}/Protocol attachments"
result_path = "#{my_module_path}/Result attachments"
FileUtils.mkdir_p(protocol_path)
FileUtils.mkdir_p(result_path)
# Export protocols
steps = my_module.protocols.map(&:steps).flatten
obj_filenames[:assets].merge!(
export_assets(StepAsset.where(step: steps), :step, protocol_path)
)
obj_filenames[:tables].merge!(
export_tables(StepTable.where(step: steps), :step, protocol_path)
)
# Export results
[false, true].each do |archived|
obj_filenames[:assets].merge!(
export_assets(
ResultAsset.where(result: my_module.results.where(archived: archived)),
:result,
result_path,
archived
)
)
end
[false, true].each do |archived|
obj_filenames[:tables].merge!(
export_tables(
ResultTable.where(result: my_module.results.where(archived: archived)),
:result,
result_path,
archived
)
)
end
end
end
# Generate and export whole project report HTML
html_name = "#{project_name} Report.html"
project_report_pdf = p.generate_teams_export_report_html(
@user, @team, html_name, obj_filenames
)
file = FileUtils.touch("#{project_path}/#{html_name}").first
File.open(file, 'wb') { |f| f.write(project_report_pdf) }
end
ensure
# Change current dir outside tmp_dir, since tmp_dir will be deleted
Dir.chdir(Rails.root)
end
def generate_notification(user)
notification = Notification.create(
type_of: :deliver,
title: I18n.t('zip_export.notification_title'),
message: "<a data-id='#{id}' " \
"data-turbolinks='false' " \
"href='#{Rails.application
.routes
.url_helpers
.zip_exports_download_export_all_path(self)}'>" \
"#{zip_file_name}</a>"
)
UserNotification.create(notification: notification, user: user)
end
# Create directory for project, experiment, or module
def make_model_dir(parent_path, model, index)
# For MyModule, the index indicates its position in project sidebar
if model.class == MyModule
class_name = 'module'
model_format = '(%<idx>s) %<name>s'
else
class_name = model.class.to_s.downcase.pluralize
model_format = '%<name>s (%<idx>s)'
end
model_name =
format(model_format, idx: index, name: to_filesystem_name(model.name))
model_path = parent_path
if model.archived
model_path += "/Archived #{class_name}"
FileUtils.mkdir_p(model_path)
end
model_path += "/#{model_name}"
FileUtils.mkdir_p(model_path)
model_path
end
# Appends given suffix to file_name and then adds original extension
def append_file_suffix(file_name, suffix)
ext = File.extname(file_name)
File.basename(file_name, ext) + suffix + ext
end
def create_archived_results_folder(result_path)
path = "#{result_path}/Archived attachments"
FileUtils.mkdir_p(path) unless File.directory?(path)
path
end
# Helper method to extract given assets to the directory
def export_assets(elements, type, directory, archived = false)
directory = create_archived_results_folder(directory) if archived && elements.present?
asset_indexes = {}
elements.each_with_index do |element, i|
asset = element.asset
preview = prepare_preview(asset)
if type == :step
name = "#{directory}/" \
"#{append_file_suffix(asset.file_name, "_#{i}_Step#{element.step.position_plus_one}")}"
if preview
preview_name = "#{directory}/" \
"#{append_file_suffix(preview[:file_name], "_#{i}_Step#{element.step.position_plus_one}_preview")}"
end
elsif type == :result
name = "#{directory}/#{append_file_suffix(asset.file_name, "_#{i}")}"
preview_name = "#{directory}/#{append_file_suffix(preview[:file_name], "_#{i}_preview")}" if preview
end
if asset.file.attached?
File.open(name, 'wb') { |f| f.write(asset.file.download) }
File.open(preview_name, 'wb') { |f| f.write(preview[:file_data]) } if preview
end
asset_indexes[asset.id] = {
file: name,
preview: preview_name
}
end
asset_indexes
end
def prepare_preview(asset)
if asset.previewable? && !asset.list?
preview = asset.inline? ? asset.large_preview : asset.medium_preview
if preview.is_a?(ActiveStorage::Preview)
return unless preview.image.attached?
file_name = preview.image.filename.to_s
file_data = preview.image.download
else
file_name = preview.blob.filename.to_s
begin
file_data = preview.processed.service.download(preview.key)
rescue Vips::Error # handle files not processable by Vips (no preview available)
return nil
end
end
{
file_name: file_name,
file_data: file_data
}
end
end
# Helper method to extract given tables to the directory
def export_tables(elements, type, directory, archived = false)
directory = create_archived_results_folder(directory) if archived && elements.present?
table_indexes = {}
elements.each_with_index do |element, i|
table = element.table
table_name = table.name.presence || 'Table'
table_name += i.to_s
if type == :step
name = "#{directory}/#{to_filesystem_name(table_name)}" \
"_#{i}_Step#{element.step.position_plus_one}.csv"
elsif type == :result
name = "#{directory}/#{to_filesystem_name(table_name)}.csv"
end
file = FileUtils.touch(name).first
File.open(file, 'wb') { |f| f.write(table.to_csv) }
table_indexes[table.id] = {
file: name
}
end
table_indexes
end
# Helper method for saving inventories to CSV
def save_inventories_to_csv(path, repo, idx)
repo_name = "#{to_filesystem_name(repo.name)} (#{idx})"
# Attachment folder
rel_attach_path = "#{repo_name} attachments"
attach_path = "#{path}/#{rel_attach_path}"
FileUtils.mkdir_p(attach_path)
# CSV file
csv_file_path = "#{path}/#{repo_name}.csv"
csv_file = FileUtils.touch(csv_file_path).first
# Define headers and columns IDs
col_ids = [-3, -4, -5, -6] + repo.repository_columns.map(&:id)
# Define callback function for file name
assets = {}
asset_counter = 0
handle_name_func = lambda do |asset|
file_name = append_file_suffix(asset.file_name, "_#{asset_counter}").to_s
# Save pair for downloading it later
assets[asset] = "#{attach_path}/#{file_name}"
asset_counter += 1
rel_path = "#{rel_attach_path}/#{file_name}"
return "=HYPERLINK(\"#{rel_path}\", \"#{rel_path}\")"
end
# Generate CSV
csv_data = RepositoryZipExport.to_csv(repo.repository_rows, col_ids, @user, repo, handle_name_func)
File.open(csv_file, 'wb') { |f| f.write(csv_data) }
# Save all attachments (this cannot be done directly in the callback function)
assets.each do |asset, asset_path|
asset.file.open do |file|
FileUtils.cp(file.path, asset_path)
end
end
csv_file_path
end
end


@@ -4,20 +4,6 @@ require 'zip'
require 'fileutils'
require 'csv'
# To use ZipExport you have to define the generate_( type )_zip method!
# Example:
# def generate_(type)_zip(tmp_dir, data, options = {})
# attributes = options.fetch(:attributes) { :attributes_missing }
# file = FileUtils.touch("#{tmp_dir}/export.csv").first
# records = data
# CSV.open(file, 'wb') do |csv|
# csv << attributes
# records.find_each do |entity|
# csv << entity.values_at(*attributes.map(&:to_sym))
# end
# end
# end
class ZipExport < ApplicationRecord
belongs_to :user, optional: true
@@ -26,8 +12,7 @@ class ZipExport < ApplicationRecord
after_create :self_destruct
def self.delete_expired_export(id)
export = find_by_id(id)
export&.destroy
find_by(id: id)&.destroy
end
def zip_file_name
@@ -45,62 +30,10 @@ class ZipExport < ApplicationRecord
end
end
def generate_exportable_zip(user_id, data, type, options = {})
user = User.find(user_id)
I18n.backend.date_format = user.settings[:date_format] || Constants::DEFAULT_DATE_FORMAT
zip_input_dir = FileUtils.mkdir_p(File.join(Rails.root, "tmp/temp_zip_#{Time.now.to_i}")).first
tmp_zip_dir = FileUtils.mkdir_p(File.join(Rails.root, 'tmp/zip-ready')).first
tmp_zip_name = "export_#{Time.now.strftime('%F %H-%M-%S_UTC')}.zip"
tmp_full_zip_name = File.join(tmp_zip_dir, tmp_zip_name)
fill_content(zip_input_dir, data, type, options)
zip!(zip_input_dir, tmp_full_zip_name)
zip_file.attach(io: File.open(tmp_full_zip_name), filename: tmp_zip_name)
generate_notification(user) if save
ensure
FileUtils.rm_rf([zip_input_dir, tmp_full_zip_name], secure: true)
end
handle_asynchronously :generate_exportable_zip
private
def self_destruct
ZipExport.delay(run_at: Constants::EXPORTABLE_ZIP_EXPIRATION_DAYS.days.from_now)
.delete_expired_export(id)
end
def method_missing(method_name, *args, &block)
return super unless method_name.to_s.start_with?('generate_')
raise StandardError, 'Method is missing! To use this zip_export you have to define a method: generate_( type )_zip.'
end
def respond_to_missing?(method_name, include_private = false)
method_name.to_s.start_with?('generate_') || super
end
def fill_content(dir, data, type, options = {})
eval("generate_#{type}_zip(dir, data, options)")
end
def generate_notification(user)
notification = Notification.create(
type_of: :deliver,
title: I18n.t('zip_export.notification_title'),
message: "<a data-id='#{id}' " \
"data-turbolinks='false' " \
"href='#{Rails.application
.routes
.url_helpers
.zip_exports_download_path(self)}'>" \
"#{zip_file_name}</a>"
)
UserNotification.create(notification: notification, user: user)
end
def generate_repositories_zip(tmp_dir, data, _options = {})
file = FileUtils.touch("#{tmp_dir}/export.csv").first
File.open(file, 'wb') { |f| f.write(data) }
end
end


@@ -22,48 +22,31 @@ module RepositoryStockLedgerZipExport
stock_balance_unit
).freeze
def self.generate_zip(row_ids, user_id)
rows = generate_data(row_ids)
zip = ZipExport.create(user_id: user_id)
zip.generate_exportable_zip(
user_id,
to_csv(rows),
:repositories
)
end
def self.to_csv(rows)
def self.to_csv(repository_row_ids)
csv_header = COLUMNS.map { |col| I18n.t("repository_stock_values.stock_export.headers.#{col}") }
repository_ledger_records = load_records(repository_row_ids)
CSV.generate do |csv|
csv << csv_header
rows.each do |row|
csv << row
repository_ledger_records.each do |record|
csv << generate_record_data(record)
end
end
end
def self.generate_data(row_ids)
data = []
repository_ledger_records =
RepositoryLedgerRecord.joins(repository_stock_value: :repository_row)
.includes(:user, { repository_stock_value: :repository_row })
.where(repository_row: { id: row_ids })
.joins('LEFT OUTER JOIN my_module_repository_rows ON
repository_ledger_records.reference_id = my_module_repository_rows.id')
.joins('LEFT OUTER JOIN my_modules ON
my_modules.id = my_module_repository_rows.my_module_id')
.joins('LEFT OUTER JOIN experiments ON experiments.id = my_modules.experiment_id')
.joins('LEFT OUTER JOIN projects ON projects.id = experiments.project_id')
.joins('LEFT OUTER JOIN teams ON teams.id = projects.team_id')
.order('repository_row.created_at, repository_ledger_records.created_at')
.select('repository_ledger_records.*,
my_modules.id AS module_id, my_modules.name AS module_name,
projects.name AS project_name, teams.name AS team_name,
experiments.name AS experiment_name')
# rubocop:disable Metrics/BlockLength
repository_ledger_records.each do |record|
class << self
private
def load_records(repository_row_ids)
RepositoryLedgerRecord
.joins(:repository_row)
.preload(:user, repository_row: { repository: :team })
.preload(my_module_repository_row: { my_module: { experiment: { project: :team } } })
.where(repository_row: { id: repository_row_ids })
.order(:created_at)
end
def generate_record_data(record)
consumption_type = record.reference_type == 'MyModuleRepositoryRow' ? 'Task' : 'Inventory'
if record.amount.positive?
@@ -78,32 +61,31 @@ module RepositoryStockLedgerZipExport
row_data = [
consumption_type,
record.repository_stock_value.repository_row.name,
record.repository_stock_value.repository_row.code,
record.repository_row.name,
record.repository_row.code,
consumed_amount,
consumed_amount_unit,
added_amount,
added_amount_unit,
record.user.full_name,
record.created_at.strftime(record.user.date_format),
record.team_name,
record.repository_row.repository.team.name,
record.unit,
record.balance.to_d
]
if consumption_type == 'Task'
my_module = record.my_module_repository_row.my_module
breadcrumbs_data = [
record.project_name,
record.experiment_name,
record.module_name,
"#{MyModule::ID_PREFIX}#{record.module_id}"
my_module.experiment.project.name,
my_module.experiment.name,
my_module.name,
my_module.code
]
end
row_data.insert(10, *breadcrumbs_data)
data << row_data
row_data
end
# rubocop:enable Metrics/BlockLength
data
end
end
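
With this change the module no longer creates a ZipExport record itself; to_csv now takes repository row ids directly and returns the CSV string, which RepositoryStockZipExportJob#fill_content writes to disk, roughly as follows (dir stands for the job's temp directory):

# --- Illustrative call, mirroring RepositoryStockZipExportJob#fill_content ---
csv_data = RepositoryStockLedgerZipExport.to_csv(repository_row_ids)
File.binwrite("#{dir}/export.csv", csv_data)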


@@ -3,39 +3,6 @@
require 'csv'
module RepositoryZipExport
def self.generate_zip(params, repository, current_user)
# Fetch rows in the same order as in the currently viewed datatable
if params[:my_module_id]
rows = if repository.is_a?(RepositorySnapshot)
repository.repository_rows
else
repository.repository_rows
.joins(:my_module_repository_rows)
.where(my_module_repository_rows: { my_module_id: params[:my_module_id] })
end
if repository.has_stock_management?
rows = rows.left_joins(my_module_repository_rows: :repository_stock_unit_item)
.select(
'repository_rows.*',
'my_module_repository_rows.stock_consumption'
)
end
else
ordered_row_ids = params[:row_ids]
id_row_map = RepositoryRow.where(id: ordered_row_ids,
repository: repository)
.index_by(&:id)
rows = ordered_row_ids.collect { |id| id_row_map[id.to_i] }
end
zip = ZipExport.create(user: current_user)
zip.generate_exportable_zip(
current_user.id,
to_csv(rows, params[:header_ids], current_user, repository, nil, params[:my_module_id].present?),
:repositories
)
end
def self.to_csv(rows, column_ids, user, repository, handle_file_name_func = nil, in_module = false)
# Parse column names
csv_header = []