Mirror of https://github.com/scinote-eln/scinote-web.git (synced 2025-10-24 12:46:39 +08:00)

Merge remote-tracking branch 'upstream/features/september-release' into SCI-9010-rework-result-activities-to-be-aligned-with-step-activities

Commit cd892878db

37 changed files with 557 additions and 541 deletions
@@ -381,6 +381,9 @@ var dropdownSelector = (function() {
          || (config.selectKeys || []).includes(e.keyCode)) {
        return;
      }
      if (!dropdownContainer.hasClass('open')) {
        dropdownContainer.find('.input-field').focus();
      }
      e.stopPropagation();
      loadData(selectElement, dropdownContainer);
    })

@@ -67,6 +67,8 @@ label {
 * Global fix for handsontable
 */
.hot-table-container {
  display: flex;
  overflow: auto;

  .ht_master .wtHolder {
    height: auto !important;
    width: auto !important;

@@ -29,6 +29,8 @@ div.print-report {
  }

  .hot-table-container {
    display: flex;
    overflow: auto;

    .ht_master .wtHolder {
      overflow: hidden !important;

@@ -152,7 +152,14 @@ class MyModuleRepositoriesController < ApplicationController

  def export_repository
    if params[:header_ids]
      RepositoryZipExport.generate_zip(params, @repository, current_user)
      RepositoryZipExportJob.perform_later(
        user_id: current_user.id,
        params: {
          repository_id: @repository.id,
          my_module_id: @my_module.id,
          header_ids: params[:header_ids]
        }
      )

      Activities::CreateActivityService.call(
        activity_type: :export_inventory_items_assigned_to_task,

@@ -103,7 +103,14 @@ class MyModuleRepositorySnapshotsController < ApplicationController

  def export_repository_snapshot
    if params[:header_ids]
      RepositoryZipExport.generate_zip(params, @repository_snapshot, current_user)
      RepositoryZipExportJob.perform_later(
        user_id: current_user.id,
        params: {
          repository_id: @repository_snapshot.id,
          my_module_id: @my_module.id,
          header_ids: params[:header_ids]
        }
      )

      Activities::CreateActivityService.call(
        activity_type: :export_inventory_snapshot_items_assigned_to_task,

@@ -337,7 +337,14 @@ class RepositoriesController < ApplicationController

  def export_repository
    if params[:row_ids] && params[:header_ids]
      RepositoryZipExport.generate_zip(params, @repository, current_user)
      RepositoryZipExportJob.perform_later(
        user_id: current_user.id,
        params: {
          repository_id: @repository.id,
          row_ids: params[:row_ids],
          header_ids: params[:header_ids]
        }
      )
      log_activity(:export_inventory_items)
      render json: { message: t('zip_export.export_request_success') }
    else

@@ -359,7 +366,12 @@ class RepositoriesController < ApplicationController
  def export_repository_stock_items
    row_ids = @repository.repository_rows.where(id: params[:row_ids]).pluck(:id)
    if row_ids.any?
      RepositoryStockLedgerZipExport.generate_zip(row_ids, current_user.id)
      RepositoryStockZipExportJob.perform_later(
        user_id: current_user.id,
        params: {
          repository_row_ids: row_ids
        }
      )
      render json: { message: t('zip_export.export_request_success') }
    else
      render json: { message: t('zip_export.export_error') }, status: :unprocessable_entity

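The controller changes above all follow the same pattern: the synchronous RepositoryZipExport / RepositoryStockLedgerZipExport calls are replaced by perform_later on the new ActiveJob classes. Below is a minimal sketch of how the enqueueing could be verified with ActiveJob::TestHelper; the test class name, route helper, and fixture variables are illustrative assumptions, not part of this commit.

require 'test_helper'

class RepositoryExportEnqueueTest < ActionDispatch::IntegrationTest
  include ActiveJob::TestHelper

  test 'exporting inventory rows enqueues a RepositoryZipExportJob' do
    # @repository, @rows and @headers are assumed fixtures; the route helper below is hypothetical.
    assert_enqueued_with(job: RepositoryZipExportJob) do
      post export_repository_team_repository_path(@repository.team, @repository),
           params: { row_ids: @rows.map(&:id), header_ids: @headers.map(&:id) }
    end
  end
end
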
@@ -109,7 +109,7 @@ class ResultsController < ApplicationController

  def update_view_state
    view_state = @result.current_view_state(current_user)
    view_state.state['result_assets']['sort'] = params.require(:assets).require(:order)
    view_state.state['assets']['sort'] = params.require(:assets).require(:order)
    view_state.save! if view_state.changed?

    render json: {}, status: :ok

@@ -201,7 +201,7 @@ class ResultsController < ApplicationController
  def set_navigator
    @navigator = {
      url: tree_navigator_my_module_path(@my_module),
      archived: params[:view_mode] == 'archived',
      archived: false,
      id: @my_module.code
    }
  end

@@ -25,16 +25,18 @@ class TeamsController < ApplicationController
  def export_projects
    if current_user.has_available_exports?
      current_user.increase_daily_exports_counter!

      generate_export_projects_zip

      TeamZipExportJob.perform_later(
        user_id: current_user.id,
        params: {
          team_id: @team.id,
          project_ids: @exp_projects.collect(&:id)
        }
      )
      log_activity(:export_projects,
                   team: @team.id,
                   projects: @exp_projects.map(&:name).join(', '))

      render json: {
        flash: t('projects.export_projects.success_flash')
      }, status: :ok
      render json: { flash: t('projects.export_projects.success_flash') }
    end
  end

@@ -147,20 +149,6 @@ class TeamsController < ApplicationController
      end
    end

    def generate_export_projects_zip
      ids = @exp_projects.index_by(&:id)

      options = { team: @team }
      zip = TeamZipExport.create(user: current_user)
      zip.generate_exportable_zip(
        current_user.id,
        ids,
        :teams,
        options
      )
      ids
    end

    def log_activity(type_of, message_items = {})
      Activities::CreateActivityService
        .call(activity_type: type_of,

@@ -41,17 +41,13 @@ module SecondaryNavigationHelper
  end

  def is_module_results?
    %w(index).include?(action_name) && controller_name == 'results' && !%w(archived).include?(params[:view_mode])
    %w(index).include?(action_name) && controller_name == 'results'
  end

  def is_module_activities?
    action_name == 'activities'
  end

  def is_module_archive?
    %w(index).include?(action_name) && controller_name == 'results' && %w(archived).include?(params[:view_mode])
  end

  def title_element
    if all_projects_page?
      current_team

@@ -2,6 +2,9 @@
  <div class="results-wrapper">
    <ResultsToolbar :sort="sort"
                    :canCreate="canCreate == 'true'"
                    :archived="archived == 'true'"
                    :active_url="active_url"
                    :archived_url="archived_url"
                    @setSort="setSort"
                    @setFilters="setFilters"
                    @newResult="createResult"

@@ -36,7 +39,10 @@
    components: { ResultsToolbar, Result },
    props: {
      url: { type: String, required: true },
      canCreate: { type: String, required: true }
      canCreate: { type: String, required: true },
      archived: { type: String, required: true },
      active_url: { type: String, required: true },
      archived_url: { type: String, required: true }
    },
    data() {
      return {

@@ -6,6 +6,25 @@
        {{ i18n.t('my_modules.results.add_label') }}
      </button>
    </div>
    <div class="dropdown view-switch" >
      <div class="btn btn-light btn-black view-switch-button prevent-shrink" id="viewSwitchButton" data-toggle="dropdown" aria-haspopup="true" aria-expanded="true">
        <span v-if="archived" class="state-view-switch-btn-name">{{ i18n.t('my_modules.results.archived_results') }}</span>
        <span v-else class="state-view-switch-btn-name">{{ i18n.t('my_modules.results.active_results') }}</span>
        <span class="sn-icon sn-icon-down"></span>
      </div>
      <ul class="dropdown-menu dropdown-menu-right" aria-labelledby="viewSwitchButton">
        <li class="view-switch-active">
          <a :href="active_url" :class="{'form-dropdown-state-item prevent-shrink': !archived}">
            {{ i18n.t('my_modules.results.active_results') }}
          </a>
        </li>
        <li class="view-switch-archived">
          <a :href="archived_url" :class="{'form-dropdown-state-item prevent-shrink': archived}">
            {{ i18n.t('my_modules.results.archived_results') }}
          </a>
        </li>
      </ul>
    </div>
    <div class="result-toolbar__right flex items-center" @click="$emit('expandAll')">
      <button class="btn btn-secondary mr-3">
        {{ i18n.t('my_modules.results.expand_label') }}

@@ -48,7 +67,10 @@
    name: 'ResultsToolbar',
    props: {
      sort: { type: String, required: true },
      canCreate: { type: Boolean, required: true }
      canCreate: { type: Boolean, required: true },
      archived: { type: Boolean, required: true },
      active_url: { type: String, required: true },
      archived_url: { type: String, required: true }
    },
    data() {
      return {

@@ -47,17 +47,16 @@
      </div>
    </div>
    <div class="attachments" :data-parent-id="parent.id">
      <template v-for="(attachment, index) in attachmentsOrdered">
        <component
          :is="attachment_view_mode(attachmentsOrdered[index])"
          :key="attachment.id"
          :attachment="attachment"
          :parentId="parseInt(parent.id)"
          @attachment:viewMode="updateAttachmentViewMode"
          @attachment:delete="deleteAttachment(attachment.id)"
          @attachment:moved="attachmentMoved"
        />
      </template>
      <component
        v-for="(attachment, index) in attachmentsOrdered"
        :key="attachment.id"
        :is="attachment_view_mode(attachmentsOrdered[index])"
        :attachment="attachment"
        :parentId="parseInt(parent.id)"
        @attachment:viewMode="updateAttachmentViewMode"
        @attachment:delete="deleteAttachment(attachment.id)"
        @attachment:moved="attachmentMoved"
      />
    </div>
  </div>
</template>

@@ -1,5 +1,5 @@
<template>
  <div ref="modal" @keydown.esc="cancel" class="modal" id="modalDestroyProtocolContent" tabindex="-1" role="dialog">
  <div ref="modal" @keydown.esc="cancel" class="modal" id="modalMoveProtocolContent" tabindex="-1" role="dialog">
    <div class="modal-dialog" role="document">
      <div class="modal-content">
        <div class="modal-header">

@@ -38,7 +38,7 @@
        </div>
        <div class="modal-footer">
          <button class="btn btn-secondary" @click="cancel">{{ i18n.t('general.cancel') }}</button>
          <button class="btn btn-primary" @click="confirm">{{ i18n.t('general.move')}}</button>
          <button class="btn btn-primary" @click="confirm" :disabled="!target">{{ i18n.t('general.move')}}</button>
        </div>
      </div>
    </div>

|
@ -144,8 +144,7 @@ module Protocols
|
|||
"href='#{Rails.application.routes.url_helpers.protocol_path(@protocol)}'>" \
|
||||
"#{@protocol.name}</a>"
|
||||
)
|
||||
|
||||
UserNotification.create!(notification: notification, user: @user)
|
||||
notification.create_user_notification(@user)
|
||||
end
|
||||
|
||||
# Overrides method from FailedDeliveryNotifiableJob concern
|
||||
|
|
|
|||
|
|
@@ -94,7 +94,7 @@ class RepositoriesExportJob < ApplicationJob
               .zip_exports_download_export_all_path(@zip_export)}'>" \
               "#{@zip_export.zip_file_name}</a>"
    )
    UserNotification.create!(notification: notification, user: @user)
    notification.create_user_notification(@user)
  end

  # Overrides method from FailedDeliveryNotifiableJob concern

app/jobs/repository_stock_zip_export_job.rb (new file, 11 lines)
@@ -0,0 +1,11 @@
# frozen_string_literal: true

class RepositoryStockZipExportJob < ZipExportJob
  private

  # Override
  def fill_content(dir, params)
    data = RepositoryStockLedgerZipExport.to_csv(params[:repository_row_ids])
    File.binwrite("#{dir}/export.csv", data)
  end
end

app/jobs/repository_zip_export_job.rb (new file, 40 lines)
@@ -0,0 +1,40 @@
# frozen_string_literal: true

class RepositoryZipExportJob < ZipExportJob
  private

  # Override
  def fill_content(dir, params)
    repository = Repository.find(params[:repository_id])
    # Fetch rows in the same order as in the currently viewed datatable
    if params[:my_module_id]
      rows = if repository.is_a?(RepositorySnapshot)
               repository.repository_rows
             else
               repository.repository_rows
                         .joins(:my_module_repository_rows)
                         .where(my_module_repository_rows: { my_module_id: params[:my_module_id] })
             end
      if repository.has_stock_management?
        rows = rows.left_joins(my_module_repository_rows: :repository_stock_unit_item)
                   .select(
                     'repository_rows.*',
                     'my_module_repository_rows.stock_consumption'
                   )
      end
    else
      ordered_row_ids = params[:row_ids]
      id_row_map = RepositoryRow.where(id: ordered_row_ids,
                                       repository: repository)
                                .index_by(&:id)
      rows = ordered_row_ids.collect { |id| id_row_map[id.to_i] }
    end
    data = RepositoryZipExport.to_csv(rows,
                                      params[:header_ids],
                                      @user,
                                      repository,
                                      nil,
                                      params[:my_module_id].present?)
    File.binwrite("#{dir}/export.csv", data)
  end
end

app/jobs/team_zip_export_job.rb (new file, 281 lines)
@@ -0,0 +1,281 @@
# frozen_string_literal: true

require 'fileutils'
require 'csv'

class TeamZipExportJob < ZipExportJob
  include StringUtility

  private

  # Override
  def zip_name
    "projects_export_#{Time.now.utc.strftime('%F_%H-%M-%S_UTC')}.zip"
  end

  # Override
  def fill_content(dir, params)
    # Create team folder
    @team = Team.find(params[:team_id])
    projects = @team.projects.where(id: params[:project_ids])
    team_path = "#{dir}/#{to_filesystem_name(@team.name)}"
    FileUtils.mkdir_p(team_path)

    # Iterate through every project
    p_idx = p_archive_idx = 0
    projects.each do |project|
      idx = project.archived ? (p_archive_idx += 1) : (p_idx += 1)
      project_path = make_model_dir(team_path, project, idx)
      project_name = project_path.split('/')[-1]

      obj_filenames = { repositories: {}, assets: {}, tables: {} }

      # Change current dir for correct generation of relative links
      Dir.chdir(project_path)
      project_path = '.'

      inventories = "#{project_path}/Inventories"
      FileUtils.mkdir_p(inventories)

      repositories = project.assigned_repositories_and_snapshots

      # Iterate through every inventory repo and save it to CSV
      repositories.each_with_index do |repo, repo_idx|
        next if obj_filenames[:repositories][repo.id].present?

        obj_filenames[:repositories][repo.id] = {
          file: save_inventories_to_csv(inventories, repo, repo_idx)
        }
      end

      # Include all experiments
      ex_idx = ex_archive_idx = 0
      project.experiments.each do |experiment|
        idx = experiment.archived ? (ex_archive_idx += 1) : (ex_idx += 1)
        experiment_path = make_model_dir(project_path, experiment, idx)

        # Include all modules
        mod_pos = mod_archive_pos = 0
        experiment.my_modules.order(:workflow_order).each do |my_module|
          pos = my_module.archived ? (mod_archive_pos += 1) : (mod_pos += 1)
          my_module_path = make_model_dir(experiment_path, my_module, pos)

          # Create upper directories for both elements
          protocol_path = "#{my_module_path}/Protocol attachments"
          result_path = "#{my_module_path}/Result attachments"
          FileUtils.mkdir_p(protocol_path)
          FileUtils.mkdir_p(result_path)

          # Export protocols
          steps = my_module.protocols.map(&:steps).flatten
          obj_filenames[:assets].merge!(
            export_assets(StepAsset.where(step: steps), :step, protocol_path)
          )
          obj_filenames[:tables].merge!(
            export_tables(StepTable.where(step: steps), :step, protocol_path)
          )

          # Export results
          [false, true].each do |archived|
            obj_filenames[:assets].merge!(
              export_assets(
                ResultAsset.where(result: my_module.results.where(archived: archived)),
                :result,
                result_path,
                archived
              )
            )
          end

          [false, true].each do |archived|
            obj_filenames[:tables].merge!(
              export_tables(
                ResultTable.where(result: my_module.results.where(archived: archived)),
                :result,
                result_path,
                archived
              )
            )
          end
        end
      end

      # Generate and export whole project report HTML
      html_name = "#{project_name} Report.html"
      project_report_pdf = project.generate_teams_export_report_html(
        @user, @team, html_name, obj_filenames
      )
      File.binwrite("#{project_path}/#{html_name}", project_report_pdf)
    end
  ensure
    # Change current dir outside dir, since it will be deleted
    Dir.chdir(Rails.root)
  end

  # Create directory for project, experiment, or module
  def make_model_dir(parent_path, model, index)
    # For MyModule, the index indicates its position in project sidebar
    if model.instance_of?(MyModule)
      class_name = 'module'
      model_format = '(%<idx>s) %<name>s'
    else
      class_name = model.class.to_s.downcase.pluralize
      model_format = '%<name>s (%<idx>s)'
    end
    model_name =
      format(model_format, idx: index, name: to_filesystem_name(model.name))

    model_path = parent_path
    if model.archived
      model_path += "/Archived #{class_name}"
      FileUtils.mkdir_p(model_path)
    end
    model_path += "/#{model_name}"
    FileUtils.mkdir_p(model_path)
    model_path
  end

  # Appends given suffix to file_name and then adds original extension
  def append_file_suffix(file_name, suffix)
    ext = File.extname(file_name)
    File.basename(file_name, ext) + suffix + ext
  end

  def create_archived_results_folder(result_path)
    path = "#{result_path}/Archived attachments"
    FileUtils.mkdir_p(path) unless File.directory?(path)
    path
  end

  # Helper method to extract given assets to the directory
  def export_assets(elements, type, directory, archived = false)
    directory = create_archived_results_folder(directory) if archived && elements.present?

    asset_indexes = {}
    elements.each_with_index do |element, i|
      asset = element.asset
      preview = prepare_preview(asset)
      if type == :step
        name = "#{directory}/" \
               "#{append_file_suffix(asset.file_name, "_#{i}_Step#{element.step.position_plus_one}")}"
        if preview
          preview_name = "#{directory}/" \
                         "#{append_file_suffix(preview[:file_name], "_#{i}_Step#{element.step.position_plus_one}_preview")}"
        end
      elsif type == :result
        name = "#{directory}/#{append_file_suffix(asset.file_name, "_#{i}")}"
        preview_name = "#{directory}/#{append_file_suffix(preview[:file_name], "_#{i}_preview")}" if preview
      end

      if asset.file.attached?
        begin
          File.binwrite(name, asset.file.download)
          File.binwrite(preview_name, preview[:file_data]) if preview
        rescue ActiveStorage::FileNotFoundError
          next
        end
      end
      asset_indexes[asset.id] = {
        file: name,
        preview: preview_name
      }
    end
    asset_indexes
  end

  def prepare_preview(asset)
    if asset.previewable? && !asset.list?
      preview = asset.inline? ? asset.large_preview : asset.medium_preview
      if preview.is_a?(ActiveStorage::Preview)
        return unless preview.image.attached?

        file_name = preview.image.filename.to_s
        file_data = preview.image.download
      else
        file_name = preview.blob.filename.to_s

        begin
          file_data = preview.processed.service.download(preview.key)
        # handle files not processable by Vips (no preview available) or missing
        rescue Vips::Error, ActiveStorage::FileNotFoundError
          return nil
        end
      end

      {
        file_name: file_name,
        file_data: file_data
      }
    end
  end

  # Helper method to extract given tables to the directory
  def export_tables(elements, type, directory, archived = false)
    directory = create_archived_results_folder(directory) if archived && elements.present?

    table_indexes = {}
    elements.each_with_index do |element, i|
      table = element.table
      table_name = table.name.presence || 'Table'
      table_name += i.to_s

      if type == :step
        name = "#{directory}/#{to_filesystem_name(table_name)}" \
               "_#{i}_Step#{element.step.position_plus_one}.csv"
      elsif type == :result
        name = "#{directory}/#{to_filesystem_name(table_name)}.csv"
      end
      File.binwrite(name, table.to_csv)
      table_indexes[table.id] = {
        file: name
      }
    end

    table_indexes
  end

  # Helper method for saving inventories to CSV
  def save_inventories_to_csv(path, repo, idx)
    repo_name = "#{to_filesystem_name(repo.name)} (#{idx})"

    # Attachment folder
    rel_attach_path = "#{repo_name} attachments"
    attach_path = "#{path}/#{rel_attach_path}"
    FileUtils.mkdir_p(attach_path)

    # CSV file
    csv_file_path = "#{path}/#{repo_name}.csv"

    # Define headers and columns IDs
    col_ids = [-3, -4, -5, -6] + repo.repository_columns.map(&:id)

    # Define callback function for file name
    assets = {}
    asset_counter = 0
    handle_name_func = lambda do |asset|
      file_name = append_file_suffix(asset.file_name, "_#{asset_counter}").to_s

      # Save pair for downloading it later
      assets[asset] = "#{attach_path}/#{file_name}"

      asset_counter += 1
      rel_path = "#{rel_attach_path}/#{file_name}"
      return "=HYPERLINK(\"#{rel_path}\", \"#{rel_path}\")"
    end

    # Generate CSV
    csv_data = RepositoryZipExport.to_csv(repo.repository_rows, col_ids, @user, repo, handle_name_func)
    File.binwrite(csv_file_path, csv_data)

    # Save all attachments (it doesn't work directly in callback function
    assets.each do |asset, asset_path|
      asset.file.open do |file|
        FileUtils.cp(file.path, asset_path)
      end
    rescue ActiveStorage::FileNotFoundError
      next
    end

    csv_file_path
  end
end

app/jobs/zip_export_job.rb (new file, 50 lines)
@@ -0,0 +1,50 @@
# frozen_string_literal: true

class ZipExportJob < ApplicationJob
  include FailedDeliveryNotifiableJob

  def perform(user_id:, params: {})
    @user = User.find(user_id)
    I18n.backend.date_format = @user.settings[:date_format] || Constants::DEFAULT_DATE_FORMAT
    ZipExport.transaction do
      @zip_export = ZipExport.create!(user: @user)
      zip_input_dir = FileUtils.mkdir_p(Rails.root.join("tmp/temp_zip_#{Time.now.to_i}").to_s).first
      zip_dir = FileUtils.mkdir_p(Rails.root.join('tmp/zip-ready').to_s).first
      full_zip_name = File.join(zip_dir, zip_name)

      fill_content(zip_input_dir, params)
      @zip_export.zip!(zip_input_dir, full_zip_name)
      @zip_export.zip_file.attach(io: File.open(full_zip_name), filename: zip_name)
      generate_notification!
    ensure
      FileUtils.rm_rf([zip_input_dir, full_zip_name], secure: true)
    end
  ensure
    I18n.backend.date_format = nil
  end

  private

  def zip_name
    "export_#{Time.now.utc.strftime('%F %H-%M-%S_UTC')}.zip"
  end

  def fill_content(dir, params)
    raise NotImplementedError
  end

  def generate_notification!
    notification = Notification.create!(
      type_of: :deliver,
      title: I18n.t('zip_export.notification_title'),
      message: "<a data-id='#{@zip_export.id}' " \
               "data-turbolinks='false' " \
               "href='#{Rails.application
                        .routes
                        .url_helpers
                        .zip_exports_download_path(@zip_export)}'>" \
               "#{@zip_export.zip_file_name}</a>"
    )
    notification.create_user_notification(@user)
  end
end

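ZipExportJob centralises the temp-directory handling, zipping, file attachment, notification, and cleanup, so each export type in this commit only overrides fill_content (and optionally zip_name). A minimal sketch of adding another export type on top of it follows; ProtocolCsvExportJob and its CSV builder are hypothetical, not part of this commit.

# Hypothetical subclass; ProtocolCsvExporter is an assumed CSV builder, not real scinote-web code.
class ProtocolCsvExportJob < ZipExportJob
  private

  # Override: write the files that should end up inside the generated zip into `dir`.
  def fill_content(dir, params)
    data = ProtocolCsvExporter.to_csv(params[:protocol_ids], @user)
    File.binwrite("#{dir}/export.csv", data)
  end

  # Optional override: customise the zip file name (the default is "export_<timestamp>_UTC.zip").
  def zip_name
    "protocols_export_#{Time.now.utc.strftime('%F_%H-%M-%S_UTC')}.zip"
  end
end

# Enqueued from a controller in the same way as the jobs in this commit:
#   ProtocolCsvExportJob.perform_later(user_id: current_user.id, params: { protocol_ids: ids })
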
@@ -4,22 +4,24 @@ module PrefixedIdModel
  extend ActiveSupport::Concern

  included do
    begin
      indexdef = "CREATE INDEX index_#{table_name}_on_#{name.underscore}_code"\
                 " ON public.#{table_name} USING gin ((('#{self::ID_PREFIX}'::text || id)) gin_trgm_ops)"
    unless Rails.env.production?
      begin
        indexdef = "CREATE INDEX index_#{table_name}_on_#{name.underscore}_code " \
                   "ON public.#{table_name} USING gin ((('#{self::ID_PREFIX}'::text || id)) gin_trgm_ops)"

      index_exists = ActiveRecord::Base.connection.execute(
        "SELECT indexdef FROM pg_indexes WHERE tablename NOT LIKE 'pg%';"
      ).to_a.map(&:values).flatten.include?(indexdef)
        index_exists = ActiveRecord::Base.connection.execute(
          "SELECT indexdef FROM pg_indexes WHERE tablename NOT LIKE 'pg%';"
        ).to_a.map(&:values).flatten.include?(indexdef)

      # rubocop:disable Rails/Output
      puts("\nWARNING missing index\n#{indexdef}\nfor prefixed id model #{name}!\n\n") unless index_exists
      # rubocop:enable Rails/Output
    rescue ActiveRecord::NoDatabaseError, ActiveRecord::ConnectionNotEstablished
      # only applicable during build and when setting up project
        # rubocop:disable Rails/Output
        puts("\nWARNING missing index\n#{indexdef}\nfor prefixed id model #{name}!\n\n") unless index_exists
        # rubocop:enable Rails/Output
      rescue ActiveRecord::NoDatabaseError, ActiveRecord::ConnectionNotEstablished
        # only applicable during build and when setting up project
      end
    end

    self::PREFIXED_ID_SQL = "('#{self::ID_PREFIX}' || #{table_name}.id)"
    self::PREFIXED_ID_SQL = "('#{self::ID_PREFIX}' || #{table_name}.id)".freeze

    def code
      "#{self.class::ID_PREFIX}#{id}"

@@ -14,6 +14,7 @@ class MyModuleRepositoryRow < ApplicationRecord
             touch: true,
             inverse_of: :my_module_repository_rows
  belongs_to :repository_stock_unit_item, optional: true
  has_many :repository_ledger_records, as: :reference, dependent: :nullify

  validates :repository_row, uniqueness: { scope: :my_module }

@@ -6,4 +6,15 @@ class RepositoryLedgerRecord < ApplicationRecord
  belongs_to :repository_stock_value
  belongs_to :reference, polymorphic: true
  belongs_to :user
  belongs_to :repository,
             (lambda do |repository_ledger_record|
               repository_ledger_record.reference_type == 'RepositoryBase' ? self : none
             end),
             optional: true, foreign_key: :reference_id, inverse_of: :repository_ledger_records
  belongs_to :my_module_repository_row,
             (lambda do |repository_ledger_record|
               repository_ledger_record.reference_type == 'MyModuleRepositoryRow' ? self : none
             end),
             optional: true, foreign_key: :reference_id, inverse_of: :repository_ledger_records
  has_one :repository_row, through: :repository_stock_value
end

@@ -1,14 +1,18 @@
# frozen_string_literal: true

class Settings < ApplicationRecord
  attr_accessor :merged_values

  before_validation -> { self.values = merged_values || values }

  def self.instance
    first || new
  end

  def values
    merged_values = super
    self.merged_values ||= super
    self.class.instance_methods(false).grep(/^load_values_from_[A-Z0-9_]*/).each do |method|
      merged_values = merged_values.merge(public_send(method))
      self.merged_values = self.merged_values.merge(public_send(method))
    end
    merged_values
  end

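With the memoised merged_values above, any instance method defined directly on Settings whose name starts with load_values_from_ is merged into the stored values hash. A minimal sketch, assuming a hypothetical provider method and key:

# Hypothetical provider; the method name and key below are illustrative only.
class Settings < ApplicationRecord
  def load_values_from_ENV
    { 'instance_url' => ENV.fetch('INSTANCE_URL', nil) }.compact
  end
end

# Settings.instance.values now also contains 'instance_url' when INSTANCE_URL is set;
# the before_validation callback above writes the merged hash back into `values`.
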
@@ -1,311 +1,7 @@
# frozen_string_literal: true

require 'fileutils'
require 'csv'

class TeamZipExport < ZipExport
  include StringUtility

  def generate_exportable_zip(user_id, data, type, options = {})
    @user = User.find(user_id)
    zip_input_dir = FileUtils.mkdir_p(
      File.join(Rails.root, "tmp/temp_zip_#{Time.now.to_i}")
    ).first
    zip_dir = FileUtils.mkdir_p(File.join(Rails.root, 'tmp/zip-ready')).first

    zip_name = "projects_export_#{Time.now.utc.strftime('%F_%H-%M-%S_UTC')}.zip"
    full_zip_name = File.join(zip_dir, zip_name)

    fill_content(zip_input_dir, data, type, options)
    zip!(zip_input_dir, full_zip_name)
    zip_file.attach(io: File.open(full_zip_name), filename: zip_name)
    generate_notification(user) if save
  ensure
    FileUtils.rm_rf([zip_input_dir, full_zip_name], secure: true)
  end

  handle_asynchronously :generate_exportable_zip,
                        queue: :team_zip_export

  def self.exports_limit
    (Rails.application.secrets.export_all_limit_24h || 3).to_i
  end

  private

  # Export all functionality
  def generate_teams_zip(tmp_dir, data, options = {})
    # Create team folder
    @team = options[:team]
    team_path = "#{tmp_dir}/#{to_filesystem_name(@team.name)}"
    FileUtils.mkdir_p(team_path)

    # Iterate through every project
    p_idx = p_archive_idx = 0
    data.each do |(_, p)|
      idx = p.archived ? (p_archive_idx += 1) : (p_idx += 1)
      project_path = make_model_dir(team_path, p, idx)
      project_name = project_path.split('/')[-1]

      obj_filenames = { repositories: {}, assets: {}, tables: {} }

      # Change current dir for correct generation of relative links
      Dir.chdir(project_path)
      project_path = '.'

      inventories = "#{project_path}/Inventories"
      FileUtils.mkdir_p(inventories)

      repositories = p.assigned_repositories_and_snapshots

      # Iterate through every inventory repo and save it to CSV
      repositories.each_with_index do |repo, repo_idx|
        next if obj_filenames[:repositories][repo.id].present?

        obj_filenames[:repositories][repo.id] = {
          file: save_inventories_to_csv(inventories, repo, repo_idx)
        }
      end

      # Include all experiments
      ex_idx = ex_archive_idx = 0
      p.experiments.each do |ex|
        idx = ex.archived ? (ex_archive_idx += 1) : (ex_idx += 1)
        experiment_path = make_model_dir(project_path, ex, idx)

        # Include all modules
        mod_pos = mod_archive_pos = 0
        ex.my_modules.order(:workflow_order).each do |my_module|
          pos = my_module.archived ? (mod_archive_pos += 1) : (mod_pos += 1)
          my_module_path = make_model_dir(experiment_path, my_module, pos)

          # Create upper directories for both elements
          protocol_path = "#{my_module_path}/Protocol attachments"
          result_path = "#{my_module_path}/Result attachments"
          FileUtils.mkdir_p(protocol_path)
          FileUtils.mkdir_p(result_path)

          # Export protocols
          steps = my_module.protocols.map(&:steps).flatten
          obj_filenames[:assets].merge!(
            export_assets(StepAsset.where(step: steps), :step, protocol_path)
          )
          obj_filenames[:tables].merge!(
            export_tables(StepTable.where(step: steps), :step, protocol_path)
          )

          # Export results
          [false, true].each do |archived|
            obj_filenames[:assets].merge!(
              export_assets(
                ResultAsset.where(result: my_module.results.where(archived: archived)),
                :result,
                result_path,
                archived
              )
            )
          end

          [false, true].each do |archived|
            obj_filenames[:tables].merge!(
              export_tables(
                ResultTable.where(result: my_module.results.where(archived: archived)),
                :result,
                result_path,
                archived
              )
            )
          end
        end
      end

      # Generate and export whole project report HTML
      html_name = "#{project_name} Report.html"
      project_report_pdf = p.generate_teams_export_report_html(
        @user, @team, html_name, obj_filenames
      )
      file = FileUtils.touch("#{project_path}/#{html_name}").first
      File.open(file, 'wb') { |f| f.write(project_report_pdf) }
    end
  ensure
    # Change current dir outside tmp_dir, since tmp_dir will be deleted
    Dir.chdir(Rails.root)
  end

  def generate_notification(user)
    notification = Notification.create(
      type_of: :deliver,
      title: I18n.t('zip_export.notification_title'),
      message: "<a data-id='#{id}' " \
               "data-turbolinks='false' " \
               "href='#{Rails.application
                        .routes
                        .url_helpers
                        .zip_exports_download_export_all_path(self)}'>" \
               "#{zip_file_name}</a>"
    )
    UserNotification.create(notification: notification, user: user)
  end

  # Create directory for project, experiment, or module
  def make_model_dir(parent_path, model, index)
    # For MyModule, the index indicates its position in project sidebar
    if model.class == MyModule
      class_name = 'module'
      model_format = '(%<idx>s) %<name>s'
    else
      class_name = model.class.to_s.downcase.pluralize
      model_format = '%<name>s (%<idx>s)'
    end
    model_name =
      format(model_format, idx: index, name: to_filesystem_name(model.name))

    model_path = parent_path
    if model.archived
      model_path += "/Archived #{class_name}"
      FileUtils.mkdir_p(model_path)
    end
    model_path += "/#{model_name}"
    FileUtils.mkdir_p(model_path)
    model_path
  end

  # Appends given suffix to file_name and then adds original extension
  def append_file_suffix(file_name, suffix)
    ext = File.extname(file_name)
    File.basename(file_name, ext) + suffix + ext
  end

  def create_archived_results_folder(result_path)
    path = "#{result_path}/Archived attachments"
    FileUtils.mkdir_p(path) unless File.directory?(path)
    path
  end

  # Helper method to extract given assets to the directory
  def export_assets(elements, type, directory, archived = false)
    directory = create_archived_results_folder(directory) if archived && elements.present?

    asset_indexes = {}
    elements.each_with_index do |element, i|
      asset = element.asset
      preview = prepare_preview(asset)
      if type == :step
        name = "#{directory}/" \
               "#{append_file_suffix(asset.file_name, "_#{i}_Step#{element.step.position_plus_one}")}"
        if preview
          preview_name = "#{directory}/" \
                         "#{append_file_suffix(preview[:file_name], "_#{i}_Step#{element.step.position_plus_one}_preview")}"
        end
      elsif type == :result
        name = "#{directory}/#{append_file_suffix(asset.file_name, "_#{i}")}"
        preview_name = "#{directory}/#{append_file_suffix(preview[:file_name], "_#{i}_preview")}" if preview
      end

      if asset.file.attached?
        File.open(name, 'wb') { |f| f.write(asset.file.download) }
        File.open(preview_name, 'wb') { |f| f.write(preview[:file_data]) } if preview
      end
      asset_indexes[asset.id] = {
        file: name,
        preview: preview_name
      }
    end
    asset_indexes
  end

  def prepare_preview(asset)
    if asset.previewable? && !asset.list?
      preview = asset.inline? ? asset.large_preview : asset.medium_preview
      if preview.is_a?(ActiveStorage::Preview)
        return unless preview.image.attached?

        file_name = preview.image.filename.to_s
        file_data = preview.image.download
      else
        file_name = preview.blob.filename.to_s

        begin
          file_data = preview.processed.service.download(preview.key)
        rescue Vips::Error # handle files not processable by Vips (no preview available)
          return nil
        end
      end

      {
        file_name: file_name,
        file_data: file_data
      }
    end
  end

  # Helper method to extract given tables to the directory
  def export_tables(elements, type, directory, archived = false)
    directory = create_archived_results_folder(directory) if archived && elements.present?

    table_indexes = {}
    elements.each_with_index do |element, i|
      table = element.table
      table_name = table.name.presence || 'Table'
      table_name += i.to_s

      if type == :step
        name = "#{directory}/#{to_filesystem_name(table_name)}" \
               "_#{i}_Step#{element.step.position_plus_one}.csv"
      elsif type == :result
        name = "#{directory}/#{to_filesystem_name(table_name)}.csv"
      end
      file = FileUtils.touch(name).first
      File.open(file, 'wb') { |f| f.write(table.to_csv) }
      table_indexes[table.id] = {
        file: name
      }
    end

    table_indexes
  end

  # Helper method for saving inventories to CSV
  def save_inventories_to_csv(path, repo, idx)
    repo_name = "#{to_filesystem_name(repo.name)} (#{idx})"

    # Attachment folder
    rel_attach_path = "#{repo_name} attachments"
    attach_path = "#{path}/#{rel_attach_path}"
    FileUtils.mkdir_p(attach_path)

    # CSV file
    csv_file_path = "#{path}/#{repo_name}.csv"
    csv_file = FileUtils.touch(csv_file_path).first

    # Define headers and columns IDs
    col_ids = [-3, -4, -5, -6] + repo.repository_columns.map(&:id)

    # Define callback function for file name
    assets = {}
    asset_counter = 0
    handle_name_func = lambda do |asset|
      file_name = append_file_suffix(asset.file_name, "_#{asset_counter}").to_s

      # Save pair for downloading it later
      assets[asset] = "#{attach_path}/#{file_name}"

      asset_counter += 1
      rel_path = "#{rel_attach_path}/#{file_name}"
      return "=HYPERLINK(\"#{rel_path}\", \"#{rel_path}\")"
    end

    # Generate CSV
    csv_data = RepositoryZipExport.to_csv(repo.repository_rows, col_ids, @user, repo, handle_name_func)
    File.open(csv_file, 'wb') { |f| f.write(csv_data) }

    # Save all attachments (it doesn't work directly in callback function
    assets.each do |asset, asset_path|
      asset.file.open do |file|
        FileUtils.cp(file.path, asset_path)
      end
    end

    csv_file_path
  end
end

@@ -4,20 +4,6 @@ require 'zip'
require 'fileutils'
require 'csv'

# To use ZipExport you have to define the generate_( type )_zip method!
# Example:
# def generate_(type)_zip(tmp_dir, data, options = {})
#   attributes = options.fetch(:attributes) { :attributes_missing }
#   file = FileUtils.touch("#{tmp_dir}/export.csv").first
#   records = data
#   CSV.open(file, 'wb') do |csv|
#     csv << attributes
#     records.find_each do |entity|
#       csv << entity.values_at(*attributes.map(&:to_sym))
#     end
#   end
# end

class ZipExport < ApplicationRecord
  belongs_to :user, optional: true

@@ -26,8 +12,7 @@ class ZipExport < ApplicationRecord
  after_create :self_destruct

  def self.delete_expired_export(id)
    export = find_by_id(id)
    export&.destroy
    find_by(id: id)&.destroy
  end

  def zip_file_name

@@ -45,62 +30,10 @@ class ZipExport < ApplicationRecord
    end
  end

  def generate_exportable_zip(user_id, data, type, options = {})
    user = User.find(user_id)
    I18n.backend.date_format = user.settings[:date_format] || Constants::DEFAULT_DATE_FORMAT
    zip_input_dir = FileUtils.mkdir_p(File.join(Rails.root, "tmp/temp_zip_#{Time.now.to_i}")).first
    tmp_zip_dir = FileUtils.mkdir_p(File.join(Rails.root, 'tmp/zip-ready')).first
    tmp_zip_name = "export_#{Time.now.strftime('%F %H-%M-%S_UTC')}.zip"
    tmp_full_zip_name = File.join(tmp_zip_dir, tmp_zip_name)

    fill_content(zip_input_dir, data, type, options)
    zip!(zip_input_dir, tmp_full_zip_name)
    zip_file.attach(io: File.open(tmp_full_zip_name), filename: tmp_zip_name)
    generate_notification(user) if save
  ensure
    FileUtils.rm_rf([zip_input_dir, tmp_full_zip_name], secure: true)
  end

  handle_asynchronously :generate_exportable_zip

  private

  def self_destruct
    ZipExport.delay(run_at: Constants::EXPORTABLE_ZIP_EXPIRATION_DAYS.days.from_now)
             .delete_expired_export(id)
  end

  def method_missing(method_name, *args, &block)
    return super unless method_name.to_s.start_with?('generate_')

    raise StandardError, 'Method is missing! To use this zip_export you have to define a method: generate_( type )_zip.'
  end

  def respond_to_missing?(method_name, include_private = false)
    method_name.to_s.start_with?('generate_') || super
  end

  def fill_content(dir, data, type, options = {})
    eval("generate_#{type}_zip(dir, data, options)")
  end

  def generate_notification(user)
    notification = Notification.create(
      type_of: :deliver,
      title: I18n.t('zip_export.notification_title'),
      message: "<a data-id='#{id}' " \
               "data-turbolinks='false' " \
               "href='#{Rails.application
                        .routes
                        .url_helpers
                        .zip_exports_download_path(self)}'>" \
               "#{zip_file_name}</a>"
    )
    UserNotification.create(notification: notification, user: user)
  end

  def generate_repositories_zip(tmp_dir, data, _options = {})
    file = FileUtils.touch("#{tmp_dir}/export.csv").first
    File.open(file, 'wb') { |f| f.write(data) }
  end
end

@@ -9,7 +9,7 @@ class ResultSerializer < ActiveModel::Serializer

  attributes :name, :id, :urls, :updated_at, :created_at_formatted, :updated_at_formatted, :user,
             :my_module_id, :attachments_manageble, :marvinjs_enabled, :marvinjs_context, :type,
             :wopi_enabled, :wopi_context, :created_at, :created_by, :archived
             :wopi_enabled, :wopi_context, :created_at, :created_by, :archived, :assets_order

  def marvinjs_enabled
    MarvinJsService.enabled?

@@ -55,6 +55,10 @@ class ResultSerializer < ActiveModel::Serializer
    }
  end

  def assets_order
    object.current_view_state(current_user).state.dig('assets', 'sort') unless object.destroyed?
  end

  def attachments_manageble
    can_manage_result?(object)
  end

@@ -22,48 +22,31 @@ module RepositoryStockLedgerZipExport
    stock_balance_unit
  ).freeze

  def self.generate_zip(row_ids, user_id)
    rows = generate_data(row_ids)

    zip = ZipExport.create(user_id: user_id)
    zip.generate_exportable_zip(
      user_id,
      to_csv(rows),
      :repositories
    )
  end

  def self.to_csv(rows)
  def self.to_csv(repository_row_ids)
    csv_header = COLUMNS.map { |col| I18n.t("repository_stock_values.stock_export.headers.#{col}") }
    repository_ledger_records = load_records(repository_row_ids)

    CSV.generate do |csv|
      csv << csv_header
      rows.each do |row|
        csv << row
      repository_ledger_records.each do |record|
        csv << generate_record_data(record)
      end
    end
  end

  def self.generate_data(row_ids)
    data = []
    repository_ledger_records =
      RepositoryLedgerRecord.joins(repository_stock_value: :repository_row)
                            .includes(:user, { repository_stock_value: :repository_row })
                            .where(repository_row: { id: row_ids })
                            .joins('LEFT OUTER JOIN my_module_repository_rows ON
                                    repository_ledger_records.reference_id = my_module_repository_rows.id')
                            .joins('LEFT OUTER JOIN my_modules ON
                                    my_modules.id = my_module_repository_rows.my_module_id')
                            .joins('LEFT OUTER JOIN experiments ON experiments.id = my_modules.experiment_id')
                            .joins('LEFT OUTER JOIN projects ON projects.id = experiments.project_id')
                            .joins('LEFT OUTER JOIN teams ON teams.id = projects.team_id')
                            .order('repository_row.created_at, repository_ledger_records.created_at')
                            .select('repository_ledger_records.*,
                                     my_modules.id AS module_id, my_modules.name AS module_name,
                                     projects.name AS project_name, teams.name AS team_name,
                                     experiments.name AS experiment_name')
    # rubocop:disable Metrics/BlockLength
    repository_ledger_records.each do |record|
  class << self
    private

    def load_records(repository_row_ids)
      RepositoryLedgerRecord
        .joins(:repository_row)
        .preload(:user, repository_row: { repository: :team })
        .preload(my_module_repository_row: { my_module: { experiment: { project: :team } } })
        .where(repository_row: { id: repository_row_ids })
        .order(:created_at)
    end

    def generate_record_data(record)
      consumption_type = record.reference_type == 'MyModuleRepositoryRow' ? 'Task' : 'Inventory'

      if record.amount.positive?

@@ -78,32 +61,31 @@ module RepositoryStockLedgerZipExport

      row_data = [
        consumption_type,
        record.repository_stock_value.repository_row.name,
        record.repository_stock_value.repository_row.code,
        record.repository_row.name,
        record.repository_row.code,
        consumed_amount,
        consumed_amount_unit,
        added_amount,
        added_amount_unit,
        record.user.full_name,
        record.created_at.strftime(record.user.date_format),
        record.team_name,
        record.repository_row.repository.team.name,
        record.unit,
        record.balance.to_d
      ]

      if consumption_type == 'Task'
        my_module = record.my_module_repository_row.my_module
        breadcrumbs_data = [
          record.project_name,
          record.experiment_name,
          record.module_name,
          "#{MyModule::ID_PREFIX}#{record.module_id}"
          my_module.experiment.project.name,
          my_module.experiment.name,
          my_module.name,
          my_module.code
        ]
      end

      row_data.insert(10, *breadcrumbs_data)
      data << row_data
      row_data
    end
    # rubocop:enable Metrics/BlockLength
    data
  end
end

@@ -3,39 +3,6 @@
require 'csv'

module RepositoryZipExport
  def self.generate_zip(params, repository, current_user)
    # Fetch rows in the same order as in the currently viewed datatable
    if params[:my_module_id]
      rows = if repository.is_a?(RepositorySnapshot)
               repository.repository_rows
             else
               repository.repository_rows
                         .joins(:my_module_repository_rows)
                         .where(my_module_repository_rows: { my_module_id: params[:my_module_id] })
             end
      if repository.has_stock_management?
        rows = rows.left_joins(my_module_repository_rows: :repository_stock_unit_item)
                   .select(
                     'repository_rows.*',
                     'my_module_repository_rows.stock_consumption'
                   )
      end
    else
      ordered_row_ids = params[:row_ids]
      id_row_map = RepositoryRow.where(id: ordered_row_ids,
                                       repository: repository)
                                .index_by(&:id)
      rows = ordered_row_ids.collect { |id| id_row_map[id.to_i] }
    end

    zip = ZipExport.create(user: current_user)
    zip.generate_exportable_zip(
      current_user.id,
      to_csv(rows, params[:header_ids], current_user, repository, nil, params[:my_module_id].present?),
      :repositories
    )
  end

  def self.to_csv(rows, column_ids, user, repository, handle_file_name_func = nil, in_module = false)
    # Parse column names
    csv_header = []

@@ -12,10 +12,6 @@
      title="<%= t("nav2.modules.results") %>"
    >
      <%= t("nav2.modules.results") %>
      <% @active_results_size = @my_module.results.where(archived:false).size %>
      <% if @active_results_size.positive? %>
        <sup class="navigation-results-counter"><%= @my_module.archived_branch? ? @my_module.results.size : @active_results_size %></sup>
      <% end %>
    </a>
    <a class="p-3 border-b-4 border-transparent hover:no-underline uppercase text-bold capitalize <%= is_module_activities? ? "text-sn-blue" : "text-sn-grey" %>"
      href="<%= activities_my_module_url(@my_module, view_mode: params[:view_mode]) %>"

@@ -26,18 +22,6 @@
    <% end %>

    <div data-hook="secondary-navigation-tabs"></div>
    <% if can_read_experiment?(@my_module.experiment) && !@my_module.archived_branch? %>
      <a class="p-3 border-b-4 border-transparent hover:no-underline uppercase text-bold capitalize <%= is_module_archive? ? "text-sn-blue" : "text-sn-grey" %>"
        href="<%= my_module_results_path(@my_module, view_mode: :archived) %>"
        title="<%= t'nav2.modules.archive' %>"
      >
        <%= t("nav2.modules.archive") %>
        <% @archived_results_size = @my_module.results.where(archived:true).size %>
        <% if @archived_results_size.positive? %>
          <sup class="navigation-results-counter"><%= @archived_results_size %></sup>
        <% end %>
      </a>
    <% end %>
    <%= render partial: '/my_modules/task_flow_and_sharing' %>
  </div>
</div>

@@ -74,7 +74,7 @@
  <div class="description-text">
    <%= custom_auto_link(experiment.description, team: current_team) %>
  </div>
  <% if experiment.description.present? && experiment.description.length > Constants::EXPERIMENT_LONG_DESCRIPTION %>
  <% if experiment.description.present? && (experiment.description.length > Constants::EXPERIMENT_LONG_DESCRIPTION || experiment.description.count("\n") > 2) %>
    <%= link_to t('experiments.card.more'),
                experiment_path(experiment),
                class: 'more-button experiment-action-link',

@@ -3,7 +3,6 @@
  <%= render partial: "/shared/sidebar/templates_sidebar", locals: {active: :protocol} %>
<% end %>
<% provide(:container_class, 'no-second-nav-container') %>
<div class="content-pane protocols-show flexible" >
  <div class="content-header">
    <div class="title-row">
      <h1>

@@ -28,6 +27,7 @@
      </h1>
    </div>
  </div>
  <div class="content-pane protocols-show flexible" >
  <div class="protocol-position-container">
    <div
      id="protocolContainer"

@@ -13,8 +13,9 @@
      <h4 class="modal-title"><%=t 'zip_export.consumption_modal_label' %></h4>
    </div>
    <div class="modal-body">
      <div><%=t('zip_export.consumption_header_html', repository: repository.name) %></div>
      <div><%=t 'zip_export.consumption_footer_html' %></div>
      <p><%=t('zip_export.consumption_header_html', repository: repository.name) %></p>
      <p><%=t 'zip_export.consumption_body_html' %></p>
      <p class='pb-0'><%=t 'zip_export.consumption_footer_html' %></p>
    </div>
    <div class="modal-footer">
      <button type='button' class='btn btn-secondary' data-dismiss='modal' id='close-modal-export-stock-consumption'><%= t('general.cancel')%></button>

@@ -19,7 +19,10 @@

<div id="results" data-behaviour="vue">
  <results url="<%= my_module_results_url(@my_module, view_mode: params[:view_mode]) %>"
           can-create=<%= can_create_results?(@my_module) && params[:view_mode].blank? %>>
           active_url="<%= my_module_results_url(@my_module) %>"
           archived_url="<%= my_module_results_url(@my_module, view_mode: :archived) %>"
           can-create=<%= can_create_results?(@my_module) && !(params[:view_mode] == 'archived') %>
           archived=<%= params[:view_mode] == 'archived' %>>
  </div>
</div>

@@ -6,7 +6,7 @@
    </span>
  </div>

  <div class="my-5 flex-1 bg-sn-white">
  <div class="my-5 max-w-4xl flex-1 bg-sn-white">
    <div class="content-pane flexible">
      <%= render partial: 'shareable_links/my_modules/header_actions' %>
      <div class="px-4">

@@ -2,13 +2,13 @@
<div class="comment-container">
  <div class="avatar-placehodler">
    <span class='global-avatar-container'>
      <%= image_tag avatar_path(comment.user, :icon_small), class: 'avatar' %>
      <%= image_tag user_avatar_absolute_url(comment.user, :icon_small, true), class: 'user-avatar' %>
    </span>
  </div>
  <div class="content-placeholder">
    <div class="comment-name"><%= comment.user.full_name %></div>
    <div class="comment-right !w-fit">
      <div class="comment-datetime !w-fit"><%= comment.created_at.iso8601 %></div>
      <div class="comment-datetime !w-fit"><%= l(comment.created_at, format: :full) %></div>
    </div>
    <div class="comment-message">
      <div class="view-mode"><%= smart_annotation_text(comment.message) %></div>

@@ -4,7 +4,7 @@
  <% if element.name.present? %>
    <div class="step-element-name">
      <%= render partial: "shareable_links/my_modules/inline_view", locals: { text: element.name, smart_annotation_enabled: false } %>
    </div>
  </div>
  <% end %>
</div>
<div class="step-table view locked" tabindex="0">

@@ -1252,6 +1252,8 @@ en:
      load_from_file_protocol_general_error: "Failed to load the protocol from file. It is likely that certain fields (protocol and individual step titles and names) contain too many or too few characters.(max is %{max} and min is %{min})"
    results:
      head_title: "%{project} | %{module} | Results"
      active_results: "Active results"
      archived_results: "Archived results"
      default_name: "New result"
      placeholder: "Enter result name"
      add_label: "New result"

@@ -3532,9 +3534,10 @@ en:
      repository_footer_html: 'Inventory will be exported in a .csv file format. You will receive <strong>email with a link</strong> where you can download it.'
      export_request_success: "Export request received. Your export request is being processed."
      export_error: "Error when creating zip export."
      consumption_modal_label: 'Consumption report'
      consumption_header_html: 'You are about to generate consumption report for selected items in inventory %{repository}.'
      consumption_footer_html: 'Consumption report will be exported in a .csv file format. You will receive <strong>email with a link</strong> where you can download it.'
      consumption_modal_label: 'Export consumption'
      consumption_header_html: 'You are about to generate consumption report for selected items in inventory %{repository}*. The report will be saved in a .csv file format.'
      consumption_body_html: 'After the report is generated, you will be able to find the <strong>download link</strong> in both your SciNote notifications and email inbox.'
      consumption_footer_html: 'For security reasons, please note that the link will only be <strong>valid for 7 days</strong>.'
      consumption_generate: 'Generate'
    webhooks:
      index: