Mirror of https://github.com/scinote-eln/scinote-web.git, synced 2025-12-11 14:45:59 +08:00

Merge pull request #7635 from rekonder/aj_SCI_10738
Refactor inventory import backend [SCI-10738]

Commit 060a694fc0, 15 changed files with 258 additions and 346 deletions
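In outline, the refactor replaces the per-batch import path with a single service call (a sketch assembled from the hunks below; argument values are illustrative):

    status = ImportRepository::ImportRecords.new(
      temp_file: TempFile.find_by(id: file_id),      # uploaded sheet, stored server-side
      repository: repository,
      mappings: mappings,                            # sheet column index => column id, '-1' = name
      session: session,
      user: current_user,
      can_edit_existing_items: true,
      should_overwrite_with_empty_cells: false,
      preview: true                                  # validate and report; persist nothing
    ).import!
    # => { status: :ok, nr_of_added: ..., total_nr: ..., changes: [...], import_date: ... }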
@@ -280,7 +280,7 @@ class RepositoriesController < ApplicationController
     render_403 unless can_create_repository_rows?(@repository)

     unless import_params[:file]
-      repository_response(t('repositories.parse_sheet.errors.no_file_selected'))
+      unprocessable_entity_repository_response(t('repositories.parse_sheet.errors.no_file_selected'))
       return
     end
     begin
@@ -290,16 +290,12 @@ class RepositoriesController < ApplicationController
         session: session
       )
       if parsed_file.too_large?
-        return render json: { error: t('general.file.size_exceeded', file_size: Rails.configuration.x.file_max_size_mb) }, status: :unprocessable_entity
+        render json: { error: t('general.file.size_exceeded', file_size: Rails.configuration.x.file_max_size_mb) },
+               status: :unprocessable_entity
       elsif parsed_file.has_too_many_rows?
-        return render json: { error: t('repositories.import_records.error_message.items_limit', items_size: Constants::IMPORT_REPOSITORY_ITEMS_LIMIT) }, status: :unprocessable_entity
+        render json: { error: t('repositories.import_records.error_message.items_limit',
+                                items_size: Constants::IMPORT_REPOSITORY_ITEMS_LIMIT) }, status: :unprocessable_entity
       else
-        sheet = SpreadsheetParser.open_spreadsheet(import_params[:file])
-        duplicate_ids = SpreadsheetParser.duplicate_ids(sheet)
-        if duplicate_ids.any?
-          @importing_duplicates_warning = t('repositories.import_records.error_message.importing_duplicates', duplicate_ids: duplicate_ids)
-        end
-
         @import_data = parsed_file.data

         if @import_data.header.blank? || @import_data.columns.blank?
@@ -307,64 +303,58 @@ class RepositoriesController < ApplicationController
         end

         if (@temp_file = parsed_file.generate_temp_file)
-          render json: {
-            import_data: @import_data,
-            temp_file: @temp_file
-          }
+          render json: { import_data: @import_data, temp_file: @temp_file }
         else
-          return render json: { error: t('repositories.parse_sheet.errors.temp_file_failure') }, status: :unprocessable_entity
+          render json: { error: t('repositories.parse_sheet.errors.temp_file_failure') }, status: :unprocessable_entity
         end
       end
     rescue ArgumentError, CSV::MalformedCSVError
-      return render json: { error: t('repositories.parse_sheet.errors.invalid_file', encoding: ''.encoding) }, status: :unprocessable_entity
+      render json: { error: t('repositories.parse_sheet.errors.invalid_file', encoding: ''.encoding) },
+             status: :unprocessable_entity
     rescue TypeError
-      return render json: { error: t('repositories.parse_sheet.errors.invalid_extension') }, status: :unprocessable_entity
+      render json: { error: t('repositories.parse_sheet.errors.invalid_extension') }, status: :unprocessable_entity
     end
   end

   def import_records
     render_403 unless can_create_repository_rows?(Repository.accessible_by_teams(current_team)
-                                                            .find_by_id(import_params[:id]))
-
-    # Access the checkbox values from params
-    can_edit_existing_items = params[:can_edit_existing_items]
-    should_overwrite_with_empty_cells = params[:should_overwrite_with_empty_cells]
-    preview = params[:preview]
-
+                                                            .find_by(id: import_params[:id]))
     # Check if there exist mapping for repository record (it's mandatory)
     if import_params[:mappings].present? && import_params[:mappings].value?('-1')
-      import_records = repostiory_import_actions
-      status = import_records.import!(can_edit_existing_items, should_overwrite_with_empty_cells, preview)
-
+      status = ImportRepository::ImportRecords
+               .new(
+                 temp_file: TempFile.find_by(id: import_params[:file_id]),
+                 repository: Repository.accessible_by_teams(current_team).find_by(id: import_params[:id]),
+                 mappings: import_params[:mappings],
+                 session: session,
+                 user: current_user,
+                 can_edit_existing_items: import_params[:can_edit_existing_items],
+                 should_overwrite_with_empty_cells: import_params[:should_overwrite_with_empty_cells],
+                 preview: import_params[:preview]
+               ).import!
       if status[:status] == :ok
-        log_activity(:import_inventory_items,
-                     num_of_items: status[:nr_of_added])
+        log_activity(:import_inventory_items, num_of_items: status[:nr_of_added])

-        flash[:success] = t('repositories.import_records.success_flash',
-                            number_of_rows: status[:nr_of_added],
-                            total_nr: status[:total_nr])
-
-        if preview
-          render json: status, status: :ok
-        else
-          render json: {}, status: :ok
+        unless import_params[:preview]
+          flash[:success] = t('repositories.import_records.success_flash',
+                              number_of_rows: status[:nr_of_added],
+                              total_nr: status[:total_nr])
         end
+
+        render json: import_params[:preview] ? status : {}, status: :ok
       else
-        flash[:alert] =
-          t('repositories.import_records.partial_success_flash',
-            nr: status[:nr_of_added], total_nr: status[:total_nr])
+        unless import_params[:preview]
+          flash[:alert] =
+            t('repositories.import_records.partial_success_flash',
+              nr: status[:nr_of_added], total_nr: status[:total_nr])
+        end
+
         render json: {}, status: :unprocessable_entity
       end
     else
-      render json: {
-        html: render_to_string(
-          partial: 'shared/flash_errors',
-          formats: :html,
-          locals: { error_title: t('repositories.import_records.error_message.errors_list_title'),
-                    error: t('repositories.import_records.error_message.no_repository_name') }
-        )
-      }, status: :unprocessable_entity
+      render json: { error: t('repositories.import_records.error_message.mapping_error') },
+             status: :unprocessable_entity
     end
   end
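For review orientation, the endpoint's response contract after this hunk, read off the render calls above:

    # preview == true  -> render json: status  (nr_of_added, total_nr, changes, import_date;
    #                                           no flash is set)
    # preview == false -> render json: {}      (flash[:success] or flash[:alert] carries the
    #                                           outcome; 200 on :ok, 422 otherwise)
    # name column not mapped -> 422 with { error: t('...mapping_error') }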
@@ -387,8 +377,7 @@ class RepositoriesController < ApplicationController
         row_ids: params[:row_ids],
         header_ids: params[:header_ids]
       },
-      file_type: params[:empty_export] == '1' ? 'csv' : params[:file_type],
-      empty_export: params[:empty_export] == '1'
+      file_type: params[:file_type]
     )
     update_user_export_file_type if current_user.settings[:repository_export_file_type] != params[:file_type]
     log_activity(:export_inventory_items)
@@ -480,16 +469,6 @@ class RepositoriesController < ApplicationController

   private

-  def repostiory_import_actions
-    ImportRepository::ImportRecords.new(
-      temp_file: TempFile.find_by_id(import_params[:file_id]),
-      repository: Repository.accessible_by_teams(current_team).find_by_id(import_params[:id]),
-      mappings: import_params[:mappings],
-      session: session,
-      user: current_user
-    )
-  end
-
   def load_repository
     repository_id = params[:id] || params[:repository_id]
     @repository = Repository.accessible_by_teams(current_user.teams).find_by(id: repository_id)
@@ -571,7 +550,8 @@ class RepositoriesController < ApplicationController
   end

   def import_params
-    params.permit(:id, :file, :file_id, :preview, mappings: {}).to_h
+    params.permit(:id, :file, :file_id, :preview, :can_edit_existing_items,
+                  :should_overwrite_with_empty_cells, :preview, mappings: {}).to_h
   end

   def repository_response(message)
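Reviewer note: :preview now appears twice in the permit list (it was already permitted on the first line); harmless, but the duplicate could be dropped.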
@@ -120,7 +120,7 @@ export default {
      });

      columns.push({
-       field: 'status',
+       field: 'import_status',
        headerName: this.i18n.t('repositories.import_records.steps.step3.status'),
        pinned: 'right'
      });
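The renamed grid field matches the import_status attribute exposed by RepositoryRowSerializer in the hunk further down, so the pinned status column reads straight off each serialized row.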
@@ -46,7 +46,7 @@ class RepositoriesExportJob < ApplicationJob
     FileUtils.mkdir_p(attachments_path)

     # File creation
-    file_name = FileUtils.touch("#{path}/#{repository_name}.#{@file_type}").first
+    repository_items_file_name = FileUtils.touch("#{path}/#{repository_name}.#{@file_type}").first

     # Define headers and columns IDs
     col_ids = [-3, -4, -5, -6, -7, -8, -9, -10]
@@ -69,10 +69,10 @@ class RepositoriesExportJob < ApplicationJob

     # Generate CSV / XLSX
     service = RepositoryExportService
-              .new(@file_type, repository.repository_rows, col_ids, @user, repository, in_module: handle_name_func)
+              .new(@file_type, repository.repository_rows, col_ids, @user, repository, handle_name_func)
     exported_data = service.export!

-    File.binwrite(file_name, exported_data)
+    File.binwrite(repository_items_file_name, exported_data)

     # Save all attachments (it doesn't work directly in callback function
     assets.each do |asset, asset_path|
@@ -35,15 +35,9 @@ class RepositoryZipExportJob < ZipExportJob
                                            params[:header_ids].map(&:to_i),
                                            @user,
                                            repository,
-                                           in_module: params[:my_module_id].present?,
-                                           empty_export: @empty_export)
+                                           in_module: params[:my_module_id].present?)
       exported_data = service.export!

-      if @empty_export
-        File.binwrite("#{dir}/Export_Inventory_Empty_#{Time.now.utc.strftime('%F %H-%M-%S_UTC')}.#{@file_type}", exported_data)
-      else
-        File.binwrite("#{dir}/export.#{@file_type}", exported_data)
-      end
+      File.binwrite("#{dir}/export.#{@file_type}", exported_data)
     end

   def failed_notification_title
@@ -3,10 +3,9 @@
 class ZipExportJob < ApplicationJob
   include FailedDeliveryNotifiableJob

-  def perform(user_id:, params: {}, file_type: :csv, empty_export: false)
+  def perform(user_id:, params: {}, file_type: :csv)
     @user = User.find(user_id)
     @file_type = file_type.to_sym
-    @empty_export = empty_export
     I18n.backend.date_format = @user.settings[:date_format] || Constants::DEFAULT_DATE_FORMAT
     zip_input_dir = FileUtils.mkdir_p(Rails.root.join("tmp/temp_zip_#{Time.now.to_i}").to_s).first
     zip_dir = FileUtils.mkdir_p(Rails.root.join('tmp/zip-ready').to_s).first
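With empty_export gone from the signature, callers enqueue an export with just a file type. A sketch (the params keys mirror the controller hunk above; repository_id is illustrative):

    RepositoryZipExportJob.perform_later(user_id: current_user.id,
                                         params: { repository_id: repository.id,
                                                   row_ids: row_ids,
                                                   header_ids: header_ids },
                                         file_type: :xlsx)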
@@ -203,11 +203,6 @@ class Repository < RepositoryBase
     new_repo
   end

-  def import_records(sheet, mappings, user, can_edit_existing_items, should_overwrite_with_empty_cells, preview)
-    importer = RepositoryImportParser::Importer.new(sheet, mappings, user, self)
-    importer.run(can_edit_existing_items, should_overwrite_with_empty_cells, preview)
-  end
-
   def assigned_rows(my_module)
     repository_rows.joins(:my_module_repository_rows).where(my_module_repository_rows: { my_module_id: my_module.id })
   end
@@ -105,6 +105,8 @@ class RepositoryRow < ApplicationRecord
             length: { maximum: Constants::NAME_MAX_LENGTH }
   validates :created_by, presence: true

+  attr_accessor :import_status
+
   scope :active, -> { where(archived: false) }
   scope :archived, -> { where(archived: true) }
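import_status is a plain attr_accessor, so it never touches the database; it only tags rows in memory while an import run (or preview) is serialized back to the client:

    row = RepositoryRow.new(name: 'Sample item')
    row.import_status = 'created'   # virtual attribute, not persisted
    row.import_status               # => "created"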
@@ -3,7 +3,7 @@
 class RepositoryRowSerializer < ActiveModel::Serializer
   include Rails.application.routes.url_helpers

-  attributes :id, :name, :code
+  attributes :id, :name, :code, :import_status

   has_many :repository_cells, serializer: RepositoryCellSerializer
 end
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 module ImportRepository
   class ImportRecords
     def initialize(options)
@@ -6,60 +8,25 @@ module ImportRepository
       @mappings = options.fetch(:mappings)
       @session = options.fetch(:session)
       @user = options.fetch(:user)
+      @can_edit_existing_items = options.fetch(:can_edit_existing_items)
+      @should_overwrite_with_empty_cells = options.fetch(:should_overwrite_with_empty_cells)
+      @preview = options.fetch(:preview)
     end

-    def import!(can_edit_existing_items, should_overwrite_with_empty_cells, preview)
-      status = run_import_actions(can_edit_existing_items, should_overwrite_with_empty_cells, preview)
-      #@temp_file.destroy
+    def import!
+      status = @temp_file.file.open do |temp_file|
+        importer = RepositoryImportParser::Importer.new(SpreadsheetParser.open_spreadsheet(temp_file),
+                                                        @mappings,
+                                                        @user,
+                                                        @repository,
+                                                        @can_edit_existing_items,
+                                                        @should_overwrite_with_empty_cells,
+                                                        @preview)
+        importer.run
+      end
+
+      @temp_file.destroy unless @preview
       status
     end
-
-    private
-
-    def run_import_actions(can_edit_existing_items, should_overwrite_with_empty_cells, preview)
-      @temp_file.file.open do |temp_file|
-        @repository.import_records(
-          SpreadsheetParser.open_spreadsheet(temp_file),
-          @mappings,
-          @user,
-          can_edit_existing_items,
-          should_overwrite_with_empty_cells,
-          preview
-        )
-      end
-    end
-
-    def run_checks
-      unless @mappings
-        return {
-          status: :error,
-          errors:
-            I18n.t('repositories.import_records.error_message.no_data_to_parse')
-        }
-      end
-      unless @mappings.value?('-1')
-        return {
-          status: :error,
-          errors:
-            I18n.t('repositories.import_records.error_message.no_column_name')
-        }
-      end
-      unless @temp_file
-        return {
-          status: :error,
-          errors:
-            I18n.t(
-              'repositories.import_records.error_message.temp_file_not_found'
-            )
-        }
-      end
-      unless @temp_file.session_id == session.id
-        return {
-          status: :error,
-          errors:
-            I18n.t('repositories.import_records.error_message.session_expired')
-        }
-      end
-    end
   end
 end
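The service now runs in two passes against the same TempFile: a preview pass that validates and serializes the would-be changes but keeps the temp file, then a confirming pass that persists and destroys it. A sketch with illustrative arguments:

    args = { temp_file: temp_file, repository: repository, mappings: mappings,
             session: session, user: current_user,
             can_edit_existing_items: true, should_overwrite_with_empty_cells: false }

    preview = ImportRepository::ImportRecords.new(args.merge(preview: true)).import!
    # ...user confirms in the import modal...
    result  = ImportRepository::ImportRecords.new(args.merge(preview: false)).import!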
@@ -3,7 +3,7 @@
 require 'csv'

 module RepositoryCsvExport
-  def self.to_csv(rows, column_ids, user, repository, handle_file_name_func, in_module, empty_export)
+  def self.to_csv(rows, column_ids, user, repository, handle_file_name_func, in_module)
     # Parse column names
     csv_header = []
     add_consumption = in_module && !repository.is_a?(RepositorySnapshot) && repository.has_stock_management?
@@ -38,47 +38,45 @@ module RepositoryCsvExport

     CSV.generate do |csv|
       csv << csv_header
-      unless empty_export
-        rows.each do |row|
-          csv_row = []
-          column_ids.each do |c_id|
-            case c_id
-            when -1, -2
-              next
-            when -3
-              csv_row << (repository.is_a?(RepositorySnapshot) ? row.parent_id : row.code)
-            when -4
-              csv_row << row.name
-            when -5
-              csv_row << row.created_by.full_name
-            when -6
-              csv_row << I18n.l(row.created_at, format: :full)
-            when -7
-              csv_row << row.updated_at ? I18n.l(row.updated_at, format: :full) : ''
-            when -8
-              csv_row << row.last_modified_by.full_name
-            when -9
-              csv_row << (row.archived? && row.archived_by.present? ? row.archived_by.full_name : '')
-            when -10
-              csv_row << (row.archived? && row.archived_on.present? ? I18n.l(row.archived_on, format: :full) : '')
-            when -11
-              csv_row << row.parent_repository_rows.map(&:code).join(' | ')
-              csv_row << row.child_repository_rows.map(&:code).join(' | ')
-            else
-              cell = row.repository_cells.find_by(repository_column_id: c_id)
+      rows.each do |row|
+        csv_row = []
+        column_ids.each do |c_id|
+          case c_id
+          when -1, -2
+            next
+          when -3
+            csv_row << (repository.is_a?(RepositorySnapshot) ? row.parent_id : row.code)
+          when -4
+            csv_row << row.name
+          when -5
+            csv_row << row.created_by.full_name
+          when -6
+            csv_row << I18n.l(row.created_at, format: :full)
+          when -7
+            csv_row << row.updated_at ? I18n.l(row.updated_at, format: :full) : ''
+          when -8
+            csv_row << row.last_modified_by.full_name
+          when -9
+            csv_row << (row.archived? && row.archived_by.present? ? row.archived_by.full_name : '')
+          when -10
+            csv_row << (row.archived? && row.archived_on.present? ? I18n.l(row.archived_on, format: :full) : '')
+          when -11
+            csv_row << row.parent_repository_rows.map(&:code).join(' | ')
+            csv_row << row.child_repository_rows.map(&:code).join(' | ')
+          else
+            cell = row.repository_cells.find_by(repository_column_id: c_id)

             csv_row << if cell
-                         if cell.value_type == 'RepositoryAssetValue' && handle_file_name_func
-                           handle_file_name_func.call(cell.value.asset)
-                         else
-                           cell.value.export_formatted
-                         end
+                          if cell.value_type == 'RepositoryAssetValue' && handle_file_name_func
+                            handle_file_name_func.call(cell.value.asset)
+                          else
+                            cell.value.export_formatted
+                          end
                        end
           end
         end
-        csv_row << row.row_consumption(row.stock_consumption) if add_consumption
-        csv << csv_row
-      end
+        csv_row << row.row_consumption(row.stock_consumption) if add_consumption
+        csv << csv_row
       end
     end.encode('UTF-8', invalid: :replace, undef: :replace)
   end
@@ -1,7 +1,7 @@
 # frozen_string_literal: true

 class RepositoryExportService
-  def initialize(file_type, rows, columns, user, repository, handle_name_func = nil, in_module: false, empty_export: false)
+  def initialize(file_type, rows, columns, user, repository, handle_name_func = nil, in_module: false)
     @file_type = file_type
     @user = user
     @rows = rows
@@ -9,13 +9,12 @@ class RepositoryExportService
     @repository = repository
     @handle_name_func = handle_name_func
     @in_module = in_module
-    @empty_export = empty_export
   end

   def export!
     case @file_type
     when :csv
-      file_data = RepositoryCsvExport.to_csv(@rows, @columns, @user, @repository, @handle_name_func, @in_module, @empty_export)
+      file_data = RepositoryCsvExport.to_csv(@rows, @columns, @user, @repository, @handle_name_func, @in_module)
     when :xlsx
       file_data = RepositoryXlsxExport.to_xlsx(@rows, @columns, @user, @repository, @handle_name_func, @in_module)
     end
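Call shape after the change (a sketch; col_ids as in the export job above, handle_name_func stays optional):

    service = RepositoryExportService.new(:csv, repository.repository_rows,
                                          [-3, -4, -5, -6, -7, -8, -9, -10],
                                          user, repository, nil, in_module: false)
    exported_data = service.export!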
@@ -36,9 +36,9 @@ module RepositoryXlsxExport
         when -6
           row_data << I18n.l(row.created_at, format: :full)
         when -7
-          csv_row << row.updated_at ? I18n.l(row.updated_at, format: :full) : ''
+          row_data << row.updated_at ? I18n.l(row.updated_at, format: :full) : ''
         when -8
-          csv_row << row.last_modified_by.full_name
+          row_data << row.last_modified_by.full_name
         when -9
           row_data << (row.archived? && row.archived_by.present? ? row.archived_by.full_name : '')
         when -10
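Worth a second look while touching these lines: << binds tighter than the ternary in Ruby, so `row_data << row.updated_at ? I18n.l(row.updated_at, format: :full) : ''` parses as `(row_data << row.updated_at) ? ... : ''`, appending the raw timestamp and discarding the formatted string. Parentheses would pin down the intended reading:

    row_data << (row.updated_at ? I18n.l(row.updated_at, format: :full) : '')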
@@ -11,7 +11,7 @@ module RepositoryImportParser
   class Importer
     IMPORT_BATCH_SIZE = 500

-    def initialize(sheet, mappings, user, repository)
+    def initialize(sheet, mappings, user, repository, can_edit_existing_items, should_overwrite_with_empty_cells, preview)
       @columns = []
       @name_index = -1
       @total_new_rows = 0
@@ -23,30 +23,36 @@ module RepositoryImportParser
       @mappings = mappings
       @user = user
       @repository_columns = @repository.repository_columns
+      @can_edit_existing_items = can_edit_existing_items
+      @should_overwrite_with_empty_cells = should_overwrite_with_empty_cells
+      @preview = preview
     end

-    def run(can_edit_existing_items, should_overwrite_with_empty_cells, preview)
+    def run
       fetch_columns
       return check_for_duplicate_columns if check_for_duplicate_columns

-      import_rows!(can_edit_existing_items, should_overwrite_with_empty_cells, preview)
+      import_rows!
     end

     private

     def fetch_columns
       @mappings.each_with_index do |(_, value), index|
         value = JSON.parse(value) rescue value
         value = value.to_s unless value.is_a?(Hash)

-        if value == '-1'
-          # Fill blank space, so our indices stay the same
+        case value
+        when '-1'
           @columns << nil
           @name_index = index
-
-        # creating a custom option column
-        elsif value.is_a?(Hash)
-          new_repository_column = @repository.repository_columns.create!(created_by: @user, name: value['name']+rand(10000).to_s, data_type: "Repository#{value['type']}Value")
+        when Hash
+          new_repository_column = if @preview
+                                    @repository.repository_columns.new(created_by: @user, name: value['name'],
+                                                                       data_type: "Repository#{value['type']}Value")
+                                  else
+                                    @repository.repository_columns.create!(created_by: @user, name: value['name'],
+                                                                           data_type: "Repository#{value['type']}Value")
+                                  end
           @columns << new_repository_column
         else
           @columns << @repository_columns.where(data_type: Extends::REPOSITORY_IMPORTABLE_TYPES)
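For orientation, the mappings hash this method walks has sheet-column indices as keys (a sketch inferred from the hunk; ids and names are illustrative):

    mappings = {
      '0' => '-1',                                     # '-1' marks the item-name column
      '1' => '42',                                     # id of an existing repository column
      '2' => { 'name' => 'Vendor', 'type' => 'Text' }  # new custom column => RepositoryTextValue
    }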
@@ -63,191 +69,169 @@ module RepositoryImportParser
       end
     end

-    def import_rows!(can_edit_existing_items, should_overwrite_with_empty_cells, preview)
-      errors = false
-      imported_rows = []
-
-      @repository.transaction do
-        batch_counter = 0
-        full_row_import_batch = []
-
-        @rows.each do |row|
-          # Skip empty rows
-          next if row.blank?
-
-          # Skip duplicates
-          next if duplicate_ids.include?(row.first)
-
-          unless @header_skipped
-            @header_skipped = true
-            next
-          end
-          @total_new_rows += 1
-
-          new_full_row = {}
-          incoming_row = SpreadsheetParser.parse_row(
-            row,
-            @sheet,
-            date_format: @user.settings['date_format']
-          )
-
-          incoming_row.each_with_index do |value, index|
-            if index == @name_index
-
-              # check if row (inventory) already exists
-              existing_row = RepositoryRow.includes(repository_cells: :value).find_by(id: incoming_row[0].to_s.gsub(RepositoryRow::ID_PREFIX, ''))
-
-              # if it doesn't exist create it
-              unless existing_row
-                new_row =
-                  RepositoryRow.new(name: try_decimal_to_string(value),
-                                    repository: @repository,
-                                    created_by: @user,
-                                    last_modified_by: @user)
-                unless new_row.valid?
-                  errors = true
-                  break
-                end
-                new_full_row[:repository_row] = new_row
-                next
-              end
-
-              # if it's a preview always add the existing row
-              if preview
-                new_full_row[:repository_row] = existing_row
-
-              # otherwise add according to criteria
-              else
-                # if it does exist but shouldn't be edited, error out and break
-                if existing_row && (can_edit_existing_items == false)
-                  errors = true
-                  break
-                end
-
-                # if it does exist and should be edited, update the existing row
-                if existing_row && (can_edit_existing_items == true)
-                  # update the existing row with incoming row data
-                  new_full_row[:repository_row] = existing_row
-                end
-              end
-            end
-
-            next unless @columns[index]
-            new_full_row[index] = value
-          end
-
-          if new_full_row[:repository_row].present?
-            full_row_import_batch << new_full_row
-            batch_counter += 1
-          end
-
-          next if batch_counter < IMPORT_BATCH_SIZE
-
-          # import_batch_to_database(full_row_import_batch, can_edit_existing_items, should_overwrite_with_empty_cells, preview: preview)
-          imported_rows += import_batch_to_database(full_row_import_batch, can_edit_existing_items, should_overwrite_with_empty_cells, preview)
-          full_row_import_batch = []
-          batch_counter = 0
-        end
-
-        # Import of the remaining rows
-        imported_rows += import_batch_to_database(full_row_import_batch, can_edit_existing_items, should_overwrite_with_empty_cells, preview) if full_row_import_batch.any?
-
-        full_row_import_batch
-      end
-
-      if errors
-        return { status: :error,
-                 nr_of_added: @new_rows_added,
-                 total_nr: @total_new_rows }
-      end
-      changes = ActiveModelSerializers::SerializableResource.new(
-        imported_rows,
-        each_serializer: RepositoryRowSerializer,
-        include: [:repository_cells]
-      ).as_json
-
-      { status: :ok, nr_of_added: @new_rows_added, total_nr: @total_new_rows, changes: changes, import_date: I18n.l(Date.today, format: :full_date) }
-    end
-
-    def import_batch_to_database(full_row_import_batch, can_edit_existing_items, should_overwrite_with_empty_cells, preview)
-      skipped_rows = []
-
-      full_row_import_batch.map do |full_row|
-        # skip archived rows and rows that belong to other repositories
-        if full_row[:repository_row].archived || full_row[:repository_row].repository_id != @repository.id
-          skipped_rows << full_row[:repository_row]
-          next
-        end
-
-        full_row[:repository_row].save!(validate: false)
-        @new_rows_added += 1
-
-        full_row.reject { |k| k == :repository_row }.each do |index, value|
-          column = @columns[index]
-          value = try_decimal_to_string(value) unless column.repository_number_value?
-          next if value.nil?
-
-          cell_value_attributes = {
-            created_by: @user,
-            last_modified_by: @user,
-            repository_cell_attributes: {
-              repository_row: full_row[:repository_row],
-              repository_column: column,
-              importing: true
-            }
-          }
-
-          cell_value = column.data_type.constantize.import_from_text(
-            value,
-            cell_value_attributes,
-            @user.as_json(root: true, only: :settings).deep_symbolize_keys
-          )
-
-          existing_cell = full_row[:repository_row].repository_cells.find { |c| c.repository_column_id == column.id }
-
-          next if cell_value.nil? && existing_cell.nil?
-
-          if existing_cell
-            # existing_cell present && !can_edit_existing_items
-            next if can_edit_existing_items == false
-
-            # existing_cell present && can_edit_existing_items
-            if can_edit_existing_items == true
-              # if incoming cell is not empty
-              case cell_value
-
-              when RepositoryStockValue
-                existing_cell.value.update_data!(cell_value, @user, preview: preview) unless cell_value.nil?
-
-              when RepositoryListValue
-                repository_list_item_id = cell_value[:repository_list_item_id]
-                existing_cell.value.update_data!(repository_list_item_id, @user, preview: preview) unless cell_value.nil?
-
-              when RepositoryStatusValue
-                repository_status_item_id = cell_value[:repository_status_item_id]
-                existing_cell.value.update_data!(repository_status_item_id, @user, preview: preview) unless cell_value.nil?
-
-              else
-                sanitized_cell_value_data = sanitize_cell_value_data(cell_value.data)
-                existing_cell.value.update_data!(sanitized_cell_value_data, @user, preview: preview) unless cell_value.nil?
-              end
-
-              # if incoming cell is empty && should_overwrite_with_empty_cells
-              existing_cell.value.destroy! if cell_value.nil? && should_overwrite_with_empty_cells == true
-
-              # if incoming cell is empty && !should_overwrite_with_empty_cells
-              next if cell_value.nil? && should_overwrite_with_empty_cells == false
-            end
-          else
-            # no existing_cell. Create a new one.
-            cell_value.repository_cell.value = cell_value
-            cell_value.save!(validate: false)
-          end
-        end
-
-        full_row[:repository_row]
-      end
-    end
+    def handle_invalid_cell_value(value, cell_value)
+      if value.present? && cell_value.nil?
+        @errors << 'Incorrect data format'
+        true
+      else
+        false
+      end
+    end
+
+    def import_rows!
+      checked_rows = []
+      duplicate_ids = SpreadsheetParser.duplicate_ids(@sheet)
+
+      @rows.each do |row|
+        next if row.blank?
+
+        unless @header_skipped
+          @header_skipped = true
+          next
+        end
+        @total_new_rows += 1
+        incoming_row = SpreadsheetParser.parse_row(row, @sheet, date_format: @user.settings['date_format'])
+        existing_row = RepositoryRow.includes(repository_cells: :value)
+                                    .find_by(id: incoming_row[0].to_s.gsub(RepositoryRow::ID_PREFIX, ''))
+
+        if existing_row.present?
+          if !@can_edit_existing_items
+            existing_row.import_status = 'unchanged'
+          elsif existing_row.archived
+            existing_row.import_status = 'archived'
+          elsif existing_row.repository_id != @repository.id
+            existing_row.import_status = 'incorrect_inventory'
+          elsif duplicate_ids.include?(existing_row.id)
+            existing_row.import_status = 'duplicated'
+          end
+
+          if existing_row.import_status.present?
+            checked_rows << existing_row if @preview
+            next
+          end
+        end
+
+        checked_rows << import_row(existing_row, incoming_row)
+      end
+
+      changes = ActiveModelSerializers::SerializableResource.new(
+        checked_rows.compact,
+        each_serializer: RepositoryRowSerializer,
+        include: [:repository_cells]
+      ).as_json
+
+      { status: :ok, nr_of_added: @new_rows_added, total_nr: @total_new_rows, changes: changes,
+        import_date: I18n.l(Date.today, format: :full_date) }
+    end
+
+    def import_row(repository_row, import_row)
+      @repository.transaction do
+        @errors = []
+        @updated = false
+        repository_row_name = try_decimal_to_string(import_row[@name_index])
+
+        if repository_row.present?
+          repository_row.name = repository_row_name
+        else
+          repository_row = RepositoryRow.new(name: repository_row_name,
+                                             repository: @repository,
+                                             created_by: @user,
+                                             last_modified_by: @user,
+                                             import_status: 'created')
+        end
+
+        @preview ? repository_row.validate : repository_row.save!
+        @errors << repository_row.errors.full_messages.join(',') if repository_row.errors.present?
+
+        @updated = repository_row.changed?
+
+        @columns.each_with_index do |column, index|
+          next if column.blank?
+
+          value = import_row[index]
+          value = try_decimal_to_string(value) unless column.repository_number_value?
+
+          cell_value = if value.present?
+                         column.data_type.constantize.import_from_text(
+                           value,
+                           {
+                             created_by: @user,
+                             last_modified_by: @user,
+                             repository_cell_attributes: {
+                               repository_row: repository_row,
+                               repository_column: column,
+                               importing: true
+                             }
+                           },
+                           @user.as_json(root: true, only: :settings).deep_symbolize_keys
+                         )
+                       end
+          next if handle_invalid_cell_value(value, cell_value)
+
+          existing_cell = repository_row.repository_cells.find { |c| c.repository_column_id == column.id }
+
+          existing_cell = if cell_value.nil?
+                            handle_nil_cell_value(existing_cell)
+                          else
+                            handle_existing_cell_value(existing_cell, cell_value, repository_row)
+                          end
+
+          @updated ||= existing_cell&.value&.changed?
+          @errors << existing_cell.value.errors.full_messages.join(',') if existing_cell&.value&.errors.present?
+        end
+        repository_row.import_status = if @errors.present?
+                                         @errors.join(',')
+                                       elsif repository_row.import_status == 'created'
+                                         @new_rows_added += 1
+                                         'created'
+                                       elsif @updated
+                                         @new_rows_added += 1
+                                         'updated'
+                                       else
+                                         'unchanged'
+                                       end
+        repository_row
+      rescue ActiveRecord::RecordInvalid
+        raise ActiveRecord::Rollback
+      end
+    end
+
+    def handle_nil_cell_value(repository_cell)
+      return unless repository_cell.present? && @should_overwrite_with_empty_cells
+
+      if @preview
+        repository_cell = nil
+        @updated = true
+      else
+        repository_cell.value.destroy!
+      end
+
+      repository_cell
+    end
+
+    def handle_existing_cell_value(repository_cell, cell_value, repository_row)
+      if repository_cell.present?
+        case cell_value
+        when RepositoryStockValue
+          repository_cell.value.update_data!(cell_value, @user, preview: @preview)
+        when RepositoryListValue
+          repository_list_item_id = cell_value[:repository_list_item_id]
+          repository_cell.value.update_data!(repository_list_item_id, @user, preview: @preview)
+        when RepositoryStatusValue
+          repository_status_item_id = cell_value[:repository_status_item_id]
+          repository_cell.value.update_data!(repository_status_item_id, @user, preview: @preview)
+        else
+          sanitized_cell_value_data = sanitize_cell_value_data(cell_value.data)
+          repository_cell.value.update_data!(sanitized_cell_value_data, @user, preview: @preview)
+        end
+        repository_cell
+      else
+        # Create new cell
+        cell_value.repository_cell.value = cell_value
+        repository_row.repository_cells << cell_value.repository_cell
+        @preview ? cell_value.validate : cell_value.save!
+        @updated ||= true
+        cell_value.repository_cell
+      end
+    end
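For reference, the per-row import_status values produced by the new Importer, collected from the hunk above:

    # 'created'             - new row persisted (validated only when previewing)
    # 'updated'             - existing row or one of its cells changed
    # 'unchanged'           - nothing changed, or editing existing items is disabled
    # 'archived'            - existing row is archived; skipped
    # 'incorrect_inventory' - row belongs to a different repository; skipped
    # 'duplicated'          - the same ID appears more than once in the sheet
    # anything else         - comma-joined validation errors for that row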
@@ -1,7 +0,0 @@
-<% if error.present? %>
-  <div class="alert alert-danger" role="alert">
-    <div><%= error_title %></div>
-    <br>
-    <%= error %>
-  </div>
-<% end %>
@@ -2341,6 +2341,7 @@ en:
        duplicated_values: "Two or more columns have the same mapping."
        errors_list_title: "Items were not imported because one or more errors were found:"
        no_repository_name: "Item name is required!"
+       mapping_error: "Column mappings are required"
        edit_record: "Edit"
        assign_record: "Assign to task"
        copy_record: "Duplicate"