Merge pull request #7291 from lasniscinote/gl_SCI_10399

(dev) Detect duplicated item IDs when parsing a spreadsheet [SCI-10399]
This commit is contained in:
Gregor Lasnibat 2024-03-25 13:03:44 +01:00 committed by GitHub
commit 411a182f25
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
5 changed files with 33 additions and 4 deletions

View file

@ -288,6 +288,12 @@ class RepositoriesController < ApplicationController
items_size: Constants::IMPORT_REPOSITORY_ITEMS_LIMIT)
)
else
sheet = SpreadsheetParser.open_spreadsheet(import_params[:file])
duplicate_ids = SpreadsheetParser.duplicate_ids(sheet)
if duplicate_ids.any?
@importing_duplicates_warning = t('repositories.import_records.error_message.importing_duplicates', duplicate_ids: duplicate_ids)
end
@import_data = parsed_file.data
if @import_data.header.blank? || @import_data.columns.blank?

View file

@ -66,4 +66,12 @@ class SpreadsheetParser
row.map(&:to_s)
end
end
# Returns the item IDs that occur in more than one data row of the sheet.
#
# @param sheet [#drop] enumerable of rows; the first row is assumed to be
#   the header and is skipped. Each row must respond to #first, which
#   yields the item-ID cell.
#   NOTE(review): assumes the object returned by
#   SpreadsheetParser.open_spreadsheet enumerates full rows as arrays —
#   confirm against its return type.
# @return [Array] IDs that appear two or more times, in first-seen order
def self.duplicate_ids(sheet)
  # Drop the header row, take the first cell (the ID) of every row, and
  # discard blank IDs: rows without an ID cannot collide with each other,
  # and counting them would wrongly report nil/"" as a "duplicate" when
  # several rows have an empty ID cell (causing them to be skipped on import).
  ids = sheet.drop(1)
             .map(&:first)
             .reject { |id| id.nil? || id.to_s.strip.empty? }
  # Count occurrences, then keep only the IDs seen more than once
  counts = Hash.new(0)
  ids.each { |id| counts[id] += 1 }
  counts.select { |_id, count| count > 1 }.keys
end
end

View file

@ -57,6 +57,7 @@ module RepositoryImportParser
def import_rows!(can_edit_existing_items, should_overwrite_with_empty_cells)
errors = false
duplicate_ids = SpreadsheetParser.duplicate_ids(@sheet)
@repository.transaction do
batch_counter = 0
@ -66,6 +67,9 @@ module RepositoryImportParser
# Skip empty rows
next if row.blank?
# Skip duplicates
next if duplicate_ids.include?(row.first)
unless @header_skipped
@header_skipped = true
next
@ -82,11 +86,8 @@ module RepositoryImportParser
incoming_row.each_with_index do |value, index|
if index == @name_index
# extract row_id
row_id = try_decimal_to_string(value)
# check if row (inventory) already exists
existing_row = RepositoryRow.find_by(name: row_id, repository: @repository)
existing_row = RepositoryRow.find_by(id: incoming_row[0])
# if it doesn't exist create it
unless existing_row
@ -145,7 +146,15 @@ module RepositoryImportParser
end
def import_batch_to_database(full_row_import_batch, can_edit_existing_items, should_overwrite_with_empty_cells)
skipped_rows = []
full_row_import_batch.each do |full_row|
# skip archived rows and rows that belong to other repositories
if full_row[:repository_row].archived || full_row[:repository_row].repository_id != @repository.id
skipped_rows << full_row[:repository_row]
next
end
full_row[:repository_row].save!(validate: false)
@new_rows_added += 1

View file

@ -84,6 +84,11 @@
</ul>
</div>
</div>
<% if @importing_duplicates_warning %>
<div class="alert alert-warning">
<%= @importing_duplicates_warning %>
</div>
<% end %>
<div class="modal-footer">
<button type="button" class="btn btn-secondary" data-dismiss="modal"><%= t('general.cancel')%></button>
<input type="submit" class="btn btn-success" value="<%= t('repositories.modal_parse.import') %>">

View file

@ -2140,6 +2140,7 @@ en:
partial_success_flash: "%{nr} of %{total_nr} successfully imported. Other rows contained errors."
error_message:
items_limit: "The imported file contains too many rows. Max %{items_size} items allowed to upload at once."
importing_duplicates: "Duplicate item IDs detected: %{duplicate_ids}. These rows will be ignored during import."
temp_file_not_found: "This file could not be found. Your session might expire."
session_expired: "Your session expired. Please try again."
no_data_to_parse: "There's nothing to be parsed."