Added deleting of files on S3 server when editing/removing a file. Some refactoring.

This commit is contained in:
Matej Zrimšek 2016-08-17 09:44:53 +02:00
parent ccdccc5cf1
commit e4a6a3944e
13 changed files with 83 additions and 119 deletions

View file

@ -116,8 +116,11 @@ function animateSpinner(el, start, options) {
}
/*
* Prevents user from accidentally leaving page when
* server is busy and notifies him with a message.
* Prevents user from accidentally leaving page when server is busy
* and notifies them with a message.
*
* NOTE: Don't prevent event propagation (ev.stopPropagation()), or
* else all events occurring when alert is up will be ignored.
*/
function preventLeavingPage(prevent, msg) {
busy = prevent;

View file

@ -149,12 +149,17 @@
* TODO On S3 server upload error the other files that were already
* asynchronously uploaded remain on the server, but should be deleted - this
* should generally not happen, because all files should fail to upload in
* such case (connection error).
* such cases (like S3 server connection error), except if user cancels the
* upload while still in progress. But this can be abused! One way to solve
* it is to make a request to our server for each file separately, and not for
* all together as it is now, despite being less efficient. To make it
* bulletproof, post requests should be issued on server-side.
*/
exports.directUpload = function (ev, form, signUrl, cb) {
form.clearFormErrors();
form.removeBlankFileForms();
$fileInputs = $(form).find("input[type=file]");
exports.directUpload = function (ev, signUrl) {
$form = $(ev.target.form);
$form.clearFormErrors();
$form.removeBlankFileForms();
$fileInputs = $form.find("input[type=file]");
var signRequests = [];
if ($fileInputs.length) {
@ -162,7 +167,6 @@
animateSpinner();
preventLeavingPage(true, I18n.t("general.file.uploading"));
ev.preventDefault();
ev.stopPropagation();
// Validates files and if OK gets upload post requests
_.each($fileInputs, function (fileInput) {
@ -178,7 +182,6 @@
// After successful file validation and posts fetching
if (signRequests.length) {
var fileRequests = [];
var fileIds = [];
if (signRequests.length == 1) {
arguments = [arguments];
@ -188,7 +191,6 @@
var jqXHR = responseData[2];
var data = JSON.parse(jqXHR.responseText);
processPosts(ev, $fileInput, data.posts, fileRequests);
fileIds.push(data.asset_id);
});
$.when.apply($, fileRequests).always(function () {
@ -196,11 +198,7 @@
preventLeavingPage(false);
}).then(function () {
// After successful posts processing and file uploading
$.each($fileInputs, function (fileIdx, fileInput) {
// Use file input to pass file id on submit
cb(fileInput, fileIds[fileIdx]);
});
$(ev.target.form).submit();
$form.submit();
});
}
}, function () {

View file

@ -143,7 +143,6 @@ function applyCollapseLinkCallBack() {
// Toggle collapse button
collapseIcon.toggleClass("glyphicon-collapse-up", !collapsed);
collapseIcon.toggleClass("glyphicon-collapse-down", collapsed);
});
}
@ -323,7 +322,8 @@ function processResult(ev, resultTypeEnum, forS3) {
if(nameValid && filesValid) {
if(forS3) {
// Redirects file uploading to S3
startFileUpload(ev, ev.target);
var url = '/asset_signature.json';
directUpload(ev, url);
} else {
// Local file uploading
animateSpinner();
@ -347,26 +347,6 @@ function processResult(ev, resultTypeEnum, forS3) {
}
}
// S3 direct uploading
// Kicks off S3 direct upload for a result-asset form. After each file
// finishes, the callback rewires the file <input> so the form submits
// the server-assigned asset id instead of the raw file contents.
function startFileUpload(ev, btn) {
  var $form = $(btn.form);
  // Present only when editing an existing asset (hidden id field in the
  // nested asset_attributes) — presumably rendered by the edit form;
  // NOTE(review): confirm against the form partial.
  var $editFileInput = $form.find("input[name='result[asset_attributes][id]']").get(0);
  var url = '/asset_signature.json';
  directUpload(ev, $form, url, function (fileInput, fileId) {
    if ($editFileInput) {
      // edit mode - input field has to be removed
      // (the existing hidden id field carries the new asset id instead)
      $editFileInput.value = fileId;
      $(fileInput).remove();
    } else {
      // create mode
      // Repurpose the file input itself as a hidden id field:
      // result[...][file] becomes result[...][id] holding the asset id.
      fileInput.type = "hidden";
      fileInput.name = fileInput.name.replace("[file]", "[id]");
      fileInput.value = fileId;
    }
  });
}
// This checks if the ctarget param exist in the
// rendered url and opens the comment tab
$(document).ready(function(){

View file

@ -576,7 +576,6 @@ $("[data-action='new-step']").on("ajax:success", function(e, data) {
// experience is improved
function processStep(ev, editMode, forS3) {
var $form = $(ev.target.form);
$form.clearFormErrors();
$form.removeBlankExcelTables(editMode);
$form.removeBlankFileForms();
@ -591,7 +590,8 @@ function processStep(ev, editMode, forS3) {
if (filesValid && checklistsValid && nameValid) {
if (forS3) {
// Redirects file uploading to S3
startFileUpload(ev, ev.target);
var url = '/asset_signature.json';
directUpload(ev, url);
} else {
// Local file uploading
animateSpinner();
@ -647,15 +647,3 @@ function renderTable(table) {
$(table).find(".ht_master .wtHolder").css("height", "100%");
}
}
// S3 direct uploading
// Kicks off S3 direct upload for a step form. After each file uploads,
// the callback converts the file <input> into a hidden field carrying
// the server-assigned asset id (renaming [file] to [id]) so the form
// submits asset ids rather than file contents.
function startFileUpload(ev, btn) {
  var $form = $(btn.form);
  var url = '/asset_signature.json';
  directUpload(ev, $form, url, function (fileInput, fileId) {
    fileInput.type = "hidden";
    fileInput.name = fileInput.name.replace("[file]", "[id]");
    fileInput.value = fileId;
  });
}

View file

@ -54,15 +54,17 @@ function formAjaxResultAsset($form) {
$form
.on("ajax:success", function(e, data) {
$form.after(data.html);
var newResult = $form.next();
initFormSubmitLinks(newResult);
var $newResult = $form.next();
initFormSubmitLinks($newResult);
$(this).remove();
applyEditResultAssetCallback();
applyCollapseLinkCallBack();
toggleResultEditButtons(true);
initResultCommentTabAjax();
expandResult(newResult);
expandResult($newResult);
$imgs = $newResult.find("img");
reloadImages($imgs);
})
.on("ajax:error", function(e, data) {
$form.renderFormErrors("result", data.errors, true, e);

View file

@ -3,9 +3,11 @@
* force browser to reload/update cached image
* (useful for AJAX calls).
*/
function reloadImage(img) {
var src = $(img).attr("src");
src = src.split("?", 1);
src += "?timestamp=" + new Date().getTime();
$(img).attr("src", src);
/*
 * Appends a fresh timestamp query parameter to each image's src,
 * forcing the browser to bypass its cache and re-fetch (plural
 * counterpart of the former reloadImage).
 */
function reloadImages(imgs) {
  _.each(imgs, function (img) {
    var src = $(img).attr("src");
    // split("?", 1) returns a one-element ARRAY, not a string; the +=
    // below relies on JS array-to-string coercion to recover the bare
    // URL before appending. Works, but fragile —
    // NOTE(review): src.split("?")[0] would be clearer.
    src = src.split("?", 1);
    src += "?timestamp=" + new Date().getTime();
    $(img).attr("src", src);
  });
}

View file

@ -71,19 +71,11 @@ function processFile(ev, forS3) {
if(filesValidator(ev, $fileInput, FileTypeEnum.AVATAR)) {
if(forS3) {
// Redirects file uploading to S3
startFileUpload(ev, ev.target);
var url = "/avatar_signature.json";
directUpload(ev, url);
} else {
// Local file uploading
animateSpinner();
}
}
}
// S3 direct uploading
// Kicks off S3 direct upload for the avatar form. The per-file callback
// is intentionally a no-op: the avatar form needs no input rewiring
// after upload, only the signed upload itself.
function startFileUpload(ev, btn) {
  var $form = $(btn.form);
  var url = "/avatar_signature.json";
  directUpload(ev, $form, url, function () {
  });
}

View file

@ -8,33 +8,20 @@ class AssetsController < ApplicationController
respond_to do |format|
format.json {
if asset_params[:asset_id]
asset = Asset.find_by_id asset_params[:asset_id]
asset.file.destroy
asset.file_empty asset_params[:file].original_filename, asset_params[:file].size()
validationAsset = asset
else
# We can't verify file content (spoofing) of an empty
# file, so we use dummy validationAsset instead
asset = Asset.new_empty asset_params[:file].original_filename, asset_params[:file].size()
validationAsset = Asset.new(asset_params)
end
# We need to validate again so that asset's
# after_validation gets triggered
if validationAsset.valid?
asset.save!
posts = generate_upload_posts asset
render json: {
asset_id: asset.id,
posts: posts
}
else
asset = Asset.new(asset_params)
if asset.errors.any?
# We need to validate, although 'new' already does it, so that
# asset's after_validation gets triggered, which modifies errors
asset.valid?
render json: {
status: 'error',
errors: validationAsset.errors
errors: asset.errors
} , status: :bad_request
else
posts = generate_upload_posts asset
render json: {
posts: posts
}
end
}
end
@ -156,7 +143,6 @@ class AssetsController < ApplicationController
def asset_params
params.permit(
:asset_id,
:file
)
end

View file

@ -32,14 +32,7 @@ class ResultAssetsController < ApplicationController
end
def create
asset_attrs = result_params[:asset_attributes]
if asset_attrs and asset_attrs[:id]
@asset = Asset.find_by_id asset_attrs[:id]
else
@asset = Asset.new(result_params[:asset_attributes])
end
@asset = Asset.new(result_params[:asset_attributes])
@asset.created_by = current_user
@asset.last_modified_by = current_user
@result = Result.new(

View file

@ -30,21 +30,21 @@ class StepsController < ApplicationController
def create
if @direct_upload
new_assets = []
step_data = step_params.except(:assets_attributes)
step_assets = step_params.slice(:assets_attributes)
@step = Step.new(step_data)
if step_assets.size > 0
step_assets[:assets_attributes].each do |i, data|
# Ignore destroy requests on create
if data[:_destroy].nil? and data[:id].present?
asset = Asset.find_by_id(data[:id])
asset.created_by = current_user
asset.last_modified_by = current_user
@step.assets << asset
end
step_assets[:assets_attributes].each do |i, data|
# Ignore destroy requests on create
if data[:_destroy].nil?
asset = Asset.new(data)
asset.created_by = current_user
asset.last_modified_by = current_user
new_assets << asset
end
end
@step.assets << new_assets
else
@step = Step.new(step_params)
end
@ -98,6 +98,13 @@ class StepsController < ApplicationController
})}, status: :ok
}
else
# On error, delete the newly added files from S3, as they were
# uploaded on client-side (in case of client-side hacking of
# asset's signature response)
new_assets.each do |asset|
asset.destroy
end
format.json {
render json: {
html: render_to_string({
@ -155,8 +162,7 @@ class StepsController < ApplicationController
if step_assets.include? :assets_attributes
step_assets[:assets_attributes].each do |i, data|
asset = Asset.find_by_id(data[:id])
asset = Asset.new(data)
unless @step.assets.include? asset or not asset
asset.created_by = current_user
asset.last_modified_by = current_user
@ -532,12 +538,11 @@ class StepsController < ApplicationController
attr_params = update_params[key]
for pos, attrs in params[key] do
assetExists = Asset.exists?(attrs[:id])
if attrs[:_destroy] == "1"
attr_params[pos] = {id: attrs[:id], _destroy: "1"}
params[key].delete(pos) if assetExists
params[key].delete(pos)
elsif has_destroy_params(params[key][pos])
attr_params[pos] = {id: attrs[:id]} if assetExists
attr_params[pos] = {id: attrs[:id]}
extract_destroy_params(params[key][pos], attr_params[pos])
end
end

View file

@ -208,6 +208,21 @@ class Asset < ActiveRecord::Base
end
end
# Removes the asset's file(s) from the S3 bucket, then deletes the DB row.
def destroy
  # Delete files from S3 (when updating an existing file, paperclip does
  # this automatically, so this is not needed in such cases)
  # Strip the leading "/" from the paperclip path to obtain the S3 object
  # key — assumes file.path always begins with "/"; TODO confirm for all
  # storage configurations.
  key = self.file.path[1..-1]
  S3_BUCKET.object(key).delete
  # For images (content type starting with "image/"), also delete every
  # generated thumbnail/style variant.
  if (self.file_content_type =~ /^image\//) == 0
    self.file.options[:styles].each do |style, option|
      key = self.file.path(style)[1..-1]
      S3_BUCKET.object(key).delete
    end
  end
  # NOTE(review): `delete` (not ActiveRecord `destroy`) skips callbacks —
  # presumably intentional to avoid re-entering this overridden method;
  # confirm no dependent cleanup relies on destroy callbacks.
  self.delete
end
# If organization is provided, its space_taken
# is updated as well
def update_estimated_size(org = nil)
@ -292,7 +307,7 @@ class Asset < ActiveRecord::Base
def step_or_result
# We must allow both step and result to be blank because of GUI
# (eventhough it's not really a "valid" asset)
# (even though it's not really a "valid" asset)
if step.present? && result.present?
errors.add(:base, "Asset can only be result or step, not both.")
end

View file

@ -114,7 +114,7 @@ class Step < ActiveRecord::Base
def cascade_after_destroy
Comment.destroy(@c_ids)
@c_ids = nil
Asset.destroy(@a_ids)
# Assets already deleted by here
@a_ids = nil
Table.destroy(@t_ids)
@t_ids = nil

View file

@ -2,5 +2,5 @@ class StepAsset < ActiveRecord::Base
validates :step, :asset, presence: true
belongs_to :step, inverse_of: :step_assets
belongs_to :asset, inverse_of: :step_asset
belongs_to :asset, inverse_of: :step_asset, :dependent => :destroy
end