Anonymous
Dealing with chunk file upload using jQuery and ASP.NET MVC
Post
Anonymous » 02 Aug 2024, 07:55
I am trying to upload documents in chunks, but unfortunately the uploaded documents end up corrupted.
I store some metadata along with each file. First I save the metadata as JSON, and that JSON is stored on the server. Based on the JSON, a folder is then created with all the files inside it, and a separate service picks the documents up from that folder and stores them in our system. The problem occurs at the step where the chunked files are written into the folder on the server's disk.
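For orientation, here is the intended flow reduced to a minimal sketch. It assumes the two endpoints behave as in the code below; postJson and blobToBase64 are hypothetical helpers standing in for the project's ajaxCallFunction wrapper, not real project functions:
Code:
// Minimal sketch of the two-phase protocol (assumption: postJson and
// blobToBase64 are illustrative stand-ins, not the project's real helpers).
function postJson(url, body) {
    return fetch(url, {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify(body)
    }).then(function (r) { return r.json(); });
}

function blobToBase64(blob) {
    return new Promise(function (resolve, reject) {
        var reader = new FileReader();
        reader.onload = function () { resolve(reader.result.split(',')[1]); }; // strip the "data:...;base64," prefix
        reader.onerror = reject;
        reader.readAsDataURL(blob);
    });
}

async function uploadWithMetadata(saveDetails, file, chunkSize) {
    // Phase 1: register the metadata record; the server returns its id in Ref1.
    var saved = await postJson('/api/BulkUpload/BulkUpload/BulkUploadWithMetaDataJSON', saveDetails);
    saveDetails.Id = saved.Ref1;
    // Phase 2: send the file as base64-encoded chunks, strictly one at a time;
    // ChunkUploaded = true on the last chunk tells the server to write the file.
    for (var offset = 0; offset < file.size; offset += chunkSize) {
        var chunk = file.slice(offset, offset + chunkSize);
        saveDetails.FileUploadItemList = [{
            FileName: file.name,
            FileContent: await blobToBase64(chunk),
            ChunkUploaded: offset + chunkSize >= file.size
        }];
        await postJson('/api/BulkUpload/BulkUpload/UpdateFileChunkBulkUploadWithMetaDataJSON', saveDetails);
    }
}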
This is my code:
Code:
function btnUpload_click() {
var e = filename[0];
!e ? Utils.showAlert("File input element not found.")
: e.files.length === 0 ? Utils.showAlert("Files not selected for upload.")
: (messagediv.html(''), multipleFileUpload(e));
}
function multipleFileUpload(e) {
const chunkSize = 1024 * 1024; // 1MB chunk size
var SaveDetails = {
DocTypeId: ctrlDocType.data("kendoDropDownList").value(),
ProcessId: ctrlProcess.data("kendoDropDownList").value(),
ClassificationLevel: ctrlClassificationLevel.data("kendoDropDownList").value(),
Priority: ctrlPriority.data("kendoDropDownList").value(),
FolderId: FolderId,
TreatFileNameAsDocumentNumber: $('#chkTreatAsDocNumber').is(':checked') ? 'Y' : 'N',
LoggedInUserId: Utils.getUserId(),
FileUploadItemList: [],
MetaDataDateFormat: ___culture,
};
saveBulkUploadWithMetaDataJSON(SaveDetails, function (data) {
var promises = [];
SaveDetails.Id = data.Ref1;
for (var i = 0; i < e.files.length; i++) {
var file = e.files[i];
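// Note: every readFileInChunks call below mutates this same shared SaveDetails object, and Promise.all runs them concurrently.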
promises.push(readFileInChunks(file, chunkSize, SaveDetails));
}
Promise.all(promises).then(function () {
clearDropDownOnChange('');
GetAllServiceFilesForBulkUploadWithMetaData();
}).catch(function (error) {
Utils.showError(error.message);
});
});
}
function readFileInChunks(file, chunkSize, saveDetails) {
return new Promise((resolve, reject) => {
const reader = new FileReader();
let offset = 0;
const chunks = [];
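// Read loop: readNextChunk() slices the next chunk and starts a read; onload uploads it, advances offset, and repeats until offset passes file.size.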
reader.onload = function (event) {
const base64Data = event.target.result.split(',')[1]; // Extract base64 data
const fileItem = {
FileName: file.name,
FileContent: base64Data
};
saveDetails.FileUploadItemList = [];
saveDetails.FileUploadItemList.push(fileItem);
// Update the offset and read the next chunk
offset += chunkSize;
if (offset < file.size) {
saveDetails.FileUploadItemList[0].ChunkUploaded = false;
// Call the update function for each chunk
UpdateFileChunkBulkUploadWithMetaDataJSON(saveDetails)
.then(readNextChunk)
.catch(reject); // Handle any errors during the update
} else {
saveDetails.FileUploadItemList[0].ChunkUploaded = true;
// Resolve the promise with the collected chunks
UpdateFileChunkBulkUploadWithMetaDataJSON(saveDetails)
.then(() => resolve(chunks))
.catch(reject); // Handle any errors during the final update
}
};
reader.onerror = function (event) {
reject(new Error("File could not be read! Code " + event.target.error.code));
};
function readNextChunk() {
const blob = file.slice(offset, offset + chunkSize);
reader.readAsDataURL(blob);
}
readNextChunk();
});
}
function saveBulkUploadWithMetaDataJSON(SaveDetails, callback) {
ajaxCallFunction("POST", "/api/BulkUpload/BulkUpload/BulkUploadWithMetaDataJSON", JSON.stringify(SaveDetails), function (response) {
if (callback) callback(response);
});
}
function UpdateFileChunkBulkUploadWithMetaDataJSON(saveDetails) {
return new Promise((resolve, reject) => {
ajaxCallFunction("POST", "/api/BulkUpload/BulkUpload/UpdateFileChunkBulkUploadWithMetaDataJSON", JSON.stringify(saveDetails), function (response) {
if (response.ReturnStatus == "0") {
resolve(response);
} else {
reject(new Error("Failed to update chunk metadata"));
}
});
});
}
Here is my API:
Code:
[HttpPost]
[Authorize]
[Filters.AuthorizeLoginApi()]
[Route("api/BulkUpload/BulkUpload/BulkUploadWithMetaDataJSON")]
[RateLimitApiAttribute(count: GetCountOnFunctions.AddEdit, timeUnit: GetMinutesOnFunctions.AddEdit)]
public CSVUploadResponse BulkUploadWithMetaDataJSON([FromBody] DocPro.DMS.BusinessEntities.Request.BulkUpload.BulkUploadWithMetaDataRequest request)
{
StringBuilder Success = new StringBuilder();
CSVUploadResponse response = new CSVUploadResponse();
try
{
if (request == null)
{
response.ReturnMessage = "Unable To process.Please try again.";
return response;
}
string BulkUploadWithMetaDataServiceFilePath = Convert.ToString(System.Configuration.ConfigurationManager.AppSettings["BulkUploadWithMetaDataServiceFilePath"]);
if (System.IO.Directory.Exists(BulkUploadWithMetaDataServiceFilePath))
{
DirectoryInfo di = new DirectoryInfo(BulkUploadWithMetaDataServiceFilePath);
System.IO.FileInfo fi = di.GetFiles().OrderBy(file => file.Name).ToArray().Where(file => file.Extension.ToLower() == ".json").ToArray().FirstOrDefault();
if (fi == null)
{
System.IO.File.Create(BulkUploadWithMetaDataServiceFilePath + "\\Records.json").Close();
fi = di.GetFiles().OrderBy(file => file.Name).ToArray().Where(file => file.Extension.ToLower() == ".json").ToArray().FirstOrDefault();
}
string directoryName = string.Empty;
DateTime dateTimeNow = DateTime.Now;
string formattedDate = dateTimeNow.ToString("ddMMyyyy");
string directoryNameWithoutNumber = BulkUploadWithMetaDataServiceFilePath + "\\" + Convert.ToString(request.LoggedInUserId) + "_" + formattedDate;
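// Probe "<userId>_<ddMMyyyy>_1", "_2", ... until a directory name that does not exist yet is found.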
int directoryCount = 1;
directoryName = directoryNameWithoutNumber + "_" + Convert.ToString(directoryCount);
while (Directory.Exists(directoryName))
{
directoryCount++;
directoryName = directoryNameWithoutNumber + "_" + Convert.ToString(directoryCount);
}
Directory.CreateDirectory(directoryName);
//foreach (BusinessEntities.Request.BulkUpload.FileUploadItem data in request.FileUploadItemList)
//{
// System.IO.File.WriteAllBytes(directoryName + "\\" + data.FileName, data.FileContent);
//}//Will delete this
string jsonText = System.IO.File.ReadAllText(fi.FullName);
List<BusinessEntities.Response.BulkUpload.BulkUploadResponse> BulkUploadResponseList =
    JsonConvert.DeserializeObject<List<BusinessEntities.Response.BulkUpload.BulkUploadResponse>>(jsonText);
if (BulkUploadResponseList == null)
{
BulkUploadResponseList = new List<BusinessEntities.Response.BulkUpload.BulkUploadResponse>();
}
var guid = Guid.NewGuid().ToString();
BusinessEntities.Response.BulkUpload.BulkUploadResponse BulkUploadResponse = new BusinessEntities.Response.BulkUpload.BulkUploadResponse()
{
Id = guid,
DocTypeId = request.DocTypeId,
ProcessId = request.ProcessId,
ClassificationLevel = request.ClassificationLevel,
Priority = request.Priority,
FolderId = request.FolderId,
TreatFileNameAsDocumentNumber = request.TreatFileNameAsDocumentNumber,
IsCompleted = "N",
LoggedInUserId = request.LoggedInUserId,
MetaDataDateFormat = request.MetaDataDateFormat,
Progress = 0,
RequestedOn = DateTime.Now,
TotalRecords = request.FileUploadItemList.Count(),
DirectoryName = directoryName,
FileName = Path.GetFileName(directoryName)
};
BulkUploadResponseList.Add(BulkUploadResponse);
string updatedJson = JsonConvert.SerializeObject(BulkUploadResponseList, Formatting.Indented);
System.IO.File.WriteAllText(fi.FullName, updatedJson);
response.Ref1 = guid;
Success.Append("Your file is now in queue for processing!");
response.ReturnMessage = Convert.ToString(Success);
return response;
}
else
{
Success.Append("Folder structure not configured as per defined in configuration. Please contact DocPro Administrator.");
response.ReturnMessage = Convert.ToString(Success);
return response;
}
}
catch (Exception ex)
{
response.ReturnMessage = ex.Message;
}
return response;
}
[HttpPost]
[Authorize]
[Filters.AuthorizeLoginApi()]
[Route("api/BulkUpload/BulkUpload/UpdateFileChunkBulkUploadWithMetaDataJSON")]
public SPResponse UpdateFileChunkBulkUploadWithMetaDataJSON([FromBody] DocPro.DMS.BusinessEntities.Request.BulkUpload.BulkUploadWithMetaDataRequest request)
{
SPResponse response = new SPResponse();
if (request == null)
{
response.ReturnMessage = "Unable To process.Please try again.";
return response;
}
string BulkUploadWithMetaDataServiceFilePath = Convert.ToString(System.Configuration.ConfigurationManager.AppSettings["BulkUploadWithMetaDataServiceFilePath"]);
if (System.IO.Directory.Exists(BulkUploadWithMetaDataServiceFilePath))
{
DirectoryInfo di = new DirectoryInfo(BulkUploadWithMetaDataServiceFilePath);
System.IO.FileInfo fi = di.GetFiles().OrderBy(file => file.Name).ToArray().Where(file => file.Extension.ToLower() == ".json").ToArray().FirstOrDefault();
if (fi == null)
{
System.IO.File.Create(BulkUploadWithMetaDataServiceFilePath + "\\Records.json").Close();
fi = di.GetFiles().OrderBy(file => file.Name).ToArray().Where(file => file.Extension.ToLower() == ".json").ToArray().FirstOrDefault();
}
//var jsonFile = Path.Combine(fi.DirectoryName, fi.FullName);
DirectoryInfo jdi = new DirectoryInfo(fi.DirectoryName);
FileInfo[] jFile = jdi.GetFiles("Records.json");
foreach (FileInfo json in jFile)
{
string jsonFileText = System.IO.File.ReadAllText(json.FullName);
var jsonConfig = JsonConvert.DeserializeObject<List<BusinessEntities.Response.BulkUpload.BulkUploadResponse>>(jsonFileText);
foreach (var data in jsonConfig)
{
if (data.Id == request.Id)
{
data.FileContent = AppendData(data.FileContent, request.FileUploadItemList[0].FileContent);
if (request.FileUploadItemList[0].ChunkUploaded)
{
System.IO.File.WriteAllBytes(data.DirectoryName + "\\" + request.FileUploadItemList[0].FileName, data.FileContent);
data.FileContent = null;
}
}
}
}
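// Note: jsonConfig has only been modified in memory above; nothing in this method writes the updated list back to Records.json.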
//foreach (BusinessEntities.Request.BulkUpload.FileUploadItem data in request.FileUploadItemList)
//{
// System.IO.File.WriteAllBytes(directoryName + "\\" + data.FileName, data.FileContent);
//}
response.ReturnMessage = "Chunk Uploaded";
response.ReturnStatus = "0";
}
return response;
}
I append each chunk to the JSON record, and once all chunks have been appended I write the file to disk.
What is wrong here?
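One general property of base64 matters when chunks are appended, shown below in a small Node sketch (since the implementation of AppendData is not visible here, this is an illustration of the principle, not the actual method): each independently encoded chunk carries its own padding, so chunks must be decoded back to bytes individually before concatenation. Joining the base64 strings and decoding once does not reproduce the original bytes when the chunk size is not a multiple of 3, and 1024 * 1024 is not.
Code:
// Node sketch: appending base64 chunks safely vs. naively.
// Assumption: this mirrors what an append helper must do; it is not the
// actual AppendData implementation, which is not shown in the question.
const crypto = require('crypto');

const chunkSize = 1024 * 1024;                        // 1 MiB, not a multiple of 3
const original = crypto.randomBytes(2 * chunkSize + 12345);

// Encode each chunk independently, as the client's FileReader does.
const b64Chunks = [];
for (let off = 0; off < original.length; off += chunkSize) {
    b64Chunks.push(original.subarray(off, off + chunkSize).toString('base64'));
}

// Naive: concatenate the base64 text and decode once.
// Each chunk ends with its own '==' padding, so this decodes incorrectly.
const naive = Buffer.from(b64Chunks.join(''), 'base64');

// Safe: decode each chunk to bytes first, then concatenate the bytes.
const safe = Buffer.concat(b64Chunks.map(c => Buffer.from(c, 'base64')));

console.log('naive matches original:', naive.equals(original)); // false
console.log('safe matches original :', safe.equals(original));  // true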
More details here:
https://stackoverflow.com/questions/78822048/dealing-with-chunk-file-upload-using-jquery-and-asp-net-mvc