Merge pull request #1670 from ONLYOFFICE/bugfix/63774-local-restore

Bugfix/63774 local restore
SuhorukovAnton 2023-08-28 18:57:14 +03:00 committed by GitHub
commit ea72f288a2
11 changed files with 250 additions and 73 deletions

View File

@@ -22,8 +22,8 @@
//
// All the Product's GUI elements, including illustrations and icon sets, as well as technical writing
// content are licensed under the terms of the Creative Commons Attribution-ShareAlike 4.0
// International. See the License terms at http://creativecommons.org/licenses/by-sa/4.0/legalcode
namespace ASC.Web.Studio.Core.Backup;
public class BackupFileUploadHandler
@@ -35,56 +35,101 @@ public class BackupFileUploadHandler
public async Task Invoke(HttpContext context,
PermissionContext permissionContext,
BackupAjaxHandler backupAjaxHandler)
BackupAjaxHandler backupAjaxHandler,
ICache cache,
TenantManager tenantManager,
IConfiguration configuration)
{
FileUploadResult result;
BackupFileUploadResult result = null;
try
{
if (context.Request.Form.Files.Count == 0)
{
result = Error("No files.");
}
{
if (!permissionContext.CheckPermissions(SecutiryConstants.EditPortalSettings))
{
result = Error("Access denied.");
}
var file = context.Request.Form.Files[0];
var filePath = backupAjaxHandler.GetTmpFilePath();
if (File.Exists(filePath))
{
File.Delete(filePath);
}
using (var fileStream = File.Create(filePath))
{
await file.CopyToAsync(fileStream);
throw new ArgumentException("Access denied.");
}
var tenantId = tenantManager.GetCurrentTenant().Id;
var path = backupAjaxHandler.GetTmpFilePath();
if (context.Request.Query["Init"].ToString() == "true")
{
long.TryParse(context.Request.Query["totalSize"], out var size);
if (size <= 0)
{
throw new ArgumentException("Total size must be greater than 0.");
}
result = Success();
var maxSize = tenantManager.GetCurrentTenantQuota().MaxTotalSize;
if (size > maxSize)
{
throw new ArgumentException(BackupResource.LargeBackup);
}
try
{
if (File.Exists(path))
{
File.Delete(path);
}
cache.Insert($"{tenantId} backupTotalSize", size.ToString(), TimeSpan.FromMinutes(10));
int.TryParse(configuration["files:uploader:chunk-size"], out var chunkSize);
chunkSize = chunkSize == 0 ? 10 * 1024 * 1024 : chunkSize;
result = Success(chunkSize);
}
catch
{
throw new ArgumentException("Can't start upload.");
}
}
else
{
long.TryParse(cache.Get<string>($"{tenantId} backupTotalSize"), out var totalSize);
if (totalSize <= 0)
{
throw new ArgumentException("Need init upload.");
}
var file = context.Request.Form.Files[0];
using var stream = file.OpenReadStream();
using var fs = File.Open(path, FileMode.Append);
await stream.CopyToAsync(fs);
if (fs.Length >= totalSize)
{
cache.Remove($"{tenantId} backupTotalSize");
result = Success(endUpload: true);
}
else
{
result = Success();
}
}
}
catch (Exception error)
{
result = Error(error.Message);
}
await context.Response.WriteAsync(System.Text.Json.JsonSerializer.Serialize(result));
await context.Response.WriteAsync(System.Text.Json.JsonSerializer.Serialize(result, new System.Text.Json.JsonSerializerOptions()
{
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull
}));
}
private FileUploadResult Success()
private BackupFileUploadResult Success(int chunk = 0, bool endUpload = false)
{
return new FileUploadResult
return new BackupFileUploadResult
{
Success = true
Success = true,
ChunkSize = chunk,
EndUpload = endUpload
};
}
private FileUploadResult Error(string messageFormat, params object[] args)
private BackupFileUploadResult Error(string messageFormat, params object[] args)
{
return new FileUploadResult
return new BackupFileUploadResult
{
Success = false,
Message = string.Format(messageFormat, args)
@@ -92,6 +137,14 @@ public class BackupFileUploadHandler
}
}
internal class BackupFileUploadResult
{
public bool Success { get; set; }
public string Message { get; set; }
public int ChunkSize { get; set; }
public bool EndUpload { get; set; }
}
public static class BackupFileUploadHandlerExtensions
{
public static IApplicationBuilder UseBackupFileUploadHandler(this IApplicationBuilder builder)
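
For anyone wiring a client against the new handler, the flow is: one init POST declaring totalSize (validated against the tenant quota), which returns the negotiated ChunkSize, followed by repeated POSTs each carrying one form-file chunk until the server has received totalSize bytes and answers with EndUpload. A minimal client sketch follows; the base URL, file name, and pre-established authentication are assumptions for illustration, not part of this PR.

// Minimal sketch of a client for the chunked backup upload protocol above.
// Assumptions (not in this PR): portal base URL, input file name, auth cookie.
using System;
using System.IO;
using System.Net.Http;
using System.Text.Json;

var http = new HttpClient { BaseAddress = new Uri("https://portal.example.com") };
const string endpoint = "/backupFileUpload.ashx";
const string backupPath = "backup.tar.gz"; // hypothetical local backup file
var totalSize = new FileInfo(backupPath).Length;

// Phase 1: declare the total size; the handler answers with the chunk size.
var initResponse = await http.PostAsync($"{endpoint}?init=true&totalSize={totalSize}", null);
var init = JsonDocument.Parse(await initResponse.Content.ReadAsStringAsync()).RootElement;
if (!init.GetProperty("Success").GetBoolean())
    throw new InvalidOperationException(init.GetProperty("Message").GetString());
var chunkSize = init.GetProperty("ChunkSize").GetInt32();

// Phase 2: POST the file one chunk at a time; the handler appends each chunk
// to the temp path and reports EndUpload once the length reaches totalSize.
await using var file = File.OpenRead(backupPath);
var buffer = new byte[chunkSize];
int read;
while ((read = await file.ReadAsync(buffer)) > 0)
{
    using var form = new MultipartFormDataContent
    {
        { new ByteArrayContent(buffer, 0, read), "file", Path.GetFileName(backupPath) }
    };
    var chunkResponse = await http.PostAsync(endpoint, form);
    var body = JsonDocument.Parse(await chunkResponse.Content.ReadAsStringAsync()).RootElement;
    if (!body.GetProperty("Success").GetBoolean())
        throw new InvalidOperationException(
            body.TryGetProperty("Message", out var m) ? m.GetString() : "Chunk upload failed.");
}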

View File

@@ -77,5 +77,14 @@ namespace ASC.Data.Backup.Core {
return ResourceManager.GetString("ButtonSetPassword", resourceCulture);
}
}
/// <summary>
/// Looks up a localized string similar to Backup is larger than the total size of the portal.
/// </summary>
internal static string LargeBackup {
get {
return ResourceManager.GetString("LargeBackup", resourceCulture);
}
}
}
}

View File

@@ -1,5 +1,64 @@
<?xml version="1.0" encoding="utf-8"?>
<root>
<!--
Microsoft ResX Schema
Version 2.0
The primary goals of this format is to allow a simple XML format
that is mostly human readable. The generation and parsing of the
various data types are done through the TypeConverter classes
associated with the data types.
Example:
... ado.net/XML headers & schema ...
<resheader name="resmimetype">text/microsoft-resx</resheader>
<resheader name="version">2.0</resheader>
<resheader name="reader">System.Resources.ResXResourceReader, System.Windows.Forms, ...</resheader>
<resheader name="writer">System.Resources.ResXResourceWriter, System.Windows.Forms, ...</resheader>
<data name="Name1"><value>this is my long string</value><comment>this is a comment</comment></data>
<data name="Color1" type="System.Drawing.Color, System.Drawing">Blue</data>
<data name="Bitmap1" mimetype="application/x-microsoft.net.object.binary.base64">
<value>[base64 mime encoded serialized .NET Framework object]</value>
</data>
<data name="Icon1" type="System.Drawing.Icon, System.Drawing" mimetype="application/x-microsoft.net.object.bytearray.base64">
<value>[base64 mime encoded string representing a byte array form of the .NET Framework object]</value>
<comment>This is a comment</comment>
</data>
There are any number of "resheader" rows that contain simple
name/value pairs.
Each data row contains a name, and value. The row also contains a
type or mimetype. Type corresponds to a .NET class that support
text/value conversion through the TypeConverter architecture.
Classes that don't support this are serialized and stored with the
mimetype set.
The mimetype is used for serialized objects, and tells the
ResXResourceReader how to depersist the object. This is currently not
extensible. For a given mimetype the value must be set accordingly:
Note - application/x-microsoft.net.object.binary.base64 is the format
that the ResXResourceWriter will generate, however the reader can
read any of the formats listed below.
mimetype: application/x-microsoft.net.object.binary.base64
value : The object must be serialized with
: System.Runtime.Serialization.Formatters.Binary.BinaryFormatter
: and then encoded with base64 encoding.
mimetype: application/x-microsoft.net.object.soap.base64
value : The object must be serialized with
: System.Runtime.Serialization.Formatters.Soap.SoapFormatter
: and then encoded with base64 encoding.
mimetype: application/x-microsoft.net.object.bytearray.base64
value : The object must be serialized into a byte array
: using a System.ComponentModel.TypeConverter
: and then encoded with base64 encoding.
-->
<xsd:schema id="root" xmlns="" xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:msdata="urn:schemas-microsoft-com:xml-msdata">
<xsd:import namespace="http://www.w3.org/XML/1998/namespace" />
<xsd:element name="root" msdata:IsDataSet="true">
@@ -53,10 +112,10 @@
<value>2.0</value>
</resheader>
<resheader name="reader">
<value>System.Resources.ResXResourceReader, System.Windows.Forms, Version=6.0.2.0, Culture=neutral, PublicKeyToken=b77a5c561934e089</value>
<value>System.Resources.ResXResourceReader, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089</value>
</resheader>
<resheader name="writer">
<value>System.Resources.ResXResourceWriter, System.Windows.Forms, Version=6.0.2.0, Culture=neutral, PublicKeyToken=b77a5c561934e089</value>
<value>System.Resources.ResXResourceWriter, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089</value>
</resheader>
<data name="BackupNotFound" xml:space="preserve">
<value>The backup file is invalid. Please, use a file created in ONLYOFFICE v11.5 or later.</value>
@@ -64,4 +123,7 @@
<data name="ButtonSetPassword" xml:space="preserve">
<value>Set Password</value>
</data>
<data name="LargeBackup" xml:space="preserve">
<value>Backup is larger than the total size of the portal</value>
</data>
</root>

View File

@@ -32,6 +32,8 @@ global using System.Reflection;
global using System.Security.Cryptography;
global using System.ServiceModel;
global using System.Text;
global using System.Text.Json.Serialization;
global using System.Text.RegularExpressions;
global using System.Xml;
global using System.Xml.Linq;
@@ -80,7 +82,6 @@ global using ASC.Notify.Recipients;
global using ASC.Security.Cryptography;
global using ASC.Web.Core.PublicResources;
global using ASC.Web.Core.Users;
global using ASC.Web.Core.Utility;
global using ASC.Web.Core.WhiteLabel;
global using ASC.Web.Studio.Core;
global using ASC.Web.Studio.Core.Notify;

View File

@@ -109,6 +109,7 @@ public class ChunkZipWriteOperator : IDataWriteOperator
theMemStream.Position = 0;
StoragePath = await _sessionHolder.UploadChunkAsync(_chunkedUploadSession, theMemStream, theMemStream.Length);
_sha.TransformBlock(buffer, 0, bytesRead, buffer, 0);
}
else
{
@@ -119,7 +120,6 @@ public class ChunkZipWriteOperator : IDataWriteOperator
await theMemStream.CopyToAsync(_fileStream);
_fileStream.Flush();
}
_sha.TransformBlock(buffer, 0, bytesRead, buffer, 0);
}
}
if (last)
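
The hunk above moves the _sha.TransformBlock call into the chunk-upload branch. Context for readers: .NET's incremental hashing requires each buffer to pass through TransformBlock exactly once, with one TransformFinalBlock call at the end; otherwise the digest will not match a one-shot ComputeHash over the same bytes. A generic, self-contained sketch of the pattern (illustrative only, not ONLYOFFICE code):

using System;
using System.IO;
using System.Security.Cryptography;

// Computes SHA-256 over a stream incrementally, buffer by buffer.
static byte[] HashInChunks(Stream input, int bufferSize = 81920)
{
    using var sha = SHA256.Create();
    var buffer = new byte[bufferSize];
    int read;
    while ((read = input.Read(buffer, 0, buffer.Length)) > 0)
    {
        // Feed each buffer exactly once; the output-buffer argument may be null.
        sha.TransformBlock(buffer, 0, read, null, 0);
    }
    sha.TransformFinalBlock(Array.Empty<byte>(), 0, 0);
    return sha.Hash; // valid only after TransformFinalBlock
}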

View File

@@ -42,10 +42,10 @@ global using ASC.Files.Core.EF;
global using ASC.Web.Api.Routing;
global using ASC.Web.Studio.Core.Backup;
global using ASC.Web.Studio.Core.Notify;
global using ASC.Web.Studio.Utility;
global using Microsoft.AspNetCore.Authorization;
global using Autofac;
global using Microsoft.AspNetCore.Authorization;
global using Microsoft.AspNetCore.Http.Features;
global using Microsoft.AspNetCore.Mvc;
global using Microsoft.AspNetCore.Server.Kestrel.Core;

View File

@@ -16,6 +16,7 @@
{
"Period":"00:15:00"
},
"ChunkSize": 20971520
"ChunkSize": 20971520,
"MaxLocalSize": 1048576000
}
}
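
For scale: the existing 20971520-byte ChunkSize is 20 × 1024 × 1024, i.e. a 20 MiB chunk, and the new MaxLocalSize value of 1048576000 bytes is 1000 × 1024 × 1024, roughly a 1 GB ceiling for locally uploaded backups; where MaxLocalSize is consumed is not shown in this diff.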

View File

@@ -8,7 +8,6 @@ import { TenantStatus } from "@docspace/common/constants";
import { startRestore } from "@docspace/common/api/portal";
import { combineUrl } from "@docspace/common/utils";
import toastr from "@docspace/components/toast/toastr";
import { request } from "@docspace/common/api/client";
const ButtonContainer = (props) => {
const {
@@ -28,29 +27,11 @@ const ButtonContainer = (props) => {
setTenantStatus,
isFormReady,
getStorageParams,
uploadLocalFile,
} = props;
const [isUploadingLocalFile, setIsUploadingLocalFile] = useState(false);
const [isLoading, setIsLoading] = useState(false);
const localFileUploading = async () => {
try {
const checkedFile = await request({
baseURL: combineUrl(window.DocSpaceConfig?.proxy?.url, config.homepage),
method: "post",
url: `/backupFileUpload.ashx`,
responseType: "text",
data: restoreResource,
});
return checkedFile;
} catch (e) {
toastr.error(e);
setIsUploadingLocalFile(false);
return null;
}
};
const onRestoreClick = async () => {
if (isCheckedThirdPartyStorage) {
const requiredFieldsFilled = isFormReady();
@@ -74,16 +55,16 @@ const ButtonContainer = (props) => {
}
if (isCheckedLocalFile) {
const isUploadedFile = await localFileUploading();
const uploadedFile = await uploadLocalFile();
if (!isUploadedFile) {
if (!uploadedFile) {
toastr.error(t("BackupCreatedError"));
setIsLoading(false);
return;
}
if (isUploadedFile?.Message) {
toastr.error(isUploadedFile.Message);
setIsUploadingLocalFile(false);
if (!uploadedFile.data.EndUpload) {
toastr.error(uploadedFile.data.Message ?? t("BackupCreatedError"));
setIsLoading(false);
return;
}
@@ -107,19 +88,17 @@
} catch (e) {
toastr.error(e);
setIsUploadingLocalFile(false);
setIsLoading(false);
}
};
const isButtonDisabled =
isLoading ||
isUploadingLocalFile ||
!isMaxProgress ||
!isConfirmed ||
!isEnableRestore ||
!restoreResource;
const isLoadingButton = isUploadingLocalFile || isLoading;
const isLoadingButton = isLoading;
return (
<>
@@ -154,11 +133,13 @@ export default inject(({ auth, backup }) => {
isFormReady,
getStorageParams,
restoreResource,
uploadLocalFile,
} = backup;
const { isRestoreAndAutoBackupAvailable } = currentQuotaStore;
const isMaxProgress = downloadingProgress === 100;
return {
uploadLocalFile,
isMaxProgress,
setTenantStatus,
isEnableRestore: isRestoreAndAutoBackupAvailable,

View File

@@ -5,10 +5,7 @@ import FileInput from "@docspace/components/file-input";
const LocalFile = ({ setRestoreResource, isEnableRestore, t }) => {
const onClickInput = (file) => {
let data = new FormData();
data.append("file", file);
setRestoreResource(data);
setRestoreResource(file);
};
return (
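
Handing the store the raw File object (a Blob) rather than a prebuilt FormData is what makes the chunking in BackupStore possible: Blob.slice() can cut byte ranges out of a File, whereas a FormData body cannot be split after construction.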

View File

@@ -7,7 +7,9 @@ import {
} from "../pages/PortalSettings/utils";
import toastr from "@docspace/components/toast/toastr";
import { AutoBackupPeriod } from "@docspace/common/constants";
//import api from "@docspace/common/api";
import { combineUrl } from "@docspace/common/utils";
import config from "PACKAGE_FILE";
import { uploadBackup } from "@docspace/common/api/files";
const { EveryDayType, EveryWeekType } = AutoBackupPeriod;
@@ -617,6 +619,73 @@ class BackupStore {
setRestoreResource = (value) => {
this.restoreResource = value;
};
setChunkUploadSize = (chunkUploadSize) => {
this.chunkUploadSize = chunkUploadSize;
};
uploadFileChunks = async (requestsDataArray, url) => {
const length = requestsDataArray.length;
let res;
for (let index = 0; index < length; index++) {
res = await uploadBackup(
combineUrl(window.DocSpaceConfig?.proxy?.url, config.homepage, url),
requestsDataArray[index]
);
if (!res) return false;
if (res.data.Message || !res.data.Success) return res;
}
return res;
};
uploadLocalFile = async () => {
try {
const url = "/backupFileUpload.ashx";
const res = await uploadBackup(
combineUrl(
window.DocSpaceConfig?.proxy?.url,
config.homepage,
`${url}?init=true&totalSize=${this.restoreResource.size}`
)
);
if (!res) return false;
if (res.data.Message || !res.data.Success) return res;
const chunkUploadSize = res.data.ChunkSize;
// Math.ceil takes a single argument: the number of chunks needed.
const chunks = Math.ceil(this.restoreResource.size / chunkUploadSize);
const requestsDataArray = [];
let chunk = 0;
while (chunk < chunks) {
const offset = chunk * chunkUploadSize;
const formData = new FormData();
formData.append(
"file",
this.restoreResource.slice(offset, offset + chunkUploadSize)
);
requestsDataArray.push(formData);
chunk++;
}
return await this.uploadFileChunks(requestsDataArray, url);
} catch (e) {
toastr.error(e);
return null;
}
};
}
export default BackupStore;
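
Worked example for the slicing above: a 52,428,800-byte (50 MiB) backup with the default 20 MiB chunk yields Math.ceil(52428800 / 20971520) = 3 chunks, covering byte ranges [0, 20971520), [20971520, 41943040), and [41943040, 52428800); slice() clamps the final end offset to the file size, so the last chunk is simply shorter.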

View File

@@ -560,6 +560,10 @@ export function uploadFile(url, data) {
return axios.post(url, data);
}
export function uploadBackup(url, data) {
return axios.post(url, data);
}
export function downloadFiles(fileIds, folderIds) {
const data = { fileIds, folderIds };
return request({ method: "put", url: "/files/fileops/bulkdownload", data });