Merge branch 'develop' into feature/login-error-page

Timofey Boyko 2023-02-07 18:38:40 +03:00
commit 0baee7fbee
64 changed files with 1180 additions and 507 deletions

Binary file not shown.

View File

@ -88,6 +88,13 @@ public static class CustomHealthCheck
if (redisConfiguration != null)
{
// https://github.com/imperugo/StackExchange.Redis.Extensions/issues/513
if (configuration.GetSection("Redis").GetValue<string>("User") != null)
{
redisConfiguration.ConfigurationOptions.User = configuration.GetSection("Redis").GetValue<string>("User");
}
hcBuilder.AddRedis(redisConfiguration.ConfigurationOptions.ToString(),
name: "redis",
tags: new string[] { "redis" },
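
For context, a minimal self-contained sketch of how the optional "Redis:User" key resolves (illustrative only; the key names mirror the code above, the host and user values are hypothetical):

using System;
using System.Collections.Generic;
using Microsoft.Extensions.Configuration;

var configuration = new ConfigurationBuilder()
    .AddInMemoryCollection(new Dictionary<string, string>
    {
        ["Redis:Hosts:0:Host"] = "localhost", // hypothetical host entry
        ["Redis:User"] = "backup-user"        // omit this key and GetValue returns null
    })
    .Build();

// Mirrors the null-check above: only assign ConfigurationOptions.User when the key exists.
var user = configuration.GetSection("Redis").GetValue<string>("User");
Console.WriteLine(user ?? "<no user configured>");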

View File

@ -34,7 +34,13 @@ public static class ServiceCollectionExtension
var rabbitMQConfiguration = configuration.GetSection("RabbitMQ").Get<RabbitMQSettings>();
if (redisConfiguration != null)
{
{
// https://github.com/imperugo/StackExchange.Redis.Extensions/issues/513
if (configuration.GetSection("Redis").GetValue<string>("User") != null)
{
redisConfiguration.ConfigurationOptions.User = configuration.GetSection("Redis").GetValue<string>("User");
}
services.AddStackExchangeRedisExtensions<NewtonsoftSerializer>(redisConfiguration);
services.AddSingleton(typeof(ICacheNotify<>), typeof(RedisCacheNotify<>));
@ -58,7 +64,13 @@ public static class ServiceCollectionExtension
var redisConfiguration = configuration.GetSection("Redis").Get<RedisConfiguration>();
if (redisConfiguration != null)
{
{
// https://github.com/imperugo/StackExchange.Redis.Extensions/issues/513
if (configuration.GetSection("Redis").GetValue<string>("User") != null)
{
redisConfiguration.ConfigurationOptions.User = configuration.GetSection("Redis").GetValue<string>("User");
}
services.AddStackExchangeRedisCache(config =>
{
config.ConfigurationOptions = redisConfiguration.ConfigurationOptions;

View File

@ -45,7 +45,7 @@ public class DbBackupProvider : IBackupProvider
public event EventHandler<ProgressChangedEventArgs> ProgressChanged;
public Task<IEnumerable<XElement>> GetElements(int tenant, string[] configs, IDataWriteOperator writer)
public async Task<IEnumerable<XElement>> GetElements(int tenant, string[] configs, IDataWriteOperator writer)
{
_processedTables.Clear();
var xml = new List<XElement>();
@ -66,11 +66,11 @@ public class DbBackupProvider : IBackupProvider
else
{
connectionKeys.Add(connectionKey, connectionString.Name);
node.Add(BackupDatabase(tenant, connectionString, writer));
node.Add(await BackupDatabase(tenant, connectionString, writer));
}
}
return Task.FromResult(xml.AsEnumerable());
return xml.AsEnumerable();
}
public Task LoadFrom(IEnumerable<XElement> elements, int tenant, string[] configs, IDataReadOperator reader)
@ -137,7 +137,7 @@ public class DbBackupProvider : IBackupProvider
return ConfigurationManager.OpenExeConfiguration(config);
}
private List<XElement> BackupDatabase(int tenant, ConnectionStringSettings connectionString, IDataWriteOperator writer)
private async Task<List<XElement>> BackupDatabase(int tenant, ConnectionStringSettings connectionString, IDataWriteOperator writer)
{
var xml = new List<XElement>();
var errors = 0;
@ -187,7 +187,7 @@ public class DbBackupProvider : IBackupProvider
using (var file = _tempStream.Create())
{
dataTable.WriteXml(file, XmlWriteMode.WriteSchema);
writer.WriteEntry($"{Name}\\{connectionString.Name}\\{table}".ToLower(), file);
await writer.WriteEntryAsync($"{Name}\\{connectionString.Name}\\{table}".ToLower(), file);
}
_processedTables.Add(table);
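
A hedged usage sketch of the now-async provider; the instances (provider, tenantId, configs, writer) are assumptions supplied by the backup task, only the awaited call pattern is the point:

// GetElements now runs its database work asynchronously instead of wrapping it in Task.FromResult.
var elements = await provider.GetElements(tenantId, configs, writer);
foreach (var element in elements)
{
    Console.WriteLine(element.Name); // each XElement describes one backed-up connection string
}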

View File

@ -32,16 +32,11 @@ global using System.Reflection;
global using System.Security.Cryptography;
global using System.ServiceModel;
global using System.Text;
global using System.Text.RegularExpressions;
global using System.Text.RegularExpressions;
global using System.Xml;
global using System.Xml.Linq;
global using System.Xml.Linq;
global using System.Xml.XPath;
global using Amazon;
global using Amazon.S3;
global using Amazon.S3.Model;
global using Amazon.S3.Transfer;
global using ASC.Api.Utils;
global using ASC.Common;
global using ASC.Common.Caching;
@ -50,6 +45,7 @@ global using ASC.Common.Threading;
global using ASC.Common.Utils;
global using ASC.Core;
global using ASC.Core.Billing;
global using ASC.Core.ChunkedUploader;
global using ASC.Core.Common.Configuration;
global using ASC.Core.Common.EF;
global using ASC.Core.Common.EF.Context;
@ -73,6 +69,7 @@ global using ASC.Data.Backup.Utils;
global using ASC.Data.Storage;
global using ASC.Data.Storage.Configuration;
global using ASC.Data.Storage.DiscStorage;
global using ASC.Data.Storage.ZipOperators;
global using ASC.EventBus.Events;
global using ASC.Files.Core;
global using ASC.MessagingSystem.Core;
@ -91,9 +88,6 @@ global using ASC.Web.Studio.Utility;
global using Autofac;
global using ICSharpCode.SharpZipLib.GZip;
global using ICSharpCode.SharpZipLib.Tar;
global using Microsoft.AspNetCore.Builder;
global using Microsoft.AspNetCore.Http;
global using Microsoft.EntityFrameworkCore;

View File

@ -23,31 +23,7 @@
// All the Product's GUI elements, including illustrations and icon sets, as well as technical writing
// content are licensed under the terms of the Creative Commons Attribution-ShareAlike 4.0
// International. See the License terms at http://creativecommons.org/licenses/by-sa/4.0/legalcode
/*
*
* (c) Copyright Ascensio System Limited 2010-2020
*
* This program is freeware. You can redistribute it and/or modify it under the terms of the GNU
* General Public License (GPL) version 3 as published by the Free Software Foundation (https://www.gnu.org/copyleft/gpl.html).
* In accordance with Section 7(a) of the GNU GPL its Section 15 shall be amended to the effect that
* Ascensio System SIA expressly excludes the warranty of non-infringement of any third-party rights.
*
* THIS PROGRAM IS DISTRIBUTED WITHOUT ANY WARRANTY; WITHOUT EVEN THE IMPLIED WARRANTY OF MERCHANTABILITY OR
* FITNESS FOR A PARTICULAR PURPOSE. For more details, see GNU GPL at https://www.gnu.org/copyleft/gpl.html
*
* You can contact Ascensio System SIA by email at sales@onlyoffice.com
*
* The interactive user interfaces in modified source and object code versions of ONLYOFFICE must display
* Appropriate Legal Notices, as required under Section 5 of the GNU GPL version 3.
*
* Pursuant to Section 7 § 3(b) of the GNU GPL you must retain the original ONLYOFFICE logo which contains
* relevant author attributions when distributing the software. If the display of the logo in its graphic
* form is not reasonably feasible for technical reasons, you must include the words "Powered by ONLYOFFICE"
* in every copy of the program you distribute.
* Pursuant to Section 7 § 3(e) we decline to grant you any rights under trademark law for use of our trademarks.
*
*/
namespace ASC.Data.Backup.Services;
@ -69,6 +45,7 @@ public class BackupProgressItem : BaseBackupProgressItem
private BackupStorageFactory _backupStorageFactory;
private BackupRepository _backupRepository;
private BackupPortalTask _backupPortalTask;
private TempStream _tempStream;
private readonly ILogger<BackupProgressItem> _logger;
private readonly CoreBaseSettings _coreBaseSettings;
private readonly NotifyHelper _notifyHelper;
@ -122,18 +99,23 @@ public class BackupProgressItem : BaseBackupProgressItem
_backupStorageFactory = scope.ServiceProvider.GetService<BackupStorageFactory>();
_backupRepository = scope.ServiceProvider.GetService<BackupRepository>();
_backupPortalTask = scope.ServiceProvider.GetService<BackupPortalTask>();
_tempStream = scope.ServiceProvider.GetService<TempStream>();
var dateTime = _coreBaseSettings.Standalone ? DateTime.Now : DateTime.UtcNow;
var backupName = string.Format("{0}_{1:yyyy-MM-dd_HH-mm-ss}.{2}", _tenantManager.GetTenant(TenantId).Alias, dateTime, ArchiveFormat);
var tempFile = CrossPlatform.PathCombine(TempFolder, backupName);
var storagePath = tempFile;
var storagePath = tempFile;
string hash;
try
{
var backupTask = _backupPortalTask;
var backupStorage = _backupStorageFactory.GetBackupStorage(_storageType, TenantId, StorageParams);
var writer = await ZipWriteOperatorFactory.GetWriteOperatorAsync(_tempStream, _storageBasePath, backupName, TempFolder, _userId, backupStorage as IGetterWriteOperator);
var backupTask = _backupPortalTask;
backupTask.Init(TenantId, tempFile, _limit);
backupTask.Init(TenantId, tempFile, _limit, writer);
backupTask.ProgressChanged += (sender, args) =>
{
@ -141,14 +123,19 @@ public class BackupProgressItem : BaseBackupProgressItem
PublishChanges();
};
await backupTask.RunJob();
var backupStorage = _backupStorageFactory.GetBackupStorage(_storageType, TenantId, StorageParams);
if (backupStorage != null)
{
storagePath = await backupStorage.Upload(_storageBasePath, tempFile, _userId);
Link = await backupStorage.GetPublicLink(storagePath);
}
await backupTask.RunJob();
if (writer.NeedUpload)
{
storagePath = await backupStorage.Upload(_storageBasePath, tempFile, _userId);
hash = BackupWorker.GetBackupHash(tempFile);
}
else
{
storagePath = writer.StoragePath;
hash = writer.Hash;
}
Link = await backupStorage.GetPublicLink(storagePath);
var repo = _backupRepository;
@ -165,7 +152,7 @@ public class BackupProgressItem : BaseBackupProgressItem
CreatedOn = DateTime.UtcNow,
ExpiresOn = _storageType == BackupStorageType.DataStore ? DateTime.UtcNow.AddDays(1) : DateTime.MinValue,
StorageParams = JsonConvert.SerializeObject(StorageParams),
Hash = BackupWorker.GetBackupHash(tempFile),
Hash = hash,
Removed = false
});

View File

@ -96,7 +96,8 @@ public class RestoreProgressItem : BaseBackupProgressItem
TenantId = request.TenantId;
Notify = request.NotifyAfterCompletion;
StoragePath = request.FilePathOrId;
StorageType = request.StorageType;
StorageType = request.StorageType;
StorageParams = request.StorageParams;
TempFolder = tempFolder;
_upgradesPath = upgradesPath;
_region = region;
@ -110,7 +111,6 @@ public class RestoreProgressItem : BaseBackupProgressItem
try
{
await using var scope = _serviceScopeProvider.CreateAsyncScope();
_tenantManager = scope.ServiceProvider.GetService<TenantManager>();
@ -186,7 +186,7 @@ public class RestoreProgressItem : BaseBackupProgressItem
}
_tenantManager.SaveTenant(restoredTenant);
_tenantManager.SetCurrentTenant(restoredTenant);
_tenantManager.SetCurrentTenant(restoredTenant);
TenantId = restoredTenant.Id;
_notifyHelper.SendAboutRestoreCompleted(restoredTenant, Notify);
@ -198,13 +198,13 @@ public class RestoreProgressItem : BaseBackupProgressItem
File.Delete(tempFile);
Percentage = 100;
Percentage = 100;
Status = DistributedTaskStatus.Completed;
}
catch (Exception error)
{
_logger.ErrorRestoreProgressItem(error);
Exception = error;
Exception = error;
Status = DistributedTaskStatus.Failted;
if (tenant != null)

View File

@ -31,7 +31,6 @@ public class BackupStorageFactory
{
private readonly ConfigurationExtension _configuration;
private readonly DocumentsBackupStorage _documentsBackupStorage;
private readonly DataStoreBackupStorage _dataStoreBackupStorage;
private readonly ILogger<BackupStorageFactory> _logger;
private readonly LocalBackupStorage _localBackupStorage;
private readonly ConsumerBackupStorage _consumerBackupStorage;
@ -43,12 +42,10 @@ public class BackupStorageFactory
ConfigurationExtension configuration,
DocumentsBackupStorage documentsBackupStorage,
TenantManager tenantManager,
DataStoreBackupStorage dataStoreBackupStorage,
ILogger<BackupStorageFactory> logger)
{
_configuration = configuration;
_documentsBackupStorage = documentsBackupStorage;
_dataStoreBackupStorage = dataStoreBackupStorage;
_logger = logger;
_localBackupStorage = localBackupStorage;
_consumerBackupStorage = consumerBackupStorage;
@ -78,15 +75,15 @@ public class BackupStorageFactory
case BackupStorageType.Documents:
case BackupStorageType.ThridpartyDocuments:
{
_documentsBackupStorage.Init(tenantId);
_documentsBackupStorage.Init(tenantId);
return _documentsBackupStorage;
}
case BackupStorageType.DataStore:
{
_dataStoreBackupStorage.Init(tenantId);
_consumerBackupStorage.Init(tenantId);
return _dataStoreBackupStorage;
return _consumerBackupStorage;
}
case BackupStorageType.Local:
return _localBackupStorage;

View File

@ -22,27 +22,50 @@
//
// All the Product's GUI elements, including illustrations and icon sets, as well as technical writing
// content are licensed under the terms of the Creative Commons Attribution-ShareAlike 4.0
// International. See the License terms at http://creativecommons.org/licenses/by-sa/4.0/legalcode
// International. See the License terms at http://creativecommons.org/licenses/by-sa/4.0/legalcode
namespace ASC.Data.Backup.Storage;
[Scope]
public class ConsumerBackupStorage : IBackupStorage
public class ConsumerBackupStorage : IBackupStorage, IGetterWriteOperator
{
private const string Domain = "backup";
private IDataStore _store;
private readonly StorageSettingsHelper _storageSettingsHelper;
private readonly StorageSettingsHelper _storageSettingsHelper;
private readonly TempPath _tempPath;
private readonly ILogger<ConsumerBackupStorage> _logger;
private readonly SetupInfo _setupInfo;
private readonly StorageFactory _storageFactory;
private bool _isTemporary;
private string Domain { get => _isTemporary ? "" : "backup"; }
private CommonChunkedUploadSessionHolder _sessionHolder;
public ConsumerBackupStorage(StorageSettingsHelper storageSettingsHelper)
public ConsumerBackupStorage(
StorageSettingsHelper storageSettingsHelper,
TempPath tempPath,
ILogger<ConsumerBackupStorage> logger,
SetupInfo setupInfo,
StorageFactory storageFactory)
{
_storageSettingsHelper = storageSettingsHelper;
_storageSettingsHelper = storageSettingsHelper;
_tempPath = tempPath;
_logger = logger;
_setupInfo = setupInfo;
_storageFactory = storageFactory;
}
public void Init(IReadOnlyDictionary<string, string> storageParams)
{
var settings = new StorageSettings { Module = storageParams["module"], Props = storageParams.Where(r => r.Key != "module").ToDictionary(r => r.Key, r => r.Value) };
_store = _storageSettingsHelper.DataStore(settings);
_store = _storageSettingsHelper.DataStore(settings);
_sessionHolder = new CommonChunkedUploadSessionHolder(_tempPath, _logger, _store, Domain, _setupInfo.ChunkUploadSize);
}
public void Init(int tenant)
{
_isTemporary = true;
_store = _storageFactory.GetStorage(tenant, "backup");
_sessionHolder = new CommonChunkedUploadSessionHolder(_tempPath, _logger, _store, Domain, _setupInfo.ChunkUploadSize);
}
public async Task<string> Upload(string storageBasePath, string localPath, Guid userId)
@ -82,6 +105,23 @@ public class ConsumerBackupStorage : IBackupStorage
public async Task<string> GetPublicLink(string storagePath)
{
return (await _store.GetInternalUriAsync(Domain, storagePath, TimeSpan.FromDays(1), null)).AbsoluteUri;
if (_isTemporary)
{
return (await _store.GetPreSignedUriAsync(Domain, storagePath, TimeSpan.FromDays(1), null)).ToString();
}
else
{
return (await _store.GetInternalUriAsync(Domain, storagePath, TimeSpan.FromDays(1), null)).AbsoluteUri;
}
}
public async Task<IDataWriteOperator> GetWriteOperatorAsync(string storageBasePath, string title, Guid userId)
{
var session = new CommonChunkedUploadSession(-1)
{
TempPath = title,
UploadId = await _store.InitiateChunkedUploadAsync(Domain, title)
};
return _store.CreateDataWriteOperator(session, _sessionHolder);
}
}
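
For illustration, a sketch of the storageParams dictionary the first Init overload expects (consumerBackupStorage is an assumed instance; apart from "module", the keys and values are hypothetical):

var storageParams = new Dictionary<string, string>
{
    ["module"] = "s3",            // names the configured storage module
    ["bucket"] = "backup-bucket", // every pair except "module" becomes StorageSettings.Props
    ["region"] = "us-east-1"
};
consumerBackupStorage.Init(storageParams); // the chunked-session holder is then created against that store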

View File

@ -1,84 +0,0 @@
// (c) Copyright Ascensio System SIA 2010-2022
//
// This program is a free software product.
// You can redistribute it and/or modify it under the terms
// of the GNU Affero General Public License (AGPL) version 3 as published by the Free Software
// Foundation. In accordance with Section 7(a) of the GNU AGPL its Section 15 shall be amended
// to the effect that Ascensio System SIA expressly excludes the warranty of non-infringement of
// any third-party rights.
//
// This program is distributed WITHOUT ANY WARRANTY, without even the implied warranty
// of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. For details, see
// the GNU AGPL at: http://www.gnu.org/licenses/agpl-3.0.html
//
// You can contact Ascensio System SIA at Lubanas st. 125a-25, Riga, Latvia, EU, LV-1021.
//
// The interactive user interfaces in modified source and object code versions of the Program must
// display Appropriate Legal Notices, as required under Section 5 of the GNU AGPL version 3.
//
// Pursuant to Section 7(b) of the License you must retain the original Product logo when
// distributing the program. Pursuant to Section 7(e) we decline to grant you any rights under
// trademark law for use of our trademarks.
//
// All the Product's GUI elements, including illustrations and icon sets, as well as technical writing
// content are licensed under the terms of the Creative Commons Attribution-ShareAlike 4.0
// International. See the License terms at http://creativecommons.org/licenses/by-sa/4.0/legalcode
namespace ASC.Data.Backup.Storage;
[Scope]
public class DataStoreBackupStorage : IBackupStorage
{
private int _tenant;
private readonly StorageFactory _storageFactory;
public DataStoreBackupStorage(StorageFactory storageFactory)
{
_storageFactory = storageFactory;
}
public void Init(int tenant)
{
_tenant = tenant;
}
public async Task<string> Upload(string storageBasePath, string localPath, Guid userId)
{
using var stream = File.OpenRead(localPath);
var storagePath = Path.GetFileName(localPath);
await GetDataStore().SaveAsync("", storagePath, stream);
return storagePath;
}
public async Task Download(string storagePath, string targetLocalPath)
{
using var source = await GetDataStore().GetReadStreamAsync("", storagePath);
using var destination = File.OpenWrite(targetLocalPath);
source.CopyTo(destination);
}
public async Task Delete(string storagePath)
{
var dataStore = GetDataStore();
if (await dataStore.IsFileAsync("", storagePath))
{
await dataStore.DeleteAsync("", storagePath);
}
}
public async Task<bool> IsExists(string storagePath)
{
return await GetDataStore().IsFileAsync("", storagePath);
}
public async Task<string> GetPublicLink(string storagePath)
{
return (await GetDataStore().GetPreSignedUriAsync("", storagePath, TimeSpan.FromDays(1), null)).ToString();
}
protected virtual IDataStore GetDataStore()
{
return _storageFactory.GetStorage(_tenant, "backup");
}
}

View File

@ -24,10 +24,12 @@
// content are licensed under the terms of the Creative Commons Attribution-ShareAlike 4.0
// International. See the License terms at http://creativecommons.org/licenses/by-sa/4.0/legalcode
using ASC.Web.Files.Utils;
namespace ASC.Data.Backup.Storage;
[Scope]
public class DocumentsBackupStorage : IBackupStorage
public class DocumentsBackupStorage : IBackupStorage, IGetterWriteOperator
{
private int _tenantId;
private readonly SetupInfo _setupInfo;
@ -35,7 +37,10 @@ public class DocumentsBackupStorage : IBackupStorage
private readonly SecurityContext _securityContext;
private readonly IDaoFactory _daoFactory;
private readonly StorageFactory _storageFactory;
private readonly IServiceProvider _serviceProvider;
private readonly IServiceProvider _serviceProvider;
private FilesChunkedUploadSessionHolder _sessionHolder;
private readonly TempPath _tempPath;
private readonly ILogger<DocumentsBackupStorage> _logger;
public DocumentsBackupStorage(
SetupInfo setupInfo,
@ -43,21 +48,27 @@ public class DocumentsBackupStorage : IBackupStorage
SecurityContext securityContext,
IDaoFactory daoFactory,
StorageFactory storageFactory,
IServiceProvider serviceProvider)
IServiceProvider serviceProvider,
TempPath tempPath,
ILogger<DocumentsBackupStorage> logger)
{
_setupInfo = setupInfo;
_tenantManager = tenantManager;
_securityContext = securityContext;
_daoFactory = daoFactory;
_storageFactory = storageFactory;
_serviceProvider = serviceProvider;
_serviceProvider = serviceProvider;
_tempPath = tempPath;
_logger = logger;
}
public void Init(int tenantId)
{
_tenantId = tenantId;
}
var store = _storageFactory.GetStorage(_tenantId, "files");
_sessionHolder = new FilesChunkedUploadSessionHolder(_daoFactory, _tempPath, _logger, store, "", _setupInfo.ChunkUploadSize);
}
public async Task<string> Upload(string folderId, string localPath, Guid userId)
{
_tenantManager.SetCurrentTenant(_tenantId);
@ -195,6 +206,48 @@ public class DocumentsBackupStorage : IBackupStorage
{
return false;
}
}
public async Task<IDataWriteOperator> GetWriteOperatorAsync(string storageBasePath, string title, Guid userId)
{
_tenantManager.SetCurrentTenant(_tenantId);
if (!userId.Equals(Guid.Empty))
{
_securityContext.AuthenticateMeWithoutCookie(userId);
}
else
{
var tenant = _tenantManager.GetTenant(_tenantId);
_securityContext.AuthenticateMeWithoutCookie(tenant.OwnerId);
}
if (int.TryParse(storageBasePath, out var fId))
{
var uploadSession = await InitUploadChunkAsync(fId, title);
var folderDao = GetFolderDao<int>();
return folderDao.CreateDataWriteOperator(fId, uploadSession, _sessionHolder);
}
else
{
var uploadSession = await InitUploadChunkAsync(storageBasePath, title);
var folderDao = GetFolderDao<string>();
return folderDao.CreateDataWriteOperator(storageBasePath, uploadSession, _sessionHolder);
}
}
private async Task<CommonChunkedUploadSession> InitUploadChunkAsync<T>(T folderId, string title)
{
var folderDao = GetFolderDao<T>();
var fileDao = GetFileDao<T>();
var folder = await folderDao.GetFolderAsync(folderId);
var newFile = _serviceProvider.GetService<File<T>>();
newFile.Title = title;
newFile.ParentId = folder.Id;
var chunkedUploadSession = await fileDao.CreateUploadSessionAsync(newFile, -1);
chunkedUploadSession.CheckQuota = false;
return chunkedUploadSession;
}
private IFolderDao<T> GetFolderDao<T>()

View File

@ -27,7 +27,7 @@
namespace ASC.Data.Backup.Storage;
[Scope]
public class LocalBackupStorage : IBackupStorage
public class LocalBackupStorage : IBackupStorage, IGetterWriteOperator
{
public Task<string> Upload(string storageBasePath, string localPath, Guid userId)
{
@ -66,4 +66,9 @@ public class LocalBackupStorage : IBackupStorage
{
return Task.FromResult(string.Empty);
}
public Task<IDataWriteOperator> GetWriteOperatorAsync(string storageBasePath, string title, Guid userId)
{
return Task.FromResult<IDataWriteOperator>(null);
}
}

View File

@ -1,145 +0,0 @@
// (c) Copyright Ascensio System SIA 2010-2022
//
// This program is a free software product.
// You can redistribute it and/or modify it under the terms
// of the GNU Affero General Public License (AGPL) version 3 as published by the Free Software
// Foundation. In accordance with Section 7(a) of the GNU AGPL its Section 15 shall be amended
// to the effect that Ascensio System SIA expressly excludes the warranty of non-infringement of
// any third-party rights.
//
// This program is distributed WITHOUT ANY WARRANTY, without even the implied warranty
// of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. For details, see
// the GNU AGPL at: http://www.gnu.org/licenses/agpl-3.0.html
//
// You can contact Ascensio System SIA at Lubanas st. 125a-25, Riga, Latvia, EU, LV-1021.
//
// The interactive user interfaces in modified source and object code versions of the Program must
// display Appropriate Legal Notices, as required under Section 5 of the GNU AGPL version 3.
//
// Pursuant to Section 7(b) of the License you must retain the original Product logo when
// distributing the program. Pursuant to Section 7(e) we decline to grant you any rights under
// trademark law for use of our trademarks.
//
// All the Product's GUI elements, including illustrations and icon sets, as well as technical writing
// content are licensed under the terms of the Creative Commons Attribution-ShareAlike 4.0
// International. See the License terms at http://creativecommons.org/licenses/by-sa/4.0/legalcode
namespace ASC.Data.Backup.Storage;
internal class S3BackupStorage : IBackupStorage
{
private readonly string _accessKeyId;
private readonly string _secretAccessKey;
private readonly string _bucket;
private readonly string _region;
private readonly ILogger _logger;
public S3BackupStorage(ILogger<S3BackupStorage> logger, string accessKeyId, string secretAccessKey, string bucket, string region)
{
_logger = logger;
_accessKeyId = accessKeyId;
_secretAccessKey = secretAccessKey;
_bucket = bucket;
_region = region;
}
public async Task<string> Upload(string storageBasePath, string localPath, Guid userId)
{
string key;
if (string.IsNullOrEmpty(storageBasePath))
{
key = "backup/" + Path.GetFileName(localPath);
}
else
{
key = string.Concat(storageBasePath.Trim(new char[] { ' ', '/', '\\' }), "/", Path.GetFileName(localPath));
}
using (var fileTransferUtility = new TransferUtility(_accessKeyId, _secretAccessKey, RegionEndpoint.GetBySystemName(_region)))
{
await fileTransferUtility.UploadAsync(
new TransferUtilityUploadRequest
{
BucketName = _bucket,
FilePath = localPath,
StorageClass = S3StorageClass.StandardInfrequentAccess,
PartSize = 6291456, // 6 MB.
Key = key
});
}
return key;
}
public async Task Download(string storagePath, string targetLocalPath)
{
var request = new GetObjectRequest
{
BucketName = _bucket,
Key = GetKey(storagePath),
};
using var s3 = GetClient();
using var response = await s3.GetObjectAsync(request);
await response.WriteResponseStreamToFileAsync(targetLocalPath, true, new CancellationToken());
}
public async Task Delete(string storagePath)
{
using var s3 = GetClient();
await s3.DeleteObjectAsync(new DeleteObjectRequest
{
BucketName = _bucket,
Key = GetKey(storagePath)
});
}
public async Task<bool> IsExists(string storagePath)
{
using var s3 = GetClient();
try
{
var request = new ListObjectsRequest { BucketName = _bucket, Prefix = GetKey(storagePath) };
var response = await s3.ListObjectsAsync(request);
return response.S3Objects.Count > 0;
}
catch (AmazonS3Exception ex)
{
_logger.WarningWithException(ex);
return false;
}
}
public Task<string> GetPublicLink(string storagePath)
{
using var s3 = GetClient();
return Task.FromResult(s3.GetPreSignedURL(
new GetPreSignedUrlRequest
{
BucketName = _bucket,
Key = GetKey(storagePath),
Expires = DateTime.UtcNow.AddDays(1),
Verb = HttpVerb.GET
}));
}
private string GetKey(string fileName)
{
// return "backup/" + Path.GetFileName(fileName);
return fileName;
}
private AmazonS3Client GetClient()
{
return new AmazonS3Client(_accessKeyId, _secretAccessKey,
new AmazonS3Config
{
RegionEndpoint = RegionEndpoint.GetBySystemName(_region)
});
}
}

View File

@ -60,12 +60,13 @@ public class BackupPortalTask : PortalTaskBase
_tempStream = tempStream;
}
public void Init(int tenantId, string toFilePath, int limit)
public void Init(int tenantId, string toFilePath, int limit, IDataWriteOperator writeOperator)
{
ArgumentNullOrEmptyException.ThrowIfNullOrEmpty(toFilePath);
BackupFilePath = toFilePath;
Limit = limit;
Limit = limit;
WriteOperator = writeOperator;
Init(tenantId);
}
@ -75,12 +76,11 @@ public class BackupPortalTask : PortalTaskBase
_logger.DebugBeginBackup(TenantId);
_tenantManager.SetCurrentTenant(TenantId);
using (var writer = new ZipWriteOperator(_tempStream, BackupFilePath))
await using (WriteOperator)
{
if (_dump)
{
await DoDump(writer);
await DoDump(WriteOperator);
}
else
{
@ -93,11 +93,11 @@ public class BackupPortalTask : PortalTaskBase
foreach (var module in modulesToProcess)
{
DoBackupModule(writer, module);
await DoBackupModule(WriteOperator, module);
}
if (ProcessStorage)
{
await DoBackupStorage(writer, fileGroups);
await DoBackupStorage(WriteOperator, fileGroups);
}
}
}
@ -157,7 +157,7 @@ public class BackupPortalTask : PortalTaskBase
using (var stream = new MemoryStream(Encoding.UTF8.GetBytes(true.ToString())))
{
writer.WriteEntry(KeyHelper.GetDumpKey(), stream);
await writer.WriteEntryAsync(KeyHelper.GetDumpKey(), stream);
}
var files = new List<BackupFileInfo>();
@ -184,7 +184,7 @@ public class BackupPortalTask : PortalTaskBase
foreach (var db in databases)
{
DoDump(writer, db.Key.Item1, db.Key.Item2, db.Value);
await DoDump(writer, db.Key.Item1, db.Key.Item2, db.Value);
}
var dir = Path.GetDirectoryName(BackupFilePath);
var subDir = Path.Combine(dir, Path.GetFileNameWithoutExtension(BackupFilePath));
@ -194,11 +194,11 @@ public class BackupPortalTask : PortalTaskBase
if (ProcessStorage)
{
DoDumpStorage(writer, files);
await DoDumpStorage(writer, files);
}
}
private void DoDump(IDataWriteOperator writer, string dbName, string connectionString, List<string> tables)
private async Task DoDump(IDataWriteOperator writer, string dbName, string connectionString, List<string> tables)
{
var excluded = ModuleProvider.AllModules.Where(r => _ignoredModules.Contains(r.ModuleName)).SelectMany(r => r.Tables).Select(r => r.Name).ToList();
excluded.AddRange(_ignoredTables);
@ -254,7 +254,7 @@ public class BackupPortalTask : PortalTaskBase
Task.WaitAll(tasks.ToArray());
ArchiveDir(writer, subDir);
await ArchiveDir(writer, subDir);
}
}
@ -524,7 +524,7 @@ public class BackupPortalTask : PortalTaskBase
}
}
private void DoDumpStorage(IDataWriteOperator writer, IReadOnlyList<BackupFileInfo> files)
private async Task DoDumpStorage(IDataWriteOperator writer, IReadOnlyList<BackupFileInfo> files)
{
_logger.DebugBeginBackupStorage();
@ -549,7 +549,7 @@ public class BackupPortalTask : PortalTaskBase
Task.WaitAll(tasks.ToArray());
ArchiveDir(writer, subDir);
await ArchiveDir(writer, subDir);
Directory.Delete(storageDir, true);
}
@ -562,7 +562,7 @@ public class BackupPortalTask : PortalTaskBase
using (var tmpFile = new FileStream(tmpPath, FileMode.OpenOrCreate, FileAccess.ReadWrite, FileShare.Read, 4096, FileOptions.DeleteOnClose))
{
restoreInfoXml.WriteTo(tmpFile);
writer.WriteEntry(KeyHelper.GetStorageRestoreInfoZipKey(), tmpFile);
await writer.WriteEntryAsync(KeyHelper.GetStorageRestoreInfoZipKey(), tmpFile);
}
SetStepCompleted();
@ -599,7 +599,7 @@ public class BackupPortalTask : PortalTaskBase
SetStepCompleted();
}
private void ArchiveDir(IDataWriteOperator writer, string subDir)
private async Task ArchiveDir(IDataWriteOperator writer, string subDir)
{
_logger.DebugArchiveDirStart(subDir);
foreach (var enumerateFile in Directory.EnumerateFiles(subDir, "*", SearchOption.AllDirectories))
@ -612,7 +612,7 @@ public class BackupPortalTask : PortalTaskBase
using (var tmpFile = new FileStream(f, FileMode.OpenOrCreate, FileAccess.ReadWrite, FileShare.Read, 4096, FileOptions.DeleteOnClose))
{
writer.WriteEntry(enumerateFile.Substring(subDir.Length), tmpFile);
await writer.WriteEntryAsync(enumerateFile.Substring(subDir.Length), tmpFile);
}
SetStepCompleted();
@ -633,7 +633,7 @@ public class BackupPortalTask : PortalTaskBase
return files.GroupBy(file => file.Module).ToList();
}
private void DoBackupModule(IDataWriteOperator writer, IModuleSpecifics module)
private async Task DoBackupModule(IDataWriteOperator writer, IModuleSpecifics module)
{
_logger.DebugBeginSavingDataForModule(module.ModuleName);
var tablesToProcess = module.Tables.Where(t => !_ignoredTables.Contains(t.Name) && t.InsertMethod != InsertMethod.None).ToList();
@ -684,7 +684,7 @@ public class BackupPortalTask : PortalTaskBase
data.WriteXml(file, XmlWriteMode.WriteSchema);
data.Clear();
writer.WriteEntry(KeyHelper.GetTableZipKey(module, data.TableName), file);
await writer.WriteEntryAsync(KeyHelper.GetTableZipKey(module, data.TableName), file);
}
_logger.DebugEndSavingTable(table.Name);
@ -714,7 +714,7 @@ public class BackupPortalTask : PortalTaskBase
{
var f = (BackupFileInfo)state;
using var fileStream = await storage.GetReadStreamAsync(f.Domain, f.Path);
writer.WriteEntry(file1.GetZipKey(), fileStream);
await writer.WriteEntryAsync(file1.GetZipKey(), fileStream);
}, file, 5, error => _logger.WarningCanNotBackupFile(file1.Module, file1.Path, error));
SetCurrentStepProgress((int)(++filesProcessed * 100 / (double)filesCount));
@ -730,7 +730,7 @@ public class BackupPortalTask : PortalTaskBase
using (var tmpFile = _tempStream.Create())
{
restoreInfoXml.WriteTo(tmpFile);
writer.WriteEntry(KeyHelper.GetStorageRestoreInfoZipKey(), tmpFile);
await writer.WriteEntryAsync(KeyHelper.GetStorageRestoreInfoZipKey(), tmpFile);
}
_logger.DebugEndBackupStorage();

View File

@ -45,7 +45,8 @@ public abstract class PortalTaskBase
protected ILogger Logger { get; set; }
public int Progress { get; private set; }
public int TenantId { get; private set; }
public bool ProcessStorage { get; set; }
public bool ProcessStorage { get; set; }
protected IDataWriteOperator WriteOperator { get; set; }
protected ModuleProvider ModuleProvider { get; set; }
protected DbFactory DbFactory { get; set; }

View File

@ -96,7 +96,7 @@ public class TransferPortalTask : PortalTaskBase
//save db data to temporary file
var backupTask = _serviceProvider.GetService<BackupPortalTask>();
backupTask.Init(TenantId, backupFilePath, Limit);
backupTask.Init(TenantId, backupFilePath, Limit, ZipWriteOperatorFactory.GetDefaultWriteOperator(_tempStream, backupFilePath));
backupTask.ProcessStorage = false;
backupTask.ProgressChanged += (sender, args) => SetCurrentStepProgress(args.Progress);
foreach (var moduleName in _ignoredModules)

View File

@ -45,6 +45,7 @@
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="Rackspace" Version="1.0.0" />
<PackageReference Include="SharpZipLib" Version="1.4.2" />
</ItemGroup>
<ItemGroup>

View File

@ -192,6 +192,13 @@ public abstract class BaseStorage : IDataStore
throw new NotImplementedException();
}
public virtual IDataWriteOperator CreateDataWriteOperator(
CommonChunkedUploadSession chunkedUploadSession,
CommonChunkedUploadSessionHolder sessionHolder)
{
return new ChunkZipWriteOperator(_tempStream, chunkedUploadSession, sessionHolder);
}
#endregion
public abstract Task DeleteAsync(string domain, string path);

View File

@ -34,7 +34,8 @@ public class CommonChunkedUploadSession : ICloneable
public DateTime Expired { get; set; }
public string Location { get; set; }
public long BytesUploaded { get; set; }
public long BytesTotal { get; set; }
public long BytesTotal { get; set; }
public bool LastChunk { get; set; }
public int TenantId { get; set; }
public Guid UserId { get; set; }
public bool UseChunks { get; set; }
@ -72,7 +73,8 @@ public class CommonChunkedUploadSession : ICloneable
Created = DateTime.UtcNow;
BytesUploaded = 0;
BytesTotal = bytesTotal;
UseChunks = true;
UseChunks = true;
LastChunk = false;
}
public T GetItemOrDefault<T>(string key)

View File

@ -34,9 +34,10 @@ public class CommonChunkedUploadSessionHolder
private readonly TempPath _tempPath;
private readonly ILogger _logger;
private readonly string _domain;
private readonly long _maxChunkUploadSize;
public long MaxChunkUploadSize;
private const string StoragePath = "sessions";
private readonly object _locker = new object();
public CommonChunkedUploadSessionHolder(
TempPath tempPath,
@ -49,7 +50,7 @@ public class CommonChunkedUploadSessionHolder
_logger = logger;
DataStore = dataStore;
_domain = domain;
_maxChunkUploadSize = maxChunkUploadSize;
MaxChunkUploadSize = maxChunkUploadSize;
}
public async Task DeleteExpiredAsync()
@ -83,7 +84,7 @@ public class CommonChunkedUploadSessionHolder
public Task InitAsync(CommonChunkedUploadSession chunkedUploadSession)
{
if (chunkedUploadSession.BytesTotal < _maxChunkUploadSize)
if (chunkedUploadSession.BytesTotal < MaxChunkUploadSize && chunkedUploadSession.BytesTotal != -1)
{
chunkedUploadSession.UseChunks = false;
return Task.CompletedTask;
@ -101,15 +102,14 @@ public class CommonChunkedUploadSessionHolder
chunkedUploadSession.UploadId = uploadId;
}
public async Task FinalizeAsync(CommonChunkedUploadSession uploadSession)
public virtual async Task<string> FinalizeAsync(CommonChunkedUploadSession uploadSession)
{
var tempPath = uploadSession.TempPath;
var uploadId = uploadSession.UploadId;
var eTags = uploadSession.GetItemOrDefault<List<string>>("ETag")
.Select((x, i) => new KeyValuePair<int, string>(i + 1, x))
.ToDictionary(x => x.Key, x => x.Value);
var eTags = uploadSession.GetItemOrDefault<Dictionary<int, string>>("ETag");
await DataStore.FinalizeChunkedUploadAsync(_domain, tempPath, uploadId, eTags);
await DataStore.FinalizeChunkedUploadAsync(_domain, tempPath, uploadId, eTags);
return Path.GetFileName(tempPath);
}
public async Task MoveAsync(CommonChunkedUploadSession chunkedUploadSession, string newPath, bool quotaCheckFileSize = true)
@ -132,23 +132,29 @@ public class CommonChunkedUploadSessionHolder
}
}
public async Task UploadChunkAsync(CommonChunkedUploadSession uploadSession, Stream stream, long length)
public virtual async Task<string> UploadChunkAsync(CommonChunkedUploadSession uploadSession, Stream stream, long length)
{
var tempPath = uploadSession.TempPath;
var uploadId = uploadSession.UploadId;
int chunkNumber;
int.TryParse(uploadSession.GetItemOrDefault<string>("ChunksUploaded"), out chunkNumber);
chunkNumber++;
lock (_locker)
{
int.TryParse(uploadSession.GetItemOrDefault<string>("ChunksUploaded"), out chunkNumber);
chunkNumber++;
uploadSession.Items["ChunksUploaded"] = chunkNumber.ToString();
uploadSession.BytesUploaded += length;
}
var eTag = await DataStore.UploadChunkAsync(_domain, tempPath, uploadId, stream, _maxChunkUploadSize, chunkNumber, length);
var eTag = await DataStore.UploadChunkAsync(_domain, tempPath, uploadId, stream, MaxChunkUploadSize, chunkNumber, length);
uploadSession.Items["ChunksUploaded"] = chunkNumber.ToString();
uploadSession.BytesUploaded += length;
var eTags = uploadSession.GetItemOrDefault<List<string>>("ETag") ?? new List<string>();
eTags.Add(eTag);
uploadSession.Items["ETag"] = eTags;
lock (_locker)
{
var eTags = uploadSession.GetItemOrDefault<Dictionary<int, string>>("ETag") ?? new Dictionary<int, string>();
eTags.Add(chunkNumber, eTag);
uploadSession.Items["ETag"] = eTags;
}
return Path.GetFileName(tempPath);
}
public Stream UploadSingleChunk(CommonChunkedUploadSession uploadSession, Stream stream, long chunkLength)
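
A self-contained sketch of why the ETag collection became a Dictionary keyed by chunk number: chunk uploads may now finish out of order, and finalization needs part-number/ETag pairs rather than list order (values are made up):

using System;
using System.Collections.Generic;
using System.Linq;

var eTags = new Dictionary<int, string>();
eTags.Add(3, "etag-c"); // chunk 3 happened to finish first
eTags.Add(1, "etag-a");
eTags.Add(2, "etag-b");

foreach (var (part, eTag) in eTags.OrderBy(p => p.Key))
{
    Console.WriteLine($"part {part}: {eTag}");
}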

View File

@ -66,6 +66,7 @@ global using ASC.Data.Storage.GoogleCloud;
global using ASC.Data.Storage.Log;
global using ASC.Data.Storage.RackspaceCloud;
global using ASC.Data.Storage.S3;
global using ASC.Data.Storage.ZipOperators;
global using ASC.EventBus.Events;
global using ASC.Notify.Messages;
global using ASC.Protos.Migration;
@ -74,6 +75,9 @@ global using ASC.Security.Cryptography;
global using Google.Apis.Auth.OAuth2;
global using Google.Cloud.Storage.V1;
global using ICSharpCode.SharpZipLib.GZip;
global using ICSharpCode.SharpZipLib.Tar;
global using Microsoft.AspNetCore.Builder;
global using Microsoft.AspNetCore.Hosting;
global using Microsoft.AspNetCore.Http;

View File

@ -31,6 +31,10 @@ namespace ASC.Data.Storage;
///</summary>
public interface IDataStore
{
IDataWriteOperator CreateDataWriteOperator(
CommonChunkedUploadSession chunkedUploadSession,
CommonChunkedUploadSessionHolder sessionHolder);
IQuotaController QuotaController { get; set; }
TimeSpan GetExpire(string domain);

View File

@ -383,6 +383,12 @@ public class S3Storage : BaseStorage
await s3.AbortMultipartUploadAsync(request);
}
public override IDataWriteOperator CreateDataWriteOperator(CommonChunkedUploadSession chunkedUploadSession,
CommonChunkedUploadSessionHolder sessionHolder)
{
return new S3ZipWriteOperator(_tempStream, chunkedUploadSession, sessionHolder);
}
#endregion
public override async Task DeleteAsync(string domain, string path)

View File

@ -0,0 +1,142 @@
// (c) Copyright Ascensio System SIA 2010-2022
//
// This program is a free software product.
// You can redistribute it and/or modify it under the terms
// of the GNU Affero General Public License (AGPL) version 3 as published by the Free Software
// Foundation. In accordance with Section 7(a) of the GNU AGPL its Section 15 shall be amended
// to the effect that Ascensio System SIA expressly excludes the warranty of non-infringement of
// any third-party rights.
//
// This program is distributed WITHOUT ANY WARRANTY, without even the implied warranty
// of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. For details, see
// the GNU AGPL at: http://www.gnu.org/licenses/agpl-3.0.html
//
// You can contact Ascensio System SIA at Lubanas st. 125a-25, Riga, Latvia, EU, LV-1021.
//
// The interactive user interfaces in modified source and object code versions of the Program must
// display Appropriate Legal Notices, as required under Section 5 of the GNU AGPL version 3.
//
// Pursuant to Section 7(b) of the License you must retain the original Product logo when
// distributing the program. Pursuant to Section 7(e) we decline to grant you any rights under
// trademark law for use of our trademarks.
//
// All the Product's GUI elements, including illustrations and icon sets, as well as technical writing
// content are licensed under the terms of the Creative Commons Attribution-ShareAlike 4.0
// International. See the License terms at http://creativecommons.org/licenses/by-sa/4.0/legalcode
namespace ASC.Data.Storage.ZipOperators;
public class ChunkZipWriteOperator : IDataWriteOperator
{
private readonly TarOutputStream _tarOutputStream;
private readonly GZipOutputStream _gZipOutputStream;
private readonly CommonChunkedUploadSession _chunkedUploadSession;
private readonly CommonChunkedUploadSessionHolder _sessionHolder;
private readonly SHA256 _sha;
private Stream _fileStream;
private readonly TempStream _tempStream;
public string Hash { get; private set; }
public string StoragePath { get; private set; }
public bool NeedUpload
{
get
{
return false;
}
}
public ChunkZipWriteOperator(TempStream tempStream,
CommonChunkedUploadSession chunkedUploadSession,
CommonChunkedUploadSessionHolder sessionHolder)
{
_tempStream = tempStream;
_chunkedUploadSession = chunkedUploadSession;
_sessionHolder = sessionHolder;
_fileStream = _tempStream.Create();
_gZipOutputStream = new GZipOutputStream(_fileStream)
{
IsStreamOwner = false
};
_tarOutputStream = new TarOutputStream(_gZipOutputStream, Encoding.UTF8);
_sha = SHA256.Create();
}
public async Task WriteEntryAsync(string key, Stream stream)
{
if (_fileStream == null)
{
_fileStream = _tempStream.Create();
_gZipOutputStream.baseOutputStream_ = _fileStream;
}
using (var buffered = _tempStream.GetBuffered(stream))
{
var entry = TarEntry.CreateTarEntry(key);
entry.Size = buffered.Length;
await _tarOutputStream.PutNextEntryAsync(entry, default);
buffered.Position = 0;
await buffered.CopyToAsync(_tarOutputStream);
await _tarOutputStream.FlushAsync();
await _tarOutputStream.CloseEntryAsync(default);
}
if (_fileStream.Length > _sessionHolder.MaxChunkUploadSize)
{
await UploadAsync(false);
}
}
private async Task UploadAsync(bool last)
{
var chunkUploadSize = _sessionHolder.MaxChunkUploadSize;
var buffer = new byte[chunkUploadSize];
int bytesRead;
_fileStream.Position = 0;
while ((bytesRead = _fileStream.Read(buffer, 0, (int)chunkUploadSize)) > 0)
{
using (var theMemStream = new MemoryStream())
{
await theMemStream.WriteAsync(buffer, 0, bytesRead);
theMemStream.Position = 0;
if (bytesRead == chunkUploadSize || last)
{
if (_fileStream.Position == _fileStream.Length)
{
_chunkedUploadSession.LastChunk = true;
}
theMemStream.Position = 0;
StoragePath = await _sessionHolder.UploadChunkAsync(_chunkedUploadSession, theMemStream, theMemStream.Length);
}
else
{
await _fileStream.DisposeAsync();
_fileStream = _tempStream.Create();
_gZipOutputStream.baseOutputStream_ = _fileStream;
await theMemStream.CopyToAsync(_fileStream);
_fileStream.Flush();
}
_sha.TransformBlock(buffer, 0, bytesRead, buffer, 0);
}
}
if (last)
{
_sha.TransformFinalBlock(buffer, 0, 0);
}
}
public async ValueTask DisposeAsync()
{
_tarOutputStream.Close();
_tarOutputStream.Dispose();
await UploadAsync(true);
_fileStream.Dispose();
Hash = BitConverter.ToString(_sha.Hash).Replace("-", string.Empty);
_sha.Dispose();
}
}
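
A self-contained sketch of the incremental hashing pattern ChunkZipWriteOperator relies on: TransformBlock per chunk, TransformFinalBlock at the end, which yields the same digest as hashing the whole payload at once (sample data only):

using System;
using System.Security.Cryptography;
using System.Text;

var chunks = new[] { "first chunk", "second chunk", "last chunk" };
using var sha = SHA256.Create();
foreach (var chunk in chunks)
{
    var bytes = Encoding.UTF8.GetBytes(chunk);
    sha.TransformBlock(bytes, 0, bytes.Length, bytes, 0); // feed one chunk at a time
}
sha.TransformFinalBlock(Array.Empty<byte>(), 0, 0);        // finish the hash
Console.WriteLine(BitConverter.ToString(sha.Hash).Replace("-", string.Empty));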

View File

@ -24,16 +24,20 @@
// content are licensed under the terms of the Creative Commons Attribution-ShareAlike 4.0
// International. See the License terms at http://creativecommons.org/licenses/by-sa/4.0/legalcode
namespace ASC.Data.Backup;
namespace ASC.Data.Storage.ZipOperators;
public interface IDataWriteOperator : IDisposable
public interface IDataWriteOperator : IAsyncDisposable
{
void WriteEntry(string key, Stream stream);
Task WriteEntryAsync(string key, Stream stream);
bool NeedUpload { get; }
string Hash { get; }
string StoragePath { get; }
}
public interface IDataReadOperator : IDisposable
{
Stream GetEntry(string key);
IEnumerable<string> GetEntries(string key);
IEnumerable<string> GetEntries(string key);
IEnumerable<string> GetDirectories(string key);
}
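
A hedged sketch of how a backup task is expected to drive the async operator (getter, storageBasePath, userId and entryStream are assumptions; the NeedUpload branch mirrors BackupProgressItem above):

var writer = await getter.GetWriteOperatorAsync(storageBasePath, "backup.tar.gz", userId);
await using (writer)
{
    await writer.WriteEntryAsync("databases/core", entryStream); // one tar entry per table or file
}

if (writer.NeedUpload)
{
    // plain ZipWriteOperator: a local archive was produced and still has to be uploaded and hashed
}
else
{
    var storagePath = writer.StoragePath; // chunked operators fill these in during disposal
    var hash = writer.Hash;
}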

View File

@ -0,0 +1,32 @@
// (c) Copyright Ascensio System SIA 2010-2022
//
// This program is a free software product.
// You can redistribute it and/or modify it under the terms
// of the GNU Affero General Public License (AGPL) version 3 as published by the Free Software
// Foundation. In accordance with Section 7(a) of the GNU AGPL its Section 15 shall be amended
// to the effect that Ascensio System SIA expressly excludes the warranty of non-infringement of
// any third-party rights.
//
// This program is distributed WITHOUT ANY WARRANTY, without even the implied warranty
// of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. For details, see
// the GNU AGPL at: http://www.gnu.org/licenses/agpl-3.0.html
//
// You can contact Ascensio System SIA at Lubanas st. 125a-25, Riga, Latvia, EU, LV-1021.
//
// The interactive user interfaces in modified source and object code versions of the Program must
// display Appropriate Legal Notices, as required under Section 5 of the GNU AGPL version 3.
//
// Pursuant to Section 7(b) of the License you must retain the original Product logo when
// distributing the program. Pursuant to Section 7(e) we decline to grant you any rights under
// trademark law for use of our trademarks.
//
// All the Product's GUI elements, including illustrations and icon sets, as well as technical writing
// content are licensed under the terms of the Creative Commons Attribution-ShareAlike 4.0
// International. See the License terms at http://creativecommons.org/licenses/by-sa/4.0/legalcode
namespace ASC.Data.Storage.ZipOperators;
public interface IGetterWriteOperator
{
Task<IDataWriteOperator> GetWriteOperatorAsync(string storageBasePath, string title, Guid userId);
}

View File

@ -0,0 +1,148 @@
// (c) Copyright Ascensio System SIA 2010-2022
//
// This program is a free software product.
// You can redistribute it and/or modify it under the terms
// of the GNU Affero General Public License (AGPL) version 3 as published by the Free Software
// Foundation. In accordance with Section 7(a) of the GNU AGPL its Section 15 shall be amended
// to the effect that Ascensio System SIA expressly excludes the warranty of non-infringement of
// any third-party rights.
//
// This program is distributed WITHOUT ANY WARRANTY, without even the implied warranty
// of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. For details, see
// the GNU AGPL at: http://www.gnu.org/licenses/agpl-3.0.html
//
// You can contact Ascensio System SIA at Lubanas st. 125a-25, Riga, Latvia, EU, LV-1021.
//
// The interactive user interfaces in modified source and object code versions of the Program must
// display Appropriate Legal Notices, as required under Section 5 of the GNU AGPL version 3.
//
// Pursuant to Section 7(b) of the License you must retain the original Product logo when
// distributing the program. Pursuant to Section 7(e) we decline to grant you any rights under
// trademark law for use of our trademarks.
//
// All the Product's GUI elements, including illustrations and icon sets, as well as technical writing
// content are licensed under the terms of the Creative Commons Attribution-ShareAlike 4.0
// International. See the License terms at http://creativecommons.org/licenses/by-sa/4.0/legalcode
namespace ASC.Data.Storage.ZipOperators;
public class S3ZipWriteOperator : IDataWriteOperator
{
private readonly TarOutputStream _tarOutputStream;
private readonly GZipOutputStream _gZipOutputStream;
private readonly CommonChunkedUploadSession _chunkedUploadSession;
private readonly CommonChunkedUploadSessionHolder _sessionHolder;
private readonly SHA256 _sha;
private Stream _fileStream;
protected const int TasksLimit = 10;
private readonly List<Task> _tasks = new List<Task>(TasksLimit);
private readonly List<Stream> _streams = new List<Stream>(TasksLimit);
private readonly TempStream _tempStream;
public string Hash { get; private set; }
public string StoragePath { get; private set; }
public bool NeedUpload
{
get
{
return false;
}
}
public S3ZipWriteOperator(TempStream tempStream,
CommonChunkedUploadSession chunkedUploadSession,
CommonChunkedUploadSessionHolder sessionHolder)
{
_tempStream = tempStream;
_chunkedUploadSession = chunkedUploadSession;
_sessionHolder = sessionHolder;
_fileStream = _tempStream.Create();
_gZipOutputStream = new GZipOutputStream(_fileStream)
{
IsStreamOwner = false
};
_tarOutputStream = new TarOutputStream(_gZipOutputStream, Encoding.UTF8);
_sha = SHA256.Create();
}
public async Task WriteEntryAsync(string key, Stream stream)
{
if (_fileStream == null)
{
_fileStream = _tempStream.Create();
_gZipOutputStream.baseOutputStream_ = _fileStream;
}
using (var buffered = _tempStream.GetBuffered(stream))
{
var entry = TarEntry.CreateTarEntry(key);
entry.Size = buffered.Length;
await _tarOutputStream.PutNextEntryAsync(entry, default);
buffered.Position = 0;
await buffered.CopyToAsync(_tarOutputStream);
await _tarOutputStream.FlushAsync();
await _tarOutputStream.CloseEntryAsync(default);
}
if (_fileStream.Length > _sessionHolder.MaxChunkUploadSize)
{
var fs = _fileStream;
_fileStream = null;
Upload(fs);
Computehash(fs, false);
}
}
private void Computehash(Stream stream, bool last)
{
stream.Position = 0;
var buffer = new byte[_sessionHolder.MaxChunkUploadSize];
int bytesRead;
while ((bytesRead = stream.Read(buffer, 0, (int)_sessionHolder.MaxChunkUploadSize)) > 0) // read the stream passed in, not _fileStream, which may already have been swapped out
{
_sha.TransformBlock(buffer, 0, bytesRead, buffer, 0);
}
if (last)
{
_sha.TransformFinalBlock(buffer, 0, 0);
}
}
private void Upload(Stream stream)
{
stream.Position = 0;
if (_tasks.Count == TasksLimit)
{
Task.WaitAny(_tasks.ToArray());
for (var i = 0; i < _tasks.Count; i++)
{
if (_tasks[i].IsCompleted)
{
_tasks.RemoveAt(i);
_streams[i].Dispose();
_streams.RemoveAt(i);
}
}
}
_streams.Add(stream);
_tasks.Add(_sessionHolder.UploadChunkAsync(_chunkedUploadSession, stream, stream.Length));
}
public async ValueTask DisposeAsync()
{
_tarOutputStream.Close();
_tarOutputStream.Dispose();
Upload(_fileStream);
Task.WaitAll(_tasks.ToArray());
StoragePath = await _sessionHolder.FinalizeAsync(_chunkedUploadSession);
Computehash(_fileStream, true);
Hash = BitConverter.ToString(_sha.Hash).Replace("-", string.Empty);
_sha.Dispose();
_streams.ForEach(s => s.Dispose());
}
}
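
A self-contained sketch of the TasksLimit pattern used above: queue chunk uploads until the limit is reached, wait for any one to finish, then continue (Task.Run stands in for UploadChunkAsync):

using System;
using System.Collections.Generic;
using System.Threading.Tasks;

const int tasksLimit = 10;
var tasks = new List<Task>(tasksLimit);

for (var chunk = 0; chunk < 25; chunk++)
{
    if (tasks.Count == tasksLimit)
    {
        Task.WaitAny(tasks.ToArray());        // block until at least one upload completes
        tasks.RemoveAll(t => t.IsCompleted);  // drop finished uploads before queuing more
    }

    var current = chunk;
    tasks.Add(Task.Run(() => Console.WriteLine($"uploading chunk {current}")));
}

Task.WaitAll(tasks.ToArray());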

View File

@ -24,55 +24,69 @@
// content are licensed under the terms of the Creative Commons Attribution-ShareAlike 4.0
// International. See the License terms at http://creativecommons.org/licenses/by-sa/4.0/legalcode
namespace ASC.Data.Backup;
namespace ASC.Data.Storage.ZipOperators;
public class ZipWriteOperator : IDataWriteOperator
{
private readonly GZipOutputStream _gZipOutputStream;
private readonly TarOutputStream _tarOutputStream;
private readonly Stream _file;
private readonly TempStream _tempStream;
public bool NeedUpload
{
get
{
return true;
}
}
public string Hash => throw new NotImplementedException();
public string StoragePath => throw new NotImplementedException();
public ZipWriteOperator(TempStream tempStream, string targetFile)
{
var file = new FileStream(targetFile, FileMode.Create);
var gZipOutputStream = new GZipOutputStream(file);
_tarOutputStream = new TarOutputStream(gZipOutputStream, Encoding.UTF8);
_tempStream = tempStream;
_file = new FileStream(targetFile, FileMode.Create);
_gZipOutputStream = new GZipOutputStream(_file);
_tarOutputStream = new TarOutputStream(_gZipOutputStream, Encoding.UTF8);
}
public void WriteEntry(string key, Stream stream)
public async Task WriteEntryAsync(string key, Stream stream)
{
using (var buffered = _tempStream.GetBuffered(stream))
{
var entry = TarEntry.CreateTarEntry(key);
entry.Size = buffered.Length;
_tarOutputStream.PutNextEntry(entry);
await _tarOutputStream.PutNextEntryAsync(entry, default);
buffered.Position = 0;
buffered.CopyTo(_tarOutputStream);
_tarOutputStream.CloseEntry();
await buffered.CopyToAsync(_tarOutputStream);
await _tarOutputStream.CloseEntryAsync(default);
}
}
public void Dispose()
public async ValueTask DisposeAsync()
{
_tarOutputStream.Close();
_tarOutputStream.Dispose();
await _tarOutputStream.DisposeAsync();
}
}
public class ZipReadOperator : IDataReadOperator
{
private readonly string _tmpDir;
private readonly string tmpdir;
public ZipReadOperator(string targetFile)
{
_tmpDir = Path.Combine(Path.GetDirectoryName(targetFile), Path.GetFileNameWithoutExtension(targetFile).Replace('>', '_').Replace(':', '_').Replace('?', '_'));
tmpdir = Path.Combine(Path.GetDirectoryName(targetFile), Path.GetFileNameWithoutExtension(targetFile).Replace('>', '_').Replace(':', '_').Replace('?', '_'));
using (var stream = File.OpenRead(targetFile))
using (var reader = new GZipInputStream(stream))
using (var tarOutputStream = TarArchive.CreateInputTarArchive(reader, Encoding.UTF8))
{
tarOutputStream.ExtractContents(_tmpDir);
tarOutputStream.ExtractContents(tmpdir);
}
File.Delete(targetFile);
@ -80,32 +94,29 @@ public class ZipReadOperator : IDataReadOperator
public Stream GetEntry(string key)
{
var filePath = Path.Combine(_tmpDir, key);
return File.Exists(filePath)
? File.Open(filePath, FileMode.Open, FileAccess.ReadWrite, FileShare.Read)
: null;
var filePath = Path.Combine(tmpdir, key);
return File.Exists(filePath) ? File.Open(filePath, FileMode.Open, FileAccess.ReadWrite, FileShare.Read) : null;
}
public IEnumerable<string> GetEntries(string key)
{
var path = Path.Combine(_tmpDir, key);
var path = Path.Combine(tmpdir, key);
var files = Directory.EnumerateFiles(path);
return files;
}
public IEnumerable<string> GetDirectories(string key)
{
var path = Path.Combine(_tmpDir, key);
var files = Directory.EnumerateDirectories(path);
return files;
}
}
public IEnumerable<string> GetDirectories(string key)
{
var path = Path.Combine(tmpdir, key);
var files = Directory.EnumerateDirectories(path);
return files;
}
public void Dispose()
{
if (Directory.Exists(_tmpDir))
if (Directory.Exists(tmpdir))
{
Directory.Delete(_tmpDir, true);
Directory.Delete(tmpdir, true);
}
}
}

View File

@ -0,0 +1,43 @@
// (c) Copyright Ascensio System SIA 2010-2022
//
// This program is a free software product.
// You can redistribute it and/or modify it under the terms
// of the GNU Affero General Public License (AGPL) version 3 as published by the Free Software
// Foundation. In accordance with Section 7(a) of the GNU AGPL its Section 15 shall be amended
// to the effect that Ascensio System SIA expressly excludes the warranty of non-infringement of
// any third-party rights.
//
// This program is distributed WITHOUT ANY WARRANTY, without even the implied warranty
// of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. For details, see
// the GNU AGPL at: http://www.gnu.org/licenses/agpl-3.0.html
//
// You can contact Ascensio System SIA at Lubanas st. 125a-25, Riga, Latvia, EU, LV-1021.
//
// The interactive user interfaces in modified source and object code versions of the Program must
// display Appropriate Legal Notices, as required under Section 5 of the GNU AGPL version 3.
//
// Pursuant to Section 7(b) of the License you must retain the original Product logo when
// distributing the program. Pursuant to Section 7(e) we decline to grant you any rights under
// trademark law for use of our trademarks.
//
// All the Product's GUI elements, including illustrations and icon sets, as well as technical writing
// content are licensed under the terms of the Creative Commons Attribution-ShareAlike 4.0
// International. See the License terms at http://creativecommons.org/licenses/by-sa/4.0/legalcode
namespace ASC.Data.Storage.ZipOperators;
public static class ZipWriteOperatorFactory
{
public static async Task<IDataWriteOperator> GetWriteOperatorAsync(TempStream tempStream, string storageBasePath, string title, string tempFolder, Guid userId, IGetterWriteOperator getter)
{
var writer = await getter.GetWriteOperatorAsync(storageBasePath, title, userId);
return writer ?? new ZipWriteOperator(tempStream, Path.Combine(tempFolder, title));
}
public static IDataWriteOperator GetDefaultWriteOperator(TempStream tempStream, string backupFilePath)
{
return new ZipWriteOperator(tempStream, backupFilePath);
}
}
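
A hedged usage sketch of the factory (the arguments are assumptions; since LocalBackupStorage returns null from GetWriteOperatorAsync, it falls back to the plain local ZipWriteOperator):

var writer = await ZipWriteOperatorFactory.GetWriteOperatorAsync(
    tempStream,                          // assumed TempStream instance
    storageBasePath,                     // e.g. a folder id for DocumentsBackupStorage
    "backup.tar.gz",                     // archive title
    tempFolder,
    userId,
    backupStorage as IGetterWriteOperator);

Console.WriteLine(writer.NeedUpload
    ? "local archive: upload it after the job finishes"
    : "chunked operator: StoragePath and Hash are set on dispose");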

View File

@ -50,13 +50,6 @@ var logger = LogManager.Setup()
var path = builder.Configuration["pathToConf"];
logger.Debug("path: " + path);
logger.Debug("EnvironmentName: " + builder.Environment.EnvironmentName);
var redisConfiguration = builder.Configuration.GetSection("Redis").Get<RedisConfiguration>();
logger.Error($"redisConfiguration is null: {redisConfiguration == null}");
var kafkaConfiguration = builder.Configuration.GetSection("kafka").Get<KafkaSettings>();
logger.Debug($"kafkaConfiguration is null: {kafkaConfiguration == null}");
var rabbitMQConfiguration = builder.Configuration.GetSection("RabbitMQ").Get<RabbitMQSettings>();
logger.Debug($"rabbitMQConfiguration is null: {rabbitMQConfiguration == null}");
try
{

View File

@ -32,6 +32,17 @@ export function openEdit(fileId, version, doc, view) {
return request(options);
}
export function getReferenceData(object) {
const data = object;
const options = {
method: "post",
url: `/files/file/referencedata`,
data,
};
return request(options);
}
export function getFolderInfo(folderId) {
const options = {
method: "get",

View File

@ -12,6 +12,7 @@ import {
updateFile,
checkFillFormDraft,
convertFile,
getReferenceData,
} from "@docspace/common/api/files";
import { EditorWrapper } from "../components/StyledEditor";
import { useTranslation } from "react-i18next";
@ -232,6 +233,14 @@ function Editor({
}
};
const onSDKRequestReferenceData = async (event) => {
const referenceData = await getReferenceData(
event.data.referenceData ?? event.data
);
docEditor.setReferenceData(referenceData);
};
const onMakeActionLink = (event) => {
const url = window.location.href;
const actionLink = config?.editorConfig?.actionLink;
@ -571,7 +580,8 @@ function Editor({
onRequestMailMergeRecipients,
onRequestCompareFile,
onRequestRestore,
onRequestHistory,
onRequestReferenceData;
// if (isSharingAccess) {
// onRequestSharingSettings = onSDKRequestSharingSettings;
@ -599,8 +609,13 @@ function Editor({
onRequestRestore = onSDKRequestRestore;
}
if (!fileInfo?.providerKey) {
onRequestReferenceData = onSDKRequestReferenceData;
}
const events = {
events: {
onRequestReferenceData,
onAppReady: onSDKAppReady,
onDocumentStateChange: onDocumentStateChange,
onMetaChange: onMetaChange,

View File

@ -27,7 +27,7 @@
</PackageReference>
<PackageReference Include="Microsoft.OneDriveSDK" Version="2.1.0" />
<PackageReference Include="Microsoft.SharePoint.Client" Version="14.0.4762.1000" />
<PackageReference Include="SharpZipLib" Version="1.4.2" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\..\..\common\ASC.Api.Core\ASC.Api.Core.csproj" />
@ -170,60 +170,60 @@
<EmbeddedResource Update="Resources\FilesCommonResource.ru.resx">
<DependentUpon>FilesCommonResource.resx</DependentUpon>
</EmbeddedResource>
<EmbeddedResource Update="Resources\FilesCommonResource.bg.resx">
<DependentUpon>FilesCommonResource.resx</DependentUpon>
</EmbeddedResource>
<EmbeddedResource Update="Resources\FilesCommonResource.cs.resx">
<DependentUpon>FilesCommonResource.resx</DependentUpon>
</EmbeddedResource>
<EmbeddedResource Update="Resources\FilesCommonResource.el-GR.resx">
<DependentUpon>FilesCommonResource.resx</DependentUpon>
</EmbeddedResource>
<EmbeddedResource Update="Resources\FilesCommonResource.fi.resx">
<DependentUpon>FilesCommonResource.resx</DependentUpon>
</EmbeddedResource>
<EmbeddedResource Update="Resources\FilesCommonResource.ja-JP.resx">
<DependentUpon>FilesCommonResource.resx</DependentUpon>
</EmbeddedResource>
<EmbeddedResource Update="Resources\FilesCommonResource.ko-KR.resx">
<DependentUpon>FilesCommonResource.resx</DependentUpon>
</EmbeddedResource>
<EmbeddedResource Update="Resources\FilesCommonResource.lv.resx">
<DependentUpon>FilesCommonResource.resx</DependentUpon>
</EmbeddedResource>
<EmbeddedResource Update="Resources\FilesCommonResource.nl.resx">
<DependentUpon>FilesCommonResource.resx</DependentUpon>
</EmbeddedResource>
<EmbeddedResource Update="Resources\FilesCommonResource.pl.resx">
<DependentUpon>FilesCommonResource.resx</DependentUpon>
</EmbeddedResource>
<EmbeddedResource Update="Resources\FilesCommonResource.pt-BR.resx">
<DependentUpon>FilesCommonResource.resx</DependentUpon>
</EmbeddedResource>
<EmbeddedResource Update="Resources\FilesCommonResource.pt.resx">
<DependentUpon>FilesCommonResource.resx</DependentUpon>
</EmbeddedResource>
<EmbeddedResource Update="Resources\FilesCommonResource.ro.resx">
<DependentUpon>FilesCommonResource.resx</DependentUpon>
</EmbeddedResource>
<EmbeddedResource Update="Resources\FilesCommonResource.sk.resx">
<DependentUpon>FilesCommonResource.resx</DependentUpon>
</EmbeddedResource>
<EmbeddedResource Update="Resources\FilesCommonResource.sl.resx">
<DependentUpon>FilesCommonResource.resx</DependentUpon>
</EmbeddedResource>
<EmbeddedResource Update="Resources\FilesCommonResource.tr.resx">
<DependentUpon>FilesCommonResource.resx</DependentUpon>
</EmbeddedResource>
<EmbeddedResource Update="Resources\FilesCommonResource.uk-UA.resx">
<DependentUpon>FilesCommonResource.resx</DependentUpon>
</EmbeddedResource>
<EmbeddedResource Update="Resources\FilesCommonResource.vi.resx">
<DependentUpon>FilesCommonResource.resx</DependentUpon>
</EmbeddedResource>
<EmbeddedResource Update="Resources\FilesCommonResource.zh-CN.resx">
<DependentUpon>FilesCommonResource.resx</DependentUpon>
</EmbeddedResource>
<EmbeddedResource Update="Services\NotifyService\FilesPatternResource.de.resx">
<DependentUpon>FilesPatternResource.resx</DependentUpon>
</EmbeddedResource>

View File

@ -0,0 +1,34 @@
// (c) Copyright Ascensio System SIA 2010-2022
//
// This program is a free software product.
// You can redistribute it and/or modify it under the terms
// of the GNU Affero General Public License (AGPL) version 3 as published by the Free Software
// Foundation. In accordance with Section 7(a) of the GNU AGPL its Section 15 shall be amended
// to the effect that Ascensio System SIA expressly excludes the warranty of non-infringement of
// any third-party rights.
//
// This program is distributed WITHOUT ANY WARRANTY, without even the implied warranty
// of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. For details, see
// the GNU AGPL at: http://www.gnu.org/licenses/agpl-3.0.html
//
// You can contact Ascensio System SIA at Lubanas st. 125a-25, Riga, Latvia, EU, LV-1021.
//
// The interactive user interfaces in modified source and object code versions of the Program must
// display Appropriate Legal Notices, as required under Section 5 of the GNU AGPL version 3.
//
// Pursuant to Section 7(b) of the License you must retain the original Product logo when
// distributing the program. Pursuant to Section 7(e) we decline to grant you any rights under
// trademark law for use of our trademarks.
//
// All the Product's GUI elements, including illustrations and icon sets, as well as technical writing
// content are licensed under the terms of the Creative Commons Attribution-ShareAlike 4.0
// International. See the License terms at http://creativecommons.org/licenses/by-sa/4.0/legalcode
namespace ASC.Files.Core.ApiModels.RequestDto;
public class GetReferenceDataDto<T>
{
public T FileKey { get; set; }
public string InstanceId { get; set; }
public T SourceFileId { get; set; }
public string Path { get; set; }
}
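As an illustration, a request to the new files/file/referencedata endpoint (added to EditorController later in this diff) could carry a body shaped like this; the ids and title are made-up values, and only the property names come from the DTO above.

var request = new GetReferenceDataDto<int>
{
    FileKey = 1234,          // id of the referenced file, when the editor already knows it
    InstanceId = "1",        // tenant/portal id stored in the reference
    SourceFileId = 5678,     // file that contains the external reference
    Path = "Prices.xlsx"     // title used to resolve the file when FileKey cannot be used
};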

View File

@ -0,0 +1,30 @@
// (c) Copyright Ascensio System SIA 2010-2022
//
// This program is a free software product.
// You can redistribute it and/or modify it under the terms
// of the GNU Affero General Public License (AGPL) version 3 as published by the Free Software
// Foundation. In accordance with Section 7(a) of the GNU AGPL its Section 15 shall be amended
// to the effect that Ascensio System SIA expressly excludes the warranty of non-infringement of
// any third-party rights.
//
// This program is distributed WITHOUT ANY WARRANTY, without even the implied warranty
// of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. For details, see
// the GNU AGPL at: http://www.gnu.org/licenses/agpl-3.0.html
//
// You can contact Ascensio System SIA at Lubanas st. 125a-25, Riga, Latvia, EU, LV-1021.
//
// The interactive user interfaces in modified source and object code versions of the Program must
// display Appropriate Legal Notices, as required under Section 5 of the GNU AGPL version 3.
//
// Pursuant to Section 7(b) of the License you must retain the original Product logo when
// distributing the program. Pursuant to Section 7(e) we decline to grant you any rights under
// trademark law for use of our trademarks.
//
// All the Product's GUI elements, including illustrations and icon sets, as well as technical writing
// content are licensed under the terms of the Creative Commons Attribution-ShareAlike 4.0
// International. See the License terms at http://creativecommons.org/licenses/by-sa/4.0/legalcode
namespace ASC.Files.Core.ApiModels.RequestDto;

public class GetReferenceDataFromPathDto<T>
{
public string Path { get; set; }
}

View File

@ -236,6 +236,7 @@ public interface IFileDao<T>
Task<ChunkedUploadSession<T>> CreateUploadSessionAsync(File<T> file, long contentLength);
Task<File<T>> UploadChunkAsync(ChunkedUploadSession<T> uploadSession, Stream chunkStream, long chunkLength);
Task<File<T>> FinalizeUploadSessionAsync(ChunkedUploadSession<T> uploadSession);
Task AbortUploadSessionAsync(ChunkedUploadSession<T> uploadSession);
#endregion

View File

@ -211,6 +211,11 @@ public interface IFolderDao<T>
/// <returns>Maximum size of file which can be uploaded to folder</returns>
Task<long> GetMaxUploadSizeAsync(T folderId, bool chunkedUpload = false);
IDataWriteOperator CreateDataWriteOperator(
T folderId,
CommonChunkedUploadSession chunkedUploadSession,
CommonChunkedUploadSessionHolder sessionHolder);
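// Implementations added in this commit: the internal FolderDao delegates to
// GlobalStore.GetStore().CreateDataWriteOperator(...), the Box, Dropbox and Google Drive
// DAOs return a ChunkZipWriteOperator, and the OneDrive, SharePoint and SharpBox DAOs
// return null (no chunked backup write support for those providers).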
#region Only for TMFolderDao
/// <summary>

View File

@ -1170,15 +1170,16 @@ internal class FileDao : AbstractDao, IFileDao<int>
await _chunkedUploadSessionHolder.UploadChunkAsync(uploadSession, stream, chunkLength);
if (uploadSession.BytesUploaded == uploadSession.BytesTotal || uploadSession.LastChunk)
{
uploadSession.BytesTotal = uploadSession.BytesUploaded;
uploadSession.File = await FinalizeUploadSessionAsync(uploadSession);
}
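// Note on the condition above: the session is now finalized either when the declared
// BytesTotal is reached or when the client marks the chunk as the last one
// (uploadSession.LastChunk), which covers uploads whose total size is not known up
// front; in that case BytesTotal is set to the number of bytes actually uploaded.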
return uploadSession.File;
}
public async Task<File<int>> FinalizeUploadSessionAsync(ChunkedUploadSession<int> uploadSession)
{
await _chunkedUploadSessionHolder.FinalizeUploadSessionAsync(uploadSession);

View File

@ -48,6 +48,7 @@ internal class FolderDao : AbstractDao, IFolderDao<int>
private readonly CrossDao _crossDao;
private readonly IMapper _mapper;
private static readonly SemaphoreSlim _semaphore = new SemaphoreSlim(1);
private readonly GlobalStore _globalStore;
public FolderDao(
FactoryIndexerFolder factoryIndexer,
@ -67,7 +68,8 @@ internal class FolderDao : AbstractDao, IFolderDao<int>
IDaoFactory daoFactory,
ProviderFolderDao providerFolderDao,
CrossDao crossDao,
IMapper mapper)
IMapper mapper,
GlobalStore globalStore)
: base(
dbContextManager,
userManager,
@ -88,6 +90,7 @@ internal class FolderDao : AbstractDao, IFolderDao<int>
_providerFolderDao = providerFolderDao;
_crossDao = crossDao;
_mapper = mapper;
_globalStore = globalStore;
}
public async Task<Folder<int>> GetFolderAsync(int folderId)
@ -159,7 +162,7 @@ internal class FolderDao : AbstractDao, IFolderDao<int>
return _mapper.Map<DbFolderQuery, Folder<int>>(dbFolder);
}
public IAsyncEnumerable<Folder<int>> GetFoldersAsync(int parentId)
{
return GetFoldersAsync(parentId, default, FilterType.None, false, default, string.Empty);
@ -1496,12 +1499,12 @@ internal class FolderDao : AbstractDao, IFolderDao<int>
{
var q2 = filesDbContext.Security.Where(r => r.TimeStamp > fromTime).Select(r => r.TenantId).Distinct();
await foreach (var q in q2.AsAsyncEnumerable())
{
yield return q;
}
}
}
private IQueryable<DbFolder> BuildRoomsQuery(FilesDbContext filesDbContext, IQueryable<DbFolder> query, FolderType filterByType, IEnumerable<string> tags, Guid subjectId, bool searchByTags, bool withoutTags,
bool searchByFilter, bool withSubfolders, bool excludeSubject, SubjectFilter subjectFilter, IEnumerable<string> subjectEntriesIds)
@ -1634,6 +1637,14 @@ internal class FolderDao : AbstractDao, IFolderDao<int>
};
}
public IDataWriteOperator CreateDataWriteOperator(
int folderId,
CommonChunkedUploadSession chunkedUploadSession,
CommonChunkedUploadSessionHolder sessionHolder)
{
return _globalStore.GetStore().CreateDataWriteOperator(chunkedUploadSession, sessionHolder);
}
private string GetProjectTitle(object folderID)
{
return "";

View File

@ -2694,6 +2694,72 @@ public class FileStorageService<T> //: IFileStorageService
return InternalSharedUsersAsync(fileId);
}
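// Resolves the data the editor needs to open an externally referenced file: when the
// request targets the current portal the file is fetched directly by id; otherwise it is
// looked up by title inside the source file's parent folder (after read checks on the
// source file and that folder), and read access to the resolved file is verified before
// a signed FileReference is returned.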
public async Task<FileReference<T>> GetReferenceDataAsync(T fileId, string portalName, T sourceFileId, string path)
{
File<T> file = null;
var fileDao = _daoFactory.GetFileDao<T>();
if (portalName == _tenantManager.GetCurrentTenant().Id.ToString())
{
file = await fileDao.GetFileAsync(fileId);
}
if (file == null)
{
var source = await fileDao.GetFileAsync(sourceFileId);
if (source == null)
{
return new FileReference<T>
{
Error = FilesCommonResource.ErrorMassage_FileNotFound
};
}
if (!await _fileSecurity.CanReadAsync(source))
{
return new FileReference<T>
{
Error = FilesCommonResource.ErrorMassage_SecurityException_ReadFile
};
}
var folderDao = _daoFactory.GetFolderDao<T>();
var folder = await folderDao.GetFolderAsync(source.ParentId);
if (!await _fileSecurity.CanReadAsync(folder))
{
return new FileReference<T>
{
Error = FilesCommonResource.ErrorMassage_SecurityException_ReadFolder
};
}
var list = fileDao.GetFilesAsync(folder.Id, new OrderBy(SortedByType.AZ, true), FilterType.FilesOnly, false, Guid.Empty, path, false, false);
file = await list.FirstOrDefaultAsync(fileItem => fileItem.Title == path);
}
if (!await _fileSecurity.CanReadAsync(file))
{
return new FileReference<T>
{
Error = FilesCommonResource.ErrorMassage_SecurityException_ReadFile
};
}
var fileReference = new FileReference<T>
{
Path = file.Title,
ReferenceData = new FileReferenceData<T>
{
FileKey = file.Id,
InstanceId = _tenantManager.GetCurrentTenant().Id.ToString()
},
Url = _documentServiceConnector.ReplaceCommunityAdress(_pathProvider.GetFileStreamUrl(file, lastVersion: true)),
FileType = file.ConvertedExtension.Trim('.')
};
fileReference.Token = _documentServiceHelper.GetSignature(fileReference);
return fileReference;
}
public async Task<List<MentionWrapper>> InternalSharedUsersAsync(T fileId)
{
FileEntry<T> file;

View File

@ -636,7 +636,7 @@ internal class BoxFileDao : BoxDaoBase, IFileDao<string>
uploadSession.BytesUploaded += chunkLength;
if (uploadSession.BytesUploaded == uploadSession.BytesTotal || uploadSession.LastChunk)
{
using var fs = new FileStream(uploadSession.GetItemOrDefault<string>("TempPath"),
FileMode.Open, FileAccess.Read, System.IO.FileShare.None, 4096, FileOptions.DeleteOnClose);
@ -650,6 +650,11 @@ internal class BoxFileDao : BoxDaoBase, IFileDao<string>
return uploadSession.File;
}
public Task<File<string>> FinalizeUploadSessionAsync(ChunkedUploadSession<string> uploadSession)
{
throw new NotImplementedException();
}
public Task AbortUploadSessionAsync(ChunkedUploadSession<string> uploadSession)
{
if (uploadSession.Items.ContainsKey("TempPath"))

View File

@ -33,6 +33,7 @@ internal class BoxFolderDao : BoxDaoBase, IFolderDao<string>
private readonly BoxDaoSelector _boxDaoSelector;
private readonly IFileDao<int> _fileDao;
private readonly IFolderDao<int> _folderDao;
private readonly TempStream _tempStream;
public BoxFolderDao(
IServiceProvider serviceProvider,
@ -48,13 +49,15 @@ internal class BoxFolderDao : BoxDaoBase, IFolderDao<string>
IFileDao<int> fileDao,
IFolderDao<int> folderDao,
TempPath tempPath,
AuthContext authContext,
TempStream tempStream)
: base(serviceProvider, userManager, tenantManager, tenantUtil, dbContextManager, setupInfo, monitor, fileUtility, tempPath, authContext)
{
_crossDao = crossDao;
_boxDaoSelector = boxDaoSelector;
_fileDao = fileDao;
_folderDao = folderDao;
_tempStream = tempStream;
}
public async Task<Folder<string>> GetFolderAsync(string folderId)
@ -96,10 +99,10 @@ internal class BoxFolderDao : BoxDaoBase, IFolderDao<string>
rooms = FilterByTags(rooms, withoutTags, tags, filesDbContext);
await foreach (var room in rooms)
{
yield return room;
}
}
public async IAsyncEnumerable<Folder<string>> GetFoldersAsync(string parentId)
{
@ -515,4 +518,9 @@ internal class BoxFolderDao : BoxDaoBase, IFolderDao<string>
return chunkedUpload ? storageMaxUploadSize : Math.Min(storageMaxUploadSize, _setupInfo.AvailableFileSize);
}
}
public IDataWriteOperator CreateDataWriteOperator(string folderId, CommonChunkedUploadSession chunkedUploadSession, CommonChunkedUploadSessionHolder sessionHolder)
{
return new ChunkZipWriteOperator(_tempStream, chunkedUploadSession, sessionHolder);
}
}

View File

@ -654,7 +654,7 @@ internal class DropboxFileDao : DropboxDaoBase, IFileDao<string>
uploadSession.BytesUploaded += chunkLength;
if (uploadSession.BytesUploaded == uploadSession.BytesTotal || uploadSession.LastChunk)
{
uploadSession.File = await FinalizeUploadSessionAsync(uploadSession);
}

View File

@ -33,6 +33,7 @@ internal class DropboxFolderDao : DropboxDaoBase, IFolderDao<string>
private readonly DropboxDaoSelector _dropboxDaoSelector;
private readonly IFileDao<int> _fileDao;
private readonly IFolderDao<int> _folderDao;
private readonly TempStream _tempStream;
public DropboxFolderDao(
IServiceProvider serviceProvider,
@ -48,13 +49,15 @@ internal class DropboxFolderDao : DropboxDaoBase, IFolderDao<string>
IFileDao<int> fileDao,
IFolderDao<int> folderDao,
TempPath tempPath,
AuthContext authContext,
TempStream tempStream)
: base(serviceProvider, userManager, tenantManager, tenantUtil, dbContextManager, setupInfo, monitor, fileUtility, tempPath, authContext)
{
_crossDao = crossDao;
_dropboxDaoSelector = dropboxDaoSelector;
_fileDao = fileDao;
_folderDao = folderDao;
_tempStream = tempStream;
}
public async Task<Folder<string>> GetFolderAsync(string folderId)
@ -98,10 +101,10 @@ internal class DropboxFolderDao : DropboxDaoBase, IFolderDao<string>
rooms = FilterByTags(rooms, withoutTags, tags, filesDbContext);
await foreach (var room in rooms)
{
yield return room;
}
}
public async IAsyncEnumerable<Folder<string>> GetFoldersAsync(string parentId)
{
@ -506,4 +509,12 @@ internal class DropboxFolderDao : DropboxDaoBase, IFolderDao<string>
return chunkedUpload ? storageMaxUploadSize : Math.Min(storageMaxUploadSize, _setupInfo.AvailableFileSize);
}
}
public IDataWriteOperator CreateDataWriteOperator(
string folderId,
CommonChunkedUploadSession chunkedUploadSession,
CommonChunkedUploadSessionHolder sessionHolder)
{
return new ChunkZipWriteOperator(_tempStream, chunkedUploadSession, sessionHolder);
}
}

View File

@ -666,7 +666,7 @@ internal class GoogleDriveFileDao : GoogleDriveDaoBase, IFileDao<string>
var googleDriveSession = uploadSession.GetItemOrDefault<ResumableUploadSession>("GoogleDriveSession");
var storage = await ProviderInfo.StorageAsync;
storage = await ProviderInfo.StorageAsync;
await storage.TransferAsync(googleDriveSession, stream, chunkLength, uploadSession.LastChunk);
}
else
{
@ -677,7 +677,7 @@ internal class GoogleDriveFileDao : GoogleDriveDaoBase, IFileDao<string>
uploadSession.BytesUploaded += chunkLength;
if (uploadSession.BytesUploaded == uploadSession.BytesTotal || uploadSession.LastChunk)
{
uploadSession.File = await FinalizeUploadSessionAsync(uploadSession);
}

View File

@ -33,6 +33,7 @@ internal class GoogleDriveFolderDao : GoogleDriveDaoBase, IFolderDao<string>
private readonly GoogleDriveDaoSelector _googleDriveDaoSelector;
private readonly IFileDao<int> _fileDao;
private readonly IFolderDao<int> _folderDao;
private readonly TempStream _tempStream;
public GoogleDriveFolderDao(
IServiceProvider serviceProvider,
@ -48,13 +49,14 @@ internal class GoogleDriveFolderDao : GoogleDriveDaoBase, IFolderDao<string>
IFileDao<int> fileDao,
IFolderDao<int> folderDao,
TempPath tempPath,
AuthContext authContext,
TempStream tempStream) : base(serviceProvider, userManager, tenantManager, tenantUtil, dbContextManager, setupInfo, monitor, fileUtility, tempPath, authContext)
{
_crossDao = crossDao;
_googleDriveDaoSelector = googleDriveDaoSelector;
_fileDao = fileDao;
_folderDao = folderDao;
_tempStream = tempStream;
}
public async Task<Folder<string>> GetFolderAsync(string folderId)
@ -96,10 +98,10 @@ internal class GoogleDriveFolderDao : GoogleDriveDaoBase, IFolderDao<string>
rooms = FilterByTags(rooms, withoutTags, tags, filesDbContext);
await foreach (var room in rooms)
{
yield return room;
}
}
public async IAsyncEnumerable<Folder<string>> GetFoldersAsync(string parentId)
{
@ -508,4 +510,12 @@ internal class GoogleDriveFolderDao : GoogleDriveDaoBase, IFolderDao<string>
return chunkedUpload ? storageMaxUploadSize : Math.Min(storageMaxUploadSize, _setupInfo.AvailableFileSize);
}
}
public IDataWriteOperator CreateDataWriteOperator(
string folderId,
CommonChunkedUploadSession chunkedUploadSession,
CommonChunkedUploadSessionHolder sessionHolder)
{
return new ChunkZipWriteOperator(_tempStream, chunkedUploadSession, sessionHolder);
}
}

View File

@ -580,7 +580,7 @@ internal class GoogleDriveStorage : IDisposable
return uploadSession;
}
public Task TransferAsync(ResumableUploadSession googleDriveSession, Stream stream, long chunkLength, bool lastChunk)
{
ArgumentNullException.ThrowIfNull(stream);
@ -589,10 +589,10 @@ internal class GoogleDriveStorage : IDisposable
throw new InvalidOperationException("Can't upload chunk for given upload session.");
}
return InternalTransferAsync(googleDriveSession, stream, chunkLength, lastChunk);
}
private async Task InternalTransferAsync(ResumableUploadSession googleDriveSession, Stream stream, long chunkLength, bool lastChunk)
{
var request = new HttpRequestMessage
{
@ -601,9 +601,21 @@ internal class GoogleDriveStorage : IDisposable
};
request.Headers.Add("Authorization", "Bearer " + AccessToken);
request.Content = new StreamContent(stream);
if (googleDriveSession.BytesToTransfer > 0)
{
request.Headers.Add("Content-Range", string.Format("bytes {0}-{1}/{2}",
googleDriveSession.BytesTransfered,
googleDriveSession.BytesTransfered + chunkLength - 1,
googleDriveSession.BytesToTransfer));
}
else
{
var bytesToTransfer = lastChunk ? (googleDriveSession.BytesTransfered + chunkLength).ToString() : "*";
request.Headers.Add("Content-Range", string.Format("bytes {0}-{1}/{2}",
googleDriveSession.BytesTransfered,
googleDriveSession.BytesTransfered + chunkLength - 1,
bytesToTransfer));
}
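// Worked example (numbers illustrative, not from the diff): with 5242880 bytes already
// transferred and a 3145728-byte chunk, the header is "Content-Range: bytes 5242880-8388607/*"
// while the total size is unknown, and "Content-Range: bytes 5242880-8388607/8388608" on the
// last chunk; when BytesToTransfer is known it is sent as the total instead of "*".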
var httpClient = _clientFactory.CreateClient();
HttpResponseMessage response;

View File

@ -678,7 +678,7 @@ internal class OneDriveFileDao : OneDriveDaoBase, IFileDao<string>
uploadSession.BytesUploaded += chunkLength;
if (uploadSession.BytesUploaded == uploadSession.BytesTotal || uploadSession.LastChunk)
{
uploadSession.File = await FinalizeUploadSessionAsync(uploadSession);
}
@ -690,7 +690,7 @@ internal class OneDriveFileDao : OneDriveDaoBase, IFileDao<string>
return uploadSession.File;
}
public async Task<File<string>> FinalizeUploadSessionAsync(ChunkedUploadSession<string> uploadSession)
{
if (uploadSession.Items.ContainsKey("OneDriveSession"))
{

View File

@ -73,7 +73,7 @@ internal class OneDriveFolderDao : OneDriveDaoBase, IFolderDao<string>
{
return GetRootFolderAsync(fileId);
}
public async IAsyncEnumerable<Folder<string>> GetRoomsAsync(IEnumerable<string> parentsIds, IEnumerable<string> roomsIds, FilterType filterType, IEnumerable<string> tags, Guid subjectId, string searchText, bool withSubfolders, bool withoutTags, bool excludeSubject, ProviderFilter provider, SubjectFilter subjectFilter, IEnumerable<string> subjectEntriesIds)
{
if (CheckInvalidFilter(filterType) || (provider != ProviderFilter.None && provider != ProviderFilter.OneDrive))
@ -95,10 +95,10 @@ internal class OneDriveFolderDao : OneDriveDaoBase, IFolderDao<string>
rooms = FilterByTags(rooms, withoutTags, tags, filesDbContext);
await foreach (var room in rooms)
{
yield return room;
}
}
public async IAsyncEnumerable<Folder<string>> GetFoldersAsync(string parentId)
{
@ -519,4 +519,12 @@ internal class OneDriveFolderDao : OneDriveDaoBase, IFolderDao<string>
return chunkedUpload ? storageMaxUploadSize : Math.Min(storageMaxUploadSize, _setupInfo.AvailableFileSize);
}
}
public IDataWriteOperator CreateDataWriteOperator(
string folderId,
CommonChunkedUploadSession chunkedUploadSession,
CommonChunkedUploadSessionHolder sessionHolder)
{
return null;
}
}

View File

@ -463,6 +463,13 @@ internal class ProviderFileDao : ProviderDaoBase, IFileDao<string>
return uploadSession.File;
}
public Task<File<string>> FinalizeUploadSessionAsync(ChunkedUploadSession<string> uploadSession)
{
var fileDao = GetFileDao(uploadSession.File);
uploadSession.File = ConvertId(uploadSession.File);
return fileDao.FinalizeUploadSessionAsync(uploadSession);
}
public Task AbortUploadSessionAsync(ChunkedUploadSession<string> uploadSession)
{
var fileDao = GetFileDao(uploadSession.File);

View File

@ -473,6 +473,16 @@ internal class ProviderFolderDao : ProviderDaoBase, IFolderDao<string>
return storageMaxUploadSize;
}
public IDataWriteOperator CreateDataWriteOperator(
string folderId,
CommonChunkedUploadSession chunkedUploadSession,
CommonChunkedUploadSessionHolder sessionHolder)
{
var selector = GetSelector(folderId);
var folderDao = selector.GetFolderDao(folderId);
return folderDao.CreateDataWriteOperator(folderId, chunkedUploadSession, sessionHolder);
}
private IAsyncEnumerable<Folder<string>> FilterByProvider(IAsyncEnumerable<Folder<string>> folders, ProviderFilter provider)
{
if (provider != ProviderFilter.kDrive && provider != ProviderFilter.WebDav && provider != ProviderFilter.Yandex)

View File

@ -471,6 +471,11 @@ internal class SharePointFileDao : SharePointDaoBase, IFileDao<string>
throw new NotImplementedException();
}
public Task<File<string>> FinalizeUploadSessionAsync(ChunkedUploadSession<string> uploadSession)
{
throw new NotImplementedException();
}
public Task AbortUploadSessionAsync(ChunkedUploadSession<string> uploadSession)
{
return Task.FromResult(0);

View File

@ -81,7 +81,7 @@ internal class SharePointFolderDao : SharePointDaoBase, IFolderDao<string>
}
public async IAsyncEnumerable<Folder<string>> GetRoomsAsync(IEnumerable<string> parentsIds, IEnumerable<string> roomsIds, FilterType filterType, IEnumerable<string> tags, Guid subjectId, string searchText, bool withSubfolders, bool withoutTags, bool excludeSubject, ProviderFilter provider, SubjectFilter subjectFilter, IEnumerable<string> subjectEntriesIds)
{
if (CheckInvalidFilter(filterType) || (provider != ProviderFilter.None && provider != ProviderFilter.SharePoint))
{
yield break;
@ -101,10 +101,10 @@ internal class SharePointFolderDao : SharePointDaoBase, IFolderDao<string>
rooms = FilterByTags(rooms, withoutTags, tags, filesDbContext);
await foreach (var room in rooms)
{
yield return room;
}
}
public async IAsyncEnumerable<Folder<string>> GetFoldersAsync(string parentId)
{
@ -390,10 +390,10 @@ internal class SharePointFolderDao : SharePointDaoBase, IFolderDao<string>
if (ProviderInfo.FolderId == oldId)
{
await DaoSelector.UpdateProviderFolderId(ProviderInfo, newFolderId);
}
}
}
await UpdatePathInDBAsync(oldId, newFolderId);
@ -442,4 +442,12 @@ internal class SharePointFolderDao : SharePointDaoBase, IFolderDao<string>
{
return Task.FromResult(2L * 1024L * 1024L * 1024L);
}
}
public IDataWriteOperator CreateDataWriteOperator(
string folderId,
CommonChunkedUploadSession chunkedUploadSession,
CommonChunkedUploadSessionHolder sessionHolder)
{
return null;
}
}

View File

@ -664,7 +664,7 @@ internal class SharpBoxFileDao : SharpBoxDaoBase, IFileDao<string>
uploadSession.BytesUploaded += chunkLength;
if (uploadSession.BytesUploaded == uploadSession.BytesTotal || uploadSession.LastChunk)
{
uploadSession.File = await FinalizeUploadSessionAsync(uploadSession);
}

View File

@ -78,7 +78,7 @@ internal class SharpBoxFolderDao : SharpBoxDaoBase, IFolderDao<string>
{
return Task.FromResult(ToFolder(RootFolder()));
}
public async IAsyncEnumerable<Folder<string>> GetRoomsAsync(IEnumerable<string> parentsIds, IEnumerable<string> roomsIds, FilterType filterType, IEnumerable<string> tags, Guid subjectId, string searchText, bool withSubfolders, bool withoutTags, bool excludeSubject, ProviderFilter provider,
SubjectFilter subjectFilter, IEnumerable<string> subjectEntriesIds)
{
@ -101,10 +101,10 @@ internal class SharpBoxFolderDao : SharpBoxDaoBase, IFolderDao<string>
rooms = FilterByTags(rooms, withoutTags, tags, filesDbContext);
await foreach (var room in rooms)
{
yield return room;
}
}
public IAsyncEnumerable<Folder<string>> GetFoldersAsync(string parentId)
{
@ -449,11 +449,11 @@ internal class SharpBoxFolderDao : SharpBoxDaoBase, IFolderDao<string>
if (ProviderInfo.FolderId == oldId)
{
await DaoSelector.UpdateProviderFolderId(ProviderInfo, newId);
}
}
}
}
await UpdatePathInDBAsync(oldId, newId);
@ -509,4 +509,12 @@ internal class SharpBoxFolderDao : SharpBoxDaoBase, IFolderDao<string>
return Task.FromResult(chunkedUpload ? storageMaxUploadSize : Math.Min(storageMaxUploadSize, _setupInfo.AvailableFileSize));
}
}
public IDataWriteOperator CreateDataWriteOperator(
string folderId,
CommonChunkedUploadSession chunkedUploadSession,
CommonChunkedUploadSessionHolder sessionHolder)
{
return null;
}
}

View File

@ -32,7 +32,8 @@ global using System.Globalization;
global using System.Linq.Expressions;
global using System.Net;
global using System.Net.Http.Headers;
global using System.Net.Http.Json;
global using System.Net.Mime;
global using System.Reflection;
global using System.Runtime.Serialization;
global using System.Security;
@ -80,6 +81,7 @@ global using ASC.Core.Notify.Socket;
global using ASC.Core.Tenants;
global using ASC.Core.Users;
global using ASC.Data.Storage;
global using ASC.Data.Storage.ZipOperators;
global using ASC.ElasticSearch;
global using ASC.ElasticSearch.Core;
global using ASC.ElasticSearch.Service;
@ -189,13 +191,11 @@ global using Microsoft.AspNetCore.Mvc.ModelBinding;
global using Microsoft.AspNetCore.WebUtilities;
global using Microsoft.EntityFrameworkCore;
global using Microsoft.EntityFrameworkCore.Infrastructure;
global using Microsoft.EntityFrameworkCore.Metadata;
global using Microsoft.EntityFrameworkCore.Storage;
global using Microsoft.Extensions.Caching.Memory;
global using Microsoft.Extensions.Configuration;
global using Microsoft.Extensions.DependencyInjection;
global using Microsoft.Extensions.Logging;
global using Microsoft.Extensions.Options;
global using Microsoft.Extensions.Primitives;
global using Microsoft.Graph;
global using Microsoft.OneDrive.Sdk;

View File

@ -142,9 +142,11 @@ public class DocumentConfig<T>
{
private readonly DocumentServiceConnector _documentServiceConnector;
private readonly PathProvider _pathProvider;
private readonly TenantManager _tenantManager;
private string _fileUri;
private string _key = string.Empty;
private string _title;
private FileReferenceData<T> _referenceData;
public string FileType => Info.GetFile().ConvertedExtension.Trim('.');
public InfoConfig<T> Info { get; set; }
public bool IsLinkedForMe { get; set; }
@ -157,6 +159,22 @@ public class DocumentConfig<T>
public PermissionsConfig Permissions { get; set; }
public string SharedLinkKey { get; set; }
public FileReferenceData<T> ReferenceData
{
get
{
if (_referenceData == null)
{
_referenceData = new FileReferenceData<T>()
{
FileKey = Info.GetFile().Id,
InstanceId = _tenantManager.GetCurrentTenant().Id.ToString()
};
}
return _referenceData;
}
}
public string Title
{
@ -181,12 +199,13 @@ public class DocumentConfig<T>
}
}
public DocumentConfig(DocumentServiceConnector documentServiceConnector, PathProvider pathProvider, InfoConfig<T> infoConfig)
public DocumentConfig(DocumentServiceConnector documentServiceConnector, PathProvider pathProvider, InfoConfig<T> infoConfig, TenantManager tenantManager)
{
Info = infoConfig;
Permissions = new PermissionsConfig();
_documentServiceConnector = documentServiceConnector;
_pathProvider = pathProvider;
_tenantManager = tenantManager;
}
}
@ -606,6 +625,22 @@ public class PermissionsConfig
public bool Review { get; set; } = true;
}
public class FileReference<T>
{
public FileReferenceData<T> ReferenceData { get; set; }
public string Error { get; set; }
public string Path { get; set; }
public string Url { get; set; }
public string FileType { get; set; }
public string Token { get; set; }
}
public class FileReferenceData<T>
{
public T FileKey { get; set; }
public string InstanceId { get; set; }
}
#endregion Nested Classes
[Transient]

View File

@ -334,7 +334,7 @@ public class FileUploader
var dao = _daoFactory.GetFileDao<T>();
await dao.UploadChunkAsync(uploadSession, stream, chunkLength);
if (uploadSession.BytesUploaded == uploadSession.BytesTotal || uploadSession.LastChunk)
{
var linkDao = _daoFactory.GetLinkDao();
await linkDao.DeleteAllLinkAsync(uploadSession.File.Id.ToString());

View File

@ -0,0 +1,56 @@
namespace ASC.Web.Files.Utils;
public class FilesChunkedUploadSessionHolder : CommonChunkedUploadSessionHolder
{
private readonly IDaoFactory _daoFactory;
public FilesChunkedUploadSessionHolder(IDaoFactory daoFactory, TempPath tempPath, ILogger logger, IDataStore dataStore, string domain, long maxChunkUploadSize = 10485760)
: base(tempPath, logger, dataStore, domain, maxChunkUploadSize)
{
_daoFactory = daoFactory;
}
public override async Task<string> UploadChunkAsync(CommonChunkedUploadSession uploadSession, Stream stream, long length)
{
if (uploadSession is ChunkedUploadSession<int>)
{
return (await InternalUploadChunkAsync<int>(uploadSession, stream, length)).ToString();
}
else
{
return await InternalUploadChunkAsync<string>(uploadSession, stream, length);
}
}
private async Task<T> InternalUploadChunkAsync<T>(CommonChunkedUploadSession uploadSession, Stream stream, long length)
{
var chunkedUploadSession = uploadSession as ChunkedUploadSession<T>;
chunkedUploadSession.File.ContentLength += stream.Length;
var fileDao = GetFileDao<T>();
var file = await fileDao.UploadChunkAsync(chunkedUploadSession, stream, length);
return file.Id;
}
public override async Task<string> FinalizeAsync(CommonChunkedUploadSession uploadSession)
{
if (uploadSession is ChunkedUploadSession<int>)
{
return (await InternalFinalizeAsync<int>(uploadSession)).ToString();
}
else
{
return await InternalFinalizeAsync<string>(uploadSession);
}
}
private async Task<T> InternalFinalizeAsync<T>(CommonChunkedUploadSession commonChunkedUploadSession)
{
var chunkedUploadSession = commonChunkedUploadSession as ChunkedUploadSession<T>;
var fileDao = GetFileDao<T>();
var file = await fileDao.FinalizeUploadSessionAsync(chunkedUploadSession);
return file.Id;
}
private IFileDao<T> GetFileDao<T>()
{
return _daoFactory.GetFileDao<T>();
}
}
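The holder above dispatches on the runtime type of the session: a ChunkedUploadSession<int> is routed to the internal IFileDao<int> (and the resulting int id is returned as a string), while any other session is treated as a third-party ChunkedUploadSession<string>; both chunk upload and finalization are forwarded to the matching file DAO.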

View File

@ -254,7 +254,14 @@ public abstract class EditorController<T> : ApiControllerBase
public Task<List<MentionWrapper>> SharedUsers(T fileId)
{
return _fileStorageService.SharedUsersAsync(fileId);
}
}
[HttpPost("file/referencedata")]
public Task<FileReference<T>> GetReferenceDataAsync(GetReferenceDataDto<T> inDto)
{
return _fileStorageService.GetReferenceDataAsync(inDto.FileKey, inDto.InstanceId, inDto.SourceFileId, inDto.Path);
}
}
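For illustration, a hypothetical client-side call to the new endpoint; the base address, the /api/2.0 route prefix and the payload values are assumptions rather than part of this commit.

// Hypothetical call with made-up ids; requires System.Net.Http.Json.
using var http = new HttpClient { BaseAddress = new Uri("https://portal.example.com/") };
var response = await http.PostAsJsonAsync("api/2.0/files/file/referencedata", new
{
    fileKey = 1234,
    instanceId = "1",
    sourceFileId = 5678,
    path = "Prices.xlsx"
});
var reference = await response.Content.ReadFromJsonAsync<FileReference<int>>();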
public class EditorController : ApiControllerBase

View File

@ -44,7 +44,7 @@ global using ASC.Core;
global using ASC.Core.Billing;
global using ASC.Core.Common.EF;
global using ASC.Core.Common.Quota;
global using ASC.Core.Common.Quota.Features;
global using ASC.Core.Common.Settings;
global using ASC.Core.Users;
global using ASC.FederatedLogin.Helpers;
@ -54,6 +54,7 @@ global using ASC.Files.Core.ApiModels;
global using ASC.Files.Core.ApiModels.RequestDto;
global using ASC.Files.Core.ApiModels.ResponseDto;
global using ASC.Files.Core.Core;
global using ASC.Files.Core.Data;
global using ASC.Files.Core.EF;
global using ASC.Files.Core.Helpers;
global using ASC.Files.Core.Resources;

View File

@ -90,7 +90,7 @@ public class WebhooksController : BaseSettingsController
return _mapper.Map<WebhooksConfig, WebhooksConfigDto>(webhook);
}
[HttpDelete("webhook/{id}")]
public async Task<WebhooksConfigDto> RemoveWebhook(int id)
{
_permissionContext.DemandPermissions(SecutiryConstants.EditPortalSettings);
@ -101,13 +101,13 @@ public class WebhooksController : BaseSettingsController
}
[HttpGet("webhooks/log")]
public async IAsyncEnumerable<WebhooksLogDto> GetJournal(WebhooksLogRequest model)
{
_permissionContext.DemandPermissions(SecutiryConstants.EditPortalSettings);
var startIndex = Convert.ToInt32(_context.StartIndex);
var count = Convert.ToInt32(_context.Count);
await foreach (var j in _webhookDbWorker.ReadJournal(startIndex, count, model.Delivery, model.Hookname, model.Route))
{
yield return _mapper.Map<WebhooksLog, WebhooksLogDto>(j);
}

View File

@ -0,0 +1,36 @@
// (c) Copyright Ascensio System SIA 2010-2022
//
// This program is a free software product.
// You can redistribute it and/or modify it under the terms
// of the GNU Affero General Public License (AGPL) version 3 as published by the Free Software
// Foundation. In accordance with Section 7(a) of the GNU AGPL its Section 15 shall be amended
// to the effect that Ascensio System SIA expressly excludes the warranty of non-infringement of
// any third-party rights.
//
// This program is distributed WITHOUT ANY WARRANTY, without even the implied warranty
// of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. For details, see
// the GNU AGPL at: http://www.gnu.org/licenses/agpl-3.0.html
//
// You can contact Ascensio System SIA at Lubanas st. 125a-25, Riga, Latvia, EU, LV-1021.
//
// The interactive user interfaces in modified source and object code versions of the Program must
// display Appropriate Legal Notices, as required under Section 5 of the GNU AGPL version 3.
//
// Pursuant to Section 7(b) of the License you must retain the original Product logo when
// distributing the program. Pursuant to Section 7(e) we decline to grant you any rights under
// trademark law for use of our trademarks.
//
// All the Product's GUI elements, including illustrations and icon sets, as well as technical writing
// content are licensed under the terms of the Creative Commons Attribution-ShareAlike 4.0
// International. See the License terms at http://creativecommons.org/licenses/by-sa/4.0/legalcode
namespace ASC.Web.Api.ApiModels.RequestsDto;
public class WebhooksLogRequest
{
public DateTime? Delivery { get; set; }
public string Hookname { get; set; }
public string Route { get; set; }
}

View File

@ -28,6 +28,8 @@ namespace ASC.Web.Api.ApiModels.ResponseDto;
public class WebhooksConfigDto : IMapFrom<WebhooksConfig>
{
public int Id { get; set; }
public string Name { get; set; }
public string Uri { get; set; }
public string SecretKey { get; set; }
public bool Enabled { get; set; }