Merge branch 'develop' into feature/new-settings-description

This commit is contained in:
Alexey Safronov 2023-09-08 12:05:20 +04:00
commit 5a76bd2bcf
65 changed files with 1474 additions and 1515 deletions

View File

@ -219,11 +219,7 @@ public class Consumer : IDictionary<string, string>
private void Set(string name, string value)
{
if (!CanSet)
{
throw new NotSupportedException("Key for read only. Key " + name);
}
if (!ManagedKeys.Contains(name))
{
if (_additional.ContainsKey(name))
@ -238,6 +234,11 @@ public class Consumer : IDictionary<string, string>
return;
}
if (!CanSet)
{
throw new NotSupportedException("Key for read only. Key " + name);
}
var tenant = CoreBaseSettings.Standalone
? Tenant.DefaultTenant
: TenantManager.GetCurrentTenant().Id;

View File

@ -1,237 +0,0 @@
// (c) Copyright Ascensio System SIA 2010-2022
//
// This program is a free software product.
// You can redistribute it and/or modify it under the terms
// of the GNU Affero General Public License (AGPL) version 3 as published by the Free Software
// Foundation. In accordance with Section 7(a) of the GNU AGPL its Section 15 shall be amended
// to the effect that Ascensio System SIA expressly excludes the warranty of non-infringement of
// any third-party rights.
//
// This program is distributed WITHOUT ANY WARRANTY, without even the implied warranty
// of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. For details, see
// the GNU AGPL at: http://www.gnu.org/licenses/agpl-3.0.html
//
// You can contact Ascensio System SIA at Lubanas st. 125a-25, Riga, Latvia, EU, LV-1021.
//
// The interactive user interfaces in modified source and object code versions of the Program must
// display Appropriate Legal Notices, as required under Section 5 of the GNU AGPL version 3.
//
// Pursuant to Section 7(b) of the License you must retain the original Product logo when
// distributing the program. Pursuant to Section 7(e) we decline to grant you any rights under
// trademark law for use of our trademarks.
//
// All the Product's GUI elements, including illustrations and icon sets, as well as technical writing
// content are licensed under the terms of the Creative Commons Attribution-ShareAlike 4.0
// International. See the License terms at http://creativecommons.org/licenses/by-sa/4.0/legalcode
using ConfigurationManager = System.Configuration.ConfigurationManager;
namespace ASC.Data.Backup;
[Scope]
public class DbBackupProvider : IBackupProvider
{
    public string Name => "databases";

    // Tables already written/restored during the current operation; shared across
    // databases so a table handled once (via a "ref" database) is not processed twice.
    private readonly List<string> _processedTables = new List<string>();
    private readonly DbHelper _dbHelper;
    private readonly TempStream _tempStream;

    public DbBackupProvider(DbHelper dbHelper, TempStream tempStream)
    {
        _dbHelper = dbHelper;
        _tempStream = tempStream;
    }

    public event EventHandler<ProgressChangedEventArgs> ProgressChanged;

    /// <summary>
    /// Backs up every configured database for the given tenant through <paramref name="writer"/>
    /// and returns one XML element per connection string describing what was written.
    /// </summary>
    public async Task<IEnumerable<XElement>> GetElements(int tenant, string[] configs, IDataWriteOperator writer)
    {
        _processedTables.Clear();
        var xml = new List<XElement>();
        var connectionKeys = new Dictionary<string, string>();
        foreach (var connectionString in GetConnectionStrings(configs))
        {
            // Databases sharing the same provider + connection string are saved only once;
            // duplicates get a "ref" attribute pointing at the first occurrence so restore
            // can resolve them to the same data.
            var node = new XElement(connectionString.Name);
            xml.Add(node);

            var connectionKey = connectionString.ProviderName + connectionString.ConnectionString;
            if (connectionKeys.TryGetValue(connectionKey, out var value))
            {
                node.Add(new XAttribute("ref", value));
            }
            else
            {
                connectionKeys.Add(connectionKey, connectionString.Name);
                node.Add(await BackupDatabase(tenant, connectionString, writer));
            }
        }

        return xml.AsEnumerable();
    }

    /// <summary>Restores every configured database from a previously written backup.</summary>
    public async Task LoadFromAsync(IEnumerable<XElement> elements, int tenant, string[] configs, IDataReadOperator reader)
    {
        _processedTables.Clear();
        foreach (var connectionString in GetConnectionStrings(configs))
        {
            await RestoreDatabaseAsync(connectionString, elements, reader);
        }
    }

    public IEnumerable<ConnectionStringSettings> GetConnectionStrings(string[] configs)
    {
        /* if (configs.Length == 0)
        {
        configs = new string[] { AppDomain.CurrentDomain.SetupInformation.ConfigurationFile };
        }
        var connectionStrings = new List<ConnectionStringSettings>();
        foreach (var config in configs)
        {
        connectionStrings.AddRange(GetConnectionStrings(GetConfiguration(config)));
        }
        return connectionStrings.GroupBy(cs => cs.Name).Select(g => g.First());*/

        // The config-based lookup above is disabled. Return an empty sequence instead of
        // null so GetElements/LoadFromAsync (which foreach over this result) do not throw
        // a NullReferenceException.
        return Enumerable.Empty<ConnectionStringSettings>();
    }

    public IEnumerable<ConnectionStringSettings> GetConnectionStrings(Configuration cfg)
    {
        var connectionStrings = new List<ConnectionStringSettings>();
        foreach (ConnectionStringSettings connectionString in cfg.ConnectionStrings.ConnectionStrings)
        {
            // Skip the machine-wide default and the read-only replica entries.
            if (connectionString.Name == "LocalSqlServer" || connectionString.Name == "readonly")
            {
                continue;
            }
            connectionStrings.Add(connectionString);
            if (connectionString.ConnectionString.Contains("|DataDirectory|"))
            {
                // Expand the |DataDirectory| placeholder relative to the config file location.
                connectionString.ConnectionString = connectionString.ConnectionString.Replace("|DataDirectory|", Path.GetDirectoryName(cfg.FilePath) + '\\');
            }
        }
        return connectionStrings;
    }

    private void OnProgressChanged(string status, int progress)
    {
        ProgressChanged?.Invoke(this, new ProgressChangedEventArgs(status, progress));
    }

    private Configuration GetConfiguration(string config)
    {
        // A path-like argument maps to a concrete .config file (or a directory holding
        // Web.config); otherwise the argument is treated as an executable name.
        if (config.Contains(Path.DirectorySeparatorChar) && !Uri.IsWellFormedUriString(config, UriKind.Relative))
        {
            var map = new ExeConfigurationFileMap
            {
                ExeConfigFilename = string.Equals(Path.GetExtension(config), ".config", StringComparison.OrdinalIgnoreCase) ? config : CrossPlatform.PathCombine(config, "Web.config")
            };
            return ConfigurationManager.OpenMappedExeConfiguration(map, ConfigurationUserLevel.None);
        }
        return ConfigurationManager.OpenExeConfiguration(config);
    }

    /// <summary>
    /// Writes every backup-eligible table of one database to <paramref name="writer"/> and
    /// returns one XML element per table saved.
    /// </summary>
    private async Task<List<XElement>> BackupDatabase(int tenant, ConnectionStringSettings connectionString, IDataWriteOperator writer)
    {
        var xml = new List<XElement>();
        var errors = 0;
        var timeout = TimeSpan.FromSeconds(1);
        var tables = _dbHelper.GetTables();
        for (var i = 0; i < tables.Count; i++)
        {
            var table = tables[i];
            OnProgressChanged(table, (int)(i / (double)tables.Count * 100));

            if (_processedTables.Contains(table, StringComparer.InvariantCultureIgnoreCase))
            {
                continue;
            }

            xml.Add(new XElement(table));

            // Reading a table can fail transiently (locks, timeouts); retry up to 20 times
            // with a short pause before giving up.
            DataTable dataTable;
            while (true)
            {
                try
                {
                    dataTable = _dbHelper.GetTable(table, tenant);
                    break;
                }
                catch
                {
                    errors++;
                    if (20 < errors)
                    {
                        throw;
                    }
                    // Was Thread.Sleep: blocking inside an async method ties up a
                    // thread-pool thread; Task.Delay yields it instead.
                    await Task.Delay(timeout);
                }
            }

            foreach (DataColumn c in dataTable.Columns)
            {
                if (c.DataType == typeof(DateTime))
                {
                    // Serialize timestamps as stored, without a time-zone conversion.
                    c.DateTimeMode = DataSetDateTime.Unspecified;
                }
            }

            await using (var file = _tempStream.Create())
            {
                dataTable.WriteXml(file, XmlWriteMode.WriteSchema);
                // Entry key format must stay in sync with RestoreDatabaseAsync below.
                await writer.WriteEntryAsync($"{Name}\\{connectionString.Name}\\{table}".ToLower(), file);
            }

            _processedTables.Add(table);
        }
        return xml;
    }

    /// <summary>Restores one database's tables from the backup entries read by <paramref name="reader"/>.</summary>
    private async Task RestoreDatabaseAsync(ConnectionStringSettings connectionString, IEnumerable<XElement> elements, IDataReadOperator reader)
    {
        var dbName = connectionString.Name;
        var dbElement = elements.SingleOrDefault(e => string.Equals(e.Name.LocalName, connectionString.Name, StringComparison.OrdinalIgnoreCase));
        if (dbElement != null && dbElement.Attribute("ref") != null)
        {
            // This database was stored as a reference to another one; follow the "ref".
            dbName = dbElement.Attribute("ref").Value;
            dbElement = elements.Single(e => string.Equals(e.Name.LocalName, dbElement.Attribute("ref").Value, StringComparison.OrdinalIgnoreCase));
        }
        if (dbElement == null)
        {
            return;
        }

        var tables = _dbHelper.GetTables();
        for (var i = 0; i < tables.Count; i++)
        {
            var table = tables[i];
            OnProgressChanged(table, (int)(i / (double)tables.Count * 100));

            if (_processedTables.Contains(table, StringComparer.InvariantCultureIgnoreCase))
            {
                continue;
            }

            if (dbElement.Element(table) != null)
            {
                await using (var stream = reader.GetEntry($"{Name}\\{dbName}\\{table}".ToLower()))
                {
                    var data = new DataTable();
                    data.ReadXml(stream);
                    await _dbHelper.SetTableAsync(data);
                }
                _processedTables.Add(table);
            }
        }
    }
}

View File

@ -1,310 +0,0 @@
// (c) Copyright Ascensio System SIA 2010-2022
//
// This program is a free software product.
// You can redistribute it and/or modify it under the terms
// of the GNU Affero General Public License (AGPL) version 3 as published by the Free Software
// Foundation. In accordance with Section 7(a) of the GNU AGPL its Section 15 shall be amended
// to the effect that Ascensio System SIA expressly excludes the warranty of non-infringement of
// any third-party rights.
//
// This program is distributed WITHOUT ANY WARRANTY, without even the implied warranty
// of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. For details, see
// the GNU AGPL at: http://www.gnu.org/licenses/agpl-3.0.html
//
// You can contact Ascensio System SIA at Lubanas st. 125a-25, Riga, Latvia, EU, LV-1021.
//
// The interactive user interfaces in modified source and object code versions of the Program must
// display Appropriate Legal Notices, as required under Section 5 of the GNU AGPL version 3.
//
// Pursuant to Section 7(b) of the License you must retain the original Product logo when
// distributing the program. Pursuant to Section 7(e) we decline to grant you any rights under
// trademark law for use of our trademarks.
//
// All the Product's GUI elements, including illustrations and icon sets, as well as technical writing
// content are licensed under the terms of the Creative Commons Attribution-ShareAlike 4.0
// International. See the License terms at http://creativecommons.org/licenses/by-sa/4.0/legalcode
namespace ASC.Data.Backup;
[Scope]
public class DbHelper : IDisposable
{
    private readonly DbProviderFactory _factory;
    private readonly DbConnection _connect;
    private readonly DbCommandBuilder _builder;
    private readonly DataTable _columns;
    private readonly bool _mysql;
    private readonly ILogger<DbHelper> _logger;
    private readonly TenantDbContext _tenantDbContext;
    private readonly CoreDbContext _coreDbContext;

    // Per-table WHERE/JOIN fragments for tables whose tenant filter cannot be derived
    // from a simple "tenant*" column; {0} is replaced with the tenant id (see GetWhere).
    private readonly IDictionary<string, string> _whereExceptions
        = new Dictionary<string, string>();

    public DbHelper(
        ILogger<DbHelper> logger,
        ConnectionStringSettings connectionString,
        IDbContextFactory<TenantDbContext> tenantDbContext,
        IDbContextFactory<CoreDbContext> coreDbContext)
    {
        _logger = logger;
        // Contexts created from a factory are owned by this instance (disposed in Dispose).
        _tenantDbContext = tenantDbContext.CreateDbContext();
        _coreDbContext = coreDbContext.CreateDbContext();

        var file = connectionString.ElementInformation.Source;
        if ("web.connections.config".Equals(Path.GetFileName(file), StringComparison.InvariantCultureIgnoreCase))
        {
            file = CrossPlatform.PathCombine(Path.GetDirectoryName(file), "Web.config");
        }

        // Resolve the ADO.NET provider factory declared in the config for this
        // connection string, then open the connection.
        var xconfig = XDocument.Load(file);
        var provider = xconfig.XPathSelectElement("/configuration/system.data/DbProviderFactories/add[@invariant='" + connectionString.ProviderName + "']");
        _factory = (DbProviderFactory)Activator.CreateInstance(Type.GetType(provider.Attribute("type").Value, true));
        _builder = _factory.CreateCommandBuilder();
        _connect = _factory.CreateConnection();
        _connect.ConnectionString = connectionString.ConnectionString;
        _connect.Open();

        _mysql = connectionString.ProviderName.Contains("mysql", StringComparison.OrdinalIgnoreCase);
        if (_mysql)
        {
            // NO_AUTO_VALUE_ON_ZERO lets restore insert explicit zero values into
            // auto-increment columns instead of generating new ids.
            CreateCommand("set @@session.sql_mode = concat(@@session.sql_mode, ',NO_AUTO_VALUE_ON_ZERO')").ExecuteNonQuery();
        }

        _columns = _connect.GetSchema("Columns");

        _whereExceptions["calendar_calendar_item"] = " where calendar_id in (select id from calendar_calendars where tenant = {0}) ";
        _whereExceptions["calendar_calendar_user"] = " where calendar_id in (select id from calendar_calendars where tenant = {0}) ";
        _whereExceptions["calendar_event_item"] = " inner join calendar_events on calendar_event_item.event_id = calendar_events.id where calendar_events.tenant = {0} ";
        _whereExceptions["calendar_event_user"] = " inner join calendar_events on calendar_event_user.event_id = calendar_events.id where calendar_events.tenant = {0} ";
        _whereExceptions["crm_entity_contact"] = " inner join crm_contact on crm_entity_contact.contact_id = crm_contact.id where crm_contact.tenant_id = {0} ";
        _whereExceptions["crm_entity_tag"] = " inner join crm_tag on crm_entity_tag.tag_id = crm_tag.id where crm_tag.tenant_id = {0} ";
        _whereExceptions["files_folder_tree"] = " inner join files_folder on folder_id = id where tenant_id = {0} ";
        _whereExceptions["forum_answer_variant"] = " where answer_id in (select id from forum_answer where tenantid = {0})";
        _whereExceptions["forum_topic_tag"] = " where topic_id in (select id from forum_topic where tenantid = {0})";
        _whereExceptions["forum_variant"] = " where question_id in (select id from forum_question where tenantid = {0})";
        _whereExceptions["projects_project_participant"] = " inner join projects_projects on projects_project_participant.project_id = projects_projects.id where projects_projects.tenant_id = {0} ";
        _whereExceptions["projects_following_project_participant"] = " inner join projects_projects on projects_following_project_participant.project_id = projects_projects.id where projects_projects.tenant_id = {0} ";
        _whereExceptions["projects_project_tag"] = " inner join projects_projects on projects_project_tag.project_id = projects_projects.id where projects_projects.tenant_id = {0} ";
        _whereExceptions["tenants_tenants"] = " where id = {0}";
        _whereExceptions["core_acl"] = " where tenant = {0} or tenant = -1";
        _whereExceptions["core_subscription"] = " where tenant = {0} or tenant = -1";
        _whereExceptions["core_subscriptionmethod"] = " where tenant = {0} or tenant = -1";
    }

    /// <summary>
    /// Returns the backup-eligible table names: tables with a known module prefix,
    /// excluding volatile/system tables that must never be transferred.
    /// </summary>
    public List<string> GetTables()
    {
        var allowTables = new List<string>
        {
            "blogs_",
            "bookmarking_",
            "calendar_",
            "core_",
            "crm_",
            "events_",
            "files_",
            "forum_",
            "photo_",
            "projects_",
            "tenants_",
            "webstudio_",
            "wiki_",
        };

        var disallowTables = new List<string>
        {
            "core_settings",
            "webstudio_uservisit",
            "webstudio_useractivity",
            "tenants_forbiden",
        };

        IEnumerable<string> tables;
        if (_mysql)
        {
            tables = ExecuteList(CreateCommand("show tables"));
        }
        else
        {
            tables = _connect
                .GetSchema("Tables")
                .Select(@"TABLE_TYPE <> 'SYSTEM_TABLE'")
                .Select(row => (string)row["TABLE_NAME"]);
        }

        return tables
            .Where(t => allowTables.Any(a => t.StartsWith(a)) && !disallowTables.Any(d => t.StartsWith(d)))
            .ToList();
    }

    /// <summary>Reads one table's rows for the given tenant (tenant == -1 means all rows).</summary>
    public DataTable GetTable(string table, int tenant)
    {
        try
        {
            var dataTable = new DataTable(table);
            var adapter = _factory.CreateDataAdapter();
            adapter.SelectCommand = CreateCommand("select " + Quote(table) + ".* from " + Quote(table) + GetWhere(table, tenant));
            _logger.Debug(adapter.SelectCommand.CommandText);
            adapter.Fill(dataTable);
            return dataTable;
        }
        catch (Exception error)
        {
            _logger.ErrorTableString(table, error);
            throw;
        }
    }

    /// <summary>
    /// Writes a restored table back with "replace into", inside a single transaction.
    /// Failures are logged, not rethrown: the restore is best-effort per table.
    /// </summary>
    public async Task SetTableAsync(DataTable table)
    {
        await using var tx = _connect.BeginTransaction();
        try
        {
            if ("tenants_tenants".Equals(table.TableName, StringComparison.InvariantCultureIgnoreCase))
            {
                // Remove the last tenant so the restored tenant can take its place.
                var tenant = await Queries.LastTenantAsync(_tenantDbContext);
                if (tenant != null)
                {
                    _tenantDbContext.Tenants.Remove(tenant);
                    await _tenantDbContext.SaveChangesAsync();
                }
                /* var tenantid = CreateCommand("select id from tenants_tenants order by id desc limit 1").ExecuteScalar();
                CreateCommand("delete from tenants_tenants where id = " + tenantid).ExecuteNonQuery();*/

                if (table.Columns.Contains("mappeddomain"))
                {
                    foreach (var r in table.Rows.Cast<DataRow>())
                    {
                        // Mapped domains are environment-specific and must not survive a restore.
                        r[table.Columns["mappeddomain"]] = null;
                        if (table.Columns.Contains("id"))
                        {
                            // Re-point the removed tenant's tariff at the restored tenant id.
                            // NOTE(review): assumes a tenant was found above — confirm `tenant`
                            // cannot be null when tenant rows are being restored.
                            var tariff = await Queries.TariffAsync(_coreDbContext, tenant.Id);
                            tariff.TenantId = (int)r[table.Columns["id"]];
                            tariff.CreateOn = DateTime.Now;
                            // CreateCommand("update tenants_tariff set tenant = " + r[table.Columns["id"]] + " where tenant = " + tenantid).ExecuteNonQuery();
                            _coreDbContext.Entry(tariff).State = EntityState.Modified;
                            await _coreDbContext.SaveChangesAsync();
                        }
                    }
                }
            }

            // Build "replace into <table>(c1, c2, ...) values (@c1, @c2, ...)" over the
            // columns present both in the live schema and in the incoming DataTable.
            var sql = new StringBuilder("replace into " + Quote(table.TableName) + "(");
            var tableColumns = GetColumnsFrom(table.TableName)
                .Intersect(table.Columns.Cast<DataColumn>().Select(c => c.ColumnName), StringComparer.InvariantCultureIgnoreCase)
                .ToList();
            tableColumns.ForEach(column => sql.Append($"{Quote(column)}, "));
            sql.Replace(", ", ") values (", sql.Length - 2, 2);

            var insert = _connect.CreateCommand();
            // Enlist the command in the open transaction; some providers refuse to execute
            // a command on a connection with an active transaction otherwise.
            insert.Transaction = tx;
            tableColumns.ForEach(column =>
            {
                sql.Append($"@{column}, ");
                var p = insert.CreateParameter();
                p.ParameterName = "@" + column;
                insert.Parameters.Add(p);
            });
            sql.Replace(", ", ")", sql.Length - 2, 2);
            insert.CommandText = sql.ToString();

            foreach (var r in table.Rows.Cast<DataRow>())
            {
                foreach (var c in tableColumns)
                {
                    ((IDbDataParameter)insert.Parameters["@" + c]).Value = r[c];
                }
                insert.ExecuteNonQuery();
            }

            tx.Commit();
        }
        catch (Exception e)
        {
            // Best-effort: log and continue with the next table. Disposing the
            // uncommitted transaction rolls this table's changes back.
            _logger.ErrorTable(table, e);
        }
    }

    public void Dispose()
    {
        _builder.Dispose();
        _connect.Dispose();
        // The contexts were created via IDbContextFactory.CreateDbContext in the
        // constructor, so this instance owns them; disposing here avoids leaking
        // their underlying connections.
        _tenantDbContext.Dispose();
        _coreDbContext.Dispose();
    }

    public DbCommand CreateCommand(string sql)
    {
        var command = _connect.CreateCommand();
        command.CommandText = sql;
        return command;
    }

    /// <summary>Executes a query and returns the first column of every row as strings.</summary>
    public List<string> ExecuteList(DbCommand command)
    {
        var list = new List<string>();
        using (var result = command.ExecuteReader())
        {
            while (result.Read())
            {
                list.Add(result.GetString(0));
            }
        }
        return list;
    }

    private string Quote(string identifier)
    {
        // Placeholder for provider-specific identifier quoting; currently a no-op.
        return identifier;
    }

    private IEnumerable<string> GetColumnsFrom(string table)
    {
        if (_mysql)
        {
            return ExecuteList(CreateCommand("show columns from " + Quote(table)));
        }
        else
        {
            return _columns.Select($"TABLE_NAME = '{table}'")
                .Select(r => r["COLUMN_NAME"].ToString());
        }
    }

    /// <summary>
    /// Builds the tenant filter for a table: none for tenant == -1 (full dump), a
    /// hand-written fragment for the known exceptions, otherwise the first "tenant*"
    /// column; tables with no such column are excluded entirely ("where 1 = 0").
    /// </summary>
    private string GetWhere(string tableName, int tenant)
    {
        if (tenant == -1)
        {
            return string.Empty;
        }

        if (_whereExceptions.TryGetValue(tableName.ToLower(), out var exc))
        {
            return string.Format(exc, tenant);
        }

        var tenantColumn = GetColumnsFrom(tableName).FirstOrDefault(c => c.StartsWith("tenant", StringComparison.OrdinalIgnoreCase));
        return tenantColumn != null ?
            " where " + Quote(tenantColumn) + " = " + tenant :
            " where 1 = 0";
    }
}
// Precompiled EF Core queries used by DbHelper.SetTableAsync.
static file class Queries
{
    // EF Core requires an explicit ordering for Last/LastOrDefault; without OrderBy the
    // compiled query throws InvalidOperationException at runtime. Ordering by id matches
    // the original raw SQL ("select id from tenants_tenants order by id desc limit 1").
    public static readonly Func<TenantDbContext, Task<DbTenant>> LastTenantAsync =
        Microsoft.EntityFrameworkCore.EF.CompileAsyncQuery(
            (TenantDbContext ctx) =>
                ctx.Tenants.OrderBy(t => t.Id).LastOrDefault());

    // Tariff row of the given tenant, or null if none exists.
    public static readonly Func<CoreDbContext, int, Task<DbTariff>> TariffAsync =
        Microsoft.EntityFrameworkCore.EF.CompileAsyncQuery(
            (CoreDbContext ctx, int tenantId) =>
                ctx.Tariffs.FirstOrDefault(t => t.TenantId == tenantId));
}

View File

@ -38,20 +38,18 @@ global using System.Text.Json.Serialization;
global using System.Text.RegularExpressions;
global using System.Xml;
global using System.Xml.Linq;
global using System.Xml.XPath;
global using ASC.Api.Utils;
global using ASC.Common;
global using ASC.Common.Caching;
global using ASC.Common.Log;
global using ASC.Common.Threading;
global using ASC.Common.Threading;
global using ASC.Common.Utils;
global using ASC.Core;
global using ASC.Core.Billing;
global using ASC.Core.ChunkedUploader;
global using ASC.Core.Common.Configuration;
global using ASC.Core.Common.EF;
global using ASC.Core.Common.EF.Context;
global using ASC.Core.Common.EF.Model;
global using ASC.Core.Tenants;
global using ASC.Core.Users;
@ -72,7 +70,8 @@ global using ASC.Data.Backup.Utils;
global using ASC.Data.Storage;
global using ASC.Data.Storage.Configuration;
global using ASC.Data.Storage.DiscStorage;
global using ASC.Data.Storage.ZipOperators;
global using ASC.Data.Storage.S3;
global using ASC.Data.Storage.DataOperators;
global using ASC.EventBus.Events;
global using ASC.Files.Core;
global using ASC.MessagingSystem.Core;

View File

@ -222,7 +222,7 @@ public class BackupWorker
}
}
internal static string GetBackupHash(string path)
internal static string GetBackupHashSHA(string path)
{
using (var sha256 = SHA256.Create())
using (var fileStream = File.OpenRead(path))
@ -231,6 +231,43 @@ public class BackupWorker
var hash = sha256.ComputeHash(fileStream);
return BitConverter.ToString(hash).Replace("-", string.Empty);
}
}
/// <summary>
/// Computes an S3-style multipart checksum for a file: the MD5 of the concatenated
/// per-chunk MD5 digests, followed by "-&lt;number of chunks&gt;".
/// </summary>
internal static string GetBackupHashMD5(string path, long chunkSize)
{
    using (var md5 = MD5.Create())
    using (var fileStream = File.OpenRead(path))
    {
        var fullChunks = fileStream.Length / chunkSize;
        var tailSize = (int)(fileStream.Length - chunkSize * fullChunks);
        var chunkCount = 0;
        var digests = new List<byte>();

        // Hash each full-sized chunk and collect the digests in order.
        for (long index = 0; index < fullChunks; index++)
        {
            var part = GetChunk(fileStream, chunkSize * index, (int)chunkSize);
            digests.AddRange(md5.ComputeHash(part));
            chunkCount++;
        }

        // Hash the final, smaller chunk if the file size is not a multiple of chunkSize.
        if (tailSize != 0)
        {
            var part = GetChunk(fileStream, chunkSize * fullChunks, tailSize);
            digests.AddRange(md5.ComputeHash(part));
            chunkCount++;
        }

        var combined = BitConverter.ToString(md5.ComputeHash(digests.ToArray())).Replace("-", string.Empty);
        return combined + "-" + chunkCount;
    }
}
/// <summary>
/// Reads <paramref name="count"/> bytes from <paramref name="sourceStream"/> starting at
/// <paramref name="offset"/>. The returned buffer is always <paramref name="count"/> long;
/// bytes past end-of-stream remain zero.
/// </summary>
private static byte[] GetChunk(Stream sourceStream, long offset, int count)
{
    var buffer = new byte[count];
    sourceStream.Position = offset;
    // Stream.Read may return fewer bytes than requested; loop until the buffer is
    // full or end-of-stream. The original single Read could silently produce a
    // partially-filled (zero-padded) chunk and thus a wrong hash.
    var total = 0;
    int read;
    while (total < count && (read = sourceStream.Read(buffer, total, count - total)) > 0)
    {
        total += read;
    }
    return buffer;
}
private BackupProgress ToBackupProgress(BaseBackupProgressItem progressItem)

View File

@ -31,9 +31,7 @@ namespace ASC.Data.Backup.Services;
public class BackupProgressItem : BaseBackupProgressItem
{
public Dictionary<string, string> StorageParams { get; set; }
public string TempFolder { get; set; }
private const string ArchiveFormat = "tar.gz";
public string TempFolder { get; set; }
private bool _isScheduled;
private Guid _userId;
@ -97,16 +95,21 @@ public class BackupProgressItem : BaseBackupProgressItem
_tempStream = scope.ServiceProvider.GetService<TempStream>();
var dateTime = _coreBaseSettings.Standalone ? DateTime.Now : DateTime.UtcNow;
var backupName = string.Format("{0}_{1:yyyy-MM-dd_HH-mm-ss}.{2}", (await _tenantManager.GetTenantAsync(TenantId)).Alias, dateTime, ArchiveFormat);
var tempFile = CrossPlatform.PathCombine(TempFolder, backupName);
var storagePath = tempFile;
string hash;
var tempFile = "";
var storagePath = "";
try
{
var backupStorage = await _backupStorageFactory.GetBackupStorageAsync(_storageType, TenantId, StorageParams);
var writer = await ZipWriteOperatorFactory.GetWriteOperatorAsync(_tempStream, _storageBasePath, backupName, TempFolder, _userId, backupStorage as IGetterWriteOperator);
var getter = backupStorage as IGetterWriteOperator;
var backupName = string.Format("{0}_{1:yyyy-MM-dd_HH-mm-ss}.{2}", (await _tenantManager.GetTenantAsync(TenantId)).Alias, dateTime, await getter.GetBackupExtensionAsync(_storageBasePath));
tempFile = CrossPlatform.PathCombine(TempFolder, backupName);
storagePath = tempFile;
var writer = await DataOperatorFactory.GetWriteOperatorAsync(_tempStream, _storageBasePath, backupName, TempFolder, _userId, getter);
_backupPortalTask.Init(TenantId, tempFile, _limit, writer);
@ -121,7 +124,7 @@ public class BackupProgressItem : BaseBackupProgressItem
if (writer.NeedUpload)
{
storagePath = await backupStorage.UploadAsync(_storageBasePath, tempFile, _userId);
hash = BackupWorker.GetBackupHash(tempFile);
hash = BackupWorker.GetBackupHashSHA(tempFile);
}
else
{

View File

@ -47,8 +47,8 @@
* in every copy of the program you distribute.
* Pursuant to Section 7 § 3(e) we decline to grant you any rights under trademark law for use of our trademarks.
*
*/
*/
namespace ASC.Data.Backup.Services;
[Transient(Additional = typeof(RestoreProgressItemExtention))]
@ -82,7 +82,7 @@ public class RestoreProgressItem : BaseBackupProgressItem
_notifyHelper = notifyHelper;
_coreBaseSettings = coreBaseSettings;
BackupProgressItemEnum = BackupProgressItemEnum.Restore;
BackupProgressItemEnum = BackupProgressItemEnum.Restore;
}
public BackupStorageType StorageType { get; set; }
@ -106,9 +106,7 @@ public class RestoreProgressItem : BaseBackupProgressItem
protected override async Task DoJob()
{
Tenant tenant = null;
var tempFile = PathHelper.GetTempFileName(TempFolder);
var tempFile = "";
try
{
await using var scope = _serviceScopeProvider.CreateAsyncScope();
@ -127,16 +125,21 @@ public class RestoreProgressItem : BaseBackupProgressItem
var storage = await _backupStorageFactory.GetBackupStorageAsync(StorageType, TenantId, StorageParams);
await storage.DownloadAsync(StoragePath, tempFile);
tempFile = await storage.DownloadAsync(StoragePath, TempFolder);
if (!_coreBaseSettings.Standalone)
{
var backupHash = BackupWorker.GetBackupHash(tempFile);
var record = await _backupRepository.GetBackupRecordAsync(backupHash, TenantId);
var shaHash = BackupWorker.GetBackupHashSHA(tempFile);
var record = await _backupRepository.GetBackupRecordAsync(shaHash, TenantId);
if (record == null)
{
throw new Exception(BackupResource.BackupNotFound);
{
var md5Hash = BackupWorker.GetBackupHashMD5(tempFile, S3Storage.ChunkSize);
record = await _backupRepository.GetBackupRecordAsync(md5Hash, TenantId);
if (record == null)
{
throw new Exception(BackupResource.BackupNotFound);
}
}
}

View File

@ -73,11 +73,13 @@ public class ConsumerBackupStorage : IBackupStorage, IGetterWriteOperator
return storagePath;
}
public async Task DownloadAsync(string storagePath, string targetLocalPath)
public async Task<string> DownloadAsync(string storagePath, string targetLocalPath)
{
var tempPath = Path.Combine(targetLocalPath, Path.GetFileName(storagePath));
await using var source = await _store.GetReadStreamAsync(Domain, storagePath);
await using var destination = File.OpenWrite(targetLocalPath);
await using var destination = File.OpenWrite(tempPath);
await source.CopyToAsync(destination);
return tempPath;
}
public async Task DeleteAsync(string storagePath)
@ -119,6 +121,11 @@ public class ConsumerBackupStorage : IBackupStorage, IGetterWriteOperator
TempPath = title,
UploadId = await _store.InitiateChunkedUploadAsync(Domain, title)
};
return _store.CreateDataWriteOperator(session, _sessionHolder);
return _store.CreateDataWriteOperator(session, _sessionHolder, true);
}
public Task<string> GetBackupExtensionAsync(string storageBasePath)
{
return Task.FromResult(_store.GetBackupExtension(true));
}
}

View File

@ -85,18 +85,16 @@ public class DocumentsBackupStorage : IBackupStorage, IGetterWriteOperator
return await Upload(folderId, localPath);
}
public async Task DownloadAsync(string fileId, string targetLocalPath)
public async Task<string> DownloadAsync(string fileId, string targetLocalPath)
{
await _tenantManager.SetCurrentTenantAsync(_tenantId);
if (int.TryParse(fileId, out var fId))
{
await DownloadDaoAsync(fId, targetLocalPath);
return;
return await DownloadDaoAsync(fId, targetLocalPath);
}
await DownloadDaoAsync(fileId, targetLocalPath);
return await DownloadDaoAsync(fileId, targetLocalPath);
}
public async Task DeleteAsync(string fileId)
@ -166,7 +164,7 @@ public class DocumentsBackupStorage : IBackupStorage, IGetterWriteOperator
return file.Id;
}
private async Task DownloadDaoAsync<T>(T fileId, string targetLocalPath)
private async Task<string> DownloadDaoAsync<T>(T fileId, string targetLocalPath)
{
await _tenantManager.SetCurrentTenantAsync(_tenantId);
var fileDao = await GetFileDaoAsync<T>();
@ -177,8 +175,10 @@ public class DocumentsBackupStorage : IBackupStorage, IGetterWriteOperator
}
await using var source = await fileDao.GetFileStreamAsync(file);
await using var destination = File.OpenWrite(targetLocalPath);
var destPath = Path.Combine(targetLocalPath, file.Title);
await using var destination = File.OpenWrite(destPath);
await source.CopyToAsync(destination);
return destPath;
}
private async Task DeleteDaoAsync<T>(T fileId)
@ -192,7 +192,6 @@ public class DocumentsBackupStorage : IBackupStorage, IGetterWriteOperator
var fileDao = await GetFileDaoAsync<T>();
try
{
var file = await fileDao.GetFileAsync(fileId);
return file != null && file.RootFolderType != FolderType.TRASH;
@ -229,6 +228,21 @@ public class DocumentsBackupStorage : IBackupStorage, IGetterWriteOperator
}
}
public async Task<string> GetBackupExtensionAsync(string storageBasePath)
{
await _tenantManager.SetCurrentTenantAsync(_tenantId);
if (int.TryParse(storageBasePath, out var fId))
{
var folderDao = GetFolderDao<int>();
return await folderDao.GetBackupExtensionAsync(fId);
}
else
{
var folderDao = GetFolderDao<string>();
return await folderDao.GetBackupExtensionAsync(storageBasePath);
}
}
private async Task<CommonChunkedUploadSession> InitUploadChunkAsync<T>(T folderId, string title)
{
var folderDao = GetFolderDao<T>();

View File

@ -32,5 +32,5 @@ public interface IBackupStorage
Task<string> GetPublicLinkAsync(string storagePath);
Task<string> UploadAsync(string storageBasePath, string localPath, Guid userId);
Task DeleteAsync(string storagePath);
Task DownloadAsync(string storagePath, string targetLocalPath);
Task<string> DownloadAsync(string storagePath, string targetLocalPath);
}

View File

@ -45,10 +45,11 @@ public class LocalBackupStorage : IBackupStorage, IGetterWriteOperator
return Task.FromResult(storagePath);
}
public Task DownloadAsync(string storagePath, string targetLocalPath)
public Task<string> DownloadAsync(string storagePath, string targetLocalPath)
{
var tempPath = Path.Combine(storagePath, Path.GetFileName(targetLocalPath));
File.Copy(storagePath, targetLocalPath, true);
return Task.CompletedTask;
return Task.FromResult(tempPath);
}
public Task DeleteAsync(string storagePath)
@ -71,4 +72,9 @@ public class LocalBackupStorage : IBackupStorage, IGetterWriteOperator
{
return Task.FromResult<IDataWriteOperator>(null);
}
public Task<string> GetBackupExtensionAsync(string storageBasePath)
{
return Task.FromResult("tar.gz");
}
}

View File

@ -710,17 +710,13 @@ public class BackupPortalTask : PortalTaskBase
foreach (var file in group)
{
var storage = await StorageFactory.GetStorageAsync(TenantId, group.Key);
var file1 = file;
Stream fileStream = null;
await ActionInvoker.Try(async state =>
try
{
var f = (BackupFileInfo)state;
fileStream = await storage.GetReadStreamAsync(f.Domain, f.Path);
}, file, 5, error => _logger.WarningCanNotBackupFile(file1.Module, file1.Path, error));
if(fileStream != null)
await writer.WriteEntryAsync(file.GetZipKey(), file.Domain, file.Path, storage);
}
catch(Exception error)
{
await writer.WriteEntryAsync(file1.GetZipKey(), fileStream);
fileStream.Dispose();
_logger.WarningCanNotBackupFile(file.Module, file.Path, error);
}
SetCurrentStepProgress((int)(++filesProcessed * 100 / (double)filesCount));
}

View File

@ -94,7 +94,7 @@ public class DeletePortalTask : PortalTaskBase
var domains = StorageFactoryConfig.GetDomainList(module);
foreach (var domain in domains)
{
await ActionInvoker.Try(async state => await storage.DeleteFilesAsync((string)state, "\\", "*.*", true), domain, 5,
await ActionInvoker.TryAsync(async state => await storage.DeleteFilesAsync((string)state, "\\", "*.*", true), domain, 5,
onFailure: error => _logger.WarningCanNotDeleteFilesForDomain(domain, error));
}
await storage.DeleteFilesAsync("\\", "*.*", true);

View File

@ -90,7 +90,7 @@ public class RestorePortalTask : PortalTaskBase
_options.DebugBeginRestoreData();
using (var dataReader = new ZipReadOperator(BackupFilePath))
using (var dataReader = DataOperatorFactory.GetReadOperator(BackupFilePath))
{
await using (var entry = dataReader.GetEntry(KeyHelper.GetDumpKey()))
{
@ -421,7 +421,7 @@ public class RestorePortalTask : PortalTaskBase
foreach (var domain in domains)
{
await ActionInvoker.Try(
await ActionInvoker.TryAsync(
async state =>
{
if (await storage.IsDirectoryAsync((string)state))

View File

@ -96,7 +96,7 @@ public class TransferPortalTask : PortalTaskBase
//save db data to temporary file
var backupTask = _serviceProvider.GetService<BackupPortalTask>();
backupTask.Init(TenantId, backupFilePath, Limit, ZipWriteOperatorFactory.GetDefaultWriteOperator(_tempStream, backupFilePath));
backupTask.Init(TenantId, backupFilePath, Limit, DataOperatorFactory.GetDefaultWriteOperator(_tempStream, backupFilePath));
backupTask.ProcessStorage = false;
backupTask.ProgressChanged += (sender, args) => SetCurrentStepProgress(args.Progress);
foreach (var moduleName in _ignoredModules)

View File

@ -207,11 +207,16 @@ public abstract class BaseStorage : IDataStore
public virtual IDataWriteOperator CreateDataWriteOperator(
CommonChunkedUploadSession chunkedUploadSession,
CommonChunkedUploadSessionHolder sessionHolder)
CommonChunkedUploadSessionHolder sessionHolder, bool isConsumerStorage = false)
{
return new ChunkZipWriteOperator(_tempStream, chunkedUploadSession, sessionHolder);
}
/// <summary>
/// Returns the file extension used for backup archives produced by this storage.
/// Derived storages may override this to emit a different archive format.
/// </summary>
/// <param name="isConsumerStorage">Whether the target is a consumer (third-party) storage.</param>
public virtual string GetBackupExtension(bool isConsumerStorage = false) => "tar.gz";
#endregion
public abstract Task DeleteAsync(string domain, string path);

View File

@ -32,8 +32,9 @@ public class CommonChunkedUploadSessionHolder
public static readonly TimeSpan SlidingExpiration = TimeSpan.FromHours(12);
private readonly TempPath _tempPath;
private readonly string _domain;
public readonly string Domain;
public long MaxChunkUploadSize;
public string TempDomain;
public const string StoragePath = "sessions";
private readonly object _locker = new object();
@ -46,24 +47,24 @@ public class CommonChunkedUploadSessionHolder
{
_tempPath = tempPath;
DataStore = dataStore;
_domain = domain;
Domain = domain;
MaxChunkUploadSize = maxChunkUploadSize;
}
public async Task StoreAsync(CommonChunkedUploadSession s)
{
await using var stream = s.Serialize();
await DataStore.SavePrivateAsync(_domain, GetPathWithId(s.Id), stream, s.Expired);
await DataStore.SavePrivateAsync(Domain, GetPathWithId(s.Id), stream, s.Expired);
}
public async Task RemoveAsync(CommonChunkedUploadSession s)
{
await DataStore.DeleteAsync(_domain, GetPathWithId(s.Id));
await DataStore.DeleteAsync(Domain, GetPathWithId(s.Id));
}
public async Task<Stream> GetStreamAsync(string sessionId)
{
return await DataStore.GetReadStreamAsync(_domain, GetPathWithId(sessionId));
return await DataStore.GetReadStreamAsync(Domain, GetPathWithId(sessionId));
}
public async ValueTask InitAsync(CommonChunkedUploadSession chunkedUploadSession)
@ -75,7 +76,7 @@ public class CommonChunkedUploadSessionHolder
}
var tempPath = Guid.NewGuid().ToString();
var uploadId = await DataStore.InitiateChunkedUploadAsync(_domain, tempPath);
var uploadId = await DataStore.InitiateChunkedUploadAsync(Domain, tempPath);
chunkedUploadSession.TempPath = tempPath;
chunkedUploadSession.UploadId = uploadId;
@ -87,13 +88,13 @@ public class CommonChunkedUploadSessionHolder
var uploadId = uploadSession.UploadId;
var eTags = uploadSession.GetItemOrDefault<Dictionary<int, string>>("ETag");
await DataStore.FinalizeChunkedUploadAsync(_domain, tempPath, uploadId, eTags);
await DataStore.FinalizeChunkedUploadAsync(Domain, tempPath, uploadId, eTags);
return Path.GetFileName(tempPath);
}
public async Task MoveAsync(CommonChunkedUploadSession chunkedUploadSession, string newPath, bool quotaCheckFileSize = true)
{
await DataStore.MoveAsync(_domain, chunkedUploadSession.TempPath, string.Empty, newPath, quotaCheckFileSize);
await DataStore.MoveAsync(Domain, chunkedUploadSession.TempPath, string.Empty, newPath, quotaCheckFileSize);
}
public async Task AbortAsync(CommonChunkedUploadSession uploadSession)
@ -103,7 +104,7 @@ public class CommonChunkedUploadSessionHolder
var tempPath = uploadSession.TempPath;
var uploadId = uploadSession.UploadId;
await DataStore.AbortChunkedUploadAsync(_domain, tempPath, uploadId);
await DataStore.AbortChunkedUploadAsync(Domain, tempPath, uploadId);
}
else if (!string.IsNullOrEmpty(uploadSession.ChunksBuffer))
{
@ -125,7 +126,7 @@ public class CommonChunkedUploadSessionHolder
uploadSession.BytesUploaded += length;
}
var eTag = await DataStore.UploadChunkAsync(_domain, tempPath, uploadId, stream, MaxChunkUploadSize, chunkNumber, length);
var eTag = await DataStore.UploadChunkAsync(Domain, tempPath, uploadId, stream, MaxChunkUploadSize, chunkNumber, length);
lock (_locker)
{

View File

@ -1,30 +1,30 @@
// (c) Copyright Ascensio System SIA 2010-2022
//
// This program is a free software product.
// You can redistribute it and/or modify it under the terms
// of the GNU Affero General Public License (AGPL) version 3 as published by the Free Software
// Foundation. In accordance with Section 7(a) of the GNU AGPL its Section 15 shall be amended
// to the effect that Ascensio System SIA expressly excludes the warranty of non-infringement of
// any third-party rights.
//
// This program is distributed WITHOUT ANY WARRANTY, without even the implied warranty
// of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. For details, see
// the GNU AGPL at: http://www.gnu.org/licenses/agpl-3.0.html
//
// You can contact Ascensio System SIA at Lubanas st. 125a-25, Riga, Latvia, EU, LV-1021.
//
// The interactive user interfaces in modified source and object code versions of the Program must
// display Appropriate Legal Notices, as required under Section 5 of the GNU AGPL version 3.
//
// Pursuant to Section 7(b) of the License you must retain the original Product logo when
// distributing the program. Pursuant to Section 7(e) we decline to grant you any rights under
// trademark law for use of our trademarks.
//
// All the Product's GUI elements, including illustrations and icon sets, as well as technical writing
// content are licensed under the terms of the Creative Commons Attribution-ShareAlike 4.0
// International. See the License terms at http://creativecommons.org/licenses/by-sa/4.0/legalcode
namespace ASC.Data.Backup;
// (c) Copyright Ascensio System SIA 2010-2022
//
// This program is a free software product.
// You can redistribute it and/or modify it under the terms
// of the GNU Affero General Public License (AGPL) version 3 as published by the Free Software
// Foundation. In accordance with Section 7(a) of the GNU AGPL its Section 15 shall be amended
// to the effect that Ascensio System SIA expressly excludes the warranty of non-infringement of
// any third-party rights.
//
// This program is distributed WITHOUT ANY WARRANTY, without even the implied warranty
// of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. For details, see
// the GNU AGPL at: http://www.gnu.org/licenses/agpl-3.0.html
//
// You can contact Ascensio System SIA at Lubanas st. 125a-25, Riga, Latvia, EU, LV-1021.
//
// The interactive user interfaces in modified source and object code versions of the Program must
// display Appropriate Legal Notices, as required under Section 5 of the GNU AGPL version 3.
//
// Pursuant to Section 7(b) of the License you must retain the original Product logo when
// distributing the program. Pursuant to Section 7(e) we decline to grant you any rights under
// trademark law for use of our trademarks.
//
// All the Product's GUI elements, including illustrations and icon sets, as well as technical writing
// content are licensed under the terms of the Creative Commons Attribution-ShareAlike 4.0
// International. See the License terms at http://creativecommons.org/licenses/by-sa/4.0/legalcode
namespace ASC.Data.Storage.DataOperators;
public static class ActionInvoker
{
@ -47,7 +47,7 @@ public static class ActionInvoker
Action<Exception> onAttemptFailure = null,
int sleepMs = 1000,
bool isSleepExponential = true)
{
{
ArgumentNullException.ThrowIfNull(action);
var countAttempts = 0;
@ -77,7 +77,18 @@ public static class ActionInvoker
}
}
public static async Task Try(
/// <summary>
/// Retries the given async <paramref name="action"/> up to <paramref name="maxAttempts"/> times.
/// Convenience overload for actions that take no state object; delegates to the stateful
/// overload with a null state.
/// </summary>
/// <param name="action">The async operation to attempt.</param>
/// <param name="maxAttempts">Maximum number of attempts before giving up.</param>
/// <param name="onFailure">Invoked with the last exception once all attempts are exhausted.</param>
/// <param name="onAttemptFailure">Invoked with the exception of each failed attempt.</param>
/// <param name="sleepMs">Base delay between attempts, in milliseconds.</param>
/// <param name="isSleepExponential">When true, the delay grows with each attempt.</param>
public static async Task TryAsync(
Func<Task> action,
int maxAttempts,
Action<Exception> onFailure = null,
Action<Exception> onAttemptFailure = null,
int sleepMs = 1000,
bool isSleepExponential = true)
{
// The state argument is unused here; the lambda simply ignores it.
await TryAsync(state => action(), null, maxAttempts, onFailure, onAttemptFailure, sleepMs, isSleepExponential);
}
public static async Task TryAsync(
Func<object, Task> action,
object state,
int maxAttempts,
@ -85,7 +96,7 @@ public static class ActionInvoker
Action<Exception> onAttemptFailure = null,
int sleepMs = 1000,
bool isSleepExponential = true)
{
{
ArgumentNullException.ThrowIfNull(action);
var countAttempts = 0;

View File

@ -24,9 +24,9 @@
// content are licensed under the terms of the Creative Commons Attribution-ShareAlike 4.0
// International. See the License terms at http://creativecommons.org/licenses/by-sa/4.0/legalcode
namespace ASC.Data.Storage.ZipOperators;
namespace ASC.Data.Storage.DataOperators;
public static class ZipWriteOperatorFactory
public static class DataOperatorFactory
{
public static async Task<IDataWriteOperator> GetWriteOperatorAsync(TempStream tempStream, string storageBasePath, string title, string tempFolder, Guid userId, IGetterWriteOperator getter)
{
@ -39,5 +39,17 @@ public static class ZipWriteOperatorFactory
{
return new ZipWriteOperator(tempStream, backupFilePath);
}
/// <summary>
/// Picks the read operator matching the backup archive format:
/// ZipReadOperator for gzipped tars ("tar.gz"), TarReadOperator for plain tars.
/// </summary>
/// <param name="targetFile">Path of the backup file to open.</param>
public static IDataReadOperator GetReadOperator(string targetFile)
{
    // Use an explicit ordinal, case-insensitive comparison: the original culture-sensitive
    // EndsWith (CA1310) could misclassify the file, and extensions may be upper-cased
    // on case-insensitive file systems.
    if (targetFile.EndsWith("tar.gz", StringComparison.OrdinalIgnoreCase))
    {
        return new ZipReadOperator(targetFile);
    }

    return new TarReadOperator(targetFile);
}
}

View File

@ -24,11 +24,12 @@
// content are licensed under the terms of the Creative Commons Attribution-ShareAlike 4.0
// International. See the License terms at http://creativecommons.org/licenses/by-sa/4.0/legalcode
namespace ASC.Data.Storage.ZipOperators;
namespace ASC.Data.Storage.DataOperators;
public interface IDataWriteOperator : IAsyncDisposable
{
Task WriteEntryAsync(string key, Stream stream);
Task WriteEntryAsync(string tarKey, Stream stream);
Task WriteEntryAsync(string tarKey, string domain, string path, IDataStore store);
bool NeedUpload { get; }
string Hash { get; }
string StoragePath { get; }

View File

@ -24,9 +24,10 @@
// content are licensed under the terms of the Creative Commons Attribution-ShareAlike 4.0
// International. See the License terms at http://creativecommons.org/licenses/by-sa/4.0/legalcode
namespace ASC.Data.Storage.ZipOperators;
namespace ASC.Data.Storage.DataOperators;
public interface IGetterWriteOperator
{
Task<IDataWriteOperator> GetWriteOperatorAsync(string storageBasePath, string title, Guid userId);
Task<string> GetBackupExtensionAsync(string storageBasePath);
}

View File

@ -0,0 +1,58 @@
// (c) Copyright Ascensio System SIA 2010-2022
//
// This program is a free software product.
// You can redistribute it and/or modify it under the terms
// of the GNU Affero General Public License (AGPL) version 3 as published by the Free Software
// Foundation. In accordance with Section 7(a) of the GNU AGPL its Section 15 shall be amended
// to the effect that Ascensio System SIA expressly excludes the warranty of non-infringement of
// any third-party rights.
//
// This program is distributed WITHOUT ANY WARRANTY, without even the implied warranty
// of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. For details, see
// the GNU AGPL at: http://www.gnu.org/licenses/agpl-3.0.html
//
// You can contact Ascensio System SIA at Lubanas st. 125a-25, Riga, Latvia, EU, LV-1021.
//
// The interactive user interfaces in modified source and object code versions of the Program must
// display Appropriate Legal Notices, as required under Section 5 of the GNU AGPL version 3.
//
// Pursuant to Section 7(b) of the License you must retain the original Product logo when
// distributing the program. Pursuant to Section 7(e) we decline to grant you any rights under
// trademark law for use of our trademarks.
//
// All the Product's GUI elements, including illustrations and icon sets, as well as technical writing
// content are licensed under the terms of the Creative Commons Attribution-ShareAlike 4.0
// International. See the License terms at http://creativecommons.org/licenses/by-sa/4.0/legalcode
namespace ASC.Data.Storage.DataOperators;
/// <summary>
/// Shared implementation for read operators that extract a backup archive to a
/// temporary directory and then serve entries from the file system. Derived
/// constructors are expected to populate <c>_tmpdir</c> with the extraction path.
/// </summary>
public abstract class BaseReadOperator: IDataReadOperator
{
    // Directory the archive was extracted to; set by the derived constructor.
    internal string _tmpdir;

    /// <summary>Opens the extracted entry with the given key, or returns null if it is absent.</summary>
    public Stream GetEntry(string key)
    {
        var filePath = Path.Combine(_tmpdir, key);
        if (!File.Exists(filePath))
        {
            return null;
        }

        return File.Open(filePath, FileMode.Open, FileAccess.ReadWrite, FileShare.Read);
    }

    /// <summary>Enumerates the files directly under the given key (a directory in the archive).</summary>
    public IEnumerable<string> GetEntries(string key) => Directory.EnumerateFiles(Path.Combine(_tmpdir, key));

    /// <summary>Enumerates the sub-directories directly under the given key.</summary>
    public IEnumerable<string> GetDirectories(string key) => Directory.EnumerateDirectories(Path.Combine(_tmpdir, key));

    /// <summary>Removes the temporary extraction directory and everything in it.</summary>
    public void Dispose()
    {
        if (Directory.Exists(_tmpdir))
        {
            Directory.Delete(_tmpdir, true);
        }
    }
}

View File

@ -1,4 +1,4 @@
// (c) Copyright Ascensio System SIA 2010-2022
// (c) Copyright Ascensio System SIA 2010-2022
//
// This program is a free software product.
// You can redistribute it and/or modify it under the terms
@ -24,30 +24,19 @@
// content are licensed under the terms of the Creative Commons Attribution-ShareAlike 4.0
// International. See the License terms at http://creativecommons.org/licenses/by-sa/4.0/legalcode
namespace ASC.Data.Backup;
public interface IBackupProvider
namespace ASC.Data.Storage.DataOperators;
public class TarReadOperator: BaseReadOperator
{
string Name { get; }
event EventHandler<ProgressChangedEventArgs> ProgressChanged;
Task<IEnumerable<XElement>> GetElements(int tenant, string[] configs, IDataWriteOperator writer);
Task LoadFromAsync(IEnumerable<XElement> elements, int tenant, string[] configs, IDataReadOperator reader);
}
public class ProgressChangedEventArgs : EventArgs
{
public string Status { get; private set; }
public double Progress { get; private set; }
public bool Completed { get; private set; }
public ProgressChangedEventArgs(string status, double progress)
: this(status, progress, false) { }
public ProgressChangedEventArgs(string status, double progress, bool completed)
public TarReadOperator(string targetFile)
{
Status = status;
Progress = progress;
Completed = completed;
_tmpdir = Path.Combine(Path.GetDirectoryName(targetFile), Path.GetFileNameWithoutExtension(targetFile).Replace('>', '_').Replace(':', '_').Replace('?', '_'));
using (var stream = File.OpenRead(targetFile))
using (var tarOutputStream = TarArchive.CreateInputTarArchive(stream, Encoding.UTF8))
{
tarOutputStream.ExtractContents(_tmpdir);
}
File.Delete(targetFile);
}
}

View File

@ -0,0 +1,43 @@
// (c) Copyright Ascensio System SIA 2010-2022
//
// This program is a free software product.
// You can redistribute it and/or modify it under the terms
// of the GNU Affero General Public License (AGPL) version 3 as published by the Free Software
// Foundation. In accordance with Section 7(a) of the GNU AGPL its Section 15 shall be amended
// to the effect that Ascensio System SIA expressly excludes the warranty of non-infringement of
// any third-party rights.
//
// This program is distributed WITHOUT ANY WARRANTY, without even the implied warranty
// of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. For details, see
// the GNU AGPL at: http://www.gnu.org/licenses/agpl-3.0.html
//
// You can contact Ascensio System SIA at Lubanas st. 125a-25, Riga, Latvia, EU, LV-1021.
//
// The interactive user interfaces in modified source and object code versions of the Program must
// display Appropriate Legal Notices, as required under Section 5 of the GNU AGPL version 3.
//
// Pursuant to Section 7(b) of the License you must retain the original Product logo when
// distributing the program. Pursuant to Section 7(e) we decline to grant you any rights under
// trademark law for use of our trademarks.
//
// All the Product's GUI elements, including illustrations and icon sets, as well as technical writing
// content are licensed under the terms of the Creative Commons Attribution-ShareAlike 4.0
// International. See the License terms at http://creativecommons.org/licenses/by-sa/4.0/legalcode
namespace ASC.Data.Storage.DataOperators;
/// <summary>
/// Read operator for gzipped tar backups ("tar.gz"): decompresses and extracts the
/// archive into a sibling temporary directory, then deletes the source file.
/// Entry access is inherited from <see cref="BaseReadOperator"/>.
/// </summary>
public class ZipReadOperator : BaseReadOperator
{
    public ZipReadOperator(string targetFile)
    {
        // Sanitize characters that are not valid in directory names.
        var dirName = Path.GetFileNameWithoutExtension(targetFile)
            .Replace('>', '_')
            .Replace(':', '_')
            .Replace('?', '_');
        _tmpdir = Path.Combine(Path.GetDirectoryName(targetFile), dirName);

        using (var fileStream = File.OpenRead(targetFile))
        using (var gzip = new GZipInputStream(fileStream))
        using (var archive = TarArchive.CreateInputTarArchive(gzip, Encoding.UTF8))
        {
            archive.ExtractContents(_tmpdir);
        }

        // The archive is fully extracted; the original file is no longer needed.
        File.Delete(targetFile);
    }
}

View File

@ -24,7 +24,7 @@
// content are licensed under the terms of the Creative Commons Attribution-ShareAlike 4.0
// International. See the License terms at http://creativecommons.org/licenses/by-sa/4.0/legalcode
namespace ASC.Data.Storage.ZipOperators;
namespace ASC.Data.Storage.DataOperators;
public class ChunkZipWriteOperator : IDataWriteOperator
{
@ -63,7 +63,21 @@ public class ChunkZipWriteOperator : IDataWriteOperator
_sha = SHA256.Create();
}
public async Task WriteEntryAsync(string key, Stream stream)
/// <summary>
/// Reads the file at (domain, path) from the given store (retrying up to 5 times)
/// and writes it into the archive under <paramref name="tarKey"/>.
/// </summary>
public async Task WriteEntryAsync(string tarKey, string domain, string path, IDataStore store)
{
    Stream fileStream = null;
    await ActionInvoker.TryAsync(async () =>
    {
        fileStream = await store.GetReadStreamAsync(domain, path);
    }, 5, error => throw error);

    if (fileStream != null)
    {
        // 'using' guarantees disposal even if the write throws
        // (the original leaked the stream on error).
        using (fileStream)
        {
            await WriteEntryAsync(tarKey, fileStream);
        }
    }
}
public async Task WriteEntryAsync(string tarKey, Stream stream)
{
if (_fileStream == null)
{
@ -73,7 +87,7 @@ public class ChunkZipWriteOperator : IDataWriteOperator
await using (var buffered = _tempStream.GetBuffered(stream))
{
var entry = TarEntry.CreateTarEntry(key);
var entry = TarEntry.CreateTarEntry(tarKey);
entry.Size = buffered.Length;
await _tarOutputStream.PutNextEntryAsync(entry, default);
buffered.Position = 0;

View File

@ -0,0 +1,97 @@
// (c) Copyright Ascensio System SIA 2010-2022
//
// This program is a free software product.
// You can redistribute it and/or modify it under the terms
// of the GNU Affero General Public License (AGPL) version 3 as published by the Free Software
// Foundation. In accordance with Section 7(a) of the GNU AGPL its Section 15 shall be amended
// to the effect that Ascensio System SIA expressly excludes the warranty of non-infringement of
// any third-party rights.
//
// This program is distributed WITHOUT ANY WARRANTY, without even the implied warranty
// of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. For details, see
// the GNU AGPL at: http://www.gnu.org/licenses/agpl-3.0.html
//
// You can contact Ascensio System SIA at Lubanas st. 125a-25, Riga, Latvia, EU, LV-1021.
//
// The interactive user interfaces in modified source and object code versions of the Program must
// display Appropriate Legal Notices, as required under Section 5 of the GNU AGPL version 3.
//
// Pursuant to Section 7(b) of the License you must retain the original Product logo when
// distributing the program. Pursuant to Section 7(e) we decline to grant you any rights under
// trademark law for use of our trademarks.
//
// All the Product's GUI elements, including illustrations and icon sets, as well as technical writing
// content are licensed under the terms of the Creative Commons Attribution-ShareAlike 4.0
// International. See the License terms at http://creativecommons.org/licenses/by-sa/4.0/legalcode
namespace ASC.Data.Storage.DataOperators;
/// <summary>
/// IDataWriteOperator that assembles a backup tar archive directly in S3 using
/// server-side concatenation (multipart uploads with copy parts), so the archive
/// never has to be buffered locally. Requires the session's DataStore to be S3Storage.
/// </summary>
public class S3TarWriteOperator : IDataWriteOperator
{
    private readonly CommonChunkedUploadSession _chunkedUploadSession;
    private readonly CommonChunkedUploadSessionHolder _sessionHolder;
    private readonly S3Storage _store;
    private readonly string _domain;
    private readonly string _key;

    public string Hash { get; private set; }
    public string StoragePath { get; private set; }

    // The archive is written straight into storage, so no separate upload step is needed.
    public bool NeedUpload => false;

    public S3TarWriteOperator(CommonChunkedUploadSession chunkedUploadSession, CommonChunkedUploadSessionHolder sessionHolder)
    {
        _chunkedUploadSession = chunkedUploadSession;
        _sessionHolder = sessionHolder;

        // Fail fast with InvalidCastException here instead of the original silent
        // 'as' cast, which deferred the failure to a NullReferenceException on first use.
        _store = (S3Storage)_sessionHolder.DataStore;

        _key = _chunkedUploadSession.TempPath;
        _domain = _sessionHolder.TempDomain;
    }

    /// <summary>
    /// Appends the file at (domain, path) to the tar. When the source store is also S3,
    /// the bytes are concatenated server-side without being downloaded.
    /// </summary>
    public async Task WriteEntryAsync(string tarKey, string domain, string path, IDataStore store)
    {
        if (store is S3Storage s3Store)
        {
            var fullPath = s3Store.MakePath(domain, path);
            await _store.ConcatFileAsync(fullPath, tarKey, _domain, _key);
        }
        else
        {
            // Source is in another storage: read it (retrying up to 5 times) and
            // stream the bytes into the tar.
            Stream fileStream = null;
            await ActionInvoker.TryAsync(async () =>
            {
                fileStream = await store.GetReadStreamAsync(domain, path);
            }, 5, error => throw error);

            if (fileStream != null)
            {
                // 'using' guarantees disposal even if the write throws
                // (the original leaked the stream on error).
                using (fileStream)
                {
                    await WriteEntryAsync(tarKey, fileStream);
                }
            }
        }
    }

    /// <summary>Appends the given stream's contents to the tar under <paramref name="tarKey"/>.</summary>
    public async Task WriteEntryAsync(string tarKey, Stream stream)
    {
        await _store.ConcatFileStreamAsync(stream, tarKey, _domain, _key);
    }

    /// <summary>
    /// Finalizes the archive (end-of-tar padding, first-block removal) and completes
    /// the chunked upload session, recording the resulting hash and storage path.
    /// </summary>
    public async ValueTask DisposeAsync()
    {
        await _store.AddEndAsync(_domain, _key);
        await _store.RemoveFirstBlockAsync(_domain, _key);

        var contentLength = await _store.GetFileSizeAsync(_domain, _key);
        Hash = (await _store.GetFileEtagAsync(_domain, _key)).Trim('\"');

        (var uploadId, var eTags, var partNumber) = await _store.InitiateConcatAsync(_domain, _key, lastInit: true);

        _chunkedUploadSession.BytesUploaded = contentLength;
        _chunkedUploadSession.BytesTotal = contentLength;
        _chunkedUploadSession.UploadId = uploadId;
        _chunkedUploadSession.Items["ETag"] = eTags.ToDictionary(e => e.PartNumber, e => e.ETag);
        _chunkedUploadSession.Items["ChunksUploaded"] = (partNumber - 1).ToString();

        StoragePath = await _sessionHolder.FinalizeAsync(_chunkedUploadSession);
    }
}

View File

@ -24,7 +24,7 @@
// content are licensed under the terms of the Creative Commons Attribution-ShareAlike 4.0
// International. See the License terms at http://creativecommons.org/licenses/by-sa/4.0/legalcode
namespace ASC.Data.Storage.ZipOperators;
namespace ASC.Data.Storage.DataOperators;
public class S3ZipWriteOperator : IDataWriteOperator
{
@ -67,7 +67,21 @@ public class S3ZipWriteOperator : IDataWriteOperator
_sha = SHA256.Create();
}
public async Task WriteEntryAsync(string key, Stream stream)
/// <summary>
/// Reads the file at (domain, path) from the given store (retrying up to 5 times)
/// and writes it into the archive under <paramref name="tarKey"/>.
/// </summary>
public async Task WriteEntryAsync(string tarKey, string domain, string path, IDataStore store)
{
    Stream fileStream = null;
    await ActionInvoker.TryAsync(async () =>
    {
        fileStream = await store.GetReadStreamAsync(domain, path);
    }, 5, error => throw error);

    if (fileStream != null)
    {
        // 'using' guarantees disposal even if the write throws
        // (the original leaked the stream on error).
        using (fileStream)
        {
            await WriteEntryAsync(tarKey, fileStream);
        }
    }
}
public async Task WriteEntryAsync(string tarKey, Stream stream)
{
if (_fileStream == null)
{
@ -77,7 +91,7 @@ public class S3ZipWriteOperator : IDataWriteOperator
await using (var buffered = _tempStream.GetBuffered(stream))
{
var entry = TarEntry.CreateTarEntry(key);
var entry = TarEntry.CreateTarEntry(tarKey);
entry.Size = buffered.Length;
await _tarOutputStream.PutNextEntryAsync(entry, default);
buffered.Position = 0;

View File

@ -24,7 +24,7 @@
// content are licensed under the terms of the Creative Commons Attribution-ShareAlike 4.0
// International. See the License terms at http://creativecommons.org/licenses/by-sa/4.0/legalcode
namespace ASC.Data.Storage.ZipOperators;
namespace ASC.Data.Storage.DataOperators;
public class ZipWriteOperator : IDataWriteOperator
@ -54,11 +54,25 @@ public class ZipWriteOperator : IDataWriteOperator
_tarOutputStream = new TarOutputStream(_gZipOutputStream, Encoding.UTF8);
}
public async Task WriteEntryAsync(string key, Stream stream)
/// <summary>
/// Reads the file at (domain, path) from the given store (retrying up to 5 times)
/// and writes it into the archive under <paramref name="tarKey"/>.
/// </summary>
public async Task WriteEntryAsync(string tarKey, string domain, string path, IDataStore store)
{
    Stream fileStream = null;
    await ActionInvoker.TryAsync(async () =>
    {
        fileStream = await store.GetReadStreamAsync(domain, path);
    }, 5, error => throw error);

    if (fileStream != null)
    {
        // 'using' guarantees disposal even if the write throws
        // (the original leaked the stream on error).
        using (fileStream)
        {
            await WriteEntryAsync(tarKey, fileStream);
        }
    }
}
public async Task WriteEntryAsync(string tarKey, Stream stream)
{
await using (var buffered = _tempStream.GetBuffered(stream))
{
var entry = TarEntry.CreateTarEntry(key);
var entry = TarEntry.CreateTarEntry(tarKey);
entry.Size = buffered.Length;
await _tarOutputStream.PutNextEntryAsync(entry, default);
buffered.Position = 0;
@ -73,50 +87,3 @@ public class ZipWriteOperator : IDataWriteOperator
await _tarOutputStream.DisposeAsync();
}
}
public class ZipReadOperator : IDataReadOperator
{
private readonly string tmpdir;
public ZipReadOperator(string targetFile)
{
tmpdir = Path.Combine(Path.GetDirectoryName(targetFile), Path.GetFileNameWithoutExtension(targetFile).Replace('>', '_').Replace(':', '_').Replace('?', '_'));
using (var stream = File.OpenRead(targetFile))
using (var reader = new GZipInputStream(stream))
using (var tarOutputStream = TarArchive.CreateInputTarArchive(reader, Encoding.UTF8))
{
tarOutputStream.ExtractContents(tmpdir);
}
File.Delete(targetFile);
}
public Stream GetEntry(string key)
{
var filePath = Path.Combine(tmpdir, key);
return File.Exists(filePath) ? File.Open(filePath, FileMode.Open, FileAccess.ReadWrite, FileShare.Read) : null;
}
public IEnumerable<string> GetEntries(string key)
{
var path = Path.Combine(tmpdir, key);
var files = Directory.EnumerateFiles(path);
return files;
}
public IEnumerable<string> GetDirectories(string key)
{
var path = Path.Combine(tmpdir, key);
var files = Directory.EnumerateDirectories(path);
return files;
}
public void Dispose()
{
if (Directory.Exists(tmpdir))
{
Directory.Delete(tmpdir, true);
}
}
}

View File

@ -30,7 +30,7 @@ global using System.Net;
global using System.Net.Http.Headers;
global using System.Runtime.Serialization;
global using System.Security.Cryptography;
global using System.ServiceModel;
global using System.ServiceModel;
global using System.Text;
global using System.Text.Json;
global using System.Text.Json.Serialization;
@ -39,7 +39,10 @@ global using System.Web;
global using Amazon;
global using Amazon.CloudFront;
global using Amazon.CloudFront.Model;
global using Amazon.Extensions.S3.Encryption;
global using Amazon.Extensions.S3.Encryption.Primitives;
global using Amazon.S3;
global using Amazon.S3.Internal;
global using Amazon.S3.Model;
global using Amazon.S3.Transfer;
global using Amazon.Util;
@ -66,7 +69,8 @@ global using ASC.Data.Storage.GoogleCloud;
global using ASC.Data.Storage.Log;
global using ASC.Data.Storage.RackspaceCloud;
global using ASC.Data.Storage.S3;
global using ASC.Data.Storage.ZipOperators;
global using ASC.Data.Storage.Tar;
global using ASC.Data.Storage.DataOperators;
global using ASC.EventBus.Events;
global using ASC.Notify.Messages;
global using ASC.Protos.Migration;

View File

@ -33,7 +33,9 @@ public interface IDataStore
{
IDataWriteOperator CreateDataWriteOperator(
CommonChunkedUploadSession chunkedUploadSession,
CommonChunkedUploadSessionHolder sessionHolder);
CommonChunkedUploadSessionHolder sessionHolder,
bool isConsumerStorage = false);
string GetBackupExtension(bool isConsumerStorage = false);
IQuotaController QuotaController { get; set; }

View File

@ -24,16 +24,13 @@
// content are licensed under the terms of the Creative Commons Attribution-ShareAlike 4.0
// International. See the License terms at http://creativecommons.org/licenses/by-sa/4.0/legalcode
using Amazon.Extensions.S3.Encryption;
using Amazon.Extensions.S3.Encryption.Primitives;
using Amazon.S3.Internal;
namespace ASC.Data.Storage.S3;
[Scope]
public class S3Storage : BaseStorage
{
public override bool IsSupportCdnUri => true;
public static long ChunkSize { get; } = 50 * 1024 * 1024;
public override bool IsSupportChunking => true;
private readonly List<string> _domains = new List<string>();
@ -60,7 +57,7 @@ public class S3Storage : BaseStorage
private EncryptionMethod _encryptionMethod = EncryptionMethod.None;
private string _encryptionKey;
private readonly IConfiguration _configuration;
private readonly CoreBaseSettings _coreBaseSettings;
public S3Storage(
TempStream tempStream,
@ -71,12 +68,12 @@ public class S3Storage : BaseStorage
ILoggerProvider factory,
ILogger<S3Storage> options,
IHttpClientFactory clientFactory,
IConfiguration configuration,
TenantQuotaFeatureStatHelper tenantQuotaFeatureStatHelper,
QuotaSocketManager quotaSocketManager)
QuotaSocketManager quotaSocketManager,
CoreBaseSettings coreBaseSettings)
: base(tempStream, tenantManager, pathUtils, emailValidationKeyProvider, httpContextAccessor, factory, options, clientFactory, tenantQuotaFeatureStatHelper, quotaSocketManager)
{
_configuration = configuration;
_coreBaseSettings = coreBaseSettings;
}
public Uri GetUriInternal(string path)
@ -461,9 +458,28 @@ public class S3Storage : BaseStorage
}
public override IDataWriteOperator CreateDataWriteOperator(CommonChunkedUploadSession chunkedUploadSession,
CommonChunkedUploadSessionHolder sessionHolder)
CommonChunkedUploadSessionHolder sessionHolder, bool isConsumerStorage = false)
{
return new S3ZipWriteOperator(_tempStream, chunkedUploadSession, sessionHolder);
if (_coreBaseSettings.Standalone || isConsumerStorage)
{
return new S3ZipWriteOperator(_tempStream, chunkedUploadSession, sessionHolder);
}
else
{
return new S3TarWriteOperator(chunkedUploadSession, sessionHolder);
}
}
/// <summary>
/// Backup archive extension for S3 storage: gzipped tar ("tar.gz") for standalone
/// deployments or consumer storages, plain "tar" otherwise.
/// </summary>
public override string GetBackupExtension(bool isConsumerStorage = false)
{
    return _coreBaseSettings.Standalone || isConsumerStorage ? "tar.gz" : "tar";
}
#endregion
@ -570,9 +586,9 @@ public class S3Storage : BaseStorage
if (string.IsNullOrEmpty(QuotaController.ExcludePattern) ||
!Path.GetFileName(s3Object.Key).StartsWith(QuotaController.ExcludePattern))
{
await QuotaUsedDeleteAsync(domain, s3Object.Size);
}
}
await QuotaUsedDeleteAsync(domain, s3Object.Size);
}
}
}
}
@ -1052,7 +1068,7 @@ public class S3Storage : BaseStorage
_cdnKeyPairId = props["cdn_keyPairId"];
_cdnPrivateKeyPath = props["cdn_privateKeyPath"];
CdnDistributionDomain = props["cdn_distributionDomain"];
}
}
}
props.TryGetValue("subdir", out _subDir);
@ -1218,7 +1234,7 @@ public class S3Storage : BaseStorage
return s30Objects;
}
private string MakePath(string domain, string path)
public string MakePath(string domain, string path)
{
string result;
@ -1304,7 +1320,7 @@ public class S3Storage : BaseStorage
var uploadId = initResponse.UploadId;
var partSize = 500 * 1024 * 1024L;//500 megabytes
var partSize = ChunkSize;
var uploadTasks = new List<Task<CopyPartResponse>>();
@ -1369,6 +1385,260 @@ public class S3Storage : BaseStorage
}
}
/// <summary>
/// Appends the given stream as a tar entry to the archive being assembled at
/// (destinationDomain, destinationKey): pads the existing content to the 512-byte
/// tar block size, writes a tar header for <paramref name="tarKey"/>, then the payload.
/// Takes ownership of (and disposes) the caller's stream.
/// </summary>
public async Task ConcatFileStreamAsync(Stream stream, string tarKey, string destinationDomain, string destinationKey)
{
    (var uploadId, var eTags, var partNumber) = await InitiateConcatAsync(destinationDomain, destinationKey);

    using var s3 = GetClient();
    var destinationPath = MakePath(destinationDomain, destinationKey);
    const int blockSize = 512; // tar format block size

    long prevFileSize = 0;
    try
    {
        // The destination may not exist yet (first entry) - treat that as size 0.
        var objResult = await s3.GetObjectMetadataAsync(_bucket, destinationPath);
        prevFileSize = objResult.ContentLength;
    }
    catch { }

    var header = BuilderHeaders.CreateHeader(tarKey, stream.Length);

    // Build the next upload part in memory: block padding + tar header + entry payload.
    // ('using' fixes the original's undisposed MemoryStream.)
    using var ms = new MemoryStream();
    if (prevFileSize % blockSize != 0)
    {
        var endBlock = new byte[blockSize - prevFileSize % blockSize];
        ms.Write(endBlock);
    }
    ms.Write(header);

    stream.Position = 0;
    await stream.CopyToAsync(ms); // async copy instead of the original blocking CopyTo
    stream.Dispose();
    ms.Position = 0;

    var uploadRequest = new UploadPartRequest
    {
        BucketName = _bucket,
        Key = destinationPath,
        UploadId = uploadId,
        PartNumber = partNumber,
        InputStream = ms
    };
    eTags.Add(new PartETag(partNumber, (await s3.UploadPartAsync(uploadRequest)).ETag));

    var completeRequest = new CompleteMultipartUploadRequest
    {
        BucketName = _bucket,
        Key = destinationPath,
        UploadId = uploadId,
        PartETags = eTags
    };
    await s3.CompleteMultipartUploadAsync(completeRequest);
}
/// <summary>
/// Appends an existing S3 object (<paramref name="pathFile"/>) as a tar entry to the
/// archive at (destinationDomain, destinationKey). First uploads block padding plus a
/// tar header for <paramref name="tarKey"/>, then server-side copies the object's bytes,
/// so the payload is never downloaded.
/// </summary>
public async Task ConcatFileAsync(string pathFile, string tarKey, string destinationDomain, string destinationKey)
{
    (var uploadId, var eTags, var partNumber) = await InitiateConcatAsync(destinationDomain, destinationKey);

    using var s3 = GetClient();
    var destinationPath = MakePath(destinationDomain, destinationKey);
    const int blockSize = 512; // tar format block size

    long prevFileSize = 0;
    try
    {
        // The destination may not exist yet (first entry) - treat that as size 0.
        var objResult = await s3.GetObjectMetadataAsync(_bucket, destinationPath);
        prevFileSize = objResult.ContentLength;
    }
    catch { }

    var objFile = await s3.GetObjectMetadataAsync(_bucket, pathFile);
    var header = BuilderHeaders.CreateHeader(tarKey, objFile.ContentLength);

    using var stream = new MemoryStream();
    if (prevFileSize % blockSize != 0)
    {
        // Pad the previous entry up to a whole tar block before writing the header.
        var endBlock = new byte[blockSize - prevFileSize % blockSize];
        stream.Write(endBlock);
    }
    stream.Write(header);
    stream.Position = 0;

    var uploadRequest = new UploadPartRequest
    {
        BucketName = _bucket,
        Key = destinationPath,
        UploadId = uploadId,
        PartNumber = partNumber,
        InputStream = stream
    };
    eTags.Add(new PartETag(partNumber, (await s3.UploadPartAsync(uploadRequest)).ETag));

    var completeRequest = new CompleteMultipartUploadRequest
    {
        BucketName = _bucket,
        Key = destinationPath,
        UploadId = uploadId,
        PartETags = eTags
    };
    await s3.CompleteMultipartUploadAsync(completeRequest);

    // Second multipart round: server-side copy the source object's bytes right
    // after the header written above. (The original also captured the unused
    // completion responses and made a dead prevFileSize assignment; both removed.)
    (uploadId, eTags, partNumber) = await InitiateConcatAsync(destinationDomain, destinationKey);

    var copyRequest = new CopyPartRequest
    {
        DestinationBucket = _bucket,
        DestinationKey = destinationPath,
        SourceBucket = _bucket,
        SourceKey = pathFile,
        UploadId = uploadId,
        PartNumber = partNumber
    };
    eTags.Add(new PartETag(partNumber, (await s3.CopyPartAsync(copyRequest)).ETag));

    completeRequest = new CompleteMultipartUploadRequest
    {
        BucketName = _bucket,
        Key = destinationPath,
        UploadId = uploadId,
        PartETags = eTags
    };
    await s3.CompleteMultipartUploadAsync(completeRequest);
}
/// <summary>
/// Finalizes a tar archive stored in S3 by appending the trailing padding:
/// zero bytes up to the next 512-byte block boundary plus the two all-zero
/// end-of-archive blocks the tar format requires. The padding is uploaded as
/// the last part of a multipart upload initiated over the existing object.
/// </summary>
/// <param name="domain">Storage domain of the archive.</param>
/// <param name="key">Key (within the domain) of the archive.</param>
public async Task AddEndAsync(string domain, string key)
{
    using var s3 = GetClient();
    var path = MakePath(domain, key);
    var blockSize = 512; // tar format block size
    (var uploadId, var eTags, var partNumber) = await InitiateConcatAsync(domain, key);
    var obj = await s3.GetObjectMetadataAsync(_bucket, path);
    // Padding to the block boundary + two terminating zero blocks.
    var buffer = new byte[blockSize - obj.ContentLength % blockSize + blockSize * 2];
    using var stream = new MemoryStream(buffer);
    var uploadRequest = new UploadPartRequest
    {
        BucketName = _bucket,
        Key = path,
        UploadId = uploadId,
        PartNumber = partNumber,
        InputStream = stream
    };
    eTags.Add(new PartETag(partNumber, (await s3.UploadPartAsync(uploadRequest)).ETag));
    var completeRequest = new CompleteMultipartUploadRequest
    {
        BucketName = _bucket,
        Key = path,
        UploadId = uploadId,
        PartETags = eTags
    };
    await s3.CompleteMultipartUploadAsync(completeRequest);
}
/// <summary>
/// Drops the first 5 MB block of the stored object by re-initiating a multipart
/// copy that starts past it (InitiateConcatAsync with removeFirstBlock/lastInit
/// set) and then completing that upload, which atomically replaces the object.
/// </summary>
/// <param name="domain">Storage domain of the object.</param>
/// <param name="key">Key (within the domain) of the object.</param>
public async Task RemoveFirstBlockAsync(string domain, string key)
{
    using var client = GetClient();
    var objectKey = MakePath(domain, key);
    // The next part number is not needed here — no further parts are added.
    var (uploadId, partETags, _) = await InitiateConcatAsync(domain, key, true, true);
    await client.CompleteMultipartUploadAsync(new CompleteMultipartUploadRequest
    {
        BucketName = _bucket,
        Key = objectKey,
        UploadId = uploadId,
        PartETags = partETags
    });
}
/// <summary>
/// Starts a new multipart upload for the object and pre-populates it by server-side
/// copying the object's existing content into the upload in ChunkSize parts.
/// Returns the upload id, the accumulated part ETags, and the next free part number
/// for the caller to continue with. If the object's metadata cannot be read (e.g. the
/// object does not exist yet), a single 5 MB zero-filled part is uploaded instead so
/// the upload satisfies S3's 5 MB minimum part size, and part number 2 is returned.
/// </summary>
/// <param name="domain">Storage domain of the object.</param>
/// <param name="key">Key within the domain; converted to a full S3 key via MakePath.</param>
/// <param name="removeFirstBlock">When true, copying starts 5 MB into the object, dropping its first block.</param>
/// <param name="lastInit">When true, the small-trailing-fragment merge below is skipped.</param>
public async Task<(string uploadId, List<PartETag> eTags, int partNumber)> InitiateConcatAsync(string domain, string key, bool removeFirstBlock = false, bool lastInit = false)
{
    using var s3 = GetClient();
    key = MakePath(domain, key);
    var initiateRequest = new InitiateMultipartUploadRequest
    {
        BucketName = _bucket,
        Key = key
    };
    var initResponse = await s3.InitiateMultipartUploadAsync(initiateRequest);
    var eTags = new List<PartETag>();
    try
    {
        // S3's minimum size for any non-final multipart part.
        var mb5 = 5 * 1024 * 1024;
        long bytePosition = removeFirstBlock ? mb5 : 0;
        var obj = await s3.GetObjectMetadataAsync(_bucket, key);
        var objectSize = obj.ContentLength;
        var partSize = ChunkSize;
        var partNumber = 1;
        // Copy [bytePosition, objectSize) into the new upload in partSize slices.
        for (var i = 1; bytePosition < objectSize; i++)
        {
            var copyRequest = new CopyPartRequest
            {
                DestinationBucket = _bucket,
                DestinationKey = key,
                SourceBucket = _bucket,
                SourceKey = key,
                UploadId = initResponse.UploadId,
                FirstByte = bytePosition,
                // Clamp the slice end to the last byte of the object.
                LastByte = bytePosition + partSize - 1 >= objectSize ? objectSize - 1 : bytePosition + partSize - 1,
                PartNumber = i
            };
            partNumber = i + 1;
            bytePosition += partSize;
            var x = objectSize - bytePosition;
            if (!lastInit && x < mb5 && x > 0)
            {
                // The remaining tail is under 5 MB and could not stand alone as a
                // non-final part, so extend the current part to the end of the
                // object. The extra increment below terminates the loop; it also
                // makes the final partNumber one higher than the parts actually
                // copied — NOTE(review): presumably intentional for the caller's
                // next append, but worth confirming.
                copyRequest.LastByte = objectSize - 1;
                bytePosition += partSize;
            }
            eTags.Add(new PartETag(i, (await s3.CopyPartAsync(copyRequest)).ETag));
        }
        return (initResponse.UploadId, eTags, partNumber);
    }
    catch
    {
        // Fallback (object missing or unreadable): seed the upload with one
        // 5 MB zero-filled part so it meets the minimum part size, and hand
        // the caller part number 2.
        using var stream = new MemoryStream();
        var buffer = new byte[5 * 1024 * 1024];
        stream.Write(buffer);
        stream.Position = 0;
        var uploadRequest = new UploadPartRequest
        {
            BucketName = _bucket,
            Key = key,
            UploadId = initResponse.UploadId,
            PartNumber = 1,
            InputStream = stream
        };
        eTags.Add(new PartETag(1, (await s3.UploadPartAsync(uploadRequest)).ETag));
        return (initResponse.UploadId, eTags, 2);
    }
}
private IAmazonCloudFront GetCloudFrontClient()
{
var cfg = new AmazonCloudFrontConfig { MaxErrorRetry = 3 };

View File

@ -24,12 +24,27 @@
// content are licensed under the terms of the Creative Commons Attribution-ShareAlike 4.0
// International. See the License terms at http://creativecommons.org/licenses/by-sa/4.0/legalcode
namespace ASC.Data.Backup.Core.Log;
public static partial class DbHelperLogger
{
[LoggerMessage(Level = LogLevel.Error, Message = "Table {table}")]
public static partial void ErrorTableString(this ILogger<DbHelper> logger, string table, Exception exception);
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
[LoggerMessage(Level = LogLevel.Error, Message = "Table {table}")]
public static partial void ErrorTable(this ILogger<DbHelper> logger, DataTable table, Exception exception);
namespace ASC.Data.Storage.Tar;
/// <summary>Helper for producing raw tar entry headers.</summary>
public static class BuilderHeaders
{
    /// <summary>
    /// Builds a single 512-byte tar header block for an entry with the given
    /// name and payload size, zero-padded to the full block length.
    /// </summary>
    /// <param name="name">Entry name recorded in the header.</param>
    /// <param name="size">Entry payload size in bytes recorded in the header.</param>
    /// <returns>A 512-byte buffer containing the serialized header.</returns>
    public static byte[] CreateHeader(string name, long size)
    {
        var buffer = new byte[512];
        var header = new TarHeader
        {
            Name = name,
            Size = size
        };
        header.WriteHeader(buffer, null);
        return buffer;
    }
}

View File

@ -24,8 +24,6 @@
// content are licensed under the terms of the Creative Commons Attribution-ShareAlike 4.0
// International. See the License terms at http://creativecommons.org/licenses/by-sa/4.0/legalcode
using AutoMapper;
using static System.Formats.Asn1.AsnWriter;
namespace ASC.FederatedLogin.LoginProviders;
@ -39,7 +37,6 @@ public class ZoomLoginProvider : BaseLoginProvider<ZoomLoginProvider>
public override string ClientSecret => this["zoomClientSecret"];
public override string CodeUrl => "https://zoom.us/oauth/authorize";
public override string Scopes => "";
public string ApiRedirectUri => this["zoomApiRedirectUrl"];
public const string ApiUrl = "https://api.zoom.us/v2";
private const string UserProfileUrl = $"{ApiUrl}/users/me";

View File

@ -378,7 +378,7 @@ public class MigrationCreator
{
var storage = await _storageFactory.GetStorageAsync(_fromTenantId, group.Key);
var file1 = file;
await ActionInvoker.Try(async state =>
await ActionInvoker.TryAsync(async state =>
{
var f = (BackupFileInfo)state;
using var fileStream = await storage.GetReadStreamAsync(f.Domain, f.Path);

View File

@ -17,18 +17,16 @@ const ErrorFileUpload = ({ t, item, onTextClick, showPasswordInput }) => {
<div className="upload_panel-icon">
<StyledLoadErrorIcon
size="medium"
data-for="errorTooltip"
data-tip={item.error || t("Common:UnknownError")}
data-tooltip-id="errorTooltip"
data-tooltip-content={item.error || t("Common:UnknownError")}
/>
<Tooltip
id="errorTooltip"
offsetTop={0}
getContent={dataTip => (
getContent={({ content }) => (
<Text fontSize="13px" noSelect>
{dataTip}
{content}
</Text>
)}
effect="float"
place={placeTooltip}
maxWidth="320"
color="#f8f7bf"
@ -38,7 +36,8 @@ const ErrorFileUpload = ({ t, item, onTextClick, showPasswordInput }) => {
className="enter-password"
fontWeight="600"
color="#A3A9AE"
onClick={onTextClick}>
onClick={onTextClick}
>
{showPasswordInput ? t("HideInput") : t("EnterPassword")}
</Text>
)}

View File

@ -3,11 +3,11 @@ import Loader from "@docspace/components/loader";
import Section from "@docspace/common/components/Section";
import { loginWithConfirmKey } from "@docspace/common/api/user";
import toastr from "@docspace/components/toast/toastr";
import { frameCallEvent } from "@docspace/common/utils";
const Auth = (props) => {
console.log("Auth render");
//console.log("Auth render");
const { linkData } = props;
useEffect(() => {
loginWithConfirmKey({
ConfirmData: {
@ -16,11 +16,15 @@ const Auth = (props) => {
},
})
.then((res) => {
console.log("Login with confirm key success", res);
//console.log("Login with confirm key success", res);
frameCallEvent({ event: "onAuthSuccess" });
if (typeof res === "string") window.location.replace(res);
else window.location.replace("/");
})
.catch((error) => toastr.error(error));
.catch((error) => {
frameCallEvent({ event: "onAppError", data: error });
toastr.error(error);
});
});
return <Loader className="pageLoader" type="rombs" size="40px" />;

View File

@ -47,8 +47,10 @@ const PublicRoomBlock = (props) => {
</Text>
<div
data-for="emailTooltip"
data-tip={t("Files:MaximumNumberOfExternalLinksCreated")}
data-tooltip-id="emailTooltip"
data-tooltip-content={t(
"Files:MaximumNumberOfExternalLinksCreated"
)}
>
<IconButton
className="link-to-viewing-icon"
@ -61,11 +63,11 @@ const PublicRoomBlock = (props) => {
{externalLinks.length >= LINKS_LIMIT_COUNT && (
<Tooltip
float
id="emailTooltip"
getContent={(dataTip) => (
<Text fontSize="12px">{dataTip}</Text>
getContent={({ content }) => (
<Text fontSize="12px">{content}</Text>
)}
effect="float"
place="bottom"
/>
)}

View File

@ -36,17 +36,17 @@ const RoomCell = ({ sideColor, item }) => {
color={sideColor}
className="row_update-text"
truncate
data-for={"" + item.id}
data-tooltip-id={"" + item.id}
data-tip={""}
data-place={"bottom"}
>
{originRoomTitle || originTitle || "—"}
</StyledText>,
<Tooltip
id={"" + item.id}
float
place="bottom"
key={"tooltip"}
effect={"float"}
id={"" + item.id}
afterShow={getPath}
getContent={() => (
<span>

View File

@ -48,19 +48,16 @@ const Appearance = (props) => {
const [showColorSchemeDialog, setShowColorSchemeDialog] = useState(false);
const [headerColorSchemeDialog, setHeaderColorSchemeDialog] = useState(
headerEditTheme
);
const [headerColorSchemeDialog, setHeaderColorSchemeDialog] =
useState(headerEditTheme);
const [currentColorAccent, setCurrentColorAccent] = useState(null);
const [currentColorButtons, setCurrentColorButtons] = useState(null);
const [openHexColorPickerAccent, setOpenHexColorPickerAccent] = useState(
false
);
const [openHexColorPickerButtons, setOpenHexColorPickerButtons] = useState(
false
);
const [openHexColorPickerAccent, setOpenHexColorPickerAccent] =
useState(false);
const [openHexColorPickerButtons, setOpenHexColorPickerButtons] =
useState(false);
const [appliedColorAccent, setAppliedColorAccent] = useState(
defaultAppliedColorAccent
@ -69,12 +66,10 @@ const Appearance = (props) => {
defaultAppliedColorButtons
);
const [changeCurrentColorAccent, setChangeCurrentColorAccent] = useState(
false
);
const [changeCurrentColorButtons, setChangeCurrentColorButtons] = useState(
false
);
const [changeCurrentColorAccent, setChangeCurrentColorAccent] =
useState(false);
const [changeCurrentColorButtons, setChangeCurrentColorButtons] =
useState(false);
const [isSmallWindow, setIsSmallWindow] = useState(false);
@ -697,7 +692,7 @@ const Appearance = (props) => {
</div>
<div
data-for="theme-add"
data-tooltip-id="theme-add"
data-tip="tooltip"
className="theme-add"
onClick={onAddTheme}
@ -707,7 +702,6 @@ const Appearance = (props) => {
id="theme-add"
offsetBottom={0}
offsetRight={130}
effect="solid"
place="bottom"
getContent={textTooltip}
maxWidth="300px"

View File

@ -35,7 +35,7 @@ import { isSmallTablet } from "@docspace/components/utils/device";
import { SSO_LABEL } from "SRC_DIR/helpers/constants";
import { useTheme } from "styled-components";
const MainProfile = props => {
const MainProfile = (props) => {
const { t } = useTranslation(["Profile", "Common"]);
const {
@ -101,7 +101,8 @@ const MainProfile = props => {
<Link
href={`mailto:${documentationEmail}`}
isHovered={true}
color={theme.profileInfo.tooltipLinkColor}>
color={theme.profileInfo.tooltipLinkColor}
>
{{ supportEmail: documentationEmail }}
</Link>
to take part in the translation and get up to 1 year free of charge."
@ -113,7 +114,8 @@ const MainProfile = props => {
color="#333333"
fontSize="13px"
href={`${helpLink}/guides/become-translator.aspx`}
target="_blank">
target="_blank"
>
{t("Common:LearnMore")}
</Link>
</Box>
@ -125,18 +127,18 @@ const MainProfile = props => {
const { cultureName, currentCulture } = profile;
const language = convertLanguage(cultureName || currentCulture || culture);
const selectedLanguage = cultureNames.find(item => item.key === language) ||
cultureNames.find(item => item.key === culture) || {
const selectedLanguage = cultureNames.find((item) => item.key === language) ||
cultureNames.find((item) => item.key === culture) || {
key: language,
label: "",
};
const onLanguageSelect = language => {
const onLanguageSelect = (language) => {
if (profile.cultureName === language.key) return;
updateProfileCulture(profile.id, language.key)
.then(() => location.reload())
.catch(error => {
.catch((error) => {
toastr.error(error && error.message ? error.message : error);
});
};
@ -170,7 +172,8 @@ const MainProfile = props => {
</StyledAvatarWrapper>
<StyledInfo
withActivationBar={withActivationBar}
currentColorScheme={currentColorScheme}>
currentColorScheme={currentColorScheme}
>
<div className="rows-container">
<div className="profile-block">
<StyledLabel as="div">{t("Common:Name")}</StyledLabel>
@ -181,14 +184,16 @@ const MainProfile = props => {
<StyledLabel
as="div"
marginTopProp={withActivationBar ? "34px" : "16px"}>
marginTopProp={withActivationBar ? "34px" : "16px"}
>
{t("Common:Password")}
</StyledLabel>
<StyledLabel
as="div"
className="profile-language"
marginTopProp="15px">
marginTopProp="15px"
>
{t("Common:Language")}
<HelpButton
size={12}
@ -229,20 +234,21 @@ const MainProfile = props => {
<div className="email-container">
<div className="email-edit-container">
<Text
data-for="emailTooltip"
data-tip={t("EmailNotVerified")}
data-tooltip-id="emailTooltip"
data-tooltip-content={t("EmailNotVerified")}
as="div"
className="email-text-container"
fontWeight={600}>
fontWeight={600}
>
{profile.email}
</Text>
{withActivationBar && (
<Tooltip
float
id="emailTooltip"
getContent={dataTip => (
<Text fontSize="12px">{dataTip}</Text>
getContent={({ content }) => (
<Text fontSize="12px">{content}</Text>
)}
effect="float"
place="bottom"
/>
)}
@ -258,7 +264,8 @@ const MainProfile = props => {
{withActivationBar && (
<div
className="send-again-container"
onClick={sendActivationLinkAction}>
onClick={sendActivationLinkAction}
>
<ReactSVG
className="send-again-icon"
src={SendClockReactSvgUrl}
@ -313,7 +320,8 @@ const MainProfile = props => {
<Text
className="mobile-profile-label-field"
fontWeight={600}
truncate>
truncate
>
{profile.displayName}
</Text>
</div>
@ -332,21 +340,22 @@ const MainProfile = props => {
<div className="email-container">
<div className="email-edit-container">
<Text
data-for="emailTooltip"
data-tip={t("EmailNotVerified")}
data-tooltip-id="emailTooltip"
data-tooltip-content={t("EmailNotVerified")}
as="div"
className="email-text-container"
fontWeight={600}>
fontWeight={600}
>
{profile.email}
</Text>
</div>
{withActivationBar && (
<Tooltip
float
id="emailTooltip"
getContent={dataTip => (
<Text fontSize="12px">{dataTip}</Text>
getContent={({ content }) => (
<Text fontSize="12px">{content}</Text>
)}
effect="float"
place="bottom"
/>
)}
@ -354,7 +363,8 @@ const MainProfile = props => {
{withActivationBar && (
<div
className="send-again-container"
onClick={sendActivationLinkAction}>
onClick={sendActivationLinkAction}
>
<ReactSVG
className="send-again-icon"
src={SendClockReactSvgUrl}

View File

@ -32,7 +32,7 @@
"react-player": "^1.15.3",
"react-router": "^6.10.0",
"react-router-dom": "^6.10.0",
"react-tooltip": "^4.5.1",
"react-tooltip": "5.21.1",
"react-viewer": "^3.2.2",
"react-virtualized-auto-sizer": "^1.0.7",
"react-window": "^1.8.8",

View File

@ -207,30 +207,29 @@ const Template = (args) => (
</Link>
</div>
<div style={{ padding: "24px 0 8px 0" }}>
<Link data-for="group" data-tip={0}>
<Link data-tooltip-id="group" data-tooltip-content={0}>
Bob
</Link>
<br />
<Link data-for="group" data-tip={1}>
<Link data-tooltip-id="group" data-tooltip-content={1}>
John
</Link>
<br />
<Link data-for="group" data-tip={2}>
<Link data-tooltip-id="group" data-tooltip-content={2}>
Kevin
</Link>
<Tooltip
id="group"
offsetRight={90}
getContent={(dataTip) =>
dataTip ? (
getContent={({ content }) =>
content ? (
<div>
<Text isBold={true} fontSize="16px">
{arrayUsers[dataTip].name}
{arrayUsers[content].name}
</Text>
<Text color="#A3A9AE" fontSize="13px">
{arrayUsers[dataTip].email}
{arrayUsers[content].email}
</Text>
<Text fontSize="13px">{arrayUsers[dataTip].position}</Text>
<Text fontSize="13px">{arrayUsers[content].position}</Text>
</div>
) : null
}

View File

@ -119,19 +119,19 @@ const Avatar = (props) => {
<>
<RoleWrapper
size={size}
data-for={uniqueTooltipId}
data-tip={tooltipContent}
data-tooltip-id={uniqueTooltipId}
data-tooltip-content={tooltipContent}
className="avatar_role-wrapper"
>
{props.roleIcon ? props.roleIcon : roleIcon}
</RoleWrapper>
{withTooltip && (
<Tooltip
float
id={uniqueTooltipId}
getContent={(dataTip) => (
<Text fontSize="12px">{dataTip}</Text>
getContent={({ content }) => (
<Text fontSize="12px">{content}</Text>
)}
effect="float"
place={tooltipPlace}
/>
)}

View File

@ -43,7 +43,6 @@ const Chip = (props) => {
const [chipWidth, setChipWidth] = useState(0);
const [isChipOverLimit, setIsChipOverLimit] = useState(false);
const tooltipRef = useRef(null);
const warningRef = useRef(null);
const chipRef = useRef(null);
const chipInputRef = useRef(null);
@ -66,7 +65,6 @@ const Chip = (props) => {
}
}, [newValue]);
useClickOutside(warningRef, () => tooltipRef.current.hideTooltip());
useClickOutside(
chipInputRef,
() => {
@ -122,12 +120,10 @@ const Chip = (props) => {
if (value?.email === currentChip?.email) {
return (
<StyledContainer>
{isChipOverLimit && (
<Tooltip getContent={() => {}} id="input" effect="float" />
)}
{isChipOverLimit && <Tooltip id="input" float />}
<StyledChipInput
data-for="input"
data-tip={chipOverLimitText}
data-tooltip-id="input"
data-tooltip-content={chipOverLimitText}
value={newValue}
forwardedRef={chipInputRef}
onChange={onChange}
@ -156,16 +152,11 @@ const Chip = (props) => {
<IconButton
iconName={WarningIconSvgUrl}
size={12}
className="warning_icon_wrap warning_icon "
data-for="group"
data-tip={invalidEmailText}
/>
<Tooltip
getContent={() => {}}
id="group"
reference={tooltipRef}
place={"top"}
className="warning_icon_wrap warning_icon"
data-tooltip-id="group"
data-tooltip-content={invalidEmailText}
/>
<Tooltip id="group" place={"top"} />
</div>
)}
{/*dir="auto" for correct truncate email view (asd@gmai..., ...خالد@الدوح)*/}

View File

@ -2,8 +2,8 @@ import React from "react";
import PropTypes from "prop-types";
import IconButton from "../icon-button";
import Tooltip from "../tooltip";
import { handleAnyClick } from "../utils/event";
import uniqueId from "lodash/uniqueId";
import { classNames } from "../utils/classNames";
import InfoReactSvgUrl from "PUBLIC_DIR/images/info.react.svg?url";
@ -11,52 +11,14 @@ class HelpButton extends React.Component {
constructor(props) {
super(props);
this.state = {
hideTooltip: false,
};
this.ref = React.createRef();
this.refTooltip = React.createRef();
this.id = this.props.id || uniqueId();
}
afterShow = () => {
this.refTooltip.current.updatePosition();
handleAnyClick(true, this.handleClick);
if (this.state.hideTooltip) {
this.refTooltip.current.hideTooltip();
}
};
afterHide = () => {
if (!this.state.hideTooltip) {
handleAnyClick(false, this.handleClick);
}
};
handleClick = (e) => {
if (!this.ref.current.contains(e.target)) {
this.refTooltip.current.hideTooltip();
}
};
componentWillUnmount() {
handleAnyClick(false, this.handleClick);
}
onClick = () => {
this.setState({ hideTooltip: false });
};
render() {
const {
tooltipContent,
tooltipProps,
place,
offsetTop,
offsetBottom,
offsetRight,
offsetLeft,
offset,
iconName,
color,
getContent,
@ -65,53 +27,48 @@ class HelpButton extends React.Component {
tooltipMaxWidth,
style,
size,
afterShow,
afterHide,
} = this.props;
const anchorSelect = `div[id='${this.id}'] svg`;
return (
<div ref={this.ref} style={style}>
<IconButton
theme={this.props.theme}
id={this.id}
className={`${className} help-icon`}
theme={this.props.theme}
className={classNames(className, "help-icon")}
isClickable={true}
iconName={iconName}
size={size}
color={color}
data-for={this.id}
dataTip={dataTip}
onClick={this.onClick}
/>
{getContent ? (
<Tooltip
tooltipProps={tooltipProps}
theme={this.props.theme}
id={this.id}
reference={this.refTooltip}
effect="solid"
clickable
openOnClick
place={place}
offsetTop={offsetTop}
offsetBottom={offsetBottom}
offsetRight={offsetRight}
offsetLeft={offsetLeft}
afterShow={this.afterShow}
afterHide={this.afterHide}
getContent={getContent}
offset={offset}
afterShow={afterShow}
afterHide={afterHide}
maxWidth={tooltipMaxWidth}
{...tooltipProps}
getContent={getContent}
anchorSelect={anchorSelect}
/>
) : (
<Tooltip
theme={this.props.theme}
id={this.id}
reference={this.refTooltip}
effect="solid"
clickable
openOnClick
place={place}
offsetRight={offsetRight}
offsetLeft={offsetLeft}
afterShow={this.afterShow}
afterHide={this.afterHide}
offset={offset}
afterShow={afterShow}
afterHide={afterHide}
maxWidth={tooltipMaxWidth}
{...tooltipProps}
anchorSelect={anchorSelect}
>
{tooltipContent}
</Tooltip>
@ -131,14 +88,6 @@ HelpButton.propTypes = {
tooltipContent: PropTypes.oneOfType([PropTypes.string, PropTypes.object]),
/** Required to set additional properties of the tooltip */
tooltipProps: PropTypes.object,
/** Sets the right offset for all the tooltips on the page */
offsetRight: PropTypes.number,
/** Sets the left offset for all the tooltips on the page */
offsetLeft: PropTypes.number,
/** Sets the top offset for all the tooltips on the page */
offsetTop: PropTypes.number,
/** Sets the bottom offset for all the tooltips on the page */
offsetBottom: PropTypes.number,
/** Sets the maximum width of the tooltip */
tooltipMaxWidth: PropTypes.string,
/** Sets the tooltip id */
@ -166,10 +115,6 @@ HelpButton.propTypes = {
HelpButton.defaultProps = {
iconName: InfoReactSvgUrl,
place: "top",
offsetRight: 60,
offsetLeft: 0,
offsetTop: 0,
offsetBottom: 0,
className: "icon-button",
size: 12,
};

View File

@ -40,7 +40,7 @@
"react-svg": "^12.1.0",
"react-text-mask": "^5.5.0",
"react-toastify": "^7.0.4",
"react-tooltip": "^4.5.1",
"react-tooltip": "^5.21.1",
"react-transition-group": "^4.4.5",
"react-virtualized": "^9.22.3",
"react-window": "^1.8.8",

View File

@ -25,7 +25,6 @@ class PasswordInput extends React.Component {
const { inputValue, inputType, clipActionResource, emailInputName } = props;
this.ref = React.createRef();
this.refTooltip = React.createRef();
this.state = {
type: inputType,
@ -40,13 +39,8 @@ class PasswordInput extends React.Component {
};
}
hideTooltip = () => {
this.hideTooltip && this.refTooltip.current.hideTooltip();
};
onBlur = (e) => {
e.persist();
this.hideTooltip();
if (this.props.onBlur) this.props.onBlur(e);
};
@ -56,7 +50,6 @@ class PasswordInput extends React.Component {
};
changeInputType = () => {
this.hideTooltip();
const newType = this.state.type === "text" ? "password" : "text";
this.setState({
@ -395,9 +388,10 @@ class PasswordInput extends React.Component {
></InputBlock>
<Tooltip
id="tooltipContent"
effect="solid"
place="top"
clickable
openOnClick
anchorSelect="div[id='tooltipContent'] input"
offsetLeft={this.props.tooltipOffsetLeft}
offsetTop={this.props.tooltipOffsetTop}
reference={this.refTooltip}
@ -435,11 +429,9 @@ class PasswordInput extends React.Component {
<>
<div className="password-field-wrapper">
<PasswordProgress
inputWidth={inputWidth}
data-for="tooltipContent"
data-tip=""
data-event="click"
id="tooltipContent"
ref={this.ref}
inputWidth={inputWidth}
isDisabled={isDisabled}
>
{this.renderInputGroup()}

View File

@ -1,82 +1,79 @@
import React, { Component } from "react";
import PropTypes from "prop-types";
import ReactTooltip from "react-tooltip";
import React from "react";
import { Tooltip as ReactTooltip } from "react-tooltip";
import Portal from "../portal";
import StyledTooltip from "./styled-tooltip";
import { flip, shift, offset } from "@floating-ui/dom";
class Tooltip extends Component {
constructor(props) {
super(props);
}
const defaultOffset = 4;
const Tooltip = (props) => {
const {
id,
place,
getContent,
children,
afterShow,
afterHide,
className,
style,
color,
maxWidth,
anchorSelect,
clickable,
openOnClick,
isOpen,
float,
noArrow = true,
} = props;
componentDidUpdate() {
ReactTooltip.rebuild();
}
render() {
const {
effect,
place,
id,
getContent,
offsetTop,
offsetRight,
offsetBottom,
offsetLeft,
children,
afterShow,
afterHide,
reference,
className,
style,
color,
maxWidth,
...rest
} = this.props;
const renderTooltip = () => (
<StyledTooltip
theme={this.props.theme}
className={className}
style={style}
color={color}
maxWidth={maxWidth}
const renderTooltip = () => (
<StyledTooltip
theme={props.theme}
className={className}
style={style}
color={color}
maxWidth={maxWidth}
>
<ReactTooltip
id={id}
float={float}
place={place}
closeOnScroll
closeOnResize
isOpen={isOpen}
noArrow={noArrow}
render={getContent}
clickable={clickable}
afterShow={afterShow}
afterHide={afterHide}
offset={props.offset}
positionStrategy="fixed"
openOnClick={openOnClick}
anchorSelect={anchorSelect}
className="__react_component_tooltip"
middlewares={[
offset(props.offset ?? defaultOffset),
flip({
crossAxis: false,
fallbackAxisSideDirection: place,
}),
shift(),
]}
>
<ReactTooltip
theme={this.props.theme}
id={id}
ref={reference}
getContent={getContent}
effect={effect}
place={place}
offset={{
top: offsetTop,
right: offsetRight,
bottom: offsetBottom,
left: offsetLeft,
}}
wrapper="div"
afterShow={afterShow}
afterHide={afterHide}
isCapture={true}
{...rest}
>
{children}
</ReactTooltip>
</StyledTooltip>
);
{children}
</ReactTooltip>
</StyledTooltip>
);
const tooltip = renderTooltip();
const tooltip = renderTooltip();
return <Portal element={tooltip} />;
}
}
return <Portal element={tooltip} />;
};
Tooltip.propTypes = {
/** Used as HTML id property */
id: PropTypes.string,
/** Tooltip behavior */
effect: PropTypes.oneOf(["float", "solid"]),
/** Global tooltip placement */
place: PropTypes.oneOf(["top", "right", "bottom", "left"]),
/** Sets a callback function that generates the tip content dynamically */
@ -85,20 +82,10 @@ Tooltip.propTypes = {
afterHide: PropTypes.func,
/** A function to be called after the tooltip is shown */
afterShow: PropTypes.func,
/** Sets the top offset for all the tooltips on the page */
offsetTop: PropTypes.number,
/** Sets the right offset for all the tooltips on the page */
offsetRight: PropTypes.number,
/** Sets the bottom offset for all the tooltips on the page */
offsetBottom: PropTypes.number,
/** Sets the left offset for all the tooltips on the page */
offsetLeft: PropTypes.number,
/** Space between the tooltip element and anchor element (arrow not included in calculation) */
offset: PropTypes.number,
/** Child elements */
children: PropTypes.oneOfType([PropTypes.string, PropTypes.object]),
reference: PropTypes.oneOfType([
PropTypes.func,
PropTypes.shape({ current: PropTypes.any }),
]),
/** Accepts class */
className: PropTypes.string,
/** Accepts css style */
@ -107,15 +94,23 @@ Tooltip.propTypes = {
color: PropTypes.string,
/** Maximum width of the tooltip */
maxWidth: PropTypes.string,
/** The tooltip can be controlled or uncontrolled, this attribute cannot be used to handle show and hide tooltip outside tooltip */
isOpen: PropTypes.bool,
/** Allow interaction with elements inside the tooltip */
clickable: PropTypes.bool,
/** Controls whether the tooltip should open when clicking (true) or hovering (false) the anchor element */
openOnClick: PropTypes.bool,
/** Tooltip will follow the mouse position when it moves inside the anchor element */
float: PropTypes.bool,
/** The selector for the anchor elements */
anchorSelect: PropTypes.string,
/** Tooltip arrow will not be shown */
noArrow: PropTypes.bool,
};
Tooltip.defaultProps = {
effect: "float",
place: "top",
offsetTop: 0,
offsetRight: 0,
offsetBottom: 0,
offsetLeft: 0,
noArrow: true,
};
export default Tooltip;

View File

@ -15,8 +15,13 @@ const StyledTooltip = styled.div`
padding: ${(props) => props.theme.tooltip.padding};
pointer-events: ${(props) => props.theme.tooltip.pointerEvents};
max-width: ${(props) =>
props.maxWidth ? props.maxWidth : props.theme.tooltip.maxWidth};
`min(100vw, ${
props.maxWidth ? props.maxWidth : props.theme.tooltip.maxWidth
})`};
color: ${(props) => props.theme.tooltip.textColor} !important;
z-index: 999;
box-sizing: border-box;
p,
span {

View File

@ -35,20 +35,19 @@ import QuestionReactSvgUrl from 'PUBLIC_DIR/images/question.react.svg?url";
```jsx
<div
style={BodyStyle}
data-for="tooltipContent"
data-tip="You tooltip content"
data-event="click focus"
data-offset="{'top': 100, 'right': 100}"
data-place="top"
data-tooltip-id="tooltipContent"
data-tooltip-content="You tooltip content"
data-tooltip-place="top"
>
<IconButton isClickable={true} size={20} iconName={QuestionReactSvgUrl} />
</div>
<Tooltip
id="tooltipContent"
getContent={dataTip => <Text fontSize='13px'>{dataTip}</Text>}
effect="float"
float
place="top"
offset={100}
maxWidth={320}
id="tooltipContent"
getContent={({content}) => <Text fontSize='13px'>{content}</Text>}
/>
```
@ -90,27 +89,27 @@ const arrayUsers = [
```jsx
<h5 style={{ marginLeft: -5 }}>Hover group</h5>
<Link data-for="group" data-tip={0}>Bob</Link><br />
<Link data-for="group" data-tip={1}>John</Link><br />
<Link data-for="group" data-tip={2}>Kevin</Link><br />
<Link data-for="group" data-tip={3}>Alex</Link><br />
<Link data-for="group" data-tip={4}>Tomas</Link>
<Link data-tooltip-id="group" data-tooltip-content={0}>Bob</Link><br />
<Link data-tooltip-id="group" data-tooltip-content={1}>John</Link><br />
<Link data-tooltip-id="group" data-tooltip-content={2}>Kevin</Link><br />
<Link data-tooltip-id="group" data-tooltip-content={3}>Alex</Link><br />
<Link data-tooltip-id="group" data-tooltip-content={4}>Tomas</Link>
```
```jsx
<Tooltip
id="group"
offsetRight={90}
getContent={(dataTip) =>
dataTip ? (
getContent={({ content }) =>
content ? (
<div>
<Text isBold={true} fontSize="16px">
{arrayUsers[dataTip].name}
{arrayUsers[content].name}
</Text>
<Text color="#A3A9AE" fontSize="13px">
{arrayUsers[dataTip].email}
{arrayUsers[content].email}
</Text>
<Text fontSize="13px">{arrayUsers[dataTip].position}</Text>
<Text fontSize="13px">{arrayUsers[content].position}</Text>
</div>
) : null
}

View File

@ -24,7 +24,7 @@ const Template = (args) => {
return (
<div style={{ height: "240px" }}>
<div style={BodyStyle}>
<Link data-for="link" data-tip="Bob Johnston">
<Link data-tooltip-id="link" data-tooltip-content="Bob Johnston">
Bob Johnston
</Link>
</div>
@ -32,10 +32,10 @@ const Template = (args) => {
<Tooltip
{...args}
id="link"
getContent={(dataTip) => (
getContent={({ content }) => (
<div>
<Text isBold={true} fontSize="16px">
{dataTip}
{content}
</Text>
<Text color="#A3A9AE" fontSize="13px">
BobJohnston@gmail.com
@ -50,12 +50,8 @@ const Template = (args) => {
export const basic = Template.bind({});
basic.args = {
effect: "float",
float: true,
place: "top",
offsetTop: 0,
offsetRight: 0,
offsetBottom: 0,
offsetLeft: 0,
};
const arrayUsers = [
@ -96,11 +92,11 @@ const AllTemplate = (args) => {
<div>
<div>
<h5 style={{ marginLeft: -5 }}>Hover on me</h5>
<Link data-for="link" data-tip="Bob Johnston">
<Link data-tooltip-id="link" data-tooltip-content="Bob Johnston">
Bob Johnston
</Link>
</div>
<Tooltip id="link" offsetRight={0} effect="solid">
<Tooltip id="link" offset={0}>
<div>
<Text isBold={true} fontSize="16px">
Bob Johnston
@ -114,23 +110,23 @@ const AllTemplate = (args) => {
<div>
<h5 style={{ marginLeft: -5 }}>Hover group</h5>
<Link data-for="group" data-tip={0}>
<Link data-tooltip-id="group" data-tooltip-content={0}>
Bob
</Link>
<br />
<Link data-for="group" data-tip={1}>
<Link data-tooltip-id="group" data-tooltip-content={1}>
John
</Link>
<br />
<Link data-for="group" data-tip={2}>
<Link data-tooltip-id="group" data-tooltip-content={2}>
Kevin
</Link>
<br />
<Link data-for="group" data-tip={3}>
<Link data-tooltip-id="group" data-tooltip-content={3}>
Alex
</Link>
<br />
<Link data-for="group" data-tip={4}>
<Link data-tooltip-id="group" data-tooltip-content={4}>
Tomas
</Link>
</div>
@ -138,16 +134,16 @@ const AllTemplate = (args) => {
<Tooltip
id="group"
offsetRight={0}
getContent={(dataTip) =>
dataTip ? (
getContent={({ content }) =>
content ? (
<div>
<Text isBold={true} fontSize="16px">
{arrayUsers[dataTip].name}
{arrayUsers[content].name}
</Text>
<Text color="#A3A9AE" fontSize="13px">
{arrayUsers[dataTip].email}
{arrayUsers[content].email}
</Text>
<Text fontSize="13px">{arrayUsers[dataTip].position}</Text>
<Text fontSize="13px">{arrayUsers[content].position}</Text>
</div>
) : null
}

View File

@ -220,6 +220,8 @@ public interface IFolderDao<T>
CommonChunkedUploadSessionHolder sessionHolder);
Task<string> GetBackupExtensionAsync(T folderId);
#region Only for TMFolderDao
/// <summary>

View File

@ -1599,57 +1599,9 @@ internal class FolderDao : AbstractDao, IFolderDao<int>
Entries = r.Select(e => new KeyValuePair<string, FileEntryType>(e.EntryId, e.EntryType)).ToHashSet()
}));
private string GetProjectTitle(object folderID)
public async Task<string> GetBackupExtensionAsync(int folderId)
{
return "";
//if (!ApiServer.Available)
//{
// return string.Empty;
//}
//var cacheKey = "documents/folders/" + folderID.ToString();
//var projectTitle = Convert.ToString(cache.Get<string>(cacheKey));
//if (!string.IsNullOrEmpty(projectTitle)) return projectTitle;
//var bunchObjectID = GetBunchObjectID(folderID);
//if (string.IsNullOrEmpty(bunchObjectID))
// throw new Exception("Bunch Object id is null for " + folderID);
//if (!bunchObjectID.StartsWith("projects/project/"))
// return string.Empty;
//var bunchObjectIDParts = bunchObjectID.Split('/');
//if (bunchObjectIDParts.Length < 3)
// throw new Exception("Bunch object id is not supported format");
//var projectID = Convert.ToInt32(bunchObjectIDParts[bunchObjectIDParts.Length - 1]);
//if (HttpContext.Current == null || !SecurityContext.IsAuthenticated)
// return string.Empty;
//var apiServer = new ApiServer();
//var apiUrl = string.Format("{0}project/{1}.json?fields=id,title", SetupInfo.WebApiBaseUrl, projectID);
//var responseApi = JObject.Parse(Encoding.UTF8.GetString(Convert.FromBase64String(apiServer.GetApiResponse(apiUrl, "GET"))))["response"];
//if (responseApi != null && responseApi.HasValues)
//{
// projectTitle = Global.ReplaceInvalidCharsAndTruncate(responseApi["title"].Value<string>());
//}
//else
//{
// return string.Empty;
//}
//if (!string.IsNullOrEmpty(projectTitle))
//{
// cache.Insert(cacheKey, projectTitle, TimeSpan.FromMinutes(15));
//}
//return projectTitle;
return (await _globalStore.GetStoreAsync()).GetBackupExtension();
}
}

View File

@ -67,7 +67,6 @@ public class ChunkedUploadSession<T> : CommonChunkedUploadSession
chunkedUploadSession.TransformItems();
return chunkedUploadSession;
}
}

View File

@ -2681,6 +2681,12 @@ public class FileStorageService //: IFileStorageService
Error = FilesCommonResource.ErrorMassage_SecurityException_ReadFile
};
}
var fileStable = file;
if (file.Forcesave != ForcesaveType.None)
{
fileStable = await fileDao.GetFileStableAsync(file.Id, file.Version);
}
var docKey = await _documentServiceHelper.GetDocKeyAsync(fileStable);
var fileReference = new FileReference<T>
{
@ -2691,7 +2697,9 @@ public class FileStorageService //: IFileStorageService
InstanceId = (await _tenantManager.GetCurrentTenantIdAsync()).ToString()
},
Url = await _documentServiceConnector.ReplaceCommunityAdressAsync(await _pathProvider.GetFileStreamUrlAsync(file, lastVersion: true)),
FileType = file.ConvertedExtension.Trim('.')
FileType = file.ConvertedExtension.Trim('.'),
Key = docKey,
Link = _baseCommonLinkUtility.GetFullAbsolutePath(_filesLinkUtility.GetFileWebEditorUrl(file.Id)),
};
fileReference.Token = _documentServiceHelper.GetSignature(fileReference);
return fileReference;

View File

@ -472,6 +472,13 @@ internal class ProviderFolderDao : ProviderDaoBase, IFolderDao<string>
return await folderDao.CreateDataWriteOperatorAsync(folderId, chunkedUploadSession, sessionHolder);
}
/// <summary>
/// Resolves the provider-specific folder dao for the given folder and
/// delegates the backup-extension lookup to it.
/// </summary>
/// <param name="folderId">Identifier of the third-party folder.</param>
/// <returns>The archive extension reported by the underlying dao.</returns>
public async Task<string> GetBackupExtensionAsync(string folderId)
{
    // Selector picks the concrete third-party implementation for this id.
    var providerDao = _selectorFactory.GetSelector(folderId).GetFolderDao(folderId);

    return await providerDao.GetBackupExtensionAsync(folderId);
}
private IAsyncEnumerable<Folder<string>> FilterByProvider(IAsyncEnumerable<Folder<string>> folders, ProviderFilter provider)
{
if (provider != ProviderFilter.kDrive && provider != ProviderFilter.WebDav && provider != ProviderFilter.Yandex)

View File

@ -440,6 +440,11 @@ internal class SharePointFolderDao : SharePointDaoBase, IFolderDao<string>
{
return Task.FromResult<IDataWriteOperator>(null);
}
/// <summary>
/// Backup archives for SharePoint-backed folders always use the tar.gz format;
/// the folder id does not influence the result.
/// </summary>
/// <param name="folderId">Unused; present to satisfy the IFolderDao contract.</param>
/// <returns>A completed task carrying the fixed extension.</returns>
public Task<string> GetBackupExtensionAsync(string folderId) => Task.FromResult("tar.gz");
}
static file class Queries

View File

@ -505,6 +505,11 @@ internal class SharpBoxFolderDao : SharpBoxDaoBase, IFolderDao<string>
{
return Task.FromResult<IDataWriteOperator>(null);
}
/// <summary>
/// SharpBox-backed folders are always archived as tar.gz; the folder id is
/// ignored and only kept for interface compatibility.
/// </summary>
/// <param name="folderId">Unused; required by the IFolderDao contract.</param>
/// <returns>A completed task with the constant extension.</returns>
public Task<string> GetBackupExtensionAsync(string folderId) => Task.FromResult("tar.gz");
}
static file class Queries

View File

@ -510,6 +510,11 @@ internal class ThirdPartyFolderDao<TFile, TFolder, TItem> : BaseFolderDao, IFold
return Task.FromResult<IDataWriteOperator>(new ChunkZipWriteOperator(_tempStream, chunkedUploadSession, sessionHolder));
}
/// <summary>
/// Third-party folders use a fixed tar.gz backup extension regardless of the
/// folder identifier.
/// </summary>
/// <param name="folderId">Unused; present to satisfy the IFolderDao contract.</param>
/// <returns>A completed task yielding the constant extension.</returns>
public Task<string> GetBackupExtensionAsync(string folderId) => Task.FromResult("tar.gz");
public Task ReassignFoldersAsync(Guid oldOwnerId, Guid newOwnerId)
{
return Task.CompletedTask;

View File

@ -84,7 +84,7 @@ global using ASC.Core.Notify.Socket;
global using ASC.Core.Tenants;
global using ASC.Core.Users;
global using ASC.Data.Storage;
global using ASC.Data.Storage.ZipOperators;
global using ASC.Data.Storage.DataOperators;
global using ASC.ElasticSearch;
global using ASC.ElasticSearch.Core;
global using ASC.ElasticSearch.Service;

View File

@ -677,6 +677,14 @@ public class FileReference<T>
/// <type>System.String, System</type>
public string FileType { get; set; }
/// <summary>Key</summary>
/// <type>System.String, System</type>
public string Key { get; set; }
/// <summary>Link</summary>
/// <type>System.String, System</type>
public string Link { get; set; }
/// <summary>Token</summary>
/// <type>System.String, System</type>
public string Token { get; set; }

View File

@ -28,11 +28,13 @@ namespace ASC.Web.Files.Utils;
public class FilesChunkedUploadSessionHolder : CommonChunkedUploadSessionHolder
{
private readonly IDaoFactory _daoFactory;
private readonly IDaoFactory _daoFactory;
public FilesChunkedUploadSessionHolder(IDaoFactory daoFactory, TempPath tempPath, IDataStore dataStore, string domain, long maxChunkUploadSize = 10485760)
: base(tempPath, dataStore, domain, maxChunkUploadSize)
{
_daoFactory = daoFactory;
_daoFactory = daoFactory;
TempDomain = FileConstant.StorageDomainTmp;
}
public override async Task<string> UploadChunkAsync(CommonChunkedUploadSession uploadSession, Stream stream, long length)
{

View File

@ -47,6 +47,7 @@
onAppReady: null,
onAppError: null,
onEditorCloseCallback: null,
onAuthSuccess: null,
},
};

View File

@ -139,6 +139,7 @@ public class CspSettingsHelper
var def = csp.ByDefaultAllow
.FromSelf()
.From("data:")
.From(_filesLinkUtility.DocServiceUrl);
var scriptBuilder = csp.AllowScripts
@ -161,7 +162,9 @@ public class CspSettingsHelper
.AllowUnsafeInline();
var imageBuilder = csp.AllowImages
.FromSelf();
.FromSelf()
.From("data:")
.From("blob:");
var frameBuilder = csp.AllowFraming
.FromSelf();

795
yarn.lock

File diff suppressed because it is too large Load Diff