/*
 *
 * (c) Copyright Ascensio System Limited 2010-2020
 *
 * This program is freeware. You can redistribute it and/or modify it under the terms of the GNU
 * General Public License (GPL) version 3 as published by the Free Software Foundation (https://www.gnu.org/copyleft/gpl.html).
 * In accordance with Section 7(a) of the GNU GPL its Section 15 shall be amended to the effect that
 * Ascensio System SIA expressly excludes the warranty of non-infringement of any third-party rights.
 *
 * THIS PROGRAM IS DISTRIBUTED WITHOUT ANY WARRANTY; WITHOUT EVEN THE IMPLIED WARRANTY OF MERCHANTABILITY OR
 * FITNESS FOR A PARTICULAR PURPOSE. For more details, see GNU GPL at https://www.gnu.org/copyleft/gpl.html
 *
 * You can contact Ascensio System SIA by email at sales@onlyoffice.com
 *
 * The interactive user interfaces in modified source and object code versions of ONLYOFFICE must display
 * Appropriate Legal Notices, as required under Section 5 of the GNU GPL version 3.
 *
 * Pursuant to Section 7 § 3(b) of the GNU GPL you must retain the original ONLYOFFICE logo which contains
 * relevant author attributions when distributing the software. If the display of the logo in its graphic
 * form is not reasonably feasible for technical reasons, you must include the words "Powered by ONLYOFFICE"
 * in every copy of the program you distribute.
 * Pursuant to Section 7 § 3(e) we decline to grant you any rights under trademark law for use of our trademarks.
 *
*/
using System;
using System.Collections.Generic;
using System.Data;
using System.Data.Common;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Xml.Linq;

using ASC.Common;
using ASC.Common.Logging;
using ASC.Core;
using ASC.Core.Common.EF;
using ASC.Data.Backup.EF.Context;
using ASC.Data.Backup.Exceptions;
using ASC.Data.Backup.Extensions;
using ASC.Data.Backup.Tasks.Data;
using ASC.Data.Backup.Tasks.Modules;
using ASC.Data.Storage;

using Microsoft.Extensions.Options;

using Newtonsoft.Json;
namespace ASC.Data.Backup.Tasks
{
[Scope]
public class BackupPortalTask : PortalTaskBase
{
// Path length above which the Windows long-path prefix (\\?\) is applied.
private const int MaxLength = 250;
// Number of rows emitted per REPLACE INTO statement when dumping table data.
private const int BatchLimit = 5000;

// Destination archive path; assigned once in Init.
public string BackupFilePath { get; private set; }
// Page size used when reading table rows; assigned once in Init.
public int Limit { get; private set; }
// True on standalone installs: back up the whole database and every tenant.
private bool Dump { get; set; }

private TenantManager TenantManager { get; set; }
private BackupsContext BackupRecordContext { get; set; }

public BackupPortalTask(DbFactory dbFactory, DbContextManager<BackupsContext> dbContextManager, IOptionsMonitor<ILog> options, TenantManager tenantManager, CoreBaseSettings coreBaseSettings, StorageFactory storageFactory, StorageFactoryConfig storageFactoryConfig, ModuleProvider moduleProvider)
    : base(dbFactory, options, storageFactory, storageFactoryConfig, moduleProvider)
{
    Dump = coreBaseSettings.Standalone;
    TenantManager = tenantManager;
    BackupRecordContext = dbContextManager.Get(DbFactory.ConnectionStringSettings.ConnectionString);
}
/// <summary>
/// Configures the backup target and page size, then delegates tenant/config
/// initialization to the base class.
/// </summary>
public void Init(int tenantId, string fromConfigPath, string toFilePath, int limit)
{
    if (string.IsNullOrEmpty(toFilePath))
    {
        throw new ArgumentNullException(nameof(toFilePath));
    }

    BackupFilePath = toFilePath;
    Limit = limit;
    Init(tenantId, fromConfigPath);
}
/// <summary>
/// Entry point of the backup job: writes either a full multi-tenant dump
/// (standalone mode) or a per-module backup of the current tenant into a zip.
/// </summary>
public override void RunJob()
{
    Logger.DebugFormat("begin backup {0}", TenantId);
    TenantManager.SetCurrentTenant(TenantId);

    using (var writer = new ZipWriteOperator(BackupFilePath))
    {
        if (Dump)
        {
            // Standalone install: dump the whole database and every tenant's storage.
            DoDump(writer);
        }
        else
        {
            var modules = GetModulesToProcess().ToList();
            var fileGroups = GetFilesGroup();

            var storageSteps = ProcessStorage ? fileGroups.Count : 0;
            SetStepsCount(modules.Count + storageSteps);

            foreach (var module in modules)
            {
                DoBackupModule(writer, module);
            }

            if (ProcessStorage)
            {
                DoBackupStorage(writer, fileGroups);
            }
        }
    }

    Logger.DebugFormat("end backup {0}", TenantId);
}
/// <summary>
/// Performs a full (standalone) dump: every table's schema and data plus, when
/// enabled, every tenant's storage files, archived in batches of TasksLimit.
/// </summary>
private void DoDump(IDataWriteOperator writer)
{
    // Marker entry so the restore side can detect a full dump archive.
    var tmp = Path.GetTempFileName();
    File.AppendAllText(tmp, true.ToString());
    writer.WriteEntry(KeyHelper.GetDumpKey(), tmp);
    // Fix: the temp marker file was previously never deleted (every other
    // WriteEntry call site in this class deletes its temp file afterwards).
    File.Delete(tmp);

    List<string> tables;
    var files = new List<BackupFileInfo>();
    using (var connection = DbFactory.OpenConnection())
    {
        var command = connection.CreateCommand();
        command.CommandText = "show tables";
        tables = ExecuteList(command).Select(r => Convert.ToString(r[0])).ToList();
    }

    var stepscount = tables.Count * 4; // (schema + data) * (dump + zip)
    if (ProcessStorage)
    {
        var tenants = TenantManager.GetTenants(false).Select(r => r.TenantId);
        foreach (var t in tenants)
        {
            files.AddRange(GetFiles(t));
        }
        stepscount += files.Count * 2 + 1;
        Logger.Debug("files:" + files.Count);
    }
    SetStepsCount(stepscount);

    // Tables whose data must not be dumped (whole ignored modules, ignored
    // tables, and everything starting with "res_").
    var excluded = ModuleProvider.AllModules.Where(r => IgnoredModules.Contains(r.ModuleName)).SelectMany(r => r.Tables).Select(r => r.Name).ToList();
    excluded.AddRange(IgnoredTables);
    excluded.Add("res_");

    var dir = Path.GetDirectoryName(BackupFilePath);
    var subDir = Path.Combine(dir, Path.GetFileNameWithoutExtension(BackupFilePath));
    var schemeDir = Path.Combine(subDir, KeyHelper.GetDatabaseSchema());
    var dataDir = Path.Combine(subDir, KeyHelper.GetDatabaseData());
    if (!Directory.Exists(schemeDir))
    {
        Directory.CreateDirectory(schemeDir);
    }
    if (!Directory.Exists(dataDir))
    {
        Directory.CreateDirectory(dataDir);
    }

    // Dump small tables first: sort by estimated row count.
    var dict = tables.ToDictionary(t => t, SelectCount);
    tables.Sort((pair1, pair2) => dict[pair1].CompareTo(dict[pair2]));

    for (var i = 0; i < tables.Count; i += TasksLimit)
    {
        var tasks = new List<Task>(TasksLimit * 2);
        for (var j = 0; j < TasksLimit && i + j < tables.Count; j++)
        {
            var t = tables[i + j];
            tasks.Add(Task.Run(() => DumpTableScheme(t, schemeDir)));
            if (!excluded.Any(t.StartsWith))
            {
                tasks.Add(Task.Run(() => DumpTableData(t, dataDir, dict[t])));
            }
            else
            {
                // Skipped table: report its two data steps (dump + zip) as done.
                SetStepCompleted(2);
            }
        }
        Task.WaitAll(tasks.ToArray());
        ArchiveDir(writer, subDir);
    }

    Logger.DebugFormat("dir remove start {0}", subDir);
    Directory.Delete(subDir, true);
    Logger.DebugFormat("dir remove end {0}", subDir);

    if (ProcessStorage)
    {
        DoDumpStorage(writer, files);
    }
}
/// <summary>
/// Returns the storage files of the given tenant, minus files that belong to
/// already-stored local backups of that tenant.
/// </summary>
private IEnumerable<BackupFileInfo> GetFiles(int tenantId)
{
    var excludedBackups = BackupRecordContext.Backups
        .Where(b => b.TenantId == tenantId && b.StorageType == 0 && b.StoragePath != null)
        .ToList();

    return GetFilesToProcess(tenantId)
        .Where(f => !excludedBackups.Any(e => f.Path.Replace('\\', '/').Contains(string.Format("/file_{0}/", e.StoragePath))))
        .ToList();
}
/// <summary>
/// Writes a DROP + CREATE script for table <paramref name="t"/> into
/// <paramref name="dir"/>, named after the table. Logs and rethrows on error.
/// </summary>
private void DumpTableScheme(string t, string dir)
{
    try
    {
        Logger.DebugFormat("dump table scheme start {0}", t);
        using (var connection = DbFactory.OpenConnection())
        {
            var command = connection.CreateCommand();
            command.CommandText = string.Format("SHOW CREATE TABLE `{0}`", t);
            var createScheme = ExecuteList(command);

            // Column 1 of SHOW CREATE TABLE holds the CREATE statement.
            var script = new StringBuilder();
            script.AppendFormat("DROP TABLE IF EXISTS `{0}`;", t);
            script.AppendLine();
            script.Append(createScheme.Select(r => Convert.ToString(r[1])).FirstOrDefault());
            script.Append(";");

            var path = Path.Combine(dir, t);
            using (var stream = File.OpenWrite(path))
            {
                var bytes = Encoding.UTF8.GetBytes(script.ToString());
                stream.Write(bytes, 0, bytes.Length);
            }

            SetStepCompleted();
        }
        Logger.DebugFormat("dump table scheme stop {0}", t);
    }
    catch (Exception e)
    {
        Logger.Error(e);
        throw;
    }
}
/// <summary>
/// Returns the estimated row count of table <paramref name="t"/> from
/// INFORMATION_SCHEMA after refreshing its statistics. Logs and rethrows on error.
/// </summary>
private int SelectCount(string t)
{
    try
    {
        using var connection = DbFactory.OpenConnection();

        // Refresh statistics so TABLE_ROWS is reasonably accurate. The table
        // name cannot be a parameter in DDL; it comes from `show tables`, not
        // from user input.
        using var analyzeCommand = connection.CreateCommand();
        analyzeCommand.CommandText = $"analyze table {t}";
        analyzeCommand.ExecuteNonQuery();

        using var command = connection.CreateCommand();
        // Fix: parameterized (was string concatenation) and filtered by the
        // current schema so a same-named table in another database cannot be
        // counted by mistake (the sibling query in DumpTableData already
        // filters by TABLE_SCHEMA).
        command.CommandText = "select TABLE_ROWS from INFORMATION_SCHEMA.TABLES where TABLE_NAME = @table and TABLE_SCHEMA = @schema";
        var tableParam = command.CreateParameter();
        tableParam.ParameterName = "@table";
        tableParam.Value = t;
        command.Parameters.Add(tableParam);
        var schemaParam = command.CreateParameter();
        schemaParam.ParameterName = "@schema";
        schemaParam.Value = connection.Database;
        command.Parameters.Add(schemaParam);

        return Convert.ToInt32(command.ExecuteScalar());
    }
    catch (Exception e)
    {
        Logger.Error(e);
        throw;
    }
}
/// <summary>
/// Dumps the rows of table <paramref name="t"/> into a REPLACE INTO script in
/// <paramref name="dir"/>. Pages by primary-key range when the table has a
/// single-column int primary key, otherwise by LIMIT/OFFSET.
/// <paramref name="count"/> is the estimated row count from SelectCount.
/// </summary>
private void DumpTableData(string t, string dir, int count)
{
    try
    {
        if (count == 0)
        {
            // Nothing to dump; still report both steps (dump + zip) as done.
            Logger.DebugFormat("dump table data stop {0}", t);
            SetStepCompleted(2);
            return;
        }

        Logger.DebugFormat("dump table data start {0}", t);

        var searchWithPrimary = false;
        string primaryIndex;
        var primaryIndexStep = 0;
        var primaryIndexStart = 0;
        var primaryIndexMax = 0;

        // Column list for the SELECT (backquoted to survive reserved words).
        List<string> columns;
        using (var connection = DbFactory.OpenConnection())
        {
            var command = connection.CreateCommand();
            command.CommandText = string.Format("SHOW COLUMNS FROM `{0}`", t);
            columns = ExecuteList(command).Select(r => "`" + Convert.ToString(r[0]) + "`").ToList();
            // (Removed a leftover empty debug `if` on tenants_quota/webstudio_settings.)
        }

        // Look for a single-column int primary key to page by key range.
        using (var connection = DbFactory.OpenConnection())
        {
            var command = connection.CreateCommand();
            command.CommandText = string.Format("select COLUMN_NAME from information_schema.`COLUMNS` where TABLE_SCHEMA = '{0}' and TABLE_NAME = '{1}' and COLUMN_KEY = 'PRI' and DATA_TYPE = 'int'", connection.Database, t);
            primaryIndex = ExecuteList(command).ConvertAll(r => Convert.ToString(r[0])).FirstOrDefault();
        }

        // Usable only if that column is the leading column of an index.
        using (var connection = DbFactory.OpenConnection())
        {
            var command = connection.CreateCommand();
            command.CommandText = string.Format("SHOW INDEXES FROM {0} WHERE COLUMN_NAME='{1}' AND seq_in_index=1", t, primaryIndex);
            var isLeft = ExecuteList(command);
            searchWithPrimary = isLeft.Count == 1;
        }

        if (searchWithPrimary)
        {
            using var connection = DbFactory.OpenConnection();
            var command = connection.CreateCommand();
            command.CommandText = string.Format("select max({1}), min({1}) from {0}", t, primaryIndex);
            var minMax = ExecuteList(command).ConvertAll(r => new Tuple<int, int>(Convert.ToInt32(r[0]), Convert.ToInt32(r[1]))).FirstOrDefault();
            primaryIndexStart = minMax.Item2;
            primaryIndexMax = minMax.Item1;
            // Average id spacing per row, but never below the page size.
            primaryIndexStep = (minMax.Item1 - minMax.Item2) / count;
            if (primaryIndexStep < Limit)
            {
                primaryIndexStep = Limit;
            }
        }

        var path = Path.Combine(dir, t);
        var offset = 0;
        do
        {
            List<object[]> result;
            if (searchWithPrimary)
            {
                result = GetDataWithPrimary(t, columns, primaryIndex, primaryIndexStart, primaryIndexStep);
                primaryIndexStart += primaryIndexStep;
            }
            else
            {
                result = GetData(t, columns, offset);
                offset += Limit;
            }

            if (result.Count > 0)
            {
                SaveToFile(path, t, columns, result);
            }

            if (searchWithPrimary)
            {
                // Fix: a sparse key range can return fewer than Limit rows while
                // rows with higher ids remain, so the old `< Limit` break could
                // silently truncate the dump. Stop only once the whole
                // [min, max] key range has been covered.
                if (primaryIndexStart > primaryIndexMax) break;
            }
            else if (result.Count < Limit)
            {
                break;
            }
        } while (true);

        SetStepCompleted();
        Logger.DebugFormat("dump table data stop {0}", t);
    }
    catch (Exception e)
    {
        Logger.Error(e);
        throw;
    }
}
/// <summary>
/// Reads one LIMIT/OFFSET page of rows from table <paramref name="t"/>;
/// used when the table has no usable int primary key.
/// </summary>
private List<object[]> GetData(string t, List<string> columns, int offset)
{
    using var connection = DbFactory.OpenConnection();
    var command = connection.CreateCommand();
    var selects = string.Join(',', columns);
    command.CommandText = $"select {selects} from {t} LIMIT {offset}, {Limit}";
    return ExecuteList(command);
}
/// <summary>
/// Reads the rows of table <paramref name="t"/> whose <paramref name="primary"/>
/// key lies in the inclusive range [start, start + step].
/// </summary>
private List<object[]> GetDataWithPrimary(string t, List<string> columns, string primary, int start, int step)
{
    using var connection = DbFactory.OpenConnection();
    var command = connection.CreateCommand();
    var selects = string.Join(',', columns);
    command.CommandText = $"select {selects} from {t} where {primary} BETWEEN {start} and {start + step} ";
    return ExecuteList(command);
}
/// <summary>
/// Appends REPLACE INTO statements for <paramref name="data"/> to the file at
/// <paramref name="path"/>, BatchLimit rows per statement. byte[] cells are
/// written as 0x hex literals; everything else is JSON-serialized with
/// single-quote delimiters so the output is valid SQL.
/// </summary>
private void SaveToFile(string path, string t, IReadOnlyCollection<string> columns, List<object[]> data)
{
    Logger.DebugFormat("save to file {0}", t);

    // Fix: the serializer was allocated once per cell inside the innermost loop.
    var serializer = new JsonSerializer();

    // Fix: iterate by index instead of repeatedly re-copying the tail with
    // Skip(...).ToList(), which was O(n^2) in rows.
    for (var start = 0; start < data.Count; start += BatchLimit)
    {
        var portion = data.GetRange(start, Math.Min(BatchLimit, data.Count - start));

        using var sw = new StreamWriter(path, true);
        using var writer = new JsonTextWriter(sw);
        writer.QuoteChar = '\'';
        writer.DateFormatString = "yyyy-MM-dd HH:mm:ss";

        sw.Write("REPLACE INTO `{0}` ({1}) VALUES ", t, string.Join(",", columns));
        sw.WriteLine();

        for (var j = 0; j < portion.Count; j++)
        {
            var obj = portion[j];
            sw.Write("(");
            for (var i = 0; i < obj.Length; i++)
            {
                if (obj[i] is byte[] byteArray)
                {
                    sw.Write("0x");
                    foreach (var b in byteArray)
                        sw.Write("{0:x2}", b);
                }
                else
                {
                    serializer.Serialize(writer, obj[i]);
                }
                if (i != obj.Length - 1)
                {
                    sw.Write(",");
                }
            }
            sw.Write(")");
            // Comma between value tuples, semicolon after the last one.
            sw.Write(j != portion.Count - 1 ? "," : ";");
            sw.WriteLine();
        }
    }
}
/// <summary>
/// Downloads the given storage files in batches of TasksLimit, archiving each
/// batch, then writes the storage_restore manifest entry.
/// </summary>
private void DoDumpStorage(IDataWriteOperator writer, IReadOnlyList<BackupFileInfo> files)
{
    Logger.Debug("begin backup storage");

    var dir = Path.GetDirectoryName(BackupFilePath);
    var subDir = Path.Combine(dir, Path.GetFileNameWithoutExtension(BackupFilePath));

    for (var i = 0; i < files.Count; i += TasksLimit)
    {
        var storageDir = Path.Combine(subDir, KeyHelper.GetStorage());
        if (!Directory.Exists(storageDir))
        {
            Directory.CreateDirectory(storageDir);
        }

        var tasks = new List<Task>(TasksLimit);
        for (var j = 0; j < TasksLimit && i + j < files.Count; j++)
        {
            var file = files[i + j];
            tasks.Add(Task.Run(() => DoDumpFile(file, storageDir)));
        }
        Task.WaitAll(tasks.ToArray());

        ArchiveDir(writer, subDir);
        Directory.Delete(storageDir, true);
    }

    // Manifest describing every stored file for the restore side.
    var restoreInfoXml = new XElement("storage_restore", files.Select(file => (object)file.ToXElement()).ToArray());
    var tmpPath = Path.Combine(subDir, KeyHelper.GetStorageRestoreInfoZipKey());
    Directory.CreateDirectory(Path.GetDirectoryName(tmpPath));
    using (var tmpFile = File.OpenWrite(tmpPath))
    {
        restoreInfoXml.WriteTo(tmpFile);
    }
    writer.WriteEntry(KeyHelper.GetStorageRestoreInfoZipKey(), tmpPath);
    File.Delete(tmpPath);

    SetStepCompleted();
    Directory.Delete(subDir, true);
    Logger.Debug("end backup storage");
}
/// <summary>
/// Copies one storage file into <paramref name="dir"/>, preserving its zip-key
/// relative layout.
/// </summary>
private async Task DoDumpFile(BackupFileInfo file, string dir)
{
    var storage = StorageFactory.GetStorage(ConfigPath, file.Tenant.ToString(), file.Module);
    var filePath = Path.Combine(dir, file.GetZipKey());
    var dirName = Path.GetDirectoryName(filePath);

    Logger.DebugFormat("backup file {0}", filePath);

    if (!string.IsNullOrEmpty(dirName) && !Directory.Exists(dirName))
    {
        Directory.CreateDirectory(dirName);
    }

    // On Windows, prefix over-long paths so the Win32 long-path form is used.
    if (!WorkContext.IsMono && filePath.Length > MaxLength)
    {
        filePath = @"\\?\" + filePath;
    }

    using (var fileStream = storage.GetReadStream(file.Domain, file.Path))
    using (var tmpFile = File.OpenWrite(filePath))
    {
        await fileStream.CopyToAsync(tmpFile);
    }

    SetStepCompleted();
}
/// <summary>
/// Moves every file under <paramref name="subDir"/> into the archive (entry
/// name is the path relative to subDir) and deletes it from disk.
/// </summary>
private void ArchiveDir(IDataWriteOperator writer, string subDir)
{
    Logger.DebugFormat("archive dir start {0}", subDir);

    foreach (var file in Directory.EnumerateFiles(subDir, "*", SearchOption.AllDirectories))
    {
        var source = file;
        // On Windows, prefix over-long paths so the Win32 long-path form is used.
        if (!WorkContext.IsMono && file.Length > MaxLength)
        {
            source = @"\\?\" + source;
        }
        writer.WriteEntry(file.Substring(subDir.Length), source);
        File.Delete(source);
        SetStepCompleted();
    }

    Logger.DebugFormat("archive dir end {0}", subDir);
}
/// <summary>
/// Returns the current tenant's storage files grouped by module, excluding
/// files that belong to already-stored local backups of the tenant.
/// </summary>
private List<IGrouping<string, BackupFileInfo>> GetFilesGroup()
{
    var exclude = BackupRecordContext.Backups
        .Where(b => b.TenantId == TenantId && b.StorageType == 0 && b.StoragePath != null)
        .ToList();

    var files = GetFilesToProcess(TenantId)
        .Where(f => !exclude.Any(e => f.Path.Replace('\\', '/').Contains(string.Format("/file_{0}/", e.StoragePath))))
        .ToList();

    return files.GroupBy(file => file.Module).ToList();
}
/// <summary>
/// Backs up one module: pages each of its tables into a DataTable, lets the
/// module post-process the rows, and stores each table as an XML entry.
/// </summary>
private void DoBackupModule(IDataWriteOperator writer, IModuleSpecifics module)
{
    Logger.DebugFormat("begin saving data for module {0}", module.ModuleName);

    var tablesToProcess = module.Tables.Where(t => !IgnoredTables.Contains(t.Name) && t.InsertMethod != InsertMethod.None).ToList();
    var tablesCount = tablesToProcess.Count;
    var tablesProcessed = 0;

    using (var connection = DbFactory.OpenConnection())
    {
        foreach (var table in tablesToProcess)
        {
            Logger.DebugFormat("begin load table {0}", table.Name);
            using (var data = new DataTable(table.Name))
            {
                // Page the table into the DataTable; retry the whole table up
                // to 5 times before giving up.
                ActionInvoker.Try(
                    state =>
                    {
                        data.Clear();
                        int counts;
                        var offset = 0;
                        do
                        {
                            var t = (TableInfo)state;
                            var dataAdapter = DbFactory.CreateDataAdapter();
                            dataAdapter.SelectCommand = module.CreateSelectCommand(connection.Fix(), TenantId, t, Limit, offset).WithTimeout(600);
                            counts = ((DbDataAdapter)dataAdapter).Fill(data);
                            offset += Limit;
                        } while (counts == Limit);
                    },
                    table,
                    maxAttempts: 5,
                    onFailure: error => { throw ThrowHelper.CantBackupTable(table.Name, error); },
                    onAttemptFailure: error => Logger.Warn("backup attempt failure: {0}", error));

                // Serialize DateTime columns without timezone adjustment.
                foreach (var col in data.Columns.Cast<DataColumn>().Where(col => col.DataType == typeof(DateTime)))
                {
                    col.DateTimeMode = DataSetDateTime.Unspecified;
                }

                module.PrepareData(data);

                Logger.DebugFormat("end load table {0}", table.Name);
                Logger.DebugFormat("begin saving table {0}", table.Name);

                var tmp = Path.GetTempFileName();
                using (var file = File.OpenWrite(tmp))
                {
                    data.WriteXml(file, XmlWriteMode.WriteSchema);
                    data.Clear(); // Clear removes only the rows; TableName stays valid below.
                }
                writer.WriteEntry(KeyHelper.GetTableZipKey(module, data.TableName), tmp);
                File.Delete(tmp);

                Logger.DebugFormat("end saving table {0}", table.Name);
            }
            SetCurrentStepProgress((int)((++tablesProcessed * 100) / (double)tablesCount));
        }
    }

    Logger.DebugFormat("end saving data for module {0}", module.ModuleName);
}
/// <summary>
/// Backs up the tenant's storage files group by group (best effort: failures
/// are logged, not fatal) and writes the storage_restore manifest entry.
/// </summary>
private void DoBackupStorage(IDataWriteOperator writer, List<IGrouping<string, BackupFileInfo>> fileGroups)
{
    Logger.Debug("begin backup storage");

    foreach (var group in fileGroups)
    {
        // Fix: the storage instance depends only on the group key, but was
        // re-created for every single file in the group.
        var storage = StorageFactory.GetStorage(ConfigPath, TenantId.ToString(), group.Key);
        var filesProcessed = 0;
        var filesCount = group.Count();

        foreach (var file in group)
        {
            var file1 = file;
            // Retry each file up to 5 times; a final failure is only logged so
            // one broken file does not abort the whole backup.
            ActionInvoker.Try(state =>
            {
                var f = (BackupFileInfo)state;
                using var fileStream = storage.GetReadStream(f.Domain, f.Path);
                var tmp = Path.GetTempFileName();
                try
                {
                    using (var tmpFile = File.OpenWrite(tmp))
                    {
                        fileStream.CopyTo(tmpFile);
                    }
                    writer.WriteEntry(file1.GetZipKey(), tmp);
                }
                finally
                {
                    if (File.Exists(tmp))
                    {
                        File.Delete(tmp);
                    }
                }
            }, file, 5, error => Logger.WarnFormat("can't backup file ({0}:{1}): {2}", file1.Module, file1.Path, error));

            SetCurrentStepProgress((int)(++filesProcessed * 100 / (double)filesCount));
        }
    }

    // Manifest describing every stored file for the restore side.
    var restoreInfoXml = new XElement(
        "storage_restore",
        fileGroups
            .SelectMany(group => group.Select(file => (object)file.ToXElement()))
            .ToArray());

    var tmpPath = Path.GetTempFileName();
    using (var tmpFile = File.OpenWrite(tmpPath))
    {
        restoreInfoXml.WriteTo(tmpFile);
    }
    writer.WriteEntry(KeyHelper.GetStorageRestoreInfoZipKey(), tmpPath);
    File.Delete(tmpPath);

    Logger.Debug("end backup storage");
}
/// <summary>
/// Executes the command and materializes every result row as an object array
/// (one element per column).
/// </summary>
public List<object[]> ExecuteList(DbCommand command)
{
    var rows = new List<object[]>();
    using (var reader = command.ExecuteReader())
    {
        while (reader.Read())
        {
            var row = new object[reader.FieldCount];
            reader.GetValues(row);
            rows.Add(row);
        }
    }
    return rows;
}
}
}