Remove files from other repositories

commit d3b5085525 (parent e9995ea640)
Author: Alexey Safronov
Date:   2023-10-02 14:04:28 +04:00

4791 changed files with 0 additions and 1070082 deletions

@@ -1,121 +0,0 @@

Microsoft Visual Studio Solution File, Format Version 12.00
# Visual Studio Version 17
VisualStudioVersion = 17.2.32616.157
MinimumVisualStudioVersion = 10.0.40219.1
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.Data.Backup.Core", "common\ASC.Data.Backup.Core\ASC.Data.Backup.Core.csproj", "{93D91A49-A397-40FC-90DE-85D8ED774803}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.Data.Storage", "common\ASC.Data.Storage\ASC.Data.Storage.csproj", "{582AF3E8-ACA4-49B8-809D-36A586C4307E}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.Files.Core", "products\ASC.Files\Core\ASC.Files.Core.csproj", "{03B5483D-2EEF-4A42-83E7-487592434D3B}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.EventBus.Extensions.Logger", "common\ASC.EventBus.Extensions.Logger\ASC.EventBus.Extensions.Logger.csproj", "{1E3DF216-A0E6-4078-84A8-4A74A88A6FEF}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.Webhooks.Core", "common\ASC.Webhooks.Core\ASC.Webhooks.Core.csproj", "{FD61091C-F0C1-47B3-B442-9B69CEA603DF}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.Feed", "common\ASC.Feed\ASC.Feed.csproj", "{71A964E2-0075-46A3-AE58-9F2A78F6A5BB}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.Core.Common", "common\ASC.Core.Common\ASC.Core.Common.csproj", "{A51D0454-4AFA-46DE-89D4-B03D37E1816C}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.Migration.Creator", "common\Tools\ASC.Migration.Creator\ASC.Migration.Creator.csproj", "{4BECDA03-E5E7-4273-B2D5-65AD3B1D0AF6}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.Migration.Core", "common\Tools\ASC.Migration.Core\ASC.Migration.Core.csproj", "{4E971C04-8D1F-4C07-8D6F-C062C114CF7F}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.Migration.Runner", "common\Tools\ASC.Migration.Runner\ASC.Migration.Runner.csproj", "{249DC522-AC3B-475C-B77E-52CE0322E686}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.ActiveDirectory", "common\ASC.ActiveDirectory\ASC.ActiveDirectory.csproj", "{DD482D67-F700-4C30-9AA6-A2BE7A1079B5}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.Migration.PersonalToDocspace", "common\Tools\ASC.MigrationPersonalToDocspace\ASC.Migration.PersonalToDocspace.csproj", "{9493BF4B-46DF-4BE7-AC63-83992B205A00}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.Common", "common\ASC.Common\ASC.Common.csproj", "{1C5691B2-4AB1-4D3C-931C-AF7AD9CE8241}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.Migrations.MySql.SaaS", "migrations\mysql\SaaS\ASC.Migrations.MySql.SaaS.csproj", "{57A7DD9D-373D-4270-9FF0-986F62B594EF}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.Migrations.MySql.Standalone", "migrations\mysql\Standalone\ASC.Migrations.MySql.Standalone.csproj", "{2608185A-415E-4D21-B5F8-A7F9C95DA918}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.Migrations.PostgreSql.SaaS", "migrations\postgre\SaaS\ASC.Migrations.PostgreSql.SaaS.csproj", "{50A3CFE6-F7A3-49FB-B801-70E793BC8B5A}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.Migrations.PostgreSql.Standalone", "migrations\postgre\Standalone\ASC.Migrations.PostgreSql.Standalone.csproj", "{46AF60C1-EEA6-4C0F-8C02-1B74A8C9720F}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
Release|Any CPU = Release|Any CPU
EndGlobalSection
GlobalSection(ProjectConfigurationPlatforms) = postSolution
{93D91A49-A397-40FC-90DE-85D8ED774803}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{93D91A49-A397-40FC-90DE-85D8ED774803}.Debug|Any CPU.Build.0 = Debug|Any CPU
{93D91A49-A397-40FC-90DE-85D8ED774803}.Release|Any CPU.ActiveCfg = Release|Any CPU
{93D91A49-A397-40FC-90DE-85D8ED774803}.Release|Any CPU.Build.0 = Release|Any CPU
{582AF3E8-ACA4-49B8-809D-36A586C4307E}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{582AF3E8-ACA4-49B8-809D-36A586C4307E}.Debug|Any CPU.Build.0 = Debug|Any CPU
{582AF3E8-ACA4-49B8-809D-36A586C4307E}.Release|Any CPU.ActiveCfg = Release|Any CPU
{582AF3E8-ACA4-49B8-809D-36A586C4307E}.Release|Any CPU.Build.0 = Release|Any CPU
{03B5483D-2EEF-4A42-83E7-487592434D3B}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{03B5483D-2EEF-4A42-83E7-487592434D3B}.Debug|Any CPU.Build.0 = Debug|Any CPU
{03B5483D-2EEF-4A42-83E7-487592434D3B}.Release|Any CPU.ActiveCfg = Release|Any CPU
{03B5483D-2EEF-4A42-83E7-487592434D3B}.Release|Any CPU.Build.0 = Release|Any CPU
{1E3DF216-A0E6-4078-84A8-4A74A88A6FEF}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{1E3DF216-A0E6-4078-84A8-4A74A88A6FEF}.Debug|Any CPU.Build.0 = Debug|Any CPU
{1E3DF216-A0E6-4078-84A8-4A74A88A6FEF}.Release|Any CPU.ActiveCfg = Release|Any CPU
{1E3DF216-A0E6-4078-84A8-4A74A88A6FEF}.Release|Any CPU.Build.0 = Release|Any CPU
{FD61091C-F0C1-47B3-B442-9B69CEA603DF}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{FD61091C-F0C1-47B3-B442-9B69CEA603DF}.Debug|Any CPU.Build.0 = Debug|Any CPU
{FD61091C-F0C1-47B3-B442-9B69CEA603DF}.Release|Any CPU.ActiveCfg = Release|Any CPU
{FD61091C-F0C1-47B3-B442-9B69CEA603DF}.Release|Any CPU.Build.0 = Release|Any CPU
{71A964E2-0075-46A3-AE58-9F2A78F6A5BB}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{71A964E2-0075-46A3-AE58-9F2A78F6A5BB}.Debug|Any CPU.Build.0 = Debug|Any CPU
{71A964E2-0075-46A3-AE58-9F2A78F6A5BB}.Release|Any CPU.ActiveCfg = Release|Any CPU
{71A964E2-0075-46A3-AE58-9F2A78F6A5BB}.Release|Any CPU.Build.0 = Release|Any CPU
{A51D0454-4AFA-46DE-89D4-B03D37E1816C}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{A51D0454-4AFA-46DE-89D4-B03D37E1816C}.Debug|Any CPU.Build.0 = Debug|Any CPU
{A51D0454-4AFA-46DE-89D4-B03D37E1816C}.Release|Any CPU.ActiveCfg = Release|Any CPU
{A51D0454-4AFA-46DE-89D4-B03D37E1816C}.Release|Any CPU.Build.0 = Release|Any CPU
{4BECDA03-E5E7-4273-B2D5-65AD3B1D0AF6}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{4BECDA03-E5E7-4273-B2D5-65AD3B1D0AF6}.Debug|Any CPU.Build.0 = Debug|Any CPU
{4BECDA03-E5E7-4273-B2D5-65AD3B1D0AF6}.Release|Any CPU.ActiveCfg = Release|Any CPU
{4BECDA03-E5E7-4273-B2D5-65AD3B1D0AF6}.Release|Any CPU.Build.0 = Release|Any CPU
{4E971C04-8D1F-4C07-8D6F-C062C114CF7F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{4E971C04-8D1F-4C07-8D6F-C062C114CF7F}.Debug|Any CPU.Build.0 = Debug|Any CPU
{4E971C04-8D1F-4C07-8D6F-C062C114CF7F}.Release|Any CPU.ActiveCfg = Release|Any CPU
{4E971C04-8D1F-4C07-8D6F-C062C114CF7F}.Release|Any CPU.Build.0 = Release|Any CPU
{249DC522-AC3B-475C-B77E-52CE0322E686}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{249DC522-AC3B-475C-B77E-52CE0322E686}.Debug|Any CPU.Build.0 = Debug|Any CPU
{249DC522-AC3B-475C-B77E-52CE0322E686}.Release|Any CPU.ActiveCfg = Release|Any CPU
{249DC522-AC3B-475C-B77E-52CE0322E686}.Release|Any CPU.Build.0 = Release|Any CPU
{DD482D67-F700-4C30-9AA6-A2BE7A1079B5}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{DD482D67-F700-4C30-9AA6-A2BE7A1079B5}.Debug|Any CPU.Build.0 = Debug|Any CPU
{DD482D67-F700-4C30-9AA6-A2BE7A1079B5}.Release|Any CPU.ActiveCfg = Release|Any CPU
{DD482D67-F700-4C30-9AA6-A2BE7A1079B5}.Release|Any CPU.Build.0 = Release|Any CPU
{9493BF4B-46DF-4BE7-AC63-83992B205A00}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{9493BF4B-46DF-4BE7-AC63-83992B205A00}.Debug|Any CPU.Build.0 = Debug|Any CPU
{9493BF4B-46DF-4BE7-AC63-83992B205A00}.Release|Any CPU.ActiveCfg = Release|Any CPU
{9493BF4B-46DF-4BE7-AC63-83992B205A00}.Release|Any CPU.Build.0 = Release|Any CPU
{1C5691B2-4AB1-4D3C-931C-AF7AD9CE8241}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{1C5691B2-4AB1-4D3C-931C-AF7AD9CE8241}.Debug|Any CPU.Build.0 = Debug|Any CPU
{1C5691B2-4AB1-4D3C-931C-AF7AD9CE8241}.Release|Any CPU.ActiveCfg = Release|Any CPU
{1C5691B2-4AB1-4D3C-931C-AF7AD9CE8241}.Release|Any CPU.Build.0 = Release|Any CPU
{57A7DD9D-373D-4270-9FF0-986F62B594EF}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{57A7DD9D-373D-4270-9FF0-986F62B594EF}.Debug|Any CPU.Build.0 = Debug|Any CPU
{57A7DD9D-373D-4270-9FF0-986F62B594EF}.Release|Any CPU.ActiveCfg = Release|Any CPU
{57A7DD9D-373D-4270-9FF0-986F62B594EF}.Release|Any CPU.Build.0 = Release|Any CPU
{2608185A-415E-4D21-B5F8-A7F9C95DA918}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{2608185A-415E-4D21-B5F8-A7F9C95DA918}.Debug|Any CPU.Build.0 = Debug|Any CPU
{2608185A-415E-4D21-B5F8-A7F9C95DA918}.Release|Any CPU.ActiveCfg = Release|Any CPU
{2608185A-415E-4D21-B5F8-A7F9C95DA918}.Release|Any CPU.Build.0 = Release|Any CPU
{50A3CFE6-F7A3-49FB-B801-70E793BC8B5A}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{50A3CFE6-F7A3-49FB-B801-70E793BC8B5A}.Debug|Any CPU.Build.0 = Debug|Any CPU
{50A3CFE6-F7A3-49FB-B801-70E793BC8B5A}.Release|Any CPU.ActiveCfg = Release|Any CPU
{50A3CFE6-F7A3-49FB-B801-70E793BC8B5A}.Release|Any CPU.Build.0 = Release|Any CPU
{46AF60C1-EEA6-4C0F-8C02-1B74A8C9720F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{46AF60C1-EEA6-4C0F-8C02-1B74A8C9720F}.Debug|Any CPU.Build.0 = Debug|Any CPU
{46AF60C1-EEA6-4C0F-8C02-1B74A8C9720F}.Release|Any CPU.ActiveCfg = Release|Any CPU
{46AF60C1-EEA6-4C0F-8C02-1B74A8C9720F}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
EndGlobalSection
GlobalSection(ExtensibilityGlobals) = postSolution
SolutionGuid = {933E5EEB-E9B0-4389-B318-11E283E7FCF7}
EndGlobalSection
EndGlobal

@@ -1,233 +0,0 @@

Microsoft Visual Studio Solution File, Format Version 12.00
# Visual Studio Version 17
VisualStudioVersion = 17.0.31903.59
MinimumVisualStudioVersion = 10.0.40219.1
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.Files.Tests", "products\ASC.Files\Tests\ASC.Files.Tests.csproj", "{EFB99A37-EF25-4A0F-8D7A-786402B1554C}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.Api.Core", "common\ASC.Api.Core\ASC.Api.Core.csproj", "{CAA6EED2-094A-42AE-81B6-3FDF142EC277}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.Common", "common\ASC.Common\ASC.Common.csproj", "{19FFE246-4270-41A6-AA8D-FB961715FB21}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.Core.Common", "common\ASC.Core.Common\ASC.Core.Common.csproj", "{A51D0454-4AFA-46DE-89D4-B03D37E1816C}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.Data.Storage", "common\ASC.Data.Storage\ASC.Data.Storage.csproj", "{D7F459CE-9EAF-423A-B3DC-EFD107007BCE}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.FederatedLogin", "common\ASC.FederatedLogin\ASC.FederatedLogin.csproj", "{484E9663-B9B7-40CA-B600-D5FDCD7CB862}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.MessagingSystem", "common\ASC.MessagingSystem\ASC.MessagingSystem.csproj", "{AB0EFA45-1DAF-42F4-9EE1-4A8EC497DE35}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.ElasticSearch", "common\services\ASC.ElasticSearch\ASC.ElasticSearch.csproj", "{AE1A0E06-6CD4-4E1D-8209-22BBBD6D5652}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.Files.Core", "products\ASC.Files\Core\ASC.Files.Core.csproj", "{C277388C-E19E-4A62-A895-8AAB322A4004}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.Files", "products\ASC.Files\Server\ASC.Files.csproj", "{77BA2F61-6155-4283-BB39-F8E42F46A0B0}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.People", "products\ASC.People\Server\ASC.People.csproj", "{DB492BA0-B072-4056-8A3D-032CD108CD82}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.Web.Core", "web\ASC.Web.Core\ASC.Web.Core.csproj", "{9A703423-594E-4851-8A0D-FF582BAF9FC9}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.Data.Reassigns", "common\ASC.Data.Reassigns\ASC.Data.Reassigns.csproj", "{8682FCD3-C5C5-4946-99FA-B900E9CB68C3}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.Feed", "common\ASC.Feed\ASC.Feed.csproj", "{5A0DF88C-45C8-4FE2-8144-88E8AC8B7C45}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.IPSecurity", "common\ASC.IPSecurity\ASC.IPSecurity.csproj", "{5E5496B5-5686-4415-B3E0-5F61FCD146A3}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.Notify.Textile", "common\ASC.Notify.Textile\ASC.Notify.Textile.csproj", "{5FE0EDA2-1FD7-4D43-885E-DBDD858DCC86}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.Textile", "common\ASC.Textile\ASC.Textile.csproj", "{1A82BE07-0836-4620-A95D-C6D1E9327589}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.VoipService", "common\ASC.VoipService\ASC.VoipService.csproj", "{664031A4-1652-4B68-8168-FD18998700EE}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.Studio.Notify", "common\services\ASC.Studio.Notify\ASC.Studio.Notify.csproj", "{C024C35A-D0F0-42D6-86B2-64ABF7513C4A}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.Notify", "common\services\ASC.Notify\ASC.Notify.csproj", "{B30A0D35-7B32-4E13-9F37-B8BC59F839E5}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.Feed.Aggregator", "common\services\ASC.Feed.Aggregator\ASC.Feed.Aggregator.csproj", "{8ACDEBBD-12DD-43DC-86CF-D66E37528ACC}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.Data.Backup", "common\services\ASC.Data.Backup\ASC.Data.Backup.csproj", "{630E2649-71B6-4C07-A2FC-C0BC05D77A78}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.Web.Api", "web\ASC.Web.Api\ASC.Web.Api.csproj", "{D7C5E8A0-0A5E-4BC4-9946-B43D6682D421}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.Web.Studio", "web\ASC.Web.Studio\ASC.Web.Studio.csproj", "{9BF17F6E-04A9-4597-9273-21AD09600329}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.Common.Tests", "common\Tests\ASC.Common.Tests\ASC.Common.Tests.csproj", "{E6DEAA28-9A73-470A-8F17-3E72B1E8D208}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.Core.Common.Tests", "common\Tests\ASC.Core.Common.Tests\ASC.Core.Common.Tests.csproj", "{EF613F37-CFA9-4631-AA6E-512262FABC8E}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.Notify.Textile.Tests", "common\Tests\ASC.Notify.Textile.Tests\ASC.Notify.Textile.Tests.csproj", "{8FAD3D1B-3ADC-470C-9933-CAE1B95A8599}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.Webhooks.Core", "common\ASC.Webhooks.Core\ASC.Webhooks.Core.csproj", "{A0475D4F-29B7-490C-85C2-8AFD60118B35}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.Webhooks.Tests", "common\Tests\ASC.Webhooks.Tests\ASC.Webhooks.Tests.csproj", "{B48C0E24-EBEB-4AF9-891B-D28BB4A9461E}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.Webhooks.Service", "common\services\ASC.Webhooks.Service\ASC.Webhooks.Service.csproj", "{23310CB9-1A14-4ED5-BF81-8BD38277FEFE}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.Web.Api.Tests", "common\Tests\ASC.Web.Api.Tests\ASC.Web.Api.Tests.csproj", "{14823E93-C749-4069-9E0A-95E63DE0B254}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.Data.Backup.Core", "common\ASC.Data.Backup.Core\ASC.Data.Backup.Core.csproj", "{5E948877-52BD-42B9-95A9-0FF37931D0A5}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.Data.Encryption", "common\ASC.Data.Encryption\ASC.Data.Encryption.csproj", "{9E572FE2-8C3C-46B2-A314-532F7198B3EB}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.AuditTrail", "common\services\ASC.AuditTrail\ASC.AuditTrail.csproj", "{B53587B7-58F6-48A9-B122-69BEA3E2572A}"
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Solution Items", "Solution Items", "{BE86ABA2-8B71-4E0D-A2A6-3921E0467F09}"
ProjectSection(SolutionItems) = preProject
.editorconfig = .editorconfig
EndProjectSection
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.Migrations.MySql", "migrations\mysql\ASC.Migrations.MySql.csproj", "{E81DC433-3555-4151-BDBC-16368A0FE14E}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
Release|Any CPU = Release|Any CPU
EndGlobalSection
GlobalSection(ProjectConfigurationPlatforms) = postSolution
{EFB99A37-EF25-4A0F-8D7A-786402B1554C}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{EFB99A37-EF25-4A0F-8D7A-786402B1554C}.Debug|Any CPU.Build.0 = Debug|Any CPU
{EFB99A37-EF25-4A0F-8D7A-786402B1554C}.Release|Any CPU.ActiveCfg = Release|Any CPU
{EFB99A37-EF25-4A0F-8D7A-786402B1554C}.Release|Any CPU.Build.0 = Release|Any CPU
{CAA6EED2-094A-42AE-81B6-3FDF142EC277}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{CAA6EED2-094A-42AE-81B6-3FDF142EC277}.Debug|Any CPU.Build.0 = Debug|Any CPU
{CAA6EED2-094A-42AE-81B6-3FDF142EC277}.Release|Any CPU.ActiveCfg = Release|Any CPU
{CAA6EED2-094A-42AE-81B6-3FDF142EC277}.Release|Any CPU.Build.0 = Release|Any CPU
{19FFE246-4270-41A6-AA8D-FB961715FB21}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{19FFE246-4270-41A6-AA8D-FB961715FB21}.Debug|Any CPU.Build.0 = Debug|Any CPU
{19FFE246-4270-41A6-AA8D-FB961715FB21}.Release|Any CPU.ActiveCfg = Release|Any CPU
{19FFE246-4270-41A6-AA8D-FB961715FB21}.Release|Any CPU.Build.0 = Release|Any CPU
{A51D0454-4AFA-46DE-89D4-B03D37E1816C}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{A51D0454-4AFA-46DE-89D4-B03D37E1816C}.Debug|Any CPU.Build.0 = Debug|Any CPU
{A51D0454-4AFA-46DE-89D4-B03D37E1816C}.Release|Any CPU.ActiveCfg = Release|Any CPU
{A51D0454-4AFA-46DE-89D4-B03D37E1816C}.Release|Any CPU.Build.0 = Release|Any CPU
{D7F459CE-9EAF-423A-B3DC-EFD107007BCE}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{D7F459CE-9EAF-423A-B3DC-EFD107007BCE}.Debug|Any CPU.Build.0 = Debug|Any CPU
{D7F459CE-9EAF-423A-B3DC-EFD107007BCE}.Release|Any CPU.ActiveCfg = Release|Any CPU
{D7F459CE-9EAF-423A-B3DC-EFD107007BCE}.Release|Any CPU.Build.0 = Release|Any CPU
{484E9663-B9B7-40CA-B600-D5FDCD7CB862}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{484E9663-B9B7-40CA-B600-D5FDCD7CB862}.Debug|Any CPU.Build.0 = Debug|Any CPU
{484E9663-B9B7-40CA-B600-D5FDCD7CB862}.Release|Any CPU.ActiveCfg = Release|Any CPU
{484E9663-B9B7-40CA-B600-D5FDCD7CB862}.Release|Any CPU.Build.0 = Release|Any CPU
{AB0EFA45-1DAF-42F4-9EE1-4A8EC497DE35}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{AB0EFA45-1DAF-42F4-9EE1-4A8EC497DE35}.Debug|Any CPU.Build.0 = Debug|Any CPU
{AB0EFA45-1DAF-42F4-9EE1-4A8EC497DE35}.Release|Any CPU.ActiveCfg = Release|Any CPU
{AB0EFA45-1DAF-42F4-9EE1-4A8EC497DE35}.Release|Any CPU.Build.0 = Release|Any CPU
{AE1A0E06-6CD4-4E1D-8209-22BBBD6D5652}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{AE1A0E06-6CD4-4E1D-8209-22BBBD6D5652}.Debug|Any CPU.Build.0 = Debug|Any CPU
{AE1A0E06-6CD4-4E1D-8209-22BBBD6D5652}.Release|Any CPU.ActiveCfg = Release|Any CPU
{AE1A0E06-6CD4-4E1D-8209-22BBBD6D5652}.Release|Any CPU.Build.0 = Release|Any CPU
{C277388C-E19E-4A62-A895-8AAB322A4004}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{C277388C-E19E-4A62-A895-8AAB322A4004}.Debug|Any CPU.Build.0 = Debug|Any CPU
{C277388C-E19E-4A62-A895-8AAB322A4004}.Release|Any CPU.ActiveCfg = Release|Any CPU
{C277388C-E19E-4A62-A895-8AAB322A4004}.Release|Any CPU.Build.0 = Release|Any CPU
{77BA2F61-6155-4283-BB39-F8E42F46A0B0}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{77BA2F61-6155-4283-BB39-F8E42F46A0B0}.Debug|Any CPU.Build.0 = Debug|Any CPU
{77BA2F61-6155-4283-BB39-F8E42F46A0B0}.Release|Any CPU.ActiveCfg = Release|Any CPU
{77BA2F61-6155-4283-BB39-F8E42F46A0B0}.Release|Any CPU.Build.0 = Release|Any CPU
{DB492BA0-B072-4056-8A3D-032CD108CD82}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{DB492BA0-B072-4056-8A3D-032CD108CD82}.Debug|Any CPU.Build.0 = Debug|Any CPU
{DB492BA0-B072-4056-8A3D-032CD108CD82}.Release|Any CPU.ActiveCfg = Release|Any CPU
{DB492BA0-B072-4056-8A3D-032CD108CD82}.Release|Any CPU.Build.0 = Release|Any CPU
{9A703423-594E-4851-8A0D-FF582BAF9FC9}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{9A703423-594E-4851-8A0D-FF582BAF9FC9}.Debug|Any CPU.Build.0 = Debug|Any CPU
{9A703423-594E-4851-8A0D-FF582BAF9FC9}.Release|Any CPU.ActiveCfg = Release|Any CPU
{9A703423-594E-4851-8A0D-FF582BAF9FC9}.Release|Any CPU.Build.0 = Release|Any CPU
{8682FCD3-C5C5-4946-99FA-B900E9CB68C3}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{8682FCD3-C5C5-4946-99FA-B900E9CB68C3}.Debug|Any CPU.Build.0 = Debug|Any CPU
{8682FCD3-C5C5-4946-99FA-B900E9CB68C3}.Release|Any CPU.ActiveCfg = Release|Any CPU
{8682FCD3-C5C5-4946-99FA-B900E9CB68C3}.Release|Any CPU.Build.0 = Release|Any CPU
{5A0DF88C-45C8-4FE2-8144-88E8AC8B7C45}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{5A0DF88C-45C8-4FE2-8144-88E8AC8B7C45}.Debug|Any CPU.Build.0 = Debug|Any CPU
{5A0DF88C-45C8-4FE2-8144-88E8AC8B7C45}.Release|Any CPU.ActiveCfg = Release|Any CPU
{5A0DF88C-45C8-4FE2-8144-88E8AC8B7C45}.Release|Any CPU.Build.0 = Release|Any CPU
{5E5496B5-5686-4415-B3E0-5F61FCD146A3}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{5E5496B5-5686-4415-B3E0-5F61FCD146A3}.Debug|Any CPU.Build.0 = Debug|Any CPU
{5E5496B5-5686-4415-B3E0-5F61FCD146A3}.Release|Any CPU.ActiveCfg = Release|Any CPU
{5E5496B5-5686-4415-B3E0-5F61FCD146A3}.Release|Any CPU.Build.0 = Release|Any CPU
{5FE0EDA2-1FD7-4D43-885E-DBDD858DCC86}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{5FE0EDA2-1FD7-4D43-885E-DBDD858DCC86}.Debug|Any CPU.Build.0 = Debug|Any CPU
{5FE0EDA2-1FD7-4D43-885E-DBDD858DCC86}.Release|Any CPU.ActiveCfg = Release|Any CPU
{5FE0EDA2-1FD7-4D43-885E-DBDD858DCC86}.Release|Any CPU.Build.0 = Release|Any CPU
{1A82BE07-0836-4620-A95D-C6D1E9327589}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{1A82BE07-0836-4620-A95D-C6D1E9327589}.Debug|Any CPU.Build.0 = Debug|Any CPU
{1A82BE07-0836-4620-A95D-C6D1E9327589}.Release|Any CPU.ActiveCfg = Release|Any CPU
{1A82BE07-0836-4620-A95D-C6D1E9327589}.Release|Any CPU.Build.0 = Release|Any CPU
{664031A4-1652-4B68-8168-FD18998700EE}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{664031A4-1652-4B68-8168-FD18998700EE}.Debug|Any CPU.Build.0 = Debug|Any CPU
{664031A4-1652-4B68-8168-FD18998700EE}.Release|Any CPU.ActiveCfg = Release|Any CPU
{664031A4-1652-4B68-8168-FD18998700EE}.Release|Any CPU.Build.0 = Release|Any CPU
{C024C35A-D0F0-42D6-86B2-64ABF7513C4A}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{C024C35A-D0F0-42D6-86B2-64ABF7513C4A}.Debug|Any CPU.Build.0 = Debug|Any CPU
{C024C35A-D0F0-42D6-86B2-64ABF7513C4A}.Release|Any CPU.ActiveCfg = Release|Any CPU
{C024C35A-D0F0-42D6-86B2-64ABF7513C4A}.Release|Any CPU.Build.0 = Release|Any CPU
{B30A0D35-7B32-4E13-9F37-B8BC59F839E5}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{B30A0D35-7B32-4E13-9F37-B8BC59F839E5}.Debug|Any CPU.Build.0 = Debug|Any CPU
{B30A0D35-7B32-4E13-9F37-B8BC59F839E5}.Release|Any CPU.ActiveCfg = Release|Any CPU
{B30A0D35-7B32-4E13-9F37-B8BC59F839E5}.Release|Any CPU.Build.0 = Release|Any CPU
{8ACDEBBD-12DD-43DC-86CF-D66E37528ACC}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{8ACDEBBD-12DD-43DC-86CF-D66E37528ACC}.Debug|Any CPU.Build.0 = Debug|Any CPU
{8ACDEBBD-12DD-43DC-86CF-D66E37528ACC}.Release|Any CPU.ActiveCfg = Release|Any CPU
{8ACDEBBD-12DD-43DC-86CF-D66E37528ACC}.Release|Any CPU.Build.0 = Release|Any CPU
{630E2649-71B6-4C07-A2FC-C0BC05D77A78}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{630E2649-71B6-4C07-A2FC-C0BC05D77A78}.Debug|Any CPU.Build.0 = Debug|Any CPU
{630E2649-71B6-4C07-A2FC-C0BC05D77A78}.Release|Any CPU.ActiveCfg = Release|Any CPU
{630E2649-71B6-4C07-A2FC-C0BC05D77A78}.Release|Any CPU.Build.0 = Release|Any CPU
{D7C5E8A0-0A5E-4BC4-9946-B43D6682D421}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{D7C5E8A0-0A5E-4BC4-9946-B43D6682D421}.Debug|Any CPU.Build.0 = Debug|Any CPU
{D7C5E8A0-0A5E-4BC4-9946-B43D6682D421}.Release|Any CPU.ActiveCfg = Release|Any CPU
{D7C5E8A0-0A5E-4BC4-9946-B43D6682D421}.Release|Any CPU.Build.0 = Release|Any CPU
{9BF17F6E-04A9-4597-9273-21AD09600329}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{9BF17F6E-04A9-4597-9273-21AD09600329}.Debug|Any CPU.Build.0 = Debug|Any CPU
{9BF17F6E-04A9-4597-9273-21AD09600329}.Release|Any CPU.ActiveCfg = Release|Any CPU
{9BF17F6E-04A9-4597-9273-21AD09600329}.Release|Any CPU.Build.0 = Release|Any CPU
{E6DEAA28-9A73-470A-8F17-3E72B1E8D208}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{E6DEAA28-9A73-470A-8F17-3E72B1E8D208}.Debug|Any CPU.Build.0 = Debug|Any CPU
{E6DEAA28-9A73-470A-8F17-3E72B1E8D208}.Release|Any CPU.ActiveCfg = Release|Any CPU
{E6DEAA28-9A73-470A-8F17-3E72B1E8D208}.Release|Any CPU.Build.0 = Release|Any CPU
{EF613F37-CFA9-4631-AA6E-512262FABC8E}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{EF613F37-CFA9-4631-AA6E-512262FABC8E}.Debug|Any CPU.Build.0 = Debug|Any CPU
{EF613F37-CFA9-4631-AA6E-512262FABC8E}.Release|Any CPU.ActiveCfg = Release|Any CPU
{EF613F37-CFA9-4631-AA6E-512262FABC8E}.Release|Any CPU.Build.0 = Release|Any CPU
{8FAD3D1B-3ADC-470C-9933-CAE1B95A8599}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{8FAD3D1B-3ADC-470C-9933-CAE1B95A8599}.Debug|Any CPU.Build.0 = Debug|Any CPU
{8FAD3D1B-3ADC-470C-9933-CAE1B95A8599}.Release|Any CPU.ActiveCfg = Release|Any CPU
{8FAD3D1B-3ADC-470C-9933-CAE1B95A8599}.Release|Any CPU.Build.0 = Release|Any CPU
{A0475D4F-29B7-490C-85C2-8AFD60118B35}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{A0475D4F-29B7-490C-85C2-8AFD60118B35}.Debug|Any CPU.Build.0 = Debug|Any CPU
{A0475D4F-29B7-490C-85C2-8AFD60118B35}.Release|Any CPU.ActiveCfg = Release|Any CPU
{A0475D4F-29B7-490C-85C2-8AFD60118B35}.Release|Any CPU.Build.0 = Release|Any CPU
{B48C0E24-EBEB-4AF9-891B-D28BB4A9461E}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{B48C0E24-EBEB-4AF9-891B-D28BB4A9461E}.Debug|Any CPU.Build.0 = Debug|Any CPU
{B48C0E24-EBEB-4AF9-891B-D28BB4A9461E}.Release|Any CPU.ActiveCfg = Release|Any CPU
{B48C0E24-EBEB-4AF9-891B-D28BB4A9461E}.Release|Any CPU.Build.0 = Release|Any CPU
{23310CB9-1A14-4ED5-BF81-8BD38277FEFE}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{23310CB9-1A14-4ED5-BF81-8BD38277FEFE}.Debug|Any CPU.Build.0 = Debug|Any CPU
{23310CB9-1A14-4ED5-BF81-8BD38277FEFE}.Release|Any CPU.ActiveCfg = Release|Any CPU
{23310CB9-1A14-4ED5-BF81-8BD38277FEFE}.Release|Any CPU.Build.0 = Release|Any CPU
{14823E93-C749-4069-9E0A-95E63DE0B254}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{14823E93-C749-4069-9E0A-95E63DE0B254}.Debug|Any CPU.Build.0 = Debug|Any CPU
{14823E93-C749-4069-9E0A-95E63DE0B254}.Release|Any CPU.ActiveCfg = Release|Any CPU
{14823E93-C749-4069-9E0A-95E63DE0B254}.Release|Any CPU.Build.0 = Release|Any CPU
{5E948877-52BD-42B9-95A9-0FF37931D0A5}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{5E948877-52BD-42B9-95A9-0FF37931D0A5}.Debug|Any CPU.Build.0 = Debug|Any CPU
{5E948877-52BD-42B9-95A9-0FF37931D0A5}.Release|Any CPU.ActiveCfg = Release|Any CPU
{5E948877-52BD-42B9-95A9-0FF37931D0A5}.Release|Any CPU.Build.0 = Release|Any CPU
{9E572FE2-8C3C-46B2-A314-532F7198B3EB}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{9E572FE2-8C3C-46B2-A314-532F7198B3EB}.Debug|Any CPU.Build.0 = Debug|Any CPU
{9E572FE2-8C3C-46B2-A314-532F7198B3EB}.Release|Any CPU.ActiveCfg = Release|Any CPU
{9E572FE2-8C3C-46B2-A314-532F7198B3EB}.Release|Any CPU.Build.0 = Release|Any CPU
{B53587B7-58F6-48A9-B122-69BEA3E2572A}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{B53587B7-58F6-48A9-B122-69BEA3E2572A}.Debug|Any CPU.Build.0 = Debug|Any CPU
{B53587B7-58F6-48A9-B122-69BEA3E2572A}.Release|Any CPU.ActiveCfg = Release|Any CPU
{B53587B7-58F6-48A9-B122-69BEA3E2572A}.Release|Any CPU.Build.0 = Release|Any CPU
{E81DC433-3555-4151-BDBC-16368A0FE14E}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{E81DC433-3555-4151-BDBC-16368A0FE14E}.Debug|Any CPU.Build.0 = Debug|Any CPU
{E81DC433-3555-4151-BDBC-16368A0FE14E}.Release|Any CPU.ActiveCfg = Release|Any CPU
{E81DC433-3555-4151-BDBC-16368A0FE14E}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
EndGlobalSection
GlobalSection(ExtensibilityGlobals) = postSolution
SolutionGuid = {73201867-1BA9-4F5E-AA69-AFA74662FFFC}
EndGlobalSection
EndGlobal

@@ -1,267 +0,0 @@

Microsoft Visual Studio Solution File, Format Version 12.00
# Visual Studio Version 17
VisualStudioVersion = 17.0.31903.59
MinimumVisualStudioVersion = 10.0.40219.1
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.Web.Studio", "web\ASC.Web.Studio\ASC.Web.Studio.csproj", "{90183112-BCD6-4E16-9CA2-12231930DAB4}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.Web.Api", "web\ASC.Web.Api\ASC.Web.Api.csproj", "{4AA9F8E3-2F48-44DA-B6C5-37ED7A4739C1}"
ProjectSection(ProjectDependencies) = postProject
{BE4816E7-7CD2-4D9B-ABC6-D9E5C04E3926} = {BE4816E7-7CD2-4D9B-ABC6-D9E5C04E3926}
EndProjectSection
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.Common", "common\ASC.Common\ASC.Common.csproj", "{EB8F47B3-39DE-4B7D-8EC6-01726368B45D}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.Core.Common", "common\ASC.Core.Common\ASC.Core.Common.csproj", "{A51D0454-4AFA-46DE-89D4-B03D37E1816C}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.People", "products\ASC.People\Server\ASC.People.csproj", "{BE4816E7-7CD2-4D9B-ABC6-D9E5C04E3926}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.Api.Core", "common\ASC.Api.Core\ASC.Api.Core.csproj", "{62C49C91-1A5A-4C0D-A3B3-A9AE8C9718CE}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.Data.Storage", "common\ASC.Data.Storage\ASC.Data.Storage.csproj", "{3ABB21D3-D990-4005-9EEF-701A93948C27}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.FederatedLogin", "common\ASC.FederatedLogin\ASC.FederatedLogin.csproj", "{481B0D9C-B2F8-4DE9-8597-BA8D24585099}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.Web.Core", "web\ASC.Web.Core\ASC.Web.Core.csproj", "{02C40A64-FE22-41D0-9037-69F0D6F787A9}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.MessagingSystem", "common\ASC.MessagingSystem\ASC.MessagingSystem.csproj", "{BD8A18A5-60C5-4411-9719-0AA11B4BE0E9}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.IPSecurity", "common\ASC.IPSecurity\ASC.IPSecurity.csproj", "{2FF2177F-2D1A-4396-84EB-51F14FD99385}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.Feed", "common\ASC.Feed\ASC.Feed.csproj", "{B0431EC9-21D8-4D96-B333-6DD430031C82}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.Data.Reassigns", "common\ASC.Data.Reassigns\ASC.Data.Reassigns.csproj", "{104D045A-F9CC-4DD7-B04D-8DA7543435DD}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.Notify", "common\services\ASC.Notify\ASC.Notify.csproj", "{8484A675-1C93-4D87-8FF2-7530A5711208}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.Studio.Notify", "common\services\ASC.Studio.Notify\ASC.Studio.Notify.csproj", "{E3567AB9-0926-444D-A0D0-A369D5890EAA}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.Textile", "common\ASC.Textile\ASC.Textile.csproj", "{C8F410B4-B83B-47B9-9ECD-07590A8750A7}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.Notify.Textile", "common\ASC.Notify.Textile\ASC.Notify.Textile.csproj", "{DB50E2EF-B4D8-493A-8568-29CAC0DF9062}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.ElasticSearch", "common\services\ASC.ElasticSearch\ASC.ElasticSearch.csproj", "{AE1A0E06-6CD4-4E1D-8209-22BBBD6D5652}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.Files", "products\ASC.Files\Server\ASC.Files.csproj", "{77BA2F61-6155-4283-BB39-F8E42F46A0B0}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.Files.Service", "products\ASC.Files\Service\ASC.Files.Service.csproj", "{5D41FFFF-816C-40B2-95CD-E2DDDCB83784}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.Feed.Aggregator", "common\services\ASC.Feed.Aggregator\ASC.Feed.Aggregator.csproj", "{07CCC11F-76CB-448E-B15A-72E09FBB348B}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.Files.Core", "products\ASC.Files\Core\ASC.Files.Core.csproj", "{F0A39728-940D-4DBE-A37A-05D4EB57F342}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.TelegramService", "common\services\ASC.TelegramService\ASC.TelegramService.csproj", "{95CE7371-17B6-4EEE-8E38-2FDE6347E955}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.AuditTrail", "common\services\ASC.AuditTrail\ASC.AuditTrail.csproj", "{2C111161-B7C5-4869-9F52-EA725E64BA40}"
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Solution Items", "Solution Items", "{0E35EB77-EC53-44C2-99EB-3D845C79675D}"
ProjectSection(SolutionItems) = preProject
.editorconfig = .editorconfig
EndProjectSection
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.Data.Encryption", "common\ASC.Data.Encryption\ASC.Data.Encryption.csproj", "{C4DF1A63-C9EB-4D8F-A4E5-4FD9249A5089}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.Web.HealthChecks.UI", "web\ASC.Web.HealthChecks.UI\ASC.Web.HealthChecks.UI.csproj", "{0C1A387E-0CD0-4BE8-82FC-9FCAD05BF289}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.ClearEvents", "common\services\ASC.ClearEvents\ASC.ClearEvents.csproj", "{448221A8-EABA-4200-9192-E08BF241A487}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.Data.Backup.Core", "common\ASC.Data.Backup.Core\ASC.Data.Backup.Core.csproj", "{F5D9DE01-06CD-4881-9F41-46882E9ED45C}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.Data.Backup", "common\services\ASC.Data.Backup\ASC.Data.Backup.csproj", "{027EEE53-7491-48F4-B467-6404D68798A7}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.Webhooks.Core", "common\ASC.Webhooks.Core\ASC.Webhooks.Core.csproj", "{760BFF3A-1A67-43A1-A94C-78D11A4BB1E6}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.Data.Backup.BackgroundTasks", "common\services\ASC.Data.Backup.BackgroundTasks\ASC.Data.Backup.BackgroundTasks.csproj", "{C0C28A02-943C-4A38-B474-A2B49C6201ED}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.EventBus", "common\ASC.EventBus\ASC.EventBus.csproj", "{26540DA7-604B-474B-97BA-9CDC85A84B6D}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.EventBus.RabbitMQ", "common\ASC.EventBus.RabbitMQ\ASC.EventBus.RabbitMQ.csproj", "{3FACF656-6DED-407E-AC23-A7EF08D704E3}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.EventBus.Extensions.Logger", "common\ASC.EventBus.Extensions.Logger\ASC.EventBus.Extensions.Logger.csproj", "{ED8CEB38-7C95-43A8-B208-9C9828654AC1}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.Migration", "common\ASC.Migration\ASC.Migration.csproj", "{05B8FF27-446B-49BF-B508-4A4C096D2BB2}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.ActiveDirectory", "common\ASC.ActiveDirectory\ASC.ActiveDirectory.csproj", "{9F81862F-303D-467F-8DC9-044BE2CCF329}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.EventBus.ActiveMQ", "common\ASC.EventBus.ActiveMQ\ASC.EventBus.ActiveMQ.csproj", "{86916EF2-4A1B-441C-B673-EB0F68EC9C3A}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.Data.Storage.Encryption", "common\services\ASC.Data.Storage.Encryption\ASC.Data.Storage.Encryption.csproj", "{76BFD0A6-3F28-4BCE-983B-9FE3FBDD966D}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.ApiSystem", "common\services\ASC.ApiSystem\ASC.ApiSystem.csproj", "{EB0FC2DF-D8AC-460B-8FBE-307A7B163C6C}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.ApiCache", "common\services\ASC.ApiCache\ASC.ApiCache.csproj", "{AD4F5F31-625C-472D-BE2C-AD1FB693E065}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
Release|Any CPU = Release|Any CPU
EndGlobalSection
GlobalSection(ProjectConfigurationPlatforms) = postSolution
{90183112-BCD6-4E16-9CA2-12231930DAB4}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{90183112-BCD6-4E16-9CA2-12231930DAB4}.Debug|Any CPU.Build.0 = Debug|Any CPU
{90183112-BCD6-4E16-9CA2-12231930DAB4}.Release|Any CPU.ActiveCfg = Release|Any CPU
{90183112-BCD6-4E16-9CA2-12231930DAB4}.Release|Any CPU.Build.0 = Release|Any CPU
{4AA9F8E3-2F48-44DA-B6C5-37ED7A4739C1}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{4AA9F8E3-2F48-44DA-B6C5-37ED7A4739C1}.Debug|Any CPU.Build.0 = Debug|Any CPU
{4AA9F8E3-2F48-44DA-B6C5-37ED7A4739C1}.Release|Any CPU.ActiveCfg = Release|Any CPU
{4AA9F8E3-2F48-44DA-B6C5-37ED7A4739C1}.Release|Any CPU.Build.0 = Release|Any CPU
{EB8F47B3-39DE-4B7D-8EC6-01726368B45D}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{EB8F47B3-39DE-4B7D-8EC6-01726368B45D}.Debug|Any CPU.Build.0 = Debug|Any CPU
{EB8F47B3-39DE-4B7D-8EC6-01726368B45D}.Release|Any CPU.ActiveCfg = Release|Any CPU
{EB8F47B3-39DE-4B7D-8EC6-01726368B45D}.Release|Any CPU.Build.0 = Release|Any CPU
{A51D0454-4AFA-46DE-89D4-B03D37E1816C}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{A51D0454-4AFA-46DE-89D4-B03D37E1816C}.Debug|Any CPU.Build.0 = Debug|Any CPU
{A51D0454-4AFA-46DE-89D4-B03D37E1816C}.Release|Any CPU.ActiveCfg = Release|Any CPU
{A51D0454-4AFA-46DE-89D4-B03D37E1816C}.Release|Any CPU.Build.0 = Release|Any CPU
{BE4816E7-7CD2-4D9B-ABC6-D9E5C04E3926}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{BE4816E7-7CD2-4D9B-ABC6-D9E5C04E3926}.Debug|Any CPU.Build.0 = Debug|Any CPU
{BE4816E7-7CD2-4D9B-ABC6-D9E5C04E3926}.Release|Any CPU.ActiveCfg = Release|Any CPU
{BE4816E7-7CD2-4D9B-ABC6-D9E5C04E3926}.Release|Any CPU.Build.0 = Release|Any CPU
{62C49C91-1A5A-4C0D-A3B3-A9AE8C9718CE}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{62C49C91-1A5A-4C0D-A3B3-A9AE8C9718CE}.Debug|Any CPU.Build.0 = Debug|Any CPU
{62C49C91-1A5A-4C0D-A3B3-A9AE8C9718CE}.Release|Any CPU.ActiveCfg = Release|Any CPU
{62C49C91-1A5A-4C0D-A3B3-A9AE8C9718CE}.Release|Any CPU.Build.0 = Release|Any CPU
{3ABB21D3-D990-4005-9EEF-701A93948C27}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{3ABB21D3-D990-4005-9EEF-701A93948C27}.Debug|Any CPU.Build.0 = Debug|Any CPU
{3ABB21D3-D990-4005-9EEF-701A93948C27}.Release|Any CPU.ActiveCfg = Release|Any CPU
{3ABB21D3-D990-4005-9EEF-701A93948C27}.Release|Any CPU.Build.0 = Release|Any CPU
{481B0D9C-B2F8-4DE9-8597-BA8D24585099}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{481B0D9C-B2F8-4DE9-8597-BA8D24585099}.Debug|Any CPU.Build.0 = Debug|Any CPU
{481B0D9C-B2F8-4DE9-8597-BA8D24585099}.Release|Any CPU.ActiveCfg = Release|Any CPU
{481B0D9C-B2F8-4DE9-8597-BA8D24585099}.Release|Any CPU.Build.0 = Release|Any CPU
{02C40A64-FE22-41D0-9037-69F0D6F787A9}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{02C40A64-FE22-41D0-9037-69F0D6F787A9}.Debug|Any CPU.Build.0 = Debug|Any CPU
{02C40A64-FE22-41D0-9037-69F0D6F787A9}.Release|Any CPU.ActiveCfg = Release|Any CPU
{02C40A64-FE22-41D0-9037-69F0D6F787A9}.Release|Any CPU.Build.0 = Release|Any CPU
{BD8A18A5-60C5-4411-9719-0AA11B4BE0E9}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{BD8A18A5-60C5-4411-9719-0AA11B4BE0E9}.Debug|Any CPU.Build.0 = Debug|Any CPU
{BD8A18A5-60C5-4411-9719-0AA11B4BE0E9}.Release|Any CPU.ActiveCfg = Release|Any CPU
{BD8A18A5-60C5-4411-9719-0AA11B4BE0E9}.Release|Any CPU.Build.0 = Release|Any CPU
{2FF2177F-2D1A-4396-84EB-51F14FD99385}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{2FF2177F-2D1A-4396-84EB-51F14FD99385}.Debug|Any CPU.Build.0 = Debug|Any CPU
{2FF2177F-2D1A-4396-84EB-51F14FD99385}.Release|Any CPU.ActiveCfg = Release|Any CPU
{2FF2177F-2D1A-4396-84EB-51F14FD99385}.Release|Any CPU.Build.0 = Release|Any CPU
{B0431EC9-21D8-4D96-B333-6DD430031C82}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{B0431EC9-21D8-4D96-B333-6DD430031C82}.Debug|Any CPU.Build.0 = Debug|Any CPU
{B0431EC9-21D8-4D96-B333-6DD430031C82}.Release|Any CPU.ActiveCfg = Release|Any CPU
{B0431EC9-21D8-4D96-B333-6DD430031C82}.Release|Any CPU.Build.0 = Release|Any CPU
{104D045A-F9CC-4DD7-B04D-8DA7543435DD}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{104D045A-F9CC-4DD7-B04D-8DA7543435DD}.Debug|Any CPU.Build.0 = Debug|Any CPU
{104D045A-F9CC-4DD7-B04D-8DA7543435DD}.Release|Any CPU.ActiveCfg = Release|Any CPU
{104D045A-F9CC-4DD7-B04D-8DA7543435DD}.Release|Any CPU.Build.0 = Release|Any CPU
{8484A675-1C93-4D87-8FF2-7530A5711208}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{8484A675-1C93-4D87-8FF2-7530A5711208}.Debug|Any CPU.Build.0 = Debug|Any CPU
{8484A675-1C93-4D87-8FF2-7530A5711208}.Release|Any CPU.ActiveCfg = Release|Any CPU
{8484A675-1C93-4D87-8FF2-7530A5711208}.Release|Any CPU.Build.0 = Release|Any CPU
{E3567AB9-0926-444D-A0D0-A369D5890EAA}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{E3567AB9-0926-444D-A0D0-A369D5890EAA}.Debug|Any CPU.Build.0 = Debug|Any CPU
{E3567AB9-0926-444D-A0D0-A369D5890EAA}.Release|Any CPU.ActiveCfg = Release|Any CPU
{E3567AB9-0926-444D-A0D0-A369D5890EAA}.Release|Any CPU.Build.0 = Release|Any CPU
{C8F410B4-B83B-47B9-9ECD-07590A8750A7}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{C8F410B4-B83B-47B9-9ECD-07590A8750A7}.Debug|Any CPU.Build.0 = Debug|Any CPU
{C8F410B4-B83B-47B9-9ECD-07590A8750A7}.Release|Any CPU.ActiveCfg = Release|Any CPU
{C8F410B4-B83B-47B9-9ECD-07590A8750A7}.Release|Any CPU.Build.0 = Release|Any CPU
{DB50E2EF-B4D8-493A-8568-29CAC0DF9062}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{DB50E2EF-B4D8-493A-8568-29CAC0DF9062}.Debug|Any CPU.Build.0 = Debug|Any CPU
{DB50E2EF-B4D8-493A-8568-29CAC0DF9062}.Release|Any CPU.ActiveCfg = Release|Any CPU
{DB50E2EF-B4D8-493A-8568-29CAC0DF9062}.Release|Any CPU.Build.0 = Release|Any CPU
{AE1A0E06-6CD4-4E1D-8209-22BBBD6D5652}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{AE1A0E06-6CD4-4E1D-8209-22BBBD6D5652}.Debug|Any CPU.Build.0 = Debug|Any CPU
{AE1A0E06-6CD4-4E1D-8209-22BBBD6D5652}.Release|Any CPU.ActiveCfg = Release|Any CPU
{AE1A0E06-6CD4-4E1D-8209-22BBBD6D5652}.Release|Any CPU.Build.0 = Release|Any CPU
{77BA2F61-6155-4283-BB39-F8E42F46A0B0}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{77BA2F61-6155-4283-BB39-F8E42F46A0B0}.Debug|Any CPU.Build.0 = Debug|Any CPU
{77BA2F61-6155-4283-BB39-F8E42F46A0B0}.Release|Any CPU.ActiveCfg = Release|Any CPU
{77BA2F61-6155-4283-BB39-F8E42F46A0B0}.Release|Any CPU.Build.0 = Release|Any CPU
{5D41FFFF-816C-40B2-95CD-E2DDDCB83784}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{5D41FFFF-816C-40B2-95CD-E2DDDCB83784}.Debug|Any CPU.Build.0 = Debug|Any CPU
{5D41FFFF-816C-40B2-95CD-E2DDDCB83784}.Release|Any CPU.ActiveCfg = Release|Any CPU
{5D41FFFF-816C-40B2-95CD-E2DDDCB83784}.Release|Any CPU.Build.0 = Release|Any CPU
{07CCC11F-76CB-448E-B15A-72E09FBB348B}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{07CCC11F-76CB-448E-B15A-72E09FBB348B}.Debug|Any CPU.Build.0 = Debug|Any CPU
{07CCC11F-76CB-448E-B15A-72E09FBB348B}.Release|Any CPU.ActiveCfg = Release|Any CPU
{07CCC11F-76CB-448E-B15A-72E09FBB348B}.Release|Any CPU.Build.0 = Release|Any CPU
{F0A39728-940D-4DBE-A37A-05D4EB57F342}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{F0A39728-940D-4DBE-A37A-05D4EB57F342}.Debug|Any CPU.Build.0 = Debug|Any CPU
{F0A39728-940D-4DBE-A37A-05D4EB57F342}.Release|Any CPU.ActiveCfg = Release|Any CPU
{F0A39728-940D-4DBE-A37A-05D4EB57F342}.Release|Any CPU.Build.0 = Release|Any CPU
{95CE7371-17B6-4EEE-8E38-2FDE6347E955}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{95CE7371-17B6-4EEE-8E38-2FDE6347E955}.Debug|Any CPU.Build.0 = Debug|Any CPU
{95CE7371-17B6-4EEE-8E38-2FDE6347E955}.Release|Any CPU.ActiveCfg = Release|Any CPU
{95CE7371-17B6-4EEE-8E38-2FDE6347E955}.Release|Any CPU.Build.0 = Release|Any CPU
{2C111161-B7C5-4869-9F52-EA725E64BA40}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{2C111161-B7C5-4869-9F52-EA725E64BA40}.Debug|Any CPU.Build.0 = Debug|Any CPU
{2C111161-B7C5-4869-9F52-EA725E64BA40}.Release|Any CPU.ActiveCfg = Release|Any CPU
{2C111161-B7C5-4869-9F52-EA725E64BA40}.Release|Any CPU.Build.0 = Release|Any CPU
{C4DF1A63-C9EB-4D8F-A4E5-4FD9249A5089}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{C4DF1A63-C9EB-4D8F-A4E5-4FD9249A5089}.Debug|Any CPU.Build.0 = Debug|Any CPU
{C4DF1A63-C9EB-4D8F-A4E5-4FD9249A5089}.Release|Any CPU.ActiveCfg = Release|Any CPU
{C4DF1A63-C9EB-4D8F-A4E5-4FD9249A5089}.Release|Any CPU.Build.0 = Release|Any CPU
{0C1A387E-0CD0-4BE8-82FC-9FCAD05BF289}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{0C1A387E-0CD0-4BE8-82FC-9FCAD05BF289}.Debug|Any CPU.Build.0 = Debug|Any CPU
{0C1A387E-0CD0-4BE8-82FC-9FCAD05BF289}.Release|Any CPU.ActiveCfg = Release|Any CPU
{0C1A387E-0CD0-4BE8-82FC-9FCAD05BF289}.Release|Any CPU.Build.0 = Release|Any CPU
{448221A8-EABA-4200-9192-E08BF241A487}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{448221A8-EABA-4200-9192-E08BF241A487}.Debug|Any CPU.Build.0 = Debug|Any CPU
{448221A8-EABA-4200-9192-E08BF241A487}.Release|Any CPU.ActiveCfg = Release|Any CPU
{448221A8-EABA-4200-9192-E08BF241A487}.Release|Any CPU.Build.0 = Release|Any CPU
{F5D9DE01-06CD-4881-9F41-46882E9ED45C}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{F5D9DE01-06CD-4881-9F41-46882E9ED45C}.Debug|Any CPU.Build.0 = Debug|Any CPU
{F5D9DE01-06CD-4881-9F41-46882E9ED45C}.Release|Any CPU.ActiveCfg = Release|Any CPU
{F5D9DE01-06CD-4881-9F41-46882E9ED45C}.Release|Any CPU.Build.0 = Release|Any CPU
{027EEE53-7491-48F4-B467-6404D68798A7}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{027EEE53-7491-48F4-B467-6404D68798A7}.Debug|Any CPU.Build.0 = Debug|Any CPU
{027EEE53-7491-48F4-B467-6404D68798A7}.Release|Any CPU.ActiveCfg = Release|Any CPU
{027EEE53-7491-48F4-B467-6404D68798A7}.Release|Any CPU.Build.0 = Release|Any CPU
{760BFF3A-1A67-43A1-A94C-78D11A4BB1E6}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{760BFF3A-1A67-43A1-A94C-78D11A4BB1E6}.Debug|Any CPU.Build.0 = Debug|Any CPU
{760BFF3A-1A67-43A1-A94C-78D11A4BB1E6}.Release|Any CPU.ActiveCfg = Release|Any CPU
{760BFF3A-1A67-43A1-A94C-78D11A4BB1E6}.Release|Any CPU.Build.0 = Release|Any CPU
{C0C28A02-943C-4A38-B474-A2B49C6201ED}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{C0C28A02-943C-4A38-B474-A2B49C6201ED}.Debug|Any CPU.Build.0 = Debug|Any CPU
{C0C28A02-943C-4A38-B474-A2B49C6201ED}.Release|Any CPU.ActiveCfg = Release|Any CPU
{C0C28A02-943C-4A38-B474-A2B49C6201ED}.Release|Any CPU.Build.0 = Release|Any CPU
{26540DA7-604B-474B-97BA-9CDC85A84B6D}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{26540DA7-604B-474B-97BA-9CDC85A84B6D}.Debug|Any CPU.Build.0 = Debug|Any CPU
{26540DA7-604B-474B-97BA-9CDC85A84B6D}.Release|Any CPU.ActiveCfg = Release|Any CPU
{26540DA7-604B-474B-97BA-9CDC85A84B6D}.Release|Any CPU.Build.0 = Release|Any CPU
{3FACF656-6DED-407E-AC23-A7EF08D704E3}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{3FACF656-6DED-407E-AC23-A7EF08D704E3}.Debug|Any CPU.Build.0 = Debug|Any CPU
{3FACF656-6DED-407E-AC23-A7EF08D704E3}.Release|Any CPU.ActiveCfg = Release|Any CPU
{3FACF656-6DED-407E-AC23-A7EF08D704E3}.Release|Any CPU.Build.0 = Release|Any CPU
{ED8CEB38-7C95-43A8-B208-9C9828654AC1}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{ED8CEB38-7C95-43A8-B208-9C9828654AC1}.Debug|Any CPU.Build.0 = Debug|Any CPU
{ED8CEB38-7C95-43A8-B208-9C9828654AC1}.Release|Any CPU.ActiveCfg = Release|Any CPU
{ED8CEB38-7C95-43A8-B208-9C9828654AC1}.Release|Any CPU.Build.0 = Release|Any CPU
{05B8FF27-446B-49BF-B508-4A4C096D2BB2}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{05B8FF27-446B-49BF-B508-4A4C096D2BB2}.Debug|Any CPU.Build.0 = Debug|Any CPU
{05B8FF27-446B-49BF-B508-4A4C096D2BB2}.Release|Any CPU.ActiveCfg = Release|Any CPU
{05B8FF27-446B-49BF-B508-4A4C096D2BB2}.Release|Any CPU.Build.0 = Release|Any CPU
{9F81862F-303D-467F-8DC9-044BE2CCF329}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{9F81862F-303D-467F-8DC9-044BE2CCF329}.Debug|Any CPU.Build.0 = Debug|Any CPU
{9F81862F-303D-467F-8DC9-044BE2CCF329}.Release|Any CPU.ActiveCfg = Release|Any CPU
{9F81862F-303D-467F-8DC9-044BE2CCF329}.Release|Any CPU.Build.0 = Release|Any CPU
{86916EF2-4A1B-441C-B673-EB0F68EC9C3A}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{86916EF2-4A1B-441C-B673-EB0F68EC9C3A}.Debug|Any CPU.Build.0 = Debug|Any CPU
{86916EF2-4A1B-441C-B673-EB0F68EC9C3A}.Release|Any CPU.ActiveCfg = Release|Any CPU
{86916EF2-4A1B-441C-B673-EB0F68EC9C3A}.Release|Any CPU.Build.0 = Release|Any CPU
{76BFD0A6-3F28-4BCE-983B-9FE3FBDD966D}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{76BFD0A6-3F28-4BCE-983B-9FE3FBDD966D}.Debug|Any CPU.Build.0 = Debug|Any CPU
{76BFD0A6-3F28-4BCE-983B-9FE3FBDD966D}.Release|Any CPU.ActiveCfg = Release|Any CPU
{76BFD0A6-3F28-4BCE-983B-9FE3FBDD966D}.Release|Any CPU.Build.0 = Release|Any CPU
{EB0FC2DF-D8AC-460B-8FBE-307A7B163C6C}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{EB0FC2DF-D8AC-460B-8FBE-307A7B163C6C}.Debug|Any CPU.Build.0 = Debug|Any CPU
{EB0FC2DF-D8AC-460B-8FBE-307A7B163C6C}.Release|Any CPU.ActiveCfg = Release|Any CPU
{EB0FC2DF-D8AC-460B-8FBE-307A7B163C6C}.Release|Any CPU.Build.0 = Release|Any CPU
{AD4F5F31-625C-472D-BE2C-AD1FB693E065}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{AD4F5F31-625C-472D-BE2C-AD1FB693E065}.Debug|Any CPU.Build.0 = Debug|Any CPU
{AD4F5F31-625C-472D-BE2C-AD1FB693E065}.Release|Any CPU.ActiveCfg = Release|Any CPU
{AD4F5F31-625C-472D-BE2C-AD1FB693E065}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
EndGlobalSection
GlobalSection(ExtensibilityGlobals) = postSolution
SolutionGuid = {62783077-E041-40BA-A406-E7EF12CAFF2D}
EndGlobalSection
EndGlobal

@@ -1,44 +0,0 @@
{
  "solution": {
    "path": "ASC.Web.sln",
    "projects": [
      "common\\ASC.ActiveDirectory\\ASC.ActiveDirectory.csproj",
      "common\\ASC.Api.Core\\ASC.Api.Core.csproj",
      "common\\ASC.Common\\ASC.Common.csproj",
      "common\\ASC.Core.Common\\ASC.Core.Common.csproj",
      "common\\ASC.Data.Backup.Core\\ASC.Data.Backup.Core.csproj",
      "common\\ASC.Data.Encryption\\ASC.Data.Encryption.csproj",
      "common\\ASC.Data.Reassigns\\ASC.Data.Reassigns.csproj",
      "common\\ASC.Data.Storage\\ASC.Data.Storage.csproj",
      "common\\ASC.EventBus.ActiveMQ\\ASC.EventBus.ActiveMQ.csproj",
      "common\\ASC.EventBus.Extensions.Logger\\ASC.EventBus.Extensions.Logger.csproj",
      "common\\ASC.EventBus.RabbitMQ\\ASC.EventBus.RabbitMQ.csproj",
      "common\\ASC.EventBus\\ASC.EventBus.csproj",
      "common\\ASC.FederatedLogin\\ASC.FederatedLogin.csproj",
      "common\\ASC.Feed\\ASC.Feed.csproj",
      "common\\ASC.IPSecurity\\ASC.IPSecurity.csproj",
      "common\\ASC.MessagingSystem\\ASC.MessagingSystem.csproj",
      "common\\ASC.Notify.Textile\\ASC.Notify.Textile.csproj",
      "common\\ASC.Textile\\ASC.Textile.csproj",
      "common\\ASC.Webhooks.Core\\ASC.Webhooks.Core.csproj",
      "common\\services\\ASC.ApiCache\\ASC.ApiCache.csproj",
      "common\\services\\ASC.ApiSystem\\ASC.ApiSystem.csproj",
      "common\\services\\ASC.AuditTrail\\ASC.AuditTrail.csproj",
      "common\\services\\ASC.ClearEvents\\ASC.ClearEvents.csproj",
      "common\\services\\ASC.Data.Backup.BackgroundTasks\\ASC.Data.Backup.BackgroundTasks.csproj",
      "common\\services\\ASC.Data.Backup\\ASC.Data.Backup.csproj",
      "common\\services\\ASC.ElasticSearch\\ASC.ElasticSearch.csproj",
      "common\\services\\ASC.Feed.Aggregator\\ASC.Feed.Aggregator.csproj",
      "common\\services\\ASC.Notify\\ASC.Notify.csproj",
      "common\\services\\ASC.Studio.Notify\\ASC.Studio.Notify.csproj",
      "products\\ASC.Files\\Core\\ASC.Files.Core.csproj",
      "products\\ASC.Files\\Server\\ASC.Files.csproj",
      "products\\ASC.Files\\Service\\ASC.Files.Service.csproj",
      "products\\ASC.People\\Server\\ASC.People.csproj",
      "web\\ASC.Web.Api\\ASC.Web.Api.csproj",
      "web\\ASC.Web.Core\\ASC.Web.Core.csproj",
      "web\\ASC.Web.HealthChecks.UI\\ASC.Web.HealthChecks.UI.csproj",
      "web\\ASC.Web.Studio\\ASC.Web.Studio.csproj"
    ]
  }
}
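
This solution filter keeps builds scoped to the listed backend projects without loading all of ASC.Web.sln. A minimal sketch of how it is consumed (the same `dotnet build ... ASC.Web.slnf` call appears in the Jenkinsfile and build scripts below), assuming the .NET SDK is installed and the command runs from the repository root:

# Restore and build only the projects named in the filter.
dotnet build ASC.Web.slnf -c Release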

@@ -1,14 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<configuration>
  <solution>
    <add key="disableSourceControlIntegration" value="true" />
  </solution>
  <packageSources>
    <add key="Custom NuGet Server" value=".nuget\packages" />
    <add key="NuGet official package source" value="https://api.nuget.org/v3/index.json" />
  </packageSources>
  <packageRestore>
    <add key="enabled" value="True" />
    <add key="automatic" value="True" />
  </packageRestore>
</configuration>
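
The config above registers a local feed (.nuget\packages) alongside nuget.org and enables automatic package restore. A quick check, assuming the command runs from the directory holding this NuGet.config (which the .NET CLI picks up automatically):

# List the effective package sources; both feeds should appear.
dotnet nuget list source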

build/Jenkinsfile (vendored, 113 lines)

@@ -1,113 +0,0 @@
pipeline {
    agent none
    stages {
        stage('Build') {
            parallel {
                stage('Unix') {
                    agent { label 'net-core' }
                    stages {
                        stage('Frontend') {
                            steps {
                                sh 'yarn install --frozen-lockfile && yarn build'
                            }
                        }
                        stage('Backend') {
                            steps {
                                sh 'dotnet build -c Release ASC.Web.slnf'
                            }
                        }
                    }
                }
                stage('Windows') {
                    agent { label 'win-core' }
                    stages {
                        stage('Frontend') {
                            steps {
                                bat "yarn install --frozen-lockfile && yarn build"
                            }
                        }
                        stage('Backend') {
                            steps {
                                bat 'dotnet build -c Release ASC.Web.slnf'
                            }
                        }
                    }
                }
            }
        }
        stage('Test') {
            when { expression { return env.CHANGE_ID != null } }
            parallel {
                stage('Unix') {
                    agent { label 'net-core' }
                    stages {
                        stage('Components') {
                            steps {
                                sh "yarn install --frozen-lockfile && yarn build && cd ${env.WORKSPACE}/packages/components && yarn test:coverage --ci --reporters=default --reporters=jest-junit || true"
                            }
                            post {
                                success {
                                    junit 'packages/components/junit.xml'
                                    publishHTML target: [
                                        allowMissing : false,
                                        alwaysLinkToLastBuild: false,
                                        keepAll : true,
                                        reportDir : 'packages/components/coverage/lcov-report',
                                        reportFiles : 'index.html',
                                        reportName : 'Unix Test Report'
                                    ]
                                    publishCoverage adapters: [coberturaAdapter('packages/components/coverage/cobertura-coverage.xml')]
                                }
                            }
                        }
                        stage('Files') {
                            steps {
                                sh "git submodule update --progress --init -- products/ASC.Files/Server/DocStore && dotnet build ASC.Web.slnf && cd ${env.WORKSPACE}/products/ASC.Files/Tests/ && dotnet test ASC.Files.Tests.csproj -r linux-x64 -l \"console;verbosity=detailed\""
                            }
                        }
                    }
                }
                stage('Windows') {
                    agent { label 'win-core' }
                    stages {
                        stage('Components') {
                            steps {
                                bat "yarn install --frozen-lockfile && yarn build && cd ${env.WORKSPACE}\\packages\\components && yarn test:coverage --ci --reporters=default --reporters=jest-junit || true"
                            }
                            post {
                                success {
                                    junit 'packages\\components\\junit.xml'
                                    publishHTML target: [
                                        allowMissing : false,
                                        alwaysLinkToLastBuild: false,
                                        keepAll : true,
                                        reportDir : 'packages\\components\\coverage\\lcov-report',
                                        reportFiles : 'index.html',
                                        reportName : 'Windows Test Report'
                                    ]
                                }
                            }
                        }
                        stage('Files') {
                            steps {
                                bat "git submodule update --progress --init -- products\\ASC.Files\\Server\\DocStore && dotnet build ASC.Web.slnf && cd ${env.WORKSPACE}\\products\\ASC.Files\\Tests\\ && dotnet test ASC.Files.Tests.csproj"
                            }
                        }
                    }
                }
            }
        }
        stage('Notify') {
            when { expression { return env.CHANGE_ID != null && env.BUILD_NUMBER == '1' } }
            agent { label 'net-core' }
            options { skipDefaultCheckout() }
            environment {
                Telegram_Token = credentials('telegram_token')
                Chat_Id = credentials('telegram_chat')
            }
            steps {
                sh 'curl -s -X GET -G "https://api.telegram.org/bot$Telegram_Token/sendMessage" --data-urlencode "chat_id=$Chat_Id" --data "text=CHANGE URL:$CHANGE_URL %0A Build Url: $BUILD_URL %0A Branch Name:$CHANGE_TITLE"'
            }
        }
    }
}
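
For reference, the Unix 'Files' test stage reduces to the shell steps below; this is only a local-reproduction sketch of what the pipeline already runs, with commands and paths taken verbatim from the stage above and assuming a checkout root with the .NET SDK installed:

# Fetch the DocStore submodule, build the filtered solution, then run the file tests.
git submodule update --progress --init -- products/ASC.Files/Server/DocStore
dotnet build ASC.Web.slnf
cd products/ASC.Files/Tests/
dotnet test ASC.Files.Tests.csproj -r linux-x64 -l "console;verbosity=detailed"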

@@ -1,40 +0,0 @@
@echo off
echo Start build backend...
echo.
cd /D "%~dp0"
call runasadmin.bat "%~dpnx0"
if %errorlevel% == 0 (
    call start\stop.bat nopause
    dotnet build ..\asc.web.slnf /fl1 /flp1:logfile=asc.web.log;verbosity=normal
    echo.
)
if %errorlevel% == 0 (
    echo install nodejs projects dependencies...
    echo.
    rem %%f carries the full path, so scripts in subfolders found by /R also resolve
    for /R "scripts\" %%f in (*.bat) do (
        echo Run script %%~nxf...
        echo.
        call "%%f"
    )
)
echo.
if %errorlevel% == 0 (
    call start\start.bat nopause
)
echo.
if "%1"=="nopause" goto end
pause
:end

@@ -1,7 +0,0 @@
@echo off
pwsh %~dp0/build.backend.docker.ps1 %*
echo.
pause

@@ -1,91 +0,0 @@
$PSversionMajor = $PSVersionTable.PSVersion.Major
$PSversionMinor = $PSVersionTable.PSVersion.Minor
# Require PowerShell 7.2 or later: any major above 7 passes, 7.x needs minor >= 2.
if ($PSversionMajor -lt 7 -or ($PSversionMajor -eq 7 -and $PSversionMinor -lt 2)) {
    Write-Error "PowerShell version must be greater than or equal to 7.2."
    exit
}
$RootDir = Split-Path -Parent $PSScriptRoot
$DockerDir = "$RootDir\build\install\docker"
$LocalIp = (Get-CimInstance -ClassName Win32_NetworkAdapterConfiguration | Where-Object { $_.DHCPEnabled -ne $null -and $_.DefaultIPGateway -ne $null }).IPAddress | Select-Object -First 1
$Doceditor = ($LocalIp + ":5013")
$Login = ($LocalIp + ":5011")
$Client = ($LocalIp + ":5001")
$PortalUrl = ("http://" + $LocalIp + ":8092")
$ProxyVersion="v1.0.0"
# Stop all backend services
& "$PSScriptRoot\start\stop.backend.docker.ps1"
$Env:COMPOSE_IGNORE_ORPHANS = "True"
$Force = $False
if ($args[0] -eq "--force") {
    $Force = $True
}
Write-Host "FORCE BUILD BASE IMAGES: $Force" -ForegroundColor Blue
$ExistsNetwork = docker network ls --format '{{.Name}}' | findstr "onlyoffice"
if (-not $ExistsNetwork) {
    docker network create --driver bridge onlyoffice
}
Write-Host "Run MySQL" -ForegroundColor Green
docker compose -f "$DockerDir\db.yml" up -d
Write-Host "Build backend services (to `publish/` folder)" -ForegroundColor Green
& "$PSScriptRoot\install\common\build-services.ps1"
Set-Location -Path $RootDir
$DotnetVersion = "dev"
$NodeVersion = "dev"
$ProxyVersion = "dev"
$ExistsDotnet = docker images --format "{{.Repository}}:{{.Tag}}" | findstr "onlyoffice/4testing-docspace-dotnet-runtime:$DotnetVersion"
$ExistsNode = docker images --format "{{.Repository}}:{{.Tag}}" | findstr "onlyoffice/4testing-docspace-nodejs-runtime:$NodeVersion"
$ExistsProxy = docker images --format "{{.Repository}}:{{.Tag}}" | findstr "onlyoffice/4testing-docspace-proxy-runtime:$ProxyVersion"
if (!$ExistsDotnet -or $Force) {
    Write-Host "Build dotnet base image from source (apply new dotnet config)" -ForegroundColor Green
    docker build -t "onlyoffice/4testing-docspace-dotnet-runtime:$DotnetVersion" -f "$DockerDir\Dockerfile.runtime" --target dotnetrun .
} else {
    Write-Host "SKIP build dotnet base image (already exists)" -ForegroundColor Blue
}
if (!$ExistsNode -or $Force) {
    Write-Host "Build node base image from source" -ForegroundColor Green
    docker build -t "onlyoffice/4testing-docspace-nodejs-runtime:$NodeVersion" -f "$DockerDir\Dockerfile.runtime" --target noderun .
} else {
    Write-Host "SKIP build node base image (already exists)" -ForegroundColor Blue
}
if (!$ExistsProxy -or $Force) {
    Write-Host "Build proxy base image from source (apply new nginx config)" -ForegroundColor Green
    docker build -t "onlyoffice/4testing-docspace-proxy-runtime:$ProxyVersion" -f "$DockerDir\Dockerfile.runtime" --target router .
} else {
    Write-Host "SKIP build proxy base image (already exists)" -ForegroundColor Blue
}
Write-Host "Run migration and services" -ForegroundColor Green
$Env:ENV_EXTENSION="dev"
$Env:Baseimage_Dotnet_Run="onlyoffice/4testing-docspace-dotnet-runtime:$DotnetVersion"
$Env:Baseimage_Nodejs_Run="onlyoffice/4testing-docspace-nodejs-runtime:$NodeVersion"
$Env:Baseimage_Proxy_Run="onlyoffice/4testing-docspace-proxy-runtime:$ProxyVersion"
$Env:DOCUMENT_SERVER_IMAGE_NAME="onlyoffice/documentserver-de:latest"
$Env:SERVICE_DOCEDITOR=$Doceditor
$Env:SERVICE_LOGIN=$Login
$Env:SERVICE_CLIENT=$Client
$Env:ROOT_DIR=$RootDir
$Env:BUILD_PATH="/var/www"
$Env:SRC_PATH="$RootDir\publish\services"
$Env:DATA_DIR="$RootDir\Data"
$Env:APP_URL_PORTAL=$PortalUrl
docker compose -f "$DockerDir\docspace.profiles.yml" -f "$DockerDir\docspace.overcome.yml" --profile migration-runner --profile backend-local up -d
Set-Location -Path $PSScriptRoot

View File

@ -1,112 +0,0 @@
#!/bin/bash
rd="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
echo "Run script directory:" $dir
dir=$(builtin cd $rd/../; pwd)
dockerDir="$dir/build/install/docker"
echo "Root directory:" $dir
echo "Docker files root directory:" $dockerDir
local_ip=$(ipconfig getifaddr en0)
echo "LOCAL IP: $local_ip"
doceditor=${local_ip}:5013
login=${local_ip}:5011
client=${local_ip}:5001
portal_url="http://$local_ip:8092"
echo "SERVICE_DOCEDITOR: $doceditor"
echo "SERVICE_LOGIN: $login"
echo "SERVICE_CLIENT: $client"
echo "APP_URL_PORTAL: $portal_url"
force=false
if [ "$1" = "--force" ]; then
force=true
fi
echo "FORCE BUILD BASE IMAGES: $force"
# Stop all backend services
$dir/build/start/stop.backend.docker.sh
echo "Run MySQL"
arch_name="$(uname -m)"
existsnetwork=$(docker network ls | awk '{print $2;}' | { grep -x onlyoffice || true; });
if [[ -z ${existsnetwork} ]]; then
docker network create --driver bridge onlyoffice
fi
if [ "${arch_name}" = "x86_64" ]; then
echo "CPU Type: x86_64 -> run db.yml"
docker compose -f $dockerDir/db.yml up -d
elif [ "${arch_name}" = "arm64" ]; then
echo "CPU Type: arm64 -> run db.yml with arm64v8 image"
MYSQL_IMAGE=arm64v8/mysql:8.0.32-oracle \
docker compose -f $dockerDir/db.yml up -d
else
echo "Error: Unknown CPU Type: ${arch_name}."
exit 1
fi
echo "Clear publish folder"
rm -rf $dir/publish
echo "Build backend services (to "publish/" folder)"
bash $dir/build/install/common/build-services.sh -pb backend-publish -pc Debug -de "$dockerDir/docker-entrypoint.py"
dotnet_version=dev
exists=$(docker images | egrep "onlyoffice/4testing-docspace-dotnet-runtime" | egrep "$dotnet_version" | awk 'NR>0 {print $1 ":" $2}')
if [ "${exists}" = "" ] || [ "$force" = true ]; then
echo "Build dotnet base image from source (apply new dotnet config)"
docker build -t onlyoffice/4testing-docspace-dotnet-runtime:$dotnet_version -f ./build/install/docker/Dockerfile.runtime --target dotnetrun .
else
echo "SKIP build dotnet base image (already exists)"
fi
node_version=dev
exists=$(docker images | egrep "onlyoffice/4testing-docspace-nodejs-runtime" | egrep "$node_version" | awk 'NR>0 {print $1 ":" $2}')
if [ "${exists}" = "" ] || [ "$force" = true ]; then
echo "Build nodejs base image from source"
docker build -t onlyoffice/4testing-docspace-nodejs-runtime:$node_version -f ./build/install/docker/Dockerfile.runtime --target noderun .
else
echo "SKIP build nodejs base image (already exists)"
fi
proxy_version=dev
exists=$(docker images | egrep "onlyoffice/4testing-docspace-proxy-runtime" | egrep "$proxy_version" | awk 'NR>0 {print $1 ":" $2}')
if [ "${exists}" = "" ] || [ "$force" = true ]; then
echo "Build proxy base image from source (apply new nginx config)"
docker build -t onlyoffice/4testing-docspace-proxy-runtime:$proxy_version -f ./build/install/docker/Dockerfile.runtime --target router .
else
echo "SKIP build proxy base image (already exists)"
fi
echo "Run migration and services"
ENV_EXTENSION="dev" \
Baseimage_Dotnet_Run="onlyoffice/4testing-docspace-dotnet-runtime:$dotnet_version" \
Baseimage_Nodejs_Run="onlyoffice/4testing-docspace-nodejs-runtime:$node_version" \
Baseimage_Proxy_Run="onlyoffice/4testing-docspace-proxy-runtime:$proxy_version" \
DOCUMENT_SERVER_IMAGE_NAME=onlyoffice/documentserver-de:latest \
SERVICE_DOCEDITOR=$doceditor \
SERVICE_LOGIN=$login \
SERVICE_CLIENT=$client \
ROOT_DIR=$dir \
BUILD_PATH="/var/www" \
SRC_PATH="$dir/publish/services" \
DATA_DIR="$dir/Data" \
APP_URL_PORTAL=$portal_url \
docker-compose -f $dockerDir/docspace.profiles.yml -f $dockerDir/docspace.overcome.yml --profile migration-runner --profile backend-local up -d

View File

@ -1,27 +0,0 @@
@echo off
echo "##########################################################"
echo "######### Start build and deploy #######################"
echo "##########################################################"
echo.
PUSHD %~dp0
call runasadmin.bat "%~dpnx0"
if %errorlevel% == 0 (
call start\stop.bat nopause
echo "FRONT-END (for start run command 'yarn start' inside the root folder)"
call build.frontend.bat nopause
echo "BACK-END"
call build.backend.bat nopause
call start\start.bat nopause
echo.
pause
)

View File

@ -1,7 +0,0 @@
@echo off
pwsh %~dp0/build.document.server.docker.ps1 %1
echo.
pause

View File

@ -1,15 +0,0 @@
$PSversionMajor = $PSVersionTable.PSVersion | sort-object major | ForEach-Object { $_.major }
$PSversionMinor = $PSVersionTable.PSVersion | sort-object minor | ForEach-Object { $_.minor }
if ($PSversionMajor -lt 7 -or ($PSversionMajor -eq 7 -and $PSversionMinor -lt 2)) {
Write-Error "PowerShell version must be greater than or equal to 7.2."
exit
}
$RootDir = Split-Path -Parent $PSScriptRoot
Write-Host "Run Document server" -ForegroundColor Green
$DOCUMENT_SERVER_IMAGE_NAME = "onlyoffice/documentserver-de:latest"
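# Host port 8085 is published to the container's port 80; the JWT secret/header set here are assumed to match the portal's Document Server settings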
docker run -i -t -d -p 8085:80 -e JWT_ENABLED=true -e JWT_SECRET=secret -e JWT_HEADER=AuthorizationJwt --restart=always -v $RootDir/Data:/var/www/onlyoffice/Data $DOCUMENT_SERVER_IMAGE_NAME

View File

@ -1,6 +0,0 @@
PUSHD %~dp0..
REM call yarn wipe
call yarn install
POPD

View File

@ -1,33 +0,0 @@
#!/bin/bash
echo "##########################################################"
echo "######### Start build and deploy #######################"
echo "##########################################################"
echo ""
rd="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
echo "Run script directory:" $rd
dir=$(builtin cd $rd/../; pwd)
echo "Root directory:" $dir
pushd $dir
echo "FRONT-END (for start run command 'yarn start' inside the root folder)"
yarn install
echo "BACK-END"
dotnet build $dir/asc.web.slnf /fl1 /flp1:logfile=asc.web.log;verbosity=normal
echo "install nodejs projects dependencies..."
pushd $dir/common/ASC.Socket.IO/
yarn install
pushd $dir/common/ASC.SsoAuth/
yarn install
pushd $dir/common/ASC.WebDav/
yarn install
pushd $dir/common/ASC.WebPlugins/
yarn install

View File

@ -1,63 +0,0 @@
@echo off
cd /D "%~dp0"
call runasadmin.bat "%~dpnx0"
if %errorlevel% == 0 (
PUSHD %~dp0..
IF "%2"=="personal" (
echo "mode=%2"
) ELSE (
echo "mode="
)
REM call yarn wipe
call yarn install
REM call yarn build
IF "%2"=="personal" (
call yarn build:personal
) ELSE (
call yarn build
)
REM call yarn wipe
IF "%2"=="personal" (
call yarn deploy:personal
) ELSE (
call yarn deploy
)
REM copy nginx configurations to deploy folder
xcopy config\nginx\onlyoffice.conf build\deploy\nginx\ /E /R /Y
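REM remove '#' characters to uncomment the disabled sections of onlyoffice.conf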
powershell -Command "(gc build\deploy\nginx\onlyoffice.conf) -replace '#', '' | Out-File -encoding ASCII build\deploy\nginx\onlyoffice.conf"
xcopy config\nginx\sites-enabled\* build\deploy\nginx\sites-enabled\ /E /R /Y
REM fix paths
powershell -Command "(gc build\deploy\nginx\sites-enabled\onlyoffice-client.conf) -replace 'ROOTPATH', '%~dp0deploy\client' -replace '\\', '/' | Out-File -encoding ASCII build\deploy\nginx\sites-enabled\onlyoffice-client.conf"
REM restart nginx
echo service nginx stop
call sc stop nginx > nul
REM sleep 5 seconds
call ping 127.0.0.1 -n 6 > nul
echo service nginx start
call sc start nginx > nul
if NOT %errorlevel% == 0 (
echo Couldn't restart nginx service
)
)
echo.
POPD
if "%1"=="nopause" goto start
pause
:start

View File

@ -1,25 +0,0 @@
@echo off
echo "##########################################################"
echo "######### Start build and deploy #######################"
echo "##########################################################"
echo.
cd /D "%~dp0"
call runasadmin.bat "%~dpnx0"
if %errorlevel% == 0 (
echo "FRONT-END static"
call build.static.bat nopause
echo "BACK-END"
call build.backend.bat nopause
echo.
pause
)

View File

@ -1,7 +0,0 @@
@echo off
pwsh %~dp0/clear.backend.docker.ps1
echo.
pause

View File

@ -1,37 +0,0 @@
$Containers = docker ps -aqf "name=^onlyoffice"
$Images = docker images onlyoffice/4testing-docspace* -q
$RootDir = Split-Path -Parent $PSScriptRoot
$DockerDir = ($RootDir + "\build\install\docker")
Write-Host "Clean up containers, volumes or networks" -ForegroundColor Green
if ($Containers -or $Images) {
Write-Host "Remove all backend containers" -ForegroundColor Blue
$Env:DOCUMENT_SERVER_IMAGE_NAME="onlyoffice/documentserver-de:latest"
$Env:Baseimage_Dotnet_Run="onlyoffice/4testing-docspace-dotnet-runtime:dev"
$Env:Baseimage_Nodejs_Run="onlyoffice/4testing-docspace-nodejs-runtime:dev"
$Env:Baseimage_Proxy_Run="onlyoffice/4testing-docspace-proxy-runtime:dev"
$Env:SERVICE_CLIENT="localhost:5001"
$Env:BUILD_PATH="/var/www"
$Env:SRC_PATH="$RootDir\publish\services"
$Env:ROOT_DIR=$RootDir
$Env:DATA_DIR="$RootDir\Data"
docker compose -f "$DockerDir\docspace.profiles.yml" -f "$DockerDir\docspace.overcome.yml" --profile "migration-runner" --profile "backend-local" down --volumes
Write-Host "Remove docker contatiners 'mysql'" -ForegroundColor Blue
docker compose -f "$DockerDir\db.yml" down --volumes
Write-Host "Remove docker volumes" -ForegroundColor Blue
docker volume prune -f -a
Write-Host "Remove docker base images (onlyoffice/4testing-docspace)" -ForegroundColor Blue
docker rmi -f $Images
Write-Host "Remove docker networks" -ForegroundColor Blue
docker network prune -f
}
else {
Write-Host "No containers, images, volumes or networks to clean up" -ForegroundColor Green
}

View File

@ -1,38 +0,0 @@
#!/bin/bash
Containers=$(docker ps -a | egrep "onlyoffice" | awk 'NR>0 {print $1}')
RunDir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
RootDir=$(builtin cd $RunDir/../; pwd)
DockerDir="${RootDir}/build/install/docker"
echo "Clean up containers, volumes or networks"
if [[ $Containers != "" ]]
then
echo "Remove all backend containers"
DOCUMENT_SERVER_IMAGE_NAME=onlyoffice/documentserver-de:latest \
Baseimage_Dotnet_Run="onlyoffice/4testing-docspace-dotnet-runtime:dev" \
Baseimage_Nodejs_Run="onlyoffice/4testing-docspace-nodejs-runtime:dev" \
Baseimage_Proxy_Run="onlyoffice/4testing-docspace-proxy-runtime:dev" \
SERVICE_CLIENT="localhost:5001" \
BUILD_PATH="/var/www" \
SRC_PATH="${RootDir}/publish/services" \
ROOT_DIR=$RootDir \
DATA_DIR="${RootDir}/Data" \
docker-compose -f "${DockerDir}/docspace.profiles.yml" -f "${DockerDir}/docspace.overcome.yml" --profile migration-runner --profile backend-local down --volumes
echo "Remove docker contatiners 'mysql'"
docker compose -f "${DockerDir}/db.yml" down --volumes
echo "Remove docker volumes"
docker volume prune -f -a
echo "Remove docker base images (onlyoffice/4testing-docspace)"
docker rmi -f $(docker images -a | egrep "onlyoffice/4testing-docspace" | awk 'NR>0 {print $3}')
echo "Remove unused networks."
docker network prune -f
else
echo "No containers, images, volumes or networks to clean up"
fi

View File

@ -1,6 +0,0 @@
@echo "MIGRATIONS"
@echo off
PUSHD %~dp0..\common\Tools\ASC.Migration.Creator
dotnet run --project ASC.Migration.Creator.csproj
pause

View File

@ -1,45 +0,0 @@
## Install ONLYOFFICE DocSpace using OneClickInstall script
### Enterprise version
#### Get the installation script:
```
wget https://download.onlyoffice.com/docspace/docspace-enterprise-install.sh
```
#### Install ONLYOFFICE DocSpace RPM/DEB using the script:
```
bash docspace-enterprise-install.sh package
```
#### Install ONLYOFFICE DocSpace Docker using the script:
```
bash docspace-enterprise-install.sh docker
```
#### Display the available RPM/DEB script parameters:
```
bash docspace-enterprise-install.sh package -h
```
#### Display the available Docker script parameters:
```
bash docspace-enterprise-install.sh docker -h
```
### Community version
#### Get the installation script:
```
wget https://download.onlyoffice.com/docspace/docspace-install.sh
```
#### Install ONLYOFFICE DocSpace RPM/DEB using the script:
```
bash docspace-install.sh package
```
#### Install ONLYOFFICE DocSpace Docker using the script:
```
bash docspace-install.sh docker
```
#### Display the available RPM/DEB script parameters:
```
bash docspace-install.sh package -h
```
#### Display the available Docker script parameters:
```
bash docspace-install.sh docker -h
```
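The flags reported by `-h` can be combined in a single run; a hypothetical example (flags taken from the scripts' own help output shown further below):
```
bash docspace-install.sh package -it COMMUNITY -ms false -skiphc true
```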

View File

@ -1,187 +0,0 @@
#!/bin/bash
#
# (c) Copyright Ascensio System SIA 2021
#
# This program is a free software product. You can redistribute it and/or
# modify it under the terms of the GNU Affero General Public License (AGPL)
# version 3 as published by the Free Software Foundation. In accordance with
# Section 7(a) of the GNU AGPL its Section 15 shall be amended to the effect
# that Ascensio System SIA expressly excludes the warranty of non-infringement
# of any third-party rights.
#
# This program is distributed WITHOUT ANY WARRANTY; without even the implied
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. For
# details, see the GNU AGPL at: http://www.gnu.org/licenses/agpl-3.0.html
#
# You can contact Ascensio System SIA at 20A-12 Ernesta Birznieka-Upisha
# street, Riga, Latvia, EU, LV-1050.
#
# The interactive user interfaces in modified source and object code versions
# of the Program must display Appropriate Legal Notices, as required under
# Section 5 of the GNU AGPL version 3.
#
# Pursuant to Section 7(b) of the License you must retain the original Product
# logo when distributing the program. Pursuant to Section 7(e) we decline to
# grant you any rights under trademark law for use of our trademarks.
#
# All the Product's GUI elements, including illustrations and icon sets, as
# well as technical writing content are licensed under the terms of the
# Creative Commons Attribution-ShareAlike 4.0 International. See the License
# terms at http://creativecommons.org/licenses/by-sa/4.0/legalcode
#
PARAMETERS="$PARAMETERS -it COMMUNITY";
DOCKER="";
LOCAL_SCRIPTS="false"
product="docspace"
FILE_NAME="$(basename "$0")"
while [ "$1" != "" ]; do
case $1 in
-ls | --localscripts )
if [ "$2" == "true" ] || [ "$2" == "false" ]; then
PARAMETERS="$PARAMETERS ${1}";
LOCAL_SCRIPTS=$2
shift
fi
;;
-gb | --gitbranch )
if [ "$2" != "" ]; then
PARAMETERS="$PARAMETERS ${1}";
GIT_BRANCH=$2
shift
fi
;;
docker )
DOCKER="true";
shift && continue
;;
package )
DOCKER="false";
shift && continue
;;
"-?" | -h | --help )
if [ -z "$DOCKER" ]; then
echo "Run 'bash $FILE_NAME docker' to install docker version of application or 'bash $FILE_NAME package' to install deb/rpm version."
echo "Run 'bash $FILE_NAME docker -h' or 'bash $FILE_NAME package -h' to get more details."
exit 0;
fi
PARAMETERS="$PARAMETERS -ht $FILE_NAME";
;;
esac
PARAMETERS="$PARAMETERS ${1}";
shift
done
root_checking () {
if [ ! $( id -u ) -eq 0 ]; then
echo "To perform this action you must be logged in with root rights"
exit 1;
fi
}
command_exists () {
type "$1" &> /dev/null;
}
install_curl () {
if command_exists apt-get; then
apt-get -y update
apt-get -y -q install curl
elif command_exists yum; then
yum -y install curl
fi
if ! command_exists curl; then
echo "command curl not found"
exit 1;
fi
}
read_installation_method () {
echo "Select 'Y' to install ONLYOFFICE $product using Docker (recommended). Select 'N' to install it using RPM/DEB packages.";
read -p "Install with Docker [Y/N/C]? " choice
case "$choice" in
y|Y )
DOCKER="true";
;;
n|N )
DOCKER="false";
;;
c|C )
exit 0;
;;
* )
echo "Please, enter Y, N or C to cancel";
;;
esac
if [ "$DOCKER" == "" ]; then
read_installation_method;
fi
}
root_checking
if ! command_exists curl ; then
install_curl;
fi
if [ -z "$DOCKER" ]; then
read_installation_method;
fi
if [ -z $GIT_BRANCH ]; then
DOWNLOAD_URL_PREFIX="https://download.onlyoffice.com/${product}"
else
DOWNLOAD_URL_PREFIX="https://raw.githubusercontent.com/ONLYOFFICE/${product}/${GIT_BRANCH}/build/install/OneClickInstall"
fi
if [ "$DOCKER" == "true" ]; then
if [ "$LOCAL_SCRIPTS" == "true" ]; then
bash install-Docker.sh ${PARAMETERS}
else
curl -s -O ${DOWNLOAD_URL_PREFIX}/install-Docker.sh
bash install-Docker.sh ${PARAMETERS}
rm install-Docker.sh
fi
else
if [ -f /etc/redhat-release ] ; then
DIST=$(cat /etc/redhat-release |sed s/\ release.*//);
REV=$(cat /etc/redhat-release | sed s/.*release\ // | sed s/\ .*//);
REV_PARTS=(${REV//\./ });
REV=${REV_PARTS[0]};
if [[ "${DIST}" == CentOS* ]] && [ ${REV} -lt 7 ]; then
echo "CentOS 7 or later is required";
exit 1;
fi
if [ "$LOCAL_SCRIPTS" == "true" ]; then
bash install-RedHat.sh ${PARAMETERS}
else
curl -s -O ${DOWNLOAD_URL_PREFIX}/install-RedHat.sh
bash install-RedHat.sh ${PARAMETERS}
rm install-RedHat.sh
fi
elif [ -f /etc/debian_version ] ; then
if [ "$LOCAL_SCRIPTS" == "true" ]; then
bash install-Debian.sh ${PARAMETERS}
else
curl -s -O ${DOWNLOAD_URL_PREFIX}/install-Debian.sh
bash install-Debian.sh ${PARAMETERS}
rm install-Debian.sh
fi
else
echo "Not supported OS";
exit 1;
fi
fi

View File

@ -1,153 +0,0 @@
#!/bin/bash
set -e
package_sysname="onlyoffice";
DS_COMMON_NAME="onlyoffice";
product_name="DocSpace"
product=$(tr '[:upper:]' '[:lower:]' <<< ${product_name})
INSTALLATION_TYPE="ENTERPRISE"
MAKESWAP="true"
RES_APP_INSTALLED="is already installed";
RES_APP_CHECK_PORTS="uses ports"
RES_CHECK_PORTS="please, make sure that the ports are free.";
RES_INSTALL_SUCCESS="Thank you for installing ONLYOFFICE ${product_name}.";
RES_QUESTIONS="In case you have any questions contact us via http://support.onlyoffice.com or visit our forum at http://forum.onlyoffice.com"
while [ "$1" != "" ]; do
case $1 in
-u | --update )
if [ "$2" != "" ]; then
UPDATE=$2
shift
fi
;;
-je | --jwtenabled )
if [ "$2" != "" ]; then
DS_JWT_ENABLED=$2
shift
fi
;;
-jh | --jwtheader )
if [ "$2" != "" ]; then
DS_JWT_HEADER=$2
shift
fi
;;
-js | --jwtsecret )
if [ "$2" != "" ]; then
DS_JWT_SECRET=$2
shift
fi
;;
-gb | --gitbranch )
if [ "$2" != "" ]; then
PARAMETERS="$PARAMETERS ${1}";
GIT_BRANCH=$2
shift
fi
;;
-ls | --localscripts )
if [ "$2" != "" ]; then
LOCAL_SCRIPTS=$2
shift
fi
;;
-skiphc | --skiphardwarecheck )
if [ "$2" != "" ]; then
SKIP_HARDWARE_CHECK=$2
shift
fi
;;
-it | --installation_type )
if [ "$2" != "" ]; then
INSTALLATION_TYPE=$(echo "$2" | awk '{print toupper($0)}');
shift
fi
;;
-ms | --makeswap )
if [ "$2" != "" ]; then
MAKESWAP=$2
shift
fi
;;
-? | -h | --help )
echo " Usage $0 [PARAMETER] [[PARAMETER], ...]"
echo " Parameters:"
echo " -it, --installation_type installation type (community|enterprise)"
echo " -u, --update use to update existing components (true|false)"
echo " -je, --jwtenabled specifies the enabling the JWT validation (true|false)"
echo " -jh, --jwtheader defines the http header that will be used to send the JWT"
echo " -js, --jwtsecret defines the secret key to validate the JWT in the request"
echo " -ls, --local_scripts use 'true' to run local scripts (true|false)"
echo " -skiphc, --skiphardwarecheck use to skip hardware check (true|false)"
echo " -ms, --makeswap make swap file (true|false)"
echo " -?, -h, --help this help"
echo
exit 0
;;
esac
shift
done
if [ -z "${UPDATE}" ]; then
UPDATE="false";
fi
if [ -z "${LOCAL_SCRIPTS}" ]; then
LOCAL_SCRIPTS="false";
fi
if [ -z "${SKIP_HARDWARE_CHECK}" ]; then
SKIP_HARDWARE_CHECK="false";
fi
if [ $(dpkg-query -W -f='${Status}' curl 2>/dev/null | grep -c "ok installed") -eq 0 ]; then
apt-get update;
apt-get install -yq curl;
fi
if [ -z $GIT_BRANCH ]; then
DOWNLOAD_URL_PREFIX="https://download.onlyoffice.com/${product}/install-Debian"
else
DOWNLOAD_URL_PREFIX="https://raw.githubusercontent.com/ONLYOFFICE/${product}/${GIT_BRANCH}/build/install/OneClickInstall/install-Debian"
fi
if [ "${LOCAL_SCRIPTS}" == "true" ]; then
source install-Debian/bootstrap.sh
else
source <(curl ${DOWNLOAD_URL_PREFIX}/bootstrap.sh)
fi
# add onlyoffice repo
mkdir -p -m 700 $HOME/.gnupg
echo "deb [signed-by=/usr/share/keyrings/onlyoffice.gpg] http://download.onlyoffice.com/repo/debian squeeze main" | tee /etc/apt/sources.list.d/onlyoffice.list
curl -fsSL https://download.onlyoffice.com/GPG-KEY-ONLYOFFICE | gpg --no-default-keyring --keyring gnupg-ring:/usr/share/keyrings/onlyoffice.gpg --import
chmod 644 /usr/share/keyrings/onlyoffice.gpg
declare -x LANG="en_US.UTF-8"
declare -x LANGUAGE="en_US:en"
declare -x LC_ALL="en_US.UTF-8"
if [ "${LOCAL_SCRIPTS}" == "true" ]; then
source install-Debian/tools.sh
source install-Debian/check-ports.sh
source install-Debian/install-preq.sh
source install-Debian/install-app.sh
else
source <(curl ${DOWNLOAD_URL_PREFIX}/tools.sh)
source <(curl ${DOWNLOAD_URL_PREFIX}/check-ports.sh)
source <(curl ${DOWNLOAD_URL_PREFIX}/install-preq.sh)
source <(curl ${DOWNLOAD_URL_PREFIX}/install-app.sh)
fi

View File

@ -1,35 +0,0 @@
#!/bin/bash
set -e
cat<<EOF
#######################################
# BOOTSTRAP
#######################################
EOF
if [ -f /etc/needrestart/needrestart.conf ]; then
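# uncomment $nrconf{restart} and set it to 'a' so needrestart restarts services automatically instead of prompting during apt installs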
sed -e "s_#\$nrconf{restart}_\$nrconf{restart}_" -e "s_\(\$nrconf{restart} =\).*_\1 'a';_" -i /etc/needrestart/needrestart.conf
fi
if ! dpkg -l | grep -q "sudo"; then
apt-get install -yq sudo
fi
if ! dpkg -l | grep -q "net-tools"; then
apt-get install -yq net-tools
fi
if ! dpkg -l | grep -q "dirmngr"; then
apt-get install -yq dirmngr
fi
if ! dpkg -l | grep -q "debian-archive-keyring"; then
apt-get install -yq debian-archive-keyring
fi
if ! dpkg -l | grep -q "debconf-utils"; then
apt-get install -yq debconf-utils
fi

View File

@ -1,39 +0,0 @@
#!/bin/bash
set -e
cat<<EOF
#######################################
# CHECK PORTS
#######################################
EOF
if dpkg -l | grep -q "${product}"; then
echo "${product} $RES_APP_INSTALLED"
PRODUCT_INSTALLED="true";
elif [ $UPDATE != "true" ] && netstat -lnp | awk '{print $4}' | grep -qE ":80$|:8081$|:8083$|:5001$|:5002$|:8080$|:80$"; then
echo "${product} $RES_APP_CHECK_PORTS: 80, 8081, 8083, 5001, 5002";
echo "$RES_CHECK_PORTS"
exit
else
PRODUCT_INSTALLED="false";
fi
if dpkg -l | grep -q "${package_sysname}-documentserver"; then
echo "${package_sysname}-documentserver $RES_APP_INSTALLED"
DOCUMENT_SERVER_INSTALLED="true";
elif [ $UPDATE != "true" ] && netstat -lnp | awk '{print $4}' | grep -qE ":8083$|:5432$|:5672$|:6379$|:8000$|:8080$"; then
echo "${package_sysname}-documentserver $RES_APP_CHECK_PORTS: 8083, 5432, 5672, 6379, 8000, 8080";
echo "$RES_CHECK_PORTS"
exit
else
DOCUMENT_SERVER_INSTALLED="false";
fi
if [ "$PRODUCT_INSTALLED" = "true" ] || [ "$DOCUMENT_SERVER_INSTALLED" = "true" ]; then
if [ "$UPDATE" != "true" ]; then
exit;
fi
fi

View File

@ -1,95 +0,0 @@
#!/bin/bash
set -e
cat<<EOF
#######################################
# INSTALL APP
#######################################
EOF
apt-get -y update
if [ "$UPDATE" = "true" ] && [ "$DOCUMENT_SERVER_INSTALLED" = "true" ]; then
ds_pkg_installed_name=$(dpkg -l | grep ${package_sysname}-documentserver | tail -n1 | awk '{print $2}');
if [ "$INSTALLATION_TYPE" = "COMMUNITY" ]; then
ds_pkg_name="${package_sysname}-documentserver";
elif [ "$INSTALLATION_TYPE" = "ENTERPRISE" ]; then
ds_pkg_name="${package_sysname}-documentserver-ee";
fi
if [ -n "$ds_pkg_name" ]; then
if ! dpkg -l ${ds_pkg_name} &> /dev/null; then
debconf-get-selections | grep ^${ds_pkg_installed_name} | sed s/${ds_pkg_installed_name}/${ds_pkg_name}/g | debconf-set-selections
DEBIAN_FRONTEND=noninteractive apt-get purge -yq ${ds_pkg_installed_name}
apt-get install -yq ${ds_pkg_name}
RECONFIGURE_PRODUCT="true"
else
apt-get install -y --only-upgrade ${ds_pkg_name};
fi
fi
fi
if [ "$DOCUMENT_SERVER_INSTALLED" = "false" ]; then
DS_PORT=${DS_PORT:-8083};
DS_DB_HOST=localhost;
DS_DB_NAME=$DS_COMMON_NAME;
DS_DB_USER=$DS_COMMON_NAME;
DS_DB_PWD=$DS_COMMON_NAME;
DS_JWT_ENABLED=${DS_JWT_ENABLED:-true};
DS_JWT_SECRET=${DS_JWT_SECRET:-$(cat /dev/urandom | tr -dc A-Za-z0-9 | head -c 32)};
DS_JWT_HEADER=${DS_JWT_HEADER:-AuthorizationJwt};
if ! su - postgres -s /bin/bash -c "psql -lqt" | cut -d \| -f 1 | grep -q ${DS_DB_NAME}; then
su - postgres -s /bin/bash -c "psql -c \"CREATE USER ${DS_DB_USER} WITH password '${DS_DB_PWD}';\""
su - postgres -s /bin/bash -c "psql -c \"CREATE DATABASE ${DS_DB_NAME} OWNER ${DS_DB_USER};\""
fi
echo ${package_sysname}-documentserver $DS_COMMON_NAME/ds-port select $DS_PORT | sudo debconf-set-selections
echo ${package_sysname}-documentserver $DS_COMMON_NAME/db-pwd select $DS_DB_PWD | sudo debconf-set-selections
echo ${package_sysname}-documentserver $DS_COMMON_NAME/db-user select $DS_DB_USER | sudo debconf-set-selections
echo ${package_sysname}-documentserver $DS_COMMON_NAME/db-name select $DS_DB_NAME | sudo debconf-set-selections
echo ${package_sysname}-documentserver $DS_COMMON_NAME/jwt-enabled select ${DS_JWT_ENABLED} | sudo debconf-set-selections
echo ${package_sysname}-documentserver $DS_COMMON_NAME/jwt-secret select ${DS_JWT_SECRET} | sudo debconf-set-selections
echo ${package_sysname}-documentserver $DS_COMMON_NAME/jwt-header select ${DS_JWT_HEADER} | sudo debconf-set-selections
if [ "$INSTALLATION_TYPE" = "COMMUNITY" ]; then
apt-get install -yq ${package_sysname}-documentserver
else
apt-get install -yq ${package_sysname}-documentserver-ee
fi
fi
if [ "$PRODUCT_INSTALLED" = "false" ]; then
echo ${product} ${product}/db-pwd select $MYSQL_SERVER_PASS | sudo debconf-set-selections
echo ${product} ${product}/db-user select $MYSQL_SERVER_USER | sudo debconf-set-selections
echo ${product} ${product}/db-name select $MYSQL_SERVER_DB_NAME | sudo debconf-set-selections
apt-get install -y ${product} || true #Fix error 'Failed to fetch'
apt-get install -y ${product}
elif [ "$UPDATE" = "true" ] && [ "$PRODUCT_INSTALLED" = "true" ]; then
CURRENT_VERSION=$(dpkg-query -W -f='${Version}' ${product} 2>/dev/null)
AVAILABLE_VERSIONS=$(apt show ${product} 2>/dev/null | grep -E '^Version:' | awk '{print $2}')
if [[ "$AVAILABLE_VERSIONS" != *"$CURRENT_VERSION"* ]]; then
apt-get install -o DPkg::options::="--force-confnew" -y --only-upgrade ${product} elasticsearch=${ELASTIC_VERSION}
elif [ "${RECONFIGURE_PRODUCT}" = "true" ]; then
DEBIAN_FRONTEND=noninteractive dpkg-reconfigure ${product}
fi
fi
if [ "$MAKESWAP" == "true" ]; then
make_swap
fi
echo ""
echo "$RES_INSTALL_SUCCESS"
echo "$RES_QUESTIONS"
echo ""

View File

@ -1,143 +0,0 @@
#!/bin/bash
set -e
cat<<EOF
#######################################
# INSTALL PREREQUISITES
#######################################
EOF
if [ "$DIST" = "debian" ] && [ $(apt-cache search ttf-mscorefonts-installer | wc -l) -eq 0 ]; then
echo "deb http://ftp.uk.debian.org/debian/ $DISTRIB_CODENAME main contrib" >> /etc/apt/sources.list
echo "deb-src http://ftp.uk.debian.org/debian/ $DISTRIB_CODENAME main contrib" >> /etc/apt/sources.list
fi
apt-get -y update
if ! command -v locale-gen &> /dev/null; then
apt-get install -yq locales
fi
if ! dpkg -l | grep -q "apt-transport-https"; then
apt-get install -yq apt-transport-https
fi
if ! dpkg -l | grep -q "software-properties-common"; then
apt-get install -yq software-properties-common
fi
locale-gen en_US.UTF-8
# add elasticsearch repo
ELASTIC_VERSION="7.16.3"
ELASTIC_DIST=$(echo $ELASTIC_VERSION | awk '{ print int($1) }')
curl -fsSL https://artifacts.elastic.co/GPG-KEY-elasticsearch | gpg --no-default-keyring --keyring gnupg-ring:/usr/share/keyrings/elastic-${ELASTIC_DIST}.x.gpg --import
echo "deb [signed-by=/usr/share/keyrings/elastic-${ELASTIC_DIST}.x.gpg] https://artifacts.elastic.co/packages/${ELASTIC_DIST}.x/apt stable main" | tee /etc/apt/sources.list.d/elastic-${ELASTIC_DIST}.x.list
chmod 644 /usr/share/keyrings/elastic-${ELASTIC_DIST}.x.gpg
# add nodejs repo
[[ "$DISTRIB_CODENAME" =~ ^(bionic|stretch)$ ]] && NODE_VERSION="16" || NODE_VERSION="18"
echo "deb [signed-by=/usr/share/keyrings/nodesource.gpg] https://deb.nodesource.com/node_$NODE_VERSION.x nodistro main" | tee /etc/apt/sources.list.d/nodesource.list
curl -fsSL https://deb.nodesource.com/gpgkey/nodesource-repo.gpg.key | gpg --no-default-keyring --keyring gnupg-ring:/usr/share/keyrings/nodesource.gpg --import
chmod 644 /usr/share/keyrings/nodesource.gpg
#add dotnet repo
if [ "$DIST" = "debian" ] && [ "$DISTRIB_CODENAME" = "stretch" ]; then
curl https://packages.microsoft.com/config/$DIST/10/packages-microsoft-prod.deb -O
elif [ "$DISTRIB_CODENAME" = "bookworm" ]; then
#Temporary fix for missing dotnet repository for debian bookworm
curl https://packages.microsoft.com/config/$DIST/11/packages-microsoft-prod.deb -O
else
curl https://packages.microsoft.com/config/$DIST/$REV/packages-microsoft-prod.deb -O
fi
echo -e "Package: *\nPin: origin \"packages.microsoft.com\"\nPin-Priority: 1002" | tee /etc/apt/preferences.d/99microsoft-prod.pref
dpkg -i packages-microsoft-prod.deb && rm packages-microsoft-prod.deb
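# detect the newest mysql-apt-config_*_all.deb version advertised on repo.mysql.com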
MYSQL_REPO_VERSION="$(curl https://repo.mysql.com | grep -oP 'mysql-apt-config_\K.*' | grep -o '^[^_]*' | sort --version-sort --field-separator=. | tail -n1)"
MYSQL_PACKAGE_NAME="mysql-apt-config_${MYSQL_REPO_VERSION}_all.deb"
if ! dpkg -l | grep -q "mysql-server"; then
MYSQL_SERVER_HOST=${MYSQL_SERVER_HOST:-"localhost"}
MYSQL_SERVER_DB_NAME=${MYSQL_SERVER_DB_NAME:-"${package_sysname}"}
MYSQL_SERVER_USER=${MYSQL_SERVER_USER:-"root"}
MYSQL_SERVER_PASS=${MYSQL_SERVER_PASS:-"$(cat /dev/urandom | tr -dc A-Za-z0-9 | head -c 12)"}
# setup mysql 8.0 package
curl -OL http://repo.mysql.com/${MYSQL_PACKAGE_NAME}
echo "mysql-apt-config mysql-apt-config/repo-codename select $DISTRIB_CODENAME" | debconf-set-selections
echo "mysql-apt-config mysql-apt-config/repo-distro select $DIST" | debconf-set-selections
echo "mysql-apt-config mysql-apt-config/select-server select mysql-8.0" | debconf-set-selections
DEBIAN_FRONTEND=noninteractive dpkg -i ${MYSQL_PACKAGE_NAME}
rm -f ${MYSQL_PACKAGE_NAME}
#Temporary fix for missing mysql repository for debian bookworm
[ "$DISTRIB_CODENAME" = "bookworm" ] && sed -i "s/$DIST/ubuntu/g; s/$DISTRIB_CODENAME/jammy/g" /etc/apt/sources.list.d/mysql.list
echo mysql-community-server mysql-community-server/root-pass password ${MYSQL_SERVER_PASS} | debconf-set-selections
echo mysql-community-server mysql-community-server/re-root-pass password ${MYSQL_SERVER_PASS} | debconf-set-selections
echo mysql-community-server mysql-server/default-auth-override select "Use Strong Password Encryption (RECOMMENDED)" | debconf-set-selections
echo mysql-server-8.0 mysql-server/root_password password ${MYSQL_SERVER_PASS} | debconf-set-selections
echo mysql-server-8.0 mysql-server/root_password_again password ${MYSQL_SERVER_PASS} | debconf-set-selections
elif dpkg -l | grep -q "mysql-apt-config" && [ "$(apt-cache policy mysql-apt-config | awk 'NR==2{print $2}')" != "${MYSQL_REPO_VERSION}" ]; then
curl -OL http://repo.mysql.com/${MYSQL_PACKAGE_NAME}
DEBIAN_FRONTEND=noninteractive dpkg -i ${MYSQL_PACKAGE_NAME}
rm -f ${MYSQL_PACKAGE_NAME}
fi
if [ "$DIST" = "debian" ] && [ "$DISTRIB_CODENAME" = "stretch" ]; then
apt-get install -yq mysql-server mysql-client --allow-unauthenticated
fi
# add redis repo
if [ "$DIST" = "ubuntu" ]; then
curl -fsSL https://packages.redis.io/gpg | gpg --no-default-keyring --keyring gnupg-ring:/usr/share/keyrings/redis.gpg --import
echo "deb [signed-by=/usr/share/keyrings/redis.gpg] https://packages.redis.io/deb $DISTRIB_CODENAME main" | tee /etc/apt/sources.list.d/redis.list
chmod 644 /usr/share/keyrings/redis.gpg
fi
#add nginx repo
curl -s http://nginx.org/keys/nginx_signing.key | gpg --no-default-keyring --keyring gnupg-ring:/usr/share/keyrings/nginx.gpg --import
echo "deb [signed-by=/usr/share/keyrings/nginx.gpg] http://nginx.org/packages/$DIST/ $DISTRIB_CODENAME nginx" | tee /etc/apt/sources.list.d/nginx.list
chmod 644 /usr/share/keyrings/nginx.gpg
#Temporary fix for missing nginx repository for debian bookworm
[ "$DISTRIB_CODENAME" = "bookworm" ] && sed -i "s/$DISTRIB_CODENAME/buster/g" /etc/apt/sources.list.d/nginx.list
#add openresty repo
curl -fsSL https://openresty.org/package/pubkey.gpg | gpg --no-default-keyring --keyring gnupg-ring:/usr/share/keyrings/openresty.gpg --import
echo "deb [signed-by=/usr/share/keyrings/openresty.gpg] http://openresty.org/package/$DIST $DISTRIB_CODENAME $([ "$DIST" = "ubuntu" ] && echo "main" || echo "openresty" )" | tee /etc/apt/sources.list.d/openresty.list
chmod 644 /usr/share/keyrings/openresty.gpg
#Temporary fix for missing openresty repository for debian bookworm
[ "$DISTRIB_CODENAME" = "bookworm" ] && sed -i "s/$DISTRIB_CODENAME/bullseye/g" /etc/apt/sources.list.d/openresty.list
# setup msttcorefonts
echo ttf-mscorefonts-installer msttcorefonts/accepted-mscorefonts-eula select true | debconf-set-selections
# install
apt-get -y update
apt-get install -o DPkg::options::="--force-confnew" -yq \
expect \
nano \
nodejs \
gcc \
make \
dotnet-sdk-7.0 \
mysql-server \
mysql-client \
postgresql \
redis-server \
rabbitmq-server \
ffmpeg
if ! dpkg -l | grep -q "elasticsearch"; then
apt-get install -yq elasticsearch=${ELASTIC_VERSION}
fi
# disable apparmor for mysql
if which apparmor_parser >/dev/null 2>&1 && [ -f /etc/apparmor.d/usr.sbin.mysqld ] && [ ! -f /etc/apparmor.d/disable/usr.sbin.mysqld ]; then
ln -sf /etc/apparmor.d/usr.sbin.mysqld /etc/apparmor.d/disable/;
apparmor_parser -R /etc/apparmor.d/usr.sbin.mysqld;
fi

View File

@ -1,82 +0,0 @@
#!/bin/bash
set -e
make_swap () {
DISK_REQUIREMENTS=6144; #6Gb free space
MEMORY_REQUIREMENTS=11000; #RAM ~12Gb
SWAPFILE="/${PRODUCT}_swapfile";
AVAILABLE_DISK_SPACE=$(df -m / | tail -1 | awk '{ print $4 }');
TOTAL_MEMORY=$(free -m | grep -oP '\d+' | head -n 1);
EXIST=$(swapon -s | awk '{ print $1 }' | { grep -x ${SWAPFILE} || true; });
if [[ -z $EXIST ]] && [ ${TOTAL_MEMORY} -lt ${MEMORY_REQUIREMENTS} ] && [ ${AVAILABLE_DISK_SPACE} -gt ${DISK_REQUIREMENTS} ]; then
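# only create the 6 GB swap file when none exists yet, RAM is below ~11 GB, and enough disk space is free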
fallocate -l 6G ${SWAPFILE}
chmod 600 ${SWAPFILE}
mkswap ${SWAPFILE}
swapon ${SWAPFILE}
echo "$SWAPFILE none swap sw 0 0" >> /etc/fstab
fi
}
command_exists () {
type "$1" &> /dev/null;
}
check_hardware () {
DISK_REQUIREMENTS=40960;
MEMORY_REQUIREMENTS=8192;
CORE_REQUIREMENTS=4;
AVAILABLE_DISK_SPACE=$(df -m / | tail -1 | awk '{ print $4 }');
if [ ${AVAILABLE_DISK_SPACE} -lt ${DISK_REQUIREMENTS} ]; then
echo "Minimal requirements are not met: need at least $DISK_REQUIREMENTS MB of free HDD space"
exit 1;
fi
TOTAL_MEMORY=$(free -m | grep -oP '\d+' | head -n 1);
if [ ${TOTAL_MEMORY} -lt ${MEMORY_REQUIREMENTS} ]; then
echo "Minimal requirements are not met: need at least $MEMORY_REQUIREMENTS MB of RAM"
exit 1;
fi
CPU_CORES_NUMBER=$(cat /proc/cpuinfo | grep processor | wc -l);
if [ ${CPU_CORES_NUMBER} -lt ${CORE_REQUIREMENTS} ]; then
echo "The system does not meet the minimal hardware requirements. CPU with at least $CORE_REQUIREMENTS cores is required"
exit 1;
fi
}
if [ "$SKIP_HARDWARE_CHECK" != "true" ]; then
check_hardware
fi
ARCH="$(dpkg --print-architecture)"
if [ "$ARCH" != "amd64" ]; then
echo "ONLYOFFICE ${product^^} doesn't support architecture '$ARCH'"
exit 1;
fi
REV=`cat /etc/debian_version`
DIST='Debian'
if [ -f /etc/lsb-release ] ; then
DIST=`cat /etc/lsb-release | grep '^DISTRIB_ID' | awk -F= '{ print $2 }'`
REV=`cat /etc/lsb-release | grep '^DISTRIB_RELEASE' | awk -F= '{ print $2 }'`
DISTRIB_CODENAME=`cat /etc/lsb-release | grep '^DISTRIB_CODENAME' | awk -F= '{ print $2 }'`
DISTRIB_RELEASE=`cat /etc/lsb-release | grep '^DISTRIB_RELEASE' | awk -F= '{ print $2 }'`
elif [ -f /etc/lsb_release ] || [ -f /usr/bin/lsb_release ] ; then
DIST=`lsb_release -a 2>&1 | grep 'Distributor ID:' | awk -F ":" '{print $2 }' | tr -d '[:space:]'`
REV=`lsb_release -a 2>&1 | grep 'Release:' | awk -F ":" '{print $2 }' | tr -d '[:space:]'`
DISTRIB_CODENAME=`lsb_release -a 2>&1 | grep 'Codename:' | awk -F ":" '{print $2 }' | tr -d '[:space:]'`
DISTRIB_RELEASE=`lsb_release -a 2>&1 | grep 'Release:' | awk -F ":" '{print $2 }' | tr -d '[:space:]'`
elif [ -f /etc/os-release ] ; then
DISTRIB_CODENAME=$(grep "VERSION=" /etc/os-release |awk -F= {' print $2'}|sed s/\"//g |sed s/[0-9]//g | sed s/\)$//g |sed s/\(//g | tr -d '[:space:]')
DISTRIB_RELEASE=$(grep "VERSION_ID=" /etc/os-release |awk -F= {' print $2'}|sed s/\"//g |sed s/[0-9]//g | sed s/\)$//g |sed s/\(//g | tr -d '[:space:]')
fi
DIST=`echo "$DIST" | tr '[:upper:]' '[:lower:]' | xargs`;
DISTRIB_CODENAME=`echo "$DISTRIB_CODENAME" | tr '[:upper:]' '[:lower:]' | xargs`;

File diff suppressed because it is too large

View File

@ -1,152 +0,0 @@
#!/bin/bash
set -e
package_manager="yum"
package_sysname="onlyoffice";
product_name="DocSpace"
product=$(tr '[:upper:]' '[:lower:]' <<< ${product_name})
INSTALLATION_TYPE="ENTERPRISE"
MAKESWAP="true"
RES_APP_INSTALLED="is already installed";
RES_APP_CHECK_PORTS="uses ports"
RES_CHECK_PORTS="please, make sure that the ports are free.";
RES_INSTALL_SUCCESS="Thank you for installing ONLYOFFICE ${product_name}.";
RES_QUESTIONS="In case you have any questions contact us via http://support.onlyoffice.com or visit our forum at http://forum.onlyoffice.com"
RES_MARIADB="To continue the installation, you need to remove MariaDB"
res_unsupported_version () {
RES_CHOICE="Please, enter Y or N"
RES_CHOICE_INSTALLATION="Continue installation [Y/N]? "
RES_UNSUPPORTED_VERSION="You have an unsupported version of $DIST installed"
RES_SELECT_INSTALLATION="Select 'N' to cancel the ONLYOFFICE installation (recommended). Select 'Y' to continue installing ONLYOFFICE"
RES_ERROR_REMINDER="Please note, that if you continue with the installation, there may be errors"
}
while [ "$1" != "" ]; do
case $1 in
-u | --update )
if [ "$2" != "" ]; then
UPDATE=$2
shift
fi
;;
-je | --jwtenabled )
if [ "$2" != "" ]; then
JWT_ENABLED=$2
shift
fi
;;
-jh | --jwtheader )
if [ "$2" != "" ]; then
JWT_HEADER=$2
shift
fi
;;
-js | --jwtsecret )
if [ "$2" != "" ]; then
JWT_SECRET=$2
shift
fi
;;
-gb | --gitbranch )
if [ "$2" != "" ]; then
PARAMETERS="$PARAMETERS ${1}";
GIT_BRANCH=$2
shift
fi
;;
-ls | --localscripts )
if [ "$2" != "" ]; then
LOCAL_SCRIPTS=$2
shift
fi
;;
-skiphc | --skiphardwarecheck )
if [ "$2" != "" ]; then
SKIP_HARDWARE_CHECK=$2
shift
fi
;;
-it | --installation_type )
if [ "$2" != "" ]; then
INSTALLATION_TYPE=$(echo "$2" | awk '{print toupper($0)}');
shift
fi
;;
-ms | --makeswap )
if [ "$2" != "" ]; then
MAKESWAP=$2
shift
fi
;;
-? | -h | --help )
echo " Usage $0 [PARAMETER] [[PARAMETER], ...]"
echo " Parameters:"
echo " -it, --installation_type installation type (community|enterprise)"
echo " -u, --update use to update existing components (true|false)"
echo " -je, --jwtenabled specifies the enabling the JWT validation (true|false)"
echo " -jh, --jwtheader defines the http header that will be used to send the JWT"
echo " -js, --jwtsecret defines the secret key to validate the JWT in the request"
echo " -ls, --local_scripts use 'true' to run local scripts (true|false)"
echo " -skiphc, --skiphardwarecheck use to skip hardware check (true|false)"
echo " -ms, --makeswap make swap file (true|false)"
echo " -?, -h, --help this help"
echo
exit 0
;;
esac
shift
done
if [ -z "${UPDATE}" ]; then
UPDATE="false";
fi
if [ -z "${LOCAL_SCRIPTS}" ]; then
LOCAL_SCRIPTS="false";
fi
if [ -z "${SKIP_HARDWARE_CHECK}" ]; then
SKIP_HARDWARE_CHECK="false";
fi
cat > /etc/yum.repos.d/onlyoffice.repo <<END
[onlyoffice]
name=onlyoffice repo
baseurl=http://download.onlyoffice.com/repo/centos/main/noarch/
gpgcheck=1
enabled=1
gpgkey=https://download.onlyoffice.com/GPG-KEY-ONLYOFFICE
END
if [ -z $GIT_BRANCH ]; then
DOWNLOAD_URL_PREFIX="https://download.onlyoffice.com/${product}/install-RedHat"
else
DOWNLOAD_URL_PREFIX="https://raw.githubusercontent.com/ONLYOFFICE/${product}/${GIT_BRANCH}/build/install/OneClickInstall/install-RedHat"
fi
if [ "$LOCAL_SCRIPTS" = "true" ]; then
source install-RedHat/tools.sh
source install-RedHat/bootstrap.sh
source install-RedHat/check-ports.sh
source install-RedHat/install-preq.sh
source install-RedHat/install-app.sh
else
source <(curl ${DOWNLOAD_URL_PREFIX}/tools.sh)
source <(curl ${DOWNLOAD_URL_PREFIX}/bootstrap.sh)
source <(curl ${DOWNLOAD_URL_PREFIX}/check-ports.sh)
source <(curl ${DOWNLOAD_URL_PREFIX}/install-preq.sh)
source <(curl ${DOWNLOAD_URL_PREFIX}/install-app.sh)
fi

View File

@ -1,15 +0,0 @@
#!/bin/bash
set -e
cat<<EOF
#######################################
# BOOTSTRAP
#######################################
EOF
if ! rpm -q net-tools; then
${package_manager} -y install net-tools;
fi

View File

@ -1,39 +0,0 @@
#!/bin/bash
set -e
cat<<EOF
#######################################
# CHECK PORTS
#######################################
EOF
if rpm -qa | grep ${product}; then
echo "${product} $RES_APP_INSTALLED"
PRODUCT_INSTALLED="true";
elif [ "${UPDATE}" != "true" ] && netstat -lnp | awk '{print $4}' | grep -qE ":80$|:8081$|:8083$|:5001$|:5002$|:8080$|:80$"; then
echo "${product} $RES_APP_CHECK_PORTS: 80, 8081, 8083, 5001, 5002, 9200, 2181, 9092";
echo "$RES_CHECK_PORTS"
exit
else
PRODUCT_INSTALLED="false";
fi
if rpm -qa | grep ${package_sysname}-documentserver; then
echo "${package_sysname}-documentserver $RES_APP_INSTALLED"
DOCUMENT_SERVER_INSTALLED="true";
elif [ "${UPDATE}" != "true" ] && netstat -lnp | awk '{print $4}' | grep -qE ":8083$|:5432$|:5672$|:6379$|:8000$|:8080$"; then
echo "${package_sysname}-documentserver $RES_APP_CHECK_PORTS: 8083, 5432, 5672, 6379, 8000, 8080";
echo "$RES_CHECK_PORTS"
exit
else
DOCUMENT_SERVER_INSTALLED="false";
fi
if [ "$PRODUCT_INSTALLED" = "true" ] || [ "$DOCUMENT_SERVER_INSTALLED" = "true" ]; then
if [ "$UPDATE" != "true" ]; then
exit;
fi
fi

View File

@ -1,177 +0,0 @@
#!/bin/bash
set -e
cat<<EOF
#######################################
# INSTALL APP
#######################################
EOF
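# switch local PostgreSQL auth from ident to trust so the CREATE USER/DATABASE calls for the Document Server below succeed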
sed "/host\s*all\s*all\s*127\.0\.0\.1\/32\s*ident$/s|ident$|trust|" -i /var/lib/pgsql/data/pg_hba.conf
sed "/host\s*all\s*all\s*::1\/128\s*ident$/s|ident$|trust|" -i /var/lib/pgsql/data/pg_hba.conf
for SVC in $package_services; do
systemctl start $SVC
systemctl enable $SVC
done
if [ "$UPDATE" = "true" ] && [ "$DOCUMENT_SERVER_INSTALLED" = "true" ]; then
ds_pkg_installed_name=$(rpm -qa --qf '%{NAME}\n' | grep ${package_sysname}-documentserver);
if [ "$INSTALLATION_TYPE" = "COMMUNITY" ]; then
ds_pkg_name="${package_sysname}-documentserver";
fi
if [ "$INSTALLATION_TYPE" = "ENTERPRISE" ]; then
ds_pkg_name="${package_sysname}-documentserver-ee";
fi
if [ -n "$ds_pkg_name" ]; then
if ! rpm -qi ${ds_pkg_name} &> /dev/null; then
${package_manager} -y remove ${ds_pkg_installed_name}
DOCUMENT_SERVER_INSTALLED="false"
RECONFIGURE_PRODUCT="true"
else
${package_manager} -y update ${ds_pkg_name}
fi
fi
fi
MYSQL_SERVER_HOST=${MYSQL_SERVER_HOST:-"localhost"}
MYSQL_SERVER_DB_NAME=${MYSQL_SERVER_DB_NAME:-"${package_sysname}"}
MYSQL_SERVER_USER=${MYSQL_SERVER_USER:-"root"}
MYSQL_SERVER_PORT=${MYSQL_SERVER_PORT:-3306}
if [ "${MYSQL_FIRST_TIME_INSTALL}" = "true" ]; then
MYSQL_TEMPORARY_ROOT_PASS="";
if [ -f "/var/log/mysqld.log" ]; then
MYSQL_TEMPORARY_ROOT_PASS=$(cat /var/log/mysqld.log | grep "temporary password" | rev | cut -d " " -f 1 | rev | tail -1);
fi
while ! mysqladmin ping -u root --silent; do
sleep 1
done
if ! mysql "-u$MYSQL_SERVER_USER" "-p$MYSQL_TEMPORARY_ROOT_PASS" -e ";" >/dev/null 2>&1; then
if [ -z $MYSQL_TEMPORARY_ROOT_PASS ]; then
MYSQL="mysql --connect-expired-password -u$MYSQL_SERVER_USER -D mysql";
else
MYSQL="mysql --connect-expired-password -u$MYSQL_SERVER_USER -p${MYSQL_TEMPORARY_ROOT_PASS} -D mysql";
MYSQL_ROOT_PASS=$(echo $MYSQL_TEMPORARY_ROOT_PASS | sed -e 's/;/%/g' -e 's/=/%/g');
fi
MYSQL_AUTHENTICATION_PLUGIN=$($MYSQL -e "SHOW VARIABLES LIKE 'default_authentication_plugin';" -s | awk '{print $2}')
MYSQL_AUTHENTICATION_PLUGIN=${MYSQL_AUTHENTICATION_PLUGIN:-caching_sha2_password}
$MYSQL -e "ALTER USER '${MYSQL_SERVER_USER}'@'localhost' IDENTIFIED WITH ${MYSQL_AUTHENTICATION_PLUGIN} BY '${MYSQL_ROOT_PASS}'" >/dev/null 2>&1 \
|| $MYSQL -e "UPDATE user SET plugin='${MYSQL_AUTHENTICATION_PLUGIN}', authentication_string=PASSWORD('${MYSQL_ROOT_PASS}') WHERE user='${MYSQL_SERVER_USER}' and host='localhost';"
systemctl restart mysqld
fi
fi
if [ "$DOCUMENT_SERVER_INSTALLED" = "false" ]; then
declare -x DS_PORT=8083
DS_RABBITMQ_HOST=localhost;
DS_RABBITMQ_USER=guest;
DS_RABBITMQ_PWD=guest;
DS_REDIS_HOST=localhost;
DS_COMMON_NAME=${DS_COMMON_NAME:-"ds"};
DS_DB_HOST=localhost;
DS_DB_NAME=$DS_COMMON_NAME;
DS_DB_USER=$DS_COMMON_NAME;
DS_DB_PWD=$DS_COMMON_NAME;
declare -x JWT_ENABLED=${JWT_ENABLED:-true};
declare -x JWT_SECRET=${JWT_SECRET:-$(cat /dev/urandom | tr -dc A-Za-z0-9 | head -c 32)};
declare -x JWT_HEADER=${JWT_HEADER:-AuthorizationJwt};
if ! su - postgres -s /bin/bash -c "psql -lqt" | cut -d \| -f 1 | grep -q ${DS_DB_NAME}; then
su - postgres -s /bin/bash -c "psql -c \"CREATE USER ${DS_DB_USER} WITH password '${DS_DB_PWD}';\""
su - postgres -s /bin/bash -c "psql -c \"CREATE DATABASE ${DS_DB_NAME} OWNER ${DS_DB_USER};\""
fi
if [ "$INSTALLATION_TYPE" = "COMMUNITY" ]; then
${package_manager} -y install ${package_sysname}-documentserver
else
${package_manager} -y install ${package_sysname}-documentserver-ee
fi
expect << EOF
set timeout -1
log_user 1
spawn documentserver-configure.sh
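# \025 is Ctrl+U: it clears the prompt's prefilled default before each value is sent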
expect "Configuring database access..."
expect -re "Host"
send "\025$DS_DB_HOST\r"
expect -re "Database name"
send "\025$DS_DB_NAME\r"
expect -re "User"
send "\025$DS_DB_USER\r"
expect -re "Password"
send "\025$DS_DB_PWD\r"
if { "${INSTALLATION_TYPE}" == "ENTERPRISE" } {
expect "Configuring redis access..."
send "\025$DS_REDIS_HOST\r"
}
expect "Configuring AMQP access... "
expect -re "Host"
send "\025$DS_RABBITMQ_HOST\r"
expect -re "User"
send "\025$DS_RABBITMQ_USER\r"
expect -re "Password"
send "\025$DS_RABBITMQ_PWD\r"
expect eof
EOF
fi
{ ${package_manager} check-update ${product}; PRODUCT_CHECK_UPDATE=$?; } || true
if [ "$PRODUCT_INSTALLED" = "false" ]; then
${package_manager} install -y ${product}
${product}-configuration \
-mysqlh ${MYSQL_SERVER_HOST} \
-mysqld ${MYSQL_SERVER_DB_NAME} \
-mysqlu ${MYSQL_SERVER_USER} \
-mysqlp ${MYSQL_ROOT_PASS}
elif [[ "${PRODUCT_CHECK_UPDATE}" -eq "${UPDATE_AVAILABLE_CODE}" || "${RECONFIGURE_PRODUCT}" = "true" ]]; then
ENVIRONMENT=$(grep -oP 'ENVIRONMENT=\K.*' /usr/lib/systemd/system/${product}-api.service)
CONNECTION_STRING=$(json -f /etc/${package_sysname}/${product}/appsettings.$ENVIRONMENT.json ConnectionStrings.default.connectionString)
${package_manager} -y update ${product}
${product}-configuration \
-e $ENVIRONMENT \
-mysqlh $(grep -oP 'Server=\K[^;]*' <<< "$CONNECTION_STRING") \
-mysqld $(grep -oP 'Database=\K[^;]*' <<< "$CONNECTION_STRING") \
-mysqlu $(grep -oP 'User ID=\K[^;]*' <<< "$CONNECTION_STRING") \
-mysqlp $(grep -oP 'Password=\K[^;]*' <<< "$CONNECTION_STRING")
fi
if [ "$MAKESWAP" == "true" ]; then
make_swap
fi
echo ""
echo "$RES_INSTALL_SUCCESS"
echo "$RES_QUESTIONS"
echo ""

View File

@ -1,130 +0,0 @@
#!/bin/bash
set -e
cat<<EOF
#######################################
# INSTALL PREREQUISITES
#######################################
EOF
# clean yum cache
${package_manager} clean all
${package_manager} -y install yum-utils
DIST=$(rpm -q --whatprovides redhat-release || rpm -q --whatprovides centos-release);
DIST=$(echo $DIST | sed -n '/-.*/s///p');
REV=$(cat /etc/redhat-release | sed s/.*release\ // | sed s/\ .*//);
REV_PARTS=(${REV//\./ });
REV=${REV_PARTS[0]};
if ! [[ "$REV" =~ ^[0-9]+$ ]]; then
REV=7;
fi
{ yum check-update postgresql; PSQLExitCode=$?; } || true #Checking for postgresql update
{ yum check-update $DIST*-release; exitCode=$?; } || true #Checking for distribution update
UPDATE_AVAILABLE_CODE=100
if [[ $exitCode -eq $UPDATE_AVAILABLE_CODE ]]; then
res_unsupported_version
echo $RES_UNSUPPORTED_VERSION
echo $RES_SELECT_INSTALLATION
echo $RES_ERROR_REMINDER
echo $RES_QUESTIONS
read_unsupported_installation
fi
if rpm -qa | grep mariadb.*config >/dev/null 2>&1; then
echo $RES_MARIADB && exit 0
fi
#Add repositories: EPEL, REMI and RPMFUSION
rpm -ivh https://dl.fedoraproject.org/pub/epel/epel-release-latest-$REV.noarch.rpm || true
rpm -ivh https://rpms.remirepo.net/enterprise/remi-release-$REV.rpm || true
yum localinstall -y --nogpgcheck https://download1.rpmfusion.org/free/el/rpmfusion-free-release-$REV.noarch.rpm
if [ "$REV" = "9" ]; then
[ $DIST != "redhat" ] && TESTING_REPO="--enablerepo=crb" || /usr/bin/crb enable
update-crypto-policies --set DEFAULT:SHA1
elif [ "$REV" = "8" ]; then
[ $DIST != "redhat" ] && POWERTOOLS_REPO="--enablerepo=powertools" || /usr/bin/crb enable
fi
#add rabbitmq & erlang repo
curl -s https://packagecloud.io/install/repositories/rabbitmq/rabbitmq-server/script.rpm.sh | os=centos dist=$REV bash
curl -s https://packagecloud.io/install/repositories/rabbitmq/erlang/script.rpm.sh | os=centos dist=$REV bash
#add nodejs repo
[ "$REV" = "8" ] && NODEJS_OPTION="--setopt=nodesource-nodejs.module_hotfixes=1"
[ "$REV" = "7" ] && NODE_VERSION="16" || NODE_VERSION="18"
yum install -y https://rpm.nodesource.com/pub_${NODE_VERSION}.x/nodistro/repo/nodesource-release-nodistro-1.noarch.rpm || true
#add dotnet repo
[ $REV = "7" ] && rpm -Uvh https://packages.microsoft.com/config/centos/$REV/packages-microsoft-prod.rpm || true
#add mysql repo
[ "$REV" != "7" ] && dnf remove -y @mysql && dnf module -y reset mysql && dnf module -y disable mysql
MYSQL_REPO_VERSION="$(curl https://repo.mysql.com | grep -oP "mysql80-community-release-el${REV}-\K.*" | grep -o '^[^.]*' | sort | tail -n1)"
yum localinstall -y https://repo.mysql.com/mysql80-community-release-el${REV}-${MYSQL_REPO_VERSION}.noarch.rpm || true
if ! rpm -q mysql-community-server; then
MYSQL_FIRST_TIME_INSTALL="true";
fi
#add elasticsearch repo
ELASTIC_VERSION="7.16.3"
ELASTIC_DIST=$(echo $ELASTIC_VERSION | awk '{ print int($1) }')
rpm --import https://artifacts.elastic.co/GPG-KEY-elasticsearch
cat > /etc/yum.repos.d/elasticsearch.repo <<END
[elasticsearch]
name=Elasticsearch repository for ${ELASTIC_DIST}.x packages
baseurl=https://artifacts.elastic.co/packages/${ELASTIC_DIST}.x/yum
gpgcheck=1
gpgkey=https://artifacts.elastic.co/GPG-KEY-elasticsearch
enabled=0
autorefresh=1
type=rpm-md
END
# add nginx repo
cat > /etc/yum.repos.d/nginx.repo <<END
[nginx-stable]
name=nginx stable repo
baseurl=https://nginx.org/packages/centos/$REV/\$basearch/
gpgcheck=1
enabled=1
gpgkey=https://nginx.org/keys/nginx_signing.key
module_hotfixes=true
END
rpm --import https://openresty.org/package/pubkey.gpg
OPENRESTY_REPO_FILE=$( [[ "$REV" -ge 9 ]] && echo "openresty2.repo" || echo "openresty.repo" )
curl -o /etc/yum.repos.d/openresty.repo "https://openresty.org/package/centos/${OPENRESTY_REPO_FILE}"
${package_manager} -y install epel-release \
python3 \
nodejs ${NODEJS_OPTION} \
dotnet-sdk-7.0 \
elasticsearch-${ELASTIC_VERSION} --enablerepo=elasticsearch \
mysql-server \
postgresql \
postgresql-server \
rabbitmq-server$rabbitmq_version \
redis --enablerepo=remi \
SDL2 $POWERTOOLS_REPO \
expect \
ffmpeg $TESTING_REPO
if [[ $PSQLExitCode -eq $UPDATE_AVAILABLE_CODE ]]; then
yum -y install postgresql-upgrade
postgresql-setup --upgrade || true
fi
postgresql-setup initdb || true
semanage permissive -a httpd_t
package_services="rabbitmq-server postgresql redis mysqld"

View File

@ -1,67 +0,0 @@
#!/bin/bash
set -e
function make_swap () {
local DISK_REQUIREMENTS=6144; #6Gb free space
local MEMORY_REQUIREMENTS=11000; #RAM ~12Gb
SWAPFILE="/${PRODUCT}_swapfile";
local AVAILABLE_DISK_SPACE=$(df -m / | tail -1 | awk '{ print $4 }');
local TOTAL_MEMORY=$(free -m | grep -oP '\d+' | head -n 1);
local EXIST=$(swapon -s | awk '{ print $1 }' | { grep -x ${SWAPFILE} || true; });
if [[ -z $EXIST ]] && [ ${TOTAL_MEMORY} -lt ${MEMORY_REQUIREMENTS} ] && [ ${AVAILABLE_DISK_SPACE} -gt ${DISK_REQUIREMENTS} ]; then
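# dd is used instead of fallocate so the swap file is written without holes (swapon rejects sparse files on some filesystems)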
dd if=/dev/zero of=${SWAPFILE} count=6144 bs=1MiB
chmod 600 ${SWAPFILE}
mkswap ${SWAPFILE}
swapon ${SWAPFILE}
echo "$SWAPFILE none swap sw 0 0" >> /etc/fstab
fi
}
check_hardware () {
DISK_REQUIREMENTS=40960;
MEMORY_REQUIREMENTS=8192;
CORE_REQUIREMENTS=4;
AVAILABLE_DISK_SPACE=$(df -m / | tail -1 | awk '{ print $4 }');
if [ ${AVAILABLE_DISK_SPACE} -lt ${DISK_REQUIREMENTS} ]; then
echo "Minimal requirements are not met: need at least $DISK_REQUIREMENTS MB of free HDD space"
exit 1;
fi
TOTAL_MEMORY=$(free -m | grep -oP '\d+' | head -n 1);
if [ ${TOTAL_MEMORY} -lt ${MEMORY_REQUIREMENTS} ]; then
echo "Minimal requirements are not met: need at least $MEMORY_REQUIREMENTS MB of RAM"
exit 1;
fi
CPU_CORES_NUMBER=$(cat /proc/cpuinfo | grep processor | wc -l);
if [ ${CPU_CORES_NUMBER} -lt ${CORE_REQUIREMENTS} ]; then
echo "The system does not meet the minimal hardware requirements. CPU with at least $CORE_REQUIREMENTS cores is required"
exit 1;
fi
}
if [ "$SKIP_HARDWARE_CHECK" != "true" ]; then
check_hardware
fi
read_unsupported_installation () {
read -p "$RES_CHOICE_INSTALLATION " CHOICE_INSTALLATION
case "$CHOICE_INSTALLATION" in
y|Y ) yum -y install $DIST*-release
;;
n|N ) exit 0;
;;
* ) echo $RES_CHOICE;
read_unsupported_installation
;;
esac
}

View File

@ -1,132 +0,0 @@
import base64
import hashlib
import hmac
import json
import sys
import os
import http.client
from radicale.auth import BaseAuth
from radicale.log import logger
import platform
from urllib.parse import urlparse
if platform.system() == "Linux":
    sys.path.insert(0, '/usr/lib/python3/site-packages')
import requests
from urllib import request
from urllib.parse import urlsplit
from datetime import datetime, date, time
PLUGIN_CONFIG_SCHEMA = {
    "auth": {
        "portal_url": {"value": "", "type": str},
        "machine_key": {"value": "", "type": str}
    }
}
class Auth(BaseAuth):
def __init__(self, configuration):
super().__init__(configuration.copy(PLUGIN_CONFIG_SCHEMA))
def create_auth_token(self, pkey, machine_key):
machine_key = bytes(machine_key, 'UTF-8')
now = datetime.strftime(datetime.utcnow(), "%Y%m%d%H%M%S")
message = bytes('{0}\n{1}'.format(now, pkey), 'UTF-8')
_hmac = hmac.new(machine_key, message, hashlib.sha1)
signature = str(base64.urlsafe_b64encode(_hmac.digest()), 'UTF-8')
signature = signature.replace('-', '+')
signature = signature.replace('_', '/')
token = 'ASC {0}:{1}:{2}'.format(pkey, now, signature)
logger.info('Auth token: %r', token)
return token
def get_external_login(self, environ):
self._environ = environ
portal = ""
if self._environ.get("PATH_INFO"):
if len(self._environ.get("PATH_INFO").split('/')) >= 2:
userName = self._environ.get("PATH_INFO").split('/')[1]
if userName.find('@')!=-1:
portal = userName.split('@')[2]
if self._environ.get("HTTP_X_REWRITER_URL"):
os.environ[portal + 'HTTP_X_REWRITER_URL'] = self._environ["HTTP_X_REWRITER_URL"] # hack: common value for all modules
else:
urlScheme = ""
try:
c = http.client.HTTPSConnection(portal)
c.request("GET", "/")
response = c.getresponse()
urlScheme = "https"
os.environ[portal + 'HTTP_X_REWRITER_URL'] = self._environ["HTTP_X_REWRITER_URL"]
except:
urlScheme = "http"
os.environ[portal + 'HTTP_X_REWRITER_URL'] = urlScheme + "://" + portal
return()
def login(self, login, password):
portal_url = ""
machine_key = self.configuration.get("auth", "machine_key")
auth_token = self.create_auth_token("radicale", machine_key)
portal = ""
if self._environ.get("PATH_INFO"):
if len(self._environ.get("PATH_INFO").split('/')) >= 2:
userName = self._environ.get("PATH_INFO").split('/')[1]
if userName.find('@')!=-1:
portal = userName.split('@')[2]
remote_host = ""
rewriter_url = ""
if os.environ[portal + 'HTTP_X_REWRITER_URL']:
rewriter_url = os.environ[portal + 'HTTP_X_REWRITER_URL']
parsed_uri = urlparse(rewriter_url)
if parsed_uri.netloc != '':
remote_host = parsed_uri.netloc.replace("'", "").split(':')[0]
elif parsed_uri.path != '':
remote_host = parsed_uri.path.replace("'", "").split(':')[0]
else:
logger.error("Authenticated error. Parse REWRITER_URL")
return ""
else:
logger.error("Authenticated error. not exist HTTP_X_REWRITER_URL")
return ""
try:
logger.info('Remote host: %r', remote_host)
portal_url = self.configuration.get("auth", "portal_url")
url = portal_url+"/is_caldav_authenticated"
payload = {'User': login+"@"+remote_host, 'Password': password}
headers = {'Content-type': 'application/json', 'Authorization': auth_token, 'HTTP_X_REWRITER_URL': rewriter_url}
res = requests.post(url, data=json.dumps(payload), headers=headers)
except Exception:
logger.error("Authentication error: portal API request failed")
return ""
try:
response = res.json()
except Exception:
logger.error("Authentication error: portal API returned invalid JSON")
return ""
if res.status_code != 200:
logger.error("Error login response: %r", response)
return ""
if 'error' in response:
logger.error("Error login response: %r", response)
return ""
else:
if 'value' in response:
if response['value'] != "true":
logger.error("Error login response: %r", response)
return ""
else:
return login+"@"+remote_host
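
The token the plugin sends is only half of the handshake: the portal is expected to recompute the same HMAC-SHA1 over "timestamp\npkey" with the shared machine key and compare signatures. A minimal verification sketch under that assumption, reusing the imports at the top of this file (the helper name and skew window are illustrative, not part of the plugin):

def verify_auth_token(token, machine_key, max_skew_seconds=300):
    # Token format produced above: "ASC <pkey>:<yyyyMMddHHmmss>:<signature>"
    scheme, _, rest = token.partition(' ')
    if scheme != 'ASC':
        return False
    pkey, created, signature = rest.split(':', 2)
    # Recompute the HMAC-SHA1 exactly as create_auth_token does
    message = bytes('{0}\n{1}'.format(created, pkey), 'UTF-8')
    digest = hmac.new(bytes(machine_key, 'UTF-8'), message, hashlib.sha1).digest()
    expected = str(base64.urlsafe_b64encode(digest), 'UTF-8')
    expected = expected.replace('-', '+').replace('_', '/')
    if not hmac.compare_digest(expected, signature):
        return False
    # Reject tokens outside the allowed clock-skew window
    age = (datetime.utcnow() - datetime.strptime(created, "%Y%m%d%H%M%S")).total_seconds()
    return abs(age) <= max_skew_seconds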

View File

@ -1,5 +0,0 @@
#!/usr/bin/env python3
from distutils.core import setup
setup(name="app_auth_plugin", version='1.0.0', packages=["app_auth_plugin"])

View File

@ -1,67 +0,0 @@
import configparser
import re
from radicale import pathutils, rights
from radicale.log import logger
class Rights(rights.BaseRights):
def __init__(self, configuration):
super().__init__(configuration)
self._filename = configuration.get("rights", "file")
self.full_write_file_path = ""
def authorization(self, user, path):
user = user or ""
sane_path = pathutils.strip_path(path)
# Prevent "regex injection"
escaped_user = re.escape(user)
rights_config = configparser.ConfigParser()
try:
if not rights_config.read(self._filename):
raise RuntimeError("No such file: %r" %
self._filename)
except Exception as e:
raise RuntimeError("Failed to load rights file %r: %s" %
(self._filename, e)) from e
if path=="/" or path=='':
return rights_config.get("owner-write", "permissions")
if path.find("_write.ics") != -1:
self.full_write_file_path = path
elif path.find(".ics") != -1 and path.find("_write.ics") == -1:
self.full_write_file_path = ""
for section in rights_config.sections():
try:
user_pattern = rights_config.get(section, "user")
collection_pattern = rights_config.get(section, "collection")
# Use empty format() for harmonized handling of curly braces
user_match = re.fullmatch(user_pattern.format(), user)
collection_match = user_match and re.fullmatch(
collection_pattern.format(
*map(re.escape, user_match.groups()),
user=escaped_user), sane_path)
file_match = bool(self.full_write_file_path) and path in self.full_write_file_path
except Exception as e:
raise RuntimeError("Error in section %r of rights file %r: "
"%s" % (section, self._filename, e)) from e
if user_match and collection_match:
if file_match and section == "allow-readonly":
logger.debug("Rule %r:%r matches %r:%r from section %r Full Access",
user, sane_path, user_pattern,
collection_pattern, section)
self.full_write_file_path = ""
return rights_config.get("admin", "permissions")
else:
logger.debug("Rule %r:%r matches %r:%r from section %r",
user, sane_path, user_pattern,
collection_pattern, section)
return rights_config.get(section, "permissions")
logger.debug("Rule %r:%r doesn't match %r:%r from section %r",
user, sane_path, user_pattern, collection_pattern,
section)
logger.info("Rights: %r:%r doesn't match any section", user, sane_path)
return ""

View File

@ -1,5 +0,0 @@
#!/usr/bin/env python3
from distutils.core import setup
setup(name="app_rights_plugin", version='1.0.0', packages=["app_rights_plugin"])

View File

@ -1,177 +0,0 @@
# This file is part of Radicale Server - Calendar Server
# Copyright © 2014 Jean-Marc Martins
# Copyright © 2012-2017 Guillaume Ayoub
# Copyright © 2017-2019 Unrud <unrud@outlook.com>
#
# This library is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
"""
Storage backend that stores data in the file system.
Uses one folder per collection and one file per collection entry.
"""
import contextlib
import os
import sys
import time
import platform
from itertools import chain
from tempfile import TemporaryDirectory
from radicale import pathutils, storage
from radicale.storage.multifilesystem.create_collection import \
StorageCreateCollectionMixin
from app_store_plugin.delete import CollectionDeleteMixin
from radicale.storage.multifilesystem.discover import StorageDiscoverMixin
from radicale.storage.multifilesystem.get import CollectionGetMixin
from app_store_plugin.history import CollectionHistoryMixin
from radicale.storage.multifilesystem.lock import (CollectionLockMixin,
StorageLockMixin)
from radicale.storage.multifilesystem.meta import CollectionMetaMixin
from radicale.storage.multifilesystem.move import StorageMoveMixin
from app_store_plugin.sync import CollectionSyncMixin
from app_store_plugin.upload import CollectionUploadMixin
from app_store_plugin.cache import CollectionCacheMixin
from app_store_plugin import log
from radicale.storage.multifilesystem.verify import StorageVerifyMixin
PLUGIN_CONFIG_SCHEMA = {
"storage": {
"portal_url": {"value": "", "type": str}
}
}
class Collection(
CollectionCacheMixin, CollectionDeleteMixin, CollectionGetMixin,
CollectionHistoryMixin, CollectionLockMixin, CollectionMetaMixin,
CollectionSyncMixin, CollectionUploadMixin, storage.BaseCollection):
def __init__(self, storage_, path, filesystem_path=None):
self._storage = storage_
folder = self._storage._get_collection_root_folder()
# Path should already be sanitized
self._path = pathutils.strip_path(path)
self._encoding = self._storage.configuration.get("encoding", "stock")
if filesystem_path is None:
filesystem_path = pathutils.path_to_filesystem(folder, self.path)
self._filesystem_path = filesystem_path
self._etag_cache = None
# Start logging
filename = os.path.expanduser("radicale.log.config")
try:
log.start("radicale", filename)
except Exception as e:
print("ERROR: Failed to start logger: %s" % e, file=sys.stderr)
super().__init__()
@property
def path(self):
return self._path
@contextlib.contextmanager
def _atomic_write(self, path, mode="w", newline=None):
parent_dir, name = os.path.split(path)
prefix = ''
# Do not use mkstemp because it creates with permissions 0o600
with TemporaryDirectory(
prefix=".Radicale.tmp-", dir=parent_dir) as tmp_dir:
with open(prefix+os.path.join(tmp_dir, name), mode, newline=newline,
encoding=None if "b" in mode else self._encoding) as tmp:
yield tmp
tmp.flush()
self._storage._fsync(tmp)
os.replace(prefix+os.path.join(tmp_dir, name), path)
self._storage._sync_directory(parent_dir)
@property
def last_modified(self):
relevant_files = chain(
(self._filesystem_path,),
(self._props_path,) if os.path.exists(self._props_path) else (),
(os.path.join(self._filesystem_path, h) for h in self._list()))
last = max(map(os.path.getmtime, relevant_files))
return time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(last))
@property
def etag(self):
# reuse cached value if the storage is read-only
if self._storage._lock.locked == "w" or self._etag_cache is None:
self._etag_cache = super().etag
return self._etag_cache
class Storage(
StorageCreateCollectionMixin, StorageDiscoverMixin, StorageLockMixin,
StorageMoveMixin, StorageVerifyMixin, storage.BaseStorage):
_collection_class = Collection
def __init__(self, configuration):
super().__init__(configuration.copy(PLUGIN_CONFIG_SCHEMA))
folder = configuration.get("storage", "filesystem_folder")
self._makedirs_synced(folder)
def _get_collection_root_folder(self):
filesystem_folder = self.configuration.get(
"storage", "filesystem_folder")
return os.path.join(filesystem_folder, "collection-root")
def _fsync(self, f):
if self.configuration.get("storage", "_filesystem_fsync"):
try:
pathutils.fsync(f.fileno())
except OSError as e:
raise RuntimeError("Fsync'ing file %r failed: %s" %
(f.name, e)) from e
def _sync_directory(self, path):
"""Sync directory to disk.
This only works on POSIX and does nothing on other systems.
"""
if not self.configuration.get("storage", "_filesystem_fsync"):
return
if os.name == "posix":
try:
fd = os.open(path, 0)
try:
pathutils.fsync(fd)
finally:
os.close(fd)
except OSError as e:
raise RuntimeError("Fsync'ing directory %r failed: %s" %
(path, e)) from e
def _makedirs_synced(self, filesystem_path):
"""Recursively create a directory and its parents in a sync'ed way.
This method acts silently when the folder already exists.
"""
if os.path.isdir(filesystem_path):
return
parent_filesystem_path = os.path.dirname(filesystem_path)
# Prevent infinite loop
if filesystem_path != parent_filesystem_path:
# Create parent dirs recursively
self._makedirs_synced(parent_filesystem_path)
# Possible race!
os.makedirs(filesystem_path, exist_ok=True)
self._sync_directory(parent_filesystem_path)
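
Both `_atomic_write` and `_makedirs_synced` above are instances of the classic write-temp-rename-fsync durability recipe; condensed into one standalone function for clarity (the function name is illustrative):

import os
from tempfile import TemporaryDirectory

def atomic_write(path, data, encoding="utf-8"):
    parent_dir, name = os.path.split(path)
    # A sibling temp dir keeps os.replace on a single filesystem, so the rename is atomic
    with TemporaryDirectory(prefix=".tmp-", dir=parent_dir) as tmp_dir:
        tmp_path = os.path.join(tmp_dir, name)
        with open(tmp_path, "w", encoding=encoding) as tmp:
            tmp.write(data)
            tmp.flush()
            os.fsync(tmp.fileno())  # contents reach the disk before the rename
        os.replace(tmp_path, path)
    fd = os.open(parent_dir, 0)  # POSIX: sync the directory so the rename itself is durable
    try:
        os.fsync(fd)
    finally:
        os.close(fd)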

View File

@ -1,98 +0,0 @@
import os
import pickle
import time
import platform
from hashlib import sha256
from radicale import pathutils, storage
from radicale.log import logger
class CollectionCacheMixin:
def _clean_cache(self, folder, names, max_age=None):
"""Delete all ``names`` in ``folder`` that are older than ``max_age``.
"""
age_limit = time.time() - max_age if max_age is not None else None
modified = False
for name in names:
if not pathutils.is_safe_filesystem_path_component(name):
continue
if age_limit is not None:
try:
# Race: Another process might have deleted the file.
mtime = os.path.getmtime(os.path.join(folder, name))
except FileNotFoundError:
continue
if mtime > age_limit:
continue
logger.debug("Found expired item in cache: %r", name)
# Race: Another process might have deleted or locked the
# file.
try:
os.remove(os.path.join(folder, name))
except (FileNotFoundError, PermissionError):
continue
modified = True
if modified:
self._storage._sync_directory(folder)
@staticmethod
def _item_cache_hash(raw_text):
_hash = sha256()
_hash.update(storage.CACHE_VERSION)
_hash.update(raw_text)
return _hash.hexdigest()
def _item_cache_content(self, item, cache_hash=None):
text = item.serialize()
if cache_hash is None:
cache_hash = self._item_cache_hash(text.encode(self._encoding))
return (cache_hash, item.uid, item.etag, text, item.name,
item.component_name, *item.time_range)
def _store_item_cache(self, href, item, cache_hash=None):
prefix = ''
if platform.system() == 'Windows':
prefix = '\\\\?\\'
cache_folder = os.path.join(prefix+self._filesystem_path, ".Radicale.cache",
"item")
content = self._item_cache_content(item, cache_hash)
self._storage._makedirs_synced(cache_folder)
try:
# Race: Other processes might have created and locked the
# file.
with self._atomic_write(os.path.join(cache_folder, href),
"wb") as f:
pickle.dump(content, f)
except PermissionError:
pass
return content
def _load_item_cache(self, href, input_hash):
prefix = ''
if platform.system() == 'Windows':
prefix = '\\\\?\\'
cache_folder = os.path.join(prefix+self._filesystem_path, ".Radicale.cache",
"item")
cache_hash = uid = etag = text = name = tag = start = end = None
try:
with open(os.path.join(cache_folder, href), "rb") as f:
cache_hash, *content = pickle.load(f)
if cache_hash == input_hash:
uid, etag, text, name, tag, start, end = content
except FileNotFoundError:
pass
except (pickle.UnpicklingError, ValueError) as e:
logger.warning("Failed to load item cache entry %r in %r: %s",
href, self.path, e, exc_info=True)
return cache_hash, uid, etag, text, name, tag, start, end
def _clean_item_cache(self):
prefix = ''
if platform.system() == 'Windows':
prefix = '\\\\?\\'
cache_folder = os.path.join(prefix+self._filesystem_path, ".Radicale.cache",
"item")
self._clean_cache(cache_folder, (
e.name for e in os.scandir(cache_folder) if not
os.path.isfile(os.path.join(self._filesystem_path, e.name))))
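
The cache key used throughout this mixin is just SHA-256 over a version marker concatenated with the raw iCalendar bytes, so bumping the version or touching the text invalidates every dependent entry. In isolation (with an illustrative stand-in for storage.CACHE_VERSION):

from hashlib import sha256

CACHE_VERSION = b"v1;"  # illustrative stand-in for storage.CACHE_VERSION

def item_cache_hash(raw_text):
    _hash = sha256()
    _hash.update(CACHE_VERSION)
    _hash.update(raw_text)
    return _hash.hexdigest()

# Identical text yields an identical key, so the pickled entry can be reused:
assert item_cache_hash(b"BEGIN:VCALENDAR") == item_cache_hash(b"BEGIN:VCALENDAR")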

View File

@ -1,52 +0,0 @@
import os
from tempfile import TemporaryDirectory
from radicale import pathutils, storage
import requests
from urllib import request
class CollectionDeleteMixin:
def delete(self, href=None):
if href is not None:
user = self.path.split("/")[0]
domain = user.split("@")[2]
self.delete_event_portals(self.path+"/"+href, domain)
if href is None:
# Delete the collection
parent_dir = os.path.dirname(self._filesystem_path)
try:
os.rmdir(self._filesystem_path)
except OSError:
with TemporaryDirectory(
prefix=".Radicale.tmp-", dir=parent_dir) as tmp:
os.rename(self._filesystem_path, os.path.join(
tmp, os.path.basename(self._filesystem_path)))
self._storage._sync_directory(parent_dir)
else:
self._storage._sync_directory(parent_dir)
else:
# Delete an item
if not pathutils.is_safe_filesystem_path_component(href):
raise pathutils.UnsafePathError(href)
path = pathutils.path_to_filesystem(self._filesystem_path, href)
if not os.path.isfile(path):
raise storage.ComponentNotFoundError(href)
os.remove(path)
self._storage._sync_directory(os.path.dirname(path))
# Track the change
self._update_history_etag(href, None)
self._clean_history()
def delete_event_portals(self, path, domain):
portal = ""
userName = path.split('/')[0]
portal = userName.split('@')[2]
rewriter_url = os.environ.get(portal + 'HTTP_X_REWRITER_URL', '')
portal_url = self._storage.configuration.get("storage", "portal_url")
machine_key = self._storage.configuration.get("auth", "machine_key")
auth_token = self.create_auth_token("radicale", machine_key)
headers = {'Authorization': auth_token, 'HTTP_X_REWRITER_URL': rewriter_url if rewriter_url.find(domain) != -1 else ""}
url = portal_url+"/caldav_delete_event?eventInfo={}".format (path)
resp = requests.get(url, headers=headers)
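
One caveat in `delete_event_portals`: the collection path goes into the query string unencoded, and these paths contain '@' and '/' characters. If the portal endpoint expects a percent-encoded value, the URL should be built along these lines (a sketch, not the plugin's current behaviour):

from urllib.parse import urlencode

def build_delete_url(portal_url, path):
    # urlencode percent-encodes the value, so '@' and '/' survive the query string
    return portal_url + "/caldav_delete_event?" + urlencode({"eventInfo": path})

# build_delete_url("https://portal", "bob@example.com@portal/cal/event.ics")
# -> "https://portal/caldav_delete_event?eventInfo=bob%40example.com%40portal%2Fcal%2Fevent.ics"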

View File

@ -1,73 +0,0 @@
import binascii
import os
import pickle
import platform
from radicale import item as radicale_item
from radicale import pathutils
from radicale.log import logger
class CollectionHistoryMixin:
def _update_history_etag(self, href, item):
"""Updates and retrieves the history etag from the history cache.
The history cache contains a file for each current and deleted item
of the collection. These files contain the etag of the item (empty
string for deleted items) and a history etag, which is a hash over
the previous history etag and the etag separated by "/".
"""
prefix = ''
if platform.system() == 'Windows':
prefix = '\\\\?\\'
history_folder = os.path.join(prefix+self._filesystem_path,
".Radicale.cache", "history")
try:
with open(os.path.join(history_folder, href), "rb") as f:
cache_etag, history_etag = pickle.load(f)
except (FileNotFoundError, pickle.UnpicklingError, ValueError) as e:
if isinstance(e, (pickle.UnpicklingError, ValueError)):
logger.warning(
"Failed to load history cache entry %r in %r: %s",
href, self.path, e, exc_info=True)
cache_etag = ""
# Initialize with random data to prevent collisions with cleaned
# expired items.
history_etag = binascii.hexlify(os.urandom(16)).decode("ascii")
etag = item.etag if item else ""
if etag != cache_etag:
self._storage._makedirs_synced(history_folder)
history_etag = radicale_item.get_etag(
history_etag + "/" + etag).strip("\"")
try:
# Race: Other processes might have created and locked the file.
with self._atomic_write(os.path.join(history_folder, href),
"wb") as f:
pickle.dump([etag, history_etag], f)
except PermissionError:
pass
return history_etag
def _get_deleted_history_hrefs(self):
"""Returns the hrefs of all deleted items that are still in the
history cache."""
history_folder = os.path.join(self._filesystem_path,
".Radicale.cache", "history")
try:
for entry in os.scandir(history_folder):
href = entry.name
if not pathutils.is_safe_filesystem_path_component(href):
continue
if os.path.isfile(os.path.join(self._filesystem_path, href)):
continue
yield href
except FileNotFoundError:
pass
def _clean_history(self):
# Delete all expired history entries of deleted items.
history_folder = os.path.join(self._filesystem_path,
".Radicale.cache", "history")
self._clean_cache(history_folder, self._get_deleted_history_hrefs(),
max_age=self._storage.configuration.get(
"storage", "max_sync_token_age"))

View File

@ -1,51 +0,0 @@
import logging
import logging.config
import signal
import sys
def configure_from_file(logger, filename, debug):
logging.config.fileConfig(filename, disable_existing_loggers=False)
if debug:
logger.setLevel(logging.DEBUG)
for handler in logger.handlers:
handler.setLevel(logging.DEBUG)
return logger
class RemoveTracebackFilter(logging.Filter):
def filter(self, record):
record.exc_info = None
return True
def start(name="radicale", filename=None, debug=False):
"""Start the logging according to the configuration."""
logger = logging.getLogger(name)
if debug:
logger.setLevel(logging.DEBUG)
else:
logger.addFilter(RemoveTracebackFilter())
if filename:
# Configuration taken from file
try:
configure_from_file(logger, filename, debug)
except Exception as e:
raise RuntimeError("Failed to load logging configuration file %r: "
"%s" % (filename, e)) from e
# Reload config on SIGHUP (UNIX only)
if hasattr(signal, "SIGHUP"):
def handler(signum, frame):
try:
configure_from_file(logger, filename, debug)
except Exception as e:
logger.error("Failed to reload logging configuration file "
"%r: %s", filename, e, exc_info=True)
signal.signal(signal.SIGHUP, handler)
else:
# Default configuration, standard output
handler = logging.StreamHandler(sys.stderr)
handler.setFormatter(
logging.Formatter("[%(thread)x] %(levelname)s: %(message)s"))
logger.addHandler(handler)
return logger
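
Usage is a single call at process start; with no config file the helper falls back to a stderr handler, which is the easiest way to smoke-test a plugin (a sketch using the `start` defined above):

logger = start("radicale", filename=None, debug=True)  # stderr handler at DEBUG level
logger.debug("logging configured")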

View File

@ -1,106 +0,0 @@
import itertools
import os
import pickle
import platform
from hashlib import sha256
from radicale.log import logger
class CollectionSyncMixin:
def sync(self, old_token=None):
# The sync token has the form http://radicale.org/ns/sync/TOKEN_NAME
# where TOKEN_NAME is the sha256 hash of all history etags of present
# and past items of the collection.
def check_token_name(token_name):
if len(token_name) != 64:
return False
for c in token_name:
if c not in "0123456789abcdef":
return False
return True
old_token_name = None
if old_token:
# Extract the token name from the sync token
if not old_token.startswith("http://radicale.org/ns/sync/"):
raise ValueError("Malformed token: %r" % old_token)
old_token_name = old_token[len("http://radicale.org/ns/sync/"):]
if not check_token_name(old_token_name):
raise ValueError("Malformed token: %r" % old_token)
# Get the current state and sync-token of the collection.
state = {}
token_name_hash = sha256()
# Find the history of all existing and deleted items
for href, item in itertools.chain(
((item.href, item) for item in self.get_all()),
((href, None) for href in self._get_deleted_history_hrefs())):
history_etag = self._update_history_etag(href, item)
state[href] = history_etag
token_name_hash.update((href + "/" + history_etag).encode())
token_name = token_name_hash.hexdigest()
token = "http://radicale.org/ns/sync/%s" % token_name
if token_name == old_token_name:
# Nothing changed
return token, ()
token_folder = os.path.join(self._filesystem_path,
".Radicale.cache", "sync-token")
token_path = os.path.join(token_folder, token_name)
old_state = {}
if old_token_name:
# load the old token state
old_token_path = os.path.join(token_folder, old_token_name)
try:
# Race: Another process might have deleted the file.
with open(old_token_path, "rb") as f:
old_state = pickle.load(f)
except (FileNotFoundError, pickle.UnpicklingError,
ValueError) as e:
if isinstance(e, (pickle.UnpicklingError, ValueError)):
logger.warning(
"Failed to load stored sync token %r in %r: %s",
old_token_name, self.path, e, exc_info=True)
# Delete the damaged file
try:
os.remove(old_token_path)
except (FileNotFoundError, PermissionError):
pass
raise ValueError("Token not found: %r" % old_token)
# write the new token state or update the modification time of
# existing token state
if not os.path.exists(token_path):
self._storage._makedirs_synced(token_folder)
try:
# Race: Other processes might have created and locked the file.
prefix = ''
if platform.system() == 'Windows':
prefix = '\\\\?\\'
with self._atomic_write(prefix+token_path, "wb") as f:
pickle.dump(state, f)
except PermissionError:
pass
else:
# clean up old sync tokens and item cache
self._clean_cache(token_folder, os.listdir(token_folder),
max_age=self._storage.configuration.get(
"storage", "max_sync_token_age"))
self._clean_history()
else:
# Try to update the modification time
try:
# Race: Another process might have deleted the file.
os.utime(token_path)
except FileNotFoundError:
pass
changes = []
# Find all new, changed and deleted (that are still in the item cache)
# items
for href, history_etag in state.items():
if history_etag != old_state.get(href):
changes.append(href)
# Find all deleted items that are no longer in the item cache
for href, history_etag in old_state.items():
if href not in state:
changes.append(href)
return token, changes
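
Stripped of the token bookkeeping, the change list is a comparison of two href-to-history-etag maps; the same computation in isolation:

def compute_changes(old_state, state):
    changes = []
    # New or modified items: the history etag differs from the stored one
    for href, history_etag in state.items():
        if history_etag != old_state.get(href):
            changes.append(href)
    # Items deleted since the old token was issued
    for href in old_state:
        if href not in state:
            changes.append(href)
    return changes

# compute_changes({"a.ics": "1", "b.ics": "2"}, {"a.ics": "9"}) -> ["a.ics", "b.ics"]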

View File

@ -1,151 +0,0 @@
# This file is part of Radicale Server - Calendar Server
# Copyright © 2014 Jean-Marc Martins
# Copyright © 2012-2017 Guillaume Ayoub
# Copyright © 2017-2018 Unrud <unrud@outlook.com>
#
# This library is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
import os
import pickle
from radicale import item as radicale_item
from radicale import pathutils
from radicale.log import logger
from datetime import datetime
import time
import base64
import hashlib
import hmac
import requests
from urllib import request
from threading import Thread
from configparser import RawConfigParser
class CollectionUploadMixin:
def upload(self, href, item):
if not pathutils.is_safe_filesystem_path_component(href):
raise pathutils.UnsafePathError(href)
try:
self._store_item_cache(href, item)
except Exception as e:
raise ValueError("Failed to store item %r in collection %r: %s" %
(href, self.path, e)) from e
path = pathutils.path_to_filesystem(self._filesystem_path, href)
with self._atomic_write(path, newline="") as fd:
fd.write(item.serialize())
# Clean the cache after the actual item is stored, or the cache entry
# will be removed again.
self._clean_item_cache()
# Track the change
user = self.path.split("/")[0]
domain = user.split("@")[2]
try:
if item.serialize().find("PRODID:-//Office//Portal//EN") == -1:
th = Thread(target=self.set_to_portals, args=(self.path + "/" + href,domain))
th.start()
except Exception:
logger.error("Portal sending error.")
self._update_history_etag(href, item)
self._clean_history()
return self._get(href, verify_href=False)
def create_auth_token(self, pkey, machine_key):
machine_key = bytes(machine_key, 'UTF-8')
now = datetime.strftime(datetime.utcnow(), "%Y%m%d%H%M%S")
message = bytes('{0}\n{1}'.format(now, pkey), 'UTF-8')
_hmac = hmac.new(machine_key, message, hashlib.sha1)
signature = str(base64.urlsafe_b64encode(_hmac.digest()), 'UTF-8')
signature = signature.replace('-', '+')
signature = signature.replace('_', '/')
token = 'ASC {0}:{1}:{2}'.format(pkey, now, signature)
logger.debug('Auth token: %r', token)
return token
def set_to_portals(self, path, domain):
portal = ""
userName = path.split('/')[0]
portal = userName.split('@')[2]
rewriter_url = os.environ.get("localhost" + 'HTTP_X_REWRITER_URL', '')
portal_url = self._storage.configuration.get("storage", "portal_url")
machine_key = self._storage.configuration.get("auth", "machine_key")
auth_token = self.create_auth_token("radicale", machine_key)
headers = {'Authorization': auth_token, 'HTTP_X_REWRITER_URL': rewriter_url if rewriter_url.find(domain) != -1 else "" }
url = portal_url+"/change_to_storage?change={}".format (path)
resp = requests.get(url, headers=headers)
def _upload_all_nonatomic(self, items, suffix=""):
"""Upload a new set of items.
This takes a list of vobject items and
uploads them nonatomic and without existence checks.
"""
cache_folder = os.path.join(self._filesystem_path,
".Radicale.cache", "item")
self._storage._makedirs_synced(cache_folder)
hrefs = set()
for item in items:
uid = item.uid
try:
cache_content = self._item_cache_content(item)
except Exception as e:
raise ValueError(
"Failed to store item %r in temporary collection %r: %s" %
(uid, self.path, e)) from e
href_candidate_functions = []
if os.name in ("nt", "posix"):
href_candidate_functions.append(
lambda: uid if uid.lower().endswith(suffix.lower())
else uid + suffix)
href_candidate_functions.extend((
lambda: radicale_item.get_etag(uid).strip('"') + suffix,
lambda: radicale_item.find_available_uid(hrefs.__contains__,
suffix)))
href = f = None
while href_candidate_functions:
href = href_candidate_functions.pop(0)()
if href in hrefs:
continue
if not pathutils.is_safe_filesystem_path_component(href):
if not href_candidate_functions:
raise pathutils.UnsafePathError(href)
continue
try:
f = open(pathutils.path_to_filesystem(
self._filesystem_path, href),
"w", newline="", encoding=self._encoding)
break
except OSError as e:
if href_candidate_functions and (
os.name == "posix" and e.errno == 22 or
os.name == "nt" and e.errno == 123):
continue
raise
with f:
f.write(item.serialize())
f.flush()
self._storage._fsync(f)
hrefs.add(href)
with open(os.path.join(cache_folder, href), "wb") as f:
pickle.dump(cache_content, f)
f.flush()
self._storage._fsync(f)
self._storage._sync_directory(cache_folder)
self._storage._sync_directory(self._filesystem_path)

View File

@ -1,3 +0,0 @@
from distutils.core import setup
setup(name="app_store_plugin", version='1.0.0', packages=["app_store_plugin"])

View File

@ -1,13 +0,0 @@
./app_store_plugin
./app_auth_plugin
./app_rights_plugin
pytz
vobject
certifi
chardet
idna
python_dateutil
six
urllib3
requests
radicale==3.0.5

View File

@ -1,58 +0,0 @@
#!/bin/bash
set -xe
SRC_PATH="/AppServer"
ARGS=""
while [ "$1" != "" ]; do
case $1 in
-sp | --srcpath )
if [ "$2" != "" ]; then
SRC_PATH=$2
shift
fi
;;
-ar | --arguments )
if [ "$2" != "" ]; then
ARGS=$2
shift
fi
;;
-? | -h | --help )
echo " Usage: bash build-backend.sh [PARAMETER] [[PARAMETER], ...]"
echo " Parameters:"
echo " -sp, --srcpath path to AppServer root directory"
echo " -ar, --arguments additional arguments publish the .NET runtime with your application"
echo " -?, -h, --help this help"
echo " Examples"
echo " bash build-backend.sh -sp /app/AppServer"
exit 0
;;
* )
echo "Unknown parameter $1" 1>&2
exit 1
;;
esac
shift
done
echo "== BACK-END-BUILD =="
cd ${SRC_PATH}
dotnet build ASC.Web.slnf ${ARGS}
dotnet build ASC.Migrations.sln -o ${SRC_PATH}/ASC.Migration.Runner/service/
# Array of backend service names in the common directory (Node.js)
services_name_backend_nodejs=()
services_name_backend_nodejs+=(ASC.Socket.IO)
services_name_backend_nodejs+=(ASC.SsoAuth)
# Build backend services (Nodejs)
for i in ${!services_name_backend_nodejs[@]}; do
echo "== Build ${services_name_backend_nodejs[$i]} project =="
yarn install --cwd common/${services_name_backend_nodejs[$i]} --frozen-lockfile
done

View File

@ -1,66 +0,0 @@
#!/bin/bash
set -xe
SRC_PATH="/AppServer"
BUILD_ARGS="build"
DEPLOY_ARGS="deploy"
DEBUG_INFO="true"
while [ "$1" != "" ]; do
case $1 in
-sp | --srcpath )
if [ "$2" != "" ]; then
SRC_PATH=$2
shift
fi
;;
-ba | --build-args )
if [ "$2" != "" ]; then
BUILD_ARGS=$2
shift
fi
;;
-da | --deploy-args )
if [ "$2" != "" ]; then
DEPLOY_ARGS=$2
shift
fi
;;
-di | --debug-info )
if [ "$2" != "" ]; then
DEBUG_INFO=$2
shift
fi
;;
-? | -h | --help )
echo " Usage: bash build-backend.sh [PARAMETER] [[PARAMETER], ...]"
echo " Parameters:"
echo " -sp, --srcpath path to AppServer root directory"
echo " -ba, --build-args arguments for yarn building"
echo " -da, --deploy-args arguments for yarn deploying"
echo " -di, --depbug-info arguments for yarn debug info configure"
echo " -?, -h, --help this help"
echo " Examples"
echo " bash build-backend.sh -sp /app/AppServer"
exit 0
;;
* )
echo "Unknown parameter $1" 1>&2
exit 1
;;
esac
shift
done
echo "== FRONT-END-BUILD =="
cd ${SRC_PATH}
yarn install
# debug config
if [ "$DEBUG_INFO" = true ]; then
yarn debug-info
fi
yarn ${BUILD_ARGS}
yarn ${DEPLOY_ARGS}

View File

@ -1,57 +0,0 @@
$SRC_PATH=(get-item $PSScriptRoot ).parent.parent.parent.FullName
$BUILD_PATH="$SRC_PATH\publish"
$BACKEND_NODEJS_SERVICES="ASC.Socket.IO","ASC.SsoAuth"
$BACKEND_DOTNETCORE_SERVICES="ASC.Files", "ASC.People", "ASC.Data.Backup", "ASC.Files.Service", "ASC.Notify", "ASC.Studio.Notify", "ASC.Web.Api", "ASC.Web.Studio", "ASC.Data.Backup.BackgroundTasks", "ASC.ClearEvents", "ASC.ApiSystem", "ASC.Web.HealthChecks.UI"
$SELF_CONTAINED="false"
$PUBLISH_CNF="Debug"
$FRONTEND_BUILD_ARGS="build"
$FRONTEND_DEPLOY_ARGS="deploy"
$DEBUG_INFO_CHECK=""
$MIGRATION_CHECK="true"
$DOCKER_ENTRYPOINT="$SRC_PATH\build\install\docker\docker-entrypoint.py"
if(Test-Path -Path "$BUILD_PATH\services" ){
Write-Host "== Clean up services ==" -ForegroundColor Green
Remove-Item "$BUILD_PATH\services" -Recurse
}
Write-Host "== Build ASC.Web.slnf ==" -ForegroundColor Green
dotnet build "$SRC_PATH\ASC.Web.slnf"
Write-Host "== Build ASC.Migrations.sln ==" -ForegroundColor Green
dotnet build "$SRC_PATH\ASC.Migrations.sln" -o "$BUILD_PATH\services\ASC.Migration.Runner\service\"
Write-Host "== Add docker-migration-entrypoint.sh to ASC.Migration.Runner ==" -ForegroundColor Green
$FilePath = "$BUILD_PATH\services\ASC.Migration.Runner\service\docker-migration-entrypoint.sh"
Get-Content "$SRC_PATH\build\install\docker\docker-migration-entrypoint.sh" -raw | % {$_ -replace "`r", ""} | Set-Content -NoNewline $FilePath
foreach ($SERVICE in $BACKEND_NODEJS_SERVICES)
{
Write-Host "== Build $SERVICE project ==" -ForegroundColor Green
yarn install --cwd "$SRC_PATH\common\$SERVICE" --frozen-lockfile
$DST = "$BUILD_PATH\services\$SERVICE\service\"
if(!(Test-Path -Path $DST )){
New-Item -ItemType "directory" -Path $DST
}
Write-Host "== Copy service data to `publish\services\${SERVICE}\service` ==" -ForegroundColor Green
Copy-Item -Path "$SRC_PATH\common\$SERVICE\*" -Destination $DST -Recurse
Write-Host "== Add docker-entrypoint.py to $SERVICE ==" -ForegroundColor Green
Copy-Item $DOCKER_ENTRYPOINT -Destination $DST
}
Write-Host "== Publish ASC.Web.slnf ==" -ForegroundColor Green
dotnet publish "$SRC_PATH\ASC.Web.slnf" -p "PublishProfile=FolderProfile"
Set-Location -Path $PSScriptRoot
foreach ($SERVICE in $BACKEND_DOTNETCORE_SERVICES)
{
Write-Host "== Add docker-entrypoint.py to $SERVICE ==" -ForegroundColor Green
$DST = "$BUILD_PATH\services\$SERVICE\service\"
Copy-Item $DOCKER_ENTRYPOINT -Destination $DST
}

View File

@ -1,253 +0,0 @@
#!/bin/bash
set -xe
PROJECT_REPOSITORY_NAME=${PROJECT_REPOSITORY_NAME:-"DocSpace"}
SRC_PATH=${SRC_PATH:-$(pwd | sed "s/${PROJECT_REPOSITORY_NAME}.*/${PROJECT_REPOSITORY_NAME}/g")}
BUILD_PATH=${BUILD_PATH:-${SRC_PATH}/publish}
BUILD_DOTNET_CORE_ARGS=${BUILD_DOTNET_CORE_ARGS:-"false"}
PROPERTY_BUILD=${PROPERTY_BUILD:-"all"}
BACKEND_NODEJS_SERVICES=${BACKEND_NODEJS_SERVICES:-"ASC.Socket.IO, ASC.SsoAuth"}
BACKEND_DOTNETCORE_SERVICES=${BACKEND_DOTNETCORE_SERVICES:-"ASC.Files, ASC.People, ASC.Data.Backup, ASC.Files.Service, ASC.Notify, \
ASC.Studio.Notify, ASC.Web.Api, ASC.Web.Studio, ASC.Data.Backup.BackgroundTasks, ASC.ClearEvents, ASC.ApiSystem, ASC.Web.HealthChecks.UI"}
SELF_CONTAINED=${SELF_CONTAINED:-"false"}
PUBLISH_BACKEND_ARGS=${PUBLISH_BACKEND_ARGS:-"false"}
PUBLISH_CNF=${PUBLISH_CNF:-"Release"}
FRONTEND_BUILD_ARGS=${FRONTEND_BUILD_ARGS:-"build"}
FRONTEND_DEPLOY_ARGS=${FRONTEND_DEPLOY_ARGS:-"deploy"}
DEBUG_INFO_CHECK=${DEBUG_INFO_CHECK:-""}
MIGRATION_CHECK=${MIGRATION_CHECK:-"true"}
DOCKER_ENTRYPOINT=${DOCKER_ENTRYPOINT:-"false"}
ARRAY_NAME_SERVICES=()
while [ "$1" != "" ]; do
case $1 in
-sp | --srcpath )
if [[ "$2" != "" && ! "$2" =~ ^- ]]; then
SRC_PATH=$2
BUILD_PATH=${SRC_PATH}/publish
shift
fi
;;
-bp | --buildpath )
if [[ "$2" != "" && ! "$2" =~ ^- ]]; then
BUILD_PATH=$2
shift
fi
;;
-pb | --property-build )
if [[ "$2" != "" && ! "$2" =~ ^- ]]; then
PROPERTY_BUILD=$2
shift
fi
;;
-sc | --self-contained )
if [[ "$2" != "" && ! "$2" =~ ^- ]]; then
SELF_CONTAINED=$2
shift
fi
;;
-pc | --publish-configuration )
if [[ "$2" != "" && ! "$2" =~ ^- ]]; then
PUBLISH_CNF=$2
shift
fi
;;
-yb | --frontend-build-args )
if [[ "$2" != "" && ! "$2" =~ ^- ]]; then
FRONTEND_BUILD_ARGS=$2
shift
fi
;;
-yd | --frontend-deploy-args )
if [[ "$2" != "" && ! "$2" =~ ^- ]]; then
FRONTEND_DEPLOY_ARGS=$2
shift
fi
;;
-dc | --debug-check )
if [[ "$2" != "" && ! "$2" =~ ^- ]]; then
DEBUG_INFO_CHECK=$2
shift
fi
;;
-mc | --migration-check )
if [[ "$2" != "" && ! "$2" =~ ^- ]]; then
MIGRATION_CHECK=$2
shift
fi
;;
-de | --docker-entrypoint )
if [[ "$2" != "" && ! "$2" =~ ^- ]]; then
DOCKER_ENTRYPOINT=$2
shift
fi
;;
-? | -h | --help )
echo " Usage: bash build-services.sh [PARAMETER] [[PARAMETER], ...]"
echo " Parameters:"
echo -e " -sp, --srcpath \t path to project root directory"
echo -e " -bp, --buildpath \t path where generated output is placed (by default={SRC_PATH}/publish)"
echo -e " -st, --status \t build status Ex. all/frontend-build/backend-publish/backend-dotnet-publish/backend-nodejs-publish/backend-build"
echo -e " -sc, --self-contained \t publish the .NET runtime with your application (by default=false)"
echo -e " -pc, --publish-configuration \t dotnet publish configuration Ex. Release/Debug"
echo -e " -yb, --frontend-build-args \t arguments for yarn building"
echo -e " -yd, --frontend-deploy-args \t arguments for yarn deploy"
echo -e " -dc, --debug-check \t arguments for yarn debug info configure"
echo -e " -mc, --migration-check \t check migration build (by default=true)"
echo " -?, -h, --help this help"
echo " Examples"
echo " bash build-services.sh -sp /app/DocSpace"
exit 0
;;
* )
echo "Unknown parameter $1" 1>&2
exit 1
;;
esac
shift
done
cd ${SRC_PATH}
function get_services_name {
if [[ $# -gt 0 ]]
then
ARRAY_NAME_SERVICES=($(echo $1 | tr "," " "))
fi
}
# Build the dotnetcore backend projects
function build_dotnetcore_backend {
if [[ ${BUILD_DOTNET_CORE_ARGS} == "false" ]]
then
echo "== Build ASC.Web.slnf =="
dotnet build ASC.Web.slnf
else
echo "== Build ASC.Web.slnf ${BUILD_DOTNET_CORE_ARGS} =="
dotnet build ASC.Web.slnf ${BUILD_DOTNET_CORE_ARGS}
fi
if [[ $# -gt 0 ]]
then
local migration_check=$(echo $1 | tr '[:upper:]' '[:lower:]' | tr -d ' ')
if [[ ${migration_check} == "true" ]]
then
echo "== Build ASC.Migrations.sln =="
dotnet build ASC.Migrations.sln -o ${BUILD_PATH}/services/ASC.Migration.Runner/service/
fi
if [[ ${DOCKER_ENTRYPOINT} != "false" ]]
then
echo "== ADD ${SRC_PATH}/build/install/docker/docker-migration-entrypoint.sh to ASC.Migration.Runner =="
cp ${SRC_PATH}/build/install/docker/docker-migration-entrypoint.sh ${BUILD_PATH}/services/ASC.Migration.Runner/service/
fi
fi
}
# Publish BACKEND dotnetcore services
function backend-dotnet-publish {
# List of names for dotnetcore backend projects
get_services_name "${BACKEND_DOTNETCORE_SERVICES}"
echo "== Publish ASC.Web.slnf =="
if [[ ${PUBLISH_BACKEND_ARGS} == "false" ]]
then
dotnet publish $SRC_PATH/ASC.Web.slnf -p "PublishProfile=FolderProfile"
else
dotnet publish $SRC_PATH/ASC.Web.slnf ${PUBLISH_BACKEND_ARGS} -p "PublishProfile=FolderProfile"
fi
if [[ ${DOCKER_ENTRYPOINT} != "false" ]]
then
for i in ${!ARRAY_NAME_SERVICES[@]}; do
echo "== ADD ${DOCKER_ENTRYPOINT} to ${ARRAY_NAME_SERVICES[$i]} =="
cp ${DOCKER_ENTRYPOINT} ${BUILD_PATH}/services/${ARRAY_NAME_SERVICES[$i]}/service/
done
fi
ARRAY_NAME_SERVICES=()
}
# Install BACKEND dependencies for nodejs projects
function backend-nodejs-publish {
# List of names for nodejs backend projects
get_services_name "${BACKEND_NODEJS_SERVICES}"
for i in ${!ARRAY_NAME_SERVICES[@]}; do
echo "== Build ${ARRAY_NAME_SERVICES[$i]} project =="
yarn install --cwd ${SRC_PATH}/common/${ARRAY_NAME_SERVICES[$i]} --frozen-lockfile && \
mkdir -p ${BUILD_PATH}/services/${ARRAY_NAME_SERVICES[$i]}/service/ && \
cp -rfv ${SRC_PATH}/common/${ARRAY_NAME_SERVICES[$i]}/* ${BUILD_PATH}/services/${ARRAY_NAME_SERVICES[$i]}/service/
if [[ ${DOCKER_ENTRYPOINT} != "false" ]]
then
echo "== ADD ${DOCKER_ENTRYPOINT} to ${ARRAY_NAME_SERVICES[$i]} =="
cp ${DOCKER_ENTRYPOINT} ${BUILD_PATH}/services/${ARRAY_NAME_SERVICES[$i]}/service/
fi
done
ARRAY_NAME_SERVICES=()
}
# Install FRONTEND dependencies for nodejs projects
function build_nodejs_frontend {
echo "== yarn install =="
yarn install
# Install debug config mode
if [[ $# -gt 0 ]]
then
local debug_info_check=$(echo $1 | tr '[:upper:]' '[:lower:]' | tr -d ' ')
if [[ ${debug_info_check} == "true" ]]
then
echo "== yarn debug-info =="
yarn debug-info
fi
fi
echo "== yarn ${FRONTEND_BUILD_ARGS} =="
yarn ${FRONTEND_BUILD_ARGS}
echo "== yarn ${FRONTEND_DEPLOY_ARGS} =="
yarn ${FRONTEND_DEPLOY_ARGS}
if [[ ${DOCKER_ENTRYPOINT} != "false" ]]
then
echo "== ADD ${DOCKER_ENTRYPOINT} to ASC.Login =="
cp ${DOCKER_ENTRYPOINT} ${SRC_PATH}/build/deploy/login/
echo "== ADD ${DOCKER_ENTRYPOINT} toASC.Editors =="
cp ${DOCKER_ENTRYPOINT} ${SRC_PATH}/build/deploy/editor/
fi
}
function run {
case $1 in
all )
build_dotnetcore_backend "${MIGRATION_CHECK}"
backend-nodejs-publish
build_nodejs_frontend "${DEBUG_INFO_CHECK}"
backend-dotnet-publish
;;
frontend-build )
build_nodejs_frontend "${DEBUG_INFO_CHECK}"
;;
backend-publish )
build_dotnetcore_backend "${MIGRATION_CHECK}"
backend-nodejs-publish
backend-dotnet-publish
;;
backend-dotnet-publish )
build_dotnetcore_backend "${MIGRATION_CHECK}"
backend-dotnet-publish
;;
backend-nodejs-publish )
backend-nodejs-publish
;;
backend-build )
build_dotnetcore_backend "${MIGRATION_CHECK}"
;;
* )
echo "Unknown - \"$1\", Ex. all/frontend-build/backend-publish/backend-dotnet-publish/backend-nodejs-publish/backend-build" 1>&2
exit 1
;;
esac
}
run "${PROPERTY_BUILD}"

View File

@ -1,16 +0,0 @@
/var/log/onlyoffice/{{product}}/*.log {
daily
missingok
rotate 30
compress
dateext
delaycompress
notifempty
nocreate
sharedscripts
postrotate
if pgrep -x ""systemd"" >/dev/null; then
systemctl restart {{product}}* > /dev/null
fi
endscript
}

View File

@ -1,820 +0,0 @@
#!/bin/bash
set -e
PRODUCT="docspace"
ENVIRONMENT="production"
PACKAGE_SYSNAME="onlyoffice"
APP_DIR="/etc/${PACKAGE_SYSNAME}/${PRODUCT}"
PRODUCT_DIR="/var/www/${PRODUCT}"
LOG_DIR="/var/log/${PACKAGE_SYSNAME}/${PRODUCT}"
USER_CONF="$APP_DIR/appsettings.$ENVIRONMENT.json"
OPENRESTY_CONF="/usr/local/openresty/nginx/conf/nginx.conf"
OPENRESTY_DIR="/etc/openresty"
DB_HOST="localhost"
DB_PORT="3306"
DB_NAME="${PACKAGE_SYSNAME}"
DB_USER="root"
DB_PWD=""
APP_HOST="localhost"
APP_PORT="80"
ELK_SHEME="http"
ELK_HOST="localhost"
ELK_PORT="9200"
RABBITMQ_HOST="localhost"
RABBITMQ_USER="guest"
RABBITMQ_PASSWORD="guest"
RABBITMQ_PORT="5672"
REDIS_HOST="127.0.0.1"
REDIS_PORT="6379"
JSON="json -I -f"
[ $(id -u) -ne 0 ] && { echo "Root privileges required"; exit 1; }
check_localhost() { [ "$1" = "localhost" ] || [ "$1" = "127.0.0.1" ] && return 1 || return 0; }
while [ "$1" != "" ]; do
case $1 in
-ash | --appshost )
if [ "$2" != "" ]; then
APP_HOST=$2
shift
fi
;;
-asp | --appsport )
if [ "$2" != "" ]; then
APP_PORT=$2
shift
fi
;;
-ess | --elasticsheme )
if [ "$2" != "" ]; then
ELK_SHEME=$2
shift
fi
;;
-esh | --elastichost )
if [ "$2" != "" ]; then
ELK_HOST=$2
check_localhost "$ELK_HOST" && EXTERNAL_ELK_FLAG="true"
shift
fi
;;
-esp | --elasticport )
if [ "$2" != "" ]; then
ELK_PORT=$2
shift
fi
;;
-e | --environment )
if [ "$2" != "" ]; then
ENVIRONMENT=$2
shift
fi
;;
-mysqlh | --mysqlhost )
if [ "$2" != "" ]; then
DB_HOST=$2
shift
fi
;;
-mysqld | --mysqldatabase )
if [ "$2" != "" ]; then
DB_NAME=$2
shift
fi
;;
-mysqlu | --mysqluser )
if [ "$2" != "" ]; then
DB_USER=$2
shift
fi
;;
-mysqlp | --mysqlpassword )
if [ "$2" != "" ]; then
DB_PWD=$2
shift
fi
;;
-rdh | --redishost )
if [ "$2" != "" ]; then
REDIS_HOST=$2
check_localhost "$REDIS_HOST" && EXTERNAL_REDIS_FLAG="true"
shift
fi
;;
-rdp | --redisport )
if [ "$2" != "" ]; then
REDIS_PORT=$2
shift
fi
;;
-rbh | --rabbitmqhost )
if [ "$2" != "" ]; then
RABBITMQ_HOST=$2
check_localhost "$REDIS_HOST" && EXTERNAL_RABBITMQ_FLAG="true"
shift
fi
;;
-rbu | --rabbitmquser )
if [ "$2" != "" ]; then
RABBITMQ_USER=$2
shift
fi
;;
-rbpw | --rabbitmqpassword )
if [ "$2" != "" ]; then
RABBITMQ_PASSWORD=$2
shift
fi
;;
-rbp | --rabbitmqport )
if [ "$2" != "" ]; then
RABBITMQ_PORT=$2
shift
fi
;;
-mk | --machinekey )
if [ "$2" != "" ]; then
CORE_MACHINEKEY=$2
shift
fi
;;
-js | --jwtsecret )
if [ "$2" != "" ]; then
DOCUMENT_SERVER_JWT_SECRET=$2
shift
fi
;;
-jh | --jwtheader )
if [ "$2" != "" ]; then
DOCUMENT_SERVER_JWT_HEADER=$2
shift
fi
;;
-docsurl | --docsurl )
if [ "$2" != "" ]; then
DOCUMENT_SERVER_URL_EXTERNAL=$2
shift
fi
;;
-? | -h | --help )
echo " Usage: bash ${PRODUCT}-configuration [PARAMETER] [[PARAMETER], ...]"
echo
echo " Parameters:"
echo " -ash, --appshost ${PRODUCT} ip"
echo " -asp, --appsport ${PRODUCT} port (default 80)"
echo " -docsurl, --docsurl $PACKAGE_SYSNAME docs server address (example http://$PACKAGE_SYSNAME-docs-address:8083)"
echo " -esh, --elastichost elasticsearch ip"
echo " -esp, --elasticport elasticsearch port (default 9200)"
echo " -rdh, --redishost redis ip"
echo " -rdp, --redisport redis port (default 6379)"
echo " -rbh, --rabbitmqhost rabbitmq ip"
echo " -rbp, --rabbitmqport rabbitmq port"
echo " -rbu, --rabbitmquser rabbitmq user"
echo " -rbpw, --rabbitmqpassword rabbitmq password"
echo " -mysqlh, --mysqlhost mysql server host"
echo " -mysqld, --mysqldatabase ${PRODUCT} database name"
echo " -mysqlu, --mysqluser ${PRODUCT} database user"
echo " -mysqlp, --mysqlpassword ${PRODUCT} database password"
echo " -js, --jwtsecret defines the secret key to validate the JWT in the request"
echo " -jh. --jwtheader defines the http header that will be used to send the JWT"
echo " -mk, --machinekey setting for core.machinekey"
echo " -e, --environment environment (default 'production')"
echo " -?, -h, --help this help"
echo
exit 0
;;
* )
echo "Unknown parameter $1" 1>&2
exit 1
;;
esac
shift
done
set_core_machinekey () {
if [[ -f $APP_DIR/.private/machinekey ]] || [[ -n $CORE_MACHINEKEY ]]; then
CORE_MACHINEKEY=${CORE_MACHINEKEY:-$(cat $APP_DIR/.private/machinekey)};
else
CORE_MACHINEKEY=$(cat /dev/urandom | tr -dc A-Za-z0-9 | head -c 12);
if [ "$DIST" = "RedHat" ]; then
echo $CORE_MACHINEKEY > $APP_DIR/.private/machinekey
chmod o-rwx $APP_DIR/.private/machinekey
fi
fi
save_undefined_param "${USER_CONF}" "core.machinekey" "${CORE_MACHINEKEY}"
save_undefined_param "${USER_CONF}" "core['base-domain']" "${APP_HOST}"
save_undefined_param "${APP_DIR}/apisystem.${ENVIRONMENT}.json" "core.machinekey" "${CORE_MACHINEKEY}"
save_undefined_param "${APP_DIR}/apisystem.${ENVIRONMENT}.json" "core['base-domain']" "${CORE_MACHINEKEY}"
sed "s^\(machine_key\)\s*=.*^\1 = ${CORE_MACHINEKEY}^g" -i $APP_DIR/radicale.config
}
install_json() {
if ! command -v json >/dev/null; then
echo -n "Install json package... "
curl -L https://github.com/trentm/json/raw/master/lib/json.js > /usr/bin/json
chmod 755 /usr/bin/json
echo "OK"
fi
}
save_undefined_param() {
local JSON_FILE="$1"
local FIELD_PATH="$2"
local FIELD_VALUE="$3"
[ "$4" != "rewrite" ] && local CONDITION="if(this.${FIELD_PATH}===undefined)"
IFS='.' read -ra PATH_ELEMENTS <<< "${FIELD_PATH}"
for ELEMENT in "${PATH_ELEMENTS[@]::${#PATH_ELEMENTS[@]}-1}"; do
local CURRENT_PATH+=".$ELEMENT"
${JSON} ${JSON_FILE} -e "if(this${CURRENT_PATH}===undefined)this${CURRENT_PATH}={};" >/dev/null 2>&1
done
${JSON} ${JSON_FILE} -e "${CONDITION}this.${FIELD_PATH}=\"${FIELD_VALUE}\"" >/dev/null 2>&1
}
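
save_undefined_param drives the `json` CLI to set a dotted field only while it is still undefined (unless called with "rewrite"). The same idea in Python, for readers who find the shell quoting opaque (a sketch that handles plain dotted paths only, not the bracket form like core['base-domain'] used elsewhere in this script):

import json

def save_undefined_param(json_file, field_path, value, rewrite=False):
    with open(json_file) as f:
        data = json.load(f)
    node = data
    keys = field_path.split(".")
    for key in keys[:-1]:
        node = node.setdefault(key, {})  # create intermediate objects, as the bash loop does
    if rewrite or keys[-1] not in node:
        node[keys[-1]] = value
        with open(json_file, "w") as f:
            json.dump(data, f, indent=2)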
restart_services() {
chown -R ${PACKAGE_SYSNAME}:${PACKAGE_SYSNAME} $APP_DIR $PRODUCT_DIR $LOG_DIR /var/www/$PACKAGE_SYSNAME/Data
sed "s_\(ENVIRONMENT=\).*_\1${ENVIRONMENT}_i" -i ${SYSTEMD_DIR}/${PRODUCT}*.service >/dev/null 2>&1
systemctl daemon-reload
echo -n "Updating database... "
systemctl start ${PRODUCT}-migration-runner >/dev/null 2>&1 || true
while systemctl is-active ${PRODUCT}-migration-runner &>/dev/null; do
sleep 5
done
echo "OK"
echo -n "Restarting services... "
for SVC in login api socket studio-notify notify \
people-server files files-services studio backup \
clear-events backup-background ssoauth doceditor healthchecks
do
systemctl enable ${PRODUCT}-$SVC >/dev/null 2>&1
systemctl restart ${PRODUCT}-$SVC
done
echo "OK"
}
input_db_params(){
local CONNECTION_STRING=$(json -f $USER_CONF ConnectionStrings.default.connectionString)
local def_DB_HOST=$(grep -oP 'Server=\K[^;]*' <<< "$CONNECTION_STRING")
local def_DB_NAME=$(grep -oP 'Database=\K[^;]*' <<< "$CONNECTION_STRING")
local def_DB_USER=$(grep -oP 'User ID=\K[^;]*' <<< "$CONNECTION_STRING")
if [ -z $def_DB_HOST ] && [ -z $DB_HOST ]; then
read -e -p "Database host: " -i "$DB_HOST" DB_HOST;
else
DB_HOST=${DB_HOST:-$def_DB_HOST}
fi
if [ -z $def_DB_NAME ] && [ -z $DB_NAME ]; then
read -e -p "Database name: " -i "$DB_NAME" DB_NAME;
else
DB_NAME=${DB_NAME:-$def_DB_NAME}
fi
if [ -z $def_DB_USER ] && [ -z $DB_USER ]; then
read -e -p "Database user: " -i "$DB_USER" DB_USER;
else
DB_USER=${DB_USER:-$def_DB_USER}
fi
if [ -z $DB_PWD ]; then
read -e -p "Database password: " -i "$DB_PWD" -s DB_PWD;
fi
}
establish_mysql_conn(){
echo -n "Trying to establish MySQL connection... "
command -v mysql >/dev/null 2>&1 || { echo "MySQL client not found"; exit 1; }
MYSQL="mysql -P$DB_PORT -h$DB_HOST -u$DB_USER"
if [ -n "$DB_PWD" ]; then
MYSQL="$MYSQL -p$DB_PWD"
fi
$MYSQL -e ";" >/dev/null 2>&1
ERRCODE=$?
if [ $ERRCODE -ne 0 ]; then
systemctl ${MYSQL_PACKAGE} start >/dev/null 2>&1
$MYSQL -e ";" >/dev/null 2>&1 || { echo "FAILURE"; exit 1; }
fi
if $PACKAGE_MANAGER mysql-server >/dev/null 2>&1 || $PACKAGE_MANAGER mysql-community-server >/dev/null 2>&1; then
change_mysql_config
fi
#Save db settings in .json
CONNECTION_STRING="Server=$DB_HOST;Port=$DB_PORT;Database=$DB_NAME;User ID=$DB_USER;Password=$DB_PWD;Pooling=true; \
Character Set=utf8; AutoEnlist=false; SSL Mode=none;AllowPublicKeyRetrieval=true;Connection Timeout=30;Maximum Pool Size=300"
save_undefined_param "${USER_CONF}" "ConnectionStrings.default.connectionString" "${CONNECTION_STRING}"
save_undefined_param "${APP_DIR}/apisystem.${ENVIRONMENT}.json" "ConnectionStrings.default.connectionString" "${CONNECTION_STRING}"
sed "s&\(\"ConnectionString\":\).*&\1 \"$(printf "%q" "${CONNECTION_STRING}")\"&" -i $PRODUCT_DIR/services/ASC.Migration.Runner/appsettings.runner.json
#Enable database migration
save_undefined_param "${USER_CONF}" "migration.enabled" "true"
echo "OK"
}
change_mysql_config(){
if [ "$DIST" = "RedHat" ]; then
local CNF_PATH="/etc/my.cnf";
local CNF_SERVICE_PATH="/usr/lib/systemd/system/mysqld.service";
if ! grep -q "\[mysqld\]" ${CNF_PATH}; then
CNF_PATH="/etc/my.cnf.d/server.cnf";
if ! grep -q "\[mysqld\]" ${CNF_PATH}; then
exit 1;
fi
fi
if ! grep -q "\[Unit\]" ${CNF_SERVICE_PATH}; then
CNF_SERVICE_PATH="/lib/systemd/system/mysqld.service";
if ! grep -q "\[Unit\]" ${CNF_SERVICE_PATH}; then
CNF_SERVICE_PATH="/lib/systemd/system/mariadb.service";
if ! grep -q "\[Unit\]" ${CNF_SERVICE_PATH}; then
exit 1;
fi
fi
fi
elif [ "$DIST" = "Debian" ]; then
sed "s/#max_connections.*/max_connections = 1000/" -i /etc/mysql/my.cnf || true # ignore errors
CNF_PATH="/etc/mysql/mysql.conf.d/mysqld.cnf";
CNF_SERVICE_PATH="/lib/systemd/system/mysql.service";
if mysql -V | grep -q "MariaDB"; then
CNF_PATH="/etc/mysql/mariadb.conf.d/50-server.cnf";
CNF_SERVICE_PATH="/lib/systemd/system/mariadb.service";
fi
fi
sed '/skip-networking/d' -i ${CNF_PATH} || true # ignore errors
if ! grep -q "^sql_mode" ${CNF_PATH}; then
sed "/\[mysqld\]/a sql_mode = 'NO_ENGINE_SUBSTITUTION'" -i ${CNF_PATH} # disable new STRICT mode in mysql 5.7
else
sed "s/sql_mode.*/sql_mode = 'NO_ENGINE_SUBSTITUTION'/" -i ${CNF_PATH} || true # ignore errors
fi
if ! grep -q "^max_connections" ${CNF_PATH}; then
sed '/\[mysqld\]/a max_connections = 1000' -i ${CNF_PATH}
else
sed "s/max_connections.*/max_connections = 1000/" -i ${CNF_PATH} || true # ignore errors
fi
if ! grep -q "^group_concat_max_len" ${CNF_PATH}; then
sed '/\[mysqld\]/a group_concat_max_len = 2048' -i ${CNF_PATH}
else
sed "s/group_concat_max_len.*/group_concat_max_len = 2048/" -i ${CNF_PATH} || true # ignore errors
fi
if ! grep -q "^max_allowed_packet" ${CNF_PATH}; then
sed '/\[mysqld\]/a max_allowed_packet = 1048576000' -i ${CNF_PATH}
else
sed "s/max_allowed_packet.*/max_allowed_packet = 1048576000/" -i ${CNF_PATH} || true # ignore errors
fi
if ! grep -q "^character_set_server" ${CNF_PATH}; then
sed '/\[mysqld\]/a character_set_server = utf8' -i ${CNF_PATH}
else
sed "s/character_set_server.*/character_set_server = utf8/" -i ${CNF_PATH} || true # ignore errors
fi
if ! grep -q "^collation_server" ${CNF_PATH}; then
sed '/\[mysqld\]/a collation_server = utf8_general_ci' -i ${CNF_PATH}
else
sed "s/collation_server.*/collation_server = utf8_general_ci/" -i ${CNF_PATH} || true # ignore errors
fi
MYSQL_AUTHENTICATION_PLUGIN=$($MYSQL -e "SHOW VARIABLES LIKE 'default_authentication_plugin';" -s 2>/dev/null | awk '{print $2}')
MYSQL_AUTHENTICATION_PLUGIN=${MYSQL_AUTHENTICATION_PLUGIN:-caching_sha2_password}
if ! grep -q "^default-authentication-plugin" ${CNF_PATH}; then
sed "/\[mysqld\]/a default-authentication-plugin = ${MYSQL_AUTHENTICATION_PLUGIN}" -i ${CNF_PATH}
else
sed "s/default-authentication-plugin.*/default-authentication-plugin = ${MYSQL_AUTHENTICATION_PLUGIN}/" -i ${CNF_PATH} || true # ignore errors
fi
if [ -e ${CNF_SERVICE_PATH} ]; then
if ! grep -q "^LimitNOFILE" ${CNF_SERVICE_PATH}; then
sed '/\[Service\]/a LimitNOFILE = infinity' -i ${CNF_SERVICE_PATH}
else
sed "s/LimitNOFILE.*/LimitNOFILE = infinity/" -i ${CNF_SERVICE_PATH} || true # ignore errors
fi
if ! grep -q "^LimitMEMLOCK" ${CNF_SERVICE_PATH}; then
sed '/\[Service\]/a LimitMEMLOCK = infinity' -i ${CNF_SERVICE_PATH}
else
sed "s/LimitMEMLOCK.*/LimitMEMLOCK = infinity/" -i ${CNF_SERVICE_PATH} || true # ignore errors
fi
fi
systemctl daemon-reload >/dev/null 2>&1
systemctl enable ${MYSQL_PACKAGE} >/dev/null 2>&1
systemctl restart ${MYSQL_PACKAGE}
}
setup_openresty(){
echo -n "Configuring openresty... "
cp -rf ${APP_DIR}/openresty/nginx.conf.template ${OPENRESTY_CONF}
[ ! -f "${OPENRESTY_DIR}/mime.types" ] && cp -rf "$(dirname ${OPENRESTY_CONF})/mime.types" ${OPENRESTY_DIR}
sed 's/\(listen .*:\)\([0-9]\{2,5\}\b\)\( default_server\)\?\(;\)/\1'${APP_PORT}'\3\4/' -i ${OPENRESTY_DIR}/conf.d/${PACKAGE_SYSNAME}-proxy.conf
sed "s!\(^worker_processes\).*;!\1 ${NGINX_WORKER_PROCESSES:-$(grep processor /proc/cpuinfo | wc -l)};!" -i "${OPENRESTY_CONF}"
sed "s!\(worker_connections\).*;!\1 ${NGINX_WORKER_CONNECTIONS:-$(ulimit -n)};!" -i "${OPENRESTY_CONF}"
# Check for old configuration files
if [ -f "${OPENRESTY_DIR}/conf.d/${PACKAGE_SYSNAME}-proxy.conf.dpkg-old" ]; then
PROXY_CONF="${OPENRESTY_DIR}/conf.d/${PACKAGE_SYSNAME}-proxy.conf.dpkg-old"
elif [ -f "${OPENRESTY_DIR}/conf.d/${PACKAGE_SYSNAME}-proxy.conf.rpmsave" ]; then
PROXY_CONF="${OPENRESTY_DIR}/conf.d/${PACKAGE_SYSNAME}-proxy.conf.rpmsave"
fi
# If the configuration file is found, extract the paths to the certificate and key
if [ ! -z "${PROXY_CONF}" ]; then
CERTIFICATE_PATH=$(grep -oP 'ssl_certificate\s+\K\S+' "${PROXY_CONF}" | tr -d ';')
CERTIFICATE_KEY_PATH=$(grep -oP 'ssl_certificate_key\s+\K\S+' "${PROXY_CONF}" | tr -d ';')
# If both values are found, start SSL configuration
if [ ! -z "${CERTIFICATE_PATH}" ] && [ ! -z "${CERTIFICATE_KEY_PATH}" ]; then
/usr/bin/${PRODUCT}-ssl-setup -f "${CERTIFICATE_PATH}" "${CERTIFICATE_KEY_PATH}"
fi
fi
if [ "$DIST" = "RedHat" ]; then
shopt -s nocasematch
PORTS=()
if command -v getenforce &> /dev/null; then
case $(getenforce) in
enforcing|permissive)
PORTS+=('5000') #ASC.Web.Api
PORTS+=('5001') #client
PORTS+=('5003') #ASC.Web.Studio
PORTS+=('5004') #ASC.People
PORTS+=('5005') #ASC.Notify
PORTS+=('5006') #ASC.Studio.Notify
PORTS+=('5007') #ASC.Files/server
PORTS+=('5009') #ASC.Files/service
PORTS+=('5010') #ASC.ApiSystem
PORTS+=('5011') #ASC.Login
PORTS+=('5012') #ASC.Data.Backup
PORTS+=('5013') #ASC.Files/editor
PORTS+=('5027') #ASC.ClearEvents
PORTS+=('5032') #ASC.Data.Backup.BackgroundTasks
PORTS+=('5033') #ASC.Web.HealthChecks
PORTS+=('5100') #ASC.ApiCache
PORTS+=('8081') #Storybook
PORTS+=('9834') #ASC.SsoAuth
PORTS+=('9899') #ASC.Socket.IO
setsebool -P httpd_can_network_connect on
;;
disabled)
:
;;
esac
for PORT in ${PORTS[@]}; do
semanage port -a -t http_port_t -p tcp $PORT >/dev/null 2>&1 || \
semanage port -m -t http_port_t -p tcp $PORT >/dev/null 2>&1 || \
true
done
fi
if rpm -q "firewalld"; then
firewall-cmd --permanent --zone=public --add-service=http
firewall-cmd --permanent --zone=public --add-service=https
systemctl restart firewalld.service
fi
elif [ "$DIST" = "Debian" ]; then
if ! id "nginx" &>/dev/null; then
rm -dfr /var/log/nginx/*
rm -dfr /var/cache/nginx/*
useradd -s /bin/false nginx
fi
fi
chown nginx:nginx ${OPENRESTY_DIR}* -R
systemctl enable openresty >/dev/null 2>&1
systemctl restart openresty
echo "OK"
}
# Function gets Document server host and port using regular expression, we need it to check connection
parse_external_docs_url () {
if [[ $DOCUMENT_SERVER_URL_EXTERNAL =~ ^(https?://)?([^:/]+)(:([0-9]+))?(/.*)?$ ]]; then
DOCUMENT_SERVER_PORT="${BASH_REMATCH[4]:-80}"
DOCUMENT_SERVER_HOST="${BASH_REMATCH[2]}"
[[ -n ${BASH_REMATCH[1]} ]] || DOCUMENT_SERVER_URL_EXTERNAL="http://$DOCUMENT_SERVER_HOST:$DOCUMENT_SERVER_PORT"
fi
}
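
The regex in parse_external_docs_url splits an optional scheme, a host, and an optional port, defaulting the port to 80. The equivalent with a URL parser makes the defaulting explicit (a sketch; the bash version keeps its variables in the caller's scope):

from urllib.parse import urlparse

def parse_docs_url(url):
    parsed = urlparse(url if "://" in url else "http://" + url)
    host = parsed.hostname
    port = parsed.port or 80  # same default the regex applies when no port is given
    return host, port, "%s://%s:%d" % (parsed.scheme, host, port)

# parse_docs_url("onlyoffice-docs-address:8083")
# -> ("onlyoffice-docs-address", 8083, "http://onlyoffice-docs-address:8083")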
setup_docs() {
echo -n "Configuring docs... "
if [ $1 == "LOCAL_DOCS_SERVER" ]; then
local DS_CONF_DIR="/etc/${PACKAGE_SYSNAME}/documentserver"
local DOCUMENT_SERVER_PORT=$(grep -oP '(?<=:)\d+(?=\s)' ${DS_CONF_DIR}/nginx/ds.conf)
local DOCUMENT_SERVER_JWT_SECRET=${DOCUMENT_SERVER_JWT_SECRET:-$(json -f ${DS_CONF_DIR}/local.json services.CoAuthoring.secret.inbox.string)}
local DOCUMENT_SERVER_JWT_HEADER=${DOCUMENT_SERVER_JWT_HEADER:-$(json -f ${DS_CONF_DIR}/local.json services.CoAuthoring.token.inbox.header)}
$JSON ${DS_CONF_DIR}/local.json -e "this.rabbitmq = { 'url': 'amqp://${RABBITMQ_USER}:${RABBITMQ_PASSWORD}@${RABBITMQ_HOST}:${RABBITMQ_PORT}' }" >/dev/null 2>&1
$JSON ${DS_CONF_DIR}/local.json -e "this.services.CoAuthoring.redis = { 'host': '$REDIS_HOST' }" >/dev/null 2>&1
sed 's/\(listen .*:\)\([0-9]\{2,5\}\b\)\( default_server\)\?\(;\)/\1'${DOCUMENT_SERVER_PORT}'\3\4/' -i ${DS_CONF_DIR}/nginx/ds.conf
chown ds:ds ${DS_CONF_DIR}/local.json
elif [ $1 == "EXTERNAL_DOCS_SERVER" ]; then
local APP_HOST=$(curl -s ifconfig.me)
local EXTERNAL_DOCS_FLAG="rewrite"
fi
#Changing the Docs port in nginx conf
sed "0,/proxy_pass .*;/{s#proxy_pass .*;#proxy_pass ${DOCUMENT_SERVER_URL_EXTERNAL:-http://$APP_HOST:$DOCUMENT_SERVER_PORT};#}" -i ${OPENRESTY_DIR}/conf.d/${PACKAGE_SYSNAME}.conf
#Save Docs address and JWT in .json
save_undefined_param "${USER_CONF}" "files.docservice.secret.value" "${DOCUMENT_SERVER_JWT_SECRET}" "rewrite"
save_undefined_param "${USER_CONF}" "files.docservice.secret.header" "${DOCUMENT_SERVER_JWT_HEADER:-"AuthorizationJwt"}" "rewrite"
save_undefined_param "${USER_CONF}" "files.docservice.url.public" "${DOCUMENT_SERVER_URL_EXTERNAL:-"/ds-vpath/"}" "$EXTERNAL_DOCS_FLAG"
save_undefined_param "${USER_CONF}" "files.docservice.url.internal" "${DOCUMENT_SERVER_URL_EXTERNAL:-"http://$APP_HOST:$DOCUMENT_SERVER_PORT"}" "$EXTERNAL_DOCS_FLAG"
save_undefined_param "${USER_CONF}" "files.docservice.url.portal" "http://${APP_HOST}:${APP_PORT}" "$EXTERNAL_DOCS_FLAG"
echo "OK"
}
setup_enterprise() {
if $PACKAGE_MANAGER ${PACKAGE_SYSNAME}-documentserver-ee >/dev/null 2>&1; then
LICENCE_PATH="/var/www/$PACKAGE_SYSNAME/Data/license.lic"
fi
save_undefined_param "${USER_CONF}" "license.file.path" "${LICENCE_PATH}" "rewrite"
}
change_elasticsearch_config(){
systemctl stop elasticsearch
local ELASTIC_SEARCH_CONF_PATH="/etc/elasticsearch/elasticsearch.yml"
local ELASTIC_SEARCH_JAVA_CONF_PATH="/etc/elasticsearch/jvm.options";
if /usr/share/elasticsearch/bin/elasticsearch-plugin list | grep -q "ingest-attachment"; then
/usr/share/elasticsearch/bin/elasticsearch-plugin remove -s ingest-attachment
fi
/usr/share/elasticsearch/bin/elasticsearch-plugin install -s -b ingest-attachment
if [ -f ${ELASTIC_SEARCH_CONF_PATH}.rpmnew ]; then
cp -rf ${ELASTIC_SEARCH_CONF_PATH}.rpmnew ${ELASTIC_SEARCH_CONF_PATH};
fi
if [ -f ${ELASTIC_SEARCH_JAVA_CONF_PATH}.rpmnew ]; then
cp -rf ${ELASTIC_SEARCH_JAVA_CONF_PATH}.rpmnew ${ELASTIC_SEARCH_JAVA_CONF_PATH};
fi
if ! grep -q "indices.fielddata.cache.size" ${ELASTIC_SEARCH_CONF_PATH}; then
echo "indices.fielddata.cache.size: 30%" >> ${ELASTIC_SEARCH_CONF_PATH}
else
sed -i "s/indices.fielddata.cache.size.*/indices.fielddata.cache.size: 30%/" ${ELASTIC_SEARCH_CONF_PATH}
fi
if ! grep -q "indices.memory.index_buffer_size" ${ELASTIC_SEARCH_CONF_PATH}; then
echo "indices.memory.index_buffer_size: 30%" >> ${ELASTIC_SEARCH_CONF_PATH}
else
sed -i "s/indices.memory.index_buffer_size.*/indices.memory.index_buffer_size: 30%/" ${ELASTIC_SEARCH_CONF_PATH}
fi
if grep -q "HeapDumpOnOutOfMemoryError" ${ELASTIC_SEARCH_JAVA_CONF_PATH}; then
sed "/-XX:+HeapDumpOnOutOfMemoryError/d" -i ${ELASTIC_SEARCH_JAVA_CONF_PATH}
fi
if ! grep -q "Dlog4j2.formatMsgNoLookups" ${ELASTIC_SEARCH_JAVA_CONF_PATH}; then
echo "-Dlog4j2.formatMsgNoLookups=true" >> ${ELASTIC_SEARCH_JAVA_CONF_PATH}
else
sed -i "s/Dlog4j2.formatMsgNoLookups.*/Dlog4j2.formatMsgNoLookups=true/" ${ELASTIC_SEARCH_JAVA_CONF_PATH}
fi
if ! grep -q "ingest.geoip.downloader.enabled" ${ELASTIC_SEARCH_CONF_PATH}; then
echo "ingest.geoip.downloader.enabled: false" >> ${ELASTIC_SEARCH_CONF_PATH}
else
sed -i "s/ingest.geoip.downloader.enabled.*/ingest.geoip.downloader.enabled: false/" ${ELASTIC_SEARCH_CONF_PATH}
fi
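# Total physical memory in MB: the first number in the 'free -m' output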
local TOTAL_MEMORY=$(free -m | grep -oP '\d+' | head -n 1);
local MEMORY_REQUIREMENTS=12288; #RAM ~12Gb (4*3Gb)
if [ ${TOTAL_MEMORY} -gt ${MEMORY_REQUIREMENTS} ]; then
if ! grep -q "[-]Xms1g" ${ELASTIC_SEARCH_JAVA_CONF_PATH}; then
echo "-Xms4g" >> ${ELASTIC_SEARCH_JAVA_CONF_PATH}
else
sed -i "s/-Xms1g/-Xms4g/" ${ELASTIC_SEARCH_JAVA_CONF_PATH}
fi
if ! grep -q "[-]Xmx1g" ${ELASTIC_SEARCH_JAVA_CONF_PATH}; then
echo "-Xmx4g" >> ${ELASTIC_SEARCH_JAVA_CONF_PATH}
else
sed -i "s/-Xmx1g/-Xmx4g/" ${ELASTIC_SEARCH_JAVA_CONF_PATH}
fi
fi
if [ -d /etc/elasticsearch/ ]; then
chmod g+ws /etc/elasticsearch/
fi
}
setup_elasticsearch() {
echo -n "Configuring elasticsearch... "
#Save elasticsearch parameters in .json
[[ $1 == "EXTERNAL_ELASTIC_SERVER" ]] && local EXTERNAL_ELASTIC_FLAG="rewrite"
save_undefined_param "${APP_DIR}/elastic.${ENVIRONMENT}.json" "elastic.Scheme" "${ELK_SHEME}" "$EXTERNAL_ELASTIC_FLAG"
save_undefined_param "${APP_DIR}/elastic.${ENVIRONMENT}.json" "elastic.Host" "${ELK_HOST}" "$EXTERNAL_ELASTIC_FLAG"
save_undefined_param "${APP_DIR}/elastic.${ENVIRONMENT}.json" "elastic.Port" "${ELK_PORT}" "$EXTERNAL_ELASTIC_FLAG"
save_undefined_param "${APP_DIR}/elastic.${ENVIRONMENT}.json" "elastic.Threads" "1" "$EXTERNAL_ELASTIC_FLAG"
if [ $1 == "LOCAL_ELASTIC_SERVER" ]; then
change_elasticsearch_config
systemctl enable elasticsearch >/dev/null 2>&1
systemctl restart elasticsearch
fi
echo "OK"
}
setup_redis() {
echo -n "Configuring redis... "
$JSON $APP_DIR/redis.$ENVIRONMENT.json -e "this.Redis={'Hosts': [ { Host: \"${REDIS_HOST}\", Port: \"${REDIS_PORT}\" } ] }" >/dev/null 2>&1
sed -i "s~\(redis_host =\).*~\1 \"$REDIS_HOST\"~" "${OPENRESTY_DIR}/conf.d/${PACKAGE_SYSNAME}.conf"
sed -i "s~\(redis_port =\).*~\1 $REDIS_PORT~" "${OPENRESTY_DIR}/conf.d/${PACKAGE_SYSNAME}.conf"
if [ $1 == "LOCAL_REDIS_SERVER" ]; then
if [ -f "/etc/redis/redis.conf" ]; then
REDIS_CONF="/etc/redis/redis.conf"
elif [ -f "/etc/redis.conf" ]; then
REDIS_CONF="/etc/redis.conf"
fi
sed "s_\(^bind\).*_\1 ${REDIS_HOST}_" -i ${REDIS_CONF}
sed -r "/^save\s[0-9]+/d" -i ${REDIS_CONF}
systemctl enable $REDIS_PACKAGE >/dev/null 2>&1
systemctl restart $REDIS_PACKAGE
fi
echo "OK"
}
setup_rabbitmq() {
echo -n "Configuring rabbitmq... "
[[ $1 == "EXTERNAL_RABBITMQ_SERVER" ]] && local EXTERNAL_RABBITMQ_FLAG="rewrite"
save_undefined_param "${APP_DIR}/rabbitmq.${ENVIRONMENT}.json" "RabbitMQ.Hostname" "${RABBITMQ_HOST}" "$EXTERNAL_RABBITMQ_FLAG"
save_undefined_param "${APP_DIR}/rabbitmq.${ENVIRONMENT}.json" "RabbitMQ.UserName" "${RABBITMQ_USER}" "$EXTERNAL_RABBITMQ_FLAG"
save_undefined_param "${APP_DIR}/rabbitmq.${ENVIRONMENT}.json" "RabbitMQ.Password" "${RABBITMQ_PASSWORD}" "$EXTERNAL_RABBITMQ_FLAG"
save_undefined_param "${APP_DIR}/rabbitmq.${ENVIRONMENT}.json" "RabbitMQ.Port" "${RABBITMQ_PORT}" "$EXTERNAL_RABBITMQ_FLAG"
save_undefined_param "${APP_DIR}/rabbitmq.${ENVIRONMENT}.json" "RabbitMQ.VirtualHost" "/"
if [ $1 == "LOCAL_RABBITMQ_SERVER" ]; then
systemctl enable rabbitmq-server >/dev/null 2>&1
systemctl restart rabbitmq-server
fi
echo "OK"
}
product_configuration(){
echo -n "Configuring ${PRODUCT}... "
#Creating environment configuration files
environmentFiles=("appsettings.$ENVIRONMENT.json" "apisystem.$ENVIRONMENT.json" "elastic.$ENVIRONMENT.json" "rabbitmq.$ENVIRONMENT.json" "redis.$ENVIRONMENT.json")
for i in "${!environmentFiles[@]}"; do
if [ ! -e "$APP_DIR/${environmentFiles[$i]}" ]; then
echo "{}" > "$APP_DIR/${environmentFiles[$i]}"
chmod o-rwx "$APP_DIR/${environmentFiles[$i]}"
fi
done
set_core_machinekey
echo "OK"
}
# Checks whether an external service is reachable. Returns 0 when the connection is established
check_connection_external_services() {
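# Bash opens a TCP connection through the /dev/tcp pseudo-device; the descriptor is closed again immediately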
exec {FD}<> /dev/tcp/$1/$2 && exec {FD}>&-
HOST_RESPONSE=$?
if [[ $HOST_RESPONSE -ne 0 ]]; then
echo -e "$3 external server is not responding: $1:$2"
fi
return $HOST_RESPONSE
}
if command -v yum >/dev/null 2>&1; then
DIST="RedHat"
PACKAGE_MANAGER="rpm -q"
MYSQL_PACKAGE="mysqld"
REDIS_PACKAGE="redis"
SYSTEMD_DIR="/usr/lib/systemd/system"
elif command -v apt >/dev/null 2>&1; then
DIST="Debian"
PACKAGE_MANAGER="dpkg -s"
MYSQL_PACKAGE="mysql"
REDIS_PACKAGE="redis-server"
SYSTEMD_DIR="$(dirname $(dpkg-query -L ${PRODUCT}-api | grep systemd/system/))"
fi
install_json
product_configuration
if $PACKAGE_MANAGER mysql-client >/dev/null 2>&1 || $PACKAGE_MANAGER mysql-community-client >/dev/null 2>&1; then
input_db_params
establish_mysql_conn || exit $?
fi
if [[ ! -z $DOCUMENT_SERVER_URL_EXTERNAL ]]; then
parse_external_docs_url "$DOCUMENT_SERVER_URL_EXTERNAL"
check_connection_external_services "$DOCUMENT_SERVER_HOST" "$DOCUMENT_SERVER_PORT" "${PACKAGE_SYSNAME^^} Docs"
setup_docs "EXTERNAL_DOCS_SERVER"
elif $PACKAGE_MANAGER ${PACKAGE_SYSNAME}-documentserver >/dev/null 2>&1 || $PACKAGE_MANAGER ${PACKAGE_SYSNAME}-documentserver-de >/dev/null 2>&1 || $PACKAGE_MANAGER ${PACKAGE_SYSNAME}-documentserver-ee >/dev/null 2>&1; then
setup_docs "LOCAL_DOCS_SERVER"
setup_enterprise
fi
if $PACKAGE_MANAGER openresty >/dev/null 2>&1; then
setup_openresty
fi
if [[ ! -z $EXTERNAL_ELK_FLAG ]]; then
check_connection_external_services "$ELK_HOST" "$ELK_PORT" "Elasticsearch"
setup_elasticsearch "EXTERNAL_ELASTIC_SERVER"
elif $PACKAGE_MANAGER elasticsearch >/dev/null 2>&1; then
setup_elasticsearch "LOCAL_ELASTIC_SERVER"
fi
if [[ ! -z $EXTERNAL_REDIS_FLAG ]]; then
check_connection_external_services "$REDIS_HOST" "$REDIS_PORT" "Redis"
setup_redis "EXTERNAL_REDIS_SERVER"
elif $PACKAGE_MANAGER $REDIS_PACKAGE >/dev/null 2>&1; then
setup_redis "LOCAL_REDIS_SERVER"
fi
if [[ ! -z $EXTERNAL_RABBITMQ_FLAG ]]; then
check_connection_external_services "$RABBITMQ_HOST" "$RABBITMQ_PORT" "RabbitMQ"
setup_rabbitmq "EXTERNAL_RABBITMQ_SERVER"
elif $PACKAGE_MANAGER rabbitmq-server >/dev/null 2>&1; then
setup_rabbitmq "LOCAL_RABBITMQ_SERVER"
fi
restart_services


@ -1,97 +0,0 @@
#!/bin/bash
set -e
PRODUCT="docspace"
DIR="/usr/bin"
LETSENCRYPT="/etc/letsencrypt/live";
OPENRESTY="/etc/openresty/conf.d"
DHPARAM_FILE="/etc/ssl/certs/dhparam.pem"
WEBROOT_PATH="/var/www/${PRODUCT}"
if [ "$#" -ge "2" ]; then
if [ "$1" != "-f" ]; then
MAIL=$1
DOMAIN=$2
LETSENCRYPT_ENABLE="true"
# Install certbot if not already installed
if ! type "certbot" &> /dev/null; then
if type "apt-get" &> /dev/null; then
apt-get -y update -qq
apt-get -y -q install certbot
elif type "yum" &> /dev/null; then
yum -y install certbot
fi
fi
echo "Generating Let's Encrypt SSL Certificates..."
# Request and generate Let's Encrypt SSL certificate
echo certbot certonly --expand --webroot -w ${WEBROOT_PATH} --cert-name ${PRODUCT} --noninteractive --agree-tos --email ${MAIL} -d ${DOMAIN} > /var/log/le-start.log
certbot certonly --expand --webroot -w ${WEBROOT_PATH} --cert-name ${PRODUCT} --noninteractive --agree-tos --email ${MAIL} -d ${DOMAIN} > /var/log/le-new.log
else
echo "Using specified files to configure SSL..."
CERTIFICATE_FILE=$2
PRIVATEKEY_FILE=$3
fi
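# Generate Diffie-Hellman parameters once if they are missing (2048-bit generation can take a while)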
[[ ! -f "${DHPARAM_FILE}" ]] && openssl dhparam -out ${DHPARAM_FILE} 2048
CERTIFICATE_FILE="${CERTIFICATE_FILE:-"${LETSENCRYPT}/${PRODUCT}/fullchain.pem"}"
PRIVATEKEY_FILE="${PRIVATEKEY_FILE:-"${LETSENCRYPT}/${PRODUCT}/privkey.pem"}"
if [ -f "${CERTIFICATE_FILE}" -a -f ${PRIVATEKEY_FILE} ]; then
if [ -f "${OPENRESTY}/onlyoffice-proxy-ssl.conf.template" ]; then
cp -f ${OPENRESTY}/onlyoffice-proxy-ssl.conf.template ${OPENRESTY}/onlyoffice-proxy.conf
ENVIRONMENT=$(grep -oP 'ENVIRONMENT=\K.*' $(dirname $(dpkg-query -L ${PRODUCT}-api | grep systemd/system/))/${PRODUCT}-api.service)
sed -i "s/\(\"portal\":\).*/\1 \"https:\/\/${DOMAIN:-$(hostname --fqdn)}\"/" /etc/onlyoffice/docspace/appsettings.$ENVIRONMENT.json
sed -i "s~\(ssl_certificate \).*;~\1${CERTIFICATE_FILE};~g" ${OPENRESTY}/onlyoffice-proxy.conf
sed -i "s~\(ssl_certificate_key \).*;~\1${PRIVATEKEY_FILE};~g" ${OPENRESTY}/onlyoffice-proxy.conf
sed -i "s~\(ssl_dhparam \).*;~\1${DHPARAM_FILE};~g" ${OPENRESTY}/onlyoffice-proxy.conf
if [[ "${LETSENCRYPT_ENABLE}" = "true" ]]; then
# Create and set permissions for ${PRODUCT}-renew-letsencrypt
echo '#!/bin/bash' > ${DIR}/${PRODUCT}-renew-letsencrypt
echo "certbot renew >> /var/log/le-renew.log" >> ${DIR}/${PRODUCT}-renew-letsencrypt
if [ $(pgrep -x "systemd" | wc -l) -gt 0 ]; then
echo 'systemctl reload openresty' >> ${DIR}/${PRODUCT}-renew-letsencrypt
else
echo 'service openresty reload' >> ${DIR}/${PRODUCT}-renew-letsencrypt
fi
chmod a+x ${DIR}/${PRODUCT}-renew-letsencrypt
# Add cron job if /etc/cron.d directory exists
if [ -d /etc/cron.d ]; then
echo -e "@weekly root ${DIR}/${PRODUCT}-renew-letsencrypt" | tee /etc/cron.d/${PRODUCT}-letsencrypt
fi
fi
[ $(pgrep -x "systemd" | wc -l) -gt 0 ] && systemctl reload openresty || service openresty reload
echo "OK"
else
echo "Error: proxy configuration file not found." && exit 1
fi
else
echo "Error: certificate or private key file not found." && exit 1
fi
else
echo ""
echo "This script provided to automatically setup SSL Certificates for DocSpace"
echo "Automatically get Let's Encrypt SSL Certificates:"
echo " docspace-ssl-setup EMAIL DOMAIN"
echo " EMAIL Email used for registration and recovery contact."
echo " Use comma to register multiple emails, ex:"
echo " u1@example.com,u2@example.com."
echo " DOMAIN Domain name to apply"
echo ""
echo "Using your own certificates via the -f parameter:"
echo " docspace-ssl-setup -f CERTIFICATE PRIVATEKEY"
echo " CERTIFICATE Path to the certificate file for the domain."
echo " PRIVATEKEY Path to the private key file for the certificate."
echo ""
fi


@ -1,111 +0,0 @@
#!/bin/bash
set -xe
SRC_PATH="/AppServer"
BUILD_PATH="/publish"
SELF_CONTAINED="false"
ARGS=""
PUBLISH_CNF="Release"
while [ "$1" != "" ]; do
case $1 in
-sp | --srcpath )
if [ "$2" != "" ]; then
SRC_PATH=$2
BUILD_PATH=${SRC_PATH}/publish
shift
fi
;;
-bp | --buildpath )
if [ "$2" != "" ]; then
BUILD_PATH=$2
shift
fi
;;
-sc | --self-contained )
if [ "$2" != "" ]; then
SELF_CONTAINED=$2
shift
fi
;;
-ar | --arguments )
if [ "$2" != "" ]; then
ARGS=$2
shift
fi
;;
-pc | --publish-configuration )
if [ "$2" != "" ]; then
PUBLISH_CNF=$2
shift
fi
;;
-? | -h | --help )
echo " Usage: bash publish-backend.sh [PARAMETER] [[PARAMETER], ...]"
echo " Parameters:"
echo " -sp, --srcpath path to AppServer root directory (by default=/AppServer)"
echo " -bp, --buildpath path where generated output is placed (by default=/publish)"
echo " -sc, --self-contained publish the .NET runtime with your application (by default=false)"
echo " -ar, --arguments additional arguments publish the .NET runtime with your application"
echo " -pc, --publish-configuration dotnet publish configuration Ex. Release/Debug"
echo " -?, -h, --help this help"
echo " Examples"
echo " bash publish-backend.sh -sp /app/AppServer"
exit 0
;;
* )
echo "Unknown parameter $1" 1>&2
exit 1
;;
esac
shift
done
# Array of backend product server names in the products directory
servers_products_name_backend=()
servers_products_name_backend+=(ASC.Files)
servers_products_name_backend+=(ASC.People)
# Publish backend product servers
for i in ${!servers_products_name_backend[@]}; do
echo "== Publish ${servers_products_name_backend[$i]}.csproj project =="
SERVICE_DIR="$(dirname "$(find ${SRC_PATH} -type f -name "${servers_products_name_backend[$i]}".csproj)")"
cd ${SERVICE_DIR}
dotnet publish -c ${PUBLISH_CNF} --self-contained ${SELF_CONTAINED} ${ARGS} -o ${BUILD_PATH}/products/${servers_products_name_backend[$i]}/server/
done
# Array of backend service names
services_name_backend=()
services_name_backend+=(ASC.Data.Backup)
services_name_backend+=(ASC.Files.Service)
services_name_backend+=(ASC.Notify)
services_name_backend+=(ASC.Studio.Notify)
services_name_backend+=(ASC.Web.Api)
services_name_backend+=(ASC.Web.Studio)
services_name_backend+=(ASC.Data.Backup.BackgroundTasks)
services_name_backend+=(ASC.ClearEvents)
services_name_backend+=(ASC.ApiSystem)
services_name_backend+=(ASC.Web.HealthChecks.UI)
# Publish backend services
for i in ${!services_name_backend[@]}; do
echo "== Publish ${services_name_backend[$i]}.csproj project =="
SERVICE_DIR="$(dirname "$(find ${SRC_PATH} -type f -name "${services_name_backend[$i]}".csproj)")"
cd ${SERVICE_DIR}
dotnet publish -c ${PUBLISH_CNF} --self-contained ${SELF_CONTAINED} ${ARGS} -o ${BUILD_PATH}/services/${services_name_backend[$i]}/service/
done
# Array of Node.js backend service names in the common directory
services_name_backend_nodejs=()
services_name_backend_nodejs+=(ASC.Socket.IO)
services_name_backend_nodejs+=(ASC.SsoAuth)
# Publish backend services (Node.js)
for i in ${!services_name_backend_nodejs[@]}; do
echo "== Publish ${services_name_backend_nodejs[$i]} project =="
SERVICE_DIR="$(find ${SRC_PATH} -type d -name ${services_name_backend_nodejs[$i]})"
cd ${SERVICE_DIR}
mkdir -p ${BUILD_PATH}/services/${services_name_backend_nodejs[$i]}/service/ && cp -arfv ./* ${BUILD_PATH}/services/${services_name_backend_nodejs[$i]}/service/
done


@ -1,185 +0,0 @@
#!/bin/bash
set -xe
BASEDIR="$(cd $(dirname $0) && pwd)"
BUILD_PATH="$BASEDIR/modules"
while [ "$1" != "" ]; do
case $1 in
-bp | --buildpath )
if [ "$2" != "" ]; then
BUILD_PATH=$2
shift
fi
;;
-? | -h | --help )
echo " Usage: bash build.sh [PARAMETER] [[PARAMETER], ...]"
echo " Parameters:"
echo " -bp, --buildpath output path"
echo " -?, -h, --help this help"
echo " Examples"
echo " bash build.sh -bp /etc/systemd/system/"
exit 0
;;
* )
echo "Unknown parameter $1" 1>&2
exit 1
;;
esac
shift
done
PRODUCT="docspace"
BASE_DIR="/var/www/${PRODUCT}"
PATH_TO_CONF="/etc/onlyoffice/${PRODUCT}"
STORAGE_ROOT="/var/www/onlyoffice/Data"
LOG_DIR="/var/log/onlyoffice/${PRODUCT}"
DOTNET_RUN="/usr/bin/dotnet"
NODE_RUN="/usr/bin/node"
APP_URLS="http://0.0.0.0"
ENVIRONMENT=" --ENVIRONMENT=production"
CORE=" --core:products:folder=${BASE_DIR}/products --core:products:subfolder=server"
SERVICE_NAME=(
api
api-system
socket
studio-notify
notify
people-server
files
files-services
studio
backup
ssoauth
clear-events
backup-background
doceditor
migration-runner
login
healthchecks
)
reassign_values (){
case $1 in
api )
SERVICE_PORT="5000"
WORK_DIR="${BASE_DIR}/studio/ASC.Web.Api/"
EXEC_FILE="ASC.Web.Api.dll"
;;
api-system )
SERVICE_PORT="5010"
WORK_DIR="${BASE_DIR}/services/ASC.ApiSystem/"
EXEC_FILE="ASC.ApiSystem.dll"
;;
socket )
SERVICE_PORT="9899"
WORK_DIR="${BASE_DIR}/services/ASC.Socket.IO/"
EXEC_FILE="server.js"
;;
studio-notify )
SERVICE_PORT="5006"
WORK_DIR="${BASE_DIR}/services/ASC.Studio.Notify/"
EXEC_FILE="ASC.Studio.Notify.dll"
;;
notify )
SERVICE_PORT="5005"
WORK_DIR="${BASE_DIR}/services/ASC.Notify/"
EXEC_FILE="ASC.Notify.dll"
CORE_EVENT_BUS=" --core:eventBus:subscriptionClientName=asc_event_bus_notify_queue"
;;
people-server )
SERVICE_PORT="5004"
WORK_DIR="${BASE_DIR}/products/ASC.People/server/"
EXEC_FILE="ASC.People.dll"
;;
files )
SERVICE_PORT="5007"
WORK_DIR="${BASE_DIR}/products/ASC.Files/server/"
EXEC_FILE="ASC.Files.dll"
;;
files-services )
SERVICE_PORT="5009"
WORK_DIR="${BASE_DIR}/products/ASC.Files/service/"
EXEC_FILE="ASC.Files.Service.dll"
CORE_EVENT_BUS=" --core:eventBus:subscriptionClientName=asc_event_bus_files_service_queue"
;;
studio )
SERVICE_PORT="5003"
WORK_DIR="${BASE_DIR}/studio/ASC.Web.Studio/"
EXEC_FILE="ASC.Web.Studio.dll"
;;
backup )
SERVICE_PORT="5012"
WORK_DIR="${BASE_DIR}/services/ASC.Data.Backup/"
EXEC_FILE="ASC.Data.Backup.dll"
;;
ssoauth )
SERVICE_PORT="9834"
WORK_DIR="${BASE_DIR}/services/ASC.SsoAuth/"
EXEC_FILE="app.js"
;;
clear-events )
SERVICE_PORT="5027"
WORK_DIR="${BASE_DIR}/services/ASC.ClearEvents/"
EXEC_FILE="ASC.ClearEvents.dll"
;;
backup-background )
SERVICE_PORT="5032"
WORK_DIR="${BASE_DIR}/services/ASC.Data.Backup.BackgroundTasks/"
EXEC_FILE="ASC.Data.Backup.BackgroundTasks.dll"
CORE_EVENT_BUS=" --core:eventBus:subscriptionClientName=asc_event_bus_backup_queue"
;;
doceditor )
SERVICE_PORT="5013"
WORK_DIR="${BASE_DIR}/products/ASC.Files/editor/"
EXEC_FILE="server.js"
;;
migration-runner )
WORK_DIR="${BASE_DIR}/services/ASC.Migration.Runner/"
EXEC_FILE="ASC.Migration.Runner.dll"
;;
login )
SERVICE_PORT="5011"
WORK_DIR="${BASE_DIR}/products/ASC.Login/login/"
EXEC_FILE="server.js"
;;
healthchecks )
SERVICE_PORT="5033"
WORK_DIR="${BASE_DIR}/services/ASC.Web.HealthChecks.UI/"
EXEC_FILE="ASC.Web.HealthChecks.UI.dll"
;;
esac
SERVICE_NAME="$1"
if [[ "${EXEC_FILE}" == *".js" ]]; then
SERVICE_TYPE="simple"
RESTART="always"
EXEC_START="${NODE_RUN} ${WORK_DIR}${EXEC_FILE} --app.port=${SERVICE_PORT} --app.appsettings=${PATH_TO_CONF} --app.environment=${ENVIRONMENT}"
elif [[ "${SERVICE_NAME}" = "migration-runner" ]]; then
SERVICE_TYPE="simple"
RESTART="no"
EXEC_START="${DOTNET_RUN} ${WORK_DIR}${EXEC_FILE} standalone=true"
else
SERVICE_TYPE="notify"
RESTART="always"
EXEC_START="${DOTNET_RUN} ${WORK_DIR}${EXEC_FILE} --urls=${APP_URLS}:${SERVICE_PORT} --pathToConf=${PATH_TO_CONF} \
--\$STORAGE_ROOT=${STORAGE_ROOT} --log:dir=${LOG_DIR} --log:name=${SERVICE_NAME}${CORE}${CORE_EVENT_BUS}${ENVIRONMENT}"
unset CORE_EVENT_BUS
fi
}
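# Substitute the ${...} placeholders in the copied unit template with the values set by reassign_values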
write_to_file () {
sed -i -e 's#${SERVICE_NAME}#'$SERVICE_NAME'#g' -e 's#${WORK_DIR}#'$WORK_DIR'#g' -e "s#\${RESTART}#$RESTART#g" \
-e "s#\${EXEC_START}#$EXEC_START#g" -e "s#\${SERVICE_TYPE}#$SERVICE_TYPE#g" $BUILD_PATH/${PRODUCT}-${SERVICE_NAME[$i]}.service
}
mkdir -p $BUILD_PATH
for i in ${!SERVICE_NAME[@]}; do
cp $BASEDIR/service $BUILD_PATH/${PRODUCT}-${SERVICE_NAME[$i]}.service
reassign_values "${SERVICE_NAME[$i]}"
write_to_file $i
done


@ -1,17 +0,0 @@
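# Unit file template: the ${...} placeholders are filled in by the systemd build.sh (write_to_file) at package build time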
[Unit]
Description=DocSpace-${SERVICE_NAME}
After=network.target
[Service]
Type=${SERVICE_TYPE}
User=onlyoffice
Group=onlyoffice
WorkingDirectory=${WORK_DIR}
ExecStart=${EXEC_START}
TimeoutSec=600
Restart=${RESTART}
PrivateTmp=false
[Install]
WantedBy=multi-user.target


@ -1,5 +0,0 @@
{{product}} ({{package_header_tag_version}}) unstable; urgency=medium
* Initial Release.
-- Ascensio System SIA <support@onlyoffice.com> Fri, 19 Mar 2021 18:39:30 +0300


@ -1 +0,0 @@
10


@ -1,34 +0,0 @@
#!/bin/sh -e
set -e
. /usr/share/debconf/confmodule
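# db_input returns non-zero when a question is skipped at the current priority, hence the '|| true'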
db_input low {{product}}/environment || true
db_input low {{product}}/host || true
db_input low {{product}}/port || true
db_input low {{product}}/elasticsearch-sheme || true
db_input low {{product}}/elasticsearch-host || true
db_input low {{product}}/elasticsearch-port || true
db_input low {{product}}/redis-host || true
db_input low {{product}}/redis-port || true
db_input low {{product}}/rabbitmq-host || true
db_input low {{product}}/rabbitmq-user || true
db_input low {{product}}/rabbitmq-port || true
db_input low {{product}}/rabbitmq-password || true
db_input low {{product}}/ds-url || true
db_input low {{product}}/jwt-secret || true
db_input low {{product}}/jwt-header || true
db_input low {{product}}/db-host || true
db_input high {{product}}/db-name || true
db_input high {{product}}/db-user || true
db_go
db_input critical {{product}}/db-pwd || true
db_go


@ -1,211 +0,0 @@
Source: {{product}}
Section: web
Priority: optional
Maintainer: Ascensio System SIA <support@onlyoffice.com>
Build-Depends: debhelper (>= 10), po-debconf, nodejs (>=18), dotnet-sdk-7.0, yarn
Standards-Version: {{package_header_tag_version}}
Homepage: https://www.onlyoffice.com/
Architecture: all
Multi-Arch: foreign
Package: {{product}}
Architecture: all
Multi-Arch: foreign
Depends: debconf,
${misc:Depends}, ${shlibs:Depends},
{{product}}-api (= {{package_header_tag_version}}),
{{product}}-api-system (= {{package_header_tag_version}}),
{{product}}-backup (= {{package_header_tag_version}}),
{{product}}-backup-background (= {{package_header_tag_version}}),
{{product}}-clear-events (= {{package_header_tag_version}}),
{{product}}-doceditor (= {{package_header_tag_version}}),
{{product}}-files (= {{package_header_tag_version}}),
{{product}}-files-services (= {{package_header_tag_version}}),
{{product}}-healthchecks (= {{package_header_tag_version}}),
{{product}}-login (= {{package_header_tag_version}}),
{{product}}-migration-runner (= {{package_header_tag_version}}),
{{product}}-notify (= {{package_header_tag_version}}),
{{product}}-people-server (= {{package_header_tag_version}}),
{{product}}-proxy (= {{package_header_tag_version}}),
{{product}}-radicale (= {{package_header_tag_version}}),
{{product}}-socket (= {{package_header_tag_version}}),
{{product}}-ssoauth (= {{package_header_tag_version}}),
{{product}}-studio (= {{package_header_tag_version}}),
{{product}}-studio-notify (= {{package_header_tag_version}})
Description: {{product}}
ONLYOFFICE {{product}} is a new way to collaborate on documents with teams,
clients, partners, etc., based on the concept of rooms - special spaces with
predefined permissions. Dependency package.
Package: {{product}}-common
Architecture: all
Multi-Arch: foreign
Depends: adduser, logrotate, ${misc:Depends}, ${shlibs:Depends}
Recommends: default-mysql-client
Description: {{product}}-common
A package containing configs and scripts
Package: {{product}}-backup
Architecture: all
Multi-Arch: foreign
Depends: {{product}}-common (= {{package_header_tag_version}}), dotnet-sdk-7.0, ${misc:Depends}, ${shlibs:Depends}
Description: {{product}}-backup
The service which handles API requests related to backup
Package: {{product}}-files
Architecture: all
Multi-Arch: foreign
Depends: {{product}}-common (= {{package_header_tag_version}}), dotnet-sdk-7.0, ${misc:Depends}, ${shlibs:Depends}
Recommends: elasticsearch (= 7.16.3)
Description: {{product}}-files
The service which handles API requests related to
documents and launches the OFormService service
Package: {{product}}-files-services
Architecture: all
Multi-Arch: foreign
Depends: {{product}}-common (= {{package_header_tag_version}}), dotnet-sdk-7.0, ${misc:Depends}, ${shlibs:Depends}
Recommends: ffmpeg, elasticsearch (= 7.16.3)
Description: {{product}}-files-services
The service which launches additional services related to file management:
- ElasticSearchIndexService - indexes documents using elasticsearch;
- FeedAggregatorService - aggregates notifications;
- FeedCleanerService - removes notifications;
- FileConverterService - converts documents;
- ThumbnailBuilderService - generates thumbnails for documents;
- Launcher - removes outdated files from Trash;
Package: {{product}}-notify
Architecture: all
Multi-Arch: foreign
Depends: {{product}}-common (= {{package_header_tag_version}}), dotnet-sdk-7.0, ${misc:Depends}, ${shlibs:Depends}
Recommends: ffmpeg, elasticsearch (= 7.16.3)
Description: {{product}}-notify
The service which launches additional services
related to notifications about DocSpace events:
NotifySenderService which sends messages from the database,
and NotifyCleanerService which removes messages
Package: {{product}}-people-server
Architecture: all
Multi-Arch: foreign
Depends: {{product}}-common (= {{package_header_tag_version}}), dotnet-sdk-7.0, ${misc:Depends}, ${shlibs:Depends}
Description: {{product}}-people-server
The service which handles API requests related to the People module
Package: {{product}}-socket
Architecture: all
Multi-Arch: foreign
Depends: {{product}}-common (= {{package_header_tag_version}}), nodejs (>= 16), ${misc:Depends}, ${shlibs:Depends}
Recommends: redis-server
Description: {{product}}-socket
The service which provides two-way communication between a client and a server
Package: {{product}}-studio-notify
Architecture: all
Multi-Arch: foreign
Depends: {{product}}-common (= {{package_header_tag_version}}), dotnet-sdk-7.0, ${misc:Depends}, ${shlibs:Depends}
Description: {{product}}-studio-notify
The service responsible for creating notifications and
sending them to other services, for example, TelegramService and NotifyService
Package: {{product}}-api
Architecture: all
Multi-Arch: foreign
Depends: {{product}}-common (= {{package_header_tag_version}}), dotnet-sdk-7.0, ${misc:Depends}, ${shlibs:Depends}
Recommends: rabbitmq-server, apache-activemq, redis-server
Description: {{product}}-api
The service which is used for working with a certain portal. This service
handles API requests not related to backup, documents, and the People
module, for example, requests related to settings, audit, authentication, etc.
Package: {{product}}-api-system
Architecture: all
Multi-Arch: foreign
Depends: {{product}}-common (= {{package_header_tag_version}}), dotnet-sdk-7.0, ${misc:Depends}, ${shlibs:Depends}
Description: {{product}}-api-system
The service which is used for working with portals (creating, removing, etc.)
Package: {{product}}-studio
Architecture: all
Multi-Arch: foreign
Depends: {{product}}-common (= {{package_header_tag_version}}), dotnet-sdk-7.0, ${misc:Depends}, ${shlibs:Depends}
Description: {{product}}-studio
The service which processes storage handlers and authorization pages
Package: {{product}}-proxy
Architecture: all
Multi-Arch: foreign
Depends: {{product}}-common (= {{package_header_tag_version}}), openresty, ${misc:Depends}, ${shlibs:Depends}
Description: {{product}}-proxy
The service which is used as a web server and reverse proxy,
it receives and handles requests, transmits them to other services,
receives a response from them and returns it to the client
Package: {{product}}-ssoauth
Architecture: all
Multi-Arch: foreign
Depends: {{product}}-common (= {{package_header_tag_version}}), nodejs (>= 16), ${misc:Depends}, ${shlibs:Depends}
Description: {{product}}-ssoauth
The service responsible for enabling and configuring
SAML-based single sign-on (SSO) authentication to provide a quicker,
easier and more secure way for users to access DocSpace
Package: {{product}}-backup-background
Architecture: all
Multi-Arch: foreign
Depends: {{product}}-common (= {{package_header_tag_version}}), dotnet-sdk-7.0, ${misc:Depends}, ${shlibs:Depends}
Description: {{product}}-backup-background
The service which launches additional services related to backup creation:
- BackupWorkerService - launches WorkerService which runs backup/restore, etc;
- BackupListenerService - waits for a signal to delete backups;
- BackupCleanerTempFileService - removes temporary backup files;
- BackupCleanerService - removes outdated backup files;
- BackupSchedulerService - runs backup according to a schedule;
Package: {{product}}-clear-events
Architecture: all
Multi-Arch: foreign
Depends: {{product}}-common (= {{package_header_tag_version}}), dotnet-sdk-7.0, ${misc:Depends}, ${shlibs:Depends}
Description: {{product}}-clear-events
The service responsible for clearing the login_events and audit_events tables
by LoginHistoryLifeTime and AuditTrailLifeTime to log out users after a timeout
Package: {{product}}-migration-runner
Architecture: all
Multi-Arch: foreign
Depends: {{product}}-common (= {{package_header_tag_version}}), dotnet-sdk-7.0, ${misc:Depends}, ${shlibs:Depends}
Description: {{product}}-migration-runner
The service responsible for database creation.
A database connection is passed to the service, which then
creates the tables and populates them with values
Package: {{product}}-radicale
Architecture: all
Multi-Arch: foreign
Depends: {{product}}-common (= {{package_header_tag_version}}), python3-pip, python3-requests, python3-setuptools, ${misc:Depends}, ${shlibs:Depends}
Description: {{product}}-radicale
Radicale is a server designed to support the CalDav and CardDav protocols.
It operates either as a standalone package using its own internal http server
or can be integrated with an existing webserver
Package: {{product}}-doceditor
Architecture: all
Multi-Arch: foreign
Depends: {{product}}-common (= {{package_header_tag_version}}), nodejs (>= 16), ${misc:Depends}, ${shlibs:Depends}
Description: {{product}}-doceditor
The service which allows interaction with documentserver
Package: {{product}}-login
Architecture: all
Multi-Arch: foreign
Depends: {{product}}-common (= {{package_header_tag_version}}), nodejs (>= 16), ${misc:Depends}, ${shlibs:Depends}
Description: {{product}}-login
The service which is used for logging in users and displaying the wizard
Package: {{product}}-healthchecks
Architecture: all
Multi-Arch: foreign
Depends: {{product}}-common (= {{package_header_tag_version}}), dotnet-sdk-7.0, ${misc:Depends}, ${shlibs:Depends}
Description: {{product}}-healthchecks
The service which displays the status of launched services


@ -1,33 +0,0 @@
Format: http://www.debian.org/doc/packaging-manuals/copyright-format/1.0/
Upstream-Name: ONLYOFFICE-{{product}}
Source: http://onlyoffice.com
Files: *
Copyright: 2023, Ascensio System SIA <support@onlyoffice.com>
License: AGPL-3
This program is a free software product. You can redistribute it and/or
modify it under the terms of the GNU Affero General Public License (AGPL)
version 3 as published by the Free Software Foundation. In accordance with
Section 7(a) of the GNU AGPL its Section 15 shall be amended to the effect
that Ascensio System SIA expressly excludes the warranty of non-infringement
of any third-party rights.
.
This program is distributed WITHOUT ANY WARRANTY; without even the implied
warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. For
details, see the GNU AGPL at: http://www.gnu.org/licenses/agpl-3.0.html
.
You can contact Ascensio System SIA at Lubanas st. 125a-25, Riga, Latvia,
EU, LV-1021.
.
The interactive user interfaces in modified source and object code versions
of the Program must display Appropriate Legal Notices, as required under
Section 5 of the GNU AGPL version 3.
.
Pursuant to Section 7(b) of the License you must retain the original Product
logo when distributing the program. Pursuant to Section 7(e) we decline to
grant you any rights under trademark law for use of our trademarks.
.
All the Product's GUI elements, including illustrations and icon sets, as
well as technical writing content are licensed under the terms of the
Creative Commons Attribution-ShareAlike 4.0 International. See the License
terms at http://creativecommons.org/licenses/by-sa/4.0/legalcode


@ -1,3 +0,0 @@
# List of source files containing translatable strings.
[type: gettext/rfc822deb] {{product}}.templates


@ -1 +0,0 @@
# List of source files containing translatable strings that should be ignored.


@ -1,18 +0,0 @@
msgid ""
msgstr ""
"Project-Id-Version: {{product}}\n"
"Report-Msgid-Bugs-To: support@onlyoffice.com\n"
"POT-Creation-Date: 2023-01-24 18:30+0300\n"
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
"Last-Translator: Ascensio System SIA <support@onlyoffice.com>\n"
"Language-Team: Ascensio System SIA <support@onlyoffice.com>\n"
"Language: ru_RU\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
#. Type: string
#. Description
#: ../{{product}}.templates:1001
msgid "Select environment for {{product}} configuration:"
msgstr "Выберите наименование среды для конфигурации {{product}}:"


@ -1,24 +0,0 @@
# SOME DESCRIPTIVE TITLE.
# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER
# This file is distributed under the same license as the {{product}} package.
# FIRST AUTHOR <EMAIL@ADDRESS>, YEAR.
#
#, fuzzy
msgid ""
msgstr ""
"Project-Id-Version: {{product}}\n"
"Report-Msgid-Bugs-To: support@onlyoffice.com\n"
"POT-Creation-Date: 2023-01-24 18:26+0300\n"
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language-Team: LANGUAGE <LL@li.org>\n"
"Language: \n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=CHARSET\n"
"Content-Transfer-Encoding: 8bit\n"
#. Type: string
#. Description
#: ../{{product}}.templates:1001
msgid "Select environment for {{product}} configuration:"
msgstr ""


@ -1,109 +0,0 @@
#!/bin/bash
set -e
. /usr/share/debconf/confmodule
case "$1" in
configure)
db_get {{product}}/environment || true
ENVIRONMENT="$RET"
db_get {{product}}/host || true
APP_HOST="$RET"
db_get {{product}}/port || true
APP_PORT="$RET"
db_get {{product}}/machinekey || true
CORE_MACHINEKEY="$RET"
args+=(-e "$ENVIRONMENT" -ash "$APP_HOST" -asp "$APP_PORT" );
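# Generate a random 12-character machine key if none was provided and store it back in debconf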
[ -z "$CORE_MACHINEKEY" ] && CORE_MACHINEKEY=$(cat /dev/urandom | tr -dc A-Za-z0-9 | head -c 12) && db_set {{product}}/machinekey $CORE_MACHINEKEY
args+=(-mk "$CORE_MACHINEKEY" )
db_get {{product}}/db-host || true
DB_HOST="$RET"
db_get {{product}}/db-name || true
DB_NAME="$RET"
db_get {{product}}/db-user || true
DB_USER="$RET"
db_get {{product}}/db-pwd || true
DB_PWD="$RET"
args+=(-mysqlh "$DB_HOST" -mysqld "$DB_NAME" -mysqlu "$DB_USER" );
[ -n "$DB_PWD" ] && args+=(-mysqlp "$DB_PWD" );
command -v mysql >/dev/null 2>&1 || { echo "MySQL client not found"; exit 1; }
MYSQL="mysql -h$DB_HOST -u$DB_USER"
if [ -n "$DB_PWD" ]; then
MYSQL="$MYSQL -p$DB_PWD"
fi
$MYSQL -e ";" >/dev/null 2>&1
ERRCODE=$?
if [ $ERRCODE -ne 0 ]; then
systemctl mysql start >/dev/null 2>&1
$MYSQL -e ";" >/dev/null 2>&1 || { echo "Trying to establish MySQL connection... FAILURE"; exit 1; }
fi
db_get {{product}}/redis-host || true
REDIS_HOST="$RET"
db_get {{product}}/redis-port || true
REDIS_PORT="$RET"
[ -n "$REDIS_HOST" ] && args+=(-rdh "$REDIS_HOST" -rdp "$REDIS_PORT" );
db_get {{product}}/rabbitmq-host || true
RABBITMQ_HOST="$RET"
db_get {{product}}/rabbitmq-user || true
RABBITMQ_USER="$RET"
db_get {{product}}/rabbitmq-port || true
RABBITMQ_PORT="$RET"
db_get {{product}}/rabbitmq-password || true
RABBITMQ_PASSWORD="$RET"
[ -n "$RABBITMQ_HOST" ] && args+=(-rbh "$RABBITMQ_HOST" -rbp "$RABBITMQ_PORT" -rbu "$RABBITMQ_USER" -rbpw "$RABBITMQ_PASSWORD" );
db_get {{product}}/elasticsearch-sheme || true
ELK_SHEME="$RET"
db_get {{product}}/elasticsearch-host || true
ELK_HOST="$RET"
db_get {{product}}/elasticsearch-port || true
ELK_PORT="$RET"
[ -n "$ELK_HOST" ] && args+=(-ess "$ELK_SHEME" -esh "$ELK_HOST" -esp "$ELK_PORT" );
db_get {{product}}/ds-url || true
DOCUMENT_SERVER_URL_EXTERNAL="$RET"
db_get {{product}}/jwt-secret || true
DOCUMENT_SERVER_JWT_SECRET="$RET"
db_get {{product}}/jwt-header || true
DOCUMENT_SERVER_JWT_HEADER="$RET"
if [ -n "$DOCUMENT_SERVER_URL_EXTERNAL" ]; then
if [ -n "$DOCUMENT_SERVER_JWT_SECRET" ] && [ -n "$DOCUMENT_SERVER_JWT_HEADER" ]; then
args+=(-docsurl "$DOCUMENT_SERVER_URL_EXTERNAL" -js "$DOCUMENT_SERVER_JWT_SECRET" -jh "$DOCUMENT_SERVER_JWT_HEADER")
else
echo "You should provide JWT-secret and JWT-header along with ONLYOFFICE Docs URL. "
fi
fi
bash /usr/bin/{{product}}-configuration "${args[@]}"
exit 0
;;
abort-upgrade|abort-remove|abort-deconfigure)
;;
*)
echo "postinst called with unknown argument \`$1'" >&2
exit 1
;;
esac
# dh_installdeb will replace this with shell code automatically
# generated by other debhelper scripts.
#DEBHELPER#
exit 0


@ -1,45 +0,0 @@
#!/bin/sh
# postrm script for {{package_sysname}}
#
# see: dh_installdeb(1)
set -e
# summary of how this script can be called:
# * <postrm> `remove'
# * <postrm> `purge'
# * <old-postrm> `upgrade' <new-version>
# * <new-postrm> `failed-upgrade' <old-version>
# * <new-postrm> `abort-install'
# * <new-postrm> `abort-install' <old-version>
# * <new-postrm> `abort-upgrade' <old-version>
# * <disappearer's-postrm> `disappear' <overwriter>
# <overwriter-version>
# for details, see http://www.debian.org/doc/debian-policy/ or
# the debian-policy package
if [ "$1" = purge ] && [ -e /usr/share/debconf/confmodule ]; then
. /usr/share/debconf/confmodule
fi
case "$1" in
purge)
rm -rf /var/www/{{product}}
rm -rf /etc/onlyoffice/{{product}}
;;
remove|upgrade|failed-upgrade|abort-install|abort-upgrade|disappear)
;;
*)
echo "postrm called with unknown argument \`$1'" >&2
exit 1
;;
esac
# dh_installdeb will replace this with shell code automatically
# generated by other debhelper scripts.
#DEBHELPER#
exit 0


@ -1 +0,0 @@
../../../publish/services/ASC.ApiSystem/service/* var/www/{{product}}/services/ASC.ApiSystem


@ -1 +0,0 @@
../../../publish/services/ASC.Web.Api/service/* var/www/{{product}}/studio/ASC.Web.Api


@ -1 +0,0 @@
../../../publish/services/ASC.Data.Backup.BackgroundTasks/service/* var/www/{{product}}/services/ASC.Data.Backup.BackgroundTasks


@ -1 +0,0 @@
../../../publish/services/ASC.Data.Backup/service/* var/www/{{product}}/services/ASC.Data.Backup


@ -1 +0,0 @@
../../../publish/services/ASC.ClearEvents/service/* var/www/{{product}}/services/ASC.ClearEvents


@ -1,3 +0,0 @@
/var/log/onlyoffice/{{product}}
/etc/onlyoffice/{{product}}/.private
/var/www/onlyoffice/Data


@ -1,4 +0,0 @@
../../../config/*.json etc/onlyoffice/{{product}}
../../../config/*.config etc/onlyoffice/{{product}}
../common/{{product}}-configuration usr/bin
../common/logrotate/{{product}}-common etc/logrotate.d


@ -1,22 +0,0 @@
#!/bin/bash
#
# see: dh_installdeb(1)
set -e
if ! grep -q "^onlyoffice:" /etc/passwd; then
adduser --quiet --home /var/www/{{product}} --system --group onlyoffice
fi
if ! grep -q "^nginx:" /etc/group; then
addgroup --quiet --system nginx
fi
if ! grep -q "^nginx:" /etc/passwd; then
adduser --quiet --group --no-create-home --home /nonexistent --system nginx
usermod -aG nginx nginx
fi
usermod -aG onlyoffice,nginx onlyoffice
#DEBHELPER#


@ -1 +0,0 @@
../../../build/deploy/editor var/www/{{product}}/products/ASC.Files


@ -1 +0,0 @@
../../../publish/services/ASC.Files.Service/service var/www/{{product}}/products/ASC.Files


@ -1,2 +0,0 @@
../../../publish/products/ASC.Files/server var/www/{{product}}/products/ASC.Files
../../../products/ASC.Files/Server/DocStore var/www/{{product}}/products/ASC.Files/server


@ -1 +0,0 @@
../../../publish/services/ASC.Web.HealthChecks.UI/service/* var/www/{{product}}/services/ASC.Web.HealthChecks.UI


@ -1 +0,0 @@
../../../build/deploy/login/* var/www/{{product}}/products/ASC.Login/login


@ -1 +0,0 @@
../../../ASC.Migration.Runner/service/* var/www/{{product}}/services/ASC.Migration.Runner


@ -1 +0,0 @@
../../../publish/services/ASC.Notify/service/* var/www/{{product}}/services/ASC.Notify


@ -1 +0,0 @@
../../../publish/products/ASC.People/server var/www/{{product}}/products/ASC.People


@ -1,9 +0,0 @@
## COPY PUBLIC ##
../../../build/install/common/{{product}}-ssl-setup usr/bin
../../../build/install/docker/config/nginx/templates/*.template etc/onlyoffice/{{product}}/openresty
../../../build/install/docker/config/nginx/onlyoffice* etc/openresty/conf.d
../../../config/nginx/onlyoffice*.conf etc/openresty/conf.d
../../../build/install/docker/config/nginx/letsencrypt* etc/openresty/includes
../../../config/nginx/includes/onlyoffice*.conf etc/openresty/includes
../../../build/deploy/public/* var/www/{{product}}/public
../../../build/deploy/client/* var/www/{{product}}/client

View File

@ -1,10 +0,0 @@
#!/bin/bash
set -e
# (DS v1.1.3) Removing old nginx configs to prevent conflicts before upgrading to OpenResty.
if [ -f /etc/nginx/conf.d/onlyoffice.conf ]; then
rm -rf /etc/nginx/conf.d/onlyoffice*
systemctl reload nginx
fi
exit 0


@ -1 +0,0 @@
../../../build/install/RadicalePlugins/* var/www/{{product}}/Tools/radicale/plugins


@ -1,17 +0,0 @@
#!/bin/bash
#
# see: dh_installdeb(1)
set -e
DIR="/var/www/{{product}}"
[ -f /usr/lib/python3.$(python3 -c 'import sys; print(sys.version_info.minor)')/EXTERNALLY-MANAGED ] && \
rm /usr/lib/python3.$(python3 -c 'import sys; print(sys.version_info.minor)')/EXTERNALLY-MANAGED
python3 -m pip install --upgrade radicale
python3 -m pip install --upgrade ${DIR}/Tools/radicale/plugins/app_auth_plugin/.
python3 -m pip install --upgrade ${DIR}/Tools/radicale/plugins/app_store_plugin/.
python3 -m pip install --upgrade ${DIR}/Tools/radicale/plugins/app_rights_plugin/.
#DEBHELPER#


@ -1 +0,0 @@
../../../publish/services/ASC.Socket.IO/service/* var/www/{{product}}/services/ASC.Socket.IO


@ -1 +0,0 @@
../../../publish/services/ASC.SsoAuth/service/* var/www/{{product}}/services/ASC.SsoAuth


@ -1 +0,0 @@
../../../publish/services/ASC.Studio.Notify/service/* var/www/{{product}}/services/ASC.Studio.Notify


@ -1 +0,0 @@
../../../publish/services/ASC.Web.Studio/service/* var/www/{{product}}/studio/ASC.Web.Studio


@ -1,92 +0,0 @@
Template: {{product}}/environment
Type: string
Default: production
_Description: Select environment for {{product}} configuration:
Template: {{product}}/host
Type: string
Default: localhost
Description: {{product}} host:
Template: {{product}}/port
Type: string
Default: 80
Description: {{product}} listening port:
Template: {{product}}/db-host
Type: string
Default: localhost
Description: Database host:
Template: {{product}}/db-user
Type: string
Default: root
Description: Database user:
Template: {{product}}/db-pwd
Type: password
Description: Database password:
Template: {{product}}/db-name
Type: string
Default: onlyoffice
Description: Database name:
Template: {{product}}/ds-url
Type: string
Description: ONLYOFFICE Docs URL:
Template: {{product}}/elasticsearch-sheme
Type: select
Choices: http, https
Default: http
Description: Elasticsearch protocol:
Template: {{product}}/elasticsearch-host
Type: string
Description: Elasticsearch host:
Template: {{product}}/elasticsearch-port
Type: string
Default: 9200
Description: Elasticsearch port:
Template: {{product}}/redis-host
Type: string
Description: Redis host:
Template: {{product}}/redis-port
Type: string
Default: 6379
Description: Redis port:
Template: {{product}}/rabbitmq-host
Type: string
Description: RabbitMQ host:
Template: {{product}}/rabbitmq-port
Type: string
Default: 5672
Description: RabbitMQ port:
Template: {{product}}/rabbitmq-user
Type: string
Default: guest
Description: RabbitMQ user:
Template: {{product}}/rabbitmq-password
Type: string
Default: guest
Description: RabbitMQ password:
Template: {{product}}/machinekey
Type: string
Description: Enter your own machinekey:
Template: {{product}}/jwt-header
Type: string
Description: Enter JWT header:
Template: {{product}}/jwt-secret
Type: string
Description: Enter JWT secret:


@ -1,80 +0,0 @@
#!/usr/bin/make -f
# -*- makefile -*-
# Uncomment this to turn on verbose mode.
export DH_VERBOSE=1
export DH_OPTIONS=-v
%:
dh $@ --with=systemd
PRODUCT={{product}}
CURRENT_PATH=${CURDIR}
SRC_PATH=$(shell cd ../../../; pwd)
SCRIPT_PATH=build/install/common
override_dh_auto_clean:
@echo "RULES.$@"
dh_testdir
rm -rf ${CURRENT_PATH}/debian/*.service
rm -rf ${CURRENT_PATH}/debian/*.lintian-overrides
rm -rf ${SRC_PATH}/build/install/${PRODUCT}*
override_dh_auto_configure:
@echo "RULES.$@"
dh_testdir
dh_auto_configure
dh_lintian
override_dh_auto_build:
cd ${SRC_PATH}/${SCRIPT_PATH}/systemd; \
bash build.sh -bp "${CURRENT_PATH}/debian/"; \
cd ${SRC_PATH}/${SCRIPT_PATH}; \
bash build-frontend.sh -sp ${SRC_PATH}; \
bash build-backend.sh -sp ${SRC_PATH}; \
bash publish-backend.sh -sp ${SRC_PATH}
find ${SRC_PATH}/publish/ \
-depth -type f -regex '.*\(eslintrc.*\|npmignore\|gitignore\|gitattributes\|gitmodules\|un~\|DS_Store\)' -exec rm -f {} \;
rm -f ${SRC_PATH}/config/nginx/onlyoffice-login.conf
if ! grep -q 'var/www/${PRODUCT}' ${SRC_PATH}/config/nginx/*.conf; then find ${SRC_PATH}/config/nginx/ -name "*.conf" -exec sed -i "s@\(var/www/\)@\1${PRODUCT}/@" {} +; fi
json -I -f ${SRC_PATH}/config/appsettings.services.json -e "this.logPath=\"/var/log/onlyoffice/${PRODUCT}\"" -e "this.socket={ 'path': '../ASC.Socket.IO/' }" \
-e "this.ssoauth={ 'path': '../ASC.SsoAuth/' }" -e "this.logLevel=\"warning\"" -e "this.core={ 'products': { 'folder': '/var/www/${PRODUCT}/products', 'subfolder': 'server'} }"
find ${SRC_PATH}/config/ -type f -regex '.*\.\(test\|dev\).*' -delete
json -I -f ${SRC_PATH}/config/appsettings.json -e "this.core.notify.postman=\"services\"" -e "this.Logging.LogLevel.Default=\"Warning\"" -e "this['debug-info'].enabled=\"false\"" -e "this.web.samesite=\"None\""
json -I -f ${SRC_PATH}/config/apisystem.json -e "this.core.notify.postman=\"services\""
sed 's_\(minlevel=\)".*"_\1"Warn"_g' -i ${SRC_PATH}/config/nlog.config
sed 's_etc/nginx_etc/openresty_g' -i ${SRC_PATH}/config/nginx/*.conf
sed 's/teamlab.info/onlyoffice.com/g' -i ${SRC_PATH}/config/autofac.consumers.json
sed -e 's/$$router_host/127.0.0.1/g' -e 's/the_host/host/g' -e 's/the_scheme/scheme/g' -e 's_includes_/etc/openresty/includes_g' -i ${SRC_PATH}/build/install/docker/config/nginx/onlyoffice-proxy*.conf
sed "s_\(.*root\).*;_\1 \"/var/www/${PRODUCT}\";_g" -i ${SRC_PATH}/build/install/docker/config/nginx/letsencrypt.conf
sed -e '/.pid/d' -e '/temp_path/d' -e 's_etc/nginx_etc/openresty_g' -i ${SRC_PATH}/build/install/docker/config/nginx/templates/nginx.conf.template
mv -f ${SRC_PATH}/build/install/docker/config/nginx/onlyoffice-proxy-ssl.conf ${SRC_PATH}/build/install/docker/config/nginx/onlyoffice-proxy-ssl.conf.template
for i in ${PRODUCT} $$(ls ${CURRENT_PATH}/debian/*.install | grep -oP 'debian/\K.*' | grep -o '^[^.]*'); do \
cp ${CURRENT_PATH}/debian/source/lintian-overrides ${CURRENT_PATH}/debian/$$i.lintian-overrides; \
done
execute_after_dh_fixperms:
chmod o-rwx debian/${PRODUCT}-common/etc/onlyoffice/${PRODUCT}/*
override_dh_auto_install:
dh_systemd_enable --no-enable
dh_systemd_start --no-start
override_dh_strip:
dh_strip -Xarm --no-automatic-dbgsym
override_dh_shlibdeps:
dh_shlibdeps -Xarm -Xkafka -- -xlibgcc-s1 --ignore-missing-info -xlibgcc1
override_dh_builddeb:
dh_builddeb -- -Zxz
override_dh_installinit:
# don't do anything, silences lintian warnings "init.d-script-not-included-in-package"


@ -1 +0,0 @@
1.0


@ -1,69 +0,0 @@
# Ignoring node_modules errors due to lack of ability to influence them
embedded-javascript-library var/www/{{product}}/services/*/node_modules/*
# Ignoring node_modules errors due to lack of ability to influence them
executable-not-elf-or-script var/www/{{product}}/services/*/node_modules/*
# Ignoring node_modules errors due to lack of ability to influence them
privacy-breach-generic var/www/{{product}}/services/*/node_modules/*
# Ignoring node_modules errors due to lack of ability to influence them
script-not-executable var/www/{{product}}/services/*/node_modules/*
# Ignoring node_modules errors due to lack of ability to influence them
unusual-interpreter */node_modules/*
# The use of the /var/www directory is caused by its past history as the default document root
dir-or-file-in-var-www
# Our project uses embedded libraries such as librdkafka.so
embedded-library
# Exported DLLs need to be executable
executable-not-elf-or-script
# dh_shlibdeps generates an unneeded ldconfig call
package-has-unnecessary-activation-of-ldconfig-trigger
# Temporary ignoring of description errors
description-is-pkg-name
description-starts-with-package-name
description-too-short
description-synopsis-is-duplicated
# There is no manual page for {{product}}-configuration
binary-without-manpage
# chown is used on the directories of our project; the user and group are created beforehand
maintainer-script-should-not-use-recursive-chown-or-chmod
# Scripts are not designed to be run manually
script-not-executable
# The first number (major version) must be at least 2, but we currently use version 1
invalid-standards-version
# Temporary ignoring of translation errors
untranslatable-debconf-templates
# We use this to protect sensitive information (i.e. passwords) in the config file
non-standard-file-perm
# There are instances where temporary or future code sections need to be retained for documentation or future development purposes
no-code-sections
# Ignoring errors due to lack of ability to influence them
library-not-linked-against-libc
# Some files trigger a privacy concern, specifically references to .png image files
privacy-breach-generic
# Building a cross-platform project, so those files are arch-independent
arch-independent-package-contains-binary-or-object
# Specifying a dependency on glibc would be redundant and could create unnecessary dependencies.
# We tested the application in different environments and made sure that it works stably without explicit dependence on glibc.
missing-dependency-on-libc
# Some binary files are executable, but are not intended for debugging.
# Including debug symbols in these files makes no sense and increases the size of the package.
unstripped-binary-or-object
# The systemctl call is used to manage MySQL and ensure that it is configured correctly
maintainer-script-calls-systemctl


@ -1,11 +0,0 @@
node_modules
bin
.yarn
.git
.vscode
.github
Logs
Data
TestsResults
i18next
*.bat


@ -1,130 +0,0 @@
# docker-compose tags #
PRODUCT=onlyoffice
REPO=${PRODUCT}
INSTALLATION_TYPE=COMMUNITY
STATUS=""
DOCKER_IMAGE_PREFIX=${STATUS}docspace
DOCKER_TAG=latest
CONTAINER_PREFIX=${PRODUCT}-
MYSQL_VERSION=8.0.32
MYSQL_IMAGE=mysql:${MYSQL_VERSION}
ELK_VERSION=7.16.3
SERVICE_PORT=5050
DOCUMENT_SERVER_IMAGE_NAME=onlyoffice/4testing-documentserver-ee:latest
DOCKERFILE=Dockerfile.app
APP_DOTNET_ENV=""
EXTERNAL_PORT="80"
# zookeeper #
ZOO_PORT=2181
ZOO_HOST=${CONTAINER_PREFIX}zookeeper
ZOO_SERVER=server.1=${ZOO_HOST}:2888:3888
# kafka #
KAFKA_HOST=${CONTAINER_PREFIX}kafka
KAFKA_ADVERTISED_LISTENERS=LISTENER_DOCKER_INTERNAL://${KAFKA_HOST}:9092
KAFKA_LISTENER_SECURITY_PROTOCOL_MAP=LISTENER_DOCKER_INTERNAL:PLAINTEXT,LISTENER_DOCKER_EXTERNAL:PLAINTEXT
KAFKA_INTER_BROKER_LISTENER_NAME=LISTENER_DOCKER_INTERNAL
KAFKA_ZOOKEEPER_CONNECT=${ZOO_HOST}:2181
KAFKA_BROKER_ID=1
KAFKA_LOG4J_LOGGERS=kafka.controller=INFO,kafka.producer.async.DefaultEventHandler=INFO,state.change.logger=INFO
KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR=1
# elasticsearch #
ELK_CONTAINER_NAME=${CONTAINER_PREFIX}elasticsearch
ELK_SHEME=http
ELK_HOST=""
ELK_PORT=9200
# app service environment #
ENV_EXTENSION=none
APP_CORE_BASE_DOMAIN=localhost
APP_URL_PORTAL="http://localhost:8092"
OAUTH_REDIRECT_URL="https://service.onlyoffice.com/oauth2.aspx"
LOG_LEVEL="Warning"
DEBUG_INFO="false"
APP_KNOWN_PROXIES=""
APP_KNOWN_NETWORKS=""
APP_CORE_MACHINEKEY=your_core_machinekey
CERTIFICATE_PATH=""
CERTIFICATE_KEY_PATH=""
DHPARAM_PATH=""
# docs #
DOCUMENT_CONTAINER_NAME=${CONTAINER_PREFIX}document-server
DOCUMENT_SERVER_URL_EXTERNAL=""
DOCUMENT_SERVER_JWT_SECRET=your_jwt_secret
DOCUMENT_SERVER_JWT_HEADER=AuthorizationJwt
DOCUMENT_SERVER_URL_PUBLIC=/ds-vpath/
# redis #
REDIS_CONTAINER_NAME=${CONTAINER_PREFIX}redis
REDIS_HOST=""
REDIS_PORT=6379
REDIS_USER_NAME=""
REDIS_PASSWORD=""
# rabbitmq #
RABBIT_CONTAINER_NAME=${CONTAINER_PREFIX}rabbitmq
RABBIT_HOST=""
RABBIT_PORT=5672
RABBIT_VIRTUAL_HOST=/
RABBIT_USER_NAME=guest
RABBIT_PASSWORD=guest
# mysql #
MYSQL_CONTAINER_NAME=${CONTAINER_PREFIX}mysql-server
MYSQL_HOST=""
MYSQL_PORT=3306
MYSQL_ROOT_PASSWORD=my-secret-pw
MYSQL_DATABASE=docspace
MYSQL_USER=${PRODUCT}_user
MYSQL_PASSWORD=${PRODUCT}_pass
DATABASE_MIGRATION=false
# service host #
API_SYSTEM_HOST=${CONTAINER_PREFIX}api-system
BACKUP_HOST=${CONTAINER_PREFIX}backup
BACKUP_BACKGRUOND_TASKS_HOST=${CONTAINER_PREFIX}backup-background-tasks
CLEAR_EVENTS_HOST=${CONTAINER_PREFIX}clear-events
FILES_HOST=${CONTAINER_PREFIX}files
FILES_SERVICES_HOST=${CONTAINER_PREFIX}files-services
STORAGE_MIGRATION_HOST=${CONTAINER_PREFIX}storage-migration
NOTIFY_HOST=${CONTAINER_PREFIX}notify
PEOPLE_SERVER_HOST=${CONTAINER_PREFIX}people-server
SOCKET_HOST=${CONTAINER_PREFIX}socket
STUDIO_NOTIFY_HOST=${CONTAINER_PREFIX}studio-notify
API_HOST=${CONTAINER_PREFIX}api
STUDIO_HOST=${CONTAINER_PREFIX}studio
SSOAUTH_HOST=${CONTAINER_PREFIX}ssoauth
MIGRATION_RUNNER_HOST=${CONTAINER_PREFIX}migration-runner
PROXY_HOST=${CONTAINER_PREFIX}proxy
ROUTER_HOST=${CONTAINER_PREFIX}router
DOCEDITOR_HOST=${CONTAINER_PREFIX}doceditor
LOGIN_HOST=${CONTAINER_PREFIX}login
HELTHCHECKS_HOST=${CONTAINER_PREFIX}healthchecks
# router upstream environment #
SERVICE_API_SYSTEM=${API_SYSTEM_HOST}:${SERVICE_PORT}
SERVICE_BACKUP=${BACKUP_HOST}:${SERVICE_PORT}
SERVICE_BACKUP_BACKGRUOND_TASKS=${BACKUP_BACKGRUOND_TASKS_HOST}:${SERVICE_PORT}
SERVICE_CLEAR_EVENTS=${CLEAR_EVENTS_HOST}:${SERVICE_PORT}
SERVICE_FILES=${FILES_HOST}:${SERVICE_PORT}
SERVICE_FILES_SERVICES=${FILES_SERVICES_HOST}:${SERVICE_PORT}
SERVICE_STORAGE_MIGRATION=${STORAGE_MIGRATION_HOST}:${SERVICE_PORT}
SERVICE_NOTIFY=${NOTIFY_HOST}:${SERVICE_PORT}
SERVICE_PEOPLE_SERVER=${PEOPLE_SERVER_HOST}:${SERVICE_PORT}
SERVICE_SOCKET=${SOCKET_HOST}:${SERVICE_PORT}
SERVICE_STUDIO_NOTIFY=${STUDIO_NOTIFY_HOST}:${SERVICE_PORT}
SERVICE_API=${API_HOST}:${SERVICE_PORT}
SERVICE_STUDIO=${STUDIO_HOST}:${SERVICE_PORT}
SERVICE_SSOAUTH=${SSOAUTH_HOST}:${SERVICE_PORT}
SERVICE_DOCEDITOR=${DOCEDITOR_HOST}:5013
SERVICE_LOGIN=${LOGIN_HOST}:5011
SERVICE_HELTHCHECKS=${HELTHCHECKS_HOST}:${SERVICE_PORT}
NETWORK_NAME=${PRODUCT}
COMPOSE_IGNORE_ORPHANS=True
