Merge branch 'develop' into feature/socket-newtag

Author: pavelbannov, 2022-11-24 17:56:32 +03:00
Commit: 130532927b
437 changed files with 8874 additions and 4884 deletions

.gitignore (vendored): 1 changed line

@ -50,4 +50,3 @@ TestsResults/
**/.yarn/cache
**/.yarn/install-state.gz
config/appsettings.dev.json

ASC.Web.slnf (new file): 42 added lines

@ -0,0 +1,42 @@
{
"solution": {
"path": "ASC.Web.sln",
"projects": [
"common\\ASC.ActiveDirectory\\ASC.ActiveDirectory.csproj",
"common\\ASC.Api.Core\\ASC.Api.Core.csproj",
"common\\ASC.Common\\ASC.Common.csproj",
"common\\ASC.Core.Common\\ASC.Core.Common.csproj",
"common\\ASC.Data.Backup.Core\\ASC.Data.Backup.Core.csproj",
"common\\ASC.Data.Encryption\\ASC.Data.Encryption.csproj",
"common\\ASC.Data.Reassigns\\ASC.Data.Reassigns.csproj",
"common\\ASC.Data.Storage\\ASC.Data.Storage.csproj",
"common\\ASC.EventBus.ActiveMQ\\ASC.EventBus.ActiveMQ.csproj",
"common\\ASC.EventBus.Extensions.Logger\\ASC.EventBus.Extensions.Logger.csproj",
"common\\ASC.EventBus.RabbitMQ\\ASC.EventBus.RabbitMQ.csproj",
"common\\ASC.EventBus\\ASC.EventBus.csproj",
"common\\ASC.FederatedLogin\\ASC.FederatedLogin.csproj",
"common\\ASC.Feed\\ASC.Feed.csproj",
"common\\ASC.IPSecurity\\ASC.IPSecurity.csproj",
"common\\ASC.MessagingSystem\\ASC.MessagingSystem.csproj",
"common\\ASC.Notify.Textile\\ASC.Notify.Textile.csproj",
"common\\ASC.Textile\\ASC.Textile.csproj",
"common\\services\\ASC.ApiSystem\\ASC.ApiSystem.csproj",
"common\\services\\ASC.AuditTrail\\ASC.AuditTrail.csproj",
"common\\services\\ASC.ClearEvents\\ASC.ClearEvents.csproj",
"common\\services\\ASC.Data.Backup.BackgroundTasks\\ASC.Data.Backup.BackgroundTasks.csproj",
"common\\services\\ASC.Data.Backup\\ASC.Data.Backup.csproj",
"common\\services\\ASC.ElasticSearch\\ASC.ElasticSearch.csproj",
"common\\services\\ASC.Feed.Aggregator\\ASC.Feed.Aggregator.csproj",
"common\\services\\ASC.Notify\\ASC.Notify.csproj",
"common\\services\\ASC.Studio.Notify\\ASC.Studio.Notify.csproj",
"products\\ASC.Files\\Core\\ASC.Files.Core.csproj",
"products\\ASC.Files\\Server\\ASC.Files.csproj",
"products\\ASC.Files\\Service\\ASC.Files.Service.csproj",
"products\\ASC.People\\Server\\ASC.People.csproj",
"web\\ASC.Web.Api\\ASC.Web.Api.csproj",
"web\\ASC.Web.Core\\ASC.Web.Core.csproj",
"web\\ASC.Web.HealthChecks.UI\\ASC.Web.HealthChecks.UI.csproj",
"web\\ASC.Web.Studio\\ASC.Web.Studio.csproj"
]
}
}

build/Jenkinsfile (vendored): 8 changed lines

@ -13,7 +13,7 @@ pipeline {
}
stage('Backend') {
steps {
sh 'dotnet build -c Release ASC.Web.sln'
sh 'dotnet build -c Release ASC.Web.slnf'
}
}
}
@ -28,7 +28,7 @@ pipeline {
}
stage('Backend') {
steps {
bat 'dotnet build -c Release ASC.Web.sln'
bat 'dotnet build -c Release ASC.Web.slnf'
}
}
}
@ -62,7 +62,7 @@ pipeline {
}
stage('Files') {
steps {
sh "git submodule update --progress --init -- products/ASC.Files/Server/DocStore && dotnet build ASC.Web.sln && cd ${env.WORKSPACE}/products/ASC.Files/Tests/ && dotnet test ASC.Files.Tests.csproj -r linux-x64 -l \"console;verbosity=detailed\""
sh "git submodule update --progress --init -- products/ASC.Files/Server/DocStore && dotnet build ASC.Web.slnf && cd ${env.WORKSPACE}/products/ASC.Files/Tests/ && dotnet test ASC.Files.Tests.csproj -r linux-x64 -l \"console;verbosity=detailed\""
}
}
}
@ -90,7 +90,7 @@ pipeline {
}
stage('Files') {
steps {
bat "git submodule update --progress --init -- products\\ASC.Files\\Server\\DocStore && dotnet build ASC.Web.sln && cd ${env.WORKSPACE}\\products\\ASC.Files\\Tests\\ && dotnet test ASC.Files.Tests.csproj"
bat "git submodule update --progress --init -- products\\ASC.Files\\Server\\DocStore && dotnet build ASC.Web.slnf && cd ${env.WORKSPACE}\\products\\ASC.Files\\Tests\\ && dotnet test ASC.Files.Tests.csproj"
}
}
}

View File

@ -8,7 +8,7 @@ call runasadmin.bat "%~dpnx0"
if %errorlevel% == 0 (
call start\stop.bat nopause
dotnet build ..\asc.web.sln /fl1 /flp1:logfile=asc.web.log;verbosity=normal
dotnet build ..\asc.web.slnf /fl1 /flp1:logfile=asc.web.log;verbosity=normal
echo.
)

View File

@ -0,0 +1,7 @@
@echo off
pwsh %~dp0/build.backend.docker.ps1 %1
echo.
pause

View File

@ -0,0 +1,81 @@
$PSversionMajor = $PSVersionTable.PSVersion | sort-object major | ForEach-Object { $_.major }
$PSversionMinor = $PSVersionTable.PSVersion | sort-object minor | ForEach-Object { $_.minor }
if ($PSversionMajor -lt 7 -or $PSversionMinor -lt 2) {
Write-Error "Powershell version must be greater than or equal to 7.2."
exit
}
$Branch = git branch --show-current
$BranchExistRemote = git ls-remote --heads origin $Branch
if (-not $BranchExistRemote) {
Write-Error "The current branch does not exist in the remote repository. Please push changes."
exit
}
$RootDir = Split-Path -Parent $PSScriptRoot
$DockerDir = ($RootDir + "\build\install\docker")
$BuildDate = Get-Date -Format "yyyy-MM-dd"
$LocalIp = (Get-CimInstance -ClassName Win32_NetworkAdapterConfiguration | Where-Object { $_.DHCPEnabled -ne $null -and $_.DefaultIPGateway -ne $null }).IPAddress | Select-Object -First 1
$Doceditor = ($LocalIp + ":5013")
$Login = ($LocalIp + ":5011")
$Client = ($LocalIp + ":5001")
$DockerFile = "Dockerfile.dev"
$EnvExtension = "dev"
$CoreBaseDomain = "localhost"
# Stop all backend services"
& "$PSScriptRoot\start\stop.backend.docker.ps1"
$Env:COMPOSE_IGNORE_ORPHANS = "True"
$Containers = docker ps -a -f "name=^onlyoffice" --format="{{.ID}} {{.Names}}" | Select-String -Pattern ("mysql|rabbitmq|redis|elasticsearch|documentserver") -NotMatch | ConvertFrom-String | ForEach-Object P1
$Images = docker images onlyoffice/docspace* -q
if ($Containers) {
Write-Host "Remove all backend containers" -ForegroundColor Blue
docker rm -f $Containers
}
if ($Images) {
Write-Host "Remove all docker images except 'mysql, rabbitmq, redis, elasticsearch, documentserver'" -ForegroundColor Blue
docker rmi -f $Images
}
Write-Host "Run MySQL" -ForegroundColor Green
docker compose -f ($DockerDir + "\db.yml") up -d
Write-Host "Run environments (redis, rabbitmq)" -ForegroundColor Green
$Env:DOCKERFILE = $DockerFile
docker compose -f ($DockerDir + "\redis.yml") -f ($DockerDir + "\rabbitmq.yml") up -d
if ($args[0] -eq "--no_ds") {
Write-Host "SKIP Document server" -ForegroundColor Blue
}
else {
Write-Host "Run Document server" -ForegroundColor Green
$Env:DOCUMENT_SERVER_IMAGE_NAME = "onlyoffice/documentserver-de:latest"
$Env:ROOT_DIR = $RootDir
docker compose -f ($DockerDir + "\ds.dev.yml") up -d
}
Write-Host "Build all backend services" -ForegroundColor Blue
$Env:DOCKERFILE = $DockerFile
$Env:RELEASE_DATE = $BuildDate
$Env:GIT_BRANCH = $Branch
$Env:SERVICE_DOCEDITOR = $Doceditor
$Env:SERVICE_LOGIN = $Login
$Env:SERVICE_CLIENT = $Client
$Env:APP_CORE_BASE_DOMAIN = $CoreBaseDomain
$Env:ENV_EXTENSION = $EnvExtension
docker compose -f ($DockerDir + "\build.dev.yml") build --build-arg GIT_BRANCH=$Branch --build-arg RELEASE_DATE=$BuildDate
Write-Host "Run DB migration" -ForegroundColor Green
$Env:DOCKERFILE = $DockerFile
docker compose -f ($DockerDir + "\migration-runner.yml") up -d
# Start all backend services"
& "$PSScriptRoot\start\start.backend.docker.ps1"

View File

@ -9,10 +9,17 @@ echo "Root directory:" $dir
cd $dir
branch=$(git branch | sed -n -e 's/^\* \(.*\)/\1/p')
branch=$(git branch --show-current)
echo "GIT_BRANCH:" $branch
branch_exist_remote=$(git ls-remote --heads origin $branch)
if [ -z "$branch_exist_remote" ]; then
echo "The current branch does not exist in the remote repository. Please push changes."
exit 1
fi
cd $dir/build/install/docker/
docker_dir="$( pwd )"
@ -43,11 +50,9 @@ $dir/build/start/stop.backend.docker.sh
echo "Remove all backend containers"
docker rm -f $(docker ps -a | egrep "onlyoffice" | egrep -v "mysql|rabbitmq|redis|elasticsearch|documentserver" | awk 'NR>0 {print $1}')
echo "Remove all backend images"
docker rmi -f $(docker images -a | egrep "onlyoffice" | egrep -v "mysql|rabbitmq|redis|elasticsearch|documentserver" | awk 'NR>0 {print $3}')
echo "Remove all docker images except 'mysql, rabbitmq, redis, elasticsearch, documentserver'"
docker image rm -f $(docker images -a | egrep "onlyoffice" | egrep -v "mysql|rabbitmq|redis|elasticsearch|documentserver" | awk 'NR>0 {print $3}')
docker rmi -f $(docker images -a | egrep "onlyoffice" | egrep -v "mysql|rabbitmq|redis|elasticsearch|documentserver" | awk 'NR>0 {print $3}')
echo "Run MySQL"
@ -58,7 +63,7 @@ if [ "${arch_name}" = "x86_64" ]; then
docker compose -f db.yml up -d
elif [ "${arch_name}" = "arm64" ]; then
echo "CPU Type: arm64 -> run db.yml with arm64v8 image"
MYSQL_IMAGE=arm64v8/mysql:oracle \
MYSQL_IMAGE=arm64v8/mysql:8.0.31-oracle \
docker compose -f db.yml up -d
else
echo "Error: Unknown CPU Type: ${arch_name}."

View File

@ -20,7 +20,7 @@ echo "FRONT-END (for start run command 'yarn start' inside the root folder)"
yarn install
echo "BACK-END"
dotnet build $dir/asc.web.sln /fl1 /flp1:logfile=asc.web.log;verbosity=normal
dotnet build $dir/asc.web.slnf /fl1 /flp1:logfile=asc.web.log;verbosity=normal
echo "install nodejs projects dependencies..."
pushd $dir/common/ASC.Socket.IO/
@ -29,7 +29,5 @@ pushd $dir/common/ASC.SsoAuth/
yarn install
pushd $dir/common/ASC.WebDav/
yarn install
pushd $dir/common/ASC.UrlShortener/
yarn install
pushd $dir/common/ASC.WebPlugins/
yarn install

View File

@ -0,0 +1,7 @@
@echo off
pwsh %~dp0/clear.backend.docker.ps1
echo.
pause

View File

@ -0,0 +1,21 @@
$Containers = docker ps -aqf "name=^onlyoffice"
$Images = docker images onlyoffice/docspace* -q
if ($Containers) {
Write-Host "Stop all backend containers" -ForegroundColor Blue
docker stop $Containers
Write-Host "Remove all backend containers" -ForegroundColor Blue
docker rm -f $Containers
}
if ($Images) {
Write-Host "Remove all docker images except 'mysql, rabbitmq, redis, elasticsearch, documentserver'" -ForegroundColor Blue
docker rmi -f $Images
}
Write-Host "Remove unused volumes." -ForegroundColor Blue
docker volume prune -f
Write-Host "Remove unused networks." -ForegroundColor Blue
docker network prune -f

View File

@ -42,12 +42,11 @@ done
echo "== BACK-END-BUILD =="
cd ${SRC_PATH}
dotnet build ASC.Web.sln ${ARGS}
dotnet build ASC.Web.slnf ${ARGS}
dotnet build ASC.Migrations.sln -o ${SRC_PATH}/ASC.Migration.Runner/service/
# Array of names backend services in directory common (Nodejs)
services_name_backend_nodejs=()
services_name_backend_nodejs+=(ASC.UrlShortener)
services_name_backend_nodejs+=(ASC.Socket.IO)
services_name_backend_nodejs+=(ASC.SsoAuth)

View File

@ -5,12 +5,13 @@
DOCKER_IMAGE_PREFIX=${STATUS}docspace
DOCKER_TAG=latest
CONTAINER_PREFIX=${PRODUCT}-
MYSQL_VERSION=8.0.18
MYSQL_VERSION=8.0.31
MYSQL_IMAGE=mysql:${MYSQL_VERSION}
ELK_VERSION=7.13.1
SERVICE_PORT=5050
DOCUMENT_SERVER_IMAGE_NAME=onlyoffice/4testing-documentserver-ee:latest
DOCKERFILE=Dockerfile.app
APP_DOTNET_ENV=""
# zookeeper #
ZOO_PORT=2181

View File

@ -140,8 +140,8 @@ RUN cd /app/onlyoffice/src/ && \
cp -f config/nginx/onlyoffice*.conf /etc/nginx/conf.d/ && \
mkdir -p /etc/nginx/includes/ && cp -f config/nginx/includes/onlyoffice*.conf /etc/nginx/includes/ && \
sed -e 's/#//' -i /etc/nginx/conf.d/onlyoffice.conf && \
dotnet restore ASC.Web.sln && \
dotnet build -r linux-x64 ASC.Web.sln && \
dotnet restore ASC.Web.slnf && \
dotnet build -r linux-x64 ASC.Web.slnf && \
cd products/ASC.People/Server && \
dotnet -d publish --no-build --self-contained -r linux-x64 -o /var/www/products/ASC.People/server && \
cd ../../../ && \

View File

@ -169,17 +169,21 @@ RUN chown nginx:nginx /etc/nginx/* -R && \
## Doceditor ##
FROM noderun as doceditor
WORKDIR ${BUILD_PATH}/products/ASC.Files/editor
WORKDIR ${BUILD_PATH}/products/ASC.Editors/editor
COPY --chown=onlyoffice:onlyoffice docker-entrypoint.py ./docker-entrypoint.py
COPY --from=base --chown=onlyoffice:onlyoffice ${SRC_PATH}/build/deploy/editor/ .
ENTRYPOINT ["node", "server.js"]
CMD ["server.js", "ASC.Editors"]
## Login ##
FROM noderun as login
WORKDIR ${BUILD_PATH}/products/ASC.Login/login
COPY --chown=onlyoffice:onlyoffice docker-entrypoint.py ./docker-entrypoint.py
COPY --from=base --chown=onlyoffice:onlyoffice ${SRC_PATH}/build/deploy/login/ .
ENTRYPOINT ["node", "server.js"]
CMD ["server.js", "ASC.Login"]
## ASC.Data.Backup.BackgroundTasks ##
FROM dotnetrun AS backup_background

View File

@ -130,6 +130,7 @@ ENV DNS_NAMESERVER=127.0.0.11 \
RUN apt-get -y update && \
apt-get install -yq vim && \
apt-get install -y dos2unix && \
rm -rf /var/lib/apt/lists/* && \
rm -rf /usr/share/nginx/html/*
@ -138,8 +139,13 @@ COPY --from=base /etc/nginx/conf.d /etc/nginx/conf.d
COPY --from=base /etc/nginx/includes /etc/nginx/includes
COPY /config/nginx/templates/upstream.conf.template /etc/nginx/templates/upstream.conf.template
COPY /config/nginx/templates/nginx.conf.template /etc/nginx/nginx.conf.template
COPY prepare-nginx-proxy.sh /docker-entrypoint.d/prepare-nginx-proxy.sh
RUN dos2unix /docker-entrypoint.d/prepare-nginx-proxy.sh && \
apt-get --purge remove -y dos2unix && \
rm -rf /var/lib/apt/lists/*
# add defualt user and group for no-root run
RUN chown nginx:nginx /etc/nginx/* -R && \
chown nginx:nginx /docker-entrypoint.d/* && \
@ -169,13 +175,13 @@ COPY --from=base --chown=onlyoffice:onlyoffice ${BUILD_PATH}/services/ASC.Data.B
CMD ["ASC.Data.Backup.BackgroundTasks.dll", "ASC.Data.Backup.BackgroundTasks"]
# ASC.ApiSystem ##
FROM dotnetrun AS api_system
WORKDIR ${BUILD_PATH}/services/ASC.ApiSystem/
# FROM dotnetrun AS api_system
# WORKDIR ${BUILD_PATH}/services/ASC.ApiSystem/
COPY --chown=onlyoffice:onlyoffice docker-entrypoint.py ./docker-entrypoint.py
COPY --from=base --chown=onlyoffice:onlyoffice ${BUILD_PATH}/services/ASC.ApiSystem/service/ .
# COPY --chown=onlyoffice:onlyoffice docker-entrypoint.py ./docker-entrypoint.py
# COPY --from=base --chown=onlyoffice:onlyoffice ${BUILD_PATH}/services/ASC.ApiSystem/service/ .
CMD [" ASC.ApiSystem.dll", " ASC.ApiSystem"]
# CMD ["ASC.ApiSystem.dll", "ASC.ApiSystem"]
## ASC.ClearEvents ##
FROM dotnetrun AS clear-events
@ -187,13 +193,13 @@ COPY --from=base --chown=onlyoffice:onlyoffice ${BUILD_PATH}/services/ASC.ClearE
CMD ["ASC.ClearEvents.dll", "ASC.ClearEvents"]
## ASC.Migration ##
FROM dotnetrun AS migration
WORKDIR ${BUILD_PATH}/services/ASC.Migration/
# FROM dotnetrun AS migration
# WORKDIR ${BUILD_PATH}/services/ASC.Migration/
COPY --chown=onlyoffice:onlyoffice docker-entrypoint.py ./docker-entrypoint.py
COPY --from=base --chown=onlyoffice:onlyoffice ${BUILD_PATH}/services/ASC.Migration/service/ .
# COPY --chown=onlyoffice:onlyoffice docker-entrypoint.py ./docker-entrypoint.py
# COPY --from=base --chown=onlyoffice:onlyoffice ${BUILD_PATH}/services/ASC.Migration/service/ .
CMD ["ASC.Migration.dll", "ASC.Migration"]
# CMD ["ASC.Migration.dll", "ASC.Migration"]
## ASC.Data.Backup ##
FROM dotnetrun AS backup
@ -223,13 +229,13 @@ COPY --from=base --chown=onlyoffice:onlyoffice ${BUILD_PATH}/services/ASC.Files.
CMD ["ASC.Files.Service.dll", "ASC.Files.Service"]
## ASC.Notify ##
FROM dotnetrun AS notify
WORKDIR ${BUILD_PATH}/services/ASC.Notify/service
# FROM dotnetrun AS notify
# WORKDIR ${BUILD_PATH}/services/ASC.Notify/service
COPY --chown=onlyoffice:onlyoffice docker-entrypoint.py ./docker-entrypoint.py
COPY --from=base --chown=onlyoffice:onlyoffice ${BUILD_PATH}/services/ASC.Notify/service/ .
# COPY --chown=onlyoffice:onlyoffice docker-entrypoint.py ./docker-entrypoint.py
# COPY --from=base --chown=onlyoffice:onlyoffice ${BUILD_PATH}/services/ASC.Notify/service/ .
CMD ["ASC.Notify.dll", "ASC.Notify"]
# CMD ["ASC.Notify.dll", "ASC.Notify"]
## ASC.People ##
FROM dotnetrun AS people_server
@ -268,22 +274,22 @@ COPY --from=base --chown=onlyoffice:onlyoffice ${BUILD_PATH}/services/ASC.Studio
CMD ["ASC.Studio.Notify.dll", "ASC.Studio.Notify"]
## ASC.TelegramService ##
FROM dotnetrun AS telegram_service
WORKDIR ${BUILD_PATH}/services/ASC.TelegramService/
# FROM dotnetrun AS telegram_service
# WORKDIR ${BUILD_PATH}/services/ASC.TelegramService/
COPY --chown=onlyoffice:onlyoffice docker-entrypoint.py ./docker-entrypoint.py
COPY --from=base --chown=onlyoffice:onlyoffice ${BUILD_PATH}/services/ASC.TelegramService/service/ .
# COPY --chown=onlyoffice:onlyoffice docker-entrypoint.py ./docker-entrypoint.py
# COPY --from=base --chown=onlyoffice:onlyoffice ${BUILD_PATH}/services/ASC.TelegramService/service/ .
CMD ["ASC.TelegramService.dll", "ASC.TelegramService"]
# CMD ["ASC.TelegramService.dll", "ASC.TelegramService"]
## ASC.UrlShortener ##
FROM noderun AS urlshortener
WORKDIR ${BUILD_PATH}/services/ASC.UrlShortener/service/
# FROM noderun AS urlshortener
# WORKDIR ${BUILD_PATH}/services/ASC.UrlShortener/service/
COPY --chown=onlyoffice:onlyoffice docker-entrypoint.py ./docker-entrypoint.py
COPY --from=base --chown=onlyoffice:onlyoffice ${BUILD_PATH}/services/ASC.UrlShortener/service/ .
# COPY --chown=onlyoffice:onlyoffice docker-entrypoint.py ./docker-entrypoint.py
# COPY --from=base --chown=onlyoffice:onlyoffice ${BUILD_PATH}/services/ASC.UrlShortener/service/ .
CMD ["index.js", "ASC.UrlShortener"]
# CMD ["index.js", "ASC.UrlShortener"]
## ASC.Web.Api ##
FROM dotnetrun AS api
@ -295,13 +301,13 @@ COPY --from=base --chown=onlyoffice:onlyoffice ${BUILD_PATH}/services/ASC.Web.Ap
CMD ["ASC.Web.Api.dll", "ASC.Web.Api"]
## ASC.Webhooks.Service ##
FROM dotnetrun AS webhooks-service
WORKDIR ${BUILD_PATH}/services/ASC.Webhooks.Service/
# FROM dotnetrun AS webhooks-service
# WORKDIR ${BUILD_PATH}/services/ASC.Webhooks.Service/
COPY --chown=onlyoffice:onlyoffice docker-entrypoint.py ./docker-entrypoint.py
COPY --from=base --chown=onlyoffice:onlyoffice ${BUILD_PATH}/services/ASC.Webhooks.Service/service/ .
# COPY --chown=onlyoffice:onlyoffice docker-entrypoint.py ./docker-entrypoint.py
# COPY --from=base --chown=onlyoffice:onlyoffice ${BUILD_PATH}/services/ASC.Webhooks.Service/service/ .
CMD ["ASC.Webhooks.Service.dll", "ASC.Webhooks.Service"]
# CMD ["ASC.Webhooks.Service.dll", "ASC.Webhooks.Service"]
## ASC.Web.Studio ##
FROM dotnetrun AS studio
@ -319,26 +325,16 @@ ARG SRC_PATH
ENV BUILD_PATH=${BUILD_PATH}
ENV SRC_PATH=${SRC_PATH}
WORKDIR ${BUILD_PATH}/services/ASC.Migration.Runner/
COPY ./docker-migration-entrypoint.sh ./docker-migration-entrypoint.sh
RUN apt-get update && \
apt-get install -y dos2unix
COPY docker-migration-entrypoint.sh docker-migration-entrypoint.sh
RUN dos2unix docker-migration-entrypoint.sh && \
apt-get --purge remove -y dos2unix && \
rm -rf /var/lib/apt/lists/*
COPY --from=base ${SRC_PATH}/ASC.Migration.Runner/service/ .
ENTRYPOINT ["./docker-migration-entrypoint.sh"]
## image for k8s bin-share ##
FROM busybox:latest AS bin_share
RUN mkdir -p /app/appserver/ASC.Files/server && \
mkdir -p /app/appserver/ASC.People/server/ && \
addgroup --system --gid 107 onlyoffice && \
adduser -u 104 onlyoffice --home /var/www/onlyoffice --system -G onlyoffice
COPY bin-share-docker-entrypoint.sh /app/docker-entrypoint.sh
COPY --from=base /var/www/products/ASC.Files/server/ /app/appserver/ASC.Files/server/
COPY --from=base /var/www/products/ASC.People/server/ /app/appserver/ASC.People/server/
ENTRYPOINT ["./app/docker-entrypoint.sh"]
## image for k8s wait-bin-share ##
FROM busybox:latest AS wait_bin_share
RUN mkdir /app
COPY wait-bin-share-docker-entrypoint.sh /app/docker-entrypoint.sh
ENTRYPOINT ["./app/docker-entrypoint.sh"]

View File

@ -1,6 +1,5 @@
version: "3.8"
x-service:
&x-service-base
x-service: &x-service-base
container_name: base
restart: always
expose:
@ -206,8 +205,8 @@ services:
networks:
default:
external:
name: ${NETWORK_NAME}
external: true
volumes:
es_data:

View File

@ -15,12 +15,12 @@ services:
target: clear-events
image: "${REPO}/${DOCKER_IMAGE_PREFIX}-clear-events:${DOCKER_TAG}"
onlyoffice-migration:
build:
context: ./
dockerfile: "${DOCKERFILE}"
target: migration
image: "${REPO}/${DOCKER_IMAGE_PREFIX}-migration:${DOCKER_TAG}"
# onlyoffice-migration:
# build:
# context: ./
# dockerfile: "${DOCKERFILE}"
# target: migration
# image: "${REPO}/${DOCKER_IMAGE_PREFIX}-migration:${DOCKER_TAG}"
onlyoffice-backup:
build:
@ -43,12 +43,12 @@ services:
target: files_services
image: "${REPO}/${DOCKER_IMAGE_PREFIX}-files-services:${DOCKER_TAG}"
onlyoffice-notify:
build:
context: ./
dockerfile: "${DOCKERFILE}"
target: notify
image: "${REPO}/${DOCKER_IMAGE_PREFIX}-notify:${DOCKER_TAG}"
# onlyoffice-notify:
# build:
# context: ./
# dockerfile: "${DOCKERFILE}"
# target: notify
# image: "${REPO}/${DOCKER_IMAGE_PREFIX}-notify:${DOCKER_TAG}"
onlyoffice-people-server:
build:
@ -71,19 +71,19 @@ services:
target: studio_notify
image: "${REPO}/${DOCKER_IMAGE_PREFIX}-studio-notify:${DOCKER_TAG}"
onlyoffice-telegram-service:
build:
context: ./
dockerfile: "${DOCKERFILE}"
target: telegram_service
image: "${REPO}/${DOCKER_IMAGE_PREFIX}-telegram-service:${DOCKER_TAG}"
# onlyoffice-telegram-service:
# build:
# context: ./
# dockerfile: "${DOCKERFILE}"
# target: telegram_service
# image: "${REPO}/${DOCKER_IMAGE_PREFIX}-telegram-service:${DOCKER_TAG}"
onlyoffice-urlshortener:
build:
context: ./
dockerfile: "${DOCKERFILE}"
target: urlshortener
image: "${REPO}/${DOCKER_IMAGE_PREFIX}-urlshortener:${DOCKER_TAG}"
# onlyoffice-urlshortener:
# build:
# context: ./
# dockerfile: "${DOCKERFILE}"
# target: urlshortener
# image: "${REPO}/${DOCKER_IMAGE_PREFIX}-urlshortener:${DOCKER_TAG}"
onlyoffice-api:
build:
@ -92,12 +92,12 @@ services:
target: api
image: "${REPO}/${DOCKER_IMAGE_PREFIX}-api:${DOCKER_TAG}"
onlyoffice-api-system:
build:
context: ./
dockerfile: "${DOCKERFILE}"
target: api_system
image: "${REPO}/${DOCKER_IMAGE_PREFIX}-api-system:${DOCKER_TAG}"
# onlyoffice-api-system:
# build:
# context: ./
# dockerfile: "${DOCKERFILE}"
# target: api_system
# image: "${REPO}/${DOCKER_IMAGE_PREFIX}-api-system:${DOCKER_TAG}"
onlyoffice-studio:
build:
@ -113,26 +113,12 @@ services:
target: ssoauth
image: "${REPO}/${DOCKER_IMAGE_PREFIX}-ssoauth:${DOCKER_TAG}"
onlyoffice-webhooks-service:
build:
context: ./
dockerfile: "${DOCKERFILE}"
target: webhooks-service
image: "${REPO}/${DOCKER_IMAGE_PREFIX}-webhooks-service:${DOCKER_TAG}"
onlyoffice-bin-share:
build:
context: ./
dockerfile: "${DOCKERFILE}"
target: bin_share
image: "${REPO}/${DOCKER_IMAGE_PREFIX}-bin-share:${DOCKER_TAG}"
onlyoffice-wait-bin-share:
build:
context: ./
dockerfile: "${DOCKERFILE}"
target: wait_bin_share
image: "${REPO}/${DOCKER_IMAGE_PREFIX}-wait-bin-share:${DOCKER_TAG}"
# onlyoffice-webhooks-service:
# build:
# context: ./
# dockerfile: "${DOCKERFILE}"
# target: webhooks-service
# image: "${REPO}/${DOCKER_IMAGE_PREFIX}-webhooks-service:${DOCKER_TAG}"
onlyoffice-proxy:
build:

View File

@ -66,10 +66,10 @@ services:
image: "${REPO}/${DOCKER_IMAGE_PREFIX}-clear-events:${DOCKER_TAG}"
container_name: ${CLEAR_EVENTS_HOST}
onlyoffice-migration:
<<: *x-service-base
image: "${REPO}/${DOCKER_IMAGE_PREFIX}-migration:${DOCKER_TAG}"
container_name: ${MIGRATION_HOST}
# onlyoffice-migration:
# <<: *x-service-base
# image: "${REPO}/${DOCKER_IMAGE_PREFIX}-migration:${DOCKER_TAG}"
# container_name: ${MIGRATION_HOST}
onlyoffice-files:
<<: *x-service-base
@ -98,28 +98,28 @@ services:
image: "${REPO}/${DOCKER_IMAGE_PREFIX}-studio-notify:${DOCKER_TAG}"
container_name: ${STUDIO_NOTIFY_HOST}
onlyoffice-telegram-service:
<<: *x-service-base
image: "${REPO}/${DOCKER_IMAGE_PREFIX}-telegram-service:${DOCKER_TAG}"
container_name: ${TELEGRAM_SERVICE_HOST}
# onlyoffice-telegram-service:
# <<: *x-service-base
# image: "${REPO}/${DOCKER_IMAGE_PREFIX}-telegram-service:${DOCKER_TAG}"
# container_name: ${TELEGRAM_SERVICE_HOST}
onlyoffice-urlshortener:
<<: *x-service-base
image: "${REPO}/${DOCKER_IMAGE_PREFIX}-urlshortener:${DOCKER_TAG}"
container_name: ${URLSHORTENER_HOST}
expose:
- ${SERVICE_PORT}
- "9999"
# onlyoffice-urlshortener:
# <<: *x-service-base
# image: "${REPO}/${DOCKER_IMAGE_PREFIX}-urlshortener:${DOCKER_TAG}"
# container_name: ${URLSHORTENER_HOST}
# expose:
# - ${SERVICE_PORT}
# - "9999"
onlyoffice-api:
<<: *x-service-base
image: "${REPO}/${DOCKER_IMAGE_PREFIX}-api:${DOCKER_TAG}"
container_name: ${API_HOST}
onlyoffice-api-system:
<<: *x-service-base
image: "${REPO}/${DOCKER_IMAGE_PREFIX}-api-system:${DOCKER_TAG}"
container_name: ${API_SYSTEM_HOST}
# onlyoffice-api-system:
# <<: *x-service-base
# image: "${REPO}/${DOCKER_IMAGE_PREFIX}-api-system:${DOCKER_TAG}"
# container_name: ${API_SYSTEM_HOST}
onlyoffice-studio:
<<: *x-service-base
@ -134,10 +134,10 @@ services:
- ${SERVICE_PORT}
- "9834"
onlyoffice-webhooks-service:
<<: *x-service-base
image: "${REPO}/${DOCKER_IMAGE_PREFIX}-webhooks-service:${DOCKER_TAG}"
container_name: ${WEBHOOKS_SERVICE_HOST}
# onlyoffice-webhooks-service:
# <<: *x-service-base
# image: "${REPO}/${DOCKER_IMAGE_PREFIX}-webhooks-service:${DOCKER_TAG}"
# container_name: ${WEBHOOKS_SERVICE_HOST}
onlyoffice-proxy:
image: "${REPO}/${DOCKER_IMAGE_PREFIX}-proxy:${DOCKER_TAG}"
@ -153,17 +153,17 @@ services:
- onlyoffice-backup-background-tasks
- onlyoffice-backup
- onlyoffice-clear-events
- onlyoffice-migration
- onlyoffice-webhooks-service
# - onlyoffice-migration
# - webhooks-service
- onlyoffice-files
- onlyoffice-files-services
- onlyoffice-people-server
- onlyoffice-socket
- onlyoffice-studio-notify
- onlyoffice-telegram-service
- onlyoffice-urlshortener
# - onlyoffice-telegram-service
# - onlyoffice-urlshortener
- onlyoffice-api
- onlyoffice-api-system
# - onlyoffice-api-system
- onlyoffice-studio
- onlyoffice-ssoauth
environment:
@ -171,16 +171,16 @@ services:
- SERVICE_FILES=${SERVICE_FILES}
- SERVICE_FILES_SERVICES=${SERVICE_FILES_SERVICES}
- SERVICE_CLEAR_EVENTS=${SERVICE_CLEAR_EVENTS}
- SERVICE_MIGRATION=${SERVICE_MIGRATION}
- SERVICE_WEBHOOKS_SERVICE=${SERVICE_WEBHOOKS_SERVICE}
- SERVICE_NOTIFY=${SERVICE_NOTIFY}
# - SERVICE_MIGRATION=${SERVICE_MIGRATION}
# - SERVICE_WEBHOOKS_SERVICE=${SERVICE_WEBHOOKS_SERVICE}
# - SERVICE_NOTIFY=${SERVICE_NOTIFY}
- SERVICE_PEOPLE_SERVER=${SERVICE_PEOPLE_SERVER}
- SERVICE_SOCKET=${SERVICE_SOCKET}
- SERVICE_STUDIO_NOTIFY=${SERVICE_STUDIO_NOTIFY}
- SERVICE_TELEGRAM_SERVICE=${SERVICE_TELEGRAM_SERVICE}
- SERVICE_URLSHORTENER=${SERVICE_URLSHORTENER}
# - SERVICE_TELEGRAM_SERVICE=${SERVICE_TELEGRAM_SERVICE}
# - SERVICE_URLSHORTENER=${SERVICE_URLSHORTENER}
- SERVICE_API=${SERVICE_API}
- SERVICE_API_SYSTEM=${SERVICE_API_SYSTEM}
# - SERVICE_API_SYSTEM=${SERVICE_API_SYSTEM}
- SERVICE_STUDIO=${SERVICE_STUDIO}
- SERVICE_SSOAUTH=${SERVICE_SSOAUTH}
- SERVICE_DOCEDITOR=${SERVICE_DOCEDITOR}
@ -196,8 +196,8 @@ services:
networks:
default:
external:
name: ${NETWORK_NAME}
external: true
volumes:
es_data:

View File

@ -13,5 +13,5 @@ services:
networks:
default:
external:
name: ${NETWORK_NAME}
external: true

View File

@ -1,2 +1,3 @@
#!/bin/sh
envsubst '$MAP_HASH_BUCKET_SIZE,$COUNT_WORKER_CONNECTIONS' < /etc/nginx/nginx.conf.template > /etc/nginx/nginx.conf

View File

@ -1,4 +1,4 @@
version: '3'
version: "3"
services:
onlyoffice-rabbitmq:
image: rabbitmq:3
@ -9,5 +9,5 @@ services:
- "80"
networks:
default:
external:
name: ${NETWORK_NAME}
external: true

View File

@ -1,4 +1,4 @@
version: '3'
version: "3"
services:
onlyoffice-redis:
image: redis:7
@ -8,5 +8,5 @@ services:
- "6379"
networks:
default:
external:
name: ${NETWORK_NAME}
external: true

View File

@ -8,7 +8,7 @@ set SRC_PATH=%~s2
pushd %~1
call dotnet build ASC.Web.sln
call dotnet build ASC.Web.slnf
call dotnet build ASC.Migrations.sln -o %SRC_PATH%\services\ASC.Migration.Runner\service
echo "== Build ASC.UrlShortener =="

View File

@ -1,4 +0,0 @@
@echo off
PUSHD %~dp0..\..
set servicepath=%cd%\common\services\ASC.Data.Storage.Encryption\bin\Debug\ASC.Data.Storage.Encryption.exe urls=http://0.0.0.0:5019 $STORAGE_ROOT=%cd%\Data log:dir=%cd%\Logs log:name=studio.notify pathToConf=%cd%\config core:products:folder=%cd%\products core:eventBus:subscriptionClientName=asc_event_bus_encryption_queue

View File

@ -1,4 +0,0 @@
@echo off
PUSHD %~dp0..\..
set servicepath=%cd%\common\ASC.Migration\bin\Debug\ASC.Migration.exe urls=http://0.0.0.0:5034 $STORAGE_ROOT=%cd%\Data pathToConf=%cd%\config log:dir=%cd%\Logs log:name=migration core:products:folder=%cd%\products

View File

@ -1,10 +0,0 @@
<service>
<id>OnlyofficeRadicale</id>
<name>ONLYOFFICE Radicale</name>
<startmode>manual</startmode>
<executable>python</executable>
<arguments>-m radicale --config %BASE%/../../config/radicale.config</arguments>
<log mode="none"/>
<delayedAutoStart>true</delayedAutoStart>
<onfailure action="restart" delay="5 sec" />
</service>

View File

@ -1,4 +0,0 @@
@echo off
PUSHD %~dp0..\..
set servicepath=%cd%\common\services\ASC.TelegramService\bin\Debug\ASC.TelegramService.exe urls=http://0.0.0.0:51702 $STORAGE_ROOT=%cd%\Data log:dir=%cd%\Logs log:name=telegram pathToConf=%cd%\config core:eventBus:subscriptionClientName=asc_event_bus_telegram_queue

View File

@ -1,10 +0,0 @@
<service>
<id>OnlyofficeUrlShortenerService</id>
<name>ONLYOFFICE UrlShortenerService</name>
<startmode>manual</startmode>
<executable>node</executable>
<arguments>../../common/ASC.UrlShortener/index.js</arguments>
<log mode="none"/>
<delayedAutoStart>true</delayedAutoStart>
<onfailure action="restart" delay="5 sec" />
</service>

View File

@ -1,4 +0,0 @@
@echo off
PUSHD %~dp0..\..
set servicepath=%cd%\common\services\ASC.Webhooks.Service\bin\Debug\ASC.Webhooks.Service.exe urls=http://0.0.0.0:5031 $STORAGE_ROOT=%cd%\Data log:dir=%cd%\Logs log:name=webhooks pathToConf=%cd%\config core:products:folder=%cd%\products

View File

@ -3,7 +3,7 @@
cd /D "%~dp0"
call start\stop.bat nopause
dotnet build ..\asc.web.sln
dotnet build ..\asc.web.slnf
dotnet build ..\ASC.Migrations.sln
PUSHD %~dp0..\common\Tools\ASC.Migration.Runner\bin\Debug\net6.0
dotnet ASC.Migration.Runner.dll

View File

@ -9,7 +9,7 @@ echo "Run script directory:" $rd
dir=$(builtin cd $rd/../; pwd)
echo "Root directory:" $dir
dotnet build $dir/asc.web.sln
dotnet build $dir/asc.web.slnf
dotnet build $dir/ASC.Migrations.sln
pushd $dir/common/Tools/ASC.Migration.Runner/bin/Debug/net6.0

View File

@ -1,7 +0,0 @@
PUSHD %~dp0..
cd %~dp0../../common/ASC.UrlShortener/
call yarn install --immutable
POPD

View File

@ -0,0 +1,7 @@
@echo off
pwsh %~dp0/restart.backend.docker.ps1
echo.
pause

View File

@ -0,0 +1,5 @@
# Stop all backend services"
& "$PSScriptRoot\stop.backend.docker.ps1"
# Start all backend services"
& "$PSScriptRoot\start.backend.docker.ps1"

View File

@ -0,0 +1,7 @@
@echo off
pwsh %~dp0/start.backend.docker.ps1
echo.
pause

View File

@ -0,0 +1,41 @@
$PSversionMajor = $PSVersionTable.PSVersion | sort-object major | ForEach-Object { $_.major }
$PSversionMinor = $PSVersionTable.PSVersion | sort-object minor | ForEach-Object { $_.minor }
if ($PSversionMajor -lt 7 -or $PSversionMinor -lt 2) {
Write-Error "Powershell version must be greater than or equal to 7.2."
exit
}
$Branch = git branch --show-current
$BranchExistRemote = git ls-remote --heads origin $Branch
if (-not $BranchExistRemote) {
Write-Error "The current branch does not exist in the remote repository. Please push changes."
exit
}
$RootDir = Split-Path (Split-Path -Parent $PSScriptRoot) -Parent
$DockerDir = ($RootDir + "\build\install\docker")
$BuildDate = Get-Date -Format "yyyy-MM-dd"
$LocalIp = (Get-WmiObject -Class Win32_NetworkAdapterConfiguration | Where-Object { $_.DHCPEnabled -ne $null -and $_.DefaultIPGateway -ne $null }).IPAddress | Select-Object -First 1
$Doceditor = ($LocalIp + ":5013")
$Login = ($LocalIp + ":5011")
$Client = ($LocalIp + ":5001")
$DockerFile = "Dockerfile.dev"
$EnvExtension = "dev"
$CoreBaseDomain = "localhost"
Write-Host "Start all backend services (containers)" -ForegroundColor Green
$Env:DOCKERFILE = $DockerFile
$Env:ROOT_DIR = $RootDir
$Env:RELEASE_DATE = $BuildDate
$Env:GIT_BRANCH = $Branch
$Env:SERVICE_DOCEDITOR = $Doceditor
$Env:SERVICE_LOGIN = $Login
$Env:SERVICE_CLIENT = $Client
$Env:APP_CORE_BASE_DOMAIN = $CoreBaseDomain
$Env:APP_URL_PORTAL = ("http://" + $LocalIp + ":8092")
$Env:ENV_EXTENSION = $EnvExtension
docker compose -f ($DockerDir + "\docspace.dev.yml") up -d

View File

@ -9,10 +9,17 @@ echo "Root directory:" $dir
cd $dir
branch=$(git branch | sed -n -e 's/^\* \(.*\)/\1/p')
branch=$(git branch --show-current)
echo "GIT_BRANCH:" $branch
branch_exist_remote=$(git ls-remote --heads origin $branch)
if [ -z "$branch_exist_remote" ]; then
echo "The current branch does not exist in the remote repository. Please push changes."
exit 1
fi
cd $dir/build/install/docker/
docker_dir="$( pwd )"

View File

@ -0,0 +1,7 @@
@echo off
pwsh %~dp0/stop.backend.docker.ps1
echo.
pause

View File

@ -0,0 +1,17 @@
$PSversionMajor = $PSVersionTable.PSVersion | sort-object major | ForEach-Object { $_.major }
$PSversionMinor = $PSVersionTable.PSVersion | sort-object minor | ForEach-Object { $_.minor }
if ($PSversionMajor -lt 7 -or $PSversionMinor -lt 2) {
Write-Error "Powershell version must be greater than or equal to 7.2."
exit
}
$Containers = docker ps -a -f "name=^onlyoffice" --format="{{.ID}} {{.Names}}" | Select-String -Pattern ("mysql|rabbitmq|redis|elasticsearch|documentserver") -NotMatch | ConvertFrom-String | ForEach-Object P1
if (-not $Containers) {
Write-Host "No containers to stop" -ForegroundColor Blue
exit
}
Write-Host "Stop all backend services (containers)" -ForegroundColor Green
docker stop $Containers

View File

@ -364,7 +364,7 @@ public class LdapUserImporter : IDisposable
return result;
}
public bool TrySyncUserGroupMembership(Tuple<UserInfo, LdapObject> ldapUserInfo)
public async Task<bool> TrySyncUserGroupMembership(Tuple<UserInfo, LdapObject> ldapUserInfo)
{
if (ldapUserInfo == null ||
!Settings.GroupMembership)
@ -401,12 +401,12 @@ public class LdapUserImporter : IDisposable
groupInfo = UserManager.SaveGroupInfo(_ldapObjectExtension.ToGroupInfo(ldapUserGroup, Settings));
_logger.DebugTrySyncUserGroupMembershipAddingUserToGroup(userInfo.UserName, ldapUser.Sid, groupInfo.Name, groupInfo.Sid);
UserManager.AddUserIntoGroup(userInfo.Id, groupInfo.ID);
await UserManager.AddUserIntoGroup(userInfo.Id, groupInfo.ID);
}
else if (!portalUserLdapGroups.Contains(groupInfo))
{
_logger.DebugTrySyncUserGroupMembershipAddingUserToGroup(userInfo.UserName, ldapUser.Sid, groupInfo.Name, groupInfo.Sid);
UserManager.AddUserIntoGroup(userInfo.Id, groupInfo.ID);
await UserManager.AddUserIntoGroup(userInfo.Id, groupInfo.ID);
}
actualPortalLdapGroups.Add(groupInfo);

View File

@ -136,7 +136,7 @@ public class LdapOperationJob : DistributedTaskProgress
InitDisturbedTask();
}
protected override void DoJob()
protected override async Task DoJob()
{
try
{
@ -206,7 +206,7 @@ public class LdapOperationJob : DistributedTaskProgress
default:
throw new ArgumentOutOfRangeException();
}
Do();
await Do();
}
catch (AuthorizingException authError)
{
@ -247,7 +247,7 @@ public class LdapOperationJob : DistributedTaskProgress
}
}
private void Do()
private async Task Do()
{
try
{
@ -288,7 +288,7 @@ public class LdapOperationJob : DistributedTaskProgress
_logger.DebugLdapSettings(sb.ToString());
}
SyncLDAP();
await SyncLDAP();
if (!string.IsNullOrEmpty(Error))
{
@ -380,7 +380,7 @@ public class LdapOperationJob : DistributedTaskProgress
_logger.DebugSaveUserInfo(existingLDAPUser.GetUserInfoString());
_userManager.SaveUserInfo(existingLDAPUser);
_userManager.UpdateUserInfo(existingLDAPUser);
break;
case LdapOperationType.SaveTest:
case LdapOperationType.SyncTest:
@ -394,7 +394,7 @@ public class LdapOperationJob : DistributedTaskProgress
}
}
private void SyncLDAP()
private async Task SyncLDAP()
{
var currentDomainSettings = _settingsManager.Load<LdapCurrentDomain>();
@ -408,21 +408,21 @@ public class LdapOperationJob : DistributedTaskProgress
{
_logger.DebugSyncLDAPUsers();
SyncLDAPUsers();
await SyncLDAPUsers();
}
else
{
_logger.DebugSyncLDAPUsersInGroups();
SyncLDAPUsersInGroups();
await SyncLDAPUsersInGroups();
}
SyncLdapAvatar();
await SyncLdapAvatar();
SyncLdapAccessRights();
await SyncLdapAccessRights();
}
private void SyncLdapAvatar()
private async Task SyncLdapAvatar()
{
SetProgress(90, Resource.LdapSettingsStatusUpdatingUserPhotos);
@ -438,7 +438,7 @@ public class LdapOperationJob : DistributedTaskProgress
foreach (var guid in ph.CurrentPhotos.Keys)
{
_logger.InfoSyncLdapAvatarsRemovingPhoto(guid);
_userPhotoManager.RemovePhoto(guid);
await _userPhotoManager.RemovePhoto(guid);
_userPhotoManager.ResetThumbnailSettings(guid);
}
@ -511,16 +511,16 @@ public class LdapOperationJob : DistributedTaskProgress
_settingsManager.Save(photoSettings);
}
private void SyncLdapAccessRights()
private async Task SyncLdapAccessRights()
{
SetProgress(95, Resource.LdapSettingsStatusUpdatingAccessRights);
var currentUserRights = new List<LdapSettings.AccessRight>();
TakeUsersRights(_currentUser != null ? currentUserRights : null);
await TakeUsersRights(_currentUser != null ? currentUserRights : null);
if (LDAPSettings.GroupMembership && LDAPSettings.AccessRights != null && LDAPSettings.AccessRights.Count > 0)
{
GiveUsersRights(LDAPSettings.AccessRights, _currentUser != null ? currentUserRights : null);
await GiveUsersRights(LDAPSettings.AccessRights, _currentUser != null ? currentUserRights : null);
}
if (currentUserRights.Count > 0)
@ -531,7 +531,7 @@ public class LdapOperationJob : DistributedTaskProgress
_settingsManager.Save(LDAPSettings);
}
private void TakeUsersRights(List<LdapSettings.AccessRight> currentUserRights)
private async Task TakeUsersRights(List<LdapSettings.AccessRight> currentUserRights)
{
var current = _settingsManager.Load<LdapCurrentAcccessSettings>();
@ -558,7 +558,7 @@ public class LdapOperationJob : DistributedTaskProgress
else
{
_logger.DebugTakingAdminRights(right.Key, user);
_webItemSecurity.SetProductAdministrator(LdapSettings.AccessRightsGuids[right.Key], userId, false);
await _webItemSecurity.SetProductAdministrator(LdapSettings.AccessRightsGuids[right.Key], userId, false);
}
}
}
@ -567,7 +567,7 @@ public class LdapOperationJob : DistributedTaskProgress
_settingsManager.Save(current);
}
private void GiveUsersRights(Dictionary<LdapSettings.AccessRight, string> accessRightsSettings, List<LdapSettings.AccessRight> currentUserRights)
private async Task GiveUsersRights(Dictionary<LdapSettings.AccessRight, string> accessRightsSettings, List<LdapSettings.AccessRight> currentUserRights)
{
var current = _settingsManager.Load<LdapCurrentAcccessSettings>();
var currentAccessRights = new Dictionary<LdapSettings.AccessRight, List<string>>();
@ -618,7 +618,7 @@ public class LdapOperationJob : DistributedTaskProgress
if (_webItemSecurity.IsProductAdministrator(prodId, user.Id))
{
cleared = true;
_webItemSecurity.SetProductAdministrator(prodId, user.Id, false);
await _webItemSecurity.SetProductAdministrator(prodId, user.Id, false);
}
}
@ -636,7 +636,7 @@ public class LdapOperationJob : DistributedTaskProgress
SetProgress((int)currentPercent,
string.Format(Resource.LdapSettingsStatusGivingRights, _userFormatter.GetUserName(user, DisplayUserNameFormat.Default), access.Key));
_webItemSecurity.SetProductAdministrator(LdapSettings.AccessRightsGuids[access.Key], user.Id, true);
await _webItemSecurity.SetProductAdministrator(LdapSettings.AccessRightsGuids[access.Key], user.Id, true);
if (currentUserRights != null && currentUserRights.Contains(access.Key))
{
@ -651,7 +651,7 @@ public class LdapOperationJob : DistributedTaskProgress
_settingsManager.Save(current);
}
private void SyncLDAPUsers()
private async Task SyncLDAPUsers()
{
SetProgress(15, Resource.LdapSettingsStatusGettingUsersFromLdap);
@ -675,14 +675,14 @@ public class LdapOperationJob : DistributedTaskProgress
: Resource.LdapSettingsStatusSyncingUsers,
"");
SyncDbUsers(ldapUsers);
await SyncDbUsers(ldapUsers);
SetProgress(70, Resource.LdapSettingsStatusRemovingOldGroups, "");
RemoveOldDbGroups(new List<GroupInfo>()); // Remove all db groups with sid
}
private void SyncLDAPUsersInGroups()
private async Task SyncLDAPUsersInGroups()
{
SetProgress(15, Resource.LdapSettingsStatusGettingGroupsFromLdap);
@ -717,11 +717,11 @@ public class LdapOperationJob : DistributedTaskProgress
: Resource.LdapSettingsStatusSyncingUsers,
"");
var newUniqueLdapGroupUsers = SyncGroupsUsers(uniqueLdapGroupUsers);
var newUniqueLdapGroupUsers = await SyncGroupsUsers(uniqueLdapGroupUsers);
SetProgress(60, Resource.LdapSettingsStatusSavingGroups, "");
SyncDbGroups(ldapGroupsUsers);
await SyncDbGroups(ldapGroupsUsers);
SetProgress(80, Resource.LdapSettingsStatusRemovingOldGroups, "");
@ -732,7 +732,7 @@ public class LdapOperationJob : DistributedTaskProgress
RemoveOldDbUsers(newUniqueLdapGroupUsers);
}
private void SyncDbGroups(Dictionary<GroupInfo, List<UserInfo>> ldapGroupsWithUsers)
private async Task SyncDbGroups(Dictionary<GroupInfo, List<UserInfo>> ldapGroupsWithUsers)
{
const double percents = 20;
@ -765,18 +765,18 @@ public class LdapOperationJob : DistributedTaskProgress
if (Equals(dbLdapGroup, Constants.LostGroupInfo))
{
AddNewGroup(ldapGroup, ldapGroupUsers, gIndex, gCount);
await AddNewGroup(ldapGroup, ldapGroupUsers, gIndex, gCount);
}
else
{
UpdateDbGroup(dbLdapGroup, ldapGroup, ldapGroupUsers, gIndex, gCount);
await UpdateDbGroup(dbLdapGroup, ldapGroup, ldapGroupUsers, gIndex, gCount);
}
percentage += step;
}
}
private void AddNewGroup(GroupInfo ldapGroup, List<UserInfo> ldapGroupUsers, int gIndex, int gCount)
private async Task AddNewGroup(GroupInfo ldapGroup, List<UserInfo> ldapGroupUsers, int gIndex, int gCount)
{
if (!ldapGroupUsers.Any()) // Skip empty groups
{
@ -815,7 +815,7 @@ public class LdapOperationJob : DistributedTaskProgress
++index, count,
_userFormatter.GetUserName(userBySid, DisplayUserNameFormat.Default)));
_userManager.AddUserIntoGroup(userBySid.Id, ldapGroup.ID);
await _userManager.AddUserIntoGroup(userBySid.Id, ldapGroup.ID);
}
break;
case LdapOperationType.SaveTest:
@ -846,7 +846,7 @@ public class LdapOperationJob : DistributedTaskProgress
return needUpdate;
}
private void UpdateDbGroup(GroupInfo dbLdapGroup, GroupInfo ldapGroup, List<UserInfo> ldapGroupUsers, int gIndex,
private async Task UpdateDbGroup(GroupInfo dbLdapGroup, GroupInfo ldapGroup, List<UserInfo> ldapGroupUsers, int gIndex,
int gCount)
{
SetProgress(currentSource:
@ -911,7 +911,7 @@ public class LdapOperationJob : DistributedTaskProgress
++index, count,
_userFormatter.GetUserName(userInfo, DisplayUserNameFormat.Default)));
_userManager.AddUserIntoGroup(userInfo.Id, dbLdapGroup.ID);
await _userManager.AddUserIntoGroup(userInfo.Id, dbLdapGroup.ID);
}
if (dbGroupMembers.All(dbUser => groupMembersToRemove.Exists(u => u.Id.Equals(dbUser.Id)))
@ -965,7 +965,7 @@ public class LdapOperationJob : DistributedTaskProgress
return foundUser;
}
private void SyncDbUsers(List<UserInfo> ldapUsers)
private async Task SyncDbUsers(List<UserInfo> ldapUsers)
{
const double percents = 35;
@ -992,12 +992,11 @@ public class LdapOperationJob : DistributedTaskProgress
{
case LdapOperationType.Save:
case LdapOperationType.Sync:
_lDAPUserManager.SyncLDAPUser(userInfo, ldapUsers);
await _lDAPUserManager.SyncLDAPUser(userInfo, ldapUsers);
break;
case LdapOperationType.SaveTest:
case LdapOperationType.SyncTest:
LdapChangeCollection changes;
_lDAPUserManager.GetLDAPSyncUserChange(userInfo, ldapUsers, out changes);
var changes = (await _lDAPUserManager.GetLDAPSyncUserChange(userInfo, ldapUsers)).LdapChangeCollection;
_ldapChanges.AddRange(changes);
break;
default:
@ -1065,7 +1064,7 @@ public class LdapOperationJob : DistributedTaskProgress
_logger.DebugSaveUserInfo(removedUser.GetUserInfoString());
_userManager.SaveUserInfo(removedUser);
_userManager.UpdateUserInfo(removedUser);
break;
case LdapOperationType.SaveTest:
case LdapOperationType.SyncTest:
@ -1129,7 +1128,7 @@ public class LdapOperationJob : DistributedTaskProgress
}
}
private List<UserInfo> SyncGroupsUsers(List<UserInfo> uniqueLdapGroupUsers)
private async Task<List<UserInfo>> SyncGroupsUsers(List<UserInfo> uniqueLdapGroupUsers)
{
const double percents = 30;
@ -1157,7 +1156,7 @@ public class LdapOperationJob : DistributedTaskProgress
{
case LdapOperationType.Save:
case LdapOperationType.Sync:
user = _lDAPUserManager.SyncLDAPUser(ldapGroupUser, uniqueLdapGroupUsers);
user = await _lDAPUserManager.SyncLDAPUser(ldapGroupUser, uniqueLdapGroupUsers);
if (!Equals(user, Constants.LostUser))
{
newUniqueLdapGroupUsers.Add(user);
@ -1165,8 +1164,9 @@ public class LdapOperationJob : DistributedTaskProgress
break;
case LdapOperationType.SaveTest:
case LdapOperationType.SyncTest:
LdapChangeCollection changes;
user = _lDAPUserManager.GetLDAPSyncUserChange(ldapGroupUser, uniqueLdapGroupUsers, out changes);
var wrapper = await _lDAPUserManager.GetLDAPSyncUserChange(ldapGroupUser, uniqueLdapGroupUsers);
user = wrapper.UserInfo;
var changes = wrapper.LdapChangeCollection;
if (!Equals(user, Constants.LostUser))
{
newUniqueLdapGroupUsers.Add(user);

View File

@ -51,6 +51,7 @@ global using ASC.ActiveDirectory.Novell;
global using ASC.ActiveDirectory.Novell.Data;
global using ASC.ActiveDirectory.Novell.Exceptions;
global using ASC.ActiveDirectory.Novell.Extensions;
global using ASC.ActiveDirectory.Wrapper;
global using ASC.Common;
global using ASC.Common.Security.Authorizing;
global using ASC.Common.Threading;

View File

@ -1,4 +1,5 @@
// (c) Copyright Ascensio System SIA 2010-2022

// (c) Copyright Ascensio System SIA 2010-2022
//
// This program is a free software product.
// You can redistribute it and/or modify it under the terms
@ -108,9 +109,9 @@ public class LdapUserManager
return Equals(foundUser, Constants.LostUser) || foundUser.Id == userId;
}
public bool TryAddLDAPUser(UserInfo ldapUserInfo, bool onlyGetChanges, out UserInfo portalUserInfo)
public async Task<UserInfo> TryAddLDAPUser(UserInfo ldapUserInfo, bool onlyGetChanges)
{
portalUserInfo = Constants.LostUser;
var portalUserInfo = Constants.LostUser;
try
{
@ -125,24 +126,14 @@ public class LdapUserManager
{
_logger.DebugUserAlredyExistsForEmail(ldapUserInfo.Sid, ldapUserInfo.Email);
return false;
return portalUserInfo;
}
if (!TryChangeExistingUserName(ldapUserInfo.UserName, onlyGetChanges))
{
_logger.DebugUserAlredyExistsForUserName(ldapUserInfo.Sid, ldapUserInfo.UserName);
return false;
}
try
{
_countRoomAdminChecker.CheckAppend().Wait();
}
catch (Exception)
{
_logger.DebugExceedQuota(ldapUserInfo.Sid, ldapUserInfo.UserName);
throw;
return portalUserInfo;
}
if (!ldapUserInfo.WorkFromDate.HasValue)
@ -153,12 +144,12 @@ public class LdapUserManager
if (onlyGetChanges)
{
portalUserInfo = ldapUserInfo;
return true;
return portalUserInfo;
}
_logger.DebugSaveUserInfo(ldapUserInfo.GetUserInfoString());
portalUserInfo = _userManager.SaveUserInfo(ldapUserInfo);
portalUserInfo = await _userManager.SaveUserInfo(ldapUserInfo);
var quotaSettings = _settingsManager.Load<TenantUserQuotaSettings>();
if (quotaSettings.EnableUserQuota)
@ -172,8 +163,6 @@ public class LdapUserManager
_logger.DebugSetUserPassword(portalUserInfo.Id);
_securityContext.SetUserPasswordHash(portalUserInfo.Id, passwordHash);
return true;
}
catch (TenantQuotaException ex)
{
@ -188,7 +177,7 @@ public class LdapUserManager
}
}
return false;
return portalUserInfo;
}
private bool TryChangeExistingUserName(string ldapUserName, bool onlyGetChanges)
@ -223,7 +212,7 @@ public class LdapUserManager
_logger.DebugSaveUserInfo(otherUser.GetUserInfoString());
_userManager.SaveUserInfo(otherUser);
_userManager.UpdateUserInfo(otherUser);
return true;
}
@ -235,25 +224,26 @@ public class LdapUserManager
return false;
}
public UserInfo GetLDAPSyncUserChange(UserInfo ldapUserInfo, List<UserInfo> ldapUsers, out LdapChangeCollection changes)
public async Task<UserInfoAndLdapChangeCollectionWrapper> GetLDAPSyncUserChange(UserInfo ldapUserInfo, List<UserInfo> ldapUsers)
{
return SyncLDAPUser(ldapUserInfo, ldapUsers, out changes, true);
return await SyncLDAPUser(ldapUserInfo, ldapUsers, true);
}
public UserInfo SyncLDAPUser(UserInfo ldapUserInfo, List<UserInfo> ldapUsers = null)
public async Task<UserInfo> SyncLDAPUser(UserInfo ldapUserInfo, List<UserInfo> ldapUsers = null)
{
LdapChangeCollection changes;
return SyncLDAPUser(ldapUserInfo, ldapUsers, out changes);
return (await SyncLDAPUser(ldapUserInfo, ldapUsers, false)).UserInfo;
}
private UserInfo SyncLDAPUser(UserInfo ldapUserInfo, List<UserInfo> ldapUsers, out LdapChangeCollection changes, bool onlyGetChanges = false)
private async Task<UserInfoAndLdapChangeCollectionWrapper> SyncLDAPUser(UserInfo ldapUserInfo, List<UserInfo> ldapUsers, bool onlyGetChanges = false)
{
UserInfo result;
changes = new LdapChangeCollection(_userFormatter);
UserInfo userToUpdate;
var wrapper = new UserInfoAndLdapChangeCollectionWrapper()
{
LdapChangeCollection = new LdapChangeCollection(_userFormatter),
UserInfo = Constants.LostUser
};
var userBySid = _userManager.GetUserBySid(ldapUserInfo.Sid);
if (Equals(userBySid, Constants.LostUser))
@ -266,28 +256,28 @@ public class LdapUserManager
{
if (onlyGetChanges)
{
changes.SetSkipUserChange(ldapUserInfo);
wrapper.LdapChangeCollection.SetSkipUserChange(ldapUserInfo);
}
_logger.DebugSyncUserLdapFailedWithStatus(ldapUserInfo.Sid, ldapUserInfo.UserName,
Enum.GetName(typeof(EmployeeStatus), ldapUserInfo.Status));
return Constants.LostUser;
return wrapper;
}
if (!TryAddLDAPUser(ldapUserInfo, onlyGetChanges, out result))
wrapper.UserInfo = await TryAddLDAPUser(ldapUserInfo, onlyGetChanges);
if (wrapper.UserInfo == Constants.LostUser)
{
if (onlyGetChanges)
{
changes.SetSkipUserChange(ldapUserInfo);
wrapper.LdapChangeCollection.SetSkipUserChange(ldapUserInfo);
}
return Constants.LostUser;
return wrapper;
}
if (onlyGetChanges)
{
changes.SetAddUserChange(result, _logger);
wrapper.LdapChangeCollection.SetAddUserChange(wrapper.UserInfo, _logger);
}
if (!onlyGetChanges && _settingsManager.Load<LdapSettings>().SendWelcomeEmail &&
@ -317,7 +307,7 @@ public class LdapUserManager
new TagValue(NotifyCommonTags.WithoutUnsubscribe, true));
}
return result;
return wrapper;
}
if (userByEmail.IsLDAP())
@ -326,13 +316,13 @@ public class LdapUserManager
{
if (onlyGetChanges)
{
changes.SetSkipUserChange(ldapUserInfo);
wrapper.LdapChangeCollection.SetSkipUserChange(ldapUserInfo);
}
_logger.DebugSyncUserLdapFailedWithEmail(
ldapUserInfo.Sid, ldapUserInfo.UserName, ldapUserInfo.Email);
return Constants.LostUser;
return wrapper;
}
}
@ -350,29 +340,30 @@ public class LdapUserManager
_logger.DebugSyncUserLdapSkipping(ldapUserInfo.Sid, ldapUserInfo.UserName);
if (onlyGetChanges)
{
changes.SetNoneUserChange(ldapUserInfo);
wrapper.LdapChangeCollection.SetNoneUserChange(ldapUserInfo);
}
return userBySid;
wrapper.UserInfo = userBySid;
return wrapper;
}
_logger.DebugSyncUserLdapUpdaiting(ldapUserInfo.Sid, ldapUserInfo.UserName);
if (!TryUpdateUserWithLDAPInfo(userToUpdate, ldapUserInfo, onlyGetChanges, out result))
UserInfo uf;
if (!TryUpdateUserWithLDAPInfo(userToUpdate, ldapUserInfo, onlyGetChanges, out uf))
{
if (onlyGetChanges)
{
changes.SetSkipUserChange(ldapUserInfo);
wrapper.LdapChangeCollection.SetSkipUserChange(ldapUserInfo);
}
return Constants.LostUser;
return wrapper;
}
if (onlyGetChanges)
{
changes.SetUpdateUserChange(ldapUserInfo, result, _logger);
wrapper.LdapChangeCollection.SetUpdateUserChange(ldapUserInfo, uf, _logger);
}
return result;
wrapper.UserInfo = uf;
return wrapper;
}
private const string EXT_MOB_PHONE = "extmobphone";
@ -601,7 +592,7 @@ public class LdapUserManager
{
_logger.DebugSaveUserInfo(userToUpdate.GetUserInfoString());
portlaUserInfo = _userManager.SaveUserInfo(userToUpdate);
portlaUserInfo = _userManager.UpdateUserInfo(userToUpdate);
}
return true;
@ -615,9 +606,9 @@ public class LdapUserManager
return false;
}
public bool TryGetAndSyncLdapUserInfo(string login, string password, out UserInfo userInfo)
public async Task<UserInfo> TryGetAndSyncLdapUserInfo(string login, string password)
{
userInfo = Constants.LostUser;
var userInfo = Constants.LostUser;
try
@ -626,7 +617,7 @@ public class LdapUserManager
if (!settings.EnableLdapAuthentication)
{
return false;
return userInfo;
}
_logger.DebugTryGetAndSyncLdapUserInfo(login);
@ -638,7 +629,7 @@ public class LdapUserManager
if (ldapUserInfo == null || ldapUserInfo.Item1.Equals(Constants.LostUser))
{
_logger.DebugNovellLdapUserImporterLoginFailed(login);
return false;
return userInfo;
}
var portalUser = _userManager.GetUserBySid(ldapUserInfo.Item1.Sid);
@ -648,16 +639,16 @@ public class LdapUserManager
if (!ldapUserInfo.Item2.IsDisabled)
{
_logger.DebugTryCheckAndSyncToLdapUser(ldapUserInfo.Item1.UserName, ldapUserInfo.Item1.Email, ldapUserInfo.Item2.DistinguishedName);
if (!TryCheckAndSyncToLdapUser(ldapUserInfo, _novellLdapUserImporter, out userInfo))
userInfo = await TryCheckAndSyncToLdapUser(ldapUserInfo, _novellLdapUserImporter);
if (Equals(userInfo, Constants.LostUser))
{
_logger.DebugTryCheckAndSyncToLdapUserFailed();
return false;
return userInfo;
}
}
else
{
return false;
return userInfo;
}
}
else
@ -679,18 +670,18 @@ public class LdapUserManager
tenantManager.SetCurrentTenant(tenant);
securityContext.AuthenticateMe(Core.Configuration.Constants.CoreSystem);
var uInfo = SyncLDAPUser(ldapUserInfo.Item1);
var uInfo = await SyncLDAPUser(ldapUserInfo.Item1);
var newLdapUserInfo = new Tuple<UserInfo, LdapObject>(uInfo, ldapUserInfo.Item2);
if (novellLdapUserImporter.Settings.GroupMembership)
{
if (!novellLdapUserImporter.TrySyncUserGroupMembership(newLdapUserInfo))
if (!(await novellLdapUserImporter.TrySyncUserGroupMembership(newLdapUserInfo)))
{
log.DebugTryGetAndSyncLdapUserInfoDisablingUser(login, uInfo);
uInfo.Status = EmployeeStatus.Terminated;
uInfo.Sid = null;
userManager.SaveUserInfo(uInfo);
userManager.UpdateUserInfo(uInfo);
await cookiesManager.ResetUserCookie(uInfo.Id);
}
}
@ -699,7 +690,7 @@ public class LdapUserManager
if (ldapUserInfo.Item2.IsDisabled)
{
_logger.DebugTryGetAndSyncLdapUserInfo(login);
return false;
return userInfo;
}
else
{
@ -707,24 +698,24 @@ public class LdapUserManager
}
}
return true;
return userInfo;
}
catch (Exception ex)
{
_logger.ErrorTryGetLdapUserInfoFailed(login, ex);
userInfo = Constants.LostUser;
return false;
return userInfo;
}
}
private bool TryCheckAndSyncToLdapUser(Tuple<UserInfo, LdapObject> ldapUserInfo, LdapUserImporter importer,
out UserInfo userInfo)
private async Task<UserInfo> TryCheckAndSyncToLdapUser(Tuple<UserInfo, LdapObject> ldapUserInfo, LdapUserImporter importer)
{
UserInfo userInfo;
try
{
_securityContext.AuthenticateMe(Core.Configuration.Constants.CoreSystem);
userInfo = SyncLDAPUser(ldapUserInfo.Item1);
userInfo = await SyncLDAPUser(ldapUserInfo.Item1);
if (userInfo == null || userInfo.Equals(Constants.LostUser))
{
@ -735,18 +726,18 @@ public class LdapUserManager
if (!importer.Settings.GroupMembership)
{
return true;
return userInfo;
}
if (!importer.TrySyncUserGroupMembership(newLdapUserInfo))
if (!(await importer.TrySyncUserGroupMembership(newLdapUserInfo)))
{
userInfo.Sid = null;
userInfo.Status = EmployeeStatus.Terminated;
_userManager.SaveUserInfo(userInfo);
_userManager.UpdateUserInfo(userInfo);
throw new Exception("The user did not pass the configuration check by ldap group settings");
}
return true;
return userInfo;
}
catch (Exception ex)
{
@ -759,6 +750,6 @@ public class LdapUserManager
}
userInfo = Constants.LostUser;
return false;
return userInfo;
}
}

View File

@ -0,0 +1,32 @@
// (c) Copyright Ascensio System SIA 2010-2022
//
// This program is a free software product.
// You can redistribute it and/or modify it under the terms
// of the GNU Affero General Public License (AGPL) version 3 as published by the Free Software
// Foundation. In accordance with Section 7(a) of the GNU AGPL its Section 15 shall be amended
// to the effect that Ascensio System SIA expressly excludes the warranty of non-infringement of
// any third-party rights.
//
// This program is distributed WITHOUT ANY WARRANTY, without even the implied warranty
// of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. For details, see
// the GNU AGPL at: http://www.gnu.org/licenses/agpl-3.0.html
//
// You can contact Ascensio System SIA at Lubanas st. 125a-25, Riga, Latvia, EU, LV-1021.
//
// The interactive user interfaces in modified source and object code versions of the Program must
// display Appropriate Legal Notices, as required under Section 5 of the GNU AGPL version 3.
//
// Pursuant to Section 7(b) of the License you must retain the original Product logo when
// distributing the program. Pursuant to Section 7(e) we decline to grant you any rights under
// trademark law for use of our trademarks.
//
// All the Product's GUI elements, including illustrations and icon sets, as well as technical writing
// content are licensed under the terms of the Creative Commons Attribution-ShareAlike 4.0
// International. See the License terms at http://creativecommons.org/licenses/by-sa/4.0/legalcode
namespace ASC.ActiveDirectory.Wrapper;
public class UserInfoAndLdapChangeCollectionWrapper
{
public UserInfo UserInfo { get; set; }
public LdapChangeCollection LdapChangeCollection { get; set; }
}

View File

@ -57,7 +57,7 @@ public class CookieAuthHandler : AuthenticationHandler<AuthenticationSchemeOptio
_httpContextAccessor = httpContextAccessor;
}
protected override Task<AuthenticateResult> HandleAuthenticateAsync()
protected override async Task<AuthenticateResult> HandleAuthenticateAsync()
{
try
{
@ -75,7 +75,7 @@ public class CookieAuthHandler : AuthenticationHandler<AuthenticationSchemeOptio
authorization = authorization.Substring("Bearer ".Length);
}
if (!_securityContext.AuthenticateMe(authorization))
if (!(await _securityContext.AuthenticateMe(authorization)))
{
throw new AuthenticationException(nameof(HttpStatusCode.Unauthorized));
}
@ -83,7 +83,7 @@ public class CookieAuthHandler : AuthenticationHandler<AuthenticationSchemeOptio
}
catch (Exception)
{
return Task.FromResult(AuthenticateResult.Fail(new AuthenticationException(nameof(HttpStatusCode.Unauthorized))));
return AuthenticateResult.Fail(new AuthenticationException(nameof(HttpStatusCode.Unauthorized)));
}
finally
{
@ -95,6 +95,6 @@ public class CookieAuthHandler : AuthenticationHandler<AuthenticationSchemeOptio
}
}
return Task.FromResult(AuthenticateResult.Success(new AuthenticationTicket(Context.User, Scheme.Name)));
return AuthenticateResult.Success(new AuthenticationTicket(Context.User, Scheme.Name));
}
}
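Here HandleAuthenticateAsync becomes genuinely asynchronous: the body awaits SecurityContext.AuthenticateMe and returns AuthenticateResult values directly, where the previous version wrapped them in Task.FromResult. A simplified, self-contained sketch of that conversion (the delegate and return strings are placeholders, not the ASP.NET Core handler API):

using System;
using System.Threading.Tasks;

public static class HandlerSketch
{
    // Before: a Task<string> method returning Task.FromResult(...) around a
    // synchronous authenticate call. After: the method is async, awaits the
    // authentication delegate, and returns plain values.
    public static async Task<string> HandleAsync(Func<string, Task<bool>> authenticate, string token)
    {
        try
        {
            if (!await authenticate(token))
            {
                throw new UnauthorizedAccessException();
            }
            return "ok";
        }
        catch (Exception)
        {
            return "unauthorized";   // was: return Task.FromResult("unauthorized");
        }
    }
}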

View File

@ -57,9 +57,17 @@ public static class ISetupBuilderExtension
awsTarget.LogGroup = awsTarget.LogGroup.Replace("${var:name}", settings.Name);
}
if (!string.IsNullOrEmpty(settings.AWSSecretAccessKey))
var awsAccessKeyId = string.IsNullOrEmpty(settings.AWSAccessKeyId) ? configuration["aws:cloudWatch:accessKeyId"] : settings.AWSAccessKeyId;
var awsSecretAccessKey = string.IsNullOrEmpty(settings.AWSSecretAccessKey) ? configuration["aws:cloudWatch:secretAccessKey"] : settings.AWSSecretAccessKey;
if (!string.IsNullOrEmpty(awsAccessKeyId))
{
awsTarget.Credentials = new Amazon.Runtime.BasicAWSCredentials(settings.AWSAccessKeyId, settings.AWSSecretAccessKey);
awsTarget.LogGroup = String.IsNullOrEmpty(configuration["aws:cloudWatch:logGroupName"]) ? awsTarget.LogGroup : configuration["aws:cloudWatch:logGroupName"];
awsTarget.Region = String.IsNullOrEmpty(configuration["aws:cloudWatch:region"]) ? awsTarget.Region : configuration["aws:cloudWatch:region"];
awsTarget.Credentials = new Amazon.Runtime.BasicAWSCredentials(awsAccessKeyId, awsSecretAccessKey);
}
}

View File

@ -157,7 +157,7 @@ public class EmployeeFullDtoHelper : EmployeeDtoHelper
return lambda;
}
public EmployeeFullDto GetSimple(UserInfo userInfo)
public async Task<EmployeeFullDto> GetSimple(UserInfo userInfo)
{
var result = new EmployeeFullDto
{
@ -167,7 +167,7 @@ public class EmployeeFullDtoHelper : EmployeeDtoHelper
FillGroups(result, userInfo);
var photoData = _userPhotoManager.GetUserPhotoData(userInfo.Id, UserPhotoManager.BigFotoSize);
var photoData = await _userPhotoManager.GetUserPhotoData(userInfo.Id, UserPhotoManager.BigFotoSize);
if (photoData != null)
{

View File

@ -40,7 +40,6 @@
<PackageReference Include="log4net" Version="2.0.14" />
<PackageReference Include="Microsoft.AspNetCore.Cryptography.KeyDerivation" Version="6.0.7" />
<PackageReference Include="Microsoft.AspNetCore.Mvc.Core" Version="2.2.5" />
<PackageReference Include="Microsoft.Extensions.Caching.Abstractions" Version="6.0.0" />
<PackageReference Include="Microsoft.Extensions.Caching.StackExchangeRedis" Version="6.0.7" />
<PackageReference Include="Microsoft.Extensions.Configuration.Json" Version="6.0.0" />
<PackageReference Include="Microsoft.Extensions.Hosting.Abstractions" Version="6.0.0" />

View File

@ -145,7 +145,7 @@ public class KafkaCacheNotify<T> : IDisposable, ICacheNotify<T> where T : IMessa
_cancelationToken[channelName] = new CancellationTokenSource();
_actions[channelName] = onchange;
void action()
async void action()
{
var conf = new ConsumerConfig(_clientConfig)
{
@ -160,7 +160,7 @@ public class KafkaCacheNotify<T> : IDisposable, ICacheNotify<T> where T : IMessa
try
{
//TODO: must add checking exist
adminClient.CreateTopicsAsync(
await adminClient.CreateTopicsAsync(
new TopicSpecification[]
{
new TopicSpecification
@ -169,7 +169,7 @@ public class KafkaCacheNotify<T> : IDisposable, ICacheNotify<T> where T : IMessa
NumPartitions = 1,
ReplicationFactor = 1
}
}).Wait();
});
}
catch (AggregateException) { }
}

View File

@ -24,8 +24,6 @@
// content are licensed under the terms of the Creative Commons Attribution-ShareAlike 4.0
// International. See the License terms at http://creativecommons.org/licenses/by-sa/4.0/legalcode
namespace System.IO;
[Singletone]
public class TempPath
{

View File

@ -24,8 +24,6 @@
// content are licensed under the terms of the Creative Commons Attribution-ShareAlike 4.0
// International. See the License terms at http://creativecommons.org/licenses/by-sa/4.0/legalcode
using JsonSerializer = System.Text.Json.JsonSerializer;
namespace ASC.Common.Threading;
[ProtoContract(IgnoreUnknownSubTypes = true)]
@ -77,21 +75,6 @@ public class DistributedTask
Publication(this);
}
public T GetProperty<T>(string propName)
{
if (!_props.TryGetValue(propName, out var propValue))
{
return default;
}
return JsonSerializer.Deserialize<T>(propValue);
}
public void SetProperty<T>(string propName, T propValue)
{
_props[propName] = JsonSerializer.Serialize(propValue);
}
public dynamic this[string propName]
{
get

View File

@ -45,15 +45,15 @@ public class DistributedTaskProgress : DistributedTask
[ProtoMember(3)]
protected int StepCount { get; set; }
public void RunJob()
public virtual async Task RunJob(DistributedTask _, CancellationToken cancellationToken)
{
Percentage = 0;
Status = DistributedTaskStatus.Running;
DoJob();
await DoJob();
}
protected virtual void DoJob() { }
protected virtual Task DoJob() { return Task.CompletedTask; }
protected void StepDone()
{
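The hunk above turns the progress-item job into an awaitable pipeline: RunJob is now virtual async Task and delegates to a protected virtual Task DoJob() that subclasses override. A standalone sketch of that shape, with the distributed-queue plumbing omitted and the names below invented for illustration:

using System.Threading;
using System.Threading.Tasks;

public class ProgressItemSketch
{
    public double Percentage { get; protected set; }

    // Mirrors the new signature: the job is awaited instead of run synchronously.
    public virtual async Task RunJob(object _, CancellationToken cancellationToken)
    {
        Percentage = 0;
        await DoJob();
    }

    protected virtual Task DoJob() { return Task.CompletedTask; }
}

public class SleepProgressItem : ProgressItemSketch
{
    protected override async Task DoJob()
    {
        await Task.Delay(100);   // stand-in for real work
        Percentage = 100;
    }
}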

View File

@ -121,41 +121,7 @@ public class DistributedTaskQueue
public void EnqueueTask(DistributedTaskProgress taskProgress)
{
EnqueueTask((a, b) => taskProgress.RunJob(), taskProgress);
}
public void EnqueueTask(Action<DistributedTask, CancellationToken> action, DistributedTask distributedTask = null)
{
if (distributedTask == null)
{
distributedTask = new DistributedTask();
}
distributedTask.InstanceId = INSTANCE_ID;
var cancelation = new CancellationTokenSource();
var token = cancelation.Token;
_cancelations[distributedTask.Id] = cancelation;
var task = new Task(() => { action(distributedTask, token); }, token, TaskCreationOptions.LongRunning);
task.ConfigureAwait(false)
.GetAwaiter()
.OnCompleted(() => OnCompleted(task, distributedTask.Id));
distributedTask.Status = DistributedTaskStatus.Running;
if (distributedTask.Publication == null)
{
distributedTask.Publication = GetPublication();
}
distributedTask.PublishChanges();
task.Start(Scheduler);
_logger.TraceEnqueueTask(distributedTask.Id, INSTANCE_ID);
EnqueueTask(taskProgress.RunJob, taskProgress);
}
public void EnqueueTask(Func<DistributedTask, CancellationToken, Task> action, DistributedTask distributedTask = null)
@ -393,5 +359,4 @@ public class DistributedTaskQueue
return destination;
}
}

View File

@ -50,7 +50,6 @@
<None Remove="protos\user_photo_cache_item.proto" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="AWSSDK.Core" Version="3.7.10.11" />
<PackageReference Include="AWSSDK.SimpleEmail" Version="3.7.0.150" />
<PackageReference Include="FirebaseAdmin" Version="2.3.0" />
<PackageReference Include="Grpc.Tools" Version="2.47.0">
@ -63,7 +62,6 @@
<PackageReference Include="Pomelo.EntityFrameworkCore.MySql" Version="6.0.2" />
<PackageReference Include="System.Linq.Async" Version="6.0.1" />
<PackageReference Include="Telegram.Bot" Version="17.0.0" />
<PackageReference Include="Telegram.Bot.Extensions.Polling" Version="1.0.2" />
</ItemGroup>
<ItemGroup>
<Protobuf Include="protos\create_client_proto.proto" />

View File

@ -61,7 +61,7 @@ public class UserManager
private readonly CardDavAddressbook _cardDavAddressbook;
private readonly ILogger<UserManager> _log;
private readonly ICache _cache;
private readonly TenantQuotaFeatureCheckerCount<CountRoomAdminFeature> _tenantQuotaFeatureChecker;
private readonly TenantQuotaFeatureCheckerCount<CountRoomAdminFeature> _countRoomAdminChecker;
private readonly TenantQuotaFeatureCheckerCount<CountUserFeature> _activeUsersFeatureChecker;
private readonly Constants _constants;
@ -85,7 +85,7 @@ public class UserManager
CardDavAddressbook cardDavAddressbook,
ILogger<UserManager> log,
ICache cache,
TenantQuotaFeatureCheckerCount<CountRoomAdminFeature> tenantQuotaFeatureChecker,
TenantQuotaFeatureCheckerCount<CountRoomAdminFeature> countRoomAdminChecker,
TenantQuotaFeatureCheckerCount<CountUserFeature> activeUsersFeatureChecker
)
{
@ -100,7 +100,7 @@ public class UserManager
_cardDavAddressbook = cardDavAddressbook;
_log = log;
_cache = cache;
_tenantQuotaFeatureChecker = tenantQuotaFeatureChecker;
_countRoomAdminChecker = countRoomAdminChecker;
_activeUsersFeatureChecker = activeUsersFeatureChecker;
_constants = _userManagerConstants.Constants;
}
@ -312,22 +312,49 @@ public class UserManager
return findUsers.ToArray();
}
public UserInfo SaveUserInfo(UserInfo u, bool isUser = false, bool syncCardDav = false)
public UserInfo UpdateUserInfo(UserInfo u)
{
if (IsSystemUser(u.Id))
{
return SystemUsers[u.Id];
}
if (u.Id == Guid.Empty)
{
_permissionContext.DemandPermissions(Constants.Action_AddRemoveUser);
}
else
{
_permissionContext.DemandPermissions(new UserSecurityProvider(u.Id), Constants.Action_EditUser);
if (u.Status == EmployeeStatus.Terminated && u.Id == _tenantManager.GetCurrentTenant().OwnerId)
{
throw new InvalidOperationException("Can not disable tenant owner.");
}
var oldUserData = _userService.GetUserByUserName(_tenantManager.GetCurrentTenant().Id, u.UserName);
if (oldUserData == null || Equals(oldUserData, Constants.LostUser))
{
throw new InvalidOperationException("User not found.");
}
return _userService.SaveUser(_tenantManager.GetCurrentTenant().Id, u);
}
public async Task<UserInfo> UpdateUserInfoWithSyncCardDavAsync(UserInfo u)
{
var oldUserData = _userService.GetUserByUserName(_tenantManager.GetCurrentTenant().Id, u.UserName);
var newUser = UpdateUserInfo(u);
await SyncCardDavAsync(u, oldUserData, newUser);
return newUser;
}
public async Task<UserInfo> SaveUserInfo(UserInfo u, bool isVisitor = false, bool syncCardDav = false)
{
if (IsSystemUser(u.Id))
{
return SystemUsers[u.Id];
}
_permissionContext.DemandPermissions(Constants.Action_AddRemoveUser);
if (!_coreBaseSettings.Personal)
{
if (_constants.MaxEveryoneCount <= GetUsersByGroup(Constants.GroupEveryone.ID).Length)
@ -336,28 +363,32 @@ public class UserManager
}
}
if (u.Status == EmployeeStatus.Terminated && u.Id == _tenantManager.GetCurrentTenant().OwnerId)
{
throw new InvalidOperationException("Can not disable tenant owner.");
}
var oldUserData = _userService.GetUserByUserName(_tenantManager.GetCurrentTenant().Id, u.UserName);
if (Equals(oldUserData, Constants.LostUser))
if (oldUserData != null && !Equals(oldUserData, Constants.LostUser))
{
if (isUser)
throw new InvalidOperationException("User already exist.");
}
if (isVisitor)
{
_activeUsersFeatureChecker.CheckAppend().Wait();
await _activeUsersFeatureChecker.CheckAppend();
}
else
{
_tenantQuotaFeatureChecker.CheckAppend().Wait();
}
await _countRoomAdminChecker.CheckAppend();
}
var newUser = _userService.SaveUser(_tenantManager.GetCurrentTenant().Id, u);
if (syncCardDav)
{
await SyncCardDavAsync(u, oldUserData, newUser);
}
return newUser;
}
private async Task SyncCardDavAsync(UserInfo u, UserInfo oldUserData, UserInfo newUser)
{
var tenant = _tenantManager.GetCurrentTenant();
var myUri = (_accessor?.HttpContext != null) ? _accessor.HttpContext.Request.GetUrlRewriter().ToString() :
@ -371,10 +402,10 @@ public class UserManager
{
var userAuthorization = oldUserData.Email.ToLower() + ":" + _instanceCrypto.Encrypt(oldUserData.Email);
var requestUrlBook = _cardDavAddressbook.GetRadicaleUrl(myUri, newUser.Email.ToLower(), true, true);
var collection = _cardDavAddressbook.GetCollection(requestUrlBook, userAuthorization, myUri.ToString()).Result;
var collection = await _cardDavAddressbook.GetCollection(requestUrlBook, userAuthorization, myUri.ToString());
if (collection.Completed && collection.StatusCode != 404)
{
_cardDavAddressbook.Delete(myUri, newUser.Id, newUser.Email, tenant.Id).Wait();//TODO
await _cardDavAddressbook.Delete(myUri, newUser.Id, newUser.Email, tenant.Id);
}
foreach (var email in allUserEmails)
{
@ -387,7 +418,7 @@ public class UserManager
Authorization = rootAuthorization,
Header = myUri
};
_radicaleClient.RemoveAsync(davItemRequest).ConfigureAwait(false);
await _radicaleClient.RemoveAsync(davItemRequest).ConfigureAwait(false);
}
catch (Exception ex)
{
@ -397,36 +428,31 @@ public class UserManager
}
else
{
try
{
var cardDavUser = new CardDavItem(u.Id, u.FirstName, u.LastName, u.UserName, u.BirthDate, u.Sex, u.Title, u.Email, u.ContactsList, u.MobilePhone);
try
{
_cardDavAddressbook.UpdateItemForAllAddBooks(allUserEmails, myUri, cardDavUser, _tenantManager.GetCurrentTenant().Id, oldUserData != null && oldUserData.Email != newUser.Email ? oldUserData.Email : null).Wait(); // todo
}
catch (Exception ex)
{
_log.ErrorWithException(ex);
}
await _cardDavAddressbook.UpdateItemForAllAddBooks(allUserEmails, myUri, cardDavUser, _tenantManager.GetCurrentTenant().Id, oldUserData != null && oldUserData.Email != newUser.Email ? oldUserData.Email : null);
}
catch (Exception ex)
{
_log.ErrorWithException(ex);
}
}
catch (Exception ex)
{
_log.ErrorWithException(ex);
}
}
}
}
return newUser;
}
public IEnumerable<string> GetDavUserEmails()
{
return _userService.GetDavUserEmails(_tenantManager.GetCurrentTenant().Id);
}
public void DeleteUser(Guid id)
public async Task DeleteUser(Guid id)
{
if (IsSystemUser(id))
{
@ -454,7 +480,7 @@ public class UserManager
new Uri(_cache.Get<string>("REWRITE_URL" + tenant.Id)).ToString() : tenant.GetTenantDomain(_coreSettings);
var davUsersEmails = GetDavUserEmails();
var requestUrlBook = _cardDavAddressbook.GetRadicaleUrl(myUri, delUser.Email.ToLower(), true, true);
var addBookCollection = _cardDavAddressbook.GetCollection(requestUrlBook, userAuthorization, myUri.ToString()).Result;
var addBookCollection = await _cardDavAddressbook.GetCollection(requestUrlBook, userAuthorization, myUri.ToString());
if (addBookCollection.Completed && addBookCollection.StatusCode != 404)
@ -465,7 +491,7 @@ public class UserManager
Authorization = rootAuthorization,
Header = myUri
};
_radicaleClient.RemoveAsync(davbookRequest).ConfigureAwait(false);
await _radicaleClient.RemoveAsync(davbookRequest).ConfigureAwait(false);
}
foreach (var email in davUsersEmails)
@ -479,14 +505,13 @@ public class UserManager
Authorization = rootAuthorization,
Header = myUri
};
_radicaleClient.RemoveAsync(davItemRequest).ConfigureAwait(false);
await _radicaleClient.RemoveAsync(davItemRequest).ConfigureAwait(false);
}
catch (Exception ex)
{
_log.ErrorWithException(ex);
}
}
}
catch (Exception ex)
{
@ -600,7 +625,7 @@ public class UserManager
return GetUsers(employeeStatus).Where(u => IsUserInGroupInternal(u.Id, groupId, refs)).ToArray();
}
public void AddUserIntoGroup(Guid userId, Guid groupId, bool dontClearAddressBook = false)
public async Task AddUserIntoGroup(Guid userId, Guid groupId, bool dontClearAddressBook = false)
{
if (Constants.LostUser.Id == userId || Constants.LostGroupInfo.ID == groupId)
{
@ -621,7 +646,7 @@ public class UserManager
if (!dontClearAddressBook)
{
_cardDavAddressbook.Delete(myUri, user.Id, user.Email, tenant.Id).Wait(); //todo
await _cardDavAddressbook.Delete(myUri, user.Id, user.Email, tenant.Id);
}
}
}
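The UserManager hunks split the old all-purpose SaveUserInfo into an update path (UpdateUserInfo, which requires an existing user) and an async create path (SaveUserInfo, which rejects duplicates and awaits the quota checkers), plus UpdateUserInfoWithSyncCardDavAsync for the CardDAV case. A rough standalone sketch of the create/update split, using an in-memory dictionary in place of IUserService and omitting quota and CardDAV logic:

using System;
using System.Collections.Concurrent;
using System.Threading.Tasks;

public class UserStoreSketch
{
    private readonly ConcurrentDictionary<string, string> _users = new();

    // Create path: fails if the user name is already taken.
    public Task<string> SaveUserInfo(string userName)
    {
        if (!_users.TryAdd(userName, userName))
        {
            throw new InvalidOperationException("User already exist.");
        }
        return Task.FromResult(userName);
    }

    // Update path: fails if the user is not found.
    public Task<string> UpdateUserInfo(string userName)
    {
        if (!_users.ContainsKey(userName))
        {
            throw new InvalidOperationException("User not found.");
        }
        _users[userName] = userName;
        return Task.FromResult(userName);
    }
}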

View File

@ -95,7 +95,7 @@ public class SecurityContext
return AuthenticateMe(new UserAccount(u, tenantid, _userFormatter), funcLoginEvent);
}
public bool AuthenticateMe(string cookie)
public async Task<bool> AuthenticateMe(string cookie)
{
if (string.IsNullOrEmpty(cookie)) return false;
@ -161,7 +161,7 @@ public class SecurityContext
return false;
}
var settingLoginEvents = _dbLoginEventsManager.GetLoginEventIds(tenant, userid).Result; // remove Result
var settingLoginEvents = await _dbLoginEventsManager.GetLoginEventIds(tenant, userid);
if (loginEventId != 0 && !settingLoginEvents.Contains(loginEventId))
{
return false;

View File

@ -59,7 +59,7 @@ public static class BaseDbContextExtension
{
case Provider.MySql:
optionsBuilder.ReplaceService<IMigrationsSqlGenerator, CustomMySqlMigrationsSqlGenerator>();
optionsBuilder.UseMySql(connectionString.ConnectionString, ServerVersion.Parse("8.0.25"), providerOptions =>
optionsBuilder.UseMySql(connectionString.ConnectionString, ServerVersion.AutoDetect(connectionString.ConnectionString), providerOptions =>
{
if (!string.IsNullOrEmpty(migrateAssembly))
{

View File

@ -24,15 +24,12 @@
// content are licensed under the terms of the Creative Commons Attribution-ShareAlike 4.0
// International. See the License terms at http://creativecommons.org/licenses/by-sa/4.0/legalcode
global using System;
global using System.Collections;
global using System.Collections.Concurrent;
global using System.Collections.Generic;
global using System.Configuration;
global using System.Data.Common;
global using System.Diagnostics;
global using System.Globalization;
global using System.Linq;
global using System.Linq.Expressions;
global using System.Net;
global using System.Net.Http.Headers;

View File

@ -29,6 +29,6 @@ namespace ASC.Notify.Channels;
public interface ISenderChannel
{
string SenderName { get; }
SendResponse DirectSend(INoticeMessage message);
void SendAsync(INoticeMessage message);
Task<SendResponse> DirectSend(INoticeMessage message);
Task SendAsync(INoticeMessage message);
}

View File

@ -45,16 +45,16 @@ public class SenderChannel : ISenderChannel
_firstSink = AddSink(_firstSink, dispatcherSink);
}
public void SendAsync(INoticeMessage message)
public async Task SendAsync(INoticeMessage message)
{
ArgumentNullException.ThrowIfNull(message);
_firstSink.ProcessMessageAsync(message);
await _firstSink.ProcessMessageAsync(message);
}
public SendResponse DirectSend(INoticeMessage message)
public async Task<SendResponse> DirectSend(INoticeMessage message)
{
return _senderSink.ProcessMessage(message);
return await _senderSink.ProcessMessage(message);
}
private ISink AddSink(ISink firstSink, ISink addedSink)

View File

@ -38,7 +38,7 @@ public class EmailSenderSink : Sink
_serviceProvider = serviceProvider;
}
public override SendResponse ProcessMessage(INoticeMessage message)
public override async Task<SendResponse> ProcessMessage(INoticeMessage message)
{
if (message.Recipient.Addresses == null || message.Recipient.Addresses.Length == 0)
{
@ -50,7 +50,7 @@ public class EmailSenderSink : Sink
{
using var scope = _serviceProvider.CreateScope();
var m = scope.ServiceProvider.GetRequiredService<EmailSenderSinkMessageCreator>().CreateNotifyMessage(message, _senderName);
var result = _sender.Send(m);
var result = await _sender.Send(m);
responce.Result = result switch
{

View File

@ -43,7 +43,7 @@ public class DispatchEngine
_logger.LogOnly(_logOnly);
}
public SendResponse Dispatch(INoticeMessage message, string senderName)
public async Task<SendResponse> Dispatch(INoticeMessage message, string senderName)
{
var response = new SendResponse(message, senderName, SendResult.OK);
if (!_logOnly)
@ -51,7 +51,7 @@ public class DispatchEngine
var sender = _context.GetSender(senderName);
if (sender != null)
{
response = sender.DirectSend(message);
response = await sender.DirectSend(message);
}
else
{

View File

@ -110,7 +110,7 @@ public class NotifyEngine : INotifyEngine, IDisposable
}
}
private void NotifyScheduler(object state)
private async void NotifyScheduler(object state)
{
try
{
@ -124,8 +124,9 @@ public class NotifyEngine : INotifyEngine, IDisposable
copy = _sendMethods.ToList();
}
foreach (var w in copy)
for (var i = 0; i < copy.Count; i++)
{
using var w = copy[i];
if (!w.ScheduleDate.HasValue)
{
lock (_sendMethods)
@ -138,7 +139,7 @@ public class NotifyEngine : INotifyEngine, IDisposable
{
try
{
w.InvokeSendMethod(now);
await w.InvokeSendMethod(now);
}
catch (Exception error)
{
@ -177,7 +178,7 @@ public class NotifyEngine : INotifyEngine, IDisposable
}
private void NotifySender(object state)
private async void NotifySender(object state)
{
try
{
@ -201,7 +202,7 @@ public class NotifyEngine : INotifyEngine, IDisposable
try
{
SendNotify(request, scope);
await SendNotify(request, scope);
}
catch (Exception e)
{
@ -225,7 +226,7 @@ public class NotifyEngine : INotifyEngine, IDisposable
}
private NotifyResult SendNotify(NotifyRequest request, IServiceScope serviceScope)
private async Task<NotifyResult> SendNotify(NotifyRequest request, IServiceScope serviceScope)
{
var sendResponces = new List<SendResponse>();
@ -236,7 +237,7 @@ public class NotifyEngine : INotifyEngine, IDisposable
}
else
{
sendResponces.AddRange(SendGroupNotify(request, serviceScope));
sendResponces.AddRange(await SendGroupNotify(request, serviceScope));
}
NotifyResult result;
@ -258,15 +259,15 @@ public class NotifyEngine : INotifyEngine, IDisposable
return request.Intercept(place, serviceScope) ? new SendResponse(request.NotifyAction, sender, request.Recipient, SendResult.Prevented) : null;
}
private List<SendResponse> SendGroupNotify(NotifyRequest request, IServiceScope serviceScope)
private async Task<List<SendResponse>> SendGroupNotify(NotifyRequest request, IServiceScope serviceScope)
{
var responces = new List<SendResponse>();
SendGroupNotify(request, responces, serviceScope);
await SendGroupNotify(request, responces, serviceScope);
return responces;
}
private void SendGroupNotify(NotifyRequest request, List<SendResponse> responces, IServiceScope serviceScope)
private async Task SendGroupNotify(NotifyRequest request, List<SendResponse> responces, IServiceScope serviceScope)
{
if (request.Recipient is IDirectRecipient)
{
@ -276,7 +277,7 @@ public class NotifyEngine : INotifyEngine, IDisposable
var directresponses = new List<SendResponse>(1);
try
{
directresponses = SendDirectNotify(request, serviceScope);
directresponses = await SendDirectNotify(request, serviceScope);
}
catch (Exception exc)
{
@ -308,7 +309,7 @@ public class NotifyEngine : INotifyEngine, IDisposable
try
{
var newRequest = request.Split(recipient);
SendGroupNotify(newRequest, responces, serviceScope);
await SendGroupNotify(newRequest, responces, serviceScope);
}
catch (Exception exc)
{
@ -334,7 +335,7 @@ public class NotifyEngine : INotifyEngine, IDisposable
}
}
private List<SendResponse> SendDirectNotify(NotifyRequest request, IServiceScope serviceScope)
private async Task<List<SendResponse>> SendDirectNotify(NotifyRequest request, IServiceScope serviceScope)
{
if (request.Recipient is not IDirectRecipient)
{
@ -371,7 +372,7 @@ public class NotifyEngine : INotifyEngine, IDisposable
{
try
{
response = SendDirectNotify(request, channel, serviceScope);
response = await SendDirectNotify(request, channel, serviceScope);
}
catch (Exception exc)
{
@ -396,7 +397,7 @@ public class NotifyEngine : INotifyEngine, IDisposable
return responses;
}
private SendResponse SendDirectNotify(NotifyRequest request, ISenderChannel channel, IServiceScope serviceScope)
private async Task<SendResponse> SendDirectNotify(NotifyRequest request, ISenderChannel channel, IServiceScope serviceScope)
{
if (request.Recipient is not IDirectRecipient)
{
@ -418,7 +419,7 @@ public class NotifyEngine : INotifyEngine, IDisposable
return preventresponse;
}
channel.SendAsync(noticeMessage);
await channel.SendAsync(noticeMessage);
return new SendResponse(noticeMessage, channel.SenderName, SendResult.Inprogress);
}
@ -585,19 +586,21 @@ public class NotifyEngine : INotifyEngine, IDisposable
}
private sealed class SendMethodWrapper
private sealed class SendMethodWrapper : IDisposable
{
private readonly object _locker = new object();
private readonly SemaphoreSlim _semaphore;
private readonly CronExpression _cronExpression;
private readonly Action<DateTime> _method;
private readonly ILogger _logger;
public DateTime? ScheduleDate { get; private set; }
public ILogger Logger { get; }
public SendMethodWrapper(Action<DateTime> method, string cron, ILogger log)
{
_semaphore = new SemaphoreSlim(1);
_method = method;
Logger = log;
_logger = log;
if (!string.IsNullOrEmpty(cron))
{
_cronExpression = new CronExpression(cron);
@ -617,15 +620,14 @@ public class NotifyEngine : INotifyEngine, IDisposable
}
catch (Exception e)
{
Logger.ErrorUpdateScheduleDate(e);
_logger.ErrorUpdateScheduleDate(e);
}
}
public void InvokeSendMethod(DateTime d)
public async Task InvokeSendMethod(DateTime d)
{
lock (_locker)
{
Task.Run(() =>
await _semaphore.WaitAsync();
await Task.Run(() =>
{
try
{
@ -633,10 +635,10 @@ public class NotifyEngine : INotifyEngine, IDisposable
}
catch (Exception e)
{
Logger.ErrorInvokeSendMethod(e);
}
}).Wait();
_logger.ErrorInvokeSendMethod(e);
}
});
_semaphore.Release();
}
public override bool Equals(object obj)
@ -648,6 +650,11 @@ public class NotifyEngine : INotifyEngine, IDisposable
{
return _method.GetHashCode();
}
public void Dispose()
{
_semaphore.Dispose();
}
}
public void Dispose()
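SendMethodWrapper swaps its object lock for a SemaphoreSlim(1) and becomes IDisposable, since await is not allowed inside a lock block; the AWSSender hunk below makes the same substitution. A standalone sketch of the replacement; the try/finally around the release is a defensive addition in this sketch, not something the hunk itself adds:

using System;
using System.Threading;
using System.Threading.Tasks;

public sealed class AsyncGuardedSender : IDisposable
{
    // SemaphoreSlim(1, 1) plays the role the object lock used to play,
    // but it can be awaited, so the guarded section may contain awaits.
    private readonly SemaphoreSlim _semaphore = new SemaphoreSlim(1, 1);

    public async Task SendAsync(Func<Task> sendMethod)
    {
        await _semaphore.WaitAsync();
        try
        {
            await sendMethod();
        }
        finally
        {
            _semaphore.Release();   // released even if sendMethod throws
        }
    }

    public void Dispose() => _semaphore.Dispose();
}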

View File

@ -39,7 +39,7 @@ class JabberSenderSink : Sink
private readonly IServiceProvider _serviceProvider;
public override SendResponse ProcessMessage(INoticeMessage message)
public override async Task<SendResponse> ProcessMessage(INoticeMessage message)
{
try
{
@ -53,7 +53,7 @@ class JabberSenderSink : Sink
}
else
{
_sender.Send(m);
await _sender.Send(m);
}
return new SendResponse(message, _senderName, result);

View File

@ -43,7 +43,7 @@ class PushSenderSink : Sink
private readonly IServiceProvider _serviceProvider;
public override SendResponse ProcessMessage(INoticeMessage message)
public override async Task<SendResponse> ProcessMessage(INoticeMessage message)
{
try
{
@ -58,7 +58,7 @@ class PushSenderSink : Sink
}
else
{
_sender.Send(m);
await _sender.Send(m);
}
return new SendResponse(message, Constants.NotifyPushSenderSysName, result);

View File

@ -31,7 +31,7 @@ namespace ASC.Core.Notify.Senders;
[Singletone]
public class AWSSender : SmtpSender, IDisposable
{
private readonly object _locker = new object();
private readonly SemaphoreSlim _semaphore = new SemaphoreSlim(1);
private AmazonSimpleEmailServiceClient _amazonEmailServiceClient;
private TimeSpan _refreshTimeout;
private DateTime _lastRefresh;
@ -56,7 +56,7 @@ public class AWSSender : SmtpSender, IDisposable
_lastRefresh = DateTime.UtcNow - _refreshTimeout; //set to refresh on first send
}
public override NoticeSendResult Send(NotifyMessage m)
public override async Task<NoticeSendResult> Send(NotifyMessage m)
{
NoticeSendResult result;
try
@ -71,11 +71,11 @@ public class AWSSender : SmtpSender, IDisposable
var configuration = scope.ServiceProvider.GetService<CoreConfiguration>();
if (!configuration.SmtpSettings.IsDefaultSettings)
{
result = base.Send(m);
result = await base.Send(m);
}
else
{
result = SendMessage(m);
result = await SendMessage(m);
}
_logger.Debug(result.ToString());
@ -106,20 +106,19 @@ public class AWSSender : SmtpSender, IDisposable
if (result == NoticeSendResult.MessageIncorrect || result == NoticeSendResult.SendingImpossible)
{
_logger.DebugAmazonSendingFailed(result);
result = base.Send(m);
result = await base.Send(m);
}
return result;
}
private NoticeSendResult SendMessage(NotifyMessage m)
private async Task<NoticeSendResult> SendMessage(NotifyMessage m)
{
//Check if we need to query stats
RefreshQuotaIfNeeded();
await RefreshQuotaIfNeeded();
if (_quota != null)
{
lock (_locker)
{
await _semaphore.WaitAsync();
if (_quota.Max24HourSend <= _quota.SentLast24Hours)
{
//Quota exceeded, queue next refresh to +24 hours
@ -128,7 +127,7 @@ public class AWSSender : SmtpSender, IDisposable
return NoticeSendResult.SendingImpossible;
}
}
_semaphore.Release();
}
var dest = new Destination
@ -160,7 +159,7 @@ public class AWSSender : SmtpSender, IDisposable
ThrottleIfNeeded();
var response = _amazonEmailServiceClient.SendEmailAsync(request).Result;
var response = await _amazonEmailServiceClient.SendEmailAsync(request);
_lastSend = DateTime.UtcNow;
return response != null ? NoticeSendResult.OK : NoticeSendResult.TryOnceAgain;
@ -182,15 +181,14 @@ public class AWSSender : SmtpSender, IDisposable
}
}
private void RefreshQuotaIfNeeded()
private async Task RefreshQuotaIfNeeded()
{
if (!IsRefreshNeeded())
{
return;
}
lock (_locker)
{
await _semaphore.WaitAsync();
if (IsRefreshNeeded())//Double check
{
_logger.DebugRefreshingQuota(_refreshTimeout, _lastRefresh);
@ -200,7 +198,7 @@ public class AWSSender : SmtpSender, IDisposable
try
{
var r = new GetSendQuotaRequest();
_quota = _amazonEmailServiceClient.GetSendQuotaAsync(r).Result;
_quota = await _amazonEmailServiceClient.GetSendQuotaAsync(r);
_sendWindow = TimeSpan.FromSeconds(1.0 / _quota.MaxSendRate);
_logger.DebugQuota(_quota.SentLast24Hours, _quota.Max24HourSend, _quota.MaxSendRate, _sendWindow);
}
@ -209,7 +207,7 @@ public class AWSSender : SmtpSender, IDisposable
_logger.ErrorRefreshingQuota(e);
}
}
}
_semaphore.Release();
}
private bool IsRefreshNeeded()

View File

@ -29,5 +29,5 @@ namespace ASC.Core.Notify.Senders;
public interface INotifySender
{
void Init(IDictionary<string, string> properties);
NoticeSendResult Send(NotifyMessage m);
Task<NoticeSendResult> Send(NotifyMessage m);
}

View File

@ -40,7 +40,7 @@ public class JabberSender : INotifySender
public void Init(IDictionary<string, string> properties) { }
public NoticeSendResult Send(NotifyMessage m)
public Task<NoticeSendResult> Send(NotifyMessage m)
{
var text = m.Content;
if (!string.IsNullOrEmpty(text))
@ -59,7 +59,7 @@ public class JabberSender : INotifySender
_logger.ErrorUnexpected(e);
}
return NoticeSendResult.OK;
return Task.FromResult(NoticeSendResult.OK);
}
}

View File

@ -38,10 +38,10 @@ public class NotifyServiceSender : INotifySender
public void Init(IDictionary<string, string> properties) { }
public NoticeSendResult Send(NotifyMessage m)
public Task<NoticeSendResult> Send(NotifyMessage m)
{
_notifyServiceClient.SendNotifyMessage(m);
return NoticeSendResult.OK;
return Task.FromResult(NoticeSendResult.OK);
}
}

View File

@ -41,7 +41,7 @@ public class PushSender : INotifySender
public void Init(IDictionary<string, string> properties) { }
public NoticeSendResult Send(NotifyMessage m)
public Task<NoticeSendResult> Send(NotifyMessage m)
{
if (!string.IsNullOrEmpty(m.Content))
{
@ -59,7 +59,7 @@ public class PushSender : INotifySender
_logger.ErrorUnexpected(e);
}
return NoticeSendResult.OK;
return Task.FromResult(NoticeSendResult.OK);
}
}
public static class FirebaseSenderExtension

View File

@ -57,7 +57,7 @@ public class SmtpSender : INotifySender
_initProperties = properties;
}
public virtual NoticeSendResult Send(NotifyMessage m)
public virtual Task<NoticeSendResult> Send(NotifyMessage m)
{
using var scope = _serviceProvider.CreateScope();
var tenantManager = scope.ServiceProvider.GetService<TenantManager>();
@ -149,7 +149,7 @@ public class SmtpSender : INotifySender
smtpClient.Dispose();
}
return result;
return Task.FromResult(result);
}
private void BuildSmtpSettings(CoreConfiguration configuration)

View File

@ -41,7 +41,7 @@ public class TelegramSender : INotifySender
public void Init(IDictionary<string, string> properties) { }
public NoticeSendResult Send(NotifyMessage m)
public Task<NoticeSendResult> Send(NotifyMessage m)
{
if (!string.IsNullOrEmpty(m.Content))
{
@ -59,6 +59,6 @@ public class TelegramSender : INotifySender
_logger.ErrorUnexpected(e);
}
return NoticeSendResult.OK;
return Task.FromResult(NoticeSendResult.OK);
}
}
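With INotifySender.Send now returning Task<NoticeSendResult>, implementations that stay synchronous (Jabber, NotifyService, Push, Smtp, Telegram) wrap their result in Task.FromResult rather than being marked async. A minimal sketch of that convention with invented names:

using System.Threading.Tasks;

public interface ISenderSketch
{
    Task<bool> Send(string message);    // interface went Task-returning
}

public class SyncSenderSketch : ISenderSketch
{
    // The body has no awaits, so the synchronous result is wrapped
    // in Task.FromResult instead of making the method async.
    public Task<bool> Send(string message)
    {
        var ok = !string.IsNullOrEmpty(message);
        return Task.FromResult(ok);
    }
}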

View File

@ -37,13 +37,13 @@ class DispatchSink : Sink
_senderName = senderName;
}
public override SendResponse ProcessMessage(INoticeMessage message)
public override Task<SendResponse> ProcessMessage(INoticeMessage message)
{
return _dispatcher.Dispatch(message, _senderName);
}
public override void ProcessMessageAsync(INoticeMessage message)
public override async Task ProcessMessageAsync(INoticeMessage message)
{
_dispatcher.Dispatch(message, _senderName);
await _dispatcher.Dispatch(message, _senderName);
}
}

View File

@ -29,6 +29,6 @@ namespace ASC.Notify.Sinks;
public interface ISink
{
ISink NextSink { get; set; }
SendResponse ProcessMessage(INoticeMessage message);
void ProcessMessageAsync(INoticeMessage message);
Task<SendResponse> ProcessMessage(INoticeMessage message);
Task ProcessMessageAsync(INoticeMessage message);
}

View File

@ -35,10 +35,10 @@ public abstract class Sink : ISink
{
public ISink NextSink { get; set; }
public abstract SendResponse ProcessMessage(INoticeMessage message);
public abstract Task<SendResponse> ProcessMessage(INoticeMessage message);
public virtual void ProcessMessageAsync(INoticeMessage message)
public virtual async Task ProcessMessageAsync(INoticeMessage message)
{
NextSink.ProcessMessageAsync(message);
await NextSink.ProcessMessageAsync(message);
}
}

View File

@ -39,7 +39,7 @@ class TelegramSenderSink : Sink
}
public override SendResponse ProcessMessage(INoticeMessage message)
public override async Task<SendResponse> ProcessMessage(INoticeMessage message)
{
try
{
@ -47,7 +47,7 @@ class TelegramSenderSink : Sink
using var scope = _serviceProvider.CreateScope();
var m = scope.ServiceProvider.GetRequiredService<TelegramSenderSinkMessageCreator>().CreateNotifyMessage(message, _senderName);
_sender.Send(m);
await _sender.Send(m);
return new SendResponse(message, _senderName, result);
}

View File

@ -76,4 +76,42 @@ public static class ActionInvoker
}
}
}
public static async Task Try(
Func<object, Task> action,
object state,
int maxAttempts,
Action<Exception> onFailure = null,
Action<Exception> onAttemptFailure = null,
int sleepMs = 1000,
bool isSleepExponential = true)
{
ArgumentNullException.ThrowIfNull(action);
var countAttempts = 0;
while (countAttempts++ < maxAttempts)
{
try
{
await action(state);
return;
}
catch (Exception error)
{
if (countAttempts < maxAttempts)
{
onAttemptFailure?.Invoke(error);
if (sleepMs > 0)
{
await Task.Delay(isSleepExponential ? sleepMs * countAttempts : sleepMs);
}
}
else
{
onFailure?.Invoke(error);
}
}
}
}
}
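The new ActionInvoker.Try overload retries an async delegate with optional exponential backoff, mirroring the existing synchronous helper; the later BackupPortalTask and DeletePortalTask hunks call it with a five-attempt limit. A hypothetical call site, assuming the ActionInvoker class above is in scope (the path and messages are made up):

using System;
using System.IO;
using System.Threading.Tasks;

public static class RetrySketch
{
    public static async Task Main()
    {
        await ActionInvoker.Try(
            async state =>
            {
                var path = (string)state;
                await File.WriteAllTextAsync(path, "backup chunk");   // stand-in for the real work
            },
            state: Path.Combine(Path.GetTempPath(), "backup.part"),
            maxAttempts: 5,
            onFailure: error => Console.WriteLine($"giving up: {error.Message}"),
            onAttemptFailure: error => Console.WriteLine($"retrying: {error.Message}"),
            sleepMs: 500,
            isSleepExponential: true);
    }
}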

View File

@ -122,25 +122,25 @@ public class BackupAjaxHandler
return _backupService.GetBackupProgress(tenantId);
}
public void DeleteBackup(Guid id)
public async Task DeleteBackup(Guid id)
{
DemandPermissionsBackup();
_backupService.DeleteBackup(id);
await _backupService.DeleteBackup(id);
}
public void DeleteAllBackups()
public async Task DeleteAllBackups()
{
DemandPermissionsBackup();
_backupService.DeleteAllBackups(GetCurrentTenantId());
await _backupService.DeleteAllBackups(GetCurrentTenantId());
}
public List<BackupHistoryRecord> GetBackupHistory()
public async Task<List<BackupHistoryRecord>> GetBackupHistory()
{
DemandPermissionsBackup();
return _backupService.GetBackupHistory(GetCurrentTenantId());
return await _backupService.GetBackupHistory(GetCurrentTenantId());
}
public void CreateSchedule(BackupStorageType storageType, Dictionary<string, string> storageParams, int backupsStored, CronParams cronParams)

View File

@ -32,13 +32,13 @@ public interface IBackupService
BackupProgress GetBackupProgress(int tenantId);
BackupProgress GetRestoreProgress(int tenantId);
BackupProgress GetTransferProgress(int tenantId);
List<BackupHistoryRecord> GetBackupHistory(int tenantId);
Task<List<BackupHistoryRecord>> GetBackupHistory(int tenantId);
List<TransferRegion> GetTransferRegions();
ScheduleResponse GetSchedule(int tenantId);
string GetTmpFolder();
void CreateSchedule(CreateScheduleRequest request);
void DeleteAllBackups(int tenantId);
void DeleteBackup(Guid backupId);
Task DeleteAllBackups(int tenantId);
Task DeleteBackup(Guid backupId);
void DeleteSchedule(int tenantId);
void StartBackup(StartBackupRequest request);
void StartRestore(StartRestoreRequest request);

View File

@ -45,7 +45,7 @@ public class DbBackupProvider : IBackupProvider
public event EventHandler<ProgressChangedEventArgs> ProgressChanged;
public IEnumerable<XElement> GetElements(int tenant, string[] configs, IDataWriteOperator writer)
public Task<IEnumerable<XElement>> GetElements(int tenant, string[] configs, IDataWriteOperator writer)
{
_processedTables.Clear();
var xml = new List<XElement>();
@ -70,10 +70,10 @@ public class DbBackupProvider : IBackupProvider
}
}
return xml;
return Task.FromResult(xml.AsEnumerable());
}
public void LoadFrom(IEnumerable<XElement> elements, int tenant, string[] configs, IDataReadOperator reader)
public Task LoadFrom(IEnumerable<XElement> elements, int tenant, string[] configs, IDataReadOperator reader)
{
_processedTables.Clear();
@ -81,6 +81,7 @@ public class DbBackupProvider : IBackupProvider
{
RestoreDatabase(connectionString, elements, reader);
}
return Task.CompletedTask;
}
public IEnumerable<ConnectionStringSettings> GetConnectionStrings(string[] configs)

View File

@ -50,12 +50,12 @@ public class FileBackupProvider : IBackupProvider
public event EventHandler<ProgressChangedEventArgs> ProgressChanged;
public IEnumerable<XElement> GetElements(int tenant, string[] configs, IDataWriteOperator writer)
public async Task<IEnumerable<XElement>> GetElements(int tenant, string[] configs, IDataWriteOperator writer)
{
InvokeProgressChanged("Saving files...", 0);
var config = GetWebConfig(configs);
var files = ComposeFiles(tenant, config);
var files = await ComposeFiles(tenant, config);
var elements = new List<XElement>();
var backupKeys = new List<string>();
@ -74,7 +74,7 @@ public class FileBackupProvider : IBackupProvider
{
try
{
using var stream = storage.GetReadStreamAsync(file.Domain, file.Path).Result;
using var stream = await storage.GetReadStreamAsync(file.Domain, file.Path);
writer.WriteEntry(backupPath, stream);
break;
}
@ -97,7 +97,7 @@ public class FileBackupProvider : IBackupProvider
return elements;
}
public void LoadFrom(IEnumerable<XElement> elements, int tenant, string[] configs, IDataReadOperator dataOperator)
public async Task LoadFrom(IEnumerable<XElement> elements, int tenant, string[] configs, IDataReadOperator dataOperator)
{
InvokeProgressChanged("Restoring files...", 0);
@ -114,7 +114,7 @@ public class FileBackupProvider : IBackupProvider
var storage = _storageFactory.GetStorage(config, tenant, backupInfo.Module, null);
try
{
storage.SaveAsync(backupInfo.Domain, backupInfo.Path, entry).Wait();
await storage.SaveAsync(backupInfo.Domain, backupInfo.Path, entry);
}
catch (Exception error)
{
@ -126,7 +126,7 @@ public class FileBackupProvider : IBackupProvider
}
}
private IEnumerable<FileBackupInfo> ComposeFiles(int tenant, string config)
private async Task<IEnumerable<FileBackupInfo>> ComposeFiles(int tenant, string config)
{
var files = new List<FileBackupInfo>();
foreach (var module in _storageFactoryConfig.GetModuleList(config))
@ -138,13 +138,13 @@ public class FileBackupProvider : IBackupProvider
foreach (var domain in domainList)
{
files.AddRange(store
.ListFilesRelativeAsync(domain, "\\", "*.*", true).ToArrayAsync().Result
files.AddRange((await store
.ListFilesRelativeAsync(domain, "\\", "*.*", true).ToArrayAsync())
.Select(x => new FileBackupInfo(domain, module, x)));
}
files.AddRange(store
.ListFilesRelativeAsync(string.Empty, "\\", "*.*", true).ToArrayAsync().Result
files.AddRange((await store
.ListFilesRelativeAsync(string.Empty, "\\", "*.*", true).ToArrayAsync())
.Where(x => domainList.All(domain => x.IndexOf($"{domain}/") == -1))
.Select(x => new FileBackupInfo(string.Empty, module, x)));
}

View File

@ -31,8 +31,8 @@ public interface IBackupProvider
string Name { get; }
event EventHandler<ProgressChangedEventArgs> ProgressChanged;
IEnumerable<XElement> GetElements(int tenant, string[] configs, IDataWriteOperator writer);
void LoadFrom(IEnumerable<XElement> elements, int tenant, string[] configs, IDataReadOperator reader);
Task<IEnumerable<XElement>> GetElements(int tenant, string[] configs, IDataWriteOperator writer);
Task LoadFrom(IEnumerable<XElement> elements, int tenant, string[] configs, IDataReadOperator reader);
}
public class ProgressChangedEventArgs : EventArgs

View File

@ -58,7 +58,7 @@ public class BackupService : IBackupService
}
}
public void DeleteBackup(Guid id)
public async Task DeleteBackup(Guid id)
{
var backupRecord = _backupRepository.GetBackupRecord(id);
_backupRepository.DeleteBackupRecord(backupRecord.Id);
@ -69,10 +69,10 @@ public class BackupService : IBackupService
return;
}
storage.Delete(backupRecord.StoragePath);
await storage.Delete(backupRecord.StoragePath);
}
public void DeleteAllBackups(int tenantId)
public async Task DeleteAllBackups(int tenantId)
{
foreach (var backupRecord in _backupRepository.GetBackupRecordsByTenantId(tenantId))
{
@ -85,7 +85,7 @@ public class BackupService : IBackupService
continue;
}
storage.Delete(backupRecord.StoragePath);
await storage.Delete(backupRecord.StoragePath);
}
catch (Exception error)
{
@ -94,7 +94,7 @@ public class BackupService : IBackupService
}
}
public List<BackupHistoryRecord> GetBackupHistory(int tenantId)
public async Task<List<BackupHistoryRecord>> GetBackupHistory(int tenantId)
{
var backupHistory = new List<BackupHistoryRecord>();
foreach (var record in _backupRepository.GetBackupRecordsByTenantId(tenantId))
@ -105,7 +105,7 @@ public class BackupService : IBackupService
continue;
}
if (storage.IsExists(record.StoragePath))
if (await storage.IsExists(record.StoragePath))
{
backupHistory.Add(new BackupHistoryRecord
{

View File

@ -115,7 +115,7 @@ public class BackupProgressItem : BaseBackupProgressItem
_configPaths = configPaths;
}
protected override void DoJob()
protected override async Task DoJob()
{
if (ThreadPriority.BelowNormal < Thread.CurrentThread.Priority)
{
@ -147,13 +147,13 @@ public class BackupProgressItem : BaseBackupProgressItem
PublishChanges();
};
backupTask.RunJob();
await backupTask.RunJob();
var backupStorage = _backupStorageFactory.GetBackupStorage(_storageType, TenantId, StorageParams);
if (backupStorage != null)
{
storagePath = backupStorage.Upload(_storageBasePath, tempFile, _userId);
Link = backupStorage.GetPublicLink(storagePath);
storagePath = await backupStorage.Upload(_storageBasePath, tempFile, _userId);
Link = await backupStorage.GetPublicLink(storagePath);
}
var repo = _backupRepository;

View File

@ -104,7 +104,7 @@ public class RestoreProgressItem : BaseBackupProgressItem
_configPaths = configPaths;
}
protected override void DoJob()
protected override async Task DoJob()
{
Tenant tenant = null;
@ -129,7 +129,7 @@ public class RestoreProgressItem : BaseBackupProgressItem
var storage = _backupStorageFactory.GetBackupStorage(StorageType, TenantId, StorageParams);
storage.Download(StoragePath, tempFile);
await storage.Download(StoragePath, tempFile);
if (!_coreBaseSettings.Standalone)
{
@ -156,7 +156,7 @@ public class RestoreProgressItem : BaseBackupProgressItem
Percentage = Percentage = 10d + 0.65 * args.Progress;
PublishChanges();
};
restoreTask.RunJob();
await restoreTask.RunJob();
Tenant restoredTenant = null;

View File

@ -99,7 +99,7 @@ public class TransferProgressItem : BaseBackupProgressItem
}
protected override void DoJob()
protected override async Task DoJob()
{
var tempFile = PathHelper.GetTempFileName(TempFolder);
var tenant = _tenantManager.GetTenant(TenantId);
@ -121,7 +121,7 @@ public class TransferProgressItem : BaseBackupProgressItem
PublishChanges();
};
transferProgressItem.RunJob();
await transferProgressItem.RunJob();
Link = GetLink(alias, false);
_notifyHelper.SendAboutTransferComplete(tenant, TargetRegion, Link, !Notify, transferProgressItem.ToTenantId);

View File

@ -45,34 +45,34 @@ public class ConsumerBackupStorage : IBackupStorage
_store = _storageSettingsHelper.DataStore(settings);
}
public string Upload(string storageBasePath, string localPath, Guid userId)
public async Task<string> Upload(string storageBasePath, string localPath, Guid userId)
{
using var stream = File.OpenRead(localPath);
var storagePath = Path.GetFileName(localPath);
_store.SaveAsync(Domain, storagePath, stream, ACL.Private).Wait();
await _store.SaveAsync(Domain, storagePath, stream, ACL.Private);
return storagePath;
}
public void Download(string storagePath, string targetLocalPath)
public async Task Download(string storagePath, string targetLocalPath)
{
using var source = _store.GetReadStreamAsync(Domain, storagePath).Result;
using var source = await _store.GetReadStreamAsync(Domain, storagePath);
using var destination = File.OpenWrite(targetLocalPath);
source.CopyTo(destination);
await source.CopyToAsync(destination);
}
public void Delete(string storagePath)
public async Task Delete(string storagePath)
{
if (_store.IsFileAsync(Domain, storagePath).Result)
if (await _store.IsFileAsync(Domain, storagePath))
{
_store.DeleteAsync(Domain, storagePath).Wait();
await _store.DeleteAsync(Domain, storagePath);
}
}
public bool IsExists(string storagePath)
public async Task<bool> IsExists(string storagePath)
{
if (_store != null)
{
return _store.IsFileAsync(Domain, storagePath).Result;
return await _store.IsFileAsync(Domain, storagePath);
}
else
{
@ -80,8 +80,8 @@ public class ConsumerBackupStorage : IBackupStorage
}
}
public string GetPublicLink(string storagePath)
public async Task<string> GetPublicLink(string storagePath)
{
return _store.GetInternalUriAsync(Domain, storagePath, TimeSpan.FromDays(1), null).Result.AbsoluteUri;
return (await _store.GetInternalUriAsync(Domain, storagePath, TimeSpan.FromDays(1), null)).AbsoluteUri;
}
}

View File

@ -44,39 +44,39 @@ public class DataStoreBackupStorage : IBackupStorage
_tenant = tenant;
}
public string Upload(string storageBasePath, string localPath, Guid userId)
public async Task<string> Upload(string storageBasePath, string localPath, Guid userId)
{
using var stream = File.OpenRead(localPath);
var storagePath = Path.GetFileName(localPath);
GetDataStore().SaveAsync("", storagePath, stream).Wait();
await GetDataStore().SaveAsync("", storagePath, stream);
return storagePath;
}
public void Download(string storagePath, string targetLocalPath)
public async Task Download(string storagePath, string targetLocalPath)
{
using var source = GetDataStore().GetReadStreamAsync("", storagePath).Result;
using var source = await GetDataStore().GetReadStreamAsync("", storagePath);
using var destination = File.OpenWrite(targetLocalPath);
source.CopyTo(destination);
}
public void Delete(string storagePath)
public async Task Delete(string storagePath)
{
var dataStore = GetDataStore();
if (dataStore.IsFileAsync("", storagePath).Result)
if (await dataStore.IsFileAsync("", storagePath))
{
dataStore.DeleteAsync("", storagePath).Wait();
await dataStore.DeleteAsync("", storagePath);
}
}
public bool IsExists(string storagePath)
public async Task<bool> IsExists(string storagePath)
{
return GetDataStore().IsFileAsync("", storagePath).Result;
return await GetDataStore().IsFileAsync("", storagePath);
}
public string GetPublicLink(string storagePath)
public async Task<string> GetPublicLink(string storagePath)
{
return GetDataStore().GetPreSignedUriAsync("", storagePath, TimeSpan.FromDays(1), null).Result.ToString();
return (await GetDataStore().GetPreSignedUriAsync("", storagePath, TimeSpan.FromDays(1), null)).ToString();
}
protected virtual IDataStore GetDataStore()

View File

@ -60,7 +60,7 @@ public class DocumentsBackupStorage : IBackupStorage
_webConfigPath = webConfigPath;
}
public string Upload(string folderId, string localPath, Guid userId)
public async Task<string> Upload(string folderId, string localPath, Guid userId)
{
_tenantManager.SetCurrentTenant(_tenantId);
if (!userId.Equals(Guid.Empty))
@ -75,62 +75,62 @@ public class DocumentsBackupStorage : IBackupStorage
if (int.TryParse(folderId, out var fId))
{
return Upload(fId, localPath).ToString();
return (await Upload(fId, localPath)).ToString();
}
return Upload(folderId, localPath);
return await Upload(folderId, localPath);
}
public void Download(string fileId, string targetLocalPath)
public async Task Download(string fileId, string targetLocalPath)
{
_tenantManager.SetCurrentTenant(_tenantId);
if (int.TryParse(fileId, out var fId))
{
DownloadDao(fId, targetLocalPath);
await DownloadDao(fId, targetLocalPath);
return;
}
DownloadDao(fileId, targetLocalPath);
await DownloadDao(fileId, targetLocalPath);
}
public void Delete(string fileId)
public async Task Delete(string fileId)
{
_tenantManager.SetCurrentTenant(_tenantId);
if (int.TryParse(fileId, out var fId))
{
DeleteDao(fId);
await DeleteDao(fId);
return;
}
DeleteDao(fileId);
await DeleteDao(fileId);
}
public bool IsExists(string fileId)
public async Task<bool> IsExists(string fileId)
{
_tenantManager.SetCurrentTenant(_tenantId);
if (int.TryParse(fileId, out var fId))
{
return IsExistsDao(fId);
return await IsExistsDao(fId);
}
return IsExistsDao(fileId);
return await IsExistsDao(fileId);
}
public string GetPublicLink(string fileId)
public Task<string> GetPublicLink(string fileId)
{
return string.Empty;
return Task.FromResult(String.Empty);
}
private T Upload<T>(T folderId, string localPath)
private async Task<T> Upload<T>(T folderId, string localPath)
{
var folderDao = GetFolderDao<T>();
var fileDao = GetFileDao<T>();
var folder = folderDao.GetFolderAsync(folderId).Result;
var folder = await folderDao.GetFolderAsync(folderId);
if (folder == null)
{
throw new FileNotFoundException("Folder not found.");
@ -144,52 +144,52 @@ public class DocumentsBackupStorage : IBackupStorage
File<T> file = null;
var buffer = new byte[_setupInfo.ChunkUploadSize];
var chunkedUploadSession = fileDao.CreateUploadSessionAsync(newFile, source.Length).Result;
var chunkedUploadSession = await fileDao.CreateUploadSessionAsync(newFile, source.Length);
chunkedUploadSession.CheckQuota = false;
int bytesRead;
while ((bytesRead = source.Read(buffer, 0, (int)_setupInfo.ChunkUploadSize)) > 0)
while ((bytesRead = await source.ReadAsync(buffer, 0, (int)_setupInfo.ChunkUploadSize)) > 0)
{
using (var theMemStream = new MemoryStream())
{
theMemStream.Write(buffer, 0, bytesRead);
await theMemStream.WriteAsync(buffer, 0, bytesRead);
theMemStream.Position = 0;
file = fileDao.UploadChunkAsync(chunkedUploadSession, theMemStream, bytesRead).Result;
file = await fileDao.UploadChunkAsync(chunkedUploadSession, theMemStream, bytesRead);
}
}
return file.Id;
}
private void DownloadDao<T>(T fileId, string targetLocalPath)
private async Task DownloadDao<T>(T fileId, string targetLocalPath)
{
_tenantManager.SetCurrentTenant(_tenantId);
var fileDao = GetFileDao<T>();
var file = fileDao.GetFileAsync(fileId).Result;
var file = await fileDao.GetFileAsync(fileId);
if (file == null)
{
throw new FileNotFoundException("File not found.");
}
using var source = fileDao.GetFileStreamAsync(file).Result;
using var source = await fileDao.GetFileStreamAsync(file);
using var destination = File.OpenWrite(targetLocalPath);
source.CopyTo(destination);
await source.CopyToAsync(destination);
}
private void DeleteDao<T>(T fileId)
private async Task DeleteDao<T>(T fileId)
{
var fileDao = GetFileDao<T>();
fileDao.DeleteFileAsync(fileId).Wait();
await fileDao.DeleteFileAsync(fileId);
}
private bool IsExistsDao<T>(T fileId)
private async Task<bool> IsExistsDao<T>(T fileId)
{
var fileDao = GetFileDao<T>();
try
{
var file = fileDao.GetFileAsync(fileId).Result;
var file = await fileDao.GetFileAsync(fileId);
return file != null && file.RootFolderType != FolderType.TRASH;
}
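DocumentsBackupStorage.Upload streams the backup file in chunks of ChunkUploadSize, writing each chunk into a MemoryStream and passing it to UploadChunkAsync. A standalone sketch of the chunked-copy loop, with a hypothetical uploadChunk delegate standing in for the file DAO:

using System;
using System.IO;
using System.Threading.Tasks;

public static class ChunkedCopySketch
{
    // Reads the source in fixed-size chunks and hands each chunk to an async
    // consumer, mirroring the CreateUploadSessionAsync/UploadChunkAsync loop above.
    public static async Task CopyInChunksAsync(Stream source, Func<MemoryStream, int, Task> uploadChunk, int chunkSize)
    {
        var buffer = new byte[chunkSize];
        int bytesRead;
        while ((bytesRead = await source.ReadAsync(buffer, 0, chunkSize)) > 0)
        {
            using var chunk = new MemoryStream();
            await chunk.WriteAsync(buffer, 0, bytesRead);
            chunk.Position = 0;
            await uploadChunk(chunk, bytesRead);
        }
    }
}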

View File

@ -28,9 +28,9 @@ namespace ASC.Data.Backup.Storage;
public interface IBackupStorage
{
bool IsExists(string storagePath);
string GetPublicLink(string storagePath);
string Upload(string storageBasePath, string localPath, Guid userId);
void Delete(string storagePath);
void Download(string storagePath, string targetLocalPath);
Task<bool> IsExists(string storagePath);
Task<string> GetPublicLink(string storagePath);
Task<string> Upload(string storageBasePath, string localPath, Guid userId);
Task Delete(string storagePath);
Task Download(string storagePath, string targetLocalPath);
}

View File

@ -29,7 +29,7 @@ namespace ASC.Data.Backup.Storage;
[Scope]
public class LocalBackupStorage : IBackupStorage
{
public string Upload(string storageBasePath, string localPath, Guid userId)
public Task<string> Upload(string storageBasePath, string localPath, Guid userId)
{
if (!Directory.Exists(storageBasePath))
{
@ -42,26 +42,28 @@ public class LocalBackupStorage : IBackupStorage
File.Copy(localPath, storagePath, true);
}
return storagePath;
return Task.FromResult(storagePath);
}
public void Download(string storagePath, string targetLocalPath)
public Task Download(string storagePath, string targetLocalPath)
{
File.Copy(storagePath, targetLocalPath, true);
return Task.CompletedTask;
}
public void Delete(string storagePath)
public Task Delete(string storagePath)
{
File.Delete(storagePath);
return Task.CompletedTask;
}
public bool IsExists(string storagePath)
public Task<bool> IsExists(string storagePath)
{
return File.Exists(storagePath);
return Task.FromResult(File.Exists(storagePath));
}
public string GetPublicLink(string storagePath)
public Task<string> GetPublicLink(string storagePath)
{
return string.Empty;
return Task.FromResult(string.Empty);
}
}

View File

@ -43,7 +43,7 @@ internal class S3BackupStorage : IBackupStorage
_region = region;
}
public string Upload(string storageBasePath, string localPath, Guid userId)
public async Task<string> Upload(string storageBasePath, string localPath, Guid userId)
{
string key;
@ -58,7 +58,7 @@ internal class S3BackupStorage : IBackupStorage
using (var fileTransferUtility = new TransferUtility(_accessKeyId, _secretAccessKey, RegionEndpoint.GetBySystemName(_region)))
{
fileTransferUtility.Upload(
await fileTransferUtility.UploadAsync(
new TransferUtilityUploadRequest
{
BucketName = _bucket,
@ -73,7 +73,7 @@ internal class S3BackupStorage : IBackupStorage
return key;
}
public void Download(string storagePath, string targetLocalPath)
public async Task Download(string storagePath, string targetLocalPath)
{
var request = new GetObjectRequest
{
@ -82,27 +82,27 @@ internal class S3BackupStorage : IBackupStorage
};
using var s3 = GetClient();
using var response = s3.GetObjectAsync(request).Result;
response.WriteResponseStreamToFileAsync(targetLocalPath, true, new CancellationToken());
using var response = await s3.GetObjectAsync(request);
await response.WriteResponseStreamToFileAsync(targetLocalPath, true, new CancellationToken());
}
public void Delete(string storagePath)
public async Task Delete(string storagePath)
{
using var s3 = GetClient();
s3.DeleteObjectAsync(new DeleteObjectRequest
await s3.DeleteObjectAsync(new DeleteObjectRequest
{
BucketName = _bucket,
Key = GetKey(storagePath)
});
}
public bool IsExists(string storagePath)
public async Task<bool> IsExists(string storagePath)
{
using var s3 = GetClient();
try
{
var request = new ListObjectsRequest { BucketName = _bucket, Prefix = GetKey(storagePath) };
var response = s3.ListObjectsAsync(request).Result;
var response = await s3.ListObjectsAsync(request);
return response.S3Objects.Count > 0;
}
@ -114,18 +114,18 @@ internal class S3BackupStorage : IBackupStorage
}
}
public string GetPublicLink(string storagePath)
public Task<string> GetPublicLink(string storagePath)
{
using var s3 = GetClient();
return s3.GetPreSignedURL(
return Task.FromResult(s3.GetPreSignedURL(
new GetPreSignedUrlRequest
{
BucketName = _bucket,
Key = GetKey(storagePath),
Expires = DateTime.UtcNow.AddDays(1),
Verb = HttpVerb.GET
});
}));
}
private string GetKey(string fileName)

View File

@ -70,7 +70,7 @@ public class BackupPortalTask : PortalTaskBase
}
public override void RunJob()
public override async Task RunJob()
{
_logger.DebugBeginBackup(TenantId);
_tenantManager.SetCurrentTenant(TenantId);
@ -80,13 +80,13 @@ public class BackupPortalTask : PortalTaskBase
{
if (_dump)
{
DoDump(writer);
await DoDump(writer);
}
else
{
var modulesToProcess = GetModulesToProcess().ToList();
var fileGroups = GetFilesGroup();
var fileGroups = await GetFilesGroup();
var stepscount = ProcessStorage ? fileGroups.Count : 0;
SetStepsCount(modulesToProcess.Count + stepscount);
@ -97,7 +97,7 @@ public class BackupPortalTask : PortalTaskBase
}
if (ProcessStorage)
{
DoBackupStorage(writer, fileGroups);
await DoBackupStorage(writer, fileGroups);
}
}
}
@ -121,7 +121,7 @@ public class BackupPortalTask : PortalTaskBase
return list;
}
private void DoDump(IDataWriteOperator writer)
private async Task DoDump(IDataWriteOperator writer)
{
var databases = new Dictionary<Tuple<string, string>, List<string>>();
@ -173,7 +173,7 @@ public class BackupPortalTask : PortalTaskBase
var tenants = _tenantManager.GetTenants(false).Select(r => r.Id);
foreach (var t in tenants)
{
files.AddRange(GetFiles(t));
files.AddRange(await GetFiles(t));
}
stepscount += files.Count * 2 + 1;
@ -258,9 +258,9 @@ public class BackupPortalTask : PortalTaskBase
}
}
private IEnumerable<BackupFileInfo> GetFiles(int tenantId)
private async Task<IEnumerable<BackupFileInfo>> GetFiles(int tenantId)
{
var files = GetFilesToProcess(tenantId).ToList();
var files = (await GetFilesToProcess(tenantId)).ToList();
using var backupRecordContext = _dbContextFactory.CreateDbContext();
var exclude = backupRecordContext.Backups.AsQueryable().Where(b => b.TenantId == tenantId && b.StorageType == 0 && b.StoragePath != null).ToList();
files = files.Where(f => !exclude.Any(e => f.Path.Replace('\\', '/').Contains($"/file_{e.StoragePath}/"))).ToList();
@ -621,9 +621,9 @@ public class BackupPortalTask : PortalTaskBase
_logger.DebugArchiveDirEnd(subDir);
}
private List<IGrouping<string, BackupFileInfo>> GetFilesGroup()
private async Task<List<IGrouping<string, BackupFileInfo>>> GetFilesGroup()
{
var files = GetFilesToProcess(TenantId).ToList();
var files = (await GetFilesToProcess(TenantId)).ToList();
using var backupRecordContext = _dbContextFactory.CreateDbContext();
var exclude = backupRecordContext.Backups.AsQueryable().Where(b => b.TenantId == TenantId && b.StorageType == 0 && b.StoragePath != null).ToList();
@ -697,7 +697,7 @@ public class BackupPortalTask : PortalTaskBase
_logger.DebugEndSavingDataForModule(module.ModuleName);
}
private void DoBackupStorage(IDataWriteOperator writer, List<IGrouping<string, BackupFileInfo>> fileGroups)
private async Task DoBackupStorage(IDataWriteOperator writer, List<IGrouping<string, BackupFileInfo>> fileGroups)
{
_logger.DebugBeginBackupStorage();
@ -710,10 +710,10 @@ public class BackupPortalTask : PortalTaskBase
{
var storage = StorageFactory.GetStorage(ConfigPath, TenantId, group.Key);
var file1 = file;
ActionInvoker.Try(state =>
await ActionInvoker.Try(async state =>
{
var f = (BackupFileInfo)state;
using var fileStream = storage.GetReadStreamAsync(f.Domain, f.Path).Result;
using var fileStream = await storage.GetReadStreamAsync(f.Domain, f.Path);
writer.WriteEntry(file1.GetZipKey(), fileStream);
}, file, 5, error => _logger.WarningCanNotBackupFile(file1.Module, file1.Path, error));
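
The retry call above is now awaited and receives an async lambda, which implies an awaitable ActionInvoker.Try overload taking a Func<object, Task>. A minimal sketch of such an overload, assuming this call shape; the real helper in ASC.Common may differ in naming and parameters:

using System;
using System.Threading.Tasks;

public static class ActionInvokerSketch
{
    // Retries an async action up to maxAttempts times; reports the last failure via onFailure.
    public static async Task Try(Func<object, Task> action, object state, int maxAttempts, Action<Exception> onFailure = null)
    {
        for (var attempt = 1; attempt <= maxAttempts; attempt++)
        {
            try
            {
                await action(state);
                return;                       // succeeded, stop retrying
            }
            catch (Exception error)
            {
                if (attempt == maxAttempts)
                {
                    onFailure?.Invoke(error); // give up and report the final error
                }
            }
        }
    }
}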

View File

@ -44,7 +44,7 @@ public class DeletePortalTask : PortalTaskBase
_logger = logger;
}
public override void RunJob()
public override async Task RunJob()
{
_logger.DebugBeginDelete(TenantId);
var modulesToProcess = GetModulesToProcess().Reverse().ToList();
@ -57,7 +57,7 @@ public class DeletePortalTask : PortalTaskBase
if (ProcessStorage)
{
DoDeleteStorage();
await DoDeleteStorage();
}
_logger.DebugEndDelete(TenantId);
@ -84,7 +84,7 @@ public class DeletePortalTask : PortalTaskBase
_logger.DebugEndDeleteDataForModule(module.ModuleName);
}
private void DoDeleteStorage()
private async Task DoDeleteStorage()
{
_logger.DebugBeginDeleteStorage();
var storageModules = StorageFactoryConfig.GetModuleList(ConfigPath).Where(IsStorageModuleAllowed).ToList();
@ -95,10 +95,10 @@ public class DeletePortalTask : PortalTaskBase
var domains = StorageFactoryConfig.GetDomainList(ConfigPath, module);
foreach (var domain in domains)
{
ActionInvoker.Try(state => storage.DeleteFilesAsync((string)state, "\\", "*.*", true).Wait(), domain, 5,
await ActionInvoker.Try(async state => await storage.DeleteFilesAsync((string)state, "\\", "*.*", true), domain, 5,
onFailure: error => _logger.WarningCanNotDeleteFilesForDomain(domain, error));
}
storage.DeleteFilesAsync("\\", "*.*", true).Wait();
await storage.DeleteFilesAsync("\\", "*.*", true);
SetCurrentStepProgress((int)(++modulesProcessed * 100 / (double)storageModules.Count));
}
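
Replacing .Wait()/.Result with await also changes how failures surface: blocking wraps the error in an AggregateException, while awaiting rethrows the original exception, so the onFailure logging sees the real storage error. A self-contained illustration of that difference, unrelated to the project's own types:

using System;
using System.Threading.Tasks;

public static class WaitVsAwaitDemo
{
    private static Task FailAsync() => Task.FromException(new InvalidOperationException("storage error"));

    public static async Task RunAsync()
    {
        try { FailAsync().Wait(); }                        // blocking: the error arrives wrapped
        catch (AggregateException ex) { Console.WriteLine(ex.InnerException?.Message); }

        try { await FailAsync(); }                         // awaiting: the original exception type surfaces
        catch (InvalidOperationException ex) { Console.WriteLine(ex.Message); }
    }
}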

View File

@ -85,14 +85,14 @@ public abstract class PortalTaskBase
}
}
public abstract void RunJob();
public abstract Task RunJob();
internal virtual IEnumerable<IModuleSpecifics> GetModulesToProcess()
{
return ModuleProvider.AllModules.Where(module => !_ignoredModules.Contains(module.ModuleName));
}
protected IEnumerable<BackupFileInfo> GetFilesToProcess(int tenantId)
protected async Task<IEnumerable<BackupFileInfo>> GetFilesToProcess(int tenantId)
{
var files = new List<BackupFileInfo>();
foreach (var module in StorageFactoryConfig.GetModuleList(ConfigPath).Where(IsStorageModuleAllowed))
@ -103,12 +103,12 @@ public abstract class PortalTaskBase
foreach (var domain in domains)
{
files.AddRange(
store.ListFilesRelativeAsync(domain, "\\", "*.*", true).ToArrayAsync().Result
(await store.ListFilesRelativeAsync(domain, "\\", "*.*", true).ToArrayAsync())
.Select(path => new BackupFileInfo(domain, module, path, tenantId)));
}
files.AddRange(
store.ListFilesRelativeAsync(string.Empty, "\\", "*.*", true).ToArrayAsync().Result
(await store.ListFilesRelativeAsync(string.Empty, "\\", "*.*", true).ToArrayAsync())
.Where(path => domains.All(domain => !path.Contains(domain + "/")))
.Select(path => new BackupFileInfo(string.Empty, module, path, tenantId)));
}
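
GetFilesToProcess now awaits ToArrayAsync on the IAsyncEnumerable returned by ListFilesRelativeAsync instead of blocking on .Result. A minimal sketch of that pattern; ToArrayAsync is typically supplied by the System.Linq.Async package (the project may provide an equivalent extension of its own):

using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;

public static class AsyncListingDemo
{
    private static async IAsyncEnumerable<string> ListFilesAsync()
    {
        yield return "domain/file1.txt";
        await Task.Yield();                 // stand-in for asynchronous I/O
        yield return "domain/file2.txt";
    }

    public static async Task<string[]> CollectAsync()
    {
        // Same shape as GetFilesToProcess above: await the materialization
        // instead of calling .ToArrayAsync().Result on the async sequence.
        return await ListFilesAsync().ToArrayAsync();
    }
}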

View File

@ -60,7 +60,7 @@ public class RestoreDbModuleTask : PortalTaskBase
Init(-1, null);
}
public override void RunJob()
public override Task RunJob()
{
_logger.DebugBeginRestoreDataForModule(_module.ModuleName);
SetStepsCount(_module.Tables.Count(t => !_ignoredTables.Contains(t.Name)));
@ -86,6 +86,7 @@ public class RestoreDbModuleTask : PortalTaskBase
}
_logger.DebugEndRestoreDataForModule(_module.ModuleName);
return Task.CompletedTask;
}
public string[] ExecuteArray(DbCommand command)
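
Here the body stays synchronous and only the signature changes, so the override returns Task.CompletedTask rather than being marked async. A small sketch of that pattern, not the project's actual classes:

using System.Threading.Tasks;

public abstract class JobBase
{
    public abstract Task RunJob();
}

public class SynchronousJob : JobBase
{
    // A body with no awaits can satisfy a Task-returning override without the async
    // keyword: returning Task.CompletedTask avoids generating an async state machine.
    public override Task RunJob()
    {
        // ... purely synchronous work ...
        return Task.CompletedTask;
    }
}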

View File

@ -79,7 +79,7 @@ public class RestorePortalTask : PortalTaskBase
Init(tenantId, toConfigPath);
}
public override void RunJob()
public override async Task RunJob()
{
_options.DebugBeginRestorePortal();
@ -94,7 +94,7 @@ public class RestorePortalTask : PortalTaskBase
if (Dump)
{
RestoreFromDump(dataReader);
await RestoreFromDump(dataReader);
}
else
{
@ -111,7 +111,7 @@ public class RestorePortalTask : PortalTaskBase
restoreTask.IgnoreTable(tableName);
}
restoreTask.RunJob();
await restoreTask.RunJob();
}
}
@ -125,7 +125,7 @@ public class RestorePortalTask : PortalTaskBase
_ascCacheNotify.ClearCache();
}
DoRestoreStorage(dataReader);
await DoRestoreStorage(dataReader);
}
if (UnblockPortalAfterCompleted)
@ -153,7 +153,7 @@ public class RestorePortalTask : PortalTaskBase
_options.DebugEndRestorePortal();
}
private void RestoreFromDump(IDataReadOperator dataReader)
private async Task RestoreFromDump(IDataReadOperator dataReader)
{
var keyBase = KeyHelper.GetDatabaseSchema();
var keys = dataReader.GetEntries(keyBase).Select(r => Path.GetFileName(r)).ToList();
@ -187,7 +187,7 @@ public class RestorePortalTask : PortalTaskBase
SetStepsCount(stepscount + 1);
DoDeleteStorage(storageModules, tenants);
await DoDeleteStorage(storageModules, tenants);
}
else
{
@ -343,7 +343,7 @@ public class RestorePortalTask : PortalTaskBase
}
}
private void DoRestoreStorage(IDataReadOperator dataReader)
private async Task DoRestoreStorage(IDataReadOperator dataReader)
{
_options.DebugBeginRestoreStorage();
@ -371,7 +371,7 @@ public class RestorePortalTask : PortalTaskBase
using var stream = dataReader.GetEntry(key);
try
{
storage.SaveAsync(file.Domain, adjustedPath, module != null ? module.PrepareData(key, stream, _columnMapper) : stream).Wait();
await storage.SaveAsync(file.Domain, adjustedPath, module != null ? module.PrepareData(key, stream, _columnMapper) : stream);
}
catch (Exception error)
{
@ -399,7 +399,7 @@ public class RestorePortalTask : PortalTaskBase
_options.DebugEndRestoreStorage();
}
private void DoDeleteStorage(IEnumerable<string> storageModules, IEnumerable<Tenant> tenants)
private async Task DoDeleteStorage(IEnumerable<string> storageModules, IEnumerable<Tenant> tenants)
{
_options.DebugBeginDeleteStorage();
@ -414,12 +414,12 @@ public class RestorePortalTask : PortalTaskBase
foreach (var domain in domains)
{
ActionInvoker.Try(
state =>
await ActionInvoker.Try(
async state =>
{
if (storage.IsDirectoryAsync((string)state).Result)
if (await storage.IsDirectoryAsync((string)state))
{
storage.DeleteFilesAsync((string)state, "\\", "*.*", true).Wait();
await storage.DeleteFilesAsync((string)state, "\\", "*.*", true);
}
},
domain,
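
Since RestoreFromDump, DoRestoreStorage, DoDeleteStorage and the nested storage calls are all awaited, asynchrony now propagates out of RunJob, and whoever drives the task has to await it rather than block. A minimal caller sketch, assuming the PortalTaskBase shown in this diff is in scope; the real hosting services in ASC.Data.Backup are not shown here:

using System.Threading.Tasks;

public class PortalJobRunner
{
    // Assumes PortalTaskBase (the abstract base changed earlier in this diff) is available.
    public async Task RunAsync(PortalTaskBase task)
    {
        await task.RunJob();   // asynchrony reaches the host instead of .Wait()/.Result
    }
}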

View File

@ -74,7 +74,7 @@ public class TransferPortalTask : PortalTaskBase
BackupDirectory = backupDirectory;
}
public override void RunJob()
public override async Task RunJob()
{
_logger.DebugBeginTransfer(TenantId);
var fromDbFactory = new DbFactory(null, null);
@ -103,7 +103,7 @@ public class TransferPortalTask : PortalTaskBase
{
backupTask.IgnoreModule(moduleName);
}
backupTask.RunJob();
await backupTask.RunJob();
//restore db data from temporary file
var restoreTask = _serviceProvider.GetService<RestorePortalTask>();
@ -114,12 +114,12 @@ public class TransferPortalTask : PortalTaskBase
{
restoreTask.IgnoreModule(moduleName);
}
restoreTask.RunJob();
await restoreTask.RunJob();
//transfer files
if (ProcessStorage)
{
DoTransferStorage(columnMapper);
await DoTransferStorage(columnMapper);
}
SaveTenant(toDbFactory, tenantAlias, TenantStatus.Active);
@ -153,10 +153,10 @@ public class TransferPortalTask : PortalTaskBase
}
}
private void DoTransferStorage(ColumnMapper columnMapper)
private async Task DoTransferStorage(ColumnMapper columnMapper)
{
_logger.DebugBeginTransferStorage();
var fileGroups = GetFilesToProcess(TenantId).GroupBy(file => file.Module).ToList();
var fileGroups = (await GetFilesToProcess(TenantId)).GroupBy(file => file.Module).ToList();
var groupsProcessed = 0;
foreach (var group in fileGroups)
{
@ -173,7 +173,7 @@ public class TransferPortalTask : PortalTaskBase
{
try
{
utility.CopyFileAsync(file.Domain, file.Path, file.Domain, adjustedPath).Wait();
await utility.CopyFileAsync(file.Domain, file.Path, file.Domain, adjustedPath);
}
catch (Exception error)
{
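
The transfer path awaits the inner backup and restore jobs one after another, so the original step order (back up the database, restore it into the target, then move storage files) is preserved even though everything is asynchronous now. A small illustration of that property, unrelated to the real ASC types:

using System;
using System.Threading.Tasks;

public static class SequentialStepsDemo
{
    public static async Task RunAsync()
    {
        // Awaiting one task after another keeps the steps strictly ordered,
        // mirroring how TransferPortalTask awaits backup, then restore, then storage transfer.
        await StepAsync("backup db to temporary file");
        await StepAsync("restore db data from temporary file");
        await StepAsync("transfer storage files");
    }

    private static async Task StepAsync(string name)
    {
        Console.WriteLine($"start: {name}");
        await Task.Delay(10);               // stand-in for real work
        Console.WriteLine($"done:  {name}");
    }
}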

View File

@ -3,6 +3,7 @@
<PropertyGroup>
<TargetFramework>net6.0</TargetFramework>
<ProduceReferenceAssembly>false</ProduceReferenceAssembly>
<ImplicitUsings>enable</ImplicitUsings>
</PropertyGroup>
<ItemGroup>

View File

@ -24,9 +24,6 @@
// content are licensed under the terms of the Creative Commons Attribution-ShareAlike 4.0
// International. See the License terms at http://creativecommons.org/licenses/by-sa/4.0/legalcode
global using System;
global using System.IO;
global using System.Linq;
global using System.Security.Cryptography;
global using System.Text;
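
The <ImplicitUsings>enable</ImplicitUsings> added to the project file above is what makes these explicit global usings redundant: for the plain Microsoft.NET.Sdk on net6.0 the compiler receives a generated file under obj/ roughly like the sketch below. System.Security.Cryptography and System.Text are not on that implicit list, which is presumably why those usings stay.

// Approximate content of the SDK-generated implicit usings file for net6.0 / Microsoft.NET.Sdk.
global using global::System;
global using global::System.Collections.Generic;
global using global::System.IO;
global using global::System.Linq;
global using global::System.Net.Http;
global using global::System.Threading;
global using global::System.Threading.Tasks;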

Some files were not shown because too many files have changed in this diff.