Merge branch 'develop' into bugfix/login

Viktor Fomin 2022-10-14 12:35:10 +05:00
commit 5f2ac4ed3a
49 changed files with 1762 additions and 207 deletions

View File

@ -101,6 +101,8 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.Migration", "common\ASC
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.ActiveDirectory", "common\ASC.ActiveDirectory\ASC.ActiveDirectory.csproj", "{9F81862F-303D-467F-8DC9-044BE2CCF329}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ASC.EventBus.ActiveMQ", "common\ASC.EventBus.ActiveMQ\ASC.EventBus.ActiveMQ.csproj", "{86916EF2-4A1B-441C-B673-EB0F68EC9C3A}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
@ -287,6 +289,10 @@ Global
{9F81862F-303D-467F-8DC9-044BE2CCF329}.Debug|Any CPU.Build.0 = Debug|Any CPU
{9F81862F-303D-467F-8DC9-044BE2CCF329}.Release|Any CPU.ActiveCfg = Release|Any CPU
{9F81862F-303D-467F-8DC9-044BE2CCF329}.Release|Any CPU.Build.0 = Release|Any CPU
{86916EF2-4A1B-441C-B673-EB0F68EC9C3A}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{86916EF2-4A1B-441C-B673-EB0F68EC9C3A}.Debug|Any CPU.Build.0 = Debug|Any CPU
{86916EF2-4A1B-441C-B673-EB0F68EC9C3A}.Release|Any CPU.ActiveCfg = Release|Any CPU
{86916EF2-4A1B-441C-B673-EB0F68EC9C3A}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE

View File

@ -10,6 +10,7 @@
"common\\ASC.Data.Encryption\\ASC.Data.Encryption.csproj",
"common\\ASC.Data.Reassigns\\ASC.Data.Reassigns.csproj",
"common\\ASC.Data.Storage\\ASC.Data.Storage.csproj",
"common\\ASC.EventBus.ActiveMQ\\ASC.EventBus.ActiveMQ.csproj",
"common\\ASC.EventBus.Extensions.Logger\\ASC.EventBus.Extensions.Logger.csproj",
"common\\ASC.EventBus.RabbitMQ\\ASC.EventBus.RabbitMQ.csproj",
"common\\ASC.EventBus\\ASC.EventBus.csproj",

build/build.backend.docker.sh (new executable file, 91 lines added)
View File

@ -0,0 +1,91 @@
#!/bin/bash
rd="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
echo "Run script directory:" $dir
dir=$(builtin cd $rd/../; pwd)
echo "Root directory:" $dir
cd $dir
branch=$(git branch | sed -n -e 's/^\* \(.*\)/\1/p')
echo "GIT_BRANCH:" $branch
cd $dir/build/install/docker/
docker_dir="$( pwd )"
echo "Docker directory:" $docker_dir
build_date=$(date +%Y-%m-%d)
echo "BUILD DATE: $build_date"
local_ip=$(ipconfig getifaddr en0)
echo "LOCAL IP: $local_ip"
doceditor=${local_ip}:5013
login=${local_ip}:5011
client=${local_ip}:5001
echo "SERVICE_DOCEDITOR: $doceditor"
echo "SERVICE_LOGIN: $login"
echo "SERVICE_CLIENT: $client"
arch_name="$(uname -m)"
echo "Run MySQL"
if [ "${arch_name}" = "x86_64" ]; then
echo "CPU Type: x86_64 -> run db.yml"
docker compose -f db.yml up -d
elif [ "${arch_name}" = "arm64" ]; then
echo "CPU Type: arm64 -> run ddb.arm.yml"
MYSQL_IMAGE=arm64v8/mysql:oracle \
docker compose -f db.yml up -d
else
echo "Error: Unknown CPU Type: ${arch_name}."
exit 1
fi
echo "Run environments (redis, rabbitmq)"
DOCKERFILE=Dockerfile.dev \
docker compose -f redis.yml -f rabbitmq.yml up -d
if [ "$1" = "--no_ds" ]; then
echo "SKIP Document server"
else
echo "Run Document server"
docker compose -f ds.yml up -d
fi
echo "Stop all backend services"
DOCKERFILE=Dockerfile.dev \
docker compose -f docspace.dev.yml down
echo "Build all backend services"
DOCKERFILE=Dockerfile.dev \
RELEASE_DATE=$build_date \
GIT_BRANCH=$branch \
SERVICE_DOCEDITOR=$doceditor \
SERVICE_LOGIN=$login \
SERVICE_CLIENT=$client \
docker compose -f build.dev.yml build
echo "Run DB migration"
DOCKERFILE=Dockerfile.dev \
docker compose -f migration-runner.yml up -d
echo "Start all backend services"
DOCKERFILE=Dockerfile.dev \
ROOT_DIR=$dir \
RELEASE_DATE=$build_date \
GIT_BRANCH=$branch \
SERVICE_DOCEDITOR=$doceditor \
SERVICE_LOGIN=$login \
SERVICE_CLIENT=$client \
APP_URL_PORTAL="http://$local_ip:8092" \
docker compose -f docspace.dev.yml up -d
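
A brief usage note for the script above: it accepts an optional --no_ds flag (checked earlier in the script) to skip the Document Server containers. A typical invocation from the repository root might look like this; the path is the one shown in this commit, everything else the script resolves itself:

# full backend stack, including Document Server
./build/build.backend.docker.sh

# same, but skip the Document Server containers
./build/build.backend.docker.sh --no_ds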

View File

@ -6,6 +6,7 @@
DOCKER_TAG=latest
CONTAINER_PREFIX=${PRODUCT}-
MYSQL_VERSION=8.0.18
MYSQL_IMAGE=mysql:${MYSQL_VERSION}
ELK_VERSION=7.13.1
SERVICE_PORT=5050
DOCUMENT_SERVER_IMAGE_NAME=onlyoffice/4testing-documentserver-ee:latest

View File

@ -0,0 +1,334 @@
ARG SRC_PATH="/app/onlyoffice/src"
ARG BUILD_PATH="/var/www"
ARG DOTNET_SDK="mcr.microsoft.com/dotnet/sdk:6.0"
ARG DOTNET_RUN="mcr.microsoft.com/dotnet/aspnet:6.0"
FROM $DOTNET_SDK AS base
ARG RELEASE_DATE="2022-10-09"
ARG DEBIAN_FRONTEND=noninteractive
ARG PRODUCT_VERSION=0.0.0
ARG BUILD_NUMBER=0
ARG GIT_BRANCH="develop"
ARG SRC_PATH
ARG BUILD_PATH
ARG BUILD_ARGS="build"
ARG DEPLOY_ARGS="deploy"
ARG DEBUG_INFO="true"
LABEL onlyoffice.docspace.release-date="${RELEASE_DATE}" \
maintainer="Ascensio System SIA <support@onlyoffice.com>"
ENV LANG=en_US.UTF-8 \
LANGUAGE=en_US:en \
LC_ALL=en_US.UTF-8
RUN apt-get -y update && \
apt-get install -yq \
sudo \
locales \
git \
npm && \
locale-gen en_US.UTF-8 && \
npm install --global yarn && \
curl -fsSL https://deb.nodesource.com/setup_16.x | sudo -E bash - && \
apt-get install -y nodejs && \
rm -rf /var/lib/apt/lists/*
RUN echo ${GIT_BRANCH} && \
git clone --depth 1 --recurse-submodules -b ${GIT_BRANCH} https://github.com/ONLYOFFICE/DocSpace.git ${SRC_PATH}
RUN cd ${SRC_PATH} && \
mkdir -p /app/onlyoffice/ && \
find config/ -maxdepth 1 -name "*.json" | grep -v test | xargs tar -cvf config.tar && \
tar -C "/app/onlyoffice/" -xvf config.tar && \
cp config/*.config /app/onlyoffice/config/ && \
mkdir -p /etc/nginx/conf.d && cp -f config/nginx/onlyoffice.conf /etc/nginx/conf.d/ && \
mkdir -p /etc/nginx/includes/ && cp -f config/nginx/includes/onlyoffice*.conf /etc/nginx/includes/ && \
sed -i "s/\"number\".*,/\"number\": \"${PRODUCT_VERSION}.${BUILD_NUMBER}\",/g" /app/onlyoffice/config/appsettings.json && \
sed -e 's/#//' -i /etc/nginx/conf.d/onlyoffice.conf && \
cd ${SRC_PATH}/build/install/common/ && \
bash build-backend.sh -sp "${SRC_PATH}" && \
bash publish-backend.sh -sp "${SRC_PATH}" -bp "${BUILD_PATH}" && \
cp -rf ${SRC_PATH}/products/ASC.Files/Server/DocStore ${BUILD_PATH}/products/ASC.Files/server/ && \
rm -rf ${SRC_PATH}/common/* && \
rm -rf ${SRC_PATH}/web/ASC.Web.Core/* && \
rm -rf ${SRC_PATH}/web/ASC.Web.Studio/* && \
rm -rf ${SRC_PATH}/products/ASC.Files/Server/* && \
rm -rf ${SRC_PATH}/products/ASC.Files/Service/* && \
rm -rf ${SRC_PATH}/products/ASC.People/Server/*
COPY config/mysql/conf.d/mysql.cnf /etc/mysql/conf.d/mysql.cnf
FROM $DOTNET_RUN as dotnetrun
ARG BUILD_PATH
ARG SRC_PATH
ENV BUILD_PATH=${BUILD_PATH}
ENV SRC_PATH=${SRC_PATH}
# add default user and group for non-root run
RUN mkdir -p /var/log/onlyoffice && \
mkdir -p /app/onlyoffice/data && \
addgroup --system --gid 107 onlyoffice && \
adduser -uid 104 --quiet --home /var/www/onlyoffice --system --gid 107 onlyoffice && \
chown onlyoffice:onlyoffice /app/onlyoffice -R && \
chown onlyoffice:onlyoffice /var/log -R && \
chown onlyoffice:onlyoffice /var/www -R && \
apt-get -y update && \
apt-get install -yq \
sudo \
nano \
curl \
vim \
python3-pip \
libgdiplus && \
pip3 install --upgrade jsonpath-ng multipledispatch && \
rm -rf /var/lib/apt/lists/*
COPY --from=base --chown=onlyoffice:onlyoffice /app/onlyoffice/config/* /app/onlyoffice/config/
#USER onlyoffice
EXPOSE 5050
ENTRYPOINT ["python3", "docker-entrypoint.py"]
FROM node:16.16-slim as noderun
ARG BUILD_PATH
ARG SRC_PATH
ENV BUILD_PATH=${BUILD_PATH}
ENV SRC_PATH=${SRC_PATH}
RUN mkdir -p /var/log/onlyoffice && \
mkdir -p /app/onlyoffice/data && \
addgroup --system --gid 107 onlyoffice && \
adduser -uid 104 --quiet --home /var/www/onlyoffice --system --gid 107 onlyoffice && \
chown onlyoffice:onlyoffice /app/onlyoffice -R && \
chown onlyoffice:onlyoffice /var/log -R && \
chown onlyoffice:onlyoffice /var/www -R && \
apt-get -y update && \
apt-get install -yq \
sudo \
nano \
curl \
vim \
python3-pip && \
pip3 install --upgrade jsonpath-ng multipledispatch && \
rm -rf /var/lib/apt/lists/*
COPY --from=base --chown=onlyoffice:onlyoffice /app/onlyoffice/config/* /app/onlyoffice/config/
EXPOSE 5050
ENTRYPOINT ["python3", "docker-entrypoint.py"]
## Nginx image ##
FROM nginx AS proxy
ARG SRC_PATH
ARG BUILD_PATH
ARG COUNT_WORKER_CONNECTIONS=1024
ENV DNS_NAMESERVER=127.0.0.11 \
COUNT_WORKER_CONNECTIONS=$COUNT_WORKER_CONNECTIONS \
MAP_HASH_BUCKET_SIZE=""
RUN apt-get -y update && \
apt-get install -yq vim && \
rm -rf /var/lib/apt/lists/* && \
rm -rf /usr/share/nginx/html/*
# copy static service files and config values
COPY --from=base /etc/nginx/conf.d /etc/nginx/conf.d
COPY --from=base /etc/nginx/includes /etc/nginx/includes
COPY /config/nginx/templates/upstream.conf.template /etc/nginx/templates/upstream.conf.template
COPY /config/nginx/templates/nginx.conf.template /etc/nginx/nginx.conf.template
COPY prepare-nginx-proxy.sh /docker-entrypoint.d/prepare-nginx-proxy.sh
# add default user and group for non-root run
RUN chown nginx:nginx /etc/nginx/* -R && \
chown nginx:nginx /docker-entrypoint.d/* && \
# changes for upstream configuration
sed -i 's/127.0.0.1:5010/$service_api_system/' /etc/nginx/conf.d/onlyoffice.conf && \
sed -i 's/127.0.0.1:5012/$service_backup/' /etc/nginx/conf.d/onlyoffice.conf && \
sed -i 's/127.0.0.1:5007/$service_files/' /etc/nginx/conf.d/onlyoffice.conf && \
sed -i 's/127.0.0.1:5004/$service_people_server/' /etc/nginx/conf.d/onlyoffice.conf && \
sed -i 's/127.0.0.1:5000/$service_api/' /etc/nginx/conf.d/onlyoffice.conf && \
sed -i 's/127.0.0.1:5003/$service_studio/' /etc/nginx/conf.d/onlyoffice.conf && \
sed -i 's/127.0.0.1:9899/$service_socket/' /etc/nginx/conf.d/onlyoffice.conf && \
sed -i 's/127.0.0.1:9834/$service_sso/' /etc/nginx/conf.d/onlyoffice.conf && \
sed -i 's/127.0.0.1:9999/$service_urlshortener/' /etc/nginx/conf.d/onlyoffice.conf && \
sed -i 's/127.0.0.1:5034/$service_migration/' /etc/nginx/conf.d/onlyoffice.conf && \
sed -i 's/127.0.0.1:5013/$service_doceditor/' /etc/nginx/conf.d/onlyoffice.conf && \
sed -i 's/127.0.0.1:5011/$service_login/' /etc/nginx/conf.d/onlyoffice.conf && \
sed -i 's/127.0.0.1:5001/$service_client/' /etc/nginx/conf.d/onlyoffice.conf && \
sed -i 's/172.*/$document_server;/' /etc/nginx/conf.d/onlyoffice.conf
## ASC.Data.Backup.BackgroundTasks ##
FROM dotnetrun AS backup_background
WORKDIR ${BUILD_PATH}/services/ASC.Data.Backup.BackgroundTasks/
COPY --chown=onlyoffice:onlyoffice docker-entrypoint.py ./docker-entrypoint.py
COPY --from=base --chown=onlyoffice:onlyoffice ${BUILD_PATH}/services/ASC.Data.Backup.BackgroundTasks/service/ .
CMD ["ASC.Data.Backup.BackgroundTasks.dll", "ASC.Data.Backup.BackgroundTasks"]
## ASC.ClearEvents ##
FROM dotnetrun AS clear-events
WORKDIR ${BUILD_PATH}/services/ASC.ClearEvents/
COPY --chown=onlyoffice:onlyoffice docker-entrypoint.py ./docker-entrypoint.py
COPY --from=base --chown=onlyoffice:onlyoffice ${BUILD_PATH}/services/ASC.ClearEvents/service/ .
CMD ["ASC.ClearEvents.dll", "ASC.ClearEvents"]
## ASC.Migration ##
FROM dotnetrun AS migration
WORKDIR ${BUILD_PATH}/services/ASC.Migration/
COPY --chown=onlyoffice:onlyoffice docker-entrypoint.py ./docker-entrypoint.py
COPY --from=base --chown=onlyoffice:onlyoffice ${BUILD_PATH}/services/ASC.Migration/service/ .
CMD ["ASC.Migration.dll", "ASC.Migration"]
## ASC.Data.Backup ##
FROM dotnetrun AS backup
WORKDIR ${BUILD_PATH}/services/ASC.Data.Backup/
COPY --chown=onlyoffice:onlyoffice docker-entrypoint.py ./docker-entrypoint.py
COPY --from=base --chown=onlyoffice:onlyoffice ${BUILD_PATH}/services/ASC.Data.Backup/service/ .
CMD ["ASC.Data.Backup.dll", "ASC.Data.Backup"]
## ASC.Files ##
FROM dotnetrun AS files
WORKDIR ${BUILD_PATH}/products/ASC.Files/server/
COPY --chown=onlyoffice:onlyoffice docker-entrypoint.py ./docker-entrypoint.py
COPY --from=base --chown=onlyoffice:onlyoffice ${BUILD_PATH}/products/ASC.Files/server/ .
CMD ["ASC.Files.dll", "ASC.Files"]
## ASC.Files.Service ##
FROM dotnetrun AS files_services
WORKDIR ${BUILD_PATH}/products/ASC.Files/service/
COPY --chown=onlyoffice:onlyoffice docker-entrypoint.py ./docker-entrypoint.py
COPY --from=base --chown=onlyoffice:onlyoffice ${BUILD_PATH}/services/ASC.Files.Service/service/ .
CMD ["ASC.Files.Service.dll", "ASC.Files.Service"]
## ASC.Notify ##
FROM dotnetrun AS notify
WORKDIR ${BUILD_PATH}/services/ASC.Notify/service
COPY --chown=onlyoffice:onlyoffice docker-entrypoint.py ./docker-entrypoint.py
COPY --from=base --chown=onlyoffice:onlyoffice ${BUILD_PATH}/services/ASC.Notify/service/ .
CMD ["ASC.Notify.dll", "ASC.Notify"]
## ASC.People ##
FROM dotnetrun AS people_server
WORKDIR ${BUILD_PATH}/products/ASC.People/server/
COPY --chown=onlyoffice:onlyoffice docker-entrypoint.py ./docker-entrypoint.py
COPY --from=base --chown=onlyoffice:onlyoffice ${BUILD_PATH}/products/ASC.People/server/ .
CMD ["ASC.People.dll", "ASC.People"]
## ASC.Socket.IO ##
FROM noderun AS socket
WORKDIR ${BUILD_PATH}/services/ASC.Socket.IO/
COPY --chown=onlyoffice:onlyoffice docker-entrypoint.py ./docker-entrypoint.py
COPY --from=base --chown=onlyoffice:onlyoffice ${BUILD_PATH}/services/ASC.Socket.IO/service/ .
CMD ["server.js", "ASC.Socket.IO"]
## ASC.SsoAuth ##
FROM noderun AS ssoauth
WORKDIR ${BUILD_PATH}/services/ASC.SsoAuth/
COPY --chown=onlyoffice:onlyoffice docker-entrypoint.py ./docker-entrypoint.py
COPY --from=base --chown=onlyoffice:onlyoffice ${BUILD_PATH}/services/ASC.SsoAuth/service/ .
CMD ["app.js", "ASC.SsoAuth"]
## ASC.Studio.Notify ##
FROM dotnetrun AS studio_notify
WORKDIR ${BUILD_PATH}/services/ASC.Studio.Notify/service/
COPY --chown=onlyoffice:onlyoffice docker-entrypoint.py ./docker-entrypoint.py
COPY --from=base --chown=onlyoffice:onlyoffice ${BUILD_PATH}/services/ASC.Studio.Notify/service/ .
CMD ["ASC.Studio.Notify.dll", "ASC.Studio.Notify"]
## ASC.TelegramService ##
FROM dotnetrun AS telegram_service
WORKDIR ${BUILD_PATH}/services/ASC.TelegramService/
COPY --chown=onlyoffice:onlyoffice docker-entrypoint.py ./docker-entrypoint.py
COPY --from=base --chown=onlyoffice:onlyoffice ${BUILD_PATH}/services/ASC.TelegramService/service/ .
CMD ["ASC.TelegramService.dll", "ASC.TelegramService"]
## ASC.UrlShortener ##
FROM noderun AS urlshortener
WORKDIR ${BUILD_PATH}/services/ASC.UrlShortener/service/
COPY --chown=onlyoffice:onlyoffice docker-entrypoint.py ./docker-entrypoint.py
COPY --from=base --chown=onlyoffice:onlyoffice ${BUILD_PATH}/services/ASC.UrlShortener/service/ .
CMD ["index.js", "ASC.UrlShortener"]
## ASC.Web.Api ##
FROM dotnetrun AS api
WORKDIR ${BUILD_PATH}/studio/ASC.Web.Api/
COPY --chown=onlyoffice:onlyoffice docker-entrypoint.py ./docker-entrypoint.py
COPY --from=base --chown=onlyoffice:onlyoffice ${BUILD_PATH}/services/ASC.Web.Api/service/ .
CMD ["ASC.Web.Api.dll", "ASC.Web.Api"]
## ASC.Webhooks.Service ##
FROM dotnetrun AS webhooks-service
WORKDIR ${BUILD_PATH}/services/ASC.Webhooks.Service/
COPY --chown=onlyoffice:onlyoffice docker-entrypoint.py ./docker-entrypoint.py
COPY --from=base --chown=onlyoffice:onlyoffice ${BUILD_PATH}/services/ASC.Webhooks.Service/service/ .
CMD ["ASC.Webhooks.Service.dll", "ASC.Webhooks.Service"]
## ASC.Web.Studio ##
FROM dotnetrun AS studio
WORKDIR ${BUILD_PATH}/studio/ASC.Web.Studio/
COPY --chown=onlyoffice:onlyoffice docker-entrypoint.py ./docker-entrypoint.py
COPY --from=base --chown=onlyoffice:onlyoffice ${BUILD_PATH}/services/ASC.Web.Studio/service/ .
CMD ["ASC.Web.Studio.dll", "ASC.Web.Studio"]
## ASC.Migration.Runner ##
FROM $DOTNET_RUN AS onlyoffice-migration-runner
ARG BUILD_PATH
ARG SRC_PATH
ENV BUILD_PATH=${BUILD_PATH}
ENV SRC_PATH=${SRC_PATH}
WORKDIR ${BUILD_PATH}/services/ASC.Migration.Runner/
COPY ./docker-migration-entrypoint.sh ./docker-migration-entrypoint.sh
COPY --from=base ${SRC_PATH}/ASC.Migration.Runner/service/ .
ENTRYPOINT ["./docker-migration-entrypoint.sh"]
## image for k8s bin-share ##
FROM busybox:latest AS bin_share
RUN mkdir -p /app/appserver/ASC.Files/server && \
mkdir -p /app/appserver/ASC.People/server/ && \
addgroup --system --gid 107 onlyoffice && \
adduser -u 104 onlyoffice --home /var/www/onlyoffice --system -G onlyoffice
COPY bin-share-docker-entrypoint.sh /app/docker-entrypoint.sh
COPY --from=base /var/www/products/ASC.Files/server/ /app/appserver/ASC.Files/server/
COPY --from=base /var/www/products/ASC.People/server/ /app/appserver/ASC.People/server/
ENTRYPOINT ["./app/docker-entrypoint.sh"]
## image for k8s wait-bin-share ##
FROM busybox:latest AS wait_bin_share
RUN mkdir /app
COPY wait-bin-share-docker-entrypoint.sh /app/docker-entrypoint.sh
ENTRYPOINT ["./app/docker-entrypoint.sh"]

View File

@ -0,0 +1,142 @@
version: "3.8"
services:
onlyoffice-backup-background-tasks:
build:
context: ./
dockerfile: "${DOCKERFILE}"
target: backup_background
image: "${REPO}/${DOCKER_IMAGE_PREFIX}-backup-background:${DOCKER_TAG}"
onlyoffice-clear-events:
build:
context: ./
dockerfile: "${DOCKERFILE}"
target: clear-events
image: "${REPO}/${DOCKER_IMAGE_PREFIX}-clear-events:${DOCKER_TAG}"
onlyoffice-migration:
build:
context: ./
dockerfile: "${DOCKERFILE}"
target: migration
image: "${REPO}/${DOCKER_IMAGE_PREFIX}-migration:${DOCKER_TAG}"
onlyoffice-backup:
build:
context: ./
dockerfile: "${DOCKERFILE}"
target: backup
image: "${REPO}/${DOCKER_IMAGE_PREFIX}-backup:${DOCKER_TAG}"
onlyoffice-files:
build:
context: ./
dockerfile: "${DOCKERFILE}"
target: files
image: "${REPO}/${DOCKER_IMAGE_PREFIX}-files:${DOCKER_TAG}"
onlyoffice-files-services:
build:
context: ./
dockerfile: "${DOCKERFILE}"
target: files_services
image: "${REPO}/${DOCKER_IMAGE_PREFIX}-files-services:${DOCKER_TAG}"
onlyoffice-notify:
build:
context: ./
dockerfile: "${DOCKERFILE}"
target: notify
image: "${REPO}/${DOCKER_IMAGE_PREFIX}-notify:${DOCKER_TAG}"
onlyoffice-people-server:
build:
context: ./
dockerfile: "${DOCKERFILE}"
target: people_server
image: "${REPO}/${DOCKER_IMAGE_PREFIX}-people-server:${DOCKER_TAG}"
onlyoffice-socket:
build:
context: ./
dockerfile: "${DOCKERFILE}"
target: socket
image: "${REPO}/${DOCKER_IMAGE_PREFIX}-socket:${DOCKER_TAG}"
onlyoffice-studio-notify:
build:
context: ./
dockerfile: "${DOCKERFILE}"
target: studio_notify
image: "${REPO}/${DOCKER_IMAGE_PREFIX}-studio-notify:${DOCKER_TAG}"
onlyoffice-telegram-service:
build:
context: ./
dockerfile: "${DOCKERFILE}"
target: telegram_service
image: "${REPO}/${DOCKER_IMAGE_PREFIX}-telegram-service:${DOCKER_TAG}"
onlyoffice-urlshortener:
build:
context: ./
dockerfile: "${DOCKERFILE}"
target: urlshortener
image: "${REPO}/${DOCKER_IMAGE_PREFIX}-urlshortener:${DOCKER_TAG}"
onlyoffice-api:
build:
context: ./
dockerfile: "${DOCKERFILE}"
target: api
image: "${REPO}/${DOCKER_IMAGE_PREFIX}-api:${DOCKER_TAG}"
onlyoffice-studio:
build:
context: ./
dockerfile: "${DOCKERFILE}"
target: studio
image: "${REPO}/${DOCKER_IMAGE_PREFIX}-studio:${DOCKER_TAG}"
onlyoffice-ssoauth:
build:
context: ./
dockerfile: "${DOCKERFILE}"
target: ssoauth
image: "${REPO}/${DOCKER_IMAGE_PREFIX}-ssoauth:${DOCKER_TAG}"
onlyoffice-webhooks-service:
build:
context: ./
dockerfile: "${DOCKERFILE}"
target: webhooks-service
image: "${REPO}/${DOCKER_IMAGE_PREFIX}-webhooks-service:${DOCKER_TAG}"
onlyoffice-bin-share:
build:
context: ./
dockerfile: "${DOCKERFILE}"
target: bin_share
image: "${REPO}/${DOCKER_IMAGE_PREFIX}-bin-share:${DOCKER_TAG}"
onlyoffice-wait-bin-share:
build:
context: ./
dockerfile: "${DOCKERFILE}"
target: wait_bin_share
image: "${REPO}/${DOCKER_IMAGE_PREFIX}-wait-bin-share:${DOCKER_TAG}"
onlyoffice-proxy:
build:
context: ./
dockerfile: "${DOCKERFILE}"
target: proxy
image: "${REPO}/${DOCKER_IMAGE_PREFIX}-proxy:${DOCKER_TAG}"
onlyoffice-migration-runner:
build:
context: ./
dockerfile: "${DOCKERFILE}"
target: onlyoffice-migration-runner
image: "${REPO}/${DOCKER_IMAGE_PREFIX}-migration-runner:${DOCKER_TAG}"

View File

@ -10,6 +10,11 @@ map $SERVICE_DOCEDITOR $service_doceditor {
$SERVICE_DOCEDITOR $SERVICE_DOCEDITOR;
}
map $SERVICE_CLIENT $service_client {
volatile;
$SERVICE_CLIENT $SERVICE_CLIENT;
}
map $SERVICE_MIGRATION $service_migration {
volatile;
$SERVICE_MIGRATION $SERVICE_MIGRATION;

View File

@ -2,7 +2,7 @@ version: "3.8"
services:
onlyoffice-mysql-server:
image: mysql:${MYSQL_VERSION}
image: ${MYSQL_IMAGE}
command: --default-authentication-plugin=mysql_native_password
cap_add:
- SYS_NICE
@ -21,13 +21,13 @@ services:
- mysql_data:/var/lib/mysql
- ./config/mysql/conf.d/:/etc/mysql/conf.d
networks:
- ${NETWORK_NAME}
- ${NETWORK_NAME}
tmpfs: /var/log/mysql/
networks:
onlyoffice:
name: ${NETWORK_NAME}
driver: 'bridge'
driver: "bridge"
volumes:
mysql_data:

View File

@ -0,0 +1,198 @@
version: "3.8"
x-service: &x-service-base
container_name: base
restart: always
expose:
- ${SERVICE_PORT}
environment:
MYSQL_HOST: ${MYSQL_HOST}
MYSQL_ROOT_PASSWORD: ${MYSQL_ROOT_PASSWORD}
MYSQL_DATABASE: ${MYSQL_DATABASE}
MYSQL_USER: ${MYSQL_USER}
MYSQL_PASSWORD: ${MYSQL_PASSWORD}
DATABASE_MIGRATION: ${DATABASE_MIGRATION}
APP_DOTNET_ENV: ${APP_DOTNET_ENV}
APP_CORE_BASE_DOMAIN: ${APP_CORE_BASE_DOMAIN}
APP_CORE_MACHINEKEY: ${APP_CORE_MACHINEKEY}
APP_URL_PORTAL: ${APP_URL_PORTAL}
DOCUMENT_SERVER_JWT_SECRET: ${DOCUMENT_SERVER_JWT_SECRET}
DOCUMENT_SERVER_JWT_HEADER: ${DOCUMENT_SERVER_JWT_HEADER}
DOCUMENT_SERVER_URL_PUBLIC: ${DOCUMENT_SERVER_URL_PUBLIC}
DOCUMENT_SERVER_URL_INTERNAL: ${DOCUMENT_SERVER_URL_INTERNAL}
KAFKA_HOST: ${KAFKA_HOST}
ELK_HOST: ${ELK_HOST}
PROXY_HOST: ${PROXY_HOST}
volumes:
#- /app/onlyoffice/CommunityServer/data:/app/onlyoffice/data
- app_data:/app/onlyoffice/data
- files_data:/var/www/products/ASC.Files/server/
- people_data:/var/www/products/ASC.People/server/
services:
onlyoffice-elasticsearch:
image: docker.elastic.co/elasticsearch/elasticsearch:${ELK_VERSION}
container_name: ${ELK_HOST}
restart: always
environment:
- discovery.type=single-node
- bootstrap.memory_lock=true
- "ES_JAVA_OPTS=-Xms512m -Xmx512m"
ulimits:
memlock:
soft: -1
hard: -1
nofile:
soft: 65535
hard: 65535
volumes:
- es_data:/usr/share/elasticsearch/data
expose:
- "9200"
- "9300"
onlyoffice-backup-background-tasks:
<<: *x-service-base
image: "${REPO}/${DOCKER_IMAGE_PREFIX}-backup-background:${DOCKER_TAG}"
container_name: ${BACKUP_BACKGRUOND_TASKS_HOST}
onlyoffice-backup:
<<: *x-service-base
image: "${REPO}/${DOCKER_IMAGE_PREFIX}-backup:${DOCKER_TAG}"
container_name: ${BACKUP_HOST}
onlyoffice-clear-events:
<<: *x-service-base
image: "${REPO}/${DOCKER_IMAGE_PREFIX}-clear-events:${DOCKER_TAG}"
container_name: ${CLEAR_EVENTS_HOST}
onlyoffice-migration:
<<: *x-service-base
image: "${REPO}/${DOCKER_IMAGE_PREFIX}-migration:${DOCKER_TAG}"
container_name: ${MIGRATION_HOST}
onlyoffice-files:
<<: *x-service-base
image: "${REPO}/${DOCKER_IMAGE_PREFIX}-files:${DOCKER_TAG}"
container_name: ${FILES_HOST}
onlyoffice-files-services:
<<: *x-service-base
image: "${REPO}/${DOCKER_IMAGE_PREFIX}-files-services:${DOCKER_TAG}"
container_name: ${FILES_SERVICES_HOST}
onlyoffice-people-server:
<<: *x-service-base
image: "${REPO}/${DOCKER_IMAGE_PREFIX}-people-server:${DOCKER_TAG}"
container_name: ${PEOPLE_SERVER_HOST}
onlyoffice-socket:
<<: *x-service-base
image: "${REPO}/${DOCKER_IMAGE_PREFIX}-socket:${DOCKER_TAG}"
container_name: ${SOCKET_HOST}
expose:
- ${SERVICE_PORT}
onlyoffice-studio-notify:
<<: *x-service-base
image: "${REPO}/${DOCKER_IMAGE_PREFIX}-studio-notify:${DOCKER_TAG}"
container_name: ${STUDIO_NOTIFY_HOST}
onlyoffice-telegram-service:
<<: *x-service-base
image: "${REPO}/${DOCKER_IMAGE_PREFIX}-telegram-service:${DOCKER_TAG}"
container_name: ${TELEGRAM_SERVICE_HOST}
onlyoffice-urlshortener:
<<: *x-service-base
image: "${REPO}/${DOCKER_IMAGE_PREFIX}-urlshortener:${DOCKER_TAG}"
container_name: ${URLSHORTENER_HOST}
expose:
- ${SERVICE_PORT}
- "9999"
onlyoffice-api:
<<: *x-service-base
image: "${REPO}/${DOCKER_IMAGE_PREFIX}-api:${DOCKER_TAG}"
container_name: ${API_HOST}
onlyoffice-studio:
<<: *x-service-base
image: "${REPO}/${DOCKER_IMAGE_PREFIX}-studio:${DOCKER_TAG}"
container_name: ${STUDIO_HOST}
onlyoffice-ssoauth:
<<: *x-service-base
image: "${REPO}/${DOCKER_IMAGE_PREFIX}-ssoauth:${DOCKER_TAG}"
container_name: ${SSOAUTH_HOST}
expose:
- ${SERVICE_PORT}
- "9834"
onlyoffice-webhooks-service:
<<: *x-service-base
image: "${REPO}/${DOCKER_IMAGE_PREFIX}-webhooks-service:${DOCKER_TAG}"
container_name: ${WEBHOOKS_SERVICE_HOST}
onlyoffice-proxy:
image: "${REPO}/${DOCKER_IMAGE_PREFIX}-proxy:${DOCKER_TAG}"
container_name: ${PROXY_HOST}
restart: always
expose:
- "8081"
- "8099"
- "8092"
ports:
- 8092:8092
depends_on:
- onlyoffice-backup-background-tasks
- onlyoffice-backup
- onlyoffice-clear-events
- onlyoffice-migration
- onlyoffice-webhooks-service
- onlyoffice-files
- onlyoffice-files-services
- onlyoffice-people-server
- onlyoffice-socket
- onlyoffice-studio-notify
- onlyoffice-telegram-service
- onlyoffice-urlshortener
- onlyoffice-api
- onlyoffice-studio
- onlyoffice-ssoauth
environment:
- SERVICE_BACKUP=${SERVICE_BACKUP}
- SERVICE_FILES=${SERVICE_FILES}
- SERVICE_FILES_SERVICES=${SERVICE_FILES_SERVICES}
- SERVICE_CLEAR_EVENTS=${SERVICE_CLEAR_EVENTS}
- SERVICE_MIGRATION=${SERVICE_MIGRATION}
- SERVICE_WEBHOOKS_SERVICE=${SERVICE_WEBHOOKS_SERVICE}
- SERVICE_NOTIFY=${SERVICE_NOTIFY}
- SERVICE_PEOPLE_SERVER=${SERVICE_PEOPLE_SERVER}
- SERVICE_SOCKET=${SERVICE_SOCKET}
- SERVICE_STUDIO_NOTIFY=${SERVICE_STUDIO_NOTIFY}
- SERVICE_TELEGRAM_SERVICE=${SERVICE_TELEGRAM_SERVICE}
- SERVICE_URLSHORTENER=${SERVICE_URLSHORTENER}
- SERVICE_API=${SERVICE_API}
- SERVICE_STUDIO=${SERVICE_STUDIO}
- SERVICE_SSOAUTH=${SERVICE_SSOAUTH}
- SERVICE_DOCEDITOR=${SERVICE_DOCEDITOR}
- SERVICE_LOGIN=${SERVICE_LOGIN}
- SERVICE_CLIENT=${SERVICE_CLIENT}
- DOCUMENT_SERVER=${DOCUMENT_SERVER_HOST}
- SERVICE_PORT=${SERVICE_PORT}
- GIT_BRANCH=${GIT_BRANCH}
- ROOT_DIR=${ROOT_DIR}
volumes:
- proxy_log:/var/log/nginx
- ${ROOT_DIR}/public:/var/www/public
networks:
default:
external:
name: ${NETWORK_NAME}
volumes:
es_data:
proxy_log:
app_data:
files_data:
people_data:

View File

@ -1,10 +0,0 @@
#!/bin/bash
rd="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
echo "Run script directory:" $rd
dir=$(builtin cd $rd/../../../; pwd)
echo "Root directory:" $dir
# Web API Root
dotnet $dir/web/ASC.Web.Api/bin/Debug/ASC.Web.Api.dll urls=http://0.0.0.0:5000 $STORAGE_ROOT=$dir/Data log:dir=$dir/Logs log:name=api pathToConf=$dir/config core:products:folder=$dir/products

View File

@ -1,10 +0,0 @@
#!/bin/bash
dir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
echo "Root directory:" $dir
$dir/api.sh &
$dir/studio.sh &
$dir/people.sh &
$dir/files.sh &
$dir/files.service.sh &

View File

@ -1,20 +0,0 @@
version: "3.8"
services:
rhonda_rabbitmq:
image: rabbitmq:3-management-alpine
container_name: "rabbitmq"
ports:
- 5672:5672
- 15672:15672
volumes:
- ~/.container-data/rabbitmq/data/:/var/lib/rabbitmq/
- ~/.container-data/rabbitmq/log/:/var/log/rabbitmq
redis:
image: redis:latest
volumes:
- ~/.container-data/redis/log/:/var/log/redis
ports:
- 6379:6379
volumes:
container-data:

View File

@ -1,11 +0,0 @@
#!/bin/bash
rd="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
echo "Run script directory:" $rd
dir=$(builtin cd $rd/../../../; pwd)
echo "Root directory:" $dir
# Web API Files.Service
# set servicepath=%cd%\products\ASC.Files\Service\bin\Debug\ASC.Files.Service.exe urls=http://0.0.0.0:5009 $STORAGE_ROOT=%cd%\Data log:dir=%cd%\Logs log:name=files.service pathToConf=%cd%\config core:products:folder=%cd%\products
dotnet $dir/products/ASC.Files/Service/bin/Debug/ASC.Files.Service.dll urls=http://0.0.0.0:5009 $STORAGE_ROOT=$dir/Data log:dir=$dir/Logs log:name=files.service pathToConf=$dir/config core:products:folder=$dir/products

View File

@ -1,11 +0,0 @@
#!/bin/bash
rd="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
echo "Run script directory:" $rd
dir=$(builtin cd $rd/../../../; pwd)
echo "Root directory:" $dir
# Web API Files
# set servicepath=%cd%\products\ASC.Files\Server\bin\Debug\ASC.Files.exe urls=http://0.0.0.0:5007 $STORAGE_ROOT=%cd%\Data log:dir=%cd%\Logs log:name=files pathToConf=%cd%\config core:products:folder=%cd%\products
dotnet $dir/products/ASC.Files/Server/bin/Debug/ASC.Files.dll urls=http://0.0.0.0:5007 $STORAGE_ROOT=$dir/Data log:dir=$dir/Logs log:name=files pathToConf=$dir/config core:products:folder=$dir/products

View File

@ -1,11 +0,0 @@
#!/bin/bash
rd="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
echo "Run script directory:" $rd
dir=$(builtin cd $rd/../../../; pwd)
echo "Root directory:" $dir
# Web API People
# set servicepath=%cd%\products\ASC.People\Server\bin\Debug\ASC.People.exe urls=http://0.0.0.0:5004 $STORAGE_ROOT=%cd%\Data log:dir=%cd%\Logs log:name=people pathToConf=%cd%\config core:products:folder=%cd%\products
dotnet $dir/products/ASC.People/Server/bin/Debug/ASC.People.dll urls=http://0.0.0.0:5004 $STORAGE_ROOT=$dir/Data log:dir=$dir/Logs log:name=people pathToConf=$dir/config core:products:folder=$dir/products

View File

@ -1,10 +0,0 @@
#!/bin/bash
rd="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
echo "Run script directory:" $rd
dir=$(builtin cd $rd/../../../; pwd)
echo "Root directory:" $dir
# Web Socket IO
node $dir/common/ASC.Socket.IO/server.js --logPath=$dir/Logs

View File

@ -1,11 +0,0 @@
#!/bin/bash
rd="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
echo "Run script directory:" $rd
dir=$(builtin cd $rd/../../../; pwd)
echo "Root directory:" $dir
# Web Studio
# set servicepath=%cd%\web\ASC.Web.Studio\bin\Debug\ASC.Web.Studio.exe urls=http://0.0.0.0:5003 $STORAGE_ROOT=%cd%\Data log:dir=%cd%\Logs log:name=studio pathToConf=%cd%\config core:products:folder=%cd%\products
dotnet $dir/web/ASC.Web.Studio/bin/Debug/ASC.Web.Studio.dll urls=http://0.0.0.0:5003 $STORAGE_ROOT=$dir/Data log:dir=$dir/Logs log:name=studio pathToConf=$dir/config core:products:folder=$dir/products

View File

@ -29,6 +29,7 @@
<ItemGroup>
<ProjectReference Include="..\..\web\ASC.Web.Core\ASC.Web.Core.csproj" />
<ProjectReference Include="..\ASC.EventBus.ActiveMQ\ASC.EventBus.ActiveMQ.csproj" />
<ProjectReference Include="..\ASC.EventBus.Extensions.Logger\ASC.EventBus.Extensions.Logger.csproj" />
<ProjectReference Include="..\ASC.EventBus.RabbitMQ\ASC.EventBus.RabbitMQ.csproj" />
<ProjectReference Include="..\ASC.Webhooks.Core\ASC.Webhooks.Core.csproj" />

View File

@ -54,6 +54,8 @@ public static class ConfigurationManagerExtension
.AddJsonFile($"kafka.{env.EnvironmentName}.json", optional: true, reloadOnChange: true)
.AddJsonFile("rabbitmq.json", optional: true, reloadOnChange: true)
.AddJsonFile($"rabbitmq.{env.EnvironmentName}.json", optional: true, reloadOnChange: true)
.AddJsonFile("activemq.json", optional: true, reloadOnChange: true)
.AddJsonFile($"activemq.{env.EnvironmentName}.json", optional: true, reloadOnChange: true)
.AddJsonFile("redis.json", optional: true, reloadOnChange: true)
.AddJsonFile($"redis.{env.EnvironmentName}.json", optional: true, reloadOnChange: true);

View File

@ -75,6 +75,7 @@ public static class ServiceCollectionExtension
services.AddSingleton<IEventBusSubscriptionsManager, InMemoryEventBusSubscriptionsManager>();
var rabbitMQConfiguration = configuration.GetSection("RabbitMQ").Get<RabbitMQSettings>();
var activeMQConfiguration = configuration.GetSection("ActiveMQ").Get<ActiveMQSettings>();
if (rabbitMQConfiguration != null)
{
@ -154,6 +155,54 @@ public static class ServiceCollectionExtension
return new EventBusRabbitMQ(rabbitMQPersistentConnection, logger, iLifetimeScope, eventBusSubcriptionsManager, serializer, subscriptionClientName, retryCount);
});
}
else if (activeMQConfiguration != null)
{
services.AddSingleton<IActiveMQPersistentConnection>(sp =>
{
var cfg = sp.GetRequiredService<IConfiguration>();
var logger = sp.GetRequiredService<ILogger<DefaultActiveMQPersistentConnection>>();
var factory = new Apache.NMS.NMSConnectionFactory(activeMQConfiguration.Uri);
var retryCount = 5;
if (!string.IsNullOrEmpty(cfg["core:eventBus:connectRetryCount"]))
{
retryCount = int.Parse(cfg["core:eventBus:connectRetryCount"]);
}
return new DefaultActiveMQPersistentConnection(factory, logger, retryCount);
});
services.AddSingleton<IEventBus, EventBusActiveMQ>(sp =>
{
var cfg = sp.GetRequiredService<IConfiguration>();
var activeMQPersistentConnection = sp.GetRequiredService<IActiveMQPersistentConnection>();
var iLifetimeScope = sp.GetRequiredService<ILifetimeScope>();
var logger = sp.GetRequiredService<ILogger<EventBusActiveMQ>>();
var eventBusSubcriptionsManager = sp.GetRequiredService<IEventBusSubscriptionsManager>();
var serializer = new EventBus.Serializers.ProtobufSerializer();
var subscriptionClientName = "asc_event_bus_default_queue";
if (!string.IsNullOrEmpty(cfg["core:eventBus:subscriptionClientName"]))
{
subscriptionClientName = cfg["core:eventBus:subscriptionClientName"];
}
var retryCount = 5;
if (!string.IsNullOrEmpty(cfg["core:eventBus:connectRetryCount"]))
{
retryCount = int.Parse(cfg["core:eventBus:connectRetryCount"]);
}
return new EventBusActiveMQ(activeMQPersistentConnection, logger, iLifetimeScope, eventBusSubcriptionsManager, serializer, subscriptionClientName, retryCount);
});
}
else
{
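
For context, a minimal sketch of how a service might consume the bus registered above once the container is built. The event and handler names are hypothetical; only the IEventBus calls (Publish, Subscribe) and the IIntegrationEventHandler<T>.Handle signature are taken from the code in this commit, and the handler is assumed to be registered in DI so the Autofac scope can resolve it.

// hypothetical event type; assumes IntegrationEvent exposes a parameterless constructor
public class SampleIntegrationEvent : IntegrationEvent { }

public class SampleIntegrationEventHandler : IIntegrationEventHandler<SampleIntegrationEvent>
{
    public Task Handle(SampleIntegrationEvent @event)
    {
        // react to the message delivered over ActiveMQ (or RabbitMQ, whichever backend is configured)
        return Task.CompletedTask;
    }
}

// somewhere after the service provider is built:
var eventBus = serviceProvider.GetRequiredService<IEventBus>();
eventBus.Subscribe<SampleIntegrationEvent, SampleIntegrationEventHandler>();
eventBus.Publish(new SampleIntegrationEvent());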

View File

@ -144,3 +144,7 @@ global using StackExchange.Redis.Extensions.Core.Configuration;
global using StackExchange.Redis.Extensions.Newtonsoft;
global using LogLevel = Microsoft.Extensions.Logging.LogLevel;
global using ASC.Common.Caching.Settings;
global using ASC.EventBus.ActiveMQ;

View File

@ -232,17 +232,4 @@ public class RabbitMQCache<T> : IDisposable, ICacheNotify<T> where T : IMessage<
_disposed = true;
}
}
public class RabbitMQSettings
{
public string HostName { get; set; }
public string UserName { get; set; }
public string Password { get; set; }
public int Port { get; set; }
public string VirtualHost { get; set; }
public string Uri { get; set; }
public bool EnableSsl { get; set; }
public string SslServerName { get; set; }
public string SslCertPath { get; set; }
}
}

View File

@ -0,0 +1,31 @@
// (c) Copyright Ascensio System SIA 2010-2022
//
// This program is a free software product.
// You can redistribute it and/or modify it under the terms
// of the GNU Affero General Public License (AGPL) version 3 as published by the Free Software
// Foundation. In accordance with Section 7(a) of the GNU AGPL its Section 15 shall be amended
// to the effect that Ascensio System SIA expressly excludes the warranty of non-infringement of
// any third-party rights.
//
// This program is distributed WITHOUT ANY WARRANTY, without even the implied warranty
// of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. For details, see
// the GNU AGPL at: http://www.gnu.org/licenses/agpl-3.0.html
//
// You can contact Ascensio System SIA at Lubanas st. 125a-25, Riga, Latvia, EU, LV-1021.
//
// The interactive user interfaces in modified source and object code versions of the Program must
// display Appropriate Legal Notices, as required under Section 5 of the GNU AGPL version 3.
//
// Pursuant to Section 7(b) of the License you must retain the original Product logo when
// distributing the program. Pursuant to Section 7(e) we decline to grant you any rights under
// trademark law for use of our trademarks.
//
// All the Product's GUI elements, including illustrations and icon sets, as well as technical writing
// content are licensed under the terms of the Creative Commons Attribution-ShareAlike 4.0
// International. See the License terms at http://creativecommons.org/licenses/by-sa/4.0/legalcode
namespace ASC.Common.Caching.Settings;
public class ActiveMQSettings
{
public string Uri { get; set; }
}

View File

@ -0,0 +1,39 @@
// (c) Copyright Ascensio System SIA 2010-2022
//
// This program is a free software product.
// You can redistribute it and/or modify it under the terms
// of the GNU Affero General Public License (AGPL) version 3 as published by the Free Software
// Foundation. In accordance with Section 7(a) of the GNU AGPL its Section 15 shall be amended
// to the effect that Ascensio System SIA expressly excludes the warranty of non-infringement of
// any third-party rights.
//
// This program is distributed WITHOUT ANY WARRANTY, without even the implied warranty
// of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. For details, see
// the GNU AGPL at: http://www.gnu.org/licenses/agpl-3.0.html
//
// You can contact Ascensio System SIA at Lubanas st. 125a-25, Riga, Latvia, EU, LV-1021.
//
// The interactive user interfaces in modified source and object code versions of the Program must
// display Appropriate Legal Notices, as required under Section 5 of the GNU AGPL version 3.
//
// Pursuant to Section 7(b) of the License you must retain the original Product logo when
// distributing the program. Pursuant to Section 7(e) we decline to grant you any rights under
// trademark law for use of our trademarks.
//
// All the Product's GUI elements, including illustrations and icon sets, as well as technical writing
// content are licensed under the terms of the Creative Commons Attribution-ShareAlike 4.0
// International. See the License terms at http://creativecommons.org/licenses/by-sa/4.0/legalcode
namespace ASC.Common.Caching;
public class RabbitMQSettings
{
public string HostName { get; set; }
public string UserName { get; set; }
public string Password { get; set; }
public int Port { get; set; }
public string VirtualHost { get; set; }
public string Uri { get; set; }
public bool EnableSsl { get; set; }
public string SslServerName { get; set; }
public string SslCertPath { get; set; }
}

View File

@ -45,6 +45,8 @@ public class TariffServiceStorage
Notify = notify;
Notify.Subscribe((i) =>
{
Cache.Insert(TariffService.GetTariffNeedToUpdateCacheKey(i.TenantId), "update", _cacheExpiration);
Cache.Remove(TariffService.GetTariffCacheKey(i.TenantId));
Cache.Remove(TariffService.GetBillingUrlCacheKey(i.TenantId));
Cache.Remove(TariffService.GetBillingPaymentCacheKey(i.TenantId)); // clear all payments
@ -169,7 +171,11 @@ public class TariffService : ITariffService
{
tariff = GetBillingInfo(tenantId) ?? CreateDefault();
tariff = CalculateTariff(tenantId, tariff);
tariffId = tariff.Id;
if (string.IsNullOrEmpty(_cache.Get<string>(GetTariffNeedToUpdateCacheKey(tenantId))))
{
tariffId = tariff.Id;
}
if (_billingClient.Configured && withRequestToPaymentSystem)
{
@ -210,9 +216,9 @@ public class TariffService : ITariffService
if (SaveBillingInfo(tenantId, asynctariff))
{
asynctariff = CalculateTariff(tenantId, asynctariff);
tariff = asynctariff;
tariffId = asynctariff.Id;
}
tariffId = asynctariff.Id;
}
catch (BillingNotFoundException)
{
@ -237,9 +243,9 @@ public class TariffService : ITariffService
if (SaveBillingInfo(tenantId, asynctariff))
{
asynctariff = CalculateTariff(tenantId, asynctariff);
tariff = asynctariff;
tariffId = asynctariff.Id;
}
tariffId = asynctariff.Id;
}
catch (Exception error)
{
@ -367,6 +373,11 @@ public class TariffService : ITariffService
return $"{tenantId}:tariff";
}
internal static string GetTariffNeedToUpdateCacheKey(int tenantId)
{
return $"{tenantId}:update";
}
internal static string GetBillingUrlCacheKey(int tenantId)
{
return $"{tenantId}:billing:urls";

View File

@ -22,8 +22,8 @@
//
// All the Product's GUI elements, including illustrations and icon sets, as well as technical writing
// content are licensed under the terms of the Creative Commons Attribution-ShareAlike 4.0
// International. See the License terms at http://creativecommons.org/licenses/by-sa/4.0/legalcode
// International. See the License terms at http://creativecommons.org/licenses/by-sa/4.0/legalcode
namespace ASC.Data.Backup;
[Scope]
@ -40,8 +40,9 @@ public class BackupAjaxHandler
private readonly ConsumerFactory _consumerFactory;
private readonly BackupService _backupService;
private readonly TempPath _tempPath;
private readonly StorageFactory _storageFactory;
private const string BackupTempFolder = "backup";
private const string BackupTempModule = "backup_temp";
private const string BackupFileName = "backup.tmp";
#region backup
@ -57,7 +58,8 @@ public class BackupAjaxHandler
UserManager userManager,
TenantExtra tenantExtra,
ConsumerFactory consumerFactory,
TempPath tempPath)
TempPath tempPath,
StorageFactory storageFactory)
{
_tenantManager = tenantManager;
_messageService = messageService;
@ -69,7 +71,8 @@ public class BackupAjaxHandler
_tenantExtra = tenantExtra;
_consumerFactory = consumerFactory;
_backupService = backupService;
_tempPath = tempPath;
_tempPath = tempPath;
_storageFactory = storageFactory;
}
public void StartBackup(BackupStorageType storageType, Dictionary<string, string> storageParams)
@ -385,7 +388,8 @@ public class BackupAjaxHandler
public string GetTmpFilePath()
{
var folder = Path.Combine(_tempPath.GetTempPath(), BackupTempFolder, _tenantManager.GetCurrentTenant().Id.ToString());
var discStore = _storageFactory.GetStorage("", _tenantManager.GetCurrentTenant().Id.ToString(), BackupTempModule, null) as DiscDataStore;
var folder = discStore.GetPhysicalPath("", "");
if (!Directory.Exists(folder))
{

View File

@ -27,8 +27,7 @@
namespace ASC.Web.Studio.Core.Backup;
public class BackupFileUploadHandler
{
private const long MaxBackupFileSize = 1024L * 1024L * 1024L;
{
public BackupFileUploadHandler(RequestDelegate next)
{
@ -53,12 +52,6 @@ public class BackupFileUploadHandler
var file = context.Request.Form.Files[0];
if (file.Length <= 0 || file.Length > MaxBackupFileSize)
{
result = Error($"File size must be greater than 0 and less than {MaxBackupFileSize} bytes");
}
var filePath = backupAjaxHandler.GetTmpFilePath();
if (File.Exists(filePath))

View File

@ -72,6 +72,7 @@ global using ASC.Data.Backup.Tasks.Modules;
global using ASC.Data.Backup.Utils;
global using ASC.Data.Storage;
global using ASC.Data.Storage.Configuration;
global using ASC.Data.Storage.DiscStorage;
global using ASC.EventBus.Events;
global using ASC.Files.Core;
global using ASC.MessagingSystem.Core;
@ -103,6 +104,5 @@ global using Microsoft.Extensions.Logging;
global using MySql.Data.MySqlClient;
global using Newtonsoft.Json;
global using Newtonsoft.Json.Linq;
global using ProtoBuf;

View File

@ -0,0 +1,20 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net6.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>disable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Apache.NMS.AMQP" Version="2.0.0" />
<PackageReference Include="Polly" Version="7.2.3" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\ASC.Common\ASC.Common.csproj" />
<ProjectReference Include="..\ASC.EventBus\ASC.EventBus.csproj" />
</ItemGroup>
</Project>

View File

@ -0,0 +1,175 @@
// (c) Copyright Ascensio System SIA 2010-2022
//
// This program is a free software product.
// You can redistribute it and/or modify it under the terms
// of the GNU Affero General Public License (AGPL) version 3 as published by the Free Software
// Foundation. In accordance with Section 7(a) of the GNU AGPL its Section 15 shall be amended
// to the effect that Ascensio System SIA expressly excludes the warranty of non-infringement of
// any third-party rights.
//
// This program is distributed WITHOUT ANY WARRANTY, without even the implied warranty
// of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. For details, see
// the GNU AGPL at: http://www.gnu.org/licenses/agpl-3.0.html
//
// You can contact Ascensio System SIA at Lubanas st. 125a-25, Riga, Latvia, EU, LV-1021.
//
// The interactive user interfaces in modified source and object code versions of the Program must
// display Appropriate Legal Notices, as required under Section 5 of the GNU AGPL version 3.
//
// Pursuant to Section 7(b) of the License you must retain the original Product logo when
// distributing the program. Pursuant to Section 7(e) we decline to grant you any rights under
// trademark law for use of our trademarks.
//
// All the Product's GUI elements, including illustrations and icon sets, as well as technical writing
// content are licensed under the terms of the Creative Commons Attribution-ShareAlike 4.0
// International. See the License terms at http://creativecommons.org/licenses/by-sa/4.0/legalcode
using ASC.EventBus.ActiveMQ.Log;
namespace ASC.EventBus.ActiveMQ;
public class DefaultActiveMQPersistentConnection
: IActiveMQPersistentConnection
{
private readonly IConnectionFactory _connectionFactory;
private readonly ILogger<DefaultActiveMQPersistentConnection> _logger;
private readonly int _retryCount;
private IConnection _connection;
private bool _disposed;
readonly object sync_root = new object();
public DefaultActiveMQPersistentConnection(IConnectionFactory connectionFactory, ILogger<DefaultActiveMQPersistentConnection> logger, int retryCount = 5)
{
_connectionFactory = connectionFactory ?? throw new ArgumentNullException(nameof(connectionFactory));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_retryCount = retryCount;
}
public bool IsConnected
{
get
{
return _connection != null && _connection.IsStarted && !_disposed;
}
}
public ISession CreateSession()
{
return CreateSession(AcknowledgementMode.AutoAcknowledge);
}
public void Dispose()
{
if (_disposed)
{
return;
}
_disposed = true;
try
{
_connection.ExceptionListener -= OnExceptionListener;
_connection.ConnectionInterruptedListener -= OnConnectionInterruptedListener;
_connection.ConnectionResumedListener -= OnConnectionResumedListener;
_connection.Dispose();
}
catch (IOException ex)
{
_logger.CriticalDefaultActiveMQPersistentConnection(ex);
}
}
private void OnExceptionListener(Exception exception)
{
if (_disposed)
{
return;
}
_logger.WarningActiveMQConnectionThrowException();
TryConnect();
}
private void OnConnectionResumedListener()
{
if (_disposed)
{
return;
}
_logger.WarningActiveMQConnectionThrowException();
TryConnect();
}
private void OnConnectionInterruptedListener()
{
if (_disposed)
{
return;
}
_logger.WarningActiveMQConnectionThrowException();
TryConnect();
}
public bool TryConnect()
{
_logger.InformationActiveMQTryingConnect();
lock (sync_root)
{
var policy = Policy.Handle<SocketException>()
.WaitAndRetry(_retryCount, retryAttempt => TimeSpan.FromSeconds(Math.Pow(2, retryAttempt)), (ex, time) =>
{
_logger.WarningActiveMQCouldNotConnect(time.TotalSeconds, ex);
}
);
policy.Execute(() =>
{
_connection = _connectionFactory
.CreateConnection();
_connection.Start();
});
if (IsConnected)
{
_connection.ExceptionListener += OnExceptionListener;
_connection.ConnectionInterruptedListener += OnConnectionInterruptedListener;
_connection.ConnectionResumedListener += OnConnectionResumedListener;
if (_connection is Apache.NMS.AMQP.NmsConnection)
{
var hostname = ((Apache.NMS.AMQP.NmsConnection)_connection).ConnectionInfo.ConfiguredUri.Host;
_logger.InformationActiveMQAcquiredPersistentConnection(hostname);
}
return true;
}
else
{
_logger.CriticalActiveMQCouldNotBeCreated();
return false;
}
}
}
public ISession CreateSession(AcknowledgementMode acknowledgementMode)
{
if (!IsConnected)
{
throw new InvalidOperationException("No ActiveMQ connections are available to perform this action");
}
return _connection.CreateSession(acknowledgementMode);
}
}

View File

@ -0,0 +1,363 @@
// (c) Copyright Ascensio System SIA 2010-2022
//
// This program is a free software product.
// You can redistribute it and/or modify it under the terms
// of the GNU Affero General Public License (AGPL) version 3 as published by the Free Software
// Foundation. In accordance with Section 7(a) of the GNU AGPL its Section 15 shall be amended
// to the effect that Ascensio System SIA expressly excludes the warranty of non-infringement of
// any third-party rights.
//
// This program is distributed WITHOUT ANY WARRANTY, without even the implied warranty
// of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. For details, see
// the GNU AGPL at: http://www.gnu.org/licenses/agpl-3.0.html
//
// You can contact Ascensio System SIA at Lubanas st. 125a-25, Riga, Latvia, EU, LV-1021.
//
// The interactive user interfaces in modified source and object code versions of the Program must
// display Appropriate Legal Notices, as required under Section 5 of the GNU AGPL version 3.
//
// Pursuant to Section 7(b) of the License you must retain the original Product logo when
// distributing the program. Pursuant to Section 7(e) we decline to grant you any rights under
// trademark law for use of our trademarks.
//
// All the Product's GUI elements, including illustrations and icon sets, as well as technical writing
// content are licensed under the terms of the Creative Commons Attribution-ShareAlike 4.0
// International. See the License terms at http://creativecommons.org/licenses/by-sa/4.0/legalcode
namespace ASC.EventBus.ActiveMQ;
public class EventBusActiveMQ : IEventBus, IDisposable
{
const string EXCHANGE_NAME = "asc_event_bus";
const string AUTOFAC_SCOPE_NAME = "asc_event_bus";
private readonly ILogger<EventBusActiveMQ> _logger;
private readonly IEventBusSubscriptionsManager _subsManager;
private readonly ILifetimeScope _autofac;
private static ConcurrentQueue<Guid> _rejectedEvents;
private readonly IActiveMQPersistentConnection _persistentConnection;
private readonly IIntegrationEventSerializer _serializer;
private ISession _consumerSession;
private readonly List<IMessageConsumer> _consumers;
private readonly int _retryCount;
private string _queueName;
public EventBusActiveMQ(IActiveMQPersistentConnection persistentConnection,
ILogger<EventBusActiveMQ> logger,
ILifetimeScope autofac,
IEventBusSubscriptionsManager subsManager,
IIntegrationEventSerializer serializer,
string queueName = null,
int retryCount = 5)
{
_persistentConnection = persistentConnection ?? throw new ArgumentNullException(nameof(persistentConnection));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_subsManager = subsManager ?? new InMemoryEventBusSubscriptionsManager();
_serializer = serializer;
_queueName = queueName;
_autofac = autofac;
_retryCount = retryCount;
_rejectedEvents = new ConcurrentQueue<Guid>();
_consumerSession = CreateConsumerSession();
_subsManager.OnEventRemoved += SubsManager_OnEventRemoved;
_consumers = new List<IMessageConsumer>();
}
private void SubsManager_OnEventRemoved(object sender, string eventName)
{
if (!_persistentConnection.IsConnected)
{
_persistentConnection.TryConnect();
}
using (var session = _persistentConnection.CreateSession())
{
var messageSelector = $"eventName='{eventName}'";
var findedConsumer = _consumers.Find(x => x.MessageSelector == messageSelector);
if (findedConsumer != null)
{
findedConsumer.Close();
_consumers.Remove(findedConsumer);
}
if (_subsManager.IsEmpty)
{
_queueName = string.Empty;
_consumerSession.Close();
}
}
}
public void Publish(IntegrationEvent @event)
{
if (!_persistentConnection.IsConnected)
{
_persistentConnection.TryConnect();
}
var policy = Policy.Handle<SocketException>()
.WaitAndRetry(_retryCount, retryAttempt => TimeSpan.FromSeconds(Math.Pow(2, retryAttempt)), (ex, time) =>
{
_logger.WarningCouldNotPublishEvent(@event.Id, time.TotalSeconds, ex);
});
using (var session = _persistentConnection.CreateSession(AcknowledgementMode.ClientAcknowledge))
{
var destination = session.GetQueue(_queueName);
using (var producer = session.CreateProducer(destination))
{
producer.DeliveryMode = MsgDeliveryMode.Persistent;
var body = _serializer.Serialize(@event);
var request = session.CreateStreamMessage();
var eventName = @event.GetType().Name;
request.Properties["eventName"] = eventName;
request.WriteBytes(body);
producer.Send(request);
}
}
}
public void Subscribe<T, TH>()
where T : IntegrationEvent
where TH : IIntegrationEventHandler<T>
{
var eventName = _subsManager.GetEventKey<T>();
_logger.InformationSubscribing(eventName, typeof(TH).GetGenericTypeName());
_subsManager.AddSubscription<T, TH>();
StartBasicConsume(eventName);
}
public void SubscribeDynamic<TH>(string eventName) where TH : IDynamicIntegrationEventHandler
{
_logger.InformationSubscribingDynamic(eventName, typeof(TH).GetGenericTypeName());
_subsManager.AddDynamicSubscription<TH>(eventName);
StartBasicConsume(eventName);
}
private ISession CreateConsumerSession()
{
if (!_persistentConnection.IsConnected)
{
_persistentConnection.TryConnect();
}
_logger.TraceCreatingConsumerSession();
_consumerSession = _persistentConnection.CreateSession(AcknowledgementMode.ClientAcknowledge);
return _consumerSession;
}
private void StartBasicConsume(string eventName)
{
_logger.TraceStartingBasicConsume();
if (!_persistentConnection.IsConnected)
{
_persistentConnection.TryConnect();
}
var destination = _consumerSession.GetQueue(_queueName);
var messageSelector = $"eventName='{eventName}'";
var consumer = _consumerSession.CreateConsumer(destination, messageSelector);
_consumers.Add(consumer);
if (_consumerSession != null)
{
consumer.Listener += Consumer_Listener;
}
else
{
_logger.ErrorStartBasicConsumeCantCall();
}
}
private void Consumer_Listener(IMessage objMessage)
{
var streamMessage = objMessage as IStreamMessage;
var eventName = streamMessage.Properties["eventName"].ToString();
var buffer = new byte[4 * 1024];
byte[] serializedMessage;
using (var ms = new MemoryStream())
{
int read;
while ((read = streamMessage.ReadBytes(buffer)) > 0)
{
ms.Write(buffer, 0, read);
if (read < buffer.Length)
{
break;
}
}
serializedMessage = ms.ToArray();
}
var @event = GetEvent(eventName, serializedMessage);
var message = @event.ToString();
try
{
if (message.ToLowerInvariant().Contains("throw-fake-exception"))
{
throw new InvalidOperationException($"Fake exception requested: \"{message}\"");
}
ProcessEvent(eventName, @event)
.GetAwaiter()
.GetResult();
streamMessage.Acknowledge();
}
catch (IntegrationEventRejectExeption ex)
{
_logger.WarningProcessingMessage(message, ex);
if (_rejectedEvents.TryPeek(out var result) && result.Equals(ex.EventId))
{
_rejectedEvents.TryDequeue(out var _);
streamMessage.Acknowledge();
}
else
{
_rejectedEvents.Enqueue(ex.EventId);
}
}
catch (Exception ex)
{
_logger.WarningProcessingMessage(message, ex);
streamMessage.Acknowledge();
}
}
private IntegrationEvent GetEvent(string eventName, byte[] serializedMessage)
{
var eventType = _subsManager.GetEventTypeByName(eventName);
var integrationEvent = (IntegrationEvent)_serializer.Deserialize(serializedMessage, eventType);
return integrationEvent;
}
public void Unsubscribe<T, TH>()
where T : IntegrationEvent
where TH : IIntegrationEventHandler<T>
{
var eventName = _subsManager.GetEventKey<T>();
_logger.InformationUnsubscribing(eventName);
_subsManager.RemoveSubscription<T, TH>();
}
public void UnsubscribeDynamic<TH>(string eventName) where TH : IDynamicIntegrationEventHandler
{
_subsManager.RemoveDynamicSubscription<TH>(eventName);
}
private void PreProcessEvent(IntegrationEvent @event)
{
if (_rejectedEvents.Count == 0)
{
return;
}
if (_rejectedEvents.TryPeek(out var result) && result.Equals(@event.Id))
{
@event.Redelivered = true;
}
}
private async Task ProcessEvent(string eventName, IntegrationEvent @event)
{
_logger.TraceProcessingEvent(eventName);
PreProcessEvent(@event);
if (_subsManager.HasSubscriptionsForEvent(eventName))
{
using (var scope = _autofac.BeginLifetimeScope(AUTOFAC_SCOPE_NAME))
{
var subscriptions = _subsManager.GetHandlersForEvent(eventName);
foreach (var subscription in subscriptions)
{
if (subscription.IsDynamic)
{
var handler = scope.ResolveOptional(subscription.HandlerType) as IDynamicIntegrationEventHandler;
if (handler == null)
{
continue;
}
using dynamic eventData = @event;
await Task.Yield();
await handler.Handle(eventData);
}
else
{
var handler = scope.ResolveOptional(subscription.HandlerType);
if (handler == null)
{
continue;
}
var eventType = _subsManager.GetEventTypeByName(eventName);
var concreteType = typeof(IIntegrationEventHandler<>).MakeGenericType(eventType);
await Task.Yield();
await (Task)concreteType.GetMethod("Handle").Invoke(handler, new object[] { @event });
}
}
}
}
else
{
_logger.WarningNoSubscription(eventName);
}
}
public void Dispose()
{
foreach (var consumer in _consumers)
{
consumer.Dispose();
}
if (_consumerSession != null)
{
_consumerSession.Dispose();
}
_subsManager.Clear();
}
}

View File

@ -0,0 +1,42 @@
// (c) Copyright Ascensio System SIA 2010-2022
//
// This program is a free software product.
// You can redistribute it and/or modify it under the terms
// of the GNU Affero General Public License (AGPL) version 3 as published by the Free Software
// Foundation. In accordance with Section 7(a) of the GNU AGPL its Section 15 shall be amended
// to the effect that Ascensio System SIA expressly excludes the warranty of non-infringement of
// any third-party rights.
//
// This program is distributed WITHOUT ANY WARRANTY, without even the implied warranty
// of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. For details, see
// the GNU AGPL at: http://www.gnu.org/licenses/agpl-3.0.html
//
// You can contact Ascensio System SIA at Lubanas st. 125a-25, Riga, Latvia, EU, LV-1021.
//
// The interactive user interfaces in modified source and object code versions of the Program must
// display Appropriate Legal Notices, as required under Section 5 of the GNU AGPL version 3.
//
// Pursuant to Section 7(b) of the License you must retain the original Product logo when
// distributing the program. Pursuant to Section 7(e) we decline to grant you any rights under
// trademark law for use of our trademarks.
//
// All the Product's GUI elements, including illustrations and icon sets, as well as technical writing
// content are licensed under the terms of the Creative Commons Attribution-ShareAlike 4.0
// International. See the License terms at http://creativecommons.org/licenses/by-sa/4.0/legalcode
global using System.Collections.Concurrent;
global using System.Net.Sockets;
global using ASC.EventBus.Abstractions;
global using ASC.EventBus.Events;
global using ASC.EventBus.ActiveMQ.Log;
global using Apache.NMS;
global using Autofac;
global using Microsoft.Extensions.Logging;
global using Polly;
global using ASC.EventBus.Exceptions;
global using ASC.EventBus.Extensions;
global using ASC.EventBus.Serializers;


@ -0,0 +1,35 @@
// (c) Copyright Ascensio System SIA 2010-2022
//
// This program is a free software product.
// You can redistribute it and/or modify it under the terms
// of the GNU Affero General Public License (AGPL) version 3 as published by the Free Software
// Foundation. In accordance with Section 7(a) of the GNU AGPL its Section 15 shall be amended
// to the effect that Ascensio System SIA expressly excludes the warranty of non-infringement of
// any third-party rights.
//
// This program is distributed WITHOUT ANY WARRANTY, without even the implied warranty
// of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. For details, see
// the GNU AGPL at: http://www.gnu.org/licenses/agpl-3.0.html
//
// You can contact Ascensio System SIA at Lubanas st. 125a-25, Riga, Latvia, EU, LV-1021.
//
// The interactive user interfaces in modified source and object code versions of the Program must
// display Appropriate Legal Notices, as required under Section 5 of the GNU AGPL version 3.
//
// Pursuant to Section 7(b) of the License you must retain the original Product logo when
// distributing the program. Pursuant to Section 7(e) we decline to grant you any rights under
// trademark law for use of our trademarks.
//
// All the Product's GUI elements, including illustrations and icon sets, as well as technical writing
// content are licensed under the terms of the Creative Commons Attribution-ShareAlike 4.0
// International. See the License terms at http://creativecommons.org/licenses/by-sa/4.0/legalcode
namespace ASC.EventBus.ActiveMQ;
public interface IActiveMQPersistentConnection
{
bool IsConnected { get; }
bool TryConnect();
ISession CreateSession(AcknowledgementMode acknowledgementMode);
ISession CreateSession();
}
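
As a usage illustration rather than code from this commit, the following sketch shows how a caller might obtain a session from IActiveMQPersistentConnection and attach an Apache.NMS consumer; the queue name and the IndividualAcknowledge mode are assumptions chosen to match the explicit Acknowledge() calls seen earlier.

using System;
using Apache.NMS;

// Hypothetical listener: obtains a session from the persistent connection and
// consumes messages from a queue.
public class ActiveMQQueueListenerSketch
{
    private readonly IActiveMQPersistentConnection _connection;

    public ActiveMQQueueListenerSketch(IActiveMQPersistentConnection connection)
    {
        _connection = connection;
    }

    public IMessageConsumer Listen(string queueName, Action<IMessage> onMessage)
    {
        if (!_connection.IsConnected)
        {
            _connection.TryConnect();
        }

        // IndividualAcknowledge lets each message be acknowledged explicitly,
        // mirroring the streamMessage.Acknowledge() calls in EventBusActiveMQ.
        var session = _connection.CreateSession(AcknowledgementMode.IndividualAcknowledge);
        var queue = session.GetQueue(queueName);
        var consumer = session.CreateConsumer(queue);
        consumer.Listener += message => onMessage(message);

        return consumer;
    }
}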


@ -0,0 +1,53 @@
// (c) Copyright Ascensio System SIA 2010-2022
//
// This program is a free software product.
// You can redistribute it and/or modify it under the terms
// of the GNU Affero General Public License (AGPL) version 3 as published by the Free Software
// Foundation. In accordance with Section 7(a) of the GNU AGPL its Section 15 shall be amended
// to the effect that Ascensio System SIA expressly excludes the warranty of non-infringement of
// any third-party rights.
//
// This program is distributed WITHOUT ANY WARRANTY, without even the implied warranty
// of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. For details, see
// the GNU AGPL at: http://www.gnu.org/licenses/agpl-3.0.html
//
// You can contact Ascensio System SIA at Lubanas st. 125a-25, Riga, Latvia, EU, LV-1021.
//
// The interactive user interfaces in modified source and object code versions of the Program must
// display Appropriate Legal Notices, as required under Section 5 of the GNU AGPL version 3.
//
// Pursuant to Section 7(b) of the License you must retain the original Product logo when
// distributing the program. Pursuant to Section 7(e) we decline to grant you any rights under
// trademark law for use of our trademarks.
//
// All the Product's GUI elements, including illustrations and icon sets, as well as technical writing
// content are licensed under the terms of the Creative Commons Attribution-ShareAlike 4.0
// International. See the License terms at http://creativecommons.org/licenses/by-sa/4.0/legalcode
namespace ASC.EventBus.ActiveMQ.Log;
internal static partial class DefaultActiveMQPersistentConnectionLogger
{
[LoggerMessage(Level = LogLevel.Critical, Message = "DefaultActiveMQPersistentConnection")]
public static partial void CriticalDefaultActiveMQPersistentConnection(this ILogger<DefaultActiveMQPersistentConnection> logger, Exception exception);
[LoggerMessage(Level = LogLevel.Information, Message = "ActiveMQ Client is trying to connect")]
public static partial void InformationActiveMQTryingConnect(this ILogger<DefaultActiveMQPersistentConnection> logger);
[LoggerMessage(Level = LogLevel.Warning, Message = "ActiveMQ Client could not connect after {timeOut}s")]
public static partial void WarningActiveMQCouldNotConnect(this ILogger<DefaultActiveMQPersistentConnection> logger, double timeOut, Exception exception);
[LoggerMessage(Level = LogLevel.Information, Message = "ActiveMQ Client acquired a persistent connection to '{hostName}' and is subscribed to failure events")]
public static partial void InformationActiveMQAcquiredPersistentConnection(this ILogger<DefaultActiveMQPersistentConnection> logger, string hostName);
[LoggerMessage(Level = LogLevel.Critical, Message = "FATAL ERROR: ActiveMQ connections could not be created and opened")]
public static partial void CriticalActiveMQCouldNotBeCreated(this ILogger<DefaultActiveMQPersistentConnection> logger);
[LoggerMessage(Level = LogLevel.Warning, Message = "An ActiveMQ connection was shut down. Trying to re-connect...")]
public static partial void WarningActiveMQConnectionShutdown(this ILogger<DefaultActiveMQPersistentConnection> logger);
[LoggerMessage(Level = LogLevel.Warning, Message = "An ActiveMQ connection threw an exception. Trying to re-connect...")]
public static partial void WarningActiveMQConnectionThrowException(this ILogger<DefaultActiveMQPersistentConnection> logger);
[LoggerMessage(Level = LogLevel.Warning, Message = "An ActiveMQ connection is shutting down. Trying to re-connect...")]
public static partial void WarningActiveMQConnectionIsOnShutDown(this ILogger<DefaultActiveMQPersistentConnection> logger);
}
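
These declarations rely on the .NET logging source generator: the partial methods receive generated bodies at compile time and are then called as ordinary strongly typed extension methods. A hypothetical call site, shown only to illustrate the pattern:

using Microsoft.Extensions.Logging;

// Hypothetical call site for the generated extensions above.
public class ConnectionMonitorSketch
{
    private readonly ILogger<DefaultActiveMQPersistentConnection> _logger;

    public ConnectionMonitorSketch(ILogger<DefaultActiveMQPersistentConnection> logger)
    {
        _logger = logger;
    }

    public void OnConnecting() => _logger.InformationActiveMQTryingConnect();
}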


@ -0,0 +1,74 @@
// (c) Copyright Ascensio System SIA 2010-2022
//
// This program is a free software product.
// You can redistribute it and/or modify it under the terms
// of the GNU Affero General Public License (AGPL) version 3 as published by the Free Software
// Foundation. In accordance with Section 7(a) of the GNU AGPL its Section 15 shall be amended
// to the effect that Ascensio System SIA expressly excludes the warranty of non-infringement of
// any third-party rights.
//
// This program is distributed WITHOUT ANY WARRANTY, without even the implied warranty
// of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. For details, see
// the GNU AGPL at: http://www.gnu.org/licenses/agpl-3.0.html
//
// You can contact Ascensio System SIA at Lubanas st. 125a-25, Riga, Latvia, EU, LV-1021.
//
// The interactive user interfaces in modified source and object code versions of the Program must
// display Appropriate Legal Notices, as required under Section 5 of the GNU AGPL version 3.
//
// Pursuant to Section 7(b) of the License you must retain the original Product logo when
// distributing the program. Pursuant to Section 7(e) we decline to grant you any rights under
// trademark law for use of our trademarks.
//
// All the Product's GUI elements, including illustrations and icon sets, as well as technical writing
// content are licensed under the terms of the Creative Commons Attribution-ShareAlike 4.0
// International. See the License terms at http://creativecommons.org/licenses/by-sa/4.0/legalcode
namespace ASC.EventBus.ActiveMQ.Log;
internal static partial class EventBusActiveMQLogger
{
[LoggerMessage(Level = LogLevel.Warning, Message = "Could not publish event: {eventId} after {timeout}s")]
public static partial void WarningCouldNotPublishEvent(this ILogger<EventBusActiveMQ> logger, Guid eventId, double timeout, Exception exception);
[LoggerMessage(Level = LogLevel.Trace, Message = "Creating ActiveMQ session to publish event: {eventId} ({eventName})")]
public static partial void TraceCreatingActiveMQSession(this ILogger<EventBusActiveMQ> logger, Guid eventId, string eventName);
[LoggerMessage(Level = LogLevel.Trace, Message = "Declaring ActiveMQ exchange to publish event: {eventId}")]
public static partial void TraceDeclaringActiveMQSession(this ILogger<EventBusActiveMQ> logger, Guid eventId);
[LoggerMessage(Level = LogLevel.Trace, Message = "Publishing event to ActiveMQ: {eventId}")]
public static partial void TracePublishingEvent(this ILogger<EventBusActiveMQ> logger, Guid eventId);
[LoggerMessage(Level = LogLevel.Information, Message = "Subscribing to dynamic event {eventName} with {eventHandler}")]
public static partial void InformationSubscribingDynamic(this ILogger<EventBusActiveMQ> logger, string eventName, string eventHandler);
[LoggerMessage(Level = LogLevel.Information, Message = "Subscribing to event {eventName} with {eventHandler}")]
public static partial void InformationSubscribing(this ILogger<EventBusActiveMQ> logger, string eventName, string eventHandler);
[LoggerMessage(Level = LogLevel.Information, Message = "Unsubscribing from event {eventName}")]
public static partial void InformationUnsubscribing(this ILogger<EventBusActiveMQ> logger, string eventName);
[LoggerMessage(Level = LogLevel.Trace, Message = "Starting ActiveMQ basic consume")]
public static partial void TraceStartingBasicConsume(this ILogger<EventBusActiveMQ> logger);
[LoggerMessage(Level = LogLevel.Trace, Message = "Consumer tag {consumerTag} already exist. Cancelled BasicConsume again")]
public static partial void TraceConsumerTagExist(this ILogger<EventBusActiveMQ> logger, string consumerTag);
[LoggerMessage(Level = LogLevel.Error, Message = "StartBasicConsume can't be called when _consumerSession == null")]
public static partial void ErrorStartBasicConsumeCantCall(this ILogger<EventBusActiveMQ> logger);
[LoggerMessage(Level = LogLevel.Warning, Message = "----- ERROR Processing message \"{message}\"")]
public static partial void WarningProcessingMessage(this ILogger<EventBusActiveMQ> logger, string message, Exception exception);
[LoggerMessage(Level = LogLevel.Trace, Message = "Creating ActiveMQ consumer session")]
public static partial void TraceCreatingConsumerSession(this ILogger<EventBusActiveMQ> logger);
[LoggerMessage(Level = LogLevel.Warning, Message = "Recreating ActiveMQ consumer session")]
public static partial void WarningRecreatingConsumerSession(this ILogger<EventBusActiveMQ> logger, Exception exception);
[LoggerMessage(Level = LogLevel.Trace, Message = "Processing ActiveMQ event: {eventName}")]
public static partial void TraceProcessingEvent(this ILogger<EventBusActiveMQ> logger, string eventName);
[LoggerMessage(Level = LogLevel.Warning, Message = "No subscription for ActiveMQ event: {eventName}")]
public static partial void WarningNoSubscription(this ILogger<EventBusActiveMQ> logger, string eventName);
}


@ -65,7 +65,6 @@ public class EventBusRabbitMQ : IEventBus, IDisposable
_subsManager.OnEventRemoved += SubsManager_OnEventRemoved;
_serializer = serializer;
_rejectedEvents = new ConcurrentQueue<Guid>();
}
private void SubsManager_OnEventRemoved(object sender, string eventName)
@ -250,22 +249,16 @@ public class EventBusRabbitMQ : IEventBus, IDisposable
}
catch (IntegrationEventRejectExeption ex)
{
_logger.WarningProcessingMessage(message, ex);
if (eventArgs.Redelivered)
{
if (_rejectedEvents.TryPeek(out var result) && result.Equals(ex.EventId))
{
_rejectedEvents.TryDequeue(out var _);
_consumerChannel.BasicReject(eventArgs.DeliveryTag, requeue: false);
}
else
{
_rejectedEvents.Enqueue(ex.EventId);
}
}
else
{
_logger.WarningProcessingMessage(message, ex);
if (_rejectedEvents.TryPeek(out var result) && result.Equals(ex.EventId))
{
_rejectedEvents.TryDequeue(out var _);
_consumerChannel.BasicReject(eventArgs.DeliveryTag, requeue: false);
}
else
{
_rejectedEvents.Enqueue(ex.EventId);
_consumerChannel.BasicNack(eventArgs.DeliveryTag, multiple: false, requeue: true);
}
}
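
To make the two-pass behaviour of the block above easier to follow, here is a hypothetical helper (not part of the commit) that isolates the reject/requeue decision: the first failure of an event id is recorded and the delivery is requeued, while a repeated failure of the same id is rejected without requeue.

using System;
using System.Collections.Concurrent;

// Hypothetical helper, not part of the commit.
public static class RejectPolicySketch
{
    // Returns true when the delivery should be rejected without requeue,
    // i.e. the same event id has already failed once and is now redelivered.
    public static bool ShouldReject(ConcurrentQueue<Guid> rejectedEvents, Guid eventId)
    {
        if (rejectedEvents.TryPeek(out var head) && head.Equals(eventId))
        {
            rejectedEvents.TryDequeue(out _);
            return true;
        }

        rejectedEvents.Enqueue(eventId);
        return false;
    }
}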


@ -39,6 +39,7 @@ global using ASC.Data.Backup.ApiModels;
global using ASC.Data.Backup.Contracts;
global using ASC.Data.Backup.Core.IntegrationEvents.Events;
global using ASC.Data.Backup.EF.Context;
global using ASC.Data.Backup.Extension;
global using ASC.Data.Backup.Services;
global using ASC.EventBus.Abstractions;
global using ASC.Files.Core.EF;
@ -51,16 +52,12 @@ global using Autofac;
global using Microsoft.AspNetCore.Builder;
global using Microsoft.AspNetCore.Hosting;
global using Microsoft.AspNetCore.Http.Features;
global using Microsoft.AspNetCore.Mvc;
global using Microsoft.AspNetCore.Server.Kestrel.Core;
global using Microsoft.Extensions.Configuration;
global using Microsoft.Extensions.DependencyInjection;
global using Microsoft.Extensions.Hosting;
global using Microsoft.Extensions.Hosting.WindowsServices;
global using static ASC.Data.Backup.BackupAjaxHandler;
global using ASC.Data.Backup.Extension;
global using NLog.Extensions.Logging;
global using NLog.Web;
global using NLog.AWS.Logger;


@ -22,8 +22,8 @@
//
// All the Product's GUI elements, including illustrations and icon sets, as well as technical writing
// content are licensed under the terms of the Creative Commons Attribution-ShareAlike 4.0
// International. See the License terms at http://creativecommons.org/licenses/by-sa/4.0/legalcode
// International. See the License terms at http://creativecommons.org/licenses/by-sa/4.0/legalcode
namespace ASC.Data.Backup;
public class Startup : BaseStartup
@ -38,6 +38,16 @@ public class Startup : BaseStartup
{
base.ConfigureServices(services);
var maxRequestLimit = 1024L * 1024L * 1024L;
services.Configure<KestrelServerOptions>(options =>
{
options.Limits.MaxRequestBodySize = maxRequestLimit;
});
services.Configure<FormOptions>(x =>
{
x.MultipartBodyLengthLimit = maxRequestLimit;
});
services.AddBaseDbContextPool<BackupsContext>();
services.AddBaseDbContextPool<FilesDbContext>();
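
The two options blocks above raise the Kestrel request-body ceiling and the multipart form limit to 1 GiB for the whole backup service. As a hedged alternative, not what this commit does, the same ceiling could instead be scoped to a single upload endpoint with the built-in ASP.NET Core attributes:

using Microsoft.AspNetCore.Mvc;

// Hypothetical controller, shown only to illustrate the per-endpoint variant.
[ApiController]
[Route("api/backup")]
public class BackupUploadSketchController : ControllerBase
{
    [HttpPost("upload")]
    [RequestSizeLimit(1024L * 1024L * 1024L)]
    [RequestFormLimits(MultipartBodyLengthLimit = 1024L * 1024L * 1024L)]
    public IActionResult Upload() => Ok();
}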

config/activemq.json

@ -0,0 +1,5 @@
{
"ActiveMQ": {
"Uri": "amqp://127.0.0.1:5672"
}
}
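
For context, a hedged sketch (not from this commit) of how the "ActiveMQ:Uri" value above might be read from configuration and turned into an Apache.NMS connection factory; the amqp:// scheme is expected to resolve to the AMQP provider.

using System;
using Apache.NMS;
using Microsoft.Extensions.Configuration;

// Hypothetical factory helper; the real registration lives elsewhere in the solution.
public static class ActiveMQConnectionFactorySketch
{
    public static IConnectionFactory Create(IConfiguration configuration)
    {
        // Falls back to the value shipped in config/activemq.json when the key is absent.
        var uri = new Uri(configuration["ActiveMQ:Uri"] ?? "amqp://127.0.0.1:5672");

        return new NMSConnectionFactory(uri);
    }
}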


@ -249,6 +249,14 @@
"virtualpath": "~/studio/{0}/backup",
"expires": "0:10:0",
"disableEncryption": true
},
{
"name": "backup_temp",
"visible": false,
"type": "disc",
"path": "$STORAGE_ROOT\\Studio\\{0}\\temp\\backup",
"expires": "0:10:0",
"disableEncryption": true
},
{
"name": "customnavigation",


@ -23,6 +23,8 @@ import withPeopleLoader from "SRC_DIR/HOCs/withPeopleLoader";
const StyledContainer = styled.div`
width: 100%;
min-height: 33px;
height: 60px;
.group-button-menu-container {
margin: 0 0 0 -20px;
@ -142,6 +144,7 @@ const StyledInfoPanelToggleWrapper = styled.div`
align-items: center;
justify-content: center;
border-radius: 50%;
margin-bottom: 1px;
}
`;


@ -11,7 +11,7 @@ const CommentEditor = ({
item,
setSelection,
isRecycleBinFolder,
fetchFileVersions,
updateCommentVersion,
}) => {
@ -60,15 +60,17 @@ const CommentEditor = ({
{comment}
</Text>
)}
<div className="edit_toggle" onClick={onOpenEditor}>
<ReactSVG
className="edit_toggle-icon"
src="images/pencil.react.svg"
/>
<div className="property-content edit_toggle-text">
{comment ? t("Common:EditButton") : t("Common:AddButton")}
{!isRecycleBinFolder && (
<div className="edit_toggle" onClick={onOpenEditor}>
<ReactSVG
className="edit_toggle-icon"
src="images/pencil.react.svg"
/>
<div className="property-content edit_toggle-text">
{comment ? t("Common:EditButton") : t("Common:AddButton")}
</div>
</div>
</div>
)}
</div>
) : (
<div className="property-comment_editor-editor">
@ -101,14 +103,14 @@ const CommentEditor = ({
);
};
export default inject(({ auth, versionHistoryStore }) => {
export default inject(({ auth, versionHistoryStore, treeFoldersStore }) => {
const { setSelection } = auth.infoPanelStore;
const { fetchFileVersions, updateCommentVersion } = versionHistoryStore;
const { isRecycleBinFolder } = treeFoldersStore;
return {
setSelection,
isRecycleBinFolder,
fetchFileVersions,
updateCommentVersion,
};


@ -60,6 +60,7 @@ const StyledContainer = styled.div`
.header-container {
min-height: 33px;
height: 60px;
}
`;
@ -303,7 +304,7 @@ class SectionHeaderContent extends React.Component {
};
getContextOptionsFolder = () => {
const { t, toggleInfoPanel, personal } = this.props;
const { t, isRecycleBinFolder } = this.props;
return [
{
@ -493,7 +494,6 @@ class SectionHeaderContent extends React.Component {
isHeaderChecked,
isHeaderIndeterminate,
showText,
toggleInfoPanel,
isRoomsFolder,
isEmptyPage,
} = this.props;


@ -449,7 +449,6 @@ const WhiteLabel = (props) => {
fontWeight="600"
isHovered
type="action"
color={!isPortalPaid ? "#A3A9AE" : ""}
className="settings_unavailable"
>
{t("ChangeLogoButton")}
@ -505,7 +504,6 @@ const WhiteLabel = (props) => {
fontWeight="600"
isHovered
type="action"
color={!isPortalPaid ? "#A3A9AE" : ""}
className="settings_unavailable"
>
{t("ChangeLogoButton")}
@ -561,7 +559,6 @@ const WhiteLabel = (props) => {
fontWeight="600"
isHovered
type="action"
color={!isPortalPaid ? "#A3A9AE" : ""}
className="settings_unavailable"
>
{t("ChangeLogoButton")}
@ -617,7 +614,6 @@ const WhiteLabel = (props) => {
fontWeight="600"
isHovered
type="action"
color={!isPortalPaid ? "#A3A9AE" : ""}
className="settings_unavailable"
>
{t("ChangeLogoButton")}
@ -674,7 +670,6 @@ const WhiteLabel = (props) => {
fontWeight="600"
isHovered
type="action"
color={!isPortalPaid ? "#A3A9AE" : ""}
className="settings_unavailable"
>
{t("ChangeLogoButton")}
@ -779,7 +774,6 @@ const WhiteLabel = (props) => {
fontWeight="600"
isHovered
type="action"
color={!isPortalPaid ? "#A3A9AE" : ""}
className="settings_unavailable"
>
{t("ChangeLogoButton")}
@ -836,7 +830,6 @@ const WhiteLabel = (props) => {
fontWeight="600"
isHovered
type="action"
color={!isPortalPaid ? "#A3A9AE" : ""}
className="settings_unavailable"
>
{t("ChangeLogoButton")}


@ -6,6 +6,7 @@ $font-family-base: "Open Sans", sans-serif;
html,
body {
height: 100%;
-webkit-tap-highlight-color: rgba(0, 0, 0, 0);
}
#root {
min-height: 100%;


@ -509,16 +509,6 @@ public class FileSecurity : IFileSecurity
return false;
}
if (isVisitor && e.RootFolderType == FolderType.Recent)
{
return false;
}
if (isVisitor && e.RootFolderType == FolderType.Favorites)
{
return false;
}
if (isVisitor && e.RootFolderType == FolderType.Templates)
{
return false;


@ -522,11 +522,6 @@ public class GlobalFolder
return 0;
}
if (_userManager.IsVisitor(_authContext.CurrentAccount.ID))
{
return 0;
}
if (!RecentFolderCache.TryGetValue(_tenantManager.GetCurrentTenant().Id, out var recentFolderId))
{
var folderDao = daoFactory.GetFolderDao<int>();
@ -551,11 +546,6 @@ public class GlobalFolder
return 0;
}
if (_userManager.IsVisitor(_authContext.CurrentAccount.ID))
{
return 0;
}
if (!FavoritesFolderCache.TryGetValue(_tenantManager.GetCurrentTenant().Id, out var favoriteFolderId))
{
var folderDao = daoFactory.GetFolderDao<int>();


@ -114,7 +114,7 @@ public class FoldersControllerHelper<T> : FilesHelperBase<T>
yield return await _globalFolderHelper.FolderShareAsync;
}
if (!IsVisitor && !withoutAdditionalFolder)
if (!withoutAdditionalFolder)
{
if (_filesSettingsHelper.FavoritesSection)
{
@ -126,8 +126,10 @@ public class FoldersControllerHelper<T> : FilesHelperBase<T>
yield return await _globalFolderHelper.FolderRecentAsync;
}
if (!_coreBaseSettings.Personal && _coreBaseSettings.DisableDocSpace
&& PrivacyRoomSettings.IsAvailable())
if (!IsVisitor &&
!_coreBaseSettings.Personal &&
_coreBaseSettings.DisableDocSpace &&
PrivacyRoomSettings.IsAvailable())
{
yield return await _globalFolderHelper.FolderPrivacyAsync;
}
@ -147,7 +149,7 @@ public class FoldersControllerHelper<T> : FilesHelperBase<T>
yield return await _globalFolderHelper.FolderTemplatesAsync;
}
if (!withoutTrash)
if (!withoutTrash && !IsVisitor)
{
yield return (int)_globalFolderHelper.FolderTrash;
}


@ -32,8 +32,6 @@ public class UserController : PeopleControllerBase
private readonly ICache _cache;
private readonly TenantManager _tenantManager;
private readonly GlobalSpace _globalSpace;
private readonly Constants _constants;
private readonly CookiesManager _cookiesManager;
private readonly CoreBaseSettings _coreBaseSettings;
private readonly CustomNamingPeople _customNamingPeople;
@ -69,8 +67,6 @@ public class UserController : PeopleControllerBase
public UserController(
ICache cache,
TenantManager tenantManager,
GlobalSpace globalSpace,
Constants constants,
CookiesManager cookiesManager,
CoreBaseSettings coreBaseSettings,
CustomNamingPeople customNamingPeople,
@ -112,8 +108,6 @@ public class UserController : PeopleControllerBase
{
_cache = cache;
_tenantManager = tenantManager;
_globalSpace = globalSpace;
_constants = constants;
_cookiesManager = cookiesManager;
_coreBaseSettings = coreBaseSettings;
_customNamingPeople = customNamingPeople;