Merge branch 'release/v1.1.0' into feature/translate-1.1.0

This commit is contained in:
Alexey Safronov 2021-12-06 15:09:57 +03:00
commit 4fef520aba
32 changed files with 1442 additions and 2425 deletions

View File

@ -6,7 +6,6 @@
"packages/asc-web-components",
"packages/asc-web-common",
"packages/browserslist-config-asc",
"packages/debug-info",
"web/ASC.Web.Login",
"web/ASC.Web.Client",
"web/ASC.Web.Editor",
@ -18,16 +17,16 @@
"products/ASC.Calendar/Client"
],
"scripts": {
"build": "lerna run build --parallel --ignore @appserver/common --ignore @appserver/components --ignore @appserver/browserslist-config-asc --ignore @appserver/debug-info",
"build": "lerna run build --parallel --ignore @appserver/common --ignore @appserver/components --ignore @appserver/browserslist-config-asc",
"build:personal": "lerna run build --parallel --scope {@appserver/studio,@appserver/people,@appserver/files,@appserver/editor}",
"bump": "lerna version --no-push --no-git-tag-version",
"clean": "lerna run clean --parallel",
"deploy": "shx rm -rf build/deploy/products && shx rm -rf build/deploy/public && shx rm -rf build/deploy/studio && lerna run deploy --parallel --ignore @appserver/common --ignore @appserver/components --ignore @appserver/browserslist-config-asc --ignore @appserver/debug-info && shx cp -r public build/deploy",
"deploy": "shx rm -rf build/deploy/products && shx rm -rf build/deploy/public && shx rm -rf build/deploy/studio && lerna run deploy --parallel --ignore @appserver/common --ignore @appserver/components --ignore @appserver/browserslist-config-asc && shx cp -r public build/deploy",
"deploy:personal": "shx rm -rf build/deploy/products && shx rm -rf build/deploy/public && shx rm -rf build/deploy/studio && lerna run deploy --parallel --scope {@appserver/studio,@appserver/people,@appserver/files,@appserver/editor} && shx cp -r public build/deploy",
"serve": "lerna run serve --parallel --ignore @appserver/common --ignore @appserver/components --ignore @appserver/browserslist-config-asc --ignore @appserver/debug-info",
"start": "lerna run start --parallel --ignore @appserver/common --ignore @appserver/components --ignore @appserver/browserslist-config-asc --ignore @appserver/debug-info",
"serve": "lerna run serve --parallel --ignore @appserver/common --ignore @appserver/components --ignore @appserver/browserslist-config-asc",
"start": "lerna run start --parallel --ignore @appserver/common --ignore @appserver/components --ignore @appserver/browserslist-config-asc",
"start:personal": "lerna run start --parallel --scope {@appserver/studio,@appserver/people,@appserver/files,@appserver/editor}",
"start-prod": "lerna run start-prod --parallel --ignore @appserver/common --ignore @appserver/components --ignore @appserver/browserslist-config-asc --ignore @appserver/debug-info",
"start-prod": "lerna run start-prod --parallel --ignore @appserver/common --ignore @appserver/components --ignore @appserver/browserslist-config-asc",
"storybook": "yarn workspace @appserver/components storybook",
"storybook-build": "yarn workspace @appserver/components run storybook-build",
"sw-build": "workbox injectManifest config/workbox-config.js && yarn sw-modify && yarn sw-minimize",
@ -47,9 +46,10 @@
"sw-studio-replace": "replace-in-files --string='studio/client/' --replacement='/' build/deploy/public/sw.js",
"test": "yarn workspace @appserver/components test",
"wipe": "shx rm -rf node_modules yarn.lock web/**/node_modules products/**/node_modules",
"debug-info": "node packages/debug-info --unreleased-only --template debuginfo --output public/debuginfo.md"
"debug-info": "auto-changelog --unreleased-only --template debuginfo --output public/debuginfo.md"
},
"devDependencies": {
"auto-changelog": "https://github.com/ONLYOFFICE/auto-changelog.git#master",
"lerna": "3.22.1",
"replace-in-files-cli": "^1.0.0",
"shx": "^0.3.3",

View File

@ -1,9 +1,10 @@
import { request, setWithCredentialsStatus } from "../client";
export function login(userName, passwordHash) {
export function login(userName, passwordHash, session) {
const data = {
userName,
passwordHash,
session,
};
return request({

View File

@ -148,9 +148,9 @@ class AuthStore {
return [settingsModuleWrapper];
};
login = async (user, hash) => {
login = async (user, hash, session = true) => {
try {
const response = await api.user.login(user, hash);
const response = await api.user.login(user, hash, session);
if (!response || (!response.token && !response.tfa))
throw response.error.message;

View File

@ -1,14 +0,0 @@
{
"name": "@appserver/debug-info",
"version": "1.1.0",
"private": true,
"description": "Command line tool for generating debug info by commit history",
"main": "src/index.js",
"dependencies": {
"commander": "^7.2.0",
"handlebars": "^4.7.7",
"node-fetch": "^2.6.5",
"parse-github-url": "^1.0.2",
"semver": "^7.3.5"
}
}

View File

@ -1,167 +0,0 @@
const semver = require("semver");
const {
cmd,
isLink,
encodeHTML,
niceDate,
replaceText,
getGitVersion,
} = require("./utils");
const COMMIT_SEPARATOR = "__AUTO_CHANGELOG_COMMIT_SEPARATOR__";
const MESSAGE_SEPARATOR = "__AUTO_CHANGELOG_MESSAGE_SEPARATOR__";
const MATCH_COMMIT = /(.*)\n(.*)\n(.*)\n(.*)\n([\S\s]+)/;
const MATCH_STATS = /(\d+) files? changed(?:, (\d+) insertions?...)?(?:, (\d+) deletions?...)?/;
const BODY_FORMAT = "%B";
const FALLBACK_BODY_FORMAT = "%s%n%n%b";
// https://help.github.com/articles/closing-issues-via-commit-messages
const DEFAULT_FIX_PATTERN = /(?:close[sd]?|fixe?[sd]?|resolve[sd]?)\s(?:#(\d+)|(https?:\/\/.+?\/(?:issues|pull|pull-requests|merge_requests)\/(\d+)))/gi;
const MERGE_PATTERNS = [
/^Merge pull request #(\d+) from .+\n\n(.+)/, // Regular GitHub merge
/^(.+) \(#(\d+)\)(?:$|\n\n)/, // Github squash merge
/^Merged in .+ \(pull request #(\d+)\)\n\n(.+)/, // BitBucket merge
/^Merge branch .+ into .+\n\n(.+)[\S\s]+See merge request [^!]*!(\d+)/, // GitLab merge
];
const fetchCommits = async (diff, options = {}) => {
const format = await getLogFormat();
const log = await cmd(
`git log ${diff} --shortstat --pretty=format:${format} ${options.appendGitLog}`
);
return parseCommits(log, options);
};
const getLogFormat = async () => {
const gitVersion = await getGitVersion();
const bodyFormat =
gitVersion && semver.gte(gitVersion, "1.7.2")
? BODY_FORMAT
: FALLBACK_BODY_FORMAT;
return `${COMMIT_SEPARATOR}%H%n%ai%n%an%n%ae%n${bodyFormat}${MESSAGE_SEPARATOR}`;
};
const parseCommits = (string, options = {}) => {
return string
.split(COMMIT_SEPARATOR)
.slice(1)
.map((commit) => parseCommit(commit, options))
.filter((commit) => filterCommit(commit, options));
};
// Parse one raw commit record (as produced by the git log format built in
// getLogFormat) into the plain object consumed by the templates.
// `commit` is "<hash>\n<date>\n<author>\n<email>\n<body…stats>"; the body and
// the trailing shortstat block are separated by MESSAGE_SEPARATOR.
const parseCommit = (commit, options = {}) => {
  // MATCH_COMMIT captures: hash, author date, author name, email, and the
  // remaining multi-line tail. A malformed record makes match() return null
  // and the destructuring throw — records come straight from `git log`.
  const [, hash, date, author, email, tail] = commit.match(MATCH_COMMIT);
  const [body, stats] = tail.split(MESSAGE_SEPARATOR);
  // Escape < and > so commit text is safe to inject into markdown/HTML output.
  const message = encodeHTML(body);
  const parsed = {
    hash,
    shorthash: hash.slice(0, 7), // abbreviated hash for display/links
    author,
    email,
    date: new Date(date).toISOString(),
    niceDate: niceDate(new Date(date)),
    subject: replaceText(getSubject(message), options),
    message: message.trim(),
    fixes: getFixes(message, author, options), // linked issues, or null
    href: options.getCommitLink(hash),
    // "breaking" only when a breakingPattern option is configured and it
    // matches anywhere in the escaped message.
    breaking:
      !!options.breakingPattern &&
      new RegExp(options.breakingPattern).test(message),
    ...getStats(stats), // files / insertions / deletions, when present
  };
  return {
    ...parsed,
    // getMerge needs the already-parsed commit (for author attribution),
    // so the merge info is attached in a second step.
    merge: getMerge(parsed, message, options),
  };
};
// First line of a commit message, or a placeholder when the message is blank.
const getSubject = (message) => {
  const trimmed = message.trim();
  if (trimmed === "") {
    return "_No commit message_";
  }
  // First non-empty line of the original (untrimmed) message.
  return message.match(/[^\n]+/)[0];
};
// Parse a `git log --shortstat` summary line into numeric counts.
// Returns {} when no stats are present OR when the line does not match the
// expected shortstat shape — the original destructured `stats.match(...)`
// directly, which throws a TypeError on a null match.
const getStats = (stats) => {
  if (!stats.trim()) return {};
  const match = stats.match(MATCH_STATS);
  if (!match) return {};
  const [, files, insertions, deletions] = match;
  return {
    // Capture groups for insertions/deletions are optional; default to 0.
    files: parseInt(files || 0, 10),
    insertions: parseInt(insertions || 0, 10),
    deletions: parseInt(deletions || 0, 10),
  };
};
const getFixes = (message, author, options = {}) => {
const pattern = getFixPattern(options);
const fixes = [];
let match = pattern.exec(message);
if (!match) return null;
while (match) {
const id = getFixID(match);
const href = isLink(match[2]) ? match[2] : options.getIssueLink(id);
fixes.push({ id, href, author });
match = pattern.exec(message);
}
return fixes;
};
// Return the last non-falsey capture in a regex match array.
// The original loop started at `match.length`, which indexes one past the
// end (always undefined) — harmless but an off-by-one; start at length - 1.
const getFixID = (match) => {
  for (let i = match.length - 1; i >= 0; i--) {
    if (match[i]) {
      return match[i];
    }
  }
};
const getFixPattern = (options) => {
if (options.issuePattern) {
return new RegExp(options.issuePattern, "g");
}
return DEFAULT_FIX_PATTERN;
};
const getMergePatterns = (options) => {
if (options.mergePattern) {
return MERGE_PATTERNS.concat(new RegExp(options.mergePattern, "g"));
}
return MERGE_PATTERNS;
};
const getMerge = (commit, message, options = {}) => {
const patterns = getMergePatterns(options);
for (const pattern of patterns) {
const match = pattern.exec(message);
if (match) {
const id = /^\d+$/.test(match[1]) ? match[1] : match[2];
const message = /^\d+$/.test(match[1]) ? match[2] : match[1];
return {
id,
message: replaceText(message, options),
href: options.getMergeLink(id),
author: commit.author,
commit,
};
}
}
return null;
};
// Keep a commit unless its subject matches the configured ignore pattern.
const filterCommit = (commit, { ignoreCommitPattern }) => {
  if (!ignoreCommitPattern) {
    return true;
  }
  return !new RegExp(ignoreCommitPattern).test(commit.subject);
};
module.exports = {
COMMIT_SEPARATOR,
MESSAGE_SEPARATOR,
fetchCommits,
parseCommit,
};

View File

@ -1,9 +0,0 @@
#!/usr/bin/env node
const { run } = require('./run')
run(process.argv).catch(error => {
console.log('\n')
console.error(error)
process.exit(1)
})

View File

@ -1,172 +0,0 @@
const semver = require("semver");
const { fetchCommits } = require("./commits");
const MERGE_COMMIT_PATTERN = /^Merge (remote-tracking )?branch '.+'/;
const COMMIT_MESSAGE_PATTERN = /\n+([\S\s]+)/;
const parseReleases = async (tags, options, onParsed) => {
const releases = await Promise.all(
tags.map(async (tag) => {
const commits = await fetchCommits(tag.diff, options);
const merges = commits
.filter((commit) => commit.merge)
.map((commit) => commit.merge);
const fixes = commits
.filter((commit) => commit.fixes)
.map((commit) => ({ fixes: commit.fixes, commit }));
const emptyRelease = merges.length === 0 && fixes.length === 0;
const { message } = commits[0] || { message: null };
const breakingCount = commits.filter((c) => c.breaking).length;
const filteredCommits = commits
.filter(filterCommits(merges))
.sort(sortCommits(options))
.slice(0, getCommitLimit(options, emptyRelease, breakingCount));
if (onParsed) onParsed(tag);
const origins = commits.sort(sortCommits(options));
return {
...tag,
summary: getSummary(message, options),
commits: filteredCommits,
origins,
groups: getGroups(commits),
merges,
fixes,
};
})
);
return releases.filter(filterReleases(options));
};
const getGroups = (commits) => {
const grouped = commits.reduce((groups, commit) => {
const niceDate = commit.niceDate;
if (!groups[niceDate]) {
groups[niceDate] = [];
}
if (!commit.merge && !MERGE_COMMIT_PATTERN.test(commit.subject))
groups[niceDate].push(commit);
return groups;
}, {});
// Edit: to add it in the array format instead
const groupArrays = Object.keys(grouped)
.map((niceDate) => {
if (grouped[niceDate].length === 0) return null;
const date = new Date(grouped[niceDate][0].date);
return {
niceDate,
date,
authors: getGroupedByAuthor(grouped[niceDate]),
};
})
.filter((g) => g !== null && g.authors.length > 0);
const sortedGroups = groupArrays
.slice()
.sort((a, b) => {
return b.date - a.date;
})
.slice(0, 30); // last 30 days only
// sortedGroups.forEach(({ niceDate }, i) =>
// console.log("niceDate", i, niceDate)
// );
return sortedGroups;
};
// Bucket a list of commits by their author, preserving the order in which
// authors first appear; returns [{ author, commits }] per author.
const getGroupedByAuthor = (commits) => {
  const buckets = {};
  for (const commit of commits) {
    const key = commit.author;
    (buckets[key] = buckets[key] || []).push(commit);
  }
  return Object.keys(buckets).map((author) => ({
    author,
    commits: buckets[author],
  }));
};
const filterCommits = (merges) => (commit) => {
if (commit.fixes || commit.merge) {
// Filter out commits that already appear in fix or merge lists
return false;
}
if (commit.breaking) {
return true;
}
if (semver.valid(commit.subject)) {
// Filter out version commits
return false;
}
if (MERGE_COMMIT_PATTERN.test(commit.subject)) {
// Filter out merge commits
return false;
}
if (merges.findIndex((m) => m.message === commit.subject) !== -1) {
// Filter out commits with the same message as an existing merge
return false;
}
return true;
};
// Comparator factory: breaking changes always sort first, then by the
// configured ordering (date, date-desc, subject, subject-desc). The default
// ("relevance") orders by total churn — insertions + deletions, largest first.
const sortCommits = ({ sortCommits: order }) => (a, b) => {
  if (!a.breaking && b.breaking) return 1;
  if (a.breaking && !b.breaking) return -1;
  switch (order) {
    case "date":
      return new Date(a.date) - new Date(b.date);
    case "date-desc":
      return new Date(b.date) - new Date(a.date);
    case "subject":
      return a.subject.localeCompare(b.subject);
    case "subject-desc":
      return b.subject.localeCompare(a.subject);
    default:
      return b.insertions + b.deletions - (a.insertions + a.deletions);
  }
};
// How many commits to show for a release. `commitLimit === false` disables
// the cap entirely (undefined → Array.slice keeps everything). Empty releases
// get the backfill limit instead, and breaking changes are never cut off.
const getCommitLimit = (options, emptyRelease, breakingCount) => {
  const { commitLimit, backfillLimit } = options;
  if (commitLimit === false) {
    return undefined; // no limit — show every commit
  }
  const base = emptyRelease ? backfillLimit : commitLimit;
  return Math.max(breakingCount, base);
};
const getSummary = (message, { releaseSummary }) => {
if (!message || !releaseSummary) {
return null;
}
if (COMMIT_MESSAGE_PATTERN.test(message)) {
return message.match(COMMIT_MESSAGE_PATTERN)[1];
}
return null;
};
// Predicate factory: drop a release only when hideEmptyReleases is set and
// the release has no merges, no fixes and no commits.
const filterReleases = (options) => ({ merges, fixes, commits }) => {
  const isEmpty = merges.length + fixes.length + commits.length === 0;
  return !(options.hideEmptyReleases && isEmpty);
};
module.exports = {
parseReleases,
};

View File

@ -1,96 +0,0 @@
const parseRepoURL = require('parse-github-url')
const { cmd } = require('./utils')
const fetchRemote = async options => {
const remoteURL = await cmd(`git config --get remote.${options.remote}.url`)
return getRemote(remoteURL, options)
}
const getRemote = (remoteURL, options = {}) => {
const overrides = getOverrides(options)
if (!remoteURL) {
// No point warning if everything is overridden
if (Object.keys(overrides).length !== 4) {
console.warn(`Warning: Git remote ${options.remote} was not found`)
}
return {
getCommitLink: () => null,
getIssueLink: () => null,
getMergeLink: () => null,
getCompareLink: () => null,
...overrides
}
}
const remote = parseRepoURL(remoteURL)
const protocol = remote.protocol === 'http:' ? 'http:' : 'https:'
const hostname = remote.hostname || remote.host
const IS_BITBUCKET = /bitbucket/.test(hostname)
const IS_GITLAB = /gitlab/.test(hostname)
const IS_GITLAB_SUBGROUP = /\.git$/.test(remote.branch)
const IS_AZURE = /dev\.azure/.test(hostname)
const IS_VISUAL_STUDIO = /visualstudio/.test(hostname)
if (IS_BITBUCKET) {
const url = `${protocol}//${hostname}/${remote.repo}`
return {
getCommitLink: id => `${url}/commits/${id}`,
getIssueLink: id => `${url}/issues/${id}`,
getMergeLink: id => `${url}/pull-requests/${id}`,
getCompareLink: (from, to) => `${url}/compare/${to}..${from}`,
...overrides
}
}
if (IS_GITLAB) {
const url = IS_GITLAB_SUBGROUP
? `${protocol}//${hostname}/${remote.repo}/${remote.branch.replace(/\.git$/, '')}`
: `${protocol}//${hostname}/${remote.repo}`
return {
getCommitLink: id => `${url}/commit/${id}`,
getIssueLink: id => `${url}/issues/${id}`,
getMergeLink: id => `${url}/merge_requests/${id}`,
getCompareLink: (from, to) => `${url}/compare/${from}...${to}`,
...overrides
}
}
if (IS_AZURE || IS_VISUAL_STUDIO) {
const url = IS_AZURE
? `${protocol}//${hostname}/${remote.path}`
: `${protocol}//${hostname}/${remote.repo}/${remote.branch}`
const project = IS_AZURE
? `${protocol}//${hostname}/${remote.repo}`
: `${protocol}//${hostname}/${remote.owner}`
return {
getCommitLink: id => `${url}/commit/${id}`,
getIssueLink: id => `${project}/_workitems/edit/${id}`,
getMergeLink: id => `${url}/pullrequest/${id}`,
getCompareLink: (from, to) => `${url}/branches?baseVersion=GT${to}&targetVersion=GT${from}&_a=commits`,
...overrides
}
}
const url = `${protocol}//${hostname}/${remote.repo}`
return {
getCommitLink: id => `${url}/commit/${id}`,
getIssueLink: id => `${url}/issues/${id}`,
getMergeLink: id => `${url}/pull/${id}`,
getCompareLink: (from, to) => `${url}/compare/${from}...${to}`,
...overrides
}
}
// Build link-generator overrides from explicit --commit-url / --issue-url /
// --merge-url / --compare-url options. Only supplied options produce an
// override; {id}, {from} and {to} are the substitution placeholders.
const getOverrides = ({ commitUrl, issueUrl, mergeUrl, compareUrl }) => {
  const overrides = {}
  if (commitUrl) {
    overrides.getCommitLink = (id) => commitUrl.replace('{id}', id)
  }
  if (issueUrl) {
    overrides.getIssueLink = (id) => issueUrl.replace('{id}', id)
  }
  if (mergeUrl) {
    overrides.getMergeLink = (id) => mergeUrl.replace('{id}', id)
  }
  if (compareUrl) {
    overrides.getCompareLink = (from, to) =>
      compareUrl.replace('{from}', from).replace('{to}', to)
  }
  return overrides
}
module.exports = {
fetchRemote,
getRemote
}

View File

@ -1,203 +0,0 @@
const { Command } = require("commander");
const { version } = require("../package.json");
const { fetchRemote } = require("./remote");
const { fetchTags } = require("./tags");
const { parseReleases } = require("./releases");
const { compileTemplate } = require("./template");
const {
parseLimit,
readFile,
readJson,
writeFile,
fileExists,
updateLog,
formatBytes,
} = require("./utils");
const DEFAULT_OPTIONS = {
output: "CHANGELOG.md",
template: "compact",
remote: "origin",
commitLimit: 3,
backfillLimit: 3,
tagPrefix: "",
sortCommits: "relevance",
appendGitLog: "",
appendGitTag: "",
config: ".debug-info",
};
const PACKAGE_FILE = "package.json";
const PACKAGE_OPTIONS_KEY = "debug-info";
const PREPEND_TOKEN = "<!-- debug-info-above -->";
const getOptions = async (argv) => {
const commandOptions = new Command()
.option(
"-o, --output <file>",
`output file, default: ${DEFAULT_OPTIONS.output}`
)
.option(
"-c, --config <file>",
`config file location, default: ${DEFAULT_OPTIONS.config}`
)
.option(
"-t, --template <template>",
`specify template to use [compact, keepachangelog, json], default: ${DEFAULT_OPTIONS.template}`
)
.option(
"-r, --remote <remote>",
`specify git remote to use for links, default: ${DEFAULT_OPTIONS.remote}`
)
.option(
"-p, --package [file]",
"use version from file as latest release, default: package.json"
)
.option(
"-v, --latest-version <version>",
"use specified version as latest release"
)
.option("-u, --unreleased", "include section for unreleased changes")
.option(
"-l, --commit-limit <count>",
`number of commits to display per release, default: ${DEFAULT_OPTIONS.commitLimit}`,
parseLimit
)
.option(
"-b, --backfill-limit <count>",
`number of commits to backfill empty releases with, default: ${DEFAULT_OPTIONS.backfillLimit}`,
parseLimit
)
.option(
"--commit-url <url>",
"override url for commits, use {id} for commit id"
)
.option(
"-i, --issue-url <url>",
"override url for issues, use {id} for issue id"
) // -i kept for back compatibility
.option(
"--merge-url <url>",
"override url for merges, use {id} for merge id"
)
.option(
"--compare-url <url>",
"override url for compares, use {from} and {to} for tags"
)
.option(
"--issue-pattern <regex>",
"override regex pattern for issues in commit messages"
)
.option(
"--breaking-pattern <regex>",
"regex pattern for breaking change commits"
)
.option(
"--merge-pattern <regex>",
"add custom regex pattern for merge commits"
)
.option(
"--ignore-commit-pattern <regex>",
"pattern to ignore when parsing commits"
)
.option("--tag-pattern <regex>", "override regex pattern for version tags")
.option("--tag-prefix <prefix>", "prefix used in version tags")
.option(
"--starting-version <tag>",
"specify earliest version to include in changelog"
)
.option(
"--starting-date <yyyy-mm-dd>",
"specify earliest date to include in changelog"
)
.option(
"--sort-commits <property>",
`sort commits by property [relevance, date, date-desc], default: ${DEFAULT_OPTIONS.sortCommits}`
)
.option(
"--release-summary",
"use tagged commit message body as release summary"
)
.option("--unreleased-only", "only output unreleased changes")
.option("--hide-empty-releases", "hide empty releases")
.option("--handlebars-setup <file>", "handlebars setup file")
.option("--append-git-log <string>", "string to append to git log command")
.option("--append-git-tag <string>", "string to append to git tag command")
.option("--prepend", "prepend changelog to output file")
.option("--stdout", "output changelog to stdout")
.version(version)
.parse(argv)
.opts();
const pkg = await readJson(PACKAGE_FILE);
const packageOptions = pkg ? pkg[PACKAGE_OPTIONS_KEY] : null;
const dotOptions = await readJson(
commandOptions.config || DEFAULT_OPTIONS.config
);
const options = {
...DEFAULT_OPTIONS,
...dotOptions,
...packageOptions,
...commandOptions,
};
const remote = await fetchRemote(options);
const latestVersion = await getLatestVersion(options);
return {
...options,
...remote,
latestVersion,
};
};
// Resolve the version to label the newest release with.
// Priority: explicit --latest-version, then the version field of a package
// file (--package, defaulting to package.json), otherwise null.
const getLatestVersion = async (options) => {
  if (options.latestVersion) {
    return options.latestVersion;
  }
  if (!options.package) {
    return null;
  }
  const file = options.package === true ? PACKAGE_FILE : options.package;
  const exists = await fileExists(file);
  if (!exists) {
    throw new Error(`File ${file} does not exist`);
  }
  const json = await readJson(file);
  return json.version;
};
const run = async (argv) => {
const options = await getOptions(argv);
const log = (string) => (options.stdout ? null : updateLog(string));
log("Fetching tags…");
const tags = await fetchTags(options);
log(`${tags.length} version tags found…`);
const onParsed = ({ title }) => log(`Fetched ${title}`);
const releases = await parseReleases(tags, options, onParsed);
const changelog = await compileTemplate(releases, options);
await write(changelog, options, log);
};
const write = async (changelog, options, log) => {
if (options.stdout) {
process.stdout.write(changelog);
return;
}
const bytes = formatBytes(Buffer.byteLength(changelog, "utf8"));
const existing =
(await fileExists(options.output)) &&
(await readFile(options.output, "utf8"));
if (existing) {
const index = options.prepend ? 0 : existing.indexOf(PREPEND_TOKEN);
if (index !== -1) {
const prepended = `${changelog}\n${existing.slice(index)}`;
await writeFile(options.output, prepended);
log(`${bytes} prepended to ${options.output}\n`);
return;
}
}
await writeFile(options.output, changelog);
log(`${bytes} written to ${options.output}\n`);
};
module.exports = {
run,
};

View File

@ -1,111 +0,0 @@
const semver = require('semver')
const { cmd, niceDate } = require('./utils')
const DIVIDER = '---'
const MATCH_V = /^v\d/
const fetchTags = async (options, remote) => {
const format = `%(refname:short)${DIVIDER}%(creatordate:short)`
const tags = (await cmd(`git tag -l --format=${format} ${options.appendGitTag}`))
.trim()
.split('\n')
.map(parseTag(options))
.filter(isValidTag(options))
.sort(sortTags)
const { latestVersion, unreleased, unreleasedOnly, getCompareLink } = options
if (latestVersion || unreleased || unreleasedOnly) {
const previous = tags[0]
const v = !MATCH_V.test(latestVersion) && previous && MATCH_V.test(previous.version) ? 'v' : ''
const compareTo = latestVersion ? `${v}${latestVersion}` : 'HEAD'
tags.unshift({
tag: null,
title: latestVersion ? `${v}${latestVersion}` : 'Unreleased',
date: new Date().toISOString(),
diff: previous ? `${previous.tag}..` : 'HEAD',
href: previous ? getCompareLink(previous.tag, compareTo) : null
})
}
const enriched = tags.map(enrichTag(options))
return enriched.slice(0, getLimit(enriched, options))
}
// How many tags (newest-first) to keep: just the unreleased entry, everything
// back to a given version, everything since a given date, or all of them.
const getLimit = (tags, options) => {
  const { unreleasedOnly, startingVersion, startingDate } = options
  if (unreleasedOnly) {
    return 1
  }
  if (startingVersion) {
    let position = 0
    for (const { tag } of tags) {
      position += 1
      if (tag === startingVersion) {
        return position
      }
    }
    // Version not found — fall through to the remaining checks.
  }
  if (startingDate) {
    return tags.reduce(
      (count, t) => (t.isoDate >= startingDate ? count + 1 : count),
      0
    )
  }
  return tags.length
}
const parseTag = ({ tagPrefix }) => string => {
const [tag, date] = string.split(DIVIDER)
return {
tag,
date,
title: tag,
version: inferSemver(tag.replace(tagPrefix, ''))
}
}
const enrichTag = ({ getCompareLink, tagPattern }) => (t, index, tags) => {
const previous = tags[index + 1]
return {
isoDate: t.date.slice(0, 10),
niceDate: niceDate(t.date),
diff: previous ? `${previous.tag}..${t.tag}` : t.tag,
href: previous ? getCompareLink(previous.tag, t.tag || 'HEAD') : null,
major: Boolean(
previous &&
semver.valid(t.version) &&
semver.valid(previous.version) &&
semver.diff(t.version, previous.version) === 'major'
),
minor: Boolean(
previous &&
semver.valid(t.version) &&
semver.valid(previous.version) &&
['minor', 'preminor'].includes(semver.diff(t.version, previous.version))
),
...t
}
}
const isValidTag = ({ tagPattern }) => ({ tag, version }) => {
if (tagPattern) {
return new RegExp(tagPattern).test(tag)
}
return semver.valid(version)
}
const sortTags = ({ version: a }, { version: b }) => {
if (semver.valid(a) && semver.valid(b)) {
return semver.rcompare(a, b)
}
return a < b ? 1 : -1
}
// Expand shorthand version tags to full semver; anything else passes through.
const inferSemver = (tag) => {
  if (/^v?\d+$/.test(tag)) {
    return `${tag}.0.0` // v1 becomes v1.0.0
  }
  if (/^v?\d+\.\d+$/.test(tag)) {
    return `${tag}.0` // v1.0 becomes v1.0.0
  }
  return tag
}
module.exports = {
fetchTags
}

View File

@ -1,98 +0,0 @@
const { join } = require('path')
const Handlebars = require('handlebars')
const fetch = require('node-fetch')
const { readFile, fileExists } = require('./utils')
const TEMPLATES_DIR = join(__dirname, '..', 'templates')
const MATCH_URL = /^https?:\/\/.+/
const COMPILE_OPTIONS = {
noEscape: true
}
Handlebars.registerHelper('json', (object) => {
return new Handlebars.SafeString(JSON.stringify(object, null, 2))
})
Handlebars.registerHelper('commit-list', (context, options) => {
if (!context || context.length === 0) {
return ''
}
const list = context
.filter(item => {
const commit = item.commit || item
if (options.hash.exclude) {
const pattern = new RegExp(options.hash.exclude, 'm')
if (pattern.test(commit.message)) {
return false
}
}
if (options.hash.message) {
const pattern = new RegExp(options.hash.message, 'm')
return pattern.test(commit.message)
}
if (options.hash.subject) {
const pattern = new RegExp(options.hash.subject)
return pattern.test(commit.subject)
}
return true
})
.map(item => options.fn(item))
.join('')
if (!list) {
return ''
}
return `${options.hash.heading}\n\n${list}`
})
Handlebars.registerHelper('matches', function (val, pattern, options) {
const r = new RegExp(pattern, options.hash.flags || '')
return r.test(val) ? options.fn(this) : options.inverse(this)
})
const getTemplate = async template => {
if (MATCH_URL.test(template)) {
const response = await fetch(template)
return response.text()
}
if (await fileExists(template)) {
return readFile(template)
}
const path = join(TEMPLATES_DIR, template + '.hbs')
if (await fileExists(path) === false) {
throw new Error(`Template '${template}' was not found`)
}
return readFile(path)
}
// Normalise compiled template output: strip the indentation that Handlebars
// source formatting leaves behind and collapse runs of blank lines.
const cleanTemplate = (template) => {
  let output = template
  output = output.replace(/\n +/g, '\n')     // indentation after newlines
  output = output.replace(/^ +/, '')         // leading indentation
  output = output.replace(/\n\n\n+/g, '\n\n') // 3+ newlines -> one blank line
  output = output.replace(/\n\n$/, '\n')      // single trailing newline
  return output
}
const compileTemplate = async (releases, options) => {
const { template, handlebarsSetup } = options
if (handlebarsSetup) {
const path = /^\//.test(handlebarsSetup) ? handlebarsSetup : join(process.cwd(), handlebarsSetup)
const setup = require(path)
if (typeof setup === 'function') {
setup(Handlebars)
}
}
const compile = Handlebars.compile(await getTemplate(template), COMPILE_OPTIONS)
if (template === 'json') {
return compile({ releases, options })
}
return cleanTemplate(compile({ releases, options }))
}
module.exports = {
compileTemplate
}

View File

@ -1,132 +0,0 @@
const readline = require("readline");
const fs = require("fs");
const { spawn } = require("child_process");
const MONTH_NAMES = [
"January",
"February",
"March",
"April",
"May",
"June",
"July",
"August",
"September",
"October",
"November",
"December",
];
const updateLog = (string, clearLine = true) => {
if (clearLine) {
readline.clearLine(process.stdout);
readline.cursorTo(process.stdout, 0);
}
process.stdout.write(`debug-info: ${string}`);
};
// Human-readable size in whole kilobytes, never reporting less than 1 kB.
const formatBytes = (bytes) => {
  const kilobytes = Math.round(bytes / 1024);
  return `${kilobytes < 1 ? 1 : kilobytes} kB`;
};
// Simple util for calling a child process.
// Splits `string` on single spaces (so individual arguments must not contain
// spaces) and resolves with the accumulated stdout once the stream ends.
// NOTE(review): stderr is ignored and a non-zero exit code still resolves
// with whatever stdout was produced — callers get best-effort output; confirm
// this is intentional before relying on it for error detection.
const cmd = (string, onProgress) => {
  const [cmd, ...args] = string.trim().split(" ");
  return new Promise((resolve, reject) => {
    const child = spawn(cmd, args);
    let data = "";
    // onProgress (when provided) receives the cumulative stdout length so far.
    child.stdout.on("data", (buffer) => {
      data += buffer.toString();
      if (onProgress) {
        onProgress(data.length);
      }
    });
    child.stdout.on("end", () => resolve(data));
    // Rejects only on spawn failure (e.g. command not found).
    child.on("error", reject);
  });
};
const getGitVersion = async () => {
const output = await cmd("git --version");
const match = output.match(/\d+\.\d+\.\d+/);
return match ? match[0] : null;
};
// Format a date value as e.g. "6 December 2021".
// Uses UTC accessors so output is stable across machine timezones; `string`
// may be anything `new Date()` accepts (ISO string or a Date instance).
const niceDate = (string) => {
  const date = new Date(string);
  const day = date.getUTCDate();
  const month = MONTH_NAMES[date.getUTCMonth()]; // module-level month names
  const year = date.getUTCFullYear();
  return `${day} ${month} ${year}`;
};
// True when the string begins with "http" (matches http:// and https://).
const isLink = (string) => string.startsWith("http");
// Parse a CLI limit value: the literal string "false" disables the limit;
// anything else is read as a base-10 integer.
const parseLimit = (limit) =>
  limit === "false" ? false : parseInt(limit, 10);
// Escape angle brackets so commit text cannot inject HTML into the output.
const encodeHTML = (string) =>
  string.split("<").join("&lt;").split(">").join("&gt;");
// Apply every replaceText rule from the options: each key is treated as a
// global regex pattern and replaced by its value. No rules → input unchanged.
const replaceText = (string, options) => {
  const rules = options.replaceText;
  if (!rules) {
    return string;
  }
  let result = string;
  for (const [pattern, replacement] of Object.entries(rules)) {
    result = result.replace(new RegExp(pattern, "g"), replacement);
  }
  return result;
};
// Adapt a Node-style (err, data) callback onto a Promise executor pair:
// reject with the error when present, otherwise resolve with the data.
const createCallback = (resolve, reject) => (err, data) => {
  if (err) {
    reject(err);
  } else {
    resolve(data);
  }
};
const readFile = (path) => {
return new Promise((resolve, reject) => {
fs.readFile(path, "utf-8", createCallback(resolve, reject));
});
};
const writeFile = (path, data) => {
return new Promise((resolve, reject) => {
fs.writeFile(path, data, createCallback(resolve, reject));
});
};
// Resolve true/false for whether `path` is accessible; never rejects.
const fileExists = (path) =>
  new Promise((resolve) => {
    fs.access(path, (err) => {
      resolve(err ? false : true);
    });
  });
const readJson = async (path) => {
if ((await fileExists(path)) === false) {
return null;
}
return JSON.parse(await readFile(path));
};
module.exports = {
updateLog,
formatBytes,
cmd,
getGitVersion,
niceDate,
isLink,
parseLimit,
encodeHTML,
replaceText,
readFile,
writeFile,
fileExists,
readJson,
};

View File

@ -1,30 +0,0 @@
### Changelog
All notable changes to this project will be documented in this file. Dates are displayed in UTC.
{{#each releases}}
{{#if href}}
###{{#unless major}}#{{/unless}} [{{title}}]({{href}})
{{else}}
#### {{title}}
{{/if}}
{{#if tag}}
> {{niceDate}}
{{/if}}
{{#if summary}}
{{summary}}
{{/if}}
{{#each merges}}
- {{#if commit.breaking}}**Breaking change:** {{/if}}{{message}}{{#if href}} [`#{{id}}`]({{href}}){{/if}}
{{/each}}
{{#each fixes}}
- {{#if commit.breaking}}**Breaking change:** {{/if}}{{commit.subject}}{{#each fixes}}{{#if href}} [`#{{id}}`]({{href}}){{/if}}{{/each}}
{{/each}}
{{#each commits}}
- {{#if breaking}}**Breaking change:** {{/if}}{{subject}}{{#if href}} [`{{shorthash}}`]({{href}}){{/if}}
{{/each}}
{{/each}}

View File

@ -1,12 +0,0 @@
{{#each releases}}
{{#commit-list groups heading='## Changelog'}}
### {{niceDate}}:
{{#each authors}}
#### {{author}}:
{{#each commits}}
- {{subject}} {{#if href}}[`{{shorthash}}`]({{href}}){{/if}}
{{/each}}
{{/each}}
{{/commit-list}}
{{/each}}

View File

@ -1 +0,0 @@
{{json releases}}

View File

@ -1,39 +0,0 @@
# Changelog
All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/)
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
{{#each releases}}
{{#if href}}
## [{{title}}]({{href}}){{#if tag}} - {{isoDate}}{{/if}}
{{else}}
## {{title}}{{#if tag}} - {{isoDate}}{{/if}}
{{/if}}
{{#if summary}}
{{summary}}
{{/if}}
{{#if merges}}
### Merged
{{#each merges}}
- {{#if commit.breaking}}**Breaking change:** {{/if}}{{message}} {{#if href}}[`#{{id}}`]({{href}}){{/if}}
{{/each}}
{{/if}}
{{#if fixes}}
### Fixed
{{#each fixes}}
- {{#if commit.breaking}}**Breaking change:** {{/if}}{{commit.subject}}{{#each fixes}} {{#if href}}[`#{{id}}`]({{href}}){{/if}}{{/each}}
{{/each}}
{{/if}}
{{#commit-list commits heading='### Commits'}}
- {{#if breaking}}**Breaking change:** {{/if}}{{subject}} {{#if href}}[`{{shorthash}}`]({{href}}){{/if}}
{{/commit-list}}
{{/each}}

View File

@ -91,7 +91,7 @@ const Panels = (props) => {
onClose={onClose}
foldersType="exceptPrivacyTrashFolders"
ByExtension
searchParam={"docx"}
searchParam={".docx"}
headerName={t("Translations:CreateMasterFormFromFile")}
titleFilesList={t("SelectFile:SelectDOCXFormat")}
creationButtonPrimary

View File

@ -180,13 +180,9 @@ class DialogsStore {
const { createFile, fetchFiles, filter } = this.filesStore;
const { id } = this.selectedFolderStore;
let newTitle = fileInfo.title;
const templateId =
typeof fileInfo.id === "string"
? encodeURIComponent(fileInfo.id)
: fileInfo.id;
newTitle = newTitle.substring(0, newTitle.lastIndexOf("."));
createFile(id, `${newTitle}.docxf`, templateId)
createFile(id, `${newTitle}.docxf`, fileInfo.id)
.then(() => fetchFiles(id, filter, true, true))
.catch((err) => console.error(err));
};

View File

@ -559,7 +559,7 @@ namespace ASC.Web.Files.Services.WCFService
return new List<File<T>>(result);
}
public File<T> CreateNewFile(FileModel<T> fileWrapper, bool enableExternalExt = false)
public File<T> CreateNewFile<TTemplate>(FileModel<T, TTemplate> fileWrapper, bool enableExternalExt = false)
{
if (string.IsNullOrEmpty(fileWrapper.Title) || fileWrapper.ParentId == null) throw new ArgumentException();
@ -612,7 +612,7 @@ namespace ASC.Web.Files.Services.WCFService
file.Title = FileUtility.ReplaceFileExtension(title, fileExt);
}
if (EqualityComparer<T>.Default.Equals(fileWrapper.TemplateId, default(T)))
if (EqualityComparer<TTemplate>.Default.Equals(fileWrapper.TemplateId, default(TTemplate)))
{
var culture = UserManager.GetUsers(AuthContext.CurrentAccount.ID).GetCulture();
var storeTemplate = GetStoreTemplate();
@ -656,13 +656,14 @@ namespace ASC.Web.Files.Services.WCFService
}
else
{
var template = fileDao.GetFile(fileWrapper.TemplateId);
var fileTemlateDao = DaoFactory.GetFileDao<TTemplate>();
var template = fileTemlateDao.GetFile(fileWrapper.TemplateId);
ErrorIf(template == null, FilesCommonResource.ErrorMassage_FileNotFound);
ErrorIf(!FileSecurity.CanRead(template), FilesCommonResource.ErrorMassage_SecurityException_ReadFile);
try
{
using (var stream = fileDao.GetFileStream(template))
using (var stream = fileTemlateDao.GetFileStream(template))
{
file.ContentLength = template.ContentLength;
file = fileDao.SaveFile(file, stream);
@ -670,7 +671,7 @@ namespace ASC.Web.Files.Services.WCFService
if (template.ThumbnailStatus == Thumbnail.Created)
{
using (var thumb = fileDao.GetThumbnail(template))
using (var thumb = fileTemlateDao.GetThumbnail(template))
{
fileDao.SaveThumbnail(file, thumb);
}
@ -770,7 +771,7 @@ namespace ASC.Web.Files.Services.WCFService
}
}
public File<T> UpdateFileStream(T fileId, Stream stream, bool encrypted, bool forcesave)
public File<T> UpdateFileStream(T fileId, Stream stream, string fileExtension, bool encrypted, bool forcesave)
{
try
{
@ -780,7 +781,7 @@ namespace ASC.Web.Files.Services.WCFService
}
var file = EntryManager.SaveEditing(fileId,
null,
fileExtension,
null,
stream,
null,
@ -2432,10 +2433,10 @@ namespace ASC.Web.Files.Services.WCFService
}
}
public class FileModel<T>
public class FileModel<T, TTempate>
{
public T ParentId { get; set; }
public string Title { get; set; }
public T TemplateId { get; set; }
public TTempate TemplateId { get; set; }
}
}

View File

@ -157,7 +157,10 @@ namespace ASC.Files.Thirdparty.Box
break;
case FilterType.ByExtension:
if (!string.IsNullOrEmpty(searchText))
files = files.Where(x => FileUtility.GetFileExtension(x.Title).Contains(searchText));
{
searchText = searchText.Trim().ToLower();
files = files.Where(x => FileUtility.GetFileExtension(x.Title).Equals(searchText));
}
break;
}
@ -215,7 +218,10 @@ namespace ASC.Files.Thirdparty.Box
break;
case FilterType.ByExtension:
if (!string.IsNullOrEmpty(searchText))
files = files.Where(x => FileUtility.GetFileExtension(x.Title).Contains(searchText));
{
searchText = searchText.Trim().ToLower();
files = files.Where(x => FileUtility.GetFileExtension(x.Title).Equals(searchText));
}
break;
}

View File

@ -161,7 +161,10 @@ namespace ASC.Files.Thirdparty.Dropbox
break;
case FilterType.ByExtension:
if (!string.IsNullOrEmpty(searchText))
files = files.Where(x => FileUtility.GetFileExtension(x.Title).Contains(searchText));
{
searchText = searchText.Trim().ToLower();
files = files.Where(x => FileUtility.GetFileExtension(x.Title).Equals(searchText));
}
break;
}
@ -219,7 +222,10 @@ namespace ASC.Files.Thirdparty.Dropbox
break;
case FilterType.ByExtension:
if (!string.IsNullOrEmpty(searchText))
files = files.Where(x => FileUtility.GetFileExtension(x.Title).Contains(searchText));
{
searchText = searchText.Trim().ToLower();
files = files.Where(x => FileUtility.GetFileExtension(x.Title).Equals(searchText));
}
break;
}

View File

@ -158,7 +158,10 @@ namespace ASC.Files.Thirdparty.GoogleDrive
break;
case FilterType.ByExtension:
if (!string.IsNullOrEmpty(searchText))
files = files.Where(x => FileUtility.GetFileExtension(x.Title).Contains(searchText));
{
searchText = searchText.Trim().ToLower();
files = files.Where(x => FileUtility.GetFileExtension(x.Title).Equals(searchText));
}
break;
}
@ -216,7 +219,10 @@ namespace ASC.Files.Thirdparty.GoogleDrive
break;
case FilterType.ByExtension:
if (!string.IsNullOrEmpty(searchText))
files = files.Where(x => FileUtility.GetFileExtension(x.Title).Contains(searchText));
{
searchText = searchText.Trim().ToLower();
files = files.Where(x => FileUtility.GetFileExtension(x.Title).Equals(searchText));
}
break;
}

View File

@ -157,7 +157,10 @@ namespace ASC.Files.Thirdparty.OneDrive
break;
case FilterType.ByExtension:
if (!string.IsNullOrEmpty(searchText))
files = files.Where(x => FileUtility.GetFileExtension(x.Title).Contains(searchText));
{
searchText = searchText.Trim().ToLower();
files = files.Where(x => FileUtility.GetFileExtension(x.Title).Equals(searchText));
}
break;
}
@ -215,7 +218,10 @@ namespace ASC.Files.Thirdparty.OneDrive
break;
case FilterType.ByExtension:
if (!string.IsNullOrEmpty(searchText))
files = files.Where(x => FileUtility.GetFileExtension(x.Title).Contains(searchText));
{
searchText = searchText.Trim().ToLower();
files = files.Where(x => FileUtility.GetFileExtension(x.Title).Equals(searchText));
}
break;
}

View File

@ -149,7 +149,10 @@ namespace ASC.Files.Thirdparty.SharePoint
break;
case FilterType.ByExtension:
if (!string.IsNullOrEmpty(searchText))
files = files.Where(x => FileUtility.GetFileExtension(x.Title).Contains(searchText));
{
searchText = searchText.Trim().ToLower();
files = files.Where(x => FileUtility.GetFileExtension(x.Title).Equals(searchText));
}
break;
}
@ -207,7 +210,10 @@ namespace ASC.Files.Thirdparty.SharePoint
break;
case FilterType.ByExtension:
if (!string.IsNullOrEmpty(searchText))
files = files.Where(x => FileUtility.GetFileExtension(x.Title).Contains(searchText));
{
searchText = searchText.Trim().ToLower();
files = files.Where(x => FileUtility.GetFileExtension(x.Title).Equals(searchText));
}
break;
}

View File

@ -157,7 +157,10 @@ namespace ASC.Files.Thirdparty.Sharpbox
break;
case FilterType.ByExtension:
if (!string.IsNullOrEmpty(searchText))
files = files.Where(x => FileUtility.GetFileExtension(x.Title).Contains(searchText));
{
searchText = searchText.Trim().ToLower();
files = files.Where(x => FileUtility.GetFileExtension(x.Title).Equals(searchText));
}
break;
}
@ -219,7 +222,10 @@ namespace ASC.Files.Thirdparty.Sharpbox
break;
case FilterType.ByExtension:
if (!string.IsNullOrEmpty(searchText))
files = files.Where(x => FileUtility.GetFileExtension(x.Title).Contains(searchText));
{
searchText = searchText.Trim().ToLower();
files = files.Where(x => FileUtility.GetFileExtension(x.Title).Equals(searchText));
}
break;
}

View File

@ -8,5 +8,6 @@ namespace ASC.Files.Core.Model
public IFormFile File { get; set; }
public bool Encrypted { get; set; }
public bool Forcesave { get; set; }
public string FileExtension { get; set; }
}
}

View File

@ -739,7 +739,7 @@ namespace ASC.Web.Files.Utils
break;
case FilterType.ByExtension:
var filterExt = (searchText ?? string.Empty).ToLower().Trim();
where = f => !string.IsNullOrEmpty(filterExt) && f.FileEntryType == FileEntryType.File && FileUtility.GetFileExtension(f.Title).Contains(filterExt);
where = f => !string.IsNullOrEmpty(filterExt) && f.FileEntryType == FileEntryType.File && FileUtility.GetFileExtension(f.Title).Equals(filterExt);
break;
}
@ -1008,13 +1008,7 @@ namespace ASC.Web.Files.Utils
if (file.RootFolderType == FolderType.TRASH) throw new Exception(FilesCommonResource.ErrorMassage_ViewTrashItem);
var currentExt = file.ConvertedExtension;
if (string.IsNullOrEmpty(newExtension))
{
if (currentExt != FileUtility.MasterFormExtension)
newExtension = FileUtility.GetInternalExtension(file.Title);
else
newExtension = currentExt;
}
if (string.IsNullOrEmpty(newExtension)) newExtension = FileUtility.GetFileExtension(file.Title);
var replaceVersion = false;
if (file.Forcesave != ForcesaveType.None)

View File

@ -558,13 +558,13 @@ namespace ASC.Api.Documents
[Update("{fileId}/update")]
public FileWrapper<string> UpdateFileStreamFromForm(string fileId, [FromForm] FileStreamModel model)
{
return FilesControllerHelperString.UpdateFileStream(FilesControllerHelperInt.GetFileFromRequest(model).OpenReadStream(), fileId, model.Encrypted, model.Forcesave);
return FilesControllerHelperString.UpdateFileStream(FilesControllerHelperInt.GetFileFromRequest(model).OpenReadStream(), fileId, model.FileExtension, model.Encrypted, model.Forcesave);
}
[Update("{fileId:int}/update")]
public FileWrapper<int> UpdateFileStreamFromForm(int fileId, [FromForm] FileStreamModel model)
{
return FilesControllerHelperInt.UpdateFileStream(FilesControllerHelperInt.GetFileFromRequest(model).OpenReadStream(), fileId, model.Encrypted, model.Forcesave);
return FilesControllerHelperInt.UpdateFileStream(FilesControllerHelperInt.GetFileFromRequest(model).OpenReadStream(), fileId, model.FileExtension, model.Encrypted, model.Forcesave);
}
@ -957,14 +957,14 @@ namespace ASC.Api.Documents
/// <remarks>In case the extension for the file title differs from DOCX/XLSX/PPTX and belongs to one of the known text, spreadsheet or presentation formats, it will be changed to DOCX/XLSX/PPTX accordingly. If the file extension is not set or is unknown, the DOCX extension will be added to the file title.</remarks>
/// <returns>New file info</returns>
[Create("@my/file")]
public FileWrapper<int> CreateFileFromBody([FromBody] CreateFileModel<int> model)
public FileWrapper<int> CreateFileFromBody([FromBody] CreateFileModel<JsonElement> model)
{
return FilesControllerHelperInt.CreateFile(GlobalFolderHelper.FolderMy, model.Title, model.TemplateId, model.EnableExternalExt);
}
[Create("@my/file")]
[Consumes("application/x-www-form-urlencoded")]
public FileWrapper<int> CreateFileFromForm([FromForm] CreateFileModel<int> model)
public FileWrapper<int> CreateFileFromForm([FromForm] CreateFileModel<JsonElement> model)
{
return FilesControllerHelperInt.CreateFile(GlobalFolderHelper.FolderMy, model.Title, model.TemplateId, model.EnableExternalExt);
}
@ -979,27 +979,27 @@ namespace ASC.Api.Documents
/// <remarks>In case the extension for the file title differs from DOCX/XLSX/PPTX and belongs to one of the known text, spreadsheet or presentation formats, it will be changed to DOCX/XLSX/PPTX accordingly. If the file extension is not set or is unknown, the DOCX extension will be added to the file title.</remarks>
/// <returns>New file info</returns>
[Create("{folderId}/file")]
public FileWrapper<string> CreateFileFromBody(string folderId, [FromBody] CreateFileModel<string> model)
public FileWrapper<string> CreateFileFromBody(string folderId, [FromBody] CreateFileModel<JsonElement> model)
{
return FilesControllerHelperString.CreateFile(folderId, model.Title, model.TemplateId, model.EnableExternalExt);
}
[Create("{folderId}/file")]
[Consumes("application/x-www-form-urlencoded")]
public FileWrapper<string> CreateFileFromForm(string folderId, [FromForm] CreateFileModel<string> model)
public FileWrapper<string> CreateFileFromForm(string folderId, [FromForm] CreateFileModel<JsonElement> model)
{
return FilesControllerHelperString.CreateFile(folderId, model.Title, model.TemplateId, model.EnableExternalExt);
}
[Create("{folderId:int}/file")]
public FileWrapper<int> CreateFileFromBody(int folderId, [FromBody] CreateFileModel<int> model)
public FileWrapper<int> CreateFileFromBody(int folderId, [FromBody] CreateFileModel<JsonElement> model)
{
return FilesControllerHelperInt.CreateFile(folderId, model.Title, model.TemplateId, model.EnableExternalExt);
}
[Create("{folderId:int}/file")]
[Consumes("application/x-www-form-urlencoded")]
public FileWrapper<int> CreateFileFromForm(int folderId, [FromForm] CreateFileModel<int> model)
public FileWrapper<int> CreateFileFromForm(int folderId, [FromForm] CreateFileModel<JsonElement> model)
{
return FilesControllerHelperInt.CreateFile(folderId, model.Title, model.TemplateId, model.EnableExternalExt);
}

View File

@ -185,11 +185,11 @@ namespace ASC.Files.Helpers
}
}
public FileWrapper<T> UpdateFileStream(Stream file, T fileId, bool encrypted = false, bool forcesave = false)
public FileWrapper<T> UpdateFileStream(Stream file, T fileId, string fileExtension, bool encrypted = false, bool forcesave = false)
{
try
{
var resultFile = FileStorageService.UpdateFileStream(fileId, file, encrypted, forcesave);
var resultFile = FileStorageService.UpdateFileStream(fileId, file, fileExtension, encrypted, forcesave);
return FileWrapperHelper.Get(resultFile);
}
catch (FileNotFoundException e)
@ -317,9 +317,23 @@ namespace ASC.Files.Helpers
return FolderWrapperHelper.Get(folder);
}
public FileWrapper<T> CreateFile(T folderId, string title, T templateId, bool enableExternalExt = false)
public FileWrapper<T> CreateFile(T folderId, string title, JsonElement templateId, bool enableExternalExt = false)
{
var file = FileStorageService.CreateNewFile(new FileModel<T> { ParentId = folderId, Title = title, TemplateId = templateId }, enableExternalExt);
File<T> file;
if (templateId.ValueKind == JsonValueKind.Number)
{
file = FileStorageService.CreateNewFile(new FileModel<T, int> { ParentId = folderId, Title = title, TemplateId = templateId.GetInt32() }, enableExternalExt);
}
else if (templateId.ValueKind == JsonValueKind.String)
{
file = FileStorageService.CreateNewFile(new FileModel<T, string> { ParentId = folderId, Title = title, TemplateId = templateId.GetString() }, enableExternalExt);
}
else
{
file = FileStorageService.CreateNewFile(new FileModel<T, int> { ParentId = folderId, Title = title, TemplateId = 0 }, enableExternalExt);
}
return FileWrapperHelper.Get(file);
}
@ -354,7 +368,8 @@ namespace ASC.Files.Helpers
if (ext == destExt)
{
return CreateFile(destFolderId, destTitle, fileId);
var newFile = FileStorageService.CreateNewFile(new FileModel<T, T> { ParentId = destFolderId, Title = destTitle, TemplateId = fileId }, false);
return FileWrapperHelper.Get(newFile);
}
using (var fileStream = FileConverter.Exec(file, destExt))

View File

@ -695,7 +695,7 @@ const Editor = () => {
const mailMergeActionProps = {
isTablesOnly: true,
searchParam: "xlsx",
searchParam: ".xlsx",
};
const compareFilesActionProps = {
isDocumentsOnly: true,

View File

@ -196,8 +196,7 @@ const Form = (props) => {
localStorage.removeItem("redirectPath");
window.location.href = redirectPath;
}
}
catch(e) {
} catch (e) {
toastr.error(
t("Common:ProviderNotConnected"),
t("Common:ProviderLoginError")
@ -322,7 +321,8 @@ const Form = (props) => {
const hash = createPasswordHash(pass, hashSettings);
isDesktop && checkPwd();
login(userName, hash)
const session = !isChecked;
login(userName, hash, session)
.then((res) => {
const { url, user, hash } = res;
const redirectPath = localStorage.getItem("redirectPath");

2583
yarn.lock

File diff suppressed because it is too large Load Diff