From f1bee10ebb62e0349679ebda5308573e7741e9d7 Mon Sep 17 00:00:00 2001 From: Andrey Sobolev Date: Mon, 21 Oct 2024 11:05:42 +0700 Subject: [PATCH 01/21] UBERF-8517: Fix github external sync (#6999) Signed-off-by: Andrey Sobolev --- .../github/pod-github/src/sync/issueBase.ts | 36 +++++++++++++------ services/github/pod-github/src/sync/issues.ts | 9 ++--- .../pod-github/src/sync/pullrequests.ts | 9 ++--- 3 files changed, 36 insertions(+), 18 deletions(-) diff --git a/services/github/pod-github/src/sync/issueBase.ts b/services/github/pod-github/src/sync/issueBase.ts index 0ff45f0f960..0b2365ce292 100644 --- a/services/github/pod-github/src/sync/issueBase.ts +++ b/services/github/pod-github/src/sync/issueBase.ts @@ -1068,17 +1068,20 @@ export abstract class IssueSyncManagerBase { _class: Ref>, repo: GithubIntegrationRepository, issues: IssueExternalData[], - derivedClient: TxOperations + derivedClient: TxOperations, + syncDocs?: DocSyncInfo[] ): Promise { if (repo.githubProject == null) { return } - const syncInfo = await this.client.findAll(github.class.DocSyncInfo, { - space: repo.githubProject, - repository: repo._id, - objectClass: _class, - url: { $in: issues.map((it) => (it.url ?? '').toLowerCase()) } - }) + const syncInfo = + syncDocs ?? + (await this.client.findAll(github.class.DocSyncInfo, { + space: repo.githubProject, + repository: repo._id, + objectClass: _class, + url: { $in: issues.map((it) => (it.url ?? '').toLowerCase()) } + })) const ops = derivedClient.apply() @@ -1088,8 +1091,10 @@ export abstract class IssueSyncManagerBase { this.ctx.info('Retrieve empty document', { repo: repo.name, workspace: this.provider.getWorkspaceId().name }) continue } - const existing = syncInfo.find((it) => it.url === issue.url.toLowerCase()) - if (existing === undefined) { + const existing = + syncInfo.find((it) => it.url.toLowerCase() === issue.url.toLowerCase()) ?? + syncInfo.find((it) => (it.external as IssueExternalData)?.id === issue.id) + if (existing === undefined && syncDocs === undefined) { this.ctx.info('Create sync doc', { url: issue.url, workspace: this.provider.getWorkspaceId().name }) await ops.createDoc(github.class.DocSyncInfo, repo.githubProject, { url: issue.url.toLowerCase(), @@ -1103,7 +1108,10 @@ export abstract class IssueSyncManagerBase { externalVersionSince: '', lastModified: new Date(issue.updatedAt).getTime() }) - } else { + } else if (existing !== undefined) { + if (syncDocs !== undefined) { + syncDocs = syncDocs.filter((it) => it._id !== existing._id) + } const externalEqual = deepEqual(existing.external, issue) if (!externalEqual || existing.externalVersion !== githubExternalSyncVersion) { this.ctx.info('Update sync doc', { url: issue.url, workspace: this.provider.getWorkspaceId().name }) @@ -1126,6 +1134,14 @@ export abstract class IssueSyncManagerBase { this.ctx.error(err) } } + // if no sync doc, mark it as synchronized + for (const sd of syncDocs ?? 
[]) { + await ops.update(sd, { + needSync: githubSyncVersion, + externalVersion: githubExternalSyncVersion, + error: 'not found external doc' + }) + } await ops.commit(true) this.provider.sync() } diff --git a/services/github/pod-github/src/sync/issues.ts b/services/github/pod-github/src/sync/issues.ts index 290ce7ed159..818d97a69bf 100644 --- a/services/github/pod-github/src/sync/issues.ts +++ b/services/github/pod-github/src/sync/issues.ts @@ -980,7 +980,7 @@ export class IssueSyncManager extends IssueSyncManagerBase implements DocSyncMan // Wait global project sync await integration.syncLock.get(prj._id) - const ids = syncDocs.map((it) => (it.external as IssueExternalData).id).filter((it) => it !== undefined) + const allSyncDocs = [...syncDocs] // let partsize = 50 try { @@ -988,7 +988,8 @@ export class IssueSyncManager extends IssueSyncManagerBase implements DocSyncMan if (this.provider.isClosing()) { break } - const idsPart = ids.splice(0, partsize) + const docsPart = allSyncDocs.splice(0, partsize) + const idsPart = docsPart.map((it) => (it.external as IssueExternalData).id).filter((it) => it !== undefined) if (idsPart.length === 0) { break } @@ -1023,11 +1024,11 @@ export class IssueSyncManager extends IssueSyncManagerBase implements DocSyncMan }) } - await this.syncIssues(tracker.class.Issue, repo, issues, derivedClient) + await this.syncIssues(tracker.class.Issue, repo, issues, derivedClient, docsPart) } catch (err: any) { if (partsize > 1) { partsize = 1 - ids.push(...idsPart) + allSyncDocs.push(...docsPart) this.ctx.warn('issue external retrieval switch to one by one mode', { errors: err.errors, msg: err.message, diff --git a/services/github/pod-github/src/sync/pullrequests.ts b/services/github/pod-github/src/sync/pullrequests.ts index 51d3e78d543..5978c48de0b 100644 --- a/services/github/pod-github/src/sync/pullrequests.ts +++ b/services/github/pod-github/src/sync/pullrequests.ts @@ -1334,12 +1334,13 @@ export class PullRequestSyncManager extends IssueSyncManagerBase implements DocS ): Promise { await integration.syncLock.get(prj._id) - const ids = syncDocs.map((it) => (it.external as IssueExternalData).id).filter((it) => it !== undefined) + const allSyncDocs = [...syncDocs] let partsize = 50 try { while (true) { - const idsPart = ids.splice(0, partsize) + const docsPart = allSyncDocs.splice(0, partsize) + const idsPart = docsPart.map((it) => (it.external as IssueExternalData).id).filter((it) => it !== undefined) if (idsPart.length === 0) { break } @@ -1373,11 +1374,11 @@ export class PullRequestSyncManager extends IssueSyncManagerBase implements DocS data: cutObjectArray(response) }) } - await this.syncIssues(github.class.GithubPullRequest, repo, issues, derivedClient) + await this.syncIssues(github.class.GithubPullRequest, repo, issues, derivedClient, docsPart) } catch (err: any) { if (partsize > 1) { partsize = 1 - ids.push(...idsPart) + allSyncDocs.push(...docsPart) this.ctx.warn('pull request external retrieval switch to one by one mode', { errors: err.errors, msg: err.message, From 8bd62ecede9bd582c12facdef46e7d532dbe7ebb Mon Sep 17 00:00:00 2001 From: Andrey Sobolev Date: Mon, 21 Oct 2024 12:16:40 +0700 Subject: [PATCH 02/21] UBERF-8504: Fix DocSyncInfo in transactions (#6998) Signed-off-by: Andrey Sobolev --- packages/core/src/operations.ts | 5 +++-- services/github/model-github/src/migration.ts | 8 +++++++- 2 files changed, 10 insertions(+), 3 deletions(-) diff --git a/packages/core/src/operations.ts b/packages/core/src/operations.ts index 9db10bdae00..7f77335e75f 
100644 --- a/packages/core/src/operations.ts +++ b/packages/core/src/operations.ts @@ -444,7 +444,8 @@ export class ApplyOperations extends TxOperations { constructor ( readonly ops: TxOperations, readonly scope?: string, - readonly measureName?: string + readonly measureName?: string, + isDerived?: boolean ) { const txClient: Client = { getHierarchy: () => ops.client.getHierarchy(), @@ -460,7 +461,7 @@ export class ApplyOperations extends TxOperations { return {} } } - super(txClient, ops.user) + super(txClient, ops.user, isDerived ?? false) } match(_class: Ref>, query: DocumentQuery): ApplyOperations { diff --git a/services/github/model-github/src/migration.ts b/services/github/model-github/src/migration.ts index 540bcf15803..63f888ab6a4 100644 --- a/services/github/model-github/src/migration.ts +++ b/services/github/model-github/src/migration.ts @@ -2,7 +2,7 @@ // Copyright © 2023 Hardcore Engineering Inc. // -import core, { toIdMap, type AnyAttribute, type Ref, type Status } from '@hcengineering/core' +import core, { DOMAIN_TX, toIdMap, type AnyAttribute, type Ref, type Status } from '@hcengineering/core' import { tryMigrate, tryUpgrade, @@ -326,6 +326,12 @@ export const githubOperationPreTime: MigrateOperation = { { state: 'migrate-missing-states', func: migrateMissingStates + }, + { + state: 'remove-doc-sync-info-txes', + func: async (client) => { + await client.deleteMany(DOMAIN_TX, { objectClass: github.class.DocSyncInfo }) + } } ]) }, From 453be085fc6858c78e532603d68b76a6abf576e9 Mon Sep 17 00:00:00 2001 From: Andrey Sobolev Date: Mon, 21 Oct 2024 12:58:12 +0700 Subject: [PATCH 03/21] Fix backup check tool (#6997) Signed-off-by: Andrey Sobolev --- common/config/rush/pnpm-lock.yaml | 30 ++++++++++++------------- server/backup-service/package.json | 4 ++-- server/backup/package.json | 4 ++-- server/backup/src/backup.ts | 35 ++++++++++++++++-------------- 4 files changed, 38 insertions(+), 35 deletions(-) diff --git a/common/config/rush/pnpm-lock.yaml b/common/config/rush/pnpm-lock.yaml index f07ee37bdb4..35d45d4d99a 100644 --- a/common/config/rush/pnpm-lock.yaml +++ b/common/config/rush/pnpm-lock.yaml @@ -1332,8 +1332,8 @@ dependencies: specifier: ~0.32.0 version: 0.32.0 '@types/tar-stream': - specifier: ^2.2.2 - version: 2.2.3 + specifier: ^3.1.3 + version: 3.1.3 '@types/toposort': specifier: ^2.0.3 version: 2.0.7 @@ -1617,7 +1617,7 @@ dependencies: specifier: ^9.7.1 version: 9.13.0 itty-router: - specifier: ^5.0.17 + specifier: ^5.0.18 version: 5.0.18 jest: specifier: ^29.7.0 @@ -1839,8 +1839,8 @@ dependencies: specifier: ^3.0.0 version: 3.0.3 tar-stream: - specifier: ^2.2.0 - version: 2.2.0 + specifier: ^3.1.7 + version: 3.1.7 telegraf: specifier: ^4.16.3 version: 4.16.3 @@ -9833,8 +9833,8 @@ packages: resolution: {integrity: sha512-7NQmHra/JILCd1QqpSzl8+mJRc8ZHz3uDm8YV1Ks9IhK0epEiTw8aIErbvH9PI+6XbqhyIQy3462nEsn7UVzjQ==} dev: false - /@types/tar-stream@2.2.3: - resolution: {integrity: sha512-if3mugZfjVkXOMZdFjIHySxY13r6GXPpyOlsDmLffvyI7tLz9wXE8BFjNivXsvUeyJ1KNlOpfLnag+ISmxgxPw==} + /@types/tar-stream@3.1.3: + resolution: {integrity: sha512-Zbnx4wpkWBMBSu5CytMbrT5ZpMiF55qgM+EpHzR4yIDu7mv52cej8hTkOc6K+LzpkOAbxwn/m7j3iO+/l42YkQ==} dependencies: '@types/node': 20.11.19 dev: false @@ -26087,14 +26087,14 @@ packages: dev: false file:projects/backup-service.tgz(esbuild@0.20.1)(ts-node@10.9.2): - resolution: {integrity: sha512-yVHrvyvHvtRFu894lGDl0o19qrJv3scuxI80Dg+rQDzjb4nWXAPGAj477N6TR7vrZSmYAmsR5qEaHkXxc35ipg==, tarball: file:projects/backup-service.tgz} + resolution: {integrity: 
sha512-q/p/O9tTXiKT+qik2nsPL+zmBUwyvvfwBIH3RN4Z7vItg7gV/mI38jal1ppQU/Ddnwvfifn/h7tFc/sSTpjG2A==, tarball: file:projects/backup-service.tgz} id: file:projects/backup-service.tgz name: '@rush-temp/backup-service' version: 0.0.0 dependencies: '@types/jest': 29.5.12 '@types/node': 20.11.19 - '@types/tar-stream': 2.2.3 + '@types/tar-stream': 3.1.3 '@typescript-eslint/eslint-plugin': 6.21.0(@typescript-eslint/parser@6.21.0)(eslint@8.56.0)(typescript@5.3.3) '@typescript-eslint/parser': 6.21.0(eslint@8.56.0)(typescript@5.3.3) eslint: 8.56.0 @@ -26104,7 +26104,7 @@ packages: eslint-plugin-promise: 6.1.1(eslint@8.56.0) jest: 29.7.0(@types/node@20.11.19)(ts-node@10.9.2) prettier: 3.2.5 - tar-stream: 2.2.0 + tar-stream: 3.1.7 ts-jest: 29.1.2(esbuild@0.20.1)(jest@29.7.0)(typescript@5.3.3) typescript: 5.3.3 transitivePeerDependencies: @@ -26623,7 +26623,7 @@ packages: dev: false file:projects/cloud-branding.tgz(@types/node@20.11.19)(bufferutil@4.0.8)(esbuild@0.20.1)(ts-node@10.9.2)(utf-8-validate@6.0.4): - resolution: {integrity: sha512-LuePDPK46tYQhJ9inFPje2LSd/RqlwYSW0k0UDsNWKNyJn4WwWGngaNblMp22YGBw8wMDgJpbaIjS/BPvZ7RQg==, tarball: file:projects/cloud-branding.tgz} + resolution: {integrity: sha512-f1NgjqZw48X0+O4cblNzjU8SeW4RYnvaMvYNFyN+eKbqTDHI1N97+q5f2OoDgRonvaZa+CXMETNFXMSkniWwyA==, tarball: file:projects/cloud-branding.tgz} id: file:projects/cloud-branding.tgz name: '@rush-temp/cloud-branding' version: 0.0.0 @@ -26658,7 +26658,7 @@ packages: dev: false file:projects/cloud-datalake.tgz(@types/node@20.11.19)(bufferutil@4.0.8)(esbuild@0.20.1)(ts-node@10.9.2)(utf-8-validate@6.0.4): - resolution: {integrity: sha512-AA2lTsmPKPeYA1MTwIscZFRO40m9Ctc59Er2x8VRLNBBt4mQ01b1CCay4VFVPWYxAzh+Ru9RoUIB7lS+m8sj9Q==, tarball: file:projects/cloud-datalake.tgz} + resolution: {integrity: sha512-KdKIEaVTjeWtCEUYfkvyrIYxglGcfM8iICOnTxRzPiVmOIP3kswu7PaUKeH/xeFZQcvRBvhtNI7XT0dBr5mODA==, tarball: file:projects/cloud-datalake.tgz} id: file:projects/cloud-datalake.tgz name: '@rush-temp/cloud-datalake' version: 0.0.0 @@ -32916,14 +32916,14 @@ packages: dev: false file:projects/server-backup.tgz(esbuild@0.20.1)(ts-node@10.9.2): - resolution: {integrity: sha512-RCYMutijYbZbV1jzpWwwzQL9bZS8uzTOYX0eZgxmh3aYyU1Ub4v4TixQpdiiH72ujj0V2hPn9bM2hpYaxzbD1w==, tarball: file:projects/server-backup.tgz} + resolution: {integrity: sha512-oDyT5+30r1kpzb+Cx/HTsY1f4fRjjtSy7n13JjzYPmkn37nVcHYb1X1RImHxhoiPehYWdftTsE5v5l2on0mOPg==, tarball: file:projects/server-backup.tgz} id: file:projects/server-backup.tgz name: '@rush-temp/server-backup' version: 0.0.0 dependencies: '@types/jest': 29.5.12 '@types/node': 20.11.19 - '@types/tar-stream': 2.2.3 + '@types/tar-stream': 3.1.3 '@typescript-eslint/eslint-plugin': 6.21.0(@typescript-eslint/parser@6.21.0)(eslint@8.56.0)(typescript@5.3.3) '@typescript-eslint/parser': 6.21.0(eslint@8.56.0)(typescript@5.3.3) eslint: 8.56.0 @@ -32934,7 +32934,7 @@ packages: jest: 29.7.0(@types/node@20.11.19)(ts-node@10.9.2) prettier: 3.2.5 prettier-plugin-svelte: 3.2.1(prettier@3.2.5)(svelte@4.2.11) - tar-stream: 2.2.0 + tar-stream: 3.1.7 ts-jest: 29.1.2(esbuild@0.20.1)(jest@29.7.0)(typescript@5.3.3) typescript: 5.3.3 transitivePeerDependencies: diff --git a/server/backup-service/package.json b/server/backup-service/package.json index d0480406615..554529dda65 100644 --- a/server/backup-service/package.json +++ b/server/backup-service/package.json @@ -33,7 +33,7 @@ "eslint-config-standard-with-typescript": "^40.0.0", "prettier": "^3.1.0", "typescript": "^5.3.3", - "@types/tar-stream": "^2.2.2", + "@types/tar-stream": "^3.1.3", 
"@types/node": "~20.11.16", "jest": "^29.7.0", "ts-jest": "^29.1.1", @@ -46,7 +46,7 @@ "@hcengineering/client-resources": "^0.6.27", "@hcengineering/client": "^0.6.18", "@hcengineering/model": "^0.6.11", - "tar-stream": "^2.2.0", + "tar-stream": "^3.1.7", "@hcengineering/server-tool": "^0.6.0", "@hcengineering/server-core": "^0.6.1", "@hcengineering/server-storage": "^0.6.0", diff --git a/server/backup/package.json b/server/backup/package.json index 808422a6220..e5339813e6b 100644 --- a/server/backup/package.json +++ b/server/backup/package.json @@ -33,7 +33,7 @@ "eslint-config-standard-with-typescript": "^40.0.0", "prettier": "^3.1.0", "typescript": "^5.3.3", - "@types/tar-stream": "^2.2.2", + "@types/tar-stream": "^3.1.3", "@types/node": "~20.11.16", "jest": "^29.7.0", "ts-jest": "^29.1.1", @@ -47,7 +47,7 @@ "@hcengineering/client": "^0.6.18", "@hcengineering/model": "^0.6.11", "@hcengineering/analytics": "^0.6.0", - "tar-stream": "^2.2.0", + "tar-stream": "^3.1.7", "@hcengineering/server-tool": "^0.6.0", "@hcengineering/server-client": "^0.6.0", "@hcengineering/server-token": "^0.6.11", diff --git a/server/backup/src/backup.ts b/server/backup/src/backup.ts index 1aef026c395..c1e4ac133e1 100644 --- a/server/backup/src/backup.ts +++ b/server/backup/src/backup.ts @@ -45,7 +45,7 @@ import { type StorageAdapter } from '@hcengineering/server-core' import { fullTextPushStagePrefix } from '@hcengineering/server-indexer' import { generateToken } from '@hcengineering/server-token' import { connect } from '@hcengineering/server-tool' -import { createReadStream, createWriteStream, existsSync, mkdirSync } from 'node:fs' +import { createReadStream, createWriteStream, existsSync, mkdirSync, statSync } from 'node:fs' import { rm } from 'node:fs/promises' import { basename, dirname } from 'node:path' import { PassThrough } from 'node:stream' @@ -178,7 +178,7 @@ async function loadDigest ( result.delete(k as Ref) } } catch (err: any) { - ctx.error('digest is broken, will do full backup for', { domain }) + ctx.error('digest is broken, will do full backup for', { domain, err: err.message, snapshot }) } } // Stop if stop date is matched and provided @@ -236,14 +236,10 @@ async function verifyDigest ( blobs.set(bname, { doc, buffer: undefined }) } else { blobs.delete(bname) - const blob = doc as Blob - - if (blob.size === bf.length) { - validDocs.add(name as Ref) - } + validDocs.add(bname as Ref) } } else { - validDocs.add(name as Ref) + validDocs.add(bname as Ref) } next() }) @@ -265,10 +261,7 @@ async function verifyDigest ( sz = bf.length } - // If blob size matches doc size, remove from requiredDocs - if (sz === bf.length) { - validDocs.add(name as Ref) - } + validDocs.add(name as Ref) } next() }) @@ -364,7 +357,7 @@ async function verifyDigest ( } } catch (err: any) { digestToRemove.add(snapshot) - ctx.error('digest is broken, will do full backup for', { domain }) + ctx.error('digest is broken, will do full backup for', { domain, err: err.message, snapshot }) modified = true } } @@ -1490,6 +1483,7 @@ export async function backupSize (storage: BackupStorage): Promise { */ export async function backupDownload (storage: BackupStorage, storeIn: string): Promise { const infoFile = 'backup.json.gz' + const sizeFile = 'backup.size.gz' if (!(await storage.exists(infoFile))) { throw new Error(`${infoFile} should present to restore`) @@ -1499,6 +1493,12 @@ export async function backupDownload (storage: BackupStorage, storeIn: string): const backupInfo: BackupInfo = JSON.parse(gunzipSync(await 
storage.loadFile(infoFile)).toString()) console.log('workspace:', backupInfo.workspace ?? '', backupInfo.version) + let sizeInfo: Record = {} + if (await storage.exists(sizeFile)) { + sizeInfo = JSON.parse(gunzipSync(await storage.loadFile(sizeFile)).toString()) + } + console.log('workspace:', backupInfo.workspace ?? '', backupInfo.version) + const addFileSize = async (file: string | undefined | null, force: boolean = false): Promise => { if (file != null) { const target = join(storeIn, file) @@ -1506,8 +1506,11 @@ export async function backupDownload (storage: BackupStorage, storeIn: string): if (!existsSync(dir)) { mkdirSync(dir, { recursive: true }) } - if (!existsSync(target) || force) { - const fileSize = await storage.stat(file) + + const serverSize: number | undefined = sizeInfo[file] + + if (!existsSync(target) || force || (serverSize !== undefined && serverSize !== statSync(target).size)) { + const fileSize = serverSize ?? (await storage.stat(file)) console.log('downloading', file, fileSize) const readStream = await storage.load(file) const outp = createWriteStream(target) @@ -1781,7 +1784,7 @@ export async function restore ( if (sendSize > dataUploadSize || (doc === undefined && docs.length > 0)) { totalSend += docs.length - ctx.info('upload', { + ctx.info('upload-' + c, { docs: docs.length, totalSend, from: docsToAdd.size + totalSend, From 001b2dd0d69794ead708ec85e1c6b7a14714c340 Mon Sep 17 00:00:00 2001 From: Andrey Sobolev Date: Mon, 21 Oct 2024 12:58:32 +0700 Subject: [PATCH 04/21] QFIX: Pass isDerived to apply (#7001) Signed-off-by: Andrey Sobolev --- packages/core/src/operations.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/core/src/operations.ts b/packages/core/src/operations.ts index 7f77335e75f..8d55f8ec829 100644 --- a/packages/core/src/operations.ts +++ b/packages/core/src/operations.ts @@ -314,7 +314,7 @@ export class TxOperations implements Omit { } apply (scope?: string, measure?: string): ApplyOperations { - return new ApplyOperations(this, scope, measure) + return new ApplyOperations(this, scope, measure, this.isDerived) } async diffUpdate( From 0b1af0da90ca28ab3a58d75fa62e3a920bb69cbf Mon Sep 17 00:00:00 2001 From: Andrey Sobolev Date: Mon, 21 Oct 2024 13:12:45 +0700 Subject: [PATCH 05/21] UBERF-8518: Optimize client model (#7000) Signed-off-by: Andrey Sobolev --- desktop/src/ui/platform.ts | 2 +- dev/prod/src/platform.ts | 2 +- packages/core/src/__tests__/client.test.ts | 36 +++- packages/core/src/client.ts | 104 ++---------- packages/core/src/hierarchy.ts | 20 +-- packages/core/src/utils.ts | 62 ++++++- plugins/client-resources/src/index.ts | 168 +++++++++++++------ plugins/client/src/index.ts | 7 +- products/tracker/src/platform.ts | 3 +- services/calendar/pod-calendar/src/client.ts | 3 + services/github/pod-github/src/client.ts | 1 + 11 files changed, 242 insertions(+), 166 deletions(-) diff --git a/desktop/src/ui/platform.ts b/desktop/src/ui/platform.ts index c43c95eeed1..3e51ba0a00f 100644 --- a/desktop/src/ui/platform.ts +++ b/desktop/src/ui/platform.ts @@ -305,7 +305,7 @@ export async function configurePlatform (): Promise { addLocation(printId, () => import(/* webpackChunkName: "print" */ '@hcengineering/print-resources')) addLocation(textEditorId, () => import(/* webpackChunkName: "text-editor" */ '@hcengineering/text-editor-resources')) - setMetadata(client.metadata.FilterModel, true) + setMetadata(client.metadata.FilterModel, 'ui') setMetadata(client.metadata.ExtraPlugins, ['preference' as Plugin]) // Use binary 
response transfer for faster performance and small transfer sizes. diff --git a/dev/prod/src/platform.ts b/dev/prod/src/platform.ts index 68031ac2e47..9de3123eba8 100644 --- a/dev/prod/src/platform.ts +++ b/dev/prod/src/platform.ts @@ -397,7 +397,7 @@ export async function configurePlatform() { addLocation(textEditorId, () => import(/* webpackChunkName: "text-editor" */ '@hcengineering/text-editor-resources')) addLocation(uploaderId, () => import(/* webpackChunkName: "uploader" */ '@hcengineering/uploader-resources')) - setMetadata(client.metadata.FilterModel, true) + setMetadata(client.metadata.FilterModel, 'ui') setMetadata(client.metadata.ExtraPlugins, ['preference' as Plugin]) // Use binary response transfer for faster performance and small transfer sizes. diff --git a/packages/core/src/__tests__/client.test.ts b/packages/core/src/__tests__/client.test.ts index f6cf6d466da..997de8ae98a 100644 --- a/packages/core/src/__tests__/client.test.ts +++ b/packages/core/src/__tests__/client.test.ts @@ -13,19 +13,30 @@ // See the License for the specific language governing permissions and // limitations under the License. // -import { Plugin, IntlString } from '@hcengineering/platform' +import { IntlString, Plugin } from '@hcengineering/platform' import type { Account, Class, Data, Doc, Domain, PluginConfiguration, Ref, Timestamp } from '../classes' -import { Space, ClassifierKind, DOMAIN_MODEL } from '../classes' -import { createClient, ClientConnection } from '../client' +import { ClassifierKind, DOMAIN_MODEL, Space } from '../classes' +import { ClientConnection, createClient } from '../client' +import { clone } from '../clone' import core from '../component' import { Hierarchy } from '../hierarchy' import { ModelDb, TxDb } from '../memdb' import { TxOperations } from '../operations' -import type { DocumentQuery, FindResult, TxResult, SearchQuery, SearchOptions, SearchResult } from '../storage' +import type { DocumentQuery, FindResult, SearchOptions, SearchQuery, SearchResult, TxResult } from '../storage' import { Tx, TxFactory, TxProcessor } from '../tx' +import { fillConfiguration, pluginFilterTx } from '../utils' import { connect } from './connection' import { genMinModel } from './minmodel' -import { clone } from '../clone' + +function filterPlugin (plugin: Plugin): (txes: Tx[]) => Promise { + return async (txes) => { + const configs = new Map, PluginConfiguration>() + fillConfiguration(txes, configs) + + const excludedPlugins = Array.from(configs.values()).filter((it) => !it.enabled || it.pluginId !== plugin) + return pluginFilterTx(excludedPlugins, configs, txes) + } +} describe('client', () => { it('should create client and spaces', async () => { @@ -136,7 +147,10 @@ describe('client', () => { } const txCreateDoc1 = txFactory.createTxCreateDoc(core.class.PluginConfiguration, core.space.Model, pluginData1) txes.push(txCreateDoc1) - const client1 = new TxOperations(await createClient(connectPlugin, ['testPlugin1' as Plugin]), core.account.System) + const client1 = new TxOperations( + await createClient(connectPlugin, filterPlugin('testPlugin1' as Plugin)), + core.account.System + ) const result1 = await client1.findAll(core.class.PluginConfiguration, {}) expect(result1).toHaveLength(1) @@ -153,7 +167,10 @@ describe('client', () => { } const txCreateDoc2 = txFactory.createTxCreateDoc(core.class.PluginConfiguration, core.space.Model, pluginData2) txes.push(txCreateDoc2) - const client2 = new TxOperations(await createClient(connectPlugin, ['testPlugin1' as Plugin]), core.account.System) + 
const client2 = new TxOperations( + await createClient(connectPlugin, filterPlugin('testPlugin1' as Plugin)), + core.account.System + ) const result2 = await client2.findAll(core.class.PluginConfiguration, {}) expect(result2).toHaveLength(2) @@ -176,7 +193,10 @@ describe('client', () => { pluginData3 ) txes.push(txUpdateDoc) - const client3 = new TxOperations(await createClient(connectPlugin, ['testPlugin2' as Plugin]), core.account.System) + const client3 = new TxOperations( + await createClient(connectPlugin, filterPlugin('testPlugin2' as Plugin)), + core.account.System + ) const result3 = await client3.findAll(core.class.PluginConfiguration, {}) expect(result3).toHaveLength(1) diff --git a/packages/core/src/client.ts b/packages/core/src/client.ts index 2b063ea8345..11d480e8730 100644 --- a/packages/core/src/client.ts +++ b/packages/core/src/client.ts @@ -13,17 +13,16 @@ // limitations under the License. // -import { Plugin } from '@hcengineering/platform' import { BackupClient, DocChunk } from './backup' -import { Account, AttachedDoc, Class, DOMAIN_MODEL, Doc, Domain, PluginConfiguration, Ref, Timestamp } from './classes' +import { Account, AttachedDoc, Class, DOMAIN_MODEL, Doc, Domain, Ref, Timestamp } from './classes' import core from './component' import { Hierarchy } from './hierarchy' import { MeasureContext, MeasureMetricsContext } from './measurements' import { ModelDb } from './memdb' import type { DocumentQuery, FindOptions, FindResult, FulltextStorage, Storage, TxResult, WithLookup } from './storage' import { SearchOptions, SearchQuery, SearchResult, SortingOrder } from './storage' -import { Tx, TxCUD, TxCollectionCUD, TxCreateDoc, TxProcessor, TxUpdateDoc } from './tx' -import { toFindResult, toIdMap } from './utils' +import { Tx, TxCUD, TxCollectionCUD } from './tx' +import { toFindResult } from './utils' const transactionThreshold = 500 @@ -215,13 +214,15 @@ export interface TxPersistenceStore { store: (model: LoadModelResponse) => Promise } +export type ModelFilter = (tx: Tx[]) => Promise + /** * @public */ export async function createClient ( connect: (txHandler: TxHandler) => Promise, // If set will build model with only allowed plugins. - allowedPlugins?: Plugin[], + modelFilter?: ModelFilter, txPersistence?: TxPersistenceStore, _ctx?: MeasureContext ): Promise { @@ -248,14 +249,12 @@ export async function createClient ( } lastTx = tx.reduce((cur, it) => (it.modifiedOn > cur ? 
it.modifiedOn : cur), 0) } - const configs = new Map, PluginConfiguration>() - const conn = await ctx.with('connect', {}, async () => await connect(txHandler)) await ctx.with( 'load-model', { reload: false }, - async (ctx) => await loadModel(ctx, conn, allowedPlugins, configs, hierarchy, model, false, txPersistence) + async (ctx) => await loadModel(ctx, conn, modelFilter, hierarchy, model, false, txPersistence) ) txBuffer = txBuffer.filter((tx) => tx.space !== core.space.Model) @@ -277,7 +276,7 @@ export async function createClient ( const loadModelResponse = await ctx.with( 'connect', { reload: true }, - async (ctx) => await loadModel(ctx, conn, allowedPlugins, configs, hierarchy, model, true, txPersistence) + async (ctx) => await loadModel(ctx, conn, modelFilter, hierarchy, model, true, txPersistence) ) if (event === ClientConnectEvent.Reconnected && loadModelResponse.full) { @@ -286,7 +285,7 @@ export async function createClient ( model = new ModelDb(hierarchy) await ctx.with('build-model', {}, async (ctx) => { - await buildModel(ctx, loadModelResponse, allowedPlugins, configs, hierarchy, model) + await buildModel(ctx, loadModelResponse, modelFilter, hierarchy, model) }) await oldOnConnect?.(ClientConnectEvent.Upgraded) @@ -393,8 +392,7 @@ function isPersonAccount (tx: Tx): boolean { async function loadModel ( ctx: MeasureContext, conn: ClientConnection, - allowedPlugins: Plugin[] | undefined, - configs: Map, PluginConfiguration>, + modelFilter: ModelFilter | undefined, hierarchy: Hierarchy, model: ModelDb, reload = false, @@ -418,19 +416,18 @@ async function loadModel ( ) } - await ctx.with('build-model', {}, (ctx) => buildModel(ctx, modelResponse, allowedPlugins, configs, hierarchy, model)) + await ctx.with('build-model', {}, (ctx) => buildModel(ctx, modelResponse, modelFilter, hierarchy, model)) return modelResponse } async function buildModel ( ctx: MeasureContext, modelResponse: LoadModelResponse, - allowedPlugins: Plugin[] | undefined, - configs: Map, PluginConfiguration>, + modelFilter: ModelFilter | undefined, hierarchy: Hierarchy, model: ModelDb ): Promise { - let systemTx: Tx[] = [] + const systemTx: Tx[] = [] const userTx: Tx[] = [] const atxes = modelResponse.transactions @@ -444,23 +441,11 @@ async function buildModel ( ) }) - if (allowedPlugins != null) { - await ctx.with('fill config system', {}, async () => { - fillConfiguration(systemTx, configs) - }) - await ctx.with('fill config user', {}, async () => { - fillConfiguration(userTx, configs) - }) - const excludedPlugins = Array.from(configs.values()).filter( - (it) => !it.enabled || !allowedPlugins.includes(it.pluginId) - ) - await ctx.with('filter txes', {}, async () => { - systemTx = pluginFilterTx(excludedPlugins, configs, systemTx) - }) + let txes = systemTx.concat(userTx) + if (modelFilter !== undefined) { + txes = await modelFilter(txes) } - const txes = systemTx.concat(userTx) - await ctx.with('build hierarchy', {}, async () => { for (const tx of txes) { try { @@ -488,60 +473,3 @@ function getLastTxTime (txes: Tx[]): number { } return lastTxTime } - -function fillConfiguration (systemTx: Tx[], configs: Map, PluginConfiguration>): void { - for (const t of systemTx) { - if (t._class === core.class.TxCreateDoc) { - const ct = t as TxCreateDoc - if (ct.objectClass === core.class.PluginConfiguration) { - configs.set(ct.objectId as Ref, TxProcessor.createDoc2Doc(ct) as PluginConfiguration) - } - } else if (t._class === core.class.TxUpdateDoc) { - const ut = t as TxUpdateDoc - if (ut.objectClass === 
core.class.PluginConfiguration) { - const c = configs.get(ut.objectId as Ref) - if (c !== undefined) { - TxProcessor.updateDoc2Doc(c, ut) - } - } - } - } -} - -function pluginFilterTx ( - excludedPlugins: PluginConfiguration[], - configs: Map, PluginConfiguration>, - systemTx: Tx[] -): Tx[] { - const stx = toIdMap(systemTx) - const totalExcluded = new Set>() - let msg = '' - for (const a of excludedPlugins) { - for (const c of configs.values()) { - if (a.pluginId === c.pluginId) { - for (const id of c.transactions) { - if (c.classFilter !== undefined) { - const filter = new Set(c.classFilter) - const tx = stx.get(id as Ref) - if ( - tx?._class === core.class.TxCreateDoc || - tx?._class === core.class.TxUpdateDoc || - tx?._class === core.class.TxRemoveDoc - ) { - const cud = tx as TxCUD - if (filter.has(cud.objectClass)) { - totalExcluded.add(id as Ref) - } - } - } else { - totalExcluded.add(id as Ref) - } - } - msg += ` ${c.pluginId}:${c.transactions.length}` - } - } - } - console.log('exclude plugin', msg) - systemTx = systemTx.filter((t) => !totalExcluded.has(t._id)) - return systemTx -} diff --git a/packages/core/src/hierarchy.ts b/packages/core/src/hierarchy.ts index cf83ff9edde..9397c5fa921 100644 --- a/packages/core/src/hierarchy.ts +++ b/packages/core/src/hierarchy.ts @@ -30,7 +30,7 @@ export class Hierarchy { private readonly attributes = new Map, Map>() private readonly attributesById = new Map, AnyAttribute>() private readonly descendants = new Map, Ref[]>() - private readonly ancestors = new Map, { ordered: Ref[], set: Set> }>() + private readonly ancestors = new Map, Set>>() private readonly proxies = new Map>, ProxyHandler>() private readonly classifierProperties = new Map, Record>() @@ -166,7 +166,7 @@ export class Hierarchy { if (result === undefined) { throw new Error('ancestors not found: ' + _class) } - return result.ordered + return Array.from(result) } getClass(_class: Ref>): Class { @@ -301,7 +301,7 @@ export class Hierarchy { * It will iterate over parents. */ isDerived(_class: Ref>, from: Ref>): boolean { - return this.ancestors.get(_class)?.set?.has(from) ?? false + return this.ancestors.get(_class)?.has(from) ?? 
false } /** @@ -388,19 +388,17 @@ export class Hierarchy { const list = this.ancestors.get(_class) if (list === undefined) { if (add) { - this.ancestors.set(_class, { ordered: [classifier], set: new Set([classifier]) }) + this.ancestors.set(_class, new Set([classifier])) } } else { if (add) { - if (!list.set.has(classifier)) { - list.ordered.push(classifier) - list.set.add(classifier) + if (!list.has(classifier)) { + list.add(classifier) } } else { - const pos = list.ordered.indexOf(classifier) - if (pos !== -1) { - list.ordered.splice(pos, 1) - list.set.delete(classifier) + const pos = list.has(classifier) + if (pos) { + list.delete(classifier) } } } diff --git a/packages/core/src/utils.ts b/packages/core/src/utils.ts index 4987f6d617e..506201eba58 100644 --- a/packages/core/src/utils.ts +++ b/packages/core/src/utils.ts @@ -40,7 +40,8 @@ import { roleOrder, Space, TypedSpace, - WorkspaceMode + WorkspaceMode, + type PluginConfiguration } from './classes' import core from './component' import { Hierarchy } from './hierarchy' @@ -48,7 +49,7 @@ import { TxOperations } from './operations' import { isPredicate } from './predicate' import { Branding, BrandingMap } from './server' import { DocumentQuery, FindResult } from './storage' -import { DOMAIN_TX } from './tx' +import { DOMAIN_TX, TxProcessor, type Tx, type TxCreateDoc, type TxCUD, type TxUpdateDoc } from './tx' function toHex (value: number, chars: number): string { const result = value.toString(16) @@ -835,3 +836,60 @@ export function getBranding (brandings: BrandingMap, key: string | undefined): B return Object.values(brandings).find((branding) => branding.key === key) ?? null } + +export function fillConfiguration (systemTx: Tx[], configs: Map, PluginConfiguration>): void { + for (const t of systemTx) { + if (t._class === core.class.TxCreateDoc) { + const ct = t as TxCreateDoc + if (ct.objectClass === core.class.PluginConfiguration) { + configs.set(ct.objectId as Ref, TxProcessor.createDoc2Doc(ct) as PluginConfiguration) + } + } else if (t._class === core.class.TxUpdateDoc) { + const ut = t as TxUpdateDoc + if (ut.objectClass === core.class.PluginConfiguration) { + const c = configs.get(ut.objectId as Ref) + if (c !== undefined) { + TxProcessor.updateDoc2Doc(c, ut) + } + } + } + } +} + +export function pluginFilterTx ( + excludedPlugins: PluginConfiguration[], + configs: Map, PluginConfiguration>, + systemTx: Tx[] +): Tx[] { + const stx = toIdMap(systemTx) + const totalExcluded = new Set>() + let msg = '' + for (const a of excludedPlugins) { + for (const c of configs.values()) { + if (a.pluginId === c.pluginId) { + for (const id of c.transactions) { + if (c.classFilter !== undefined) { + const filter = new Set(c.classFilter) + const tx = stx.get(id as Ref) + if ( + tx?._class === core.class.TxCreateDoc || + tx?._class === core.class.TxUpdateDoc || + tx?._class === core.class.TxRemoveDoc + ) { + const cud = tx as TxCUD + if (filter.has(cud.objectClass)) { + totalExcluded.add(id as Ref) + } + } + } else { + totalExcluded.add(id as Ref) + } + } + msg += ` ${c.pluginId}:${c.transactions.length}` + } + } + } + console.log('exclude plugin', msg) + systemTx = systemTx.filter((t) => !totalExcluded.has(t._id)) + return systemTx +} diff --git a/plugins/client-resources/src/index.ts b/plugins/client-resources/src/index.ts index be8d1bd02b2..58748dc5b32 100644 --- a/plugins/client-resources/src/index.ts +++ b/plugins/client-resources/src/index.ts @@ -24,7 +24,16 @@ import core, { TxWorkspaceEvent, WorkspaceEvent, concatLink, - createClient + 
createClient, + fillConfiguration, + pluginFilterTx, + type Class, + type ClientConnection, + type Doc, + type ModelFilter, + type PluginConfiguration, + type Ref, + type TxCUD } from '@hcengineering/core' import platform, { Severity, Status, getMetadata, getPlugins, setPlatformStatus } from '@hcengineering/platform' import { connect } from './connection' @@ -70,68 +79,123 @@ export default async () => { return { function: { GetClient: async (token: string, endpoint: string, opt?: ClientFactoryOptions): Promise => { - const filterModel = getMetadata(clientPlugin.metadata.FilterModel) ?? false + const filterModel = getMetadata(clientPlugin.metadata.FilterModel) ?? 'none' - const client = createClient( - async (handler: TxHandler) => { - const url = concatLink(endpoint, `/${token}`) + const handler = async (handler: TxHandler): Promise => { + const url = concatLink(endpoint, `/${token}`) - const upgradeHandler: TxHandler = (...txes: Tx[]) => { - for (const tx of txes) { - if (tx?._class === core.class.TxModelUpgrade) { - opt?.onUpgrade?.() - return - } - if (tx?._class === core.class.TxWorkspaceEvent) { - const event = tx as TxWorkspaceEvent - if (event.event === WorkspaceEvent.MaintenanceNotification) { - void setPlatformStatus( - new Status(Severity.WARNING, platform.status.MaintenanceWarning, { - time: event.params.timeMinutes - }) - ) - } + const upgradeHandler: TxHandler = (...txes: Tx[]) => { + for (const tx of txes) { + if (tx?._class === core.class.TxModelUpgrade) { + opt?.onUpgrade?.() + return + } + if (tx?._class === core.class.TxWorkspaceEvent) { + const event = tx as TxWorkspaceEvent + if (event.event === WorkspaceEvent.MaintenanceNotification) { + void setPlatformStatus( + new Status(Severity.WARNING, platform.status.MaintenanceWarning, { + time: event.params.timeMinutes + }) + ) } } - handler(...txes) } - const tokenPayload: { workspace: string, email: string } = decodeTokenPayload(token) - - const newOpt = { ...opt } - const connectTimeout = getMetadata(clientPlugin.metadata.ConnectionTimeout) - let connectPromise: Promise | undefined - if ((connectTimeout ?? 0) > 0) { - connectPromise = new Promise((resolve, reject) => { - const connectTO = setTimeout(() => { - if (!clientConnection.isConnected()) { - newOpt.onConnect = undefined - void clientConnection?.close() - void opt?.onDialTimeout?.() - reject(new Error(`Connection timeout, and no connection established to ${endpoint}`)) - } - }, connectTimeout) - newOpt.onConnect = (event) => { - // Any event is fine, it means server is alive. - clearTimeout(connectTO) - resolve() + handler(...txes) + } + const tokenPayload: { workspace: string, email: string } = decodeTokenPayload(token) + + const newOpt = { ...opt } + const connectTimeout = getMetadata(clientPlugin.metadata.ConnectionTimeout) + let connectPromise: Promise | undefined + if ((connectTimeout ?? 0) > 0) { + connectPromise = new Promise((resolve, reject) => { + const connectTO = setTimeout(() => { + if (!clientConnection.isConnected()) { + newOpt.onConnect = undefined + void clientConnection?.close() + void opt?.onDialTimeout?.() + reject(new Error(`Connection timeout, and no connection established to ${endpoint}`)) } - }) - } - const clientConnection = connect(url, upgradeHandler, tokenPayload.workspace, tokenPayload.email, newOpt) - if (connectPromise !== undefined) { - await connectPromise - } - return await Promise.resolve(clientConnection) - }, - filterModel ? [...getPlugins(), ...(getMetadata(clientPlugin.metadata.ExtraPlugins) ?? 
[])] : undefined, - createModelPersistence(getWSFromToken(token)), - opt?.ctx - ) + }, connectTimeout) + newOpt.onConnect = (event) => { + // Any event is fine, it means server is alive. + clearTimeout(connectTO) + resolve() + } + }) + } + const clientConnection = connect(url, upgradeHandler, tokenPayload.workspace, tokenPayload.email, newOpt) + if (connectPromise !== undefined) { + await connectPromise + } + return await Promise.resolve(clientConnection) + } + + const modelFilter: ModelFilter = async (txes) => { + if (filterModel === 'client') { + return returnClientTxes(txes) + } + if (filterModel === 'ui') { + return returnUITxes(txes) + } + return txes + } + + const client = createClient(handler, modelFilter, createModelPersistence(getWSFromToken(token)), opt?.ctx) return await client } } } } +function returnUITxes (txes: Tx[]): Tx[] { + const configs = new Map, PluginConfiguration>() + fillConfiguration(txes, configs) + + const allowedPlugins = [...getPlugins(), ...(getMetadata(clientPlugin.metadata.ExtraPlugins) ?? [])] + const excludedPlugins = Array.from(configs.values()).filter( + (it) => !it.enabled || !allowedPlugins.includes(it.pluginId) + ) + return pluginFilterTx(excludedPlugins, configs, txes) +} + +function returnClientTxes (txes: Tx[]): Tx[] { + const configs = new Map, PluginConfiguration>() + fillConfiguration(txes, configs) + const excludedPlugins = Array.from(configs.values()).filter((it) => !it.enabled || it.pluginId.startsWith('server-')) + + const toExclude = new Set([ + 'workbench:class:Application' as Ref>, + 'presentation:class:ComponentPointExtension' as Ref>, + 'presentation:class:ObjectSearchCategory' as Ref>, + 'notification:class:NotificationGroup' as Ref>, + 'notification:class:NotificationType' as Ref>, + 'view:class:Action' as Ref>, + 'view:class:Viewlet' as Ref>, + 'text-editor:class:TextEditorAction' as Ref>, + 'templates:class:TemplateField' as Ref>, + 'activity:class:DocUpdateMessageViewlet' as Ref>, + 'core:class:PluginConfiguration' as Ref>, + 'core:class:DomainIndexConfiguration' as Ref> + ]) + + const result = pluginFilterTx(excludedPlugins, configs, txes).filter((tx) => { + // Exclude all matched UI plugins + if ( + tx?._class === core.class.TxCreateDoc || + tx?._class === core.class.TxUpdateDoc || + tx?._class === core.class.TxRemoveDoc + ) { + const cud = tx as TxCUD + if (toExclude.has(cud.objectClass)) { + return false + } + } + return true + }) + return result +} + function createModelPersistence (workspace: string): TxPersistenceStore | undefined { const overrideStore = getMetadata(clientPlugin.metadata.OverridePersistenceStore) if (overrideStore !== undefined) { diff --git a/plugins/client/src/index.ts b/plugins/client/src/index.ts index 8eac225b84b..1bccb6cd7e6 100644 --- a/plugins/client/src/index.ts +++ b/plugins/client/src/index.ts @@ -69,10 +69,15 @@ export interface ClientFactoryOptions { */ export type ClientFactory = (token: string, endpoint: string, opt?: ClientFactoryOptions) => Promise +// client - will filter out all server model elements +// It will also filter out all UI Elements, like Actions, View declarations etc. +// ui - will filter out all server element's and all UI disabled elements. 
+export type FilterMode = 'none' | 'client' | 'ui' + export default plugin(clientId, { metadata: { ClientSocketFactory: '' as Metadata, - FilterModel: '' as Metadata, + FilterModel: '' as Metadata, ExtraPlugins: '' as Metadata, UseBinaryProtocol: '' as Metadata, UseProtocolCompression: '' as Metadata, diff --git a/products/tracker/src/platform.ts b/products/tracker/src/platform.ts index 44df935ecd6..6c17bdb7e83 100644 --- a/products/tracker/src/platform.ts +++ b/products/tracker/src/platform.ts @@ -104,6 +104,5 @@ export async function configurePlatform() { setMetadata(uiPlugin.metadata.PlatformTitle, 'Tracker') setMetadata(workbench.metadata.PlatformTitle, 'Tracker') - setMetadata(client.metadata.FilterModel, true) - setMetadata(client.metadata.ExtraPlugins, ['preference' as Plugin]) + setMetadata(client.metadata.FilterModel, 'ui') } diff --git a/services/calendar/pod-calendar/src/client.ts b/services/calendar/pod-calendar/src/client.ts index fa222927d8b..530bafab4e1 100644 --- a/services/calendar/pod-calendar/src/client.ts +++ b/services/calendar/pod-calendar/src/client.ts @@ -13,10 +13,13 @@ // limitations under the License. // +import client from '@hcengineering/client' import { type Client } from '@hcengineering/core' +import { setMetadata } from '@hcengineering/platform' import { createClient, getTransactorEndpoint } from '@hcengineering/server-client' export async function getClient (token: string): Promise { const endpoint = await getTransactorEndpoint(token) + setMetadata(client.metadata.FilterModel, 'client') return await createClient(endpoint, token) } diff --git a/services/github/pod-github/src/client.ts b/services/github/pod-github/src/client.ts index 02813d1dad0..ed5a64c1483 100644 --- a/services/github/pod-github/src/client.ts +++ b/services/github/pod-github/src/client.ts @@ -37,6 +37,7 @@ export async function createPlatformClient ( { mode: 'github' } ) setMetadata(client.metadata.ConnectionTimeout, timeout) + setMetadata(client.metadata.FilterModel, 'client') const endpoint = await getTransactorEndpoint(token) const connection = await ( await clientResources() From 11b6aa4820d51a8072db4cefd618075794378fce Mon Sep 17 00:00:00 2001 From: Alexey Zinoviev Date: Mon, 21 Oct 2024 11:00:14 +0400 Subject: [PATCH 06/21] uberf-8512: fix acc memory leak (#7002) Signed-off-by: Alexey Zinoviev --- server/account/src/collections/mongo.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/account/src/collections/mongo.ts b/server/account/src/collections/mongo.ts index a3ace061d16..e1c2e490d9a 100644 --- a/server/account/src/collections/mongo.ts +++ b/server/account/src/collections/mongo.ts @@ -107,7 +107,7 @@ export class MongoDbCollection> implements DbColle cursor.limit(limit) } - return await this.collection.find(query as Filter).toArray() + return await cursor.toArray() } async findOne (query: Query): Promise { From 7190bacc0ff1dbe50fd4fe86a88524967258f680 Mon Sep 17 00:00:00 2001 From: Artem Kheystver Date: Mon, 21 Oct 2024 09:04:37 +0200 Subject: [PATCH 07/21] Update posthog config to disable unneeded stuff (#6986) --- dev/prod/src/analytics/posthog.ts | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/dev/prod/src/analytics/posthog.ts b/dev/prod/src/analytics/posthog.ts index 90fa34d5ec8..59a837aa347 100644 --- a/dev/prod/src/analytics/posthog.ts +++ b/dev/prod/src/analytics/posthog.ts @@ -4,7 +4,12 @@ import posthog from 'posthog-js' export class PosthogAnalyticProvider implements AnalyticProvider { init(config: Record): boolean { if 
(config.POSTHOG_API_KEY !== undefined && config.POSTHOG_API_KEY !== '' && config.POSTHOG_HOST !== null) { - posthog.init(config.POSTHOG_API_KEY, { api_host: config.POSTHOG_HOST }) + posthog.init(config.POSTHOG_API_KEY, { + api_host: config.POSTHOG_HOST, + autocapture: false, + capture_pageview: false, + capture_pageleave: false + }) return true } return false From 996978ef263e3380b41108e7b6948a7a9db4d5e5 Mon Sep 17 00:00:00 2001 From: Andrey Sobolev Date: Mon, 21 Oct 2024 14:06:24 +0700 Subject: [PATCH 08/21] Rollback github service to node:20 (#7003) Signed-off-by: Andrey Sobolev --- services/github/pod-github/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/github/pod-github/Dockerfile b/services/github/pod-github/Dockerfile index 30f1522c4df..5f7feb3e06a 100644 --- a/services/github/pod-github/Dockerfile +++ b/services/github/pod-github/Dockerfile @@ -1,5 +1,5 @@ -FROM node:22 +FROM node:20 WORKDIR /usr/src/app From f06a6e2bc8f36afb230affa8fbd35e05fcabd506 Mon Sep 17 00:00:00 2001 From: Alexander Onnikov Date: Mon, 21 Oct 2024 15:02:22 +0700 Subject: [PATCH 09/21] fix: another attempt to migrate empty document fields (#7004) Signed-off-by: Alexander Onnikov --- dev/tool/src/clean.ts | 4 +- dev/tool/src/markup.ts | 6 +- models/controlled-documents/src/migration.ts | 4 +- models/core/src/migration.ts | 2 +- models/document/src/migration.ts | 78 ++++++++++--------- .../activity-resources/src/references.ts | 4 +- .../src/utils/__tests__/ydoc.test.ts | 3 + .../src/utils/collaborative-doc.ts | 14 ++-- server/collaborator/src/storage/platform.ts | 4 +- server/tool/src/initializer.ts | 2 +- 10 files changed, 65 insertions(+), 56 deletions(-) diff --git a/dev/tool/src/clean.ts b/dev/tool/src/clean.ts index 1003a81635e..14a8d357b41 100644 --- a/dev/tool/src/clean.ts +++ b/dev/tool/src/clean.ts @@ -1270,7 +1270,7 @@ async function updateYDoc ( doc: RelatedDocument ): Promise { try { - const ydoc = await loadCollaborativeDoc(storage, workspaceId, _id, ctx) + const ydoc = await loadCollaborativeDoc(ctx, storage, workspaceId, _id) if (ydoc === undefined) { ctx.error('document content not found', { document: contentDoc._id }) return @@ -1284,7 +1284,7 @@ async function updateYDoc ( }) if (updatedYDoc !== undefined) { - await saveCollaborativeDoc(storage, workspaceId, _id, updatedYDoc, ctx) + await saveCollaborativeDoc(ctx, storage, workspaceId, _id, updatedYDoc) } } catch { // do nothing, the collaborative doc does not sem to exist yet diff --git a/dev/tool/src/markup.ts b/dev/tool/src/markup.ts index 4783bd6d208..c898757a541 100644 --- a/dev/tool/src/markup.ts +++ b/dev/tool/src/markup.ts @@ -199,7 +199,7 @@ async function processMigrateMarkupFor ( if (blob === undefined) { try { const ydoc = markupToYDoc(value, attribute.name) - await saveCollaborativeDoc(storageAdapter, workspaceId, collaborativeDoc, ydoc, ctx) + await saveCollaborativeDoc(ctx, storageAdapter, workspaceId, collaborativeDoc, ydoc) } catch (err) { console.error('failed to process document', doc._class, doc._id, err) } @@ -298,7 +298,7 @@ export async function restoreLostMarkup ( console.log(doc._class, doc._id, attr.name, markup) if (command === 'restore') { const ydoc = markupToYDoc(markup, attr.name) - await saveCollaborativeDoc(storageAdapter, workspaceId, value, ydoc, ctx) + await saveCollaborativeDoc(ctx, storageAdapter, workspaceId, value, ydoc) } restored = true break @@ -329,7 +329,7 @@ export async function restoreLostMarkup ( console.log(doc._class, doc._id, attr.name, markup) if 
(command === 'restore') { const ydoc = markupToYDoc(markup, attr.name) - await saveCollaborativeDoc(storageAdapter, workspaceId, value, ydoc, ctx) + await saveCollaborativeDoc(ctx, storageAdapter, workspaceId, value, ydoc) } } } diff --git a/models/controlled-documents/src/migration.ts b/models/controlled-documents/src/migration.ts index 0672f170cb6..0135c160ed6 100644 --- a/models/controlled-documents/src/migration.ts +++ b/models/controlled-documents/src/migration.ts @@ -292,7 +292,7 @@ async function migrateDocSections (client: MigrationClient): Promise { // Migrate sections headers + content try { - const ydoc = await loadCollaborativeDoc(storage, client.workspaceId, document.content, ctx) + const ydoc = await loadCollaborativeDoc(ctx, storage, client.workspaceId, document.content) if (ydoc === undefined) { ctx.error('collaborative document content not found', { document: document.title }) continue @@ -334,7 +334,7 @@ async function migrateDocSections (client: MigrationClient): Promise { } }) - await saveCollaborativeDoc(storage, client.workspaceId, document.content, ydoc, ctx) + await saveCollaborativeDoc(ctx, storage, client.workspaceId, document.content, ydoc) } catch (err) { ctx.error('error collaborative document content migration', { error: err, document: document.title }) } diff --git a/models/core/src/migration.ts b/models/core/src/migration.ts index 5645eec853d..29e7311bf00 100644 --- a/models/core/src/migration.ts +++ b/models/core/src/migration.ts @@ -229,7 +229,7 @@ async function processMigrateContentFor ( if (blob === undefined) { try { const ydoc = markupToYDoc(value, attribute.name) - await saveCollaborativeDoc(storageAdapter, client.workspaceId, collaborativeDoc, ydoc, ctx) + await saveCollaborativeDoc(ctx, storageAdapter, client.workspaceId, collaborativeDoc, ydoc) } catch (err) { console.error('failed to process document', doc._class, doc._id, err) } diff --git a/models/document/src/migration.ts b/models/document/src/migration.ts index 39792b4b563..4abfd77e502 100644 --- a/models/document/src/migration.ts +++ b/models/document/src/migration.ts @@ -100,36 +100,6 @@ async function migrateTeamspacesMixins (client: MigrationClient): Promise ) } -async function migrateContentField (client: MigrationClient): Promise { - const ctx = new MeasureMetricsContext('migrate_content_field', {}) - const storage = client.storageAdapter - - const documents = await client.find(DOMAIN_DOCUMENT, { - _class: document.class.Document, - content: { $exists: true } - }) - - for (const document of documents) { - try { - const ydoc = await loadCollaborativeDoc(storage, client.workspaceId, document.content, ctx) - if (ydoc === undefined) { - ctx.error('document content not found', { document: document.title }) - continue - } - - if (!ydoc.share.has('') || ydoc.share.has('content')) { - continue - } - - yDocCopyXmlField(ydoc, '', 'content') - - await saveCollaborativeDoc(storage, client.workspaceId, document.content, ydoc, ctx) - } catch (err) { - ctx.error('error document content migration', { error: err, document: document.title }) - } - } -} - async function migrateRank (client: MigrationClient): Promise { const documents = await client.find( DOMAIN_DOCUMENT, @@ -228,7 +198,7 @@ async function renameFieldsRevert (client: MigrationClient): Promise { if (document.description.includes('%description:')) { try { - const ydoc = await loadCollaborativeDoc(storage, client.workspaceId, document.description, ctx) + const ydoc = await loadCollaborativeDoc(ctx, storage, client.workspaceId, 
document.description) if (ydoc === undefined) { continue } @@ -239,7 +209,7 @@ async function renameFieldsRevert (client: MigrationClient): Promise { yDocCopyXmlField(ydoc, 'description', 'content') - await saveCollaborativeDoc(storage, client.workspaceId, document.description, ydoc, ctx) + await saveCollaborativeDoc(ctx, storage, client.workspaceId, document.description, ydoc) } catch (err) { ctx.error('error document content migration', { error: err, document: document.title }) } @@ -264,6 +234,42 @@ async function renameFieldsRevert (client: MigrationClient): Promise { } } +async function restoreContentField (client: MigrationClient): Promise { + const ctx = new MeasureMetricsContext('restoreContentField', {}) + const storage = client.storageAdapter + + const documents = await client.find(DOMAIN_DOCUMENT, { + _class: document.class.Document, + content: { $exists: true } + }) + + for (const document of documents) { + try { + const ydoc = await loadCollaborativeDoc(ctx, storage, client.workspaceId, document.content) + if (ydoc === undefined) { + ctx.error('document content not found', { document: document.title }) + continue + } + + // ignore if content is already present + if (ydoc.share.has('content') || ydoc.share.has('description')) { + continue + } + + if (ydoc.share.has('')) { + yDocCopyXmlField(ydoc, '', 'content') + if (ydoc.share.has('content')) { + await saveCollaborativeDoc(ctx, storage, client.workspaceId, document.content, ydoc) + } else { + ctx.error('document content still not found', { document: document.title }) + } + } + } catch (err) { + ctx.error('error document content migration', { error: err, document: document.title }) + } + } +} + export const documentOperation: MigrateOperation = { async migrate (client: MigrationClient): Promise { await tryMigrate(client, documentId, [ @@ -279,10 +285,6 @@ export const documentOperation: MigrateOperation = { state: 'migrate-teamspaces-mixins', func: migrateTeamspacesMixins }, - { - state: 'migrateContentField', - func: migrateContentField - }, { state: 'migrateRank', func: migrateRank @@ -300,6 +302,10 @@ export const documentOperation: MigrateOperation = { { state: 'renameFieldsRevert', func: renameFieldsRevert + }, + { + state: 'restoreContentField', + func: restoreContentField } ]) }, diff --git a/server-plugins/activity-resources/src/references.ts b/server-plugins/activity-resources/src/references.ts index f260299f703..34323e191ce 100644 --- a/server-plugins/activity-resources/src/references.ts +++ b/server-plugins/activity-resources/src/references.ts @@ -415,7 +415,7 @@ async function getCreateReferencesTxes ( } else if (attr.type._class === core.class.TypeCollaborativeDoc) { const collaborativeDoc = (createdDoc as any)[attr.name] as CollaborativeDoc try { - const ydoc = await loadCollaborativeDoc(storage, control.workspace, collaborativeDoc, control.ctx) + const ydoc = await loadCollaborativeDoc(ctx, storage, control.workspace, collaborativeDoc) if (ydoc !== undefined) { const attrReferences = getReferencesData( srcDocId, @@ -467,7 +467,7 @@ async function getUpdateReferencesTxes ( hasReferenceAttrs = true try { const collaborativeDoc = (updatedDoc as any)[attr.name] as CollaborativeDoc - const ydoc = await loadCollaborativeDoc(storage, control.workspace, collaborativeDoc, control.ctx) + const ydoc = await loadCollaborativeDoc(ctx, storage, control.workspace, collaborativeDoc) if (ydoc !== undefined) { const attrReferences = getReferencesData( srcDocId, diff --git a/server/collaboration/src/utils/__tests__/ydoc.test.ts 
b/server/collaboration/src/utils/__tests__/ydoc.test.ts index 6e599642b9b..6dc0acaeb47 100644 --- a/server/collaboration/src/utils/__tests__/ydoc.test.ts +++ b/server/collaboration/src/utils/__tests__/ydoc.test.ts @@ -43,10 +43,12 @@ describe('ydoc', () => { const source = ydoc.getXmlFragment('source') source.insertAfter(null, [new YXmlElement('p'), new YXmlText('foo'), new YXmlElement('p')]) + expect(ydoc.share.has('target')).toBeFalsy() yDocCopyXmlField(ydoc, 'source', 'target') const target = ydoc.getXmlFragment('target') + expect(ydoc.share.has('target')).toBeTruthy() expect(target.toJSON()).toEqual(source.toJSON()) }) @@ -61,6 +63,7 @@ describe('ydoc', () => { expect(target.toJSON()).not.toEqual(source.toJSON()) yDocCopyXmlField(ydoc, 'source', 'target') + expect(ydoc.share.has('target')).toBeTruthy() expect(target.toJSON()).toEqual(source.toJSON()) }) }) diff --git a/server/collaboration/src/utils/collaborative-doc.ts b/server/collaboration/src/utils/collaborative-doc.ts index 8ec043493d4..037279491a9 100644 --- a/server/collaboration/src/utils/collaborative-doc.ts +++ b/server/collaboration/src/utils/collaborative-doc.ts @@ -78,10 +78,10 @@ async function loadCollaborativeDocVersion ( /** @public */ export async function loadCollaborativeDoc ( + ctx: MeasureContext, storageAdapter: StorageAdapter, workspace: WorkspaceId, - collaborativeDoc: CollaborativeDoc, - ctx: MeasureContext + collaborativeDoc: CollaborativeDoc ): Promise { const sources = collaborativeDocUnchain(collaborativeDoc) @@ -101,24 +101,24 @@ export async function loadCollaborativeDoc ( /** @public */ export async function saveCollaborativeDoc ( + ctx: MeasureContext, storageAdapter: StorageAdapter, workspace: WorkspaceId, collaborativeDoc: CollaborativeDoc, - ydoc: YDoc, - ctx: MeasureContext + ydoc: YDoc ): Promise { const { documentId, versionId } = collaborativeDocParse(collaborativeDoc) - await saveCollaborativeDocVersion(storageAdapter, workspace, documentId, versionId, ydoc, ctx) + await saveCollaborativeDocVersion(ctx, storageAdapter, workspace, documentId, versionId, ydoc) } /** @public */ export async function saveCollaborativeDocVersion ( + ctx: MeasureContext, storageAdapter: StorageAdapter, workspace: WorkspaceId, documentId: string, versionId: CollaborativeDocVersion, - ydoc: YDoc, - ctx: MeasureContext + ydoc: YDoc ): Promise { await ctx.with('saveCollaborativeDoc', {}, async (ctx) => { if (versionId === 'HEAD') { diff --git a/server/collaborator/src/storage/platform.ts b/server/collaborator/src/storage/platform.ts index 7a69e94758e..49ddea1973b 100644 --- a/server/collaborator/src/storage/platform.ts +++ b/server/collaborator/src/storage/platform.ts @@ -124,7 +124,7 @@ export class PlatformStorageAdapter implements CollabStorageAdapter { return await ctx.with('load-document', {}, async (ctx) => { return await withRetry(ctx, 5, async () => { - return await loadCollaborativeDoc(this.storage, context.workspaceId, collaborativeDoc, ctx) + return await loadCollaborativeDoc(ctx, this.storage, context.workspaceId, collaborativeDoc) }) }) } @@ -139,7 +139,7 @@ export class PlatformStorageAdapter implements CollabStorageAdapter { await ctx.with('save-document', {}, async (ctx) => { await withRetry(ctx, 5, async () => { - await saveCollaborativeDoc(this.storage, context.workspaceId, collaborativeDoc, document, ctx) + await saveCollaborativeDoc(ctx, this.storage, context.workspaceId, collaborativeDoc, document) }) }) } diff --git a/server/tool/src/initializer.ts b/server/tool/src/initializer.ts index 
3b2aa0e263f..e2782444aab 100644 --- a/server/tool/src/initializer.ts +++ b/server/tool/src/initializer.ts @@ -272,7 +272,7 @@ export class WorkspaceInitializer { const json = parseMessageMarkdown(data ?? '', this.imageUrl) const yDoc = jsonToYDocNoSchema(json, field) - await saveCollaborativeDoc(this.storageAdapter, this.wsUrl, collabId, yDoc, this.ctx) + await saveCollaborativeDoc(this.ctx, this.storageAdapter, this.wsUrl, collabId, yDoc) return collabId } From 8edf748d30825c4373aa51371df1866709d1b61b Mon Sep 17 00:00:00 2001 From: Alexey Zinoviev Date: Mon, 21 Oct 2024 16:18:30 +0400 Subject: [PATCH 10/21] Qfix: Extend patch version values range in PG (#7005) Signed-off-by: Alexey Zinoviev --- server/account/src/collections/postgres.ts | 13 ++++++++++++- server/client/src/account.ts | 10 +++++++++- 2 files changed, 21 insertions(+), 2 deletions(-) diff --git a/server/account/src/collections/postgres.ts b/server/account/src/collections/postgres.ts index 09993fc848a..906f4913d15 100644 --- a/server/account/src/collections/postgres.ts +++ b/server/account/src/collections/postgres.ts @@ -531,7 +531,7 @@ export class PostgresAccountDB implements AccountDB { } protected getMigrations (): [string, string][] { - return [this.getV1Migration()] + return [this.getV1Migration(), this.getV2Migration()] } // NOTE: NEVER MODIFY EXISTING MIGRATIONS. IF YOU NEED TO ADJUST THE SCHEMA, ADD A NEW MIGRATION. @@ -627,4 +627,15 @@ export class PostgresAccountDB implements AccountDB { ` ] } + + private getV2Migration (): [string, string] { + return [ + 'account_db_v2_fix_workspace', + ` + + /* ======= WORKSPACE ======= */ + ALTER TABLE workspace ALTER COLUMN "versionPatch" type INT4; + ` + ] + } } diff --git a/server/client/src/account.ts b/server/client/src/account.ts index b73dfe448ae..9daf7c7c361 100644 --- a/server/client/src/account.ts +++ b/server/client/src/account.ts @@ -138,7 +138,15 @@ export function withRetryConnUntilTimeout

<P extends any[], T> (
 export function withRetryConnUntilSuccess<P extends any[], T>
( f: (...params: P) => Promise ): (...params: P) => Promise { - const shouldFail = (err: any): boolean => err?.cause?.code !== 'ECONNRESET' && err?.cause?.code !== 'ECONNREFUSED' + const shouldFail = (err: any): boolean => { + const res = err?.cause?.code !== 'ECONNRESET' && err?.cause?.code !== 'ECONNREFUSED' + + if (res) { + console.error('Failing withRetryConnUntilSuccess with error cause:', err?.cause) + } + + return res + } return withRetry(f, shouldFail) } From 187c489b2c3c19126c909a09d8726fc6bcea06d3 Mon Sep 17 00:00:00 2001 From: Andrey Sobolev Date: Mon, 21 Oct 2024 23:23:26 +0700 Subject: [PATCH 11/21] UBERF-8508: Get rid of Mongo in storage adapter (#6989) Signed-off-by: Andrey Sobolev --- dev/doc-import-tool/src/index.ts | 12 +- dev/docker-compose.yaml | 11 - dev/tool/src/__start.ts | 10 +- dev/tool/src/index.ts | 275 +++++------- dev/tool/src/storage.ts | 82 +--- dev/tool/src/workspace.ts | 23 +- models/controlled-documents/src/migration.ts | 20 +- packages/core/src/classes.ts | 2 - packages/storage/src/index.ts | 17 +- .../src/components/MergePersons.svelte | 47 +- .../components/statistics/MetricsInfo.svelte | 28 +- pods/backup/src/index.ts | 4 +- pods/server/src/__start.ts | 5 +- pods/server/src/server.ts | 30 +- pods/workspace/package.json | 2 +- server/backup-service/src/index.ts | 5 +- server/backup/src/backup.ts | 5 +- server/collaborator/src/config.ts | 9 +- server/collaborator/src/starter.ts | 4 +- server/core/src/adapter.ts | 39 +- server/datalake/src/index.ts | 1 - server/front/src/index.ts | 3 +- server/front/src/starter.ts | 8 +- server/middleware/src/domainTx.ts | 6 +- server/minio/src/index.ts | 4 +- server/mongo/src/index.ts | 1 - server/mongo/src/rawAdapter.ts | 194 -------- server/mongo/src/storage.ts | 23 +- server/s3/src/index.ts | 4 +- server/server-pipeline/src/pipeline.ts | 31 +- server/server-storage/src/aggregator.ts | 416 ------------------ server/server-storage/src/blobStorage.ts | 99 ++--- server/server-storage/src/fallback.ts | 269 +++++++++++ server/server-storage/src/index.ts | 2 +- server/server-storage/src/starter.ts | 9 +- .../src/tests/aggregator.spec.ts | 21 +- .../server-storage/src/tests/memAdapters.ts | 115 +---- server/server/src/starter.ts | 12 +- server/tool/src/index.ts | 19 +- server/workspace-service/src/index.ts | 7 - server/workspace-service/src/ws-operations.ts | 19 +- services/github/pod-github/src/platform.ts | 2 +- services/gmail/pod-gmail/src/main.ts | 2 +- services/love/src/main.ts | 12 +- services/love/src/workspaceClient.ts | 30 +- services/print/pod-print/src/config.ts | 2 - services/print/pod-print/src/main.ts | 2 +- services/print/pod-print/src/server.ts | 6 +- services/sign/pod-sign/src/config.ts | 2 - services/sign/pod-sign/src/main.ts | 4 +- services/sign/pod-sign/src/server.ts | 16 +- .../pod-telegram-bot/src/start.ts | 2 +- services/telegram/pod-telegram/src/main.ts | 2 +- tests/docker-compose.override.yaml | 1 - tests/docker-compose.yaml | 3 - tests/restore-pg.sh | 2 +- tests/restore-workspace.sh | 2 +- 57 files changed, 652 insertions(+), 1331 deletions(-) delete mode 100644 server/mongo/src/rawAdapter.ts delete mode 100644 server/server-storage/src/aggregator.ts create mode 100644 server/server-storage/src/fallback.ts diff --git a/dev/doc-import-tool/src/index.ts b/dev/doc-import-tool/src/index.ts index de88634a563..87f96732bb7 100644 --- a/dev/doc-import-tool/src/index.ts +++ b/dev/doc-import-tool/src/index.ts @@ -54,17 +54,11 @@ export function docImportTool (): void { const uploadUrl = process.env.UPLOAD_URL 
?? '/files' - const mongodbUri = process.env.MONGO_URL - if (mongodbUri === undefined) { - console.log('Please provide mongodb url') - process.exit(1) - } - setMetadata(serverClientPlugin.metadata.Endpoint, accountUrl) setMetadata(serverToken.metadata.Secret, serverSecret) - async function withStorage (mongodbUri: string, f: (storageAdapter: StorageAdapter) => Promise): Promise { - const adapter = buildStorageFromConfig(storageConfigFromEnv(), mongodbUri) + async function withStorage (f: (storageAdapter: StorageAdapter) => Promise): Promise { + const adapter = buildStorageFromConfig(storageConfigFromEnv()) try { await f(adapter) } catch (err: any) { @@ -94,7 +88,7 @@ export function docImportTool (): void { }, space: ${cmd.space}, backend: ${cmd.backend}` ) - await withStorage(mongodbUri, async (storageAdapter) => { + await withStorage(async (storageAdapter) => { const workspaceId = getWorkspaceId(workspace) const config: Config = { diff --git a/dev/docker-compose.yaml b/dev/docker-compose.yaml index c82f014d1d0..3e6f2594849 100644 --- a/dev/docker-compose.yaml +++ b/dev/docker-compose.yaml @@ -106,7 +106,6 @@ services: environment: # - WS_OPERATION=create - SERVER_SECRET=secret - - MONGO_URL=${MONGO_URL} - DB_URL=${MONGO_URL} # - DB_URL=postgresql://postgres:example@postgres:5432 - SES_URL= @@ -133,7 +132,6 @@ services: environment: # - WS_OPERATION=create - SERVER_SECRET=secret - - MONGO_URL=${MONGO_URL} - DB_URL=postgresql://postgres:example@postgres:5432 - SES_URL= - REGION=pg @@ -161,8 +159,6 @@ services: - COLLABORATOR_PORT=3078 - SECRET=secret - ACCOUNTS_URL=http://host.docker.internal:3000 - - MONGO_URL=${MONGO_URL} - - 'MONGO_OPTIONS={"appName":"collaborator","maxPoolSize":2}' - STORAGE_CONFIG=${STORAGE_CONFIG} restart: unless-stopped front: @@ -179,11 +175,8 @@ services: - 8087:8080 - 8088:8080 environment: - - UV_THREADPOOL_SIZE=10 - SERVER_PORT=8080 - SERVER_SECRET=secret - - MONGO_URL=${MONGO_URL} - - 'MONGO_OPTIONS={"appName":"front","maxPoolSize":1}' - ACCOUNTS_URL=http://host.docker.internal:3000 - UPLOAD_URL=/files - ELASTIC_URL=http://host.docker.internal:9200 @@ -298,8 +291,6 @@ services: - 4005:4005 environment: - SECRET=secret - - MONGO_URL=${MONGO_URL} - - 'MONGO_OPTIONS={"appName":"print","maxPoolSize":1}' - STORAGE_CONFIG=${STORAGE_CONFIG} deploy: resources: @@ -317,8 +308,6 @@ services: - ../services/sign/pod-sign/debug/branding.json:/var/cfg/branding.json environment: - SECRET=secret - - MONGO_URL=${MONGO_URL} - - 'MONGO_OPTIONS={"appName":"sign","maxPoolSize":1}' - MINIO_ENDPOINT=minio - MINIO_ACCESS_KEY=minioadmin - ACCOUNTS_URL=http://host.docker.internal:3000 diff --git a/dev/tool/src/__start.ts b/dev/tool/src/__start.ts index c70db903ede..293126ca5cd 100644 --- a/dev/tool/src/__start.ts +++ b/dev/tool/src/__start.ts @@ -72,7 +72,6 @@ addLocation(serverDriveId, () => import('@hcengineering/server-drive-resources') addLocation(serverAiBotId, () => import('@hcengineering/server-ai-bot-resources')) function prepareTools (): { - mongodbUri: string | undefined dbUrl: string txes: Tx[] version: Data @@ -84,4 +83,13 @@ function prepareTools (): { return { ...prepareToolsRaw(builder(enabled, disabled).getTxes()), version: getModelVersion(), migrateOperations } } +export function getMongoDBUrl (): string { + const url = process.env.MONGO_URL + if (url === undefined) { + console.error('please provide mongo DB URL') + process.exit(1) + } + return url +} + devTool(prepareTools) diff --git a/dev/tool/src/index.ts b/dev/tool/src/index.ts index 821bcf97f96..9bc6e84c8a1 
100644 --- a/dev/tool/src/index.ts +++ b/dev/tool/src/index.ts @@ -92,6 +92,7 @@ import { backupDownload } from '@hcengineering/server-backup/src/backup' import type { StorageAdapter, StorageAdapterEx } from '@hcengineering/server-core' import { deepEqual } from 'fast-equals' import { createWriteStream, readFileSync } from 'fs' +import { getMongoDBUrl } from './__start' import { benchmark, benchmarkWorker, @@ -117,7 +118,7 @@ import { moveAccountDbFromMongoToPG, moveFromMongoToPG, moveWorkspaceFromMongoTo import { fixJsonMarkup, migrateMarkup, restoreLostMarkup } from './markup' import { fixMixinForeignAttributes, showMixinForeignAttributes } from './mixin' import { fixAccountEmails, renameAccount } from './renameAccount' -import { moveFiles, showLostFiles, syncFiles } from './storage' +import { moveFiles, showLostFiles } from './storage' const colorConstants = { colorRed: '\u001b[31m', @@ -136,7 +137,6 @@ const colorConstants = { */ export function devTool ( prepareTools: () => { - mongodbUri: string | undefined dbUrl: string txes: Tx[] version: Data @@ -195,8 +195,8 @@ export function devTool ( await shutdown() } - async function withStorage (dbUrl: string, f: (storageAdapter: StorageAdapter) => Promise): Promise { - const adapter = buildStorageFromConfig(storageConfigFromEnv(), dbUrl) + async function withStorage (f: (storageAdapter: StorageAdapter) => Promise): Promise { + const adapter = buildStorageFromConfig(storageConfigFromEnv()) try { await f(adapter) } catch (err: any) { @@ -263,11 +263,12 @@ export function devTool ( }) program - .command('compact-db') + .command('compact-db-mongo') .description('compact all db collections') .option('-w, --workspace ', 'A selected "workspace" only', '') .action(async (cmd: { workspace: string }) => { - const { dbUrl, mongodbUri } = prepareTools() + const { dbUrl } = prepareTools() + const mongodbUri = getMongoDBUrl() await withDatabase(dbUrl, async (db) => { console.log('compacting db ...') let gtotal: number = 0 @@ -508,14 +509,14 @@ export function devTool ( }) program - .command('list-unused-workspaces') + .command('list-unused-workspaces-mongo') .description( 'remove unused workspaces, please pass --remove to really delete them. Without it will only mark them disabled' ) .option('-r|--remove [remove]', 'Force remove', false) .option('-t|--timeout [timeout]', 'Timeout in days', '7') .action(async (cmd: { remove: boolean, disable: boolean, exclude: string, timeout: string }) => { - const { dbUrl, mongodbUri } = prepareTools() + const { dbUrl } = prepareTools() await withDatabase(dbUrl, async (db) => { const workspaces = new Map((await listWorkspacesPure(db)).map((p) => [p._id.toString(), p])) @@ -523,8 +524,9 @@ export function devTool ( const _timeout = parseInt(cmd.timeout) ?? 7 - await withStorage(dbUrl, async (adapter) => { + await withStorage(async (adapter) => { // We need to update workspaces with missing workspaceUrl + const mongodbUri = getMongoDBUrl() const client = getMongoClient(mongodbUri ?? 
dbUrl) const _client = await client.getClient() try { @@ -572,13 +574,14 @@ export function devTool ( }) program - .command('drop-workspace ') + .command('drop-workspace-mongo ') .description('drop workspace') .option('--full [full]', 'Force remove all data', false) .action(async (workspace, cmd: { full: boolean }) => { - const { dbUrl, mongodbUri } = prepareTools() + const { dbUrl } = prepareTools() + const mongodbUri = getMongoDBUrl() - await withStorage(dbUrl, async (storageAdapter) => { + await withStorage(async (storageAdapter) => { await withDatabase(dbUrl, async (db) => { const ws = await getWorkspaceById(db, workspace) if (ws === null) { @@ -601,12 +604,13 @@ export function devTool ( }) program - .command('drop-workspace-by-email ') + .command('drop-workspace-by-email-mongo ') .description('drop workspace') .option('--full [full]', 'Force remove all data', false) .action(async (email, cmd: { full: boolean }) => { - const { dbUrl, mongodbUri } = prepareTools() - await withStorage(dbUrl, async (storageAdapter) => { + const { dbUrl } = prepareTools() + const mongodbUri = getMongoDBUrl() + await withStorage(async (storageAdapter) => { await withDatabase(dbUrl, async (db) => { const client = getMongoClient(mongodbUri ?? dbUrl) const _client = await client.getClient() @@ -638,12 +642,13 @@ export function devTool ( }) program - .command('drop-workspace-last-visit') + .command('drop-workspace-last-visit-mongo') .description('drop old workspaces') .action(async (cmd: any) => { - const { dbUrl, mongodbUri } = prepareTools() + const { dbUrl } = prepareTools() + const mongodbUri = getMongoDBUrl() - await withStorage(dbUrl, async (storageAdapter) => { + await withStorage(async (storageAdapter) => { await withDatabase(dbUrl, async (db) => { const workspacesJSON = await listWorkspacesPure(db) const client = getMongoClient(mongodbUri ?? dbUrl) @@ -706,11 +711,12 @@ export function devTool ( }) }) - program.command('fix-person-accounts').action(async () => { - const { dbUrl, mongodbUri, version } = prepareTools() + program.command('fix-person-accounts-mongo').action(async () => { + const { dbUrl, version } = prepareTools() + const mongodbUri = getMongoDBUrl() await withDatabase(dbUrl, async (db) => { const ws = await listWorkspacesPure(db) - const client = getMongoClient(mongodbUri ?? dbUrl) + const client = getMongoClient(mongodbUri) const _client = await client.getClient() try { for (const w of ws) { @@ -872,8 +878,7 @@ export function devTool ( .command('backup-s3 ') .description('dump workspace transactions and minio resources') .action(async (bucketName: string, dirName: string, workspace: string, cmd) => { - const { dbUrl } = prepareTools() - await withStorage(dbUrl, async (adapter) => { + await withStorage(async (adapter) => { const storage = await createStorageBackupStorage(toolCtx, adapter, getWorkspaceId(bucketName), dirName) const wsid = getWorkspaceId(workspace) const endpoint = await getTransactorEndpoint(generateToken(systemAccountEmail, wsid), 'external') @@ -1048,8 +1053,8 @@ export function devTool ( .command('diff-workspace ') .description('restore workspace transactions and minio resources from previous dump.') .action(async (workspace: string, cmd) => { - const { dbUrl, mongodbUri, txes } = prepareTools() - await diffWorkspace(mongodbUri ?? 
dbUrl, getWorkspaceId(workspace), txes) + const { dbUrl, txes } = prepareTools() + await diffWorkspace(dbUrl, getWorkspaceId(workspace), txes) }) program @@ -1057,8 +1062,8 @@ export function devTool ( .description('clear telegram history') .option('-w, --workspace ', 'target workspace') .action(async (workspace: string, cmd) => { - const { dbUrl, mongodbUri } = prepareTools() - await withStorage(dbUrl, async (adapter) => { + const { dbUrl } = prepareTools() + await withStorage(async (adapter) => { await withDatabase(dbUrl, async (db) => { const telegramDB = process.env.TELEGRAM_DATABASE if (telegramDB === undefined) { @@ -1067,7 +1072,7 @@ export function devTool ( } console.log(`clearing ${workspace} history:`) - await clearTelegramHistory(toolCtx, mongodbUri ?? dbUrl, getWorkspaceId(workspace), telegramDB, adapter) + await clearTelegramHistory(toolCtx, dbUrl, getWorkspaceId(workspace), telegramDB, adapter) }) }) }) @@ -1076,8 +1081,8 @@ export function devTool ( .command('clear-telegram-all-history') .description('clear telegram history') .action(async (cmd) => { - const { dbUrl, mongodbUri } = prepareTools() - await withStorage(dbUrl, async (adapter) => { + const { dbUrl } = prepareTools() + await withStorage(async (adapter) => { await withDatabase(dbUrl, async (db) => { const telegramDB = process.env.TELEGRAM_DATABASE if (telegramDB === undefined) { @@ -1089,7 +1094,7 @@ export function devTool ( for (const w of workspaces) { console.log(`clearing ${w.workspace} history:`) - await clearTelegramHistory(toolCtx, mongodbUri ?? dbUrl, getWorkspaceId(w.workspace), telegramDB, adapter) + await clearTelegramHistory(toolCtx, dbUrl, getWorkspaceId(w.workspace), telegramDB, adapter) } }) }) @@ -1116,18 +1121,17 @@ export function devTool ( .option('--tracker', 'Clean tracker', false) .option('--removedTx', 'Clean removed transactions', false) .action(async (workspace: string, cmd: { recruit: boolean, tracker: boolean, removedTx: boolean }) => { - const { dbUrl, mongodbUri } = prepareTools() - await withStorage(dbUrl, async (adapter) => { + const { dbUrl } = prepareTools() + await withStorage(async (adapter) => { await withDatabase(dbUrl, async (db) => { const wsid = getWorkspaceId(workspace) const endpoint = await getTransactorEndpoint(generateToken(systemAccountEmail, wsid), 'external') - await cleanWorkspace(toolCtx, mongodbUri ?? 
dbUrl, wsid, adapter, getElasticUrl(), endpoint, cmd) + await cleanWorkspace(toolCtx, dbUrl, wsid, adapter, getElasticUrl(), endpoint, cmd) }) }) }) program.command('clean-empty-buckets').action(async (cmd: any) => { - const { dbUrl } = prepareTools() - await withStorage(dbUrl, async (adapter) => { + await withStorage(async (adapter) => { const buckets = await adapter.listBuckets(toolCtx) for (const ws of buckets) { const l = await ws.list() @@ -1145,39 +1149,33 @@ export function devTool ( program .command('upload-file ') .action(async (workspace: string, local: string, remote: string, contentType: string, cmd: any) => { - const { dbUrl } = prepareTools() - await withStorage(dbUrl, async (adapter) => { - const wsId: WorkspaceId = { - name: workspace - } - const token = generateToken(systemAccountEmail, wsId) - const endpoint = await getTransactorEndpoint(token) - const blobClient = new BlobClient(endpoint, token, wsId) - const buffer = readFileSync(local) - await blobClient.upload(toolCtx, remote, buffer.length, contentType, buffer) - }) + const wsId: WorkspaceId = { + name: workspace + } + const token = generateToken(systemAccountEmail, wsId) + const endpoint = await getTransactorEndpoint(token) + const blobClient = new BlobClient(endpoint, token, wsId) + const buffer = readFileSync(local) + await blobClient.upload(toolCtx, remote, buffer.length, contentType, buffer) }) program .command('download-file ') .action(async (workspace: string, remote: string, local: string, cmd: any) => { - const { dbUrl } = prepareTools() - await withStorage(dbUrl, async (adapter) => { - const wsId: WorkspaceId = { - name: workspace + const wsId: WorkspaceId = { + name: workspace + } + const token = generateToken(systemAccountEmail, wsId) + const endpoint = await getTransactorEndpoint(token) + const blobClient = new BlobClient(endpoint, token, wsId) + const wrstream = createWriteStream(local) + await blobClient.writeTo(toolCtx, remote, -1, { + write: (buffer, cb) => { + wrstream.write(buffer, cb) + }, + end: (cb) => { + wrstream.end(cb) } - const token = generateToken(systemAccountEmail, wsId) - const endpoint = await getTransactorEndpoint(token) - const blobClient = new BlobClient(endpoint, token, wsId) - const wrstream = createWriteStream(local) - await blobClient.writeTo(toolCtx, remote, -1, { - write: (buffer, cb) => { - wrstream.write(buffer, cb) - }, - end: (cb) => { - wrstream.end(cb) - } - }) }) }) @@ -1197,14 +1195,14 @@ export function devTool ( const { dbUrl } = prepareTools() await withDatabase(dbUrl, async (db) => { - await withStorage(dbUrl, async (adapter) => { + await withStorage(async (adapter) => { try { const exAdapter = adapter as StorageAdapterEx - if (exAdapter.adapters === undefined || exAdapter.adapters.size < 2) { + if (exAdapter.adapters === undefined || exAdapter.adapters.length < 2) { throw new Error('bad storage config, at least two storage providers are required') } - console.log('moving files to storage provider', exAdapter.defaultAdapter) + console.log('moving files to storage provider', exAdapter.adapters[0].name) let index = 1 const workspaces = await listWorkspacesPure(db) @@ -1237,59 +1235,16 @@ export function devTool ( ) program - .command('sync-files') - .option('-w, --workspace ', 'Selected workspace only', '') - .option('--disabled', 'Include disabled workspaces', false) - .action(async (cmd: { workspace: string, disabled: boolean }) => { - const { dbUrl } = prepareTools() - await withDatabase(dbUrl, async (db) => { - await withStorage(dbUrl, async (adapter) => { - try 
{ - const exAdapter = adapter as StorageAdapterEx - - console.log('syncing files from storage provider') - - let index = 1 - const workspaces = await listWorkspacesPure(db) - workspaces.sort((a, b) => b.lastVisit - a.lastVisit) - - for (const workspace of workspaces) { - if (workspace.disabled === true && !cmd.disabled) { - console.log('ignore disabled workspace', workspace.workspace) - continue - } - - if (cmd.workspace !== '' && workspace.workspace !== cmd.workspace) { - continue - } - - try { - console.log('start', workspace.workspace, index, '/', workspaces.length) - await syncFiles(toolCtx, getWorkspaceId(workspace.workspace), exAdapter) - console.log('done', workspace.workspace) - } catch (err) { - console.warn('failed to sync files', err) - } - - index += 1 - } - } catch (err: any) { - console.error(err) - } - }) - }) - }) - - program - .command('show-lost-files') + .command('show-lost-files-mongo') .option('-w, --workspace ', 'Selected workspace only', '') .option('--disabled', 'Include disabled workspaces', false) .option('--all', 'Show all files', false) .action(async (cmd: { workspace: string, disabled: boolean, all: boolean }) => { - const { dbUrl, mongodbUri } = prepareTools() + const { dbUrl } = prepareTools() await withDatabase(dbUrl, async (db) => { - await withStorage(dbUrl, async (adapter) => { - const client = getMongoClient(mongodbUri ?? dbUrl) + await withStorage(async (adapter) => { + const mongodbUri = getMongoDBUrl() + const client = getMongoClient(mongodbUri) const _client = await client.getClient() try { let index = 1 @@ -1330,7 +1285,7 @@ export function devTool ( program.command('show-lost-markup ').action(async (workspace: string, cmd: any) => { const { dbUrl } = prepareTools() await withDatabase(dbUrl, async (db) => { - await withStorage(dbUrl, async (adapter) => { + await withStorage(async (adapter) => { try { const workspaceId = getWorkspaceId(workspace) const token = generateToken(systemAccountEmail, workspaceId) @@ -1346,7 +1301,7 @@ export function devTool ( program.command('restore-lost-markup ').action(async (workspace: string, cmd: any) => { const { dbUrl } = prepareTools() await withDatabase(dbUrl, async (db) => { - await withStorage(dbUrl, async (adapter) => { + await withStorage(async (adapter) => { try { const workspaceId = getWorkspaceId(workspace) const token = generateToken(systemAccountEmail, workspaceId) @@ -1360,8 +1315,7 @@ export function devTool ( }) program.command('fix-bw-workspace ').action(async (workspace: string) => { - const { dbUrl } = prepareTools() - await withStorage(dbUrl, async (adapter) => { + await withStorage(async (adapter) => { await fixMinioBW(toolCtx, getWorkspaceId(workspace), adapter) }) }) @@ -1410,16 +1364,16 @@ export function devTool ( }) program - .command('mixin-fix-foreign-attributes ') + .command('mixin-fix-foreign-attributes-mongo ') .description('mixin-fix-foreign-attributes') .option('--mixin ', 'Mixin class', '') .option('--property ', 'Property name', '') .action(async (workspace: string, cmd: { mixin: string, property: string }) => { - const { dbUrl, mongodbUri } = prepareTools() + const mongodbUri = getMongoDBUrl() const wsid = getWorkspaceId(workspace) const token = generateToken(systemAccountEmail, wsid) const endpoint = await getTransactorEndpoint(token) - await fixMixinForeignAttributes(mongodbUri ?? 
dbUrl, wsid, endpoint, cmd) + await fixMixinForeignAttributes(mongodbUri, wsid, endpoint, cmd) }) program @@ -1551,36 +1505,36 @@ export function devTool ( }) program - .command('fix-skills ') + .command('fix-skills-mongo ') .description('fix skills for workspace') .action(async (workspace: string, step: string) => { - const { dbUrl, mongodbUri } = prepareTools() + const mongodbUri = getMongoDBUrl() const wsid = getWorkspaceId(workspace) const token = generateToken(systemAccountEmail, wsid) const endpoint = await getTransactorEndpoint(token) - await fixSkills(mongodbUri ?? dbUrl, wsid, endpoint, step) + await fixSkills(mongodbUri, wsid, endpoint, step) }) program - .command('restore-ats-types ') + .command('restore-ats-types-mongo ') .description('Restore recruiting task types for workspace') .action(async (workspace: string) => { - const { dbUrl, mongodbUri } = prepareTools() + const mongodbUri = getMongoDBUrl() console.log('Restoring recruiting task types in workspace ', workspace, '...') const wsid = getWorkspaceId(workspace) const endpoint = await getTransactorEndpoint(generateToken(systemAccountEmail, wsid), 'external') - await restoreRecruitingTaskTypes(mongodbUri ?? dbUrl, wsid, endpoint) + await restoreRecruitingTaskTypes(mongodbUri, wsid, endpoint) }) program - .command('restore-ats-types-2 ') + .command('restore-ats-types-2-mongo ') .description('Restore recruiting task types for workspace 2') .action(async (workspace: string) => { - const { dbUrl, mongodbUri } = prepareTools() + const mongodbUri = getMongoDBUrl() console.log('Restoring recruiting task types in workspace ', workspace, '...') const wsid = getWorkspaceId(workspace) const endpoint = await getTransactorEndpoint(generateToken(systemAccountEmail, wsid), 'external') - await restoreHrTaskTypesFromUpdates(mongodbUri ?? dbUrl, wsid, endpoint) + await restoreHrTaskTypesFromUpdates(mongodbUri, wsid, endpoint) }) program @@ -1591,33 +1545,32 @@ export function devTool ( .requiredOption('--attribute ') .requiredOption('--type ', 'number | string') .requiredOption('--value ') - .requiredOption('--domain ') .action( async ( workspace: string, cmd: { objectId: string, objectClass: string, type: string, attribute: string, value: string, domain: string } ) => { - const { dbUrl, mongodbUri } = prepareTools() const wsid = getWorkspaceId(workspace) const endpoint = await getTransactorEndpoint(generateToken(systemAccountEmail, wsid), 'external') - await updateField(mongodbUri ?? dbUrl, wsid, endpoint, cmd) + await updateField(wsid, endpoint, cmd) } ) program - .command('recreate-elastic-indexes ') + .command('recreate-elastic-indexes-mongo ') .description('reindex workspace to elastic') .action(async (workspace: string) => { - const { dbUrl, mongodbUri } = prepareTools() + const mongodbUri = getMongoDBUrl() const wsid = getWorkspaceId(workspace) - await recreateElastic(mongodbUri ?? 
dbUrl, wsid) + await recreateElastic(mongodbUri, wsid) }) program - .command('recreate-all-elastic-indexes') + .command('recreate-all-elastic-indexes-mongo') .description('reindex elastic') .action(async () => { - const { dbUrl, mongodbUri } = prepareTools() + const { dbUrl } = prepareTools() + const mongodbUri = getMongoDBUrl() await withDatabase(dbUrl, async (db) => { const workspaces = await listWorkspacesRaw(db) @@ -1630,28 +1583,29 @@ export function devTool ( }) program - .command('fix-json-markup ') + .command('fix-json-markup-mongo ') .description('fixes double converted json markup') .action(async (workspace: string) => { - const { dbUrl, mongodbUri } = prepareTools() - await withStorage(dbUrl, async (adapter) => { + const mongodbUri = getMongoDBUrl() + await withStorage(async (adapter) => { const wsid = getWorkspaceId(workspace) const endpoint = await getTransactorEndpoint(generateToken(systemAccountEmail, wsid), 'external') - await fixJsonMarkup(toolCtx, mongodbUri ?? dbUrl, adapter, wsid, endpoint) + await fixJsonMarkup(toolCtx, mongodbUri, adapter, wsid, endpoint) }) }) program - .command('migrate-markup') + .command('migrate-markup-mongo') .description('migrates collaborative markup to storage') .option('-w, --workspace ', 'Selected workspace only', '') .option('-c, --concurrency ', 'Number of documents being processed concurrently', '10') .action(async (cmd: { workspace: string, concurrency: string }) => { - const { dbUrl, mongodbUri, txes } = prepareTools() + const { dbUrl, txes } = prepareTools() + const mongodbUri = getMongoDBUrl() await withDatabase(dbUrl, async (db) => { - await withStorage(dbUrl, async (adapter) => { + await withStorage(async (adapter) => { const workspaces = await listWorkspacesPure(db) - const client = getMongoClient(mongodbUri ?? dbUrl) + const client = getMongoClient(mongodbUri) const _client = await client.getClient() let index = 0 try { @@ -1671,7 +1625,7 @@ export function devTool ( registerServerPlugins() registerStringLoaders() - const { pipeline } = await getServerPipeline(toolCtx, txes, mongodbUri ?? dbUrl, dbUrl, wsUrl) + const { pipeline } = await getServerPipeline(toolCtx, txes, dbUrl, wsUrl) await migrateMarkup(toolCtx, adapter, wsId, _client, pipeline, parseInt(cmd.concurrency)) @@ -1686,20 +1640,18 @@ export function devTool ( }) program - .command('remove-duplicates-ids ') + .command('remove-duplicates-ids-mongo ') .description('remove duplicates ids for futue migration') .action(async (workspaces: string) => { - const { dbUrl, mongodbUri } = prepareTools() - await withStorage(dbUrl, async (adapter) => { - await removeDuplicateIds(toolCtx, mongodbUri ?? 
dbUrl, adapter, accountsUrl, workspaces) + const mongodbUri = getMongoDBUrl() + await withStorage(async (adapter) => { + await removeDuplicateIds(toolCtx, mongodbUri, adapter, accountsUrl, workspaces) }) }) program.command('move-to-pg ').action(async (region: string) => { - const { dbUrl, mongodbUri } = prepareTools() - if (mongodbUri === undefined) { - throw new Error('mongodbUri is not set') - } + const { dbUrl } = prepareTools() + const mongodbUri = getMongoDBUrl() await withDatabase(mongodbUri, async (db) => { const workspaces = await listWorkspacesRaw(db) @@ -1715,10 +1667,8 @@ export function devTool ( }) program.command('move-workspace-to-pg ').action(async (workspace: string, region: string) => { - const { dbUrl, mongodbUri } = prepareTools() - if (mongodbUri === undefined) { - throw new Error('mongodbUri is not set') - } + const { dbUrl } = prepareTools() + const mongodbUri = getMongoDBUrl() await withDatabase(mongodbUri, async (db) => { const workspaceInfo = await getWorkspaceById(db, workspace) @@ -1733,11 +1683,8 @@ export function devTool ( }) program.command('move-account-db-to-pg').action(async () => { - const { dbUrl, mongodbUri } = prepareTools() - - if (mongodbUri === undefined) { - throw new Error('MONGO_URL is not set') - } + const { dbUrl } = prepareTools() + const mongodbUri = getMongoDBUrl() if (mongodbUri === dbUrl) { throw new Error('MONGO_URL and DB_URL are the same') diff --git a/dev/tool/src/storage.ts b/dev/tool/src/storage.ts index a1ec1a77192..d13f254c123 100644 --- a/dev/tool/src/storage.ts +++ b/dev/tool/src/storage.ts @@ -16,7 +16,12 @@ import { type Attachment } from '@hcengineering/attachment' import { type Blob, type MeasureContext, type Ref, type WorkspaceId, RateLimiter } from '@hcengineering/core' import { DOMAIN_ATTACHMENT } from '@hcengineering/model-attachment' -import { type ListBlobResult, type StorageAdapter, type StorageAdapterEx } from '@hcengineering/server-core' +import { + type ListBlobResult, + type StorageAdapter, + type StorageAdapterEx, + type UploadedObjectInfo +} from '@hcengineering/server-core' import { type Db } from 'mongodb' import { PassThrough } from 'stream' @@ -25,54 +30,6 @@ export interface MoveFilesParams { move: boolean } -export async function syncFiles ( - ctx: MeasureContext, - workspaceId: WorkspaceId, - exAdapter: StorageAdapterEx -): Promise { - if (exAdapter.adapters === undefined) return - - for (const [name, adapter] of [...exAdapter.adapters.entries()].reverse()) { - await adapter.make(ctx, workspaceId) - - await retryOnFailure(ctx, 5, async () => { - let time = Date.now() - let count = 0 - - const iterator = await adapter.listStream(ctx, workspaceId) - try { - while (true) { - const dataBulk = await iterator.next() - if (dataBulk.length === 0) break - - for (const data of dataBulk) { - const blob = await exAdapter.stat(ctx, workspaceId, data._id) - if (blob !== undefined) { - if (blob.provider !== name && name === exAdapter.defaultAdapter) { - await exAdapter.syncBlobFromStorage(ctx, workspaceId, data._id, exAdapter.defaultAdapter) - } - continue - } - - await exAdapter.syncBlobFromStorage(ctx, workspaceId, data._id, name) - - count += 1 - if (count % 100 === 0) { - const duration = Date.now() - time - time = Date.now() - - console.log('...processed', count, Math.round(duration / 1000) + 's') - } - } - } - console.log('processed', count) - } finally { - await iterator.close() - } - }) - } -} - export async function moveFiles ( ctx: MeasureContext, workspaceId: WorkspaceId, @@ -81,15 +38,13 @@ export async 
function moveFiles ( ): Promise { if (exAdapter.adapters === undefined) return - const target = exAdapter.adapters.get(exAdapter.defaultAdapter) + const target = exAdapter.adapters[0].adapter if (target === undefined) return // We assume that the adapter moves all new files to the default adapter await target.make(ctx, workspaceId) - for (const [name, adapter] of exAdapter.adapters.entries()) { - if (name === exAdapter.defaultAdapter) continue - + for (const { name, adapter } of exAdapter.adapters.slice(1).reverse()) { console.log('moving from', name, 'limit', 'concurrency', params.concurrency) // we attempt retry the whole process in case of failure @@ -192,14 +147,9 @@ async function processAdapter ( } for (const data of dataBulk) { - let targetBlob: Blob | ListBlobResult | undefined = targetBlobs.get(data._id) + const targetBlob: Blob | ListBlobResult | undefined = targetBlobs.get(data._id) if (targetBlob !== undefined) { console.log('Target blob already exists', targetBlob._id) - - const aggrBlob = await exAdapter.stat(ctx, workspaceId, data._id) - if (aggrBlob === undefined || aggrBlob?.provider !== targetBlob.provider) { - targetBlob = await exAdapter.syncBlobFromStorage(ctx, workspaceId, targetBlob._id, exAdapter.defaultAdapter) - } // We could safely delete source blob toRemove.push(data._id) } @@ -211,15 +161,13 @@ async function processAdapter ( console.error('blob not found', data._id) continue } - targetBlob = await rateLimiter.exec(async () => { + const info = await rateLimiter.exec(async () => { try { const result = await retryOnFailure( ctx, 5, async () => { - await processFile(ctx, source, target, workspaceId, sourceBlob) - // We need to sync and update aggregator table for now. - return await exAdapter.syncBlobFromStorage(ctx, workspaceId, sourceBlob._id, exAdapter.defaultAdapter) + return await processFile(ctx, source, target, workspaceId, sourceBlob) }, 50 ) @@ -232,8 +180,8 @@ async function processAdapter ( } }) - if (targetBlob !== undefined) { - // We could safely delete source blob + // We could safely delete source blob + if (info !== undefined) { toRemove.push(sourceBlob._id) } processedBytes += sourceBlob.size @@ -266,14 +214,14 @@ async function processFile ( target: Pick, workspaceId: WorkspaceId, blob: Blob -): Promise { +): Promise { const readable = await source.get(ctx, workspaceId, blob._id) try { readable.on('end', () => { readable.destroy() }) const stream = readable.pipe(new PassThrough()) - await target.put(ctx, workspaceId, blob._id, stream, blob.contentType, blob.size) + return await target.put(ctx, workspaceId, blob._id, stream, blob.contentType, blob.size) } finally { readable.destroy() } diff --git a/dev/tool/src/workspace.ts b/dev/tool/src/workspace.ts index b6ebc306187..fe0d9d98af1 100644 --- a/dev/tool/src/workspace.ts +++ b/dev/tool/src/workspace.ts @@ -17,6 +17,7 @@ import contact from '@hcengineering/contact' import core, { type BackupClient, + type Class, type Client as CoreClient, type Doc, DOMAIN_DOC_INDEX_STATE, @@ -72,7 +73,6 @@ export async function diffWorkspace (mongoUrl: string, workspace: WorkspaceId, r } export async function updateField ( - mongoUrl: string, workspaceId: WorkspaceId, transactorUrl: string, cmd: { objectId: string, objectClass: string, type: string, attribute: string, value: string, domain: string } @@ -80,19 +80,18 @@ export async function updateField ( const connection = (await connect(transactorUrl, workspaceId, undefined, { mode: 'backup' })) as unknown as CoreClient & BackupClient - const client = 
getMongoClient(mongoUrl) - let valueToPut: string | number = cmd.value - if (cmd.type === 'number') valueToPut = parseFloat(valueToPut) + try { - const _client = await client.getClient() - try { - const db = getWorkspaceMongoDB(_client, workspaceId) - await db - .collection(cmd.domain) - .updateOne({ _id: cmd.objectId as Ref }, { $set: { [cmd.attribute]: valueToPut } }) - } finally { - client.close() + const doc = await connection.findOne(cmd.objectClass as Ref>, { _id: cmd.objectId as Ref }) + if (doc === undefined) { + console.error('Document not found') + process.exit(1) } + let valueToPut: string | number = cmd.value + if (cmd.type === 'number') valueToPut = parseFloat(valueToPut) + ;(doc as any)[cmd.attribute] = valueToPut + + await connection.upload(connection.getHierarchy().getDomain(doc?._class), [doc]) } finally { await connection.close() } diff --git a/models/controlled-documents/src/migration.ts b/models/controlled-documents/src/migration.ts index 0135c160ed6..d7119309669 100644 --- a/models/controlled-documents/src/migration.ts +++ b/models/controlled-documents/src/migration.ts @@ -210,17 +210,17 @@ async function createDocumentCategories (tx: TxOperations): Promise { { code: 'CM', title: 'Client Management' } ] - await Promise.all( - categories.map((c) => - createOrUpdate( - tx, - documents.class.DocumentCategory, - documents.space.QualityDocuments, - { ...c, attachments: 0 }, - ((documents.category.DOC as string) + ' - ' + c.code) as Ref - ) + const ops = tx.apply() + for (const c of categories) { + await createOrUpdate( + ops, + documents.class.DocumentCategory, + documents.space.QualityDocuments, + { ...c, attachments: 0 }, + ((documents.category.DOC as string) + ' - ' + c.code) as Ref ) - ) + } + await ops.commit() } async function createTagCategories (tx: TxOperations): Promise { diff --git a/packages/core/src/classes.ts b/packages/core/src/classes.ts index 064205df7ce..c64aa82119f 100644 --- a/packages/core/src/classes.ts +++ b/packages/core/src/classes.ts @@ -551,8 +551,6 @@ export interface Blob extends Doc { // Provider provider: string // A provider specific id - storageId: string - // A content type for blob contentType: string // A etag for blob etag: string diff --git a/packages/storage/src/index.ts b/packages/storage/src/index.ts index 4390e23f75a..ad178582626 100644 --- a/packages/storage/src/index.ts +++ b/packages/storage/src/index.ts @@ -69,16 +69,13 @@ export interface StorageAdapter { getUrl: (ctx: MeasureContext, workspaceId: WorkspaceId, objectName: string) => Promise } -export interface StorageAdapterEx extends StorageAdapter { - defaultAdapter: string - adapters?: Map +export interface NamedStorageAdapter { + name: string + adapter: StorageAdapter +} - syncBlobFromStorage: ( - ctx: MeasureContext, - workspaceId: WorkspaceId, - objectName: string, - provider?: string - ) => Promise +export interface StorageAdapterEx extends StorageAdapter { + adapters?: NamedStorageAdapter[] find: (ctx: MeasureContext, workspaceId: WorkspaceId) => StorageIterator } @@ -187,7 +184,7 @@ export async function removeAllObjects ( break } for (const obj of objs) { - bulk.push(obj.storageId) + bulk.push(obj._id) if (bulk.length > 50) { await storage.remove(ctx, workspaceId, bulk) bulk = [] diff --git a/plugins/contact-resources/src/components/MergePersons.svelte b/plugins/contact-resources/src/components/MergePersons.svelte index 3a550b994d5..a2b0ef13e5c 100644 --- a/plugins/contact-resources/src/components/MergePersons.svelte +++ 
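
A note on the createDocumentCategories change above: the Promise.all of independent createOrUpdate calls becomes one staged batch via tx.apply()/ops.commit(). A hedged sketch of the general pattern, with the item and callback types being illustrative stand-ins:

import type { TxOperations } from '@hcengineering/core'

// Stage N creates on one apply() bundle and commit once, instead of firing
// N racing transactions through Promise.all. Sketch only; the migration
// above inlines this directly.
async function createInBatch<T> (
  tx: TxOperations,
  items: T[],
  create: (ops: TxOperations, item: T) => Promise<void>
): Promise<void> {
  const ops = tx.apply()
  for (const item of items) {
    await create(ops, item)
  }
  await ops.commit()
}
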
b/plugins/contact-resources/src/components/MergePersons.svelte @@ -13,6 +13,7 @@ // limitations under the License. --> +{#if level === 0} +

+  [Svelte markup lost in extraction]
+{/if}
-  [markup lost in extraction]
+  [markup lost in extraction]
 {/each}
 {#each Object.entries(metrics.params) as [k, v], i}
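
The hunk that follows replaces the inline comparator (average time per call, descending) with a getSorted(v, sortingOrder) helper. A hedged sketch of what such a helper can look like; the Metrics shape and SortingOrder values here are assumptions, and only the default branch mirrors the removed comparator:

interface Metrics {
  value: number
  operations: number
}

type SortingOrder = 'avg' | 'ops' | 'total'

function getSorted (v: Record<string, Metrics>, order: SortingOrder): Array<[string, Metrics]> {
  // +1 guards against division by zero for never-invoked entries
  const avg = (m: Metrics): number => m.value / (m.operations + 1)
  return Object.entries(v).sort((a, b) => {
    switch (order) {
      case 'ops':
        return b[1].operations - a[1].operations
      case 'total':
        return b[1].value - a[1].value
      default:
        // same ordering as the removed inline comparator
        return avg(b[1]) - avg(a[1])
    }
  })
}
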
- {#each Object.entries(v).sort((a, b) => b[1].value / (b[1].operations + 1) - a[1].value / (a[1].operations + 1)) as [kk, vv]} + {#each getSorted(v, sortingOrder) as [kk, vv]} {@const childExpandable = vv.topResult !== undefined && vv.topResult.length > 0 && diff --git a/pods/backup/src/index.ts b/pods/backup/src/index.ts index 4aa4be81bb7..b19ca41f498 100644 --- a/pods/backup/src/index.ts +++ b/pods/backup/src/index.ts @@ -67,8 +67,8 @@ startBackup( }) return factory }, - (ctx, dbUrls, workspace, branding, externalStorage) => { - return getConfig(ctx, dbUrls, workspace, branding, ctx, { + (ctx, dbUrl, workspace, branding, externalStorage) => { + return getConfig(ctx, dbUrl, workspace, branding, ctx, { externalStorage, fullTextUrl: '', indexParallel: 0, diff --git a/pods/server/src/__start.ts b/pods/server/src/__start.ts index 916e6977f2a..df4dbedba7a 100644 --- a/pods/server/src/__start.ts +++ b/pods/server/src/__start.ts @@ -59,7 +59,7 @@ setMetadata(serverCore.metadata.ElasticIndexVersion, 'v1') setMetadata(serverTelegram.metadata.BotUrl, process.env.TELEGRAM_BOT_URL) setMetadata(serverAiBot.metadata.SupportWorkspaceId, process.env.SUPPORT_WORKSPACE) -const shutdown = start(config.url, { +const shutdown = start(config.dbUrl, { fullTextUrl: config.elasticUrl, storageConfig, rekoniUrl: config.rekoniUrl, @@ -73,7 +73,8 @@ const shutdown = start(config.url, { profiling: { start: profileStart, stop: profileStop - } + }, + mongoUrl: config.mongoUrl }) const close = (): void => { diff --git a/pods/server/src/server.ts b/pods/server/src/server.ts index b708ce9e7b0..97fb170a093 100644 --- a/pods/server/src/server.ts +++ b/pods/server/src/server.ts @@ -35,7 +35,7 @@ registerStringLoaders() * @public */ export function start ( - dbUrls: string, + dbUrl: string, opt: { fullTextUrl: string storageConfig: StorageConfiguration @@ -55,30 +55,32 @@ export function start ( start: () => void stop: () => Promise } + + mongoUrl?: string } ): () => Promise { const metrics = getMetricsContext() registerServerPlugins() - const [mainDbUrl, rawDbUrl] = dbUrls.split(';') - - const externalStorage = buildStorageFromConfig(opt.storageConfig, rawDbUrl ?? mainDbUrl) + const externalStorage = buildStorageFromConfig(opt.storageConfig) const pipelineFactory = createServerPipeline( metrics, - dbUrls, + dbUrl, model, - { ...opt, externalStorage, adapterSecurity: rawDbUrl !== undefined }, - { - serviceAdapters: { - [serverAiBotId]: { - factory: createAIBotAdapter, - db: '%ai-bot', - url: rawDbUrl ?? mainDbUrl + { ...opt, externalStorage, adapterSecurity: dbUrl.startsWith('postgresql') }, + opt.mongoUrl !== undefined + ? { + serviceAdapters: { + [serverAiBotId]: { + factory: createAIBotAdapter, + db: '%ai-bot', + url: opt.mongoUrl + } + } } - } - } + : {} ) const sessionFactory = ( token: Token, diff --git a/pods/workspace/package.json b/pods/workspace/package.json index 29ed5ae98ae..ad935f2b427 100644 --- a/pods/workspace/package.json +++ b/pods/workspace/package.json @@ -20,7 +20,7 @@ "docker:abuild": "docker build -t hardcoreeng/workspace . 
--platform=linux/arm64 && ../../common/scripts/docker_tag_push.sh hardcoreeng/workspace", "docker:staging": "../../common/scripts/docker_tag.sh hardcoreeng/workspace staging", "docker:push": "../../common/scripts/docker_tag.sh hardcoreeng/workspace", - "run-local": "cross-env DB_URL=mongodb://localhost:27017 MONGO_URL=mongodb://localhost:27017 MINIO_ACCESS_KEY=minioadmi MINIO_SECRET_KEY=minioadmin MINIO_ENDPOINT=localhost SERVER_SECRET='secret' TRANSACTOR_URL=ws://localhost:3333 ts-node src/__start.ts", + "run-local": "cross-env DB_URL=mongodb://localhost:27017 MINIO_ACCESS_KEY=minioadmi MINIO_SECRET_KEY=minioadmin MINIO_ENDPOINT=localhost SERVER_SECRET='secret' TRANSACTOR_URL=ws://localhost:3333 ts-node src/__start.ts", "format": "format src", "test": "jest --passWithNoTests --silent --forceExit", "_phase:build": "compile transpile src", diff --git a/server/backup-service/src/index.ts b/server/backup-service/src/index.ts index 2dbdd70bcaf..08a876a0296 100644 --- a/server/backup-service/src/index.ts +++ b/server/backup-service/src/index.ts @@ -27,7 +27,7 @@ export function startBackup ( pipelineFactoryFactory: (mongoUrl: string, storage: StorageAdapter) => PipelineFactory, getConfig: ( ctx: MeasureContext, - dbUrls: string, + dbUrl: string, workspace: WorkspaceIdWithUrl, branding: Branding | null, externalStorage: StorageAdapter @@ -38,13 +38,12 @@ export function startBackup ( setMetadata(serverClientPlugin.metadata.UserAgent, config.ServiceID) const mainDbUrl = config.DbURL - const rawDbUrl = config.MongoURL const backupStorageConfig = storageConfigFromEnv(config.Storage) const workspaceStorageConfig = storageConfigFromEnv(config.WorkspaceStorage) const storageAdapter = createStorageFromConfig(backupStorageConfig.storages[0]) - const workspaceStorageAdapter = buildStorageFromConfig(workspaceStorageConfig, rawDbUrl ?? mainDbUrl) + const workspaceStorageAdapter = buildStorageFromConfig(workspaceStorageConfig) const pipelineFactory = pipelineFactoryFactory(mainDbUrl, workspaceStorageAdapter) diff --git a/server/backup/src/backup.ts b/server/backup/src/backup.ts index c1e4ac133e1..b0b31db15ab 100644 --- a/server/backup/src/backup.ts +++ b/server/backup/src/backup.ts @@ -1856,7 +1856,7 @@ export async function restore ( chunks.push(chunk) }) stream.on('end', () => { - const bf = Buffer.concat(chunks) + const bf = Buffer.concat(chunks as any) const doc = JSON.parse(bf.toString()) as Doc if (doc._class === core.class.Blob || doc._class === 'core:class:BlobData') { const data = migradeBlobData(doc as Blob, changeset.get(doc._id) as string) @@ -2211,7 +2211,7 @@ export async function compactBackup ( chunks.push(chunk) }) stream.on('end', () => { - const bf = Buffer.concat(chunks) + const bf = Buffer.concat(chunks as any) const doc = JSON.parse(bf.toString()) as Doc if (doc._class === core.class.Blob || doc._class === 'core:class:BlobData') { const d = blobs.get(bname) @@ -2314,7 +2314,6 @@ function migradeBlobData (blob: Blob, etag: string): string { if (blob._class === 'core:class:BlobData') { const bd = blob as unknown as BlobData blob.contentType = blob.contentType ?? 
bd.type - blob.storageId = bd._id blob.etag = etag blob._class = core.class.Blob delete (blob as any).type diff --git a/server/collaborator/src/config.ts b/server/collaborator/src/config.ts index 793fe735c26..78f4dc38cbb 100644 --- a/server/collaborator/src/config.ts +++ b/server/collaborator/src/config.ts @@ -25,7 +25,6 @@ export interface Config { Port: number AccountsUrl: string - MongoUrl: string } const envMap: { [key in keyof Config]: string } = { @@ -33,11 +32,10 @@ const envMap: { [key in keyof Config]: string } = { Secret: 'SECRET', Interval: 'INTERVAL', Port: 'COLLABORATOR_PORT', - AccountsUrl: 'ACCOUNTS_URL', - MongoUrl: 'MONGO_URL' + AccountsUrl: 'ACCOUNTS_URL' } -const required: Array = ['Secret', 'ServiceID', 'Port', 'AccountsUrl', 'MongoUrl'] +const required: Array = ['Secret', 'ServiceID', 'Port', 'AccountsUrl'] const config: Config = (() => { const params: Partial = { @@ -45,8 +43,7 @@ const config: Config = (() => { ServiceID: process.env[envMap.ServiceID] ?? 'collaborator-service', Interval: parseInt(process.env[envMap.Interval] ?? '30000'), Port: parseInt(process.env[envMap.Port] ?? '3078'), - AccountsUrl: process.env[envMap.AccountsUrl], - MongoUrl: process.env[envMap.MongoUrl] + AccountsUrl: process.env[envMap.AccountsUrl] } const missingEnv = required.filter((key) => params[key] === undefined).map((key) => envMap[key]) diff --git a/server/collaborator/src/starter.ts b/server/collaborator/src/starter.ts index 56ceb2013fa..a403e16674b 100644 --- a/server/collaborator/src/starter.ts +++ b/server/collaborator/src/starter.ts @@ -18,11 +18,11 @@ import { setMetadata } from '@hcengineering/platform' import serverToken from '@hcengineering/server-token' import type { MeasureContext } from '@hcengineering/core' +import serverClient from '@hcengineering/server-client' import type { StorageConfiguration } from '@hcengineering/server-core' import { buildStorageFromConfig, storageConfigFromEnv } from '@hcengineering/server-storage' import config from './config' import { start } from './server' -import serverClient from '@hcengineering/server-client' export async function startCollaborator (ctx: MeasureContext, onClose?: () => void): Promise { setMetadata(serverToken.metadata.Secret, config.Secret) @@ -30,7 +30,7 @@ export async function startCollaborator (ctx: MeasureContext, onClose?: () => vo setMetadata(serverClient.metadata.Endpoint, config.AccountsUrl) const storageConfig: StorageConfiguration = storageConfigFromEnv() - const storageAdapter = buildStorageFromConfig(storageConfig, config.MongoUrl) + const storageAdapter = buildStorageFromConfig(storageConfig) const shutdown = await start(ctx, config, storageAdapter) diff --git a/server/core/src/adapter.ts b/server/core/src/adapter.ts index 5d25a954b6e..66629f47f1d 100644 --- a/server/core/src/adapter.ts +++ b/server/core/src/adapter.ts @@ -14,16 +14,15 @@ // import { - type LowLevelStorage, type Class, type Doc, type DocumentQuery, type DocumentUpdate, type Domain, type FieldIndexConfig, - type FindOptions, type FindResult, type Hierarchy, + type LowLevelStorage, type MeasureContext, type ModelDb, type Ref, @@ -56,40 +55,6 @@ export interface DomainHelper { ) => Promise } -export interface RawDBAdapterStream { - next: () => Promise - close: () => Promise -} - -/** - * @public - */ -export interface RawDBAdapter { - find: ( - ctx: MeasureContext, - workspace: WorkspaceId, - domain: Domain, - query: DocumentQuery, - options?: Omit, 'projection' | 'lookup' | 'total'> - ) => Promise> - findStream: ( - ctx: MeasureContext, - 
workspace: WorkspaceId, - domain: Domain, - query: DocumentQuery, - options?: Omit, 'projection' | 'lookup' | 'total'> - ) => Promise> - upload: (ctx: MeasureContext, workspace: WorkspaceId, domain: Domain, docs: T[]) => Promise - update: ( - ctx: MeasureContext, - workspace: WorkspaceId, - domain: Domain, - docs: Map, DocumentUpdate> - ) => Promise - clean: (ctx: MeasureContext, workspace: WorkspaceId, domain: Domain, docs: Ref[]) => Promise - close: () => Promise -} - export type DbAdapterHandler = ( domain: Domain, event: 'add' | 'update' | 'delete' | 'read', @@ -102,7 +67,7 @@ export type DbAdapterHandler = ( export interface DbAdapter extends LowLevelStorage { init?: (domains?: string[], excludeDomains?: string[]) => Promise - helper: () => DomainHelperOperations + helper?: () => DomainHelperOperations close: () => Promise findAll: ( diff --git a/server/datalake/src/index.ts b/server/datalake/src/index.ts index 3e58e548e54..f743e97718b 100644 --- a/server/datalake/src/index.ts +++ b/server/datalake/src/index.ts @@ -89,7 +89,6 @@ export class DatalakeService implements StorageAdapter { provider: '', _class: core.class.Blob, _id: objectName as Ref, - storageId: objectName, contentType: result.type, size: result.size ?? 0, etag: result.etag ?? '', diff --git a/server/front/src/index.ts b/server/front/src/index.ts index 0a87fae70cc..422bc31d804 100644 --- a/server/front/src/index.ts +++ b/server/front/src/index.ts @@ -897,8 +897,7 @@ async function getGeneratePreview ( _id: sizeId as Ref, size: dataBuff.size, contentType, - etag: upload.etag, - storageId: sizeId + etag: upload.etag } } catch (err: any) { Analytics.handleError(err) diff --git a/server/front/src/starter.ts b/server/front/src/starter.ts index 8823fb27f16..2231b174bfa 100644 --- a/server/front/src/starter.ts +++ b/server/front/src/starter.ts @@ -24,12 +24,6 @@ import { start } from '.' export function startFront (ctx: MeasureContext, extraConfig?: Record): void { const SERVER_PORT = parseInt(process.env.SERVER_PORT ?? 
'8080') - const url = process.env.MONGO_URL - if (url === undefined) { - console.error('please provide mongodb url') - process.exit(1) - } - const elasticUrl = process.env.ELASTIC_URL if (elasticUrl === undefined) { console.error('please provide elastic url') @@ -37,7 +31,7 @@ export function startFront (ctx: MeasureContext, extraConfig?: Record await adapter.tx(ctx, ...txes), { - txes: txes.length + const r = await ctx.with('adapter-tx', { domain }, (ctx) => adapter.tx(ctx, ...txes), { + txes: txes.length, + classes: Array.from(new Set(txes.map((it) => it.objectClass))), + _classes: Array.from(new Set(txes.map((it) => it._class))) }) if (Array.isArray(r)) { diff --git a/server/minio/src/index.ts b/server/minio/src/index.ts index ba5fdb97863..405b9b2a3d8 100644 --- a/server/minio/src/index.ts +++ b/server/minio/src/index.ts @@ -225,8 +225,7 @@ export class MinioService implements StorageAdapter { provider: this.opt.name, space: core.space.Configuration, modifiedBy: core.account.ConfigUser, - modifiedOn: data.lastModified.getTime(), - storageId: _id + modifiedOn: data.lastModified.getTime() }) } onNext() @@ -279,7 +278,6 @@ export class MinioService implements StorageAdapter { provider: '', _class: core.class.Blob, _id: this.stripPrefix(rootPrefix, objectName) as Ref, - storageId: this.stripPrefix(rootPrefix, objectName), contentType: result.metaData['content-type'], size: result.size, etag: result.etag, diff --git a/server/mongo/src/index.ts b/server/mongo/src/index.ts index 4bd60cdfdc3..de47577bf7b 100644 --- a/server/mongo/src/index.ts +++ b/server/mongo/src/index.ts @@ -14,6 +14,5 @@ // limitations under the License. // -export * from './rawAdapter' export * from './storage' export * from './utils' diff --git a/server/mongo/src/rawAdapter.ts b/server/mongo/src/rawAdapter.ts deleted file mode 100644 index d6d05503e28..00000000000 --- a/server/mongo/src/rawAdapter.ts +++ /dev/null @@ -1,194 +0,0 @@ -import { - SortingOrder, - cutObjectArray, - toFindResult, - type Doc, - type DocumentQuery, - type DocumentUpdate, - type Domain, - type FindOptions, - type FindResult, - type MeasureContext, - type Ref, - type WorkspaceId -} from '@hcengineering/core' -import type { RawDBAdapter, RawDBAdapterStream } from '@hcengineering/server-core' -import { type Document, type Filter, type FindCursor, type MongoClient, type Sort } from 'mongodb' -import { toArray, uploadDocuments } from './storage' -import { getMongoClient, getWorkspaceMongoDB } from './utils' - -export function createRawMongoDBAdapter (url: string): RawDBAdapter { - const client = getMongoClient(url) - let mongoClient: MongoClient | undefined - - const collectSort = (options: FindOptions): Sort | undefined => { - if (options?.sort === undefined) { - return undefined - } - const sort: Sort = {} - let count = 0 - for (const key in options.sort) { - const order = options.sort[key] === SortingOrder.Ascending ? 1 : -1 - sort[key] = order - count++ - } - if (count === 0) { - return undefined - } - return sort - } - - async function getCursor ( - workspace: WorkspaceId, - domain: Domain, - query: DocumentQuery, - options?: Omit, 'projection' | 'lookup' | 'total'> - ): Promise<{ - cursor: FindCursor - total: number - }> { - mongoClient = mongoClient ?? 
(await client.getClient()) - const db = getWorkspaceMongoDB(mongoClient, workspace) - const coll = db.collection(domain) - let cursor = coll.find(query as Filter, { - checkKeys: false - }) - - const total: number = -1 - if (options != null) { - if (options.sort !== undefined) { - const sort = collectSort(options) - if (sort !== undefined) { - cursor = cursor.sort(sort) - } - } - if (options.limit !== undefined || typeof query._id === 'string') { - cursor = cursor.limit(options.limit ?? 1) - } - } - return { cursor, total } - } - - return { - find: async function ( - ctx: MeasureContext, - workspace: WorkspaceId, - domain: Domain, - query: DocumentQuery, - options?: Omit, 'projection' | 'lookup' | 'total'> - ): Promise> { - const { cursor, total } = await ctx.with( - 'get-cursor', - {}, - async () => await getCursor(workspace, domain, query, options) - ) - - // Error in case of timeout - try { - const res = await ctx.with('to-array', {}, async () => await toArray(cursor), { - ...query, - ...options - }) - return toFindResult(res, total) - } catch (e) { - console.error('error during executing cursor in findAll', cutObjectArray(query), options, e) - throw e - } - }, - findStream: async function ( - ctx: MeasureContext, - workspace: WorkspaceId, - domain: Domain, - query: DocumentQuery, - options?: Omit, 'projection' | 'lookup' | 'total'> - ): Promise> { - const { cursor } = await getCursor(workspace, domain, query, options) - - return { - next: async () => { - const result: T[] = [] - const doc = await cursor.next() - if (doc != null) { - result.push(doc) - } - if (cursor.bufferedCount() > 0) { - result.push(...cursor.readBufferedDocuments()) - } - return result - }, - close: async () => { - await cursor.close() - } - } - }, - upload: async (ctx: MeasureContext, workspace, domain, docs) => { - mongoClient = mongoClient ?? (await client.getClient()) - const db = getWorkspaceMongoDB(mongoClient, workspace) - const coll = db.collection(domain) - await uploadDocuments(ctx, docs, coll) - }, - close: async () => { - client.close() - }, - clean: async (ctx, workspace, domain, docs) => { - mongoClient = mongoClient ?? (await client.getClient()) - const db = getWorkspaceMongoDB(mongoClient, workspace) - const coll = db.collection(domain) - await coll.deleteMany({ _id: { $in: docs } }) - }, - update: async ( - ctx: MeasureContext, - workspace: WorkspaceId, - domain: Domain, - operations: Map, DocumentUpdate> - ): Promise => { - await ctx.with('update', { domain }, async () => { - mongoClient = mongoClient ?? (await client.getClient()) - const db = getWorkspaceMongoDB(mongoClient, workspace) - const coll = db.collection(domain) - - // remove old and insert new ones - const ops = Array.from(operations.entries()) - let skip = 500 - while (ops.length > 0) { - const part = ops.splice(0, skip) - try { - await ctx.with('raw-bulk-write', {}, async () => { - await coll.bulkWrite( - part.map((it) => { - const { $unset, ...set } = it[1] as any - if ($unset !== undefined) { - for (const k of Object.keys(set)) { - if ($unset[k] === '') { - // eslint-disable-next-line @typescript-eslint/no-dynamic-delete - delete $unset[k] - } - } - } - return { - updateOne: { - filter: { _id: it[0] }, - update: { - $set: { ...set, '%hash%': null }, - ...($unset !== undefined ? 
{ $unset } : {}) - } - } - } - }), - { - ordered: false - } - ) - }) - } catch (err: any) { - ctx.error('failed on bulk write', { error: err, skip }) - if (skip !== 1) { - ops.push(...part) - skip = 1 // Let's update one by one, to loose only one failed variant. - } - } - } - }) - } - } -} diff --git a/server/mongo/src/storage.ts b/server/mongo/src/storage.ts index 7e4f1cd848f..b3bd76597e7 100644 --- a/server/mongo/src/storage.ts +++ b/server/mongo/src/storage.ts @@ -1300,8 +1300,8 @@ class MongoAdapter extends MongoAdapterBase { const coll = this.db.collection(domain) promises.push( - addOperation(ctx, 'bulk-write', { domain, operations: ops.length }, async (ctx) => { - await ctx.with( + addOperation(ctx, 'bulk-write', { domain, operations: ops.length }, (ctx) => + ctx.with( 'bulk-write', { domain }, async () => { @@ -1318,7 +1318,7 @@ class MongoAdapter extends MongoAdapterBase { operations: ops.length } ) - }) + ) ) } if (domainBulk.findUpdate.size > 0) { @@ -1337,7 +1337,7 @@ class MongoAdapter extends MongoAdapterBase { ctx, 'find-result', {}, - async (ctx) => await coll.find({ _id: { $in: Array.from(domainBulk.findUpdate) } }).toArray(), + (ctx) => coll.find({ _id: { $in: Array.from(domainBulk.findUpdate) } }).toArray(), { domain, _ids: domainBulk.findUpdate.size, queueTime: stTime - st } ) result.push(...docs) @@ -1665,19 +1665,8 @@ class MongoTxAdapter extends MongoAdapterBase implements TxAdapter { @withContext('get-model') async getModel (ctx: MeasureContext): Promise { const txCollection = this.db.collection(DOMAIN_TX) - const cursor = await ctx.with('find', {}, async () => { - const c = txCollection.find( - { objectSpace: core.space.Model }, - { - sort: { - _id: 1, - modifiedOn: 1 - } - } - ) - return c - }) - const model = await ctx.with('to-array', {}, async () => await toArray(cursor)) + const cursor = txCollection.find({ objectSpace: core.space.Model }) + const model = await toArray(cursor) // We need to put all core.account.System transactions first const systemTx: Tx[] = [] const userTx: Tx[] = [] diff --git a/server/s3/src/index.ts b/server/s3/src/index.ts index 9eaacc24402..56450dd64d1 100644 --- a/server/s3/src/index.ts +++ b/server/s3/src/index.ts @@ -263,8 +263,7 @@ export class S3Service implements StorageAdapter { provider: this.opt.name, space: core.space.Configuration, modifiedBy: core.account.ConfigUser, - modifiedOn: data.LastModified?.getTime() ?? 0, - storageId: _id + modifiedOn: data.LastModified?.getTime() ?? 0 }) } } @@ -289,7 +288,6 @@ export class S3Service implements StorageAdapter { provider: '', _class: core.class.Blob, _id: this.stripPrefix(rootPrefix, objectName) as Ref, - storageId: this.stripPrefix(rootPrefix, objectName), contentType: result.ContentType ?? '', size: result.ContentLength ?? 0, etag: result.ETag ?? 
'', diff --git a/server/server-pipeline/src/pipeline.ts b/server/server-pipeline/src/pipeline.ts index 4865ed6587e..38a74b9f4fd 100644 --- a/server/server-pipeline/src/pipeline.ts +++ b/server/server-pipeline/src/pipeline.ts @@ -71,7 +71,7 @@ import { createIndexStages } from './indexing' export function getTxAdapterFactory ( metrics: MeasureContext, - dbUrls: string, + dbUrl: string, workspace: WorkspaceIdWithUrl, branding: Branding | null, opt: { @@ -86,7 +86,7 @@ export function getTxAdapterFactory ( }, extensions?: Partial ): DbAdapterFactory { - const conf = getConfig(metrics, dbUrls, workspace, branding, metrics, opt, extensions) + const conf = getConfig(metrics, dbUrl, workspace, branding, metrics, opt, extensions) const adapterName = conf.domains[DOMAIN_TX] ?? conf.defaultAdapter const adapter = conf.adapters[adapterName] return adapter.factory @@ -98,7 +98,7 @@ export function getTxAdapterFactory ( export function createServerPipeline ( metrics: MeasureContext, - dbUrls: string, + dbUrl: string, model: Tx[], opt: { fullTextUrl: string @@ -116,7 +116,7 @@ export function createServerPipeline ( return (ctx, workspace, upgrade, broadcast, branding) => { const metricsCtx = opt.usePassedCtx === true ? ctx : metrics const wsMetrics = metricsCtx.newChild('🧲 session', {}) - const conf = getConfig(metrics, dbUrls, workspace, branding, wsMetrics, opt, extensions) + const conf = getConfig(metrics, dbUrl, workspace, branding, wsMetrics, opt, extensions) const middlewares: MiddlewareCreator[] = [ LookupMiddleware.create, @@ -163,7 +163,7 @@ export function createServerPipeline ( export function createBackupPipeline ( metrics: MeasureContext, - dbUrls: string, + dbUrl: string, systemTx: Tx[], opt: { usePassedCtx?: boolean @@ -177,7 +177,7 @@ export function createBackupPipeline ( const wsMetrics = metricsCtx.newChild('🧲 backup', {}) const conf = getConfig( metrics, - dbUrls, + dbUrl, workspace, branding, wsMetrics, @@ -229,25 +229,19 @@ export function createBackupPipeline ( export async function getServerPipeline ( ctx: MeasureContext, model: Tx[], - mongodbUri: string | undefined, dbUrl: string, wsUrl: WorkspaceIdWithUrl ): Promise<{ pipeline: Pipeline storageAdapter: StorageAdapter }> { - const dbUrls = mongodbUri !== undefined && mongodbUri !== dbUrl ? `${dbUrl};${mongodbUri}` : dbUrl - const storageConfig: StorageConfiguration = storageConfigFromEnv() - if (mongodbUri === undefined) { - throw new Error('MONGO_URL is not provided') - } - const storageAdapter = buildStorageFromConfig(storageConfig, mongodbUri) + const storageAdapter = buildStorageFromConfig(storageConfig) const pipelineFactory = createServerPipeline( ctx, - dbUrls, + dbUrl, model, { externalStorage: storageAdapter, @@ -291,7 +285,7 @@ export async function getServerPipeline ( export function getConfig ( metrics: MeasureContext, - dbUrls: string, + dbUrl: string, workspace: WorkspaceIdWithUrl, branding: Branding | null, ctx: MeasureContext, @@ -309,7 +303,6 @@ export function getConfig ( ): DbConfiguration { const metricsCtx = opt.usePassedCtx === true ? ctx : metrics const wsMetrics = metricsCtx.newChild('🧲 session', {}) - const [dbUrl, mongoUrl] = dbUrls.split(';') const conf: DbConfiguration & FulltextDBConfiguration = { domains: { [DOMAIN_TX]: 'Tx', @@ -324,11 +317,11 @@ export function getConfig ( defaultAdapter: extensions?.defaultAdapter ?? 'Main', adapters: { Tx: { - factory: mongoUrl !== undefined ? createPostgresTxAdapter : createMongoTxAdapter, + factory: dbUrl.startsWith('postgresql') ? 
createPostgresTxAdapter : createMongoTxAdapter, url: dbUrl }, Main: { - factory: mongoUrl !== undefined ? createPostgresAdapter : createMongoAdapter, + factory: dbUrl.startsWith('postgresql') ? createPostgresAdapter : createMongoAdapter, url: dbUrl }, Null: { @@ -341,7 +334,7 @@ export function getConfig ( }, StorageData: { factory: createStorageDataAdapter, - url: mongoUrl ?? dbUrl + url: '' }, FullTextBlob: { factory: createElasticBackupDataAdapter, diff --git a/server/server-storage/src/aggregator.ts b/server/server-storage/src/aggregator.ts deleted file mode 100644 index d10bfcbb1d5..00000000000 --- a/server/server-storage/src/aggregator.ts +++ /dev/null @@ -1,416 +0,0 @@ -import core, { - DOMAIN_BLOB, - groupByArray, - toIdMap, - withContext, - type Blob, - type MeasureContext, - type Ref, - type StorageIterator, - type WorkspaceId -} from '@hcengineering/core' -import { type Readable } from 'stream' - -import { getMetadata } from '@hcengineering/platform' -import { - type BlobStorageIterator, - type BucketInfo, - type ListBlobResult, - type StorageAdapter, - type StorageAdapterEx, - type UploadedObjectInfo -} from '@hcengineering/storage' - -import { Analytics } from '@hcengineering/analytics' -import serverCore, { - type RawDBAdapter, - type StorageConfig, - type StorageConfiguration -} from '@hcengineering/server-core' - -class NoSuchKeyError extends Error { - code: string - constructor (msg: string) { - super(msg) - this.code = 'NoSuchKey' - } -} - -/** - * Perform operations on storage adapter and map required information into BinaryDocument into provided DbAdapter storage. - */ -export class AggregatorStorageAdapter implements StorageAdapter, StorageAdapterEx { - constructor ( - readonly adapters: Map, - readonly defaultAdapter: string, // Adapter will be used to put new documents into, if not matched by content type - readonly dbAdapter: RawDBAdapter - ) {} - - async syncBlobFromStorage ( - ctx: MeasureContext, - workspaceId: WorkspaceId, - objectName: string, - providerId?: string - ): Promise { - let current: Blob | undefined = ( - await this.dbAdapter.find(ctx, workspaceId, DOMAIN_BLOB, { _id: objectName as Ref }, { limit: 1 }) - ).shift() - let updated = false - if (current === undefined && providerId !== undefined) { - current = await this.adapters.get(providerId)?.stat(ctx, workspaceId, objectName) - if (current !== undefined) { - current.provider = providerId - updated = true - } - } - - const provider = this.adapters.get(providerId ?? current?.provider ?? this.defaultAdapter) - if (provider === undefined) { - throw new NoSuchKeyError('No such provider found') - } - const stat = updated ? current : await provider.stat(ctx, workspaceId, objectName) - if (stat !== undefined) { - stat.provider = providerId ?? current?.provider ?? this.defaultAdapter - if (current !== undefined && !updated) { - await this.dbAdapter.clean(ctx, workspaceId, DOMAIN_BLOB, [current._id]) - } - await this.dbAdapter.upload(ctx, workspaceId, DOMAIN_BLOB, [stat]) - // TODO: We need to send notification about Blob is changed. 
- return stat - } else { - throw new NoSuchKeyError('No such blob found') - } - } - - async initialize (ctx: MeasureContext, workspaceId: WorkspaceId): Promise {} - - doTrimHash (s: string | undefined): string { - if (s == null) { - return '' - } - if (s.startsWith('"') && s.endsWith('"')) { - return s.slice(1, s.length - 1) - } - return s - } - - async doSyncDocs (ctx: MeasureContext, workspaceId: WorkspaceId, docs: ListBlobResult[]): Promise { - const existingBlobs = toIdMap( - await this.dbAdapter.find(ctx, workspaceId, DOMAIN_BLOB, { _id: { $in: docs.map((it) => it._id) } }) - ) - const toUpdate: Blob[] = [] - for (const d of docs) { - const blobInfo = existingBlobs.get(d._id) - if ( - blobInfo === undefined || // Blob info undefined - // Provider are same and etag or size are diffrent. - (d.provider === blobInfo.provider && - (this.doTrimHash(blobInfo.etag) !== this.doTrimHash(d.etag) || blobInfo.size !== d.size)) || - // We have replacement in default - (d.provider === this.defaultAdapter && blobInfo?.provider !== d.provider) - ) { - const stat = await this.adapters.get(d.provider)?.stat(ctx, workspaceId, d._id) - if (stat !== undefined) { - stat.provider = d.provider - toUpdate.push(stat) - } else { - ctx.error('blob not found for sync', { provider: d.provider, id: d._id, workspace: workspaceId.name }) - } - } - } - if (toUpdate.length > 0) { - await this.dbAdapter.clean(ctx, workspaceId, DOMAIN_BLOB, Array.from(toUpdate.map((it) => it._id))) - await this.dbAdapter.upload(ctx, workspaceId, DOMAIN_BLOB, toUpdate) - } - } - - find (ctx: MeasureContext, workspaceId: WorkspaceId): StorageIterator { - const storageIterator = this.makeStorageIterator(ctx, workspaceId) - - return { - next: async () => { - const docInfos = await storageIterator.next() - if (docInfos.length > 0) { - await this.doSyncDocs(ctx, workspaceId, docInfos) - } - - return docInfos.map((it) => ({ - hash: it.etag, - id: it._id, - size: it.size - })) - }, - close: async (ctx) => { - await storageIterator.close() - } - } - } - - private makeStorageIterator (ctx: MeasureContext, workspaceId: WorkspaceId): BlobStorageIterator { - const adapters = Array.from(this.adapters.entries()) - let provider: [string, StorageAdapter] | undefined - let iterator: BlobStorageIterator | undefined - return { - next: async () => { - while (true) { - if (iterator === undefined && adapters.length > 0) { - provider = adapters.shift() as [string, StorageAdapter] - iterator = await provider[1].listStream(ctx, workspaceId) - } - if (iterator === undefined) { - return [] - } - const docInfos = await iterator.next() - if (docInfos.length > 0) { - for (const d of docInfos) { - d.provider = provider?.[0] as string - } - // We need to check if our stored version is fine - return docInfos - } else { - // We need to take next adapter - await iterator.close() - iterator = undefined - continue - } - } - }, - close: async () => { - if (iterator !== undefined) { - await iterator.close() - } - } - } - } - - async close (): Promise { - for (const a of this.adapters.values()) { - await a.close() - } - await this.dbAdapter.close() - } - - async exists (ctx: MeasureContext, workspaceId: WorkspaceId): Promise { - for (const a of this.adapters.values()) { - if (!(await a.exists(ctx, workspaceId))) { - return false - } - } - return true - } - - @withContext('aggregator-make', {}) - async make (ctx: MeasureContext, workspaceId: WorkspaceId): Promise { - for (const [k, a] of this.adapters.entries()) { - try { - if (!(await a.exists(ctx, workspaceId))) { - await 
a.make(ctx, workspaceId) - } - } catch (err: any) { - ctx.error('failed to init adapter', { adapter: k, workspaceId, error: err }) - // Do not throw error in case default adapter is ok - Analytics.handleError(err) - if (k === this.defaultAdapter) { - // We should throw in case default one is not valid - throw err - } - } - } - } - - @withContext('aggregator-listBuckets', {}) - async listBuckets (ctx: MeasureContext): Promise { - const result: BucketInfo[] = [] - for (const a of this.adapters.values()) { - result.push(...(await a.listBuckets(ctx))) - } - return result - } - - @withContext('aggregator-delete', {}) - async delete (ctx: MeasureContext, workspaceId: WorkspaceId): Promise { - for (const a of this.adapters.values()) { - if (await a.exists(ctx, workspaceId)) { - await a.delete(ctx, workspaceId) - } - } - } - - @withContext('aggregator-remove', {}) - async remove (ctx: MeasureContext, workspaceId: WorkspaceId, objectNames: string[]): Promise { - const docs = await this.dbAdapter.find(ctx, workspaceId, DOMAIN_BLOB, { - _id: { $in: objectNames as Ref[] } - }) - - // Group by provider and delegate into it. - const byProvider = groupByArray(docs, (item) => item.provider) - for (const [k, docs] of byProvider) { - const adapter = this.adapters.get(k) - if (adapter !== undefined) { - await adapter.remove( - ctx, - workspaceId, - docs.map((it) => it._id) - ) - } - } - await this.dbAdapter.clean(ctx, workspaceId, DOMAIN_BLOB, objectNames as Ref[]) - } - - async listStream (ctx: MeasureContext, workspaceId: WorkspaceId): Promise { - const data = await this.dbAdapter.findStream(ctx, workspaceId, DOMAIN_BLOB, {}) - return { - next: async (): Promise => { - return await data.next() - }, - close: async () => { - await data.close() - } - } - } - - @withContext('aggregator-stat', {}) - async stat (ctx: MeasureContext, workspaceId: WorkspaceId, name: string): Promise { - const result = await this.dbAdapter.find( - ctx, - workspaceId, - DOMAIN_BLOB, - { _id: name as Ref }, - { limit: 1 } - ) - return result.shift() - } - - @withContext('aggregator-get', {}) - async get (ctx: MeasureContext, workspaceId: WorkspaceId, name: string): Promise { - const { provider, stat } = await this.findProvider(ctx, workspaceId, name) - return await provider.get(ctx, workspaceId, stat.storageId) - } - - @withContext('find-provider', {}) - private async findProvider ( - ctx: MeasureContext, - workspaceId: WorkspaceId, - objectName: string - ): Promise<{ provider: StorageAdapter, stat: Blob }> { - const stat = ( - await this.dbAdapter.find(ctx, workspaceId, DOMAIN_BLOB, { _id: objectName as Ref }, { limit: 1 }) - ).shift() - if (stat === undefined) { - throw new NoSuchKeyError(`No such object found ${objectName}`) - } - const provider = this.adapters.get(stat.provider) - if (provider === undefined) { - throw new NoSuchKeyError(`No such provider found: ${provider}`) - } - return { provider, stat } - } - - @withContext('aggregator-partial', {}) - async partial ( - ctx: MeasureContext, - workspaceId: WorkspaceId, - objectName: string, - offset: number, - length?: number | undefined - ): Promise { - const { provider, stat } = await this.findProvider(ctx, workspaceId, objectName) - return await provider.partial(ctx, workspaceId, stat.storageId, offset, length) - } - - @withContext('aggregator-read', {}) - async read (ctx: MeasureContext, workspaceId: WorkspaceId, name: string): Promise { - const { provider, stat } = await this.findProvider(ctx, workspaceId, name) - return await provider.read(ctx, workspaceId, stat.storageId) 
- } - - selectProvider ( - forceProvider: string | undefined, - contentType: string - ): { adapter: StorageAdapter, provider: string } { - if (forceProvider !== undefined) { - return { - adapter: this.adapters.get(forceProvider) ?? (this.adapters.get(this.defaultAdapter) as StorageAdapter), - provider: forceProvider - } - } - - return { adapter: this.adapters.get(this.defaultAdapter) as StorageAdapter, provider: this.defaultAdapter } - } - - @withContext('aggregator-put', {}) - async put ( - ctx: MeasureContext, - workspaceId: WorkspaceId, - objectName: string, - stream: string | Readable | Buffer, - contentType: string, - size?: number | undefined - ): Promise { - const stat = ( - await this.dbAdapter.find(ctx, workspaceId, DOMAIN_BLOB, { _id: objectName as Ref }, { limit: 1 }) - ).shift() - - const { provider, adapter } = this.selectProvider(undefined, contentType) - - const result = await adapter.put(ctx, workspaceId, objectName, stream, contentType, size) - - if (size === undefined || size === 0 || !Number.isInteger(size)) { - const docStats = await adapter.stat(ctx, workspaceId, objectName) - if (docStats !== undefined) { - if (contentType !== docStats.contentType) { - contentType = docStats.contentType - } - size = docStats.size - } - } - - const blobDoc: Blob = { - _class: core.class.Blob, - _id: objectName as Ref, - modifiedBy: core.account.System, - modifiedOn: Date.now(), - space: core.space.Configuration, - provider, - storageId: objectName, - size: size ?? 0, - contentType, - etag: result.etag, - version: result.versionId ?? null - } - - await this.dbAdapter.upload(ctx, workspaceId, DOMAIN_BLOB, [blobDoc]) - - // If the file is already stored in different provider, we need to remove it. - if (stat !== undefined && stat.provider !== provider) { - // TODO temporary not needed - // const adapter = this.adapters.get(stat.provider) - // await adapter?.remove(ctx, workspaceId, [stat._id]) - } - - return result - } - - @withContext('aggregator-getUrl', {}) - async getUrl (ctx: MeasureContext, workspaceId: WorkspaceId, name: string): Promise { - // const { provider, stat } = await this.findProvider(ctx, workspaceId, name) - // return await provider.getUrl(ctx, workspaceId, stat.storageId) - const filesUrl = getMetadata(serverCore.metadata.FilesUrl) ?? '' - return filesUrl.replaceAll(':workspace', workspaceId.name).replaceAll(':blobId', name) - } -} - -/** - * @public - */ -export function buildStorage ( - config: StorageConfiguration, - dbAdapter: RawDBAdapter, - storageFactory: (config: StorageConfig) => StorageAdapter -): AggregatorStorageAdapter { - const adapters = new Map() - for (const c of config.storages) { - adapters.set(c.name, storageFactory(c)) - } - return new AggregatorStorageAdapter(adapters, config.default, dbAdapter) -} diff --git a/server/server-storage/src/blobStorage.ts b/server/server-storage/src/blobStorage.ts index a44d147ad0b..a90ce52abc4 100644 --- a/server/server-storage/src/blobStorage.ts +++ b/server/server-storage/src/blobStorage.ts @@ -13,7 +13,7 @@ // limitations under the License. 
// -import core, { +import { Class, Doc, DocumentQuery, @@ -28,27 +28,20 @@ import core, { ModelDb, Ref, StorageIterator, + toFindResult, Tx, TxResult, WorkspaceId, type Blob } from '@hcengineering/core' -import { createMongoAdapter } from '@hcengineering/mongo' import { PlatformError, unknownError } from '@hcengineering/platform' -import { - DbAdapter, - DbAdapterHandler, - StorageAdapter, - type DomainHelperOperations, - type StorageAdapterEx -} from '@hcengineering/server-core' +import { DbAdapter, DbAdapterHandler, StorageAdapter, type StorageAdapterEx } from '@hcengineering/server-core' class StorageBlobAdapter implements DbAdapter { constructor ( readonly workspaceId: WorkspaceId, - readonly client: StorageAdapter, // Should not be closed - readonly ctx: MeasureContext, - readonly blobAdapter: DbAdapter // A real blob adapter for Blob documents. + readonly client: StorageAdapterEx, // Should not be closed + readonly ctx: MeasureContext ) {} async traverse( @@ -56,23 +49,26 @@ class StorageBlobAdapter implements DbAdapter { query: DocumentQuery, options?: Pick, 'sort' | 'limit' | 'projection'> ): Promise> { - return await this.blobAdapter.traverse(domain, query, options) + return { + next: async () => { + return toFindResult([]) + }, + close: async () => {} + } } init?: ((domains?: string[], excludeDomains?: string[]) => Promise) | undefined on?: ((handler: DbAdapterHandler) => void) | undefined async rawFindAll(domain: Domain, query: DocumentQuery, options?: FindOptions): Promise { - return await this.blobAdapter.rawFindAll(domain, query, options) + return [] } async rawUpdate( domain: Domain, query: DocumentQuery, operations: DocumentUpdate - ): Promise { - await this.blobAdapter.rawUpdate(domain, query, operations) - } + ): Promise {} async findAll( ctx: MeasureContext, @@ -80,15 +76,11 @@ class StorageBlobAdapter implements DbAdapter { query: DocumentQuery, options?: FindOptions ): Promise> { - return await this.blobAdapter.findAll(ctx, _class, query, options) - } - - helper (): DomainHelperOperations { - return this.blobAdapter.helper() + return toFindResult([]) } async groupBy(ctx: MeasureContext, domain: Domain, field: string): Promise> { - return await this.blobAdapter.groupBy(ctx, domain, field) + return new Set() } async tx (ctx: MeasureContext, ...tx: Tx[]): Promise { @@ -98,53 +90,32 @@ class StorageBlobAdapter implements DbAdapter { async createIndexes (domain: Domain, config: Pick, 'indexes'>): Promise {} async removeOldIndex (domain: Domain, deletePattern: RegExp[], keepPattern: RegExp[]): Promise {} - async close (): Promise { - await this.blobAdapter.close() - } + async close (): Promise {} find (ctx: MeasureContext, domain: Domain, recheck?: boolean): StorageIterator { - return (this.client as StorageAdapterEx).find(ctx, this.workspaceId) + return this.client.find(ctx, this.workspaceId) } async load (ctx: MeasureContext, domain: Domain, docs: Ref[]): Promise { - return await this.blobAdapter.load(ctx, domain, docs) + const blobs: Blob[] = [] + for (const d of docs) { + const bb = await this.client.stat(ctx, this.workspaceId, d) + if (bb !== undefined) { + blobs.push(bb) + } + } + return blobs } async upload (ctx: MeasureContext, domain: Domain, docs: Doc[]): Promise { - // We need to update docs to have provider === defualt one. - if ('adapters' in this.client) { - const toUpload: Doc[] = [] - const adapterEx = this.client as StorageAdapterEx - for (const d of docs) { - // We need sync stats to be sure all info are correct from storage. 
-        if (d._class === core.class.Blob) {
-          const blob = d as Blob
-          const blobStat = await this.client.stat(ctx, this.workspaceId, blob.storageId)
-          if (blobStat !== undefined) {
-            blob.provider = adapterEx.defaultAdapter
-            blob.etag = blobStat.etag
-            blob.contentType = blobStat.contentType
-            blob.version = blobStat.version
-            blob.size = blobStat.size
-            delete (blob as any).downloadUrl
-            delete (blob as any).downloadUrlExpire
-
-            toUpload.push(blob)
-          }
-        }
-      }
-      docs = toUpload
-    }
-    await this.blobAdapter.upload(ctx, domain, docs)
+    // Nothing to do
   }
 
   async clean (ctx: MeasureContext, domain: Domain, docs: Ref<Doc>[]): Promise<void> {
-    await Promise.all([this.blobAdapter.clean(ctx, domain, docs), this.client.remove(this.ctx, this.workspaceId, docs)])
+    await this.client.remove(this.ctx, this.workspaceId, docs)
   }
 
-  async update (ctx: MeasureContext, domain: Domain, operations: Map<Ref<Doc>, DocumentUpdate<Doc>>): Promise<void> {
-    await this.blobAdapter.update(ctx, domain, operations)
-  }
+  async update (ctx: MeasureContext, domain: Domain, operations: Map<Ref<Doc>, DocumentUpdate<Doc>>): Promise<void> {}
 }
 
 /**
@@ -163,17 +134,5 @@ export async function createStorageDataAdapter (
   }
   // We need to create bucket if it doesn't exist
   await storage.make(ctx, workspaceId)
-
-  const storageEx = 'adapters' in storage ? (storage as StorageAdapterEx) : undefined
-
-  const blobAdapter = await createMongoAdapter(ctx, hierarchy, url, workspaceId, modelDb, undefined, {
-    calculateHash: (d) => {
-      const blob = d as Blob
-      if (storageEx?.adapters !== undefined && storageEx.adapters.get(blob.provider) === undefined) {
-        return { digest: blob.etag + '_' + storageEx.defaultAdapter, size: blob.size }
-      }
-      return { digest: blob.etag, size: blob.size }
-    }
-  })
-  return new StorageBlobAdapter(workspaceId, storage, ctx, blobAdapter)
+  return new StorageBlobAdapter(workspaceId, storage as StorageAdapterEx, ctx)
 }
diff --git a/server/server-storage/src/fallback.ts b/server/server-storage/src/fallback.ts
new file mode 100644
index 00000000000..5ba873e60a7
--- /dev/null
+++ b/server/server-storage/src/fallback.ts
@@ -0,0 +1,269 @@
+import {
+  withContext,
+  type Blob,
+  type MeasureContext,
+  type StorageIterator,
+  type WorkspaceId
+} from '@hcengineering/core'
+import { type Readable } from 'stream'
+
+import { getMetadata } from '@hcengineering/platform'
+import {
+  type BlobStorageIterator,
+  type BucketInfo,
+  type ListBlobResult,
+  type NamedStorageAdapter,
+  type StorageAdapter,
+  type StorageAdapterEx,
+  type UploadedObjectInfo
+} from '@hcengineering/storage'
+
+import { Analytics } from '@hcengineering/analytics'
+import serverCore, { type StorageConfig, type StorageConfiguration } from '@hcengineering/server-core'
+
+class NoSuchKeyError extends Error {
+  code: string
+  constructor (msg: string) {
+    super(msg)
+    this.code = 'NoSuchKey'
+  }
+}
+
+/**
+ * Perform storage operations across an ordered list of storage adapters, falling back to the next adapter when an object is not found.
+ */ +export class FallbackStorageAdapter implements StorageAdapter, StorageAdapterEx { + // Adapters should be in reverse order, first one is target one, and next ones are for fallback + constructor (readonly adapters: NamedStorageAdapter[]) {} + + async initialize (ctx: MeasureContext, workspaceId: WorkspaceId): Promise {} + + doTrimHash (s: string | undefined): string { + if (s == null) { + return '' + } + if (s.startsWith('"') && s.endsWith('"')) { + return s.slice(1, s.length - 1) + } + return s + } + + find (ctx: MeasureContext, workspaceId: WorkspaceId): StorageIterator { + const storageIterator = this.makeStorageIterator(ctx, workspaceId) + + return { + next: async () => { + const docInfos = await storageIterator.next() + + return docInfos.map((it) => ({ + hash: it.etag, + id: it._id, + size: it.size + })) + }, + close: async (ctx) => { + await storageIterator.close() + } + } + } + + private makeStorageIterator (ctx: MeasureContext, workspaceId: WorkspaceId): BlobStorageIterator { + // We need to reverse, since we need to iterate on latest document last + const adapters = [...this.adapters].reverse() + let provider: NamedStorageAdapter | undefined + let iterator: BlobStorageIterator | undefined + return { + next: async () => { + while (true) { + if (iterator === undefined && adapters.length > 0) { + provider = adapters.shift() as NamedStorageAdapter + iterator = await provider.adapter.listStream(ctx, workspaceId) + } + if (iterator === undefined) { + return [] + } + const docInfos = await iterator.next() + if (docInfos.length > 0) { + for (const d of docInfos) { + d.provider = provider?.name as string + } + // We need to check if our stored version is fine + return docInfos + } else { + // We need to take next adapter + await iterator.close() + iterator = undefined + continue + } + } + }, + close: async () => { + if (iterator !== undefined) { + await iterator.close() + } + } + } + } + + async close (): Promise { + for (const { adapter } of this.adapters) { + await adapter.close() + } + } + + async exists (ctx: MeasureContext, workspaceId: WorkspaceId): Promise { + for (const { adapter } of this.adapters) { + if (!(await adapter.exists(ctx, workspaceId))) { + return false + } + } + return true + } + + @withContext('aggregator-make', {}) + async make (ctx: MeasureContext, workspaceId: WorkspaceId): Promise { + for (const { name, adapter } of this.adapters) { + try { + if (!(await adapter.exists(ctx, workspaceId))) { + await adapter.make(ctx, workspaceId) + } + } catch (err: any) { + ctx.error('failed to init adapter', { adapter: name, workspaceId, error: err }) + // Do not throw error in case default adapter is ok + Analytics.handleError(err) + } + } + } + + @withContext('aggregator-listBuckets', {}) + async listBuckets (ctx: MeasureContext): Promise { + const result: BucketInfo[] = [] + for (const { adapter } of this.adapters) { + result.push(...(await adapter.listBuckets(ctx))) + } + return result + } + + @withContext('aggregator-delete', {}) + async delete (ctx: MeasureContext, workspaceId: WorkspaceId): Promise { + for (const { adapter } of this.adapters) { + if (await adapter.exists(ctx, workspaceId)) { + await adapter.delete(ctx, workspaceId) + } + } + } + + @withContext('aggregator-remove', {}) + async remove (ctx: MeasureContext, workspaceId: WorkspaceId, objectNames: string[]): Promise { + // Group by provider and delegate into it. 
+    for (const { adapter } of this.adapters) {
+      await adapter.remove(ctx, workspaceId, objectNames)
+    }
+  }
+
+  async listStream (ctx: MeasureContext, workspaceId: WorkspaceId): Promise<BlobStorageIterator> {
+    const storageIterator = this.makeStorageIterator(ctx, workspaceId)
+    return {
+      next: async (): Promise<ListBlobResult[]> => {
+        return await storageIterator.next()
+      },
+      close: async () => {
+        await storageIterator.close()
+      }
+    }
+  }
+
+  @withContext('aggregator-stat', {})
+  async stat (ctx: MeasureContext, workspaceId: WorkspaceId, name: string): Promise<Blob | undefined> {
+    const result = await this.findProvider(ctx, workspaceId, name)
+    if (result !== undefined) {
+      result.stat.provider = result.name
+    }
+    return result?.stat
+  }
+
+  @withContext('aggregator-get', {})
+  async get (ctx: MeasureContext, workspaceId: WorkspaceId, name: string): Promise<Readable> {
+    const result = await this.findProvider(ctx, workspaceId, name)
+    if (result === undefined) {
+      throw new NoSuchKeyError(`${workspaceId.name} missing ${name}`)
+    }
+    return await result.adapter.get(ctx, workspaceId, result.stat._id)
+  }
+
+  @withContext('find-provider', {})
+  private async findProvider (
+    ctx: MeasureContext,
+    workspaceId: WorkspaceId,
+    objectName: string
+  ): Promise<{ name: string, adapter: StorageAdapter, stat: Blob } | undefined> {
+    // Check the adapters in order and use the first one that knows the object.
+    for (const { name, adapter } of this.adapters) {
+      const stat = await adapter.stat(ctx, workspaceId, objectName)
+      if (stat !== undefined) {
+        return { name, adapter, stat }
+      }
+    }
+  }
+
+  @withContext('aggregator-partial', {})
+  async partial (
+    ctx: MeasureContext,
+    workspaceId: WorkspaceId,
+    objectName: string,
+    offset: number,
+    length?: number | undefined
+  ): Promise<Readable> {
+    const result = await this.findProvider(ctx, workspaceId, objectName)
+    if (result === undefined) {
+      throw new NoSuchKeyError(`${workspaceId.name} missing ${objectName}`)
+    }
+    return await result.adapter.partial(ctx, workspaceId, result.stat._id, offset, length)
+  }
+
+  @withContext('aggregator-read', {})
+  async read (ctx: MeasureContext, workspaceId: WorkspaceId, objectName: string): Promise<Buffer[]> {
+    const result = await this.findProvider(ctx, workspaceId, objectName)
+    if (result === undefined) {
+      throw new NoSuchKeyError(`${workspaceId.name} missing ${objectName}`)
+    }
+    return await result.adapter.read(ctx, workspaceId, result.stat._id)
+  }
+
+  @withContext('aggregator-put', {})
+  put (
+    ctx: MeasureContext,
+    workspaceId: WorkspaceId,
+    objectName: string,
+    stream: string | Readable | Buffer,
+    contentType: string,
+    size?: number | undefined
+  ): Promise<UploadedObjectInfo> {
+    const adapter = this.adapters[0].adapter
+    // Remove in other storages, if applicable
+    return adapter.put(ctx, workspaceId, objectName, stream, contentType, size)
+  }
+
+  @withContext('aggregator-getUrl', {})
+  async getUrl (ctx: MeasureContext, workspaceId: WorkspaceId, name: string): Promise<string> {
+    // const { provider, stat } = await this.findProvider(ctx, workspaceId, name)
+    // return await provider.getUrl(ctx, workspaceId, stat.storageId)
+    const filesUrl = getMetadata(serverCore.metadata.FilesUrl) ?? 
''
+    return filesUrl.replaceAll(':workspace', workspaceId.name).replaceAll(':blobId', name)
+  }
+}
+
+/**
+ * @public
+ */
+export function buildStorage (
+  config: StorageConfiguration,
+  storageFactory: (config: StorageConfig) => StorageAdapter
+): FallbackStorageAdapter {
+  const adapters: NamedStorageAdapter[] = []
+  for (const c of config.storages) {
+    adapters.push({ name: c.name, adapter: storageFactory(c) })
+  }
+  // Reverse the adapters so the latest one becomes the target.
+  return new FallbackStorageAdapter(adapters.reverse())
+}
diff --git a/server/server-storage/src/index.ts b/server/server-storage/src/index.ts
index 823da0a904d..c255d9c854d 100644
--- a/server/server-storage/src/index.ts
+++ b/server/server-storage/src/index.ts
@@ -14,6 +14,6 @@
 // limitations under the License.
 //
 
-export * from './aggregator'
+export * from './fallback'
 export * from './blobStorage'
 export * from './starter'
diff --git a/server/server-storage/src/starter.ts b/server/server-storage/src/starter.ts
index a01e70a886f..079ede7af4a 100644
--- a/server/server-storage/src/starter.ts
+++ b/server/server-storage/src/starter.ts
@@ -1,9 +1,8 @@
 import { DatalakeService, type DatalakeConfig } from '@hcengineering/datalake'
 import { MinioConfig, MinioService, addMinioFallback } from '@hcengineering/minio'
-import { createRawMongoDBAdapter } from '@hcengineering/mongo'
 import { S3Service, type S3Config } from '@hcengineering/s3'
 import { StorageAdapter, StorageConfiguration, type StorageConfig } from '@hcengineering/server-core'
-import { AggregatorStorageAdapter, buildStorage } from './aggregator'
+import { FallbackStorageAdapter, buildStorage } from './fallback'
 
 /*
@@ -14,8 +13,6 @@ import { AggregatorStorageAdapter, buildStorage } from './aggregator'
 * kind - a storage kind, minio/s3 for now.
 * name - a symbolic name for the provider; it may be omitted, in which case the kind is used as the name.
 * uri - a storage URI with encoded parameters.
- * contentTypes - a comma-separated list of content type patterns. Like 'image/*,video/gif' will match all image/* and video/gif formats.
-   So * will be replaced with `.*` for the regexp. The last one is used as the default, or the one whose content type matches will be used. 
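For reference, a minimal sketch of how the reworked buildStorageFromConfig (changed in the following hunk) might be called after this patch. Only the names shown in the diffs above are taken from the patch; the per-kind StorageConfig fields are assumptions, hence the casts:

import type { StorageConfiguration } from '@hcengineering/server-core'
import { buildStorageFromConfig } from '@hcengineering/server-storage'

// The last configured storage becomes the write target: buildStorage reverses
// the list, so FallbackStorageAdapter.put() lands on adapters[0] ('s3' here),
// while reads fall back through the remaining adapters in order.
const config: StorageConfiguration = {
  default: 's3',
  storages: [
    { name: 'minio', kind: 'minio' } as any, // read fallback, consulted second
    { name: 's3', kind: 's3' } as any // primary target for put()
  ]
}

// No db url argument any more: blob metadata is read straight from the storages.
const storage = buildStorageFromConfig(config)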
@@ -103,6 +100,6 @@ export function createStorageFromConfig (config: StorageConfig): StorageAdapter } } -export function buildStorageFromConfig (config: StorageConfiguration, dbUrl: string): AggregatorStorageAdapter { - return buildStorage(config, createRawMongoDBAdapter(dbUrl), createStorageFromConfig) +export function buildStorageFromConfig (config: StorageConfiguration): FallbackStorageAdapter { + return buildStorage(config, createStorageFromConfig) } diff --git a/server/server-storage/src/tests/aggregator.spec.ts b/server/server-storage/src/tests/aggregator.spec.ts index 12ccf3df988..ea345ea6e37 100644 --- a/server/server-storage/src/tests/aggregator.spec.ts +++ b/server/server-storage/src/tests/aggregator.spec.ts @@ -1,24 +1,23 @@ import { MeasureMetricsContext, type MeasureContext, type WorkspaceId } from '@hcengineering/core' -import type { StorageAdapter } from '@hcengineering/storage' -import { AggregatorStorageAdapter } from '../aggregator' -import { MemRawDBAdapter, MemStorageAdapter } from './memAdapters' +import type { NamedStorageAdapter } from '@hcengineering/storage' +import { FallbackStorageAdapter } from '../fallback' +import { MemStorageAdapter } from './memAdapters' describe('aggregator tests', () => { function prepare1 (): { mem1: MemStorageAdapter mem2: MemStorageAdapter - aggr: AggregatorStorageAdapter + aggr: FallbackStorageAdapter testCtx: MeasureContext ws1: WorkspaceId } { const mem1 = new MemStorageAdapter() const mem2 = new MemStorageAdapter() - const adapters = new Map() - adapters.set('mem1', mem1) - adapters.set('mem2', mem2) - const blobs = new MemRawDBAdapter() - const aggr = new AggregatorStorageAdapter(adapters, 'mem2', blobs) + const adapters: NamedStorageAdapter[] = [] + adapters.push({ name: 'mem2', adapter: mem2 }) + adapters.push({ name: 'mem1', adapter: mem1 }) + const aggr = new FallbackStorageAdapter(adapters) const testCtx = new MeasureMetricsContext('test', {}) const ws1: WorkspaceId = { name: 'ws1' } @@ -29,17 +28,15 @@ describe('aggregator tests', () => { // Test default provider await mem1.put(testCtx, ws1, 'test', 'data', 'text/plain') - await aggr.syncBlobFromStorage(testCtx, ws1, 'test', 'mem1') const stat = await aggr.stat(testCtx, ws1, 'test') expect(stat?.provider).toEqual('mem1') - // Test content typed provider await aggr.put(testCtx, ws1, 'test', 'data2', 'text/plain') const stat2 = await aggr.stat(testCtx, ws1, 'test') expect(stat2?.provider).toEqual('mem2') - const dta = Buffer.concat(await aggr.read(testCtx, ws1, 'test')).toString() + const dta = Buffer.concat((await aggr.read(testCtx, ws1, 'test')) as any).toString() expect(dta).toEqual('data2') }) }) diff --git a/server/server-storage/src/tests/memAdapters.ts b/server/server-storage/src/tests/memAdapters.ts index cd56f602451..ba6be6f8da8 100644 --- a/server/server-storage/src/tests/memAdapters.ts +++ b/server/server-storage/src/tests/memAdapters.ts @@ -1,22 +1,4 @@ -import core, { - Hierarchy, - ModelDb, - TxProcessor, - toFindResult, - type Blob, - type Class, - type Doc, - type DocumentQuery, - type DocumentUpdate, - type Domain, - type FindOptions, - type FindResult, - type MeasureContext, - type Ref, - type WorkspaceId -} from '@hcengineering/core' -import { genMinModel } from '@hcengineering/core/src/__tests__/minmodel' -import type { RawDBAdapter, RawDBAdapterStream } from '@hcengineering/server-core' +import core, { type Blob, type MeasureContext, type WorkspaceId } from '@hcengineering/core' import type { BlobStorageIterator, BucketInfo, StorageAdapter, 
UploadedObjectInfo } from '@hcengineering/storage' import { Readable } from 'stream' @@ -102,7 +84,7 @@ export class MemStorageAdapter implements StorageAdapter { }) }) } - const data = Buffer.concat(buffer) + const data = Buffer.concat(buffer as any) const dta = { _class: core.class.Blob, _id: objectName as any, @@ -114,7 +96,6 @@ export class MemStorageAdapter implements StorageAdapter { modifiedOn: Date.now(), provider: '_test', space: '' as any, - storageId: objectName, version: null, workspace: workspaceId.name } @@ -148,95 +129,3 @@ export class MemStorageAdapter implements StorageAdapter { return '/files/' + objectName } } - -export class MemRawDBAdapter implements RawDBAdapter { - hierarchy: Hierarchy - workspaces = new Map() - constructor () { - this.hierarchy = new Hierarchy() - const minModel = genMinModel() - minModel.forEach((it) => { - this.hierarchy.tx(it) - }) - } - - async find( - ctx: MeasureContext, - workspace: WorkspaceId, - domain: Domain, - query: DocumentQuery, - options?: Omit, 'projection' | 'lookup'> - ): Promise> { - const db = this.workspaces.get(workspace.name) - if (db === undefined) { - return toFindResult([]) - } - return await db.findAll(core.class.Blob as Ref>, query, options) - } - - async findStream( - ctx: MeasureContext, - workspace: WorkspaceId, - domain: Domain, - query: DocumentQuery, - options?: Omit, 'projection' | 'lookup'> - ): Promise> { - const db = this.workspaces.get(workspace.name) - - let result: T[] = [] - if (db !== undefined) { - result = await db.findAll(core.class.Blob as Ref>, query, options) - } - return { - next: async () => { - return result.splice(0, 50) - }, - close: async () => {} - } - } - - async upload(ctx: MeasureContext, workspace: WorkspaceId, domain: Domain, docs: T[]): Promise { - let db = this.workspaces.get(workspace.name) - if (db === undefined) { - db = new ModelDb(this.hierarchy) - this.workspaces.set(workspace.name, db) - } - for (const d of docs) { - db.addDoc(d) - } - } - - async update( - ctx: MeasureContext, - workspace: WorkspaceId, - domain: Domain, - docs: Map, DocumentUpdate> - ): Promise { - let db = this.workspaces.get(workspace.name) - if (db === undefined) { - db = new ModelDb(this.hierarchy) - this.workspaces.set(workspace.name, db) - } - for (const [du, upd] of docs.entries()) { - const doc = db.getObject(du) - TxProcessor.applyUpdate(doc, upd) - } - } - - async clean( - ctx: MeasureContext, - workspace: WorkspaceId, - domain: Domain, - docs: Ref[] - ): Promise { - const db = this.workspaces.get(workspace.name) - if (db === undefined) { - return - } - for (const d of docs) { - db.delDoc(d) - } - } - - async close (): Promise {} -} diff --git a/server/server/src/starter.ts b/server/server/src/starter.ts index 985f6d11be0..9d733ca224b 100644 --- a/server/server/src/starter.ts +++ b/server/server/src/starter.ts @@ -1,5 +1,6 @@ export interface ServerEnv { - url: string + dbUrl: string + mongoUrl?: string elasticUrl: string serverSecret: string rekoniUrl: string @@ -27,12 +28,6 @@ export function serverConfigFromEnv (): ServerEnv { } const mongoUrl = process.env.MONGO_URL - if (mongoUrl === undefined) { - console.error('please provide mongodb url') - process.exit(1) - } - - const url = dbUrl !== mongoUrl ? 
`${dbUrl};${mongoUrl}` : dbUrl const elasticUrl = process.env.ELASTIC_URL if (elasticUrl === undefined) { @@ -78,7 +73,8 @@ export function serverConfigFromEnv (): ServerEnv { const brandingPath = process.env.BRANDING_PATH return { - url, + dbUrl, + mongoUrl, elasticUrl, elasticIndexName, serverSecret, diff --git a/server/tool/src/index.ts b/server/tool/src/index.ts index c2268f7d936..8751ed00d4a 100644 --- a/server/tool/src/index.ts +++ b/server/tool/src/index.ts @@ -35,7 +35,8 @@ import core, { WorkspaceId, WorkspaceIdWithUrl, type Doc, - type Ref + type Ref, + type WithLookup } from '@hcengineering/core' import { consoleModelLogger, MigrateOperation, ModelLogger, tryMigrate } from '@hcengineering/model' import { DomainIndexHelperImpl, Pipeline, StorageAdapter, type DbAdapter } from '@hcengineering/server-core' @@ -79,11 +80,9 @@ export class FileModelLogger implements ModelLogger { * @public */ export function prepareTools (rawTxes: Tx[]): { - mongodbUri: string | undefined dbUrl: string txes: Tx[] } { - const mongodbUri = process.env.MONGO_URL const dbUrl = process.env.DB_URL if (dbUrl === undefined) { console.error('please provide db url.') @@ -91,7 +90,6 @@ export function prepareTools (rawTxes: Tx[]): { } return { - mongodbUri, dbUrl, txes: JSON.parse(JSON.stringify(rawTxes)) as Tx[] } @@ -157,7 +155,12 @@ export async function updateModel ( const states = await connection.findAll(core.class.MigrationState, {}) const sts = Array.from(groupByArray(states, (it) => it.plugin).entries()) - const migrateState = new Map(sts.map((it) => [it[0], new Set(it[1].map((q) => q.state))])) + + const _toSet = (vals: WithLookup[]): Set => { + return new Set(vals.map((q) => q.state)) + } + + const migrateState = new Map>(sts.map((it) => [it[0], _toSet(it[1])])) try { let i = 0 @@ -447,9 +450,11 @@ async function createUpdateIndexes ( if (adapter === undefined) { throw new PlatformError(unknownError(`Adapter for domain ${domain} not found`)) } - const dbHelper = adapter.helper() + const dbHelper = adapter.helper?.() - await domainHelper.checkDomain(ctx, domain, await dbHelper.estimatedCount(domain), dbHelper) + if (dbHelper !== undefined) { + await domainHelper.checkDomain(ctx, domain, await dbHelper.estimatedCount(domain), dbHelper) + } completed++ await progress((100 / allDomains.length) * completed) } diff --git a/server/workspace-service/src/index.ts b/server/workspace-service/src/index.ts index 3f850a09ef7..eb95fcc9bf7 100644 --- a/server/workspace-service/src/index.ts +++ b/server/workspace-service/src/index.ts @@ -74,13 +74,6 @@ export function serveWorkspaceAccount ( process.exit(1) } - // Required by the tool - const dbUri = process.env.MONGO_URL - if (dbUri === undefined) { - console.log('Please provide mongodb url') - process.exit(1) - } - const waitTimeout = parseInt(process.env.WAIT_TIMEOUT ?? '5000') setMetadata(serverToken.metadata.Secret, serverSecret) diff --git a/server/workspace-service/src/ws-operations.ts b/server/workspace-service/src/ws-operations.ts index 47c41fa1db3..457fcd47c5c 100644 --- a/server/workspace-service/src/ws-operations.ts +++ b/server/workspace-service/src/ws-operations.ts @@ -113,20 +113,16 @@ export async function createWorkspace ( await handleWsEvent?.('create-started', version, 10) - const { mongodbUri, dbUrl } = prepareTools([]) - if (mongodbUri === undefined) { - throw new Error('No MONGO_URL specified') - } - const dbUrls = mongodbUri !== undefined && dbUrl !== mongodbUri ? 
`${dbUrl};${mongodbUri}` : dbUrl + const { dbUrl } = prepareTools([]) const hierarchy = new Hierarchy() const modelDb = new ModelDb(hierarchy) registerServerPlugins() registerStringLoaders() - const { pipeline, storageAdapter } = await getServerPipeline(ctx, txes, mongodbUri, dbUrl, wsUrl) + const { pipeline, storageAdapter } = await getServerPipeline(ctx, txes, dbUrl, wsUrl) try { - const txFactory = getTxAdapterFactory(ctx, dbUrls, wsUrl, null, { + const txFactory = getTxAdapterFactory(ctx, dbUrl, wsUrl, null, { externalStorage: storageAdapter, fullTextUrl: 'http://localhost:9200', indexParallel: 0, @@ -134,7 +130,7 @@ export async function createWorkspace ( rekoniUrl: '', usePassedCtx: true }) - const txAdapter = await txFactory(ctx, hierarchy, dbUrl ?? mongodbUri, wsId, modelDb, storageAdapter) + const txAdapter = await txFactory(ctx, hierarchy, dbUrl, wsId, modelDb, storageAdapter) await childLogger.withLog('init-workspace', {}, async (ctx) => { await initModel(ctx, wsId, txes, txAdapter, storageAdapter, ctxModellogger, async (value) => { @@ -204,17 +200,14 @@ export async function upgradeWorkspace ( forceIndexes: boolean = false, external: boolean = false ): Promise { - const { mongodbUri, dbUrl } = prepareTools([]) - if (mongodbUri === undefined) { - throw new Error('No MONGO_URL specified') - } + const { dbUrl } = prepareTools([]) let pipeline: Pipeline | undefined let storageAdapter: StorageAdapter | undefined registerServerPlugins() registerStringLoaders() try { - ;({ pipeline, storageAdapter } = await getServerPipeline(ctx, txes, mongodbUri, dbUrl, { + ;({ pipeline, storageAdapter } = await getServerPipeline(ctx, txes, dbUrl, { name: ws.workspace, workspaceName: ws.workspaceName ?? '', workspaceUrl: ws.workspaceUrl ?? '' diff --git a/services/github/pod-github/src/platform.ts b/services/github/pod-github/src/platform.ts index 6b1c6ed307f..decfff91a4d 100644 --- a/services/github/pod-github/src/platform.ts +++ b/services/github/pod-github/src/platform.ts @@ -91,7 +91,7 @@ export class PlatformWorker { this.userManager = new UserManager(db.collection('users')) const storageConfig = storageConfigFromEnv() - this.storageAdapter = buildStorageFromConfig(storageConfig, config.MongoURL) + this.storageAdapter = buildStorageFromConfig(storageConfig) } async close (): Promise { diff --git a/services/gmail/pod-gmail/src/main.ts b/services/gmail/pod-gmail/src/main.ts index 1bf475adb70..46a9432bd55 100644 --- a/services/gmail/pod-gmail/src/main.ts +++ b/services/gmail/pod-gmail/src/main.ts @@ -44,7 +44,7 @@ export const main = async (): Promise => { setMetadata(serverToken.metadata.Secret, config.Secret) const storageConfig: StorageConfiguration = storageConfigFromEnv() - const storageAdapter = buildStorageFromConfig(storageConfig, config.MongoURI) + const storageAdapter = buildStorageFromConfig(storageConfig) const db = await getDB() const gmailController = GmailController.create(ctx, db, storageAdapter) diff --git a/services/love/src/main.ts b/services/love/src/main.ts index 8f4ad0fb735..b802327d5e6 100644 --- a/services/love/src/main.ts +++ b/services/love/src/main.ts @@ -51,7 +51,7 @@ export const main = async (): Promise => { const storageConfigs: StorageConfiguration = storageConfigFromEnv() const ctx = new MeasureMetricsContext('love', {}, {}, newMetrics()) const storageConfig = storageConfigs.storages.findLast((p) => p.name === config.StorageProviderName) - const storageAdapter = buildStorageFromConfig(storageConfigs, config.MongoUrl) + const storageAdapter = 
buildStorageFromConfig(storageConfigs) const app = express() const port = config.Port app.use(cors()) @@ -78,12 +78,14 @@ export const main = async (): Promise => { for (const res of event.egressInfo.fileResults) { const data = dataByUUID.get(res.filename) if (data !== undefined) { - const client = await WorkspaceClient.create(data.workspace) const prefix = rootPrefix(storageConfig, data.workspaceId) const filename = stripPrefix(prefix, res.filename) - await storageAdapter.syncBlobFromStorage(ctx, data.workspaceId, filename, storageConfig?.name) - await client.saveFile(filename, data.name) - await client.close() + const storedBlob = await storageAdapter.stat(ctx, data.workspaceId, filename) + if (storedBlob !== undefined) { + const client = await WorkspaceClient.create(data.workspace) + await client.saveFile(filename, data.name, storedBlob) + await client.close() + } dataByUUID.delete(res.filename) } else { console.log('no data found for', res.filename) diff --git a/services/love/src/workspaceClient.ts b/services/love/src/workspaceClient.ts index 9f368418cdd..99ee6de361b 100644 --- a/services/love/src/workspaceClient.ts +++ b/services/love/src/workspaceClient.ts @@ -41,7 +41,7 @@ export class WorkspaceClient { return this.client } - async saveFile (uuid: string, name: string): Promise { + async saveFile (uuid: string, name: string, blob: Blob): Promise { const current = await this.client.findOne(drive.class.Drive, { _id: love.space.Drive }) if (current === undefined) { await this.client.createDoc( @@ -59,23 +59,19 @@ export class WorkspaceClient { love.space.Drive ) } - - const blob = await this.client.findOne(core.class.Blob, { _id: uuid as Ref }) - if (blob !== undefined) { - const data = { - file: uuid as Ref, - title: name, - size: blob.size, - type: blob.contentType, - lastModified: blob.modifiedOn, - // hardcoded values from preset we use - // https://docs.livekit.io/realtime/egress/overview/#EncodingOptionsPreset - metadata: { - originalHeight: 720, - originalWidth: 1280 - } + const data = { + file: uuid as Ref, + title: name, + size: blob.size, + type: blob.contentType, + lastModified: blob.modifiedOn, + // hardcoded values from preset we use + // https://docs.livekit.io/realtime/egress/overview/#EncodingOptionsPreset + metadata: { + originalHeight: 720, + originalWidth: 1280 } - await createFile(this.client, love.space.Drive, drive.ids.Root, data) } + await createFile(this.client, love.space.Drive, drive.ids.Root, data) } } diff --git a/services/print/pod-print/src/config.ts b/services/print/pod-print/src/config.ts index b7a12dd4073..26de472d078 100644 --- a/services/print/pod-print/src/config.ts +++ b/services/print/pod-print/src/config.ts @@ -4,7 +4,6 @@ export interface Config { Port: number - DbURL: string Secret: string } @@ -13,7 +12,6 @@ const parseNumber = (str: string | undefined): number | undefined => (str !== un const config: Config = (() => { const params: Partial = { Port: parseNumber(process.env.PORT) ?? 
4005, - DbURL: process.env.MONGO_URL, Secret: process.env.SECRET } diff --git a/services/print/pod-print/src/main.ts b/services/print/pod-print/src/main.ts index e7d90165f49..bf77e34e98b 100644 --- a/services/print/pod-print/src/main.ts +++ b/services/print/pod-print/src/main.ts @@ -17,7 +17,7 @@ export const main = async (): Promise => { setupMetadata() const storageConfig = storageConfigFromEnv() - const { app, close } = createServer(config.DbURL, storageConfig) + const { app, close } = createServer(storageConfig) const server = listen(app, config.Port) const shutdown = (): void => { diff --git a/services/print/pod-print/src/server.ts b/services/print/pod-print/src/server.ts index 53a46f99633..57c1d76ece8 100644 --- a/services/print/pod-print/src/server.ts +++ b/services/print/pod-print/src/server.ts @@ -110,8 +110,8 @@ const wrapRequest = (fn: AsyncRequestHandler) => (req: Request, res: Response, n handleRequest(fn, req, res, next) } -export function createServer (dbUrl: string, storageConfig: StorageConfiguration): { app: Express, close: () => void } { - const storageAdapter = buildStorageFromConfig(storageConfig, dbUrl) +export function createServer (storageConfig: StorageConfiguration): { app: Express, close: () => void } { + const storageAdapter = buildStorageFromConfig(storageConfig) const measureCtx = new MeasureMetricsContext('print', {}) const app = express() @@ -187,7 +187,7 @@ export function createServer (dbUrl: string, storageConfig: StorageConfiguration throw new ApiError(404, `File ${file} not found`) } - const htmlRes = await convertToHtml(Buffer.concat(originalFile)) + const htmlRes = await convertToHtml(Buffer.concat(originalFile as any)) if (htmlRes === undefined) { throw new ApiError(400, 'Failed to convert') diff --git a/services/sign/pod-sign/src/config.ts b/services/sign/pod-sign/src/config.ts index 3d8c0c6613d..cc71f9abd4c 100644 --- a/services/sign/pod-sign/src/config.ts +++ b/services/sign/pod-sign/src/config.ts @@ -8,7 +8,6 @@ export interface Config { AccountsUrl: string Cert: Buffer CertPwd: string - DbURL: string Port: number Secret: string ServiceID: string @@ -23,7 +22,6 @@ const config: Config = (() => { AccountsUrl: process.env.ACCOUNTS_URL, Cert: process.env.CERTIFICATE_PATH !== undefined ? fs.readFileSync(process.env.CERTIFICATE_PATH) : undefined, CertPwd: process.env.CERTIFICATE_PASSWORD ?? '', - DbURL: process.env.MONGO_URL, Port: parseNumber(process.env.PORT) ?? 
   4006,
   Secret: process.env.SECRET,
   ServiceID: process.env.SERVICE_ID,

diff --git a/services/sign/pod-sign/src/main.ts b/services/sign/pod-sign/src/main.ts
index 5d5601df28d..0e2b6063c50 100644
--- a/services/sign/pod-sign/src/main.ts
+++ b/services/sign/pod-sign/src/main.ts
@@ -3,9 +3,9 @@
 //
 
 import { setMetadata } from '@hcengineering/platform'
-import { storageConfigFromEnv } from '@hcengineering/server-storage'
 import serverClient from '@hcengineering/server-client'
 import { loadBrandingMap } from '@hcengineering/server-core'
+import { storageConfigFromEnv } from '@hcengineering/server-storage'
 import serverToken from '@hcengineering/server-token'
 
 import config from './config'
@@ -20,7 +20,7 @@ const setupMetadata = (): void => {
 export const main = async (): Promise<void> => {
   setupMetadata()
   const storageConfig = storageConfigFromEnv()
-  const server = listen(createServer(config.DbURL, storageConfig, loadBrandingMap(config.BrandingPath)), config.Port)
+  const server = listen(createServer(storageConfig, loadBrandingMap(config.BrandingPath)), config.Port)
 
   const shutdown = (): void => {
     server.close(() => process.exit())
diff --git a/services/sign/pod-sign/src/server.ts b/services/sign/pod-sign/src/server.ts
index 941ec77143e..6d25b2c090b 100644
--- a/services/sign/pod-sign/src/server.ts
+++ b/services/sign/pod-sign/src/server.ts
@@ -14,19 +14,19 @@
 // limitations under the License.
 //
 
+import { MeasureMetricsContext, generateId } from '@hcengineering/core'
+import { StorageConfiguration } from '@hcengineering/server-core'
+import { buildStorageFromConfig } from '@hcengineering/server-storage'
+import { Token } from '@hcengineering/server-token'
 import cors from 'cors'
 import express, { type Express, type NextFunction, type Request, type Response } from 'express'
-import { Token } from '@hcengineering/server-token'
 import { type Server } from 'http'
-import { StorageConfiguration } from '@hcengineering/server-core'
-import { buildStorageFromConfig } from '@hcengineering/server-storage'
-import { MeasureMetricsContext, generateId } from '@hcengineering/core'
 
+import { type Branding, type BrandingMap, extractBranding } from './branding'
+import config from './config'
 import { ApiError } from './error'
 import { signPDF } from './sign'
 import { extractToken } from './token'
-import { type Branding, type BrandingMap, extractBranding } from './branding'
-import config from './config'
 
 type AsyncRequestHandler = (
   req: Request,
@@ -58,8 +58,8 @@ const wrapRequest =
     handleRequest(fn, brandings, req, res, next)
   }
 
-export function createServer (dbUrl: string, storageConfig: StorageConfiguration, brandings: BrandingMap): Express {
-  const storageAdapter = buildStorageFromConfig(storageConfig, dbUrl)
+export function createServer (storageConfig: StorageConfiguration, brandings: BrandingMap): Express {
+  const storageAdapter = buildStorageFromConfig(storageConfig)
   const measureCtx = new MeasureMetricsContext('sign', {})
 
   const app = express()
diff --git a/services/telegram-bot/pod-telegram-bot/src/start.ts b/services/telegram-bot/pod-telegram-bot/src/start.ts
index ec6a15092c6..cd731a79543 100644
--- a/services/telegram-bot/pod-telegram-bot/src/start.ts
+++ b/services/telegram-bot/pod-telegram-bot/src/start.ts
@@ -78,7 +78,7 @@ export const start = async (): Promise<void> => {
   registerLoaders()
 
   const storageConfig: StorageConfiguration = storageConfigFromEnv()
-  const storageAdapter = buildStorageFromConfig(storageConfig, config.MongoURL)
+  const storageAdapter = buildStorageFromConfig(storageConfig)
   const worker = await PlatformWorker.create(ctx, storageAdapter)
   const bot = await setUpBot(worker)
diff --git a/services/telegram/pod-telegram/src/main.ts b/services/telegram/pod-telegram/src/main.ts
index df1a3e335d0..fc830eb55e3 100644
--- a/services/telegram/pod-telegram/src/main.ts
+++ b/services/telegram/pod-telegram/src/main.ts
@@ -27,7 +27,7 @@ export const main = async (): Promise<void> => {
   setMetadata(serverToken.metadata.Secret, config.Secret)
 
   const storageConfig: StorageConfiguration = storageConfigFromEnv()
-  const storageAdapter = buildStorageFromConfig(storageConfig, config.MongoURI)
+  const storageAdapter = buildStorageFromConfig(storageConfig)
   const platformWorker = await PlatformWorker.create(ctx, storageAdapter)
 
   const endpoints: Array<[string, Handler]> = [
diff --git a/tests/docker-compose.override.yaml b/tests/docker-compose.override.yaml
index 18d54ea2170..2fde3c58ee7 100644
--- a/tests/docker-compose.override.yaml
+++ b/tests/docker-compose.override.yaml
@@ -4,7 +4,6 @@ services:
       - DB_URL=postgresql://postgres:example@postgres:5432
   transactor:
     environment:
-      - MONGO_URL=mongodb://mongodb:27018
       - DB_URL=postgresql://postgres:example@postgres:5432
   workspace:
     environment:
diff --git a/tests/docker-compose.yaml b/tests/docker-compose.yaml
index d88c275e35d..1175e6d3ab5 100644
--- a/tests/docker-compose.yaml
+++ b/tests/docker-compose.yaml
@@ -65,7 +65,6 @@ services:
     environment:
       - SERVER_SECRET=secret
       - DB_URL=mongodb://mongodb:27018
-      - MONGO_URL=mongodb://mongodb:27018
       - TRANSACTOR_URL=ws://transactor:3334;ws://localhost:3334
       - STORAGE_CONFIG=${STORAGE_CONFIG}
      - MODEL_ENABLED=*
@@ -90,7 +89,6 @@
       - SERVER_PORT=8083
       - SERVER_SECRET=secret
       - ACCOUNTS_URL=http://localhost:3003
-      - MONGO_URL=mongodb://mongodb:27018
       - UPLOAD_URL=/files
       - ELASTIC_URL=http://elastic:9200
       - GMAIL_URL=http://localhost:8088
@@ -142,7 +140,6 @@
       - COLLABORATOR_PORT=3079
       - SECRET=secret
       - ACCOUNTS_URL=http://account:3003
-      - MONGO_URL=mongodb://mongodb:27018
       - STORAGE_CONFIG=${STORAGE_CONFIG}
     restart: unless-stopped
   rekoni:
diff --git a/tests/restore-pg.sh b/tests/restore-pg.sh
index 4b94b63a527..1e7c7aa4bf9 100755
--- a/tests/restore-pg.sh
+++ b/tests/restore-pg.sh
@@ -15,4 +15,4 @@
 ./tool-pg.sh configure sanity-ws --list
 
 # setup issue createdOn for yesterday
-./tool-pg.sh change-field sanity-ws --objectId 65e47f1f1b875b51e3b4b983 --objectClass tracker:class:Issue --attribute createdOn --value $(($(date +%s)*1000 - 86400000)) --type number --domain task
\ No newline at end of file
+./tool-pg.sh change-field sanity-ws --objectId 65e47f1f1b875b51e3b4b983 --objectClass tracker:class:Issue --attribute createdOn --value $(($(date +%s)*1000 - 86400000)) --type number
\ No newline at end of file
diff --git a/tests/restore-workspace.sh b/tests/restore-workspace.sh
index 0fb8ac485b5..05c494a8d82 100755
--- a/tests/restore-workspace.sh
+++ b/tests/restore-workspace.sh
@@ -15,4 +15,4 @@
 ./tool.sh configure sanity-ws --list
 
 # setup issue createdOn for yesterday
-./tool.sh change-field sanity-ws --objectId 65e47f1f1b875b51e3b4b983 --objectClass tracker:class:Issue --attribute createdOn --value $(($(date +%s)*1000 - 86400000)) --type number --domain task
\ No newline at end of file
+./tool.sh change-field sanity-ws --objectId 65e47f1f1b875b51e3b4b983 --objectClass tracker:class:Issue --attribute createdOn --value $(($(date +%s)*1000 - 86400000)) --type number
\ No newline at end of file
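Note: the change above is identical across pod-sign, pod-telegram-bot, and pod-telegram — buildStorageFromConfig now takes only the storage configuration, so callers stop threading a Mongo/Postgres URL through, and the MONGO_URL variables disappear from the test compose files accordingly. A minimal bootstrap sketch under that assumption (wiring as in pod-sign; treat it as illustrative, not the service's exact code):

    import { loadBrandingMap, type StorageConfiguration } from '@hcengineering/server-core'
    import { buildStorageFromConfig, storageConfigFromEnv } from '@hcengineering/server-storage'
    import config from './config'
    import { createServer, listen } from './server'

    // Storage is described entirely by the STORAGE_CONFIG env var;
    // no database URL is passed to the storage layer any more.
    const storageConfig: StorageConfiguration = storageConfigFromEnv()
    const server = listen(createServer(storageConfig, loadBrandingMap(config.BrandingPath)), config.Port)
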
From 385bd572a32f68e8fd80bc909d6bfdbca760f684 Mon Sep 17 00:00:00 2001
From: Alexander Onnikov
Date: Tue, 22 Oct 2024 00:49:23 +0700
Subject: [PATCH 12/21] fix: use string content type instead of enum (#7007)

Signed-off-by: Alexander Onnikov
---
 workers/datalake/schema/datalake.sql |  5 +----
 workers/datalake/src/blob.ts         |  8 +++-----
 workers/datalake/src/db.ts           |  9 ++++-----
 3 files changed, 8 insertions(+), 14 deletions(-)

diff --git a/workers/datalake/schema/datalake.sql b/workers/datalake/schema/datalake.sql
index 892564e7e9a..4dfbfed0356 100644
--- a/workers/datalake/schema/datalake.sql
+++ b/workers/datalake/schema/datalake.sql
@@ -3,12 +3,10 @@ CREATE SCHEMA IF NOT EXISTS blob;
 
 DROP TABLE IF EXISTS blob.blob;
 DROP TABLE IF EXISTS blob.data;
-DROP TYPE IF EXISTS blob.content_type;
 DROP TYPE IF EXISTS blob.location;
 
 -- B L O B
 
-CREATE TYPE blob.content_type AS ENUM ('application','audio','font','image','model','text','video');
 CREATE TYPE blob.location AS ENUM ('kv', 'weur', 'eeur', 'wnam', 'enam', 'apac');
 
 \echo "Creating blob.data..."
@@ -17,8 +15,7 @@ CREATE TABLE blob.data (
   location blob.location NOT NULL,
   size INT8 NOT NULL,
   filename UUID NOT NULL,
-  type blob.content_type NOT NULL,
-  subtype STRING(64) NOT NULL,
+  type STRING(255) NOT NULL,
   CONSTRAINT pk_data PRIMARY KEY (hash, location)
 );
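The TypeScript side of the same fix follows. The shape of the change, as a standalone sketch (not the worker's exact code): rather than splitting the MIME string so its major part fits the dropped SQL enum, the full string is kept:

    // Before: only enum-listed major types could be stored, and the split
    // discarded structure ('image/svg+xml' became 'image' + 'svg+xml').
    const [mimetype, subtype] = 'image/svg+xml'.split('/')

    // After: the full MIME type (STRING(255) in the new schema) is stored as-is,
    // so arbitrary types such as 'application/x-custom' need no schema change.
    const type: string = 'image/svg+xml'
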
diff --git a/workers/datalake/src/blob.ts b/workers/datalake/src/blob.ts
index fc893b91514..e21f9c327ee 100644
--- a/workers/datalake/src/blob.ts
+++ b/workers/datalake/src/blob.ts
@@ -163,7 +163,6 @@ async function saveBlob (
   const { location, bucket } = selectStorage(env, workspace)
 
   const size = file.size
-  const [mimetype, subtype] = type.split('/')
   const httpMetadata = { contentType: type, cacheControl }
   const filename = getUniqueFilename()
 
@@ -179,7 +178,7 @@ async function saveBlob (
   } else {
     await bucket.put(filename, file, { httpMetadata })
     await sql.begin((sql) => [
-      db.createData(sql, { hash, location, filename, type: mimetype, subtype, size }),
+      db.createData(sql, { hash, location, filename, type, size }),
       db.createBlob(sql, { workspace, name, hash, location })
     ])
   }
@@ -201,7 +200,7 @@ async function saveBlob (
   } else {
     // Otherwise register a new hash and blob
     await sql.begin((sql) => [
-      db.createData(sql, { hash, location, filename, type: mimetype, subtype, size }),
+      db.createData(sql, { hash, location, filename, type, size }),
       db.createBlob(sql, { workspace, name, hash, location })
     ])
   }
@@ -227,9 +226,8 @@ export async function handleBlobUploaded (env: Env, workspace: string, name: str
   } else {
     const size = object.size
     const type = object.httpMetadata.contentType ?? 'application/octet-stream'
-    const [mimetype, subtype] = type.split('/')
 
-    await db.createData(sql, { hash, location, filename, type: mimetype, subtype, size })
+    await db.createData(sql, { hash, location, filename, type, size })
     await db.createBlob(sql, { workspace, name, hash, location })
   }
 }
diff --git a/workers/datalake/src/db.ts b/workers/datalake/src/db.ts
index ad7ea03f331..725c21c62cc 100644
--- a/workers/datalake/src/db.ts
+++ b/workers/datalake/src/db.ts
@@ -25,7 +25,6 @@ export interface BlobDataRecord extends BlobDataId {
   filename: UUID
   size: number
   type: string
-  subtype: string
 }
 
 export interface BlobId {
@@ -47,7 +46,7 @@ export async function getData (sql: postgres.Sql, dataId: BlobDataId): Promise<BlobDataRecord | undefined> {
   const rows = await sql<BlobDataRecord[]>`
-    SELECT hash, location, filename, size, type, subtype
+    SELECT hash, location, filename, size, type
     FROM blob.data
     WHERE hash = ${hash} AND location = ${location}
   `
@@ -60,11 +59,11 @@ export async function getData (sql: postgres.Sql, dataId: BlobDataId): Promise<BlobDataRecord | undefined> {
-  const { hash, location, filename, size, type, subtype } = data
+  const { hash, location, filename, size, type } = data
 
   await sql`
-    UPSERT INTO blob.data (hash, location, filename, size, type, subtype)
-    VALUES (${hash}, ${location}, ${filename}, ${size}, ${type}, ${subtype})
+    UPSERT INTO blob.data (hash, location, filename, size, type)
+    VALUES (${hash}, ${location}, ${filename}, ${size}, ${type})
   `
 }
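For reference, the resulting upsert helper, reassembled from the db.ts hunks above (the record type is abbreviated here; UPSERT is CockroachDB syntax, matching the schema file):

    import postgres from 'postgres'

    interface BlobDataRecord {
      hash: string
      location: string
      filename: string
      size: number
      type: string // full MIME type, e.g. 'application/pdf'
    }

    export async function createData (sql: postgres.Sql, data: BlobDataRecord): Promise<void> {
      const { hash, location, filename, size, type } = data
      await sql`
        UPSERT INTO blob.data (hash, location, filename, size, type)
        VALUES (${hash}, ${location}, ${filename}, ${size}, ${type})
      `
    }
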
From 15ce5442c6236b37531be026d2ebf3e4412bc432 Mon Sep 17 00:00:00 2001
From: Alexander Platov
Date: Tue, 22 Oct 2024 10:46:39 +0300
Subject: [PATCH 13/21] Updated ListItem layout (#7008)

Signed-off-by: Alexander Platov
---
 .../components/calendar/DatePresenter.svelte  |  3 ++-
 .../calendar/DueDatePresenter.svelte          |  1 +
 .../src/components/LabelsPresenter.svelte     | 18 +++++++++++++++--
 .../components/ComponentEditor.svelte         |  9 +++++++--
 .../components/issues/DueDatePresenter.svelte |  1 +
 .../src/components/issues/IssueExtra.svelte   | 14 +++++++++----
 .../milestones/MilestoneEditor.svelte         |  8 ++++++--
 .../src/components/HyperlinkEditor.svelte     |  6 +++---
 .../src/components/list/ListItem.svelte       | 14 +++++++++++--
 .../src/components/list/ListPresenter.svelte  |  9 +++++++++
 .../presenters/GithubIssuePresenter.svelte    | 20 +++++++++----------
 11 files changed, 77 insertions(+), 26 deletions(-)

diff --git a/packages/ui/src/components/calendar/DatePresenter.svelte b/packages/ui/src/components/calendar/DatePresenter.svelte
index 595972441cf..c041825c874 100644
--- a/packages/ui/src/components/calendar/DatePresenter.svelte
+++ b/packages/ui/src/components/calendar/DatePresenter.svelte
@@ -14,7 +14,7 @@
 -->
 {/if}
diff --git a/plugins/tags-resources/src/components/LabelsPresenter.svelte b/plugins/tags-resources/src/components/LabelsPresenter.svelte
index 3d98bb1ac75..dafe0c61858 100644
--- a/plugins/tags-resources/src/components/LabelsPresenter.svelte
+++ b/plugins/tags-resources/src/components/LabelsPresenter.svelte
@@ -49,18 +49,32 @@
   let allWidth: number
   const widths: number[] = []
+  const elements: HTMLDivElement[] = []
 
   afterUpdate(() => {
     let count: number = 0
     widths.forEach((i) => (count += i))
     full = count > allWidth
     dispatch('change', { full, ckeckFilled })
+    if (elements.length > 0) {
+      if (items.length > 4) dispatch('resize', elements[0]?.clientWidth)
+      else {
+        allWidth = 0
+        for (let i = 0; i < items.length; i++) {
+          if (elements[i].clientWidth !== undefined && allWidth < elements[i].clientWidth) {
+            allWidth = elements[i].clientWidth
+          }
+        }
+        dispatch('resize', allWidth + (items.length - 1) * 3)
+      }
+    }
   })
 
 {#if kind === 'list' || kind === 'link'}
   {#if items.length > 4}
   {:else}
-    {#each items as value}
-
+    {#each items as value, i}
+
     {/each}
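The Svelte changes in this patch follow one pattern: measure the rendered chips in afterUpdate and dispatch a 'resize' event so the reworked ListItem can reserve a stable width per column. Condensed from the LabelsPresenter hunk above (markup omitted; elements is assumed to be filled via bind:this in the template, items comes from the component props):

    import { afterUpdate, createEventDispatcher } from 'svelte'

    export let items: unknown[] = []

    const dispatch = createEventDispatcher()
    const elements: HTMLDivElement[] = []

    afterUpdate(() => {
      if (elements.length === 0) return
      if (items.length > 4) {
        // Collapsed mode renders a single summary chip; report its width.
        dispatch('resize', elements[0]?.clientWidth)
      } else {
        // Report the widest chip plus a 3px gap per additional item.
        let width = 0
        for (const el of elements) width = Math.max(width, el.clientWidth)
        dispatch('resize', width + (items.length - 1) * 3)
      }
    })
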
diff --git a/plugins/tracker-resources/src/components/components/ComponentEditor.svelte b/plugins/tracker-resources/src/components/components/ComponentEditor.svelte
index 58c0210f799..53d5cfc974b 100644
--- a/plugins/tracker-resources/src/components/components/ComponentEditor.svelte
+++ b/plugins/tracker-resources/src/components/components/ComponentEditor.svelte
@@ -18,7 +18,7 @@
   import { RuleApplyResult, getClient, getDocRules } from '@hcengineering/presentation'
   import { Component, Issue, IssueTemplate, Project, TrackerEvents } from '@hcengineering/tracker'
   import { ButtonKind, ButtonShape, ButtonSize, deviceOptionsStore as deviceInfo } from '@hcengineering/ui'
-  import { createEventDispatcher } from 'svelte'
+  import { createEventDispatcher, afterUpdate } from 'svelte'
   import { Analytics } from '@hcengineering/analytics'
 
   import { activeComponent } from '../../issues'
@@ -47,6 +47,8 @@
   const dispatch = createEventDispatcher()
 
+  let element: HTMLDivElement
+
   const handleComponentIdChanged = async (newComponentId: Ref<Component> | null | undefined) => {
     if (!isEditable || newComponentId === undefined || (!Array.isArray(value) && value.component === newComponentId)) {
       return
@@ -101,11 +103,13 @@
       }
     }
   }
+
+  afterUpdate(() => dispatch('resize', element?.clientWidth))
 
 {#if kind === 'list'}
   {#if !Array.isArray(value) && value.component}
-
+
diff --git a/plugins/tracker-resources/src/components/issues/IssueExtra.svelte b/plugins/tracker-resources/src/components/issues/IssueExtra.svelte index 8658ab19758..a9540f52087 100644 --- a/plugins/tracker-resources/src/components/issues/IssueExtra.svelte +++ b/plugins/tracker-resources/src/components/issues/IssueExtra.svelte @@ -13,6 +13,7 @@ // limitations under the License. --> -{#if value} +{#if value && presenters.length > 0} 0}
 {#each presenters as mixinPresenter}
-
+
 {/each}
 {/if}
@@ -50,7 +57,6 @@