diff --git a/.vscode/settings.json b/.vscode/settings.json index 144a7ec7ac..9c9430d514 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -1,4 +1,5 @@ { "editor.formatOnSave": true, - "files.eol": "\n" + "files.eol": "\n", + "typescript.tsdk": "node_modules/typescript/lib" } diff --git a/_check_ts_progress.sh b/_check_ts_progress.sh new file mode 100755 index 0000000000..ac74a7205b --- /dev/null +++ b/_check_ts_progress.sh @@ -0,0 +1,10 @@ +#!/usr/bin/env bash + +cloc HEAD \ + --git --md \ + --include-lang=javascript,typescript \ + --found=filelist.txt \ + --exclude-dir=public,libraries + +grep -R \.js$ filelist.txt +rm filelist.txt \ No newline at end of file diff --git a/bin/create-anonymization-script.js b/bin/create-anonymization-script.js index b16a038de0..4c71d60713 100755 --- a/bin/create-anonymization-script.js +++ b/bin/create-anonymization-script.js @@ -1,6 +1,6 @@ #!/usr/bin/env node -const anonymizationService = require('../src/services/anonymization.js'); +const anonymizationService = require('../src/services/anonymization'); const fs = require('fs'); const path = require('path'); diff --git a/bin/release.sh b/bin/release.sh index cdaa324d2c..649a414378 100755 --- a/bin/release.sh +++ b/bin/release.sh @@ -26,9 +26,9 @@ jq '.version = "'$VERSION'"' package.json|sponge package.json git add package.json -echo 'module.exports = { buildDate:"'`date --iso-8601=seconds`'", buildRevision: "'`git log -1 --format="%H"`'" };' > src/services/build.js +echo 'export = { buildDate:"'`date --iso-8601=seconds`'", buildRevision: "'`git log -1 --format="%H"`'" };' > src/services/build.ts -git add src/services/build.js +git add src/services/build.ts TAG=v$VERSION diff --git a/db/migrations/0216__move_content_into_blobs.js b/db/migrations/0216__move_content_into_blobs.js index d2f7a9f2d2..eea287b210 100644 --- a/db/migrations/0216__move_content_into_blobs.js +++ b/db/migrations/0216__move_content_into_blobs.js @@ -1,6 +1,6 @@ module.exports = () => { - 
const sql = require('../../src/services/sql.js'); - const utils = require('../../src/services/utils.js'); + const sql = require('../../src/services/sql'); + const utils = require('../../src/services/utils'); const existingBlobIds = new Set(); diff --git a/db/migrations/0220__migrate_images_to_attachments.js b/db/migrations/0220__migrate_images_to_attachments.js index 74e7436139..f88894820f 100644 --- a/db/migrations/0220__migrate_images_to_attachments.js +++ b/db/migrations/0220__migrate_images_to_attachments.js @@ -1,9 +1,9 @@ module.exports = () => { - const beccaLoader = require('../../src/becca/becca_loader.js'); - const becca = require('../../src/becca/becca.js'); - const cls = require('../../src/services/cls.js'); - const log = require('../../src/services/log.js'); - const sql = require('../../src/services/sql.js'); + const beccaLoader = require('../../src/becca/becca_loader'); + const becca = require('../../src/becca/becca'); + const cls = require('../../src/services/cls'); + const log = require('../../src/services/log'); + const sql = require('../../src/services/sql'); cls.init(() => { // emergency disabling of image compression since it appears to make problems in migration to 0.61 @@ -13,7 +13,7 @@ module.exports = () => { for (const note of Object.values(becca.notes)) { try { - const attachment = note.convertToParentAttachment({autoConversion: true}); + const attachment = note.convertToParentAttachment({ autoConversion: true }); if (attachment) { log.info(`Auto-converted note '${note.noteId}' into attachment '${attachment.attachmentId}'.`); diff --git a/docker_healthcheck.js b/docker_healthcheck.js index f483d7a877..9761aebe2e 100755 --- a/docker_healthcheck.js +++ b/docker_healthcheck.js @@ -1,7 +1,7 @@ const http = require("http"); const ini = require("ini"); const fs = require("fs"); -const dataDir = require('./src/services/data_dir.js'); +const dataDir = require('./src/services/data_dir'); const config = 
ini.parse(fs.readFileSync(dataDir.CONFIG_INI_PATH, 'utf-8')); if (config.Network.https) { @@ -10,8 +10,8 @@ if (config.Network.https) { process.exit(0); } -const port = require('./src/services/port.js'); -const host = require('./src/services/host.js'); +const port = require('./src/services/port'); +const host = require('./src/services/host'); const options = { timeout: 2000 }; diff --git a/dump-db/inc/data_key.js b/dump-db/inc/data_key.js index 58d3dd8507..1dfc0dacff 100644 --- a/dump-db/inc/data_key.js +++ b/dump-db/inc/data_key.js @@ -1,5 +1,5 @@ const crypto = require("crypto"); -const sql = require('./sql.js'); +const sql = require('./sql'); const decryptService = require('./decrypt.js'); function getDataKey(password) { diff --git a/dump-db/inc/dump.js b/dump-db/inc/dump.js index 96fa60f94b..35191ded65 100644 --- a/dump-db/inc/dump.js +++ b/dump-db/inc/dump.js @@ -74,7 +74,7 @@ function dumpDocument(documentPath, targetPath, options) { return; } - let {content} = sql.getRow("SELECT content FROM blobs WHERE blobId = ?", [noteRow.blobId]); + let { content } = sql.getRow("SELECT content FROM blobs WHERE blobId = ?", [noteRow.blobId]); if (content !== null && noteRow.isProtected && dataKey) { content = decryptService.decrypt(dataKey, content); @@ -108,7 +108,7 @@ function dumpDocument(documentPath, targetPath, options) { } try { - fs.mkdirSync(childTargetPath, {recursive: true}); + fs.mkdirSync(childTargetPath, { recursive: true }); } catch (e) { console.error(`DUMPERROR: Creating directory ${childTargetPath} failed with error '${e.message}'`); @@ -157,7 +157,7 @@ function validatePaths(documentPath, targetPath) { } if (!fs.existsSync(targetPath)) { - const ret = fs.mkdirSync(targetPath, {recursive: true}); + const ret = fs.mkdirSync(targetPath, { recursive: true }); if (!ret) { console.error(`Target path '${targetPath}' could not be created. 
Run with --help to see usage.`); diff --git a/electron.js b/electron.js index 7cd04cfb2b..69f403dd83 100644 --- a/electron.js +++ b/electron.js @@ -1,10 +1,10 @@ 'use strict'; const {app, globalShortcut, BrowserWindow} = require('electron'); -const sqlInit = require('./src/services/sql_init.js'); +const sqlInit = require('./src/services/sql_init'); const appIconService = require('./src/services/app_icon.js'); -const windowService = require('./src/services/window.js'); -const tray = require('./src/services/tray.js'); +const windowService = require('./src/services/window'); +const tray = require('./src/services/tray'); // Adds debug features like hotkeys for triggering dev tools and reload require('electron-debug')(); diff --git a/nodemon.json b/nodemon.json index df14c4a84d..86e1c2ca46 100644 --- a/nodemon.json +++ b/nodemon.json @@ -2,12 +2,11 @@ "restartable": "rs", "ignore": [".git", "node_modules/**/node_modules", "src/public/"], "verbose": false, - "execMap": { - "js": "node --harmony" - }, + "exec": "ts-node", "watch": ["src/"], + "signal": "SIGTERM", "env": { "NODE_ENV": "development" }, - "ext": "js,json" + "ext": "ts,js,json" } diff --git a/package-lock.json b/package-lock.json index c8c24644ea..4123aca014 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "trilium", - "version": "0.63.3", + "version": "0.63.5", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "trilium", - "version": "0.63.3", + "version": "0.63.5", "hasInstallScript": true, "license": "AGPL-3.0-only", "dependencies": { @@ -88,6 +88,22 @@ "trilium": "src/www.js" }, "devDependencies": { + "@types/archiver": "^6.0.2", + "@types/better-sqlite3": "^7.6.9", + "@types/cls-hooked": "^4.3.8", + "@types/escape-html": "^1.0.4", + "@types/express": "^4.17.21", + "@types/html": "^1.0.4", + "@types/ini": "^4.1.0", + "@types/jsdom": "^21.1.6", + "@types/mime-types": "^2.1.4", + "@types/node": "^20.11.19", + "@types/sanitize-html": "^2.11.0", + 
"@types/sax": "^1.2.7", + "@types/stream-throttle": "^0.1.4", + "@types/turndown": "^5.0.4", + "@types/ws": "^8.5.10", + "@types/xml2js": "^0.4.14", "cross-env": "7.0.3", "electron": "25.9.8", "electron-builder": "24.13.3", @@ -99,6 +115,9 @@ "lorem-ipsum": "2.0.8", "nodemon": "3.1.0", "rcedit": "4.0.1", + "ts-node": "^10.9.2", + "tslib": "^2.6.2", + "typescript": "^5.3.3", "webpack": "5.90.3", "webpack-cli": "5.1.4" }, @@ -139,6 +158,28 @@ "resolved": "https://registry.npmjs.org/@braintree/sanitize-url/-/sanitize-url-6.0.4.tgz", "integrity": "sha512-s3jaWicZd0pkP0jf5ysyHUI/RE7MHos6qlToFcGWXVp+ykHOy77OUMrfbgJ9it2C5bow7OIQwYYaHjk9XlBQ2A==" }, + "node_modules/@cspotcode/source-map-support": { + "version": "0.8.1", + "resolved": "https://registry.npmjs.org/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz", + "integrity": "sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==", + "dev": true, + "dependencies": { + "@jridgewell/trace-mapping": "0.3.9" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/@cspotcode/source-map-support/node_modules/@jridgewell/trace-mapping": { + "version": "0.3.9", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.9.tgz", + "integrity": "sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==", + "dev": true, + "dependencies": { + "@jridgewell/resolve-uri": "^3.0.3", + "@jridgewell/sourcemap-codec": "^1.4.10" + } + }, "node_modules/@develar/schema-utils": { "version": "2.6.5", "resolved": "https://registry.npmjs.org/@develar/schema-utils/-/schema-utils-2.6.5.tgz", @@ -1105,11 +1146,63 @@ "node": ">= 10" } }, + "node_modules/@tsconfig/node10": { + "version": "1.0.9", + "resolved": "https://registry.npmjs.org/@tsconfig/node10/-/node10-1.0.9.tgz", + "integrity": "sha512-jNsYVVxU8v5g43Erja32laIDHXeoNvFEpX33OK4d6hljo3jDhCBDhx5dhCCTMWUojscpAagGiRkBKxpdl9fxqA==", + "dev": true + }, + 
"node_modules/@tsconfig/node12": { + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/@tsconfig/node12/-/node12-1.0.11.tgz", + "integrity": "sha512-cqefuRsh12pWyGsIoBKJA9luFu3mRxCA+ORZvA4ktLSzIuCUtWVxGIuXigEwO5/ywWFMZ2QEGKWvkZG1zDMTag==", + "dev": true + }, + "node_modules/@tsconfig/node14": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/@tsconfig/node14/-/node14-1.0.3.tgz", + "integrity": "sha512-ysT8mhdixWK6Hw3i1V2AeRqZ5WfXg1G43mqoYlM2nc6388Fq5jcXyr5mRsqViLx/GJYdoL0bfXD8nmF+Zn/Iow==", + "dev": true + }, + "node_modules/@tsconfig/node16": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@tsconfig/node16/-/node16-1.0.4.tgz", + "integrity": "sha512-vxhUy4J8lyeyinH7Azl1pdd43GJhZH/tP2weN8TntQblOY+A0XbT8DJk1/oCPuOOyg/Ja757rG0CgHcWC8OfMA==", + "dev": true + }, "node_modules/@tweenjs/tween.js": { "version": "21.0.0", "resolved": "https://registry.npmjs.org/@tweenjs/tween.js/-/tween.js-21.0.0.tgz", "integrity": "sha512-qVfOiFh0U8ZSkLgA6tf7kj2MciqRbSCWaJZRwftVO7UbtVDNsZAXpWXqvCDtIefvjC83UJB+vHTDOGm5ibXjEA==" }, + "node_modules/@types/archiver": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/@types/archiver/-/archiver-6.0.2.tgz", + "integrity": "sha512-KmROQqbQzKGuaAbmK+ZcytkJ51+YqDa7NmbXjmtC5YBLSyQYo21YaUnQ3HbaPFKL1ooo6RQ6OPYPIDyxfpDDXw==", + "dev": true, + "dependencies": { + "@types/readdir-glob": "*" + } + }, + "node_modules/@types/better-sqlite3": { + "version": "7.6.9", + "resolved": "https://registry.npmjs.org/@types/better-sqlite3/-/better-sqlite3-7.6.9.tgz", + "integrity": "sha512-FvktcujPDj9XKMJQWFcl2vVl7OdRIqsSRX9b0acWwTmwLK9CF2eqo/FRcmMLNpugKoX/avA6pb7TorDLmpgTnQ==", + "dev": true, + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/body-parser": { + "version": "1.19.5", + "resolved": "https://registry.npmjs.org/@types/body-parser/-/body-parser-1.19.5.tgz", + "integrity": "sha512-fB3Zu92ucau0iQ0JMCFQE7b/dv8Ot07NI3KaZIkIUNXq82k4eBAqUaneXfleGY9JWskeS9y+u0nXMyspcuQrCg==", + 
"dev": true, + "dependencies": { + "@types/connect": "*", + "@types/node": "*" + } + }, "node_modules/@types/cacheable-request": { "version": "6.0.2", "resolved": "https://registry.npmjs.org/@types/cacheable-request/-/cacheable-request-6.0.2.tgz", @@ -1121,6 +1214,24 @@ "@types/responselike": "*" } }, + "node_modules/@types/cls-hooked": { + "version": "4.3.8", + "resolved": "https://registry.npmjs.org/@types/cls-hooked/-/cls-hooked-4.3.8.tgz", + "integrity": "sha512-tf/7H883gFA6MPlWI15EQtfNZ+oPL0gLKkOlx9UHFrun1fC/FkuyNBpTKq1B5E3T4fbvjId6WifHUdSGsMMuPg==", + "dev": true, + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/connect": { + "version": "3.4.38", + "resolved": "https://registry.npmjs.org/@types/connect/-/connect-3.4.38.tgz", + "integrity": "sha512-K6uROf1LD88uDQqJCktA4yzL1YYAK6NgfsI0v/mTgyPKWsX1CnJ0XPSDhViejru1GcRkLWb8RlzFYJRqGUbaug==", + "dev": true, + "dependencies": { + "@types/node": "*" + } + }, "node_modules/@types/d3-scale": { "version": "4.0.8", "resolved": "https://registry.npmjs.org/@types/d3-scale/-/d3-scale-4.0.8.tgz", @@ -1147,6 +1258,12 @@ "@types/ms": "*" } }, + "node_modules/@types/escape-html": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@types/escape-html/-/escape-html-1.0.4.tgz", + "integrity": "sha512-qZ72SFTgUAZ5a7Tj6kf2SHLetiH5S6f8G5frB2SPQ3EyF02kxdyBFf4Tz4banE3xCgGnKgWLt//a6VuYHKYJTg==", + "dev": true + }, "node_modules/@types/eslint": { "version": "8.4.1", "resolved": "https://registry.npmjs.org/@types/eslint/-/eslint-8.4.1.tgz", @@ -1173,6 +1290,30 @@ "integrity": "sha512-/kYRxGDLWzHOB7q+wtSUQlFrtcdUccpfy+X+9iMBpHK8QLLhx2wIPYuS5DYtR9Wa/YlZAbIovy7qVdB1Aq6Lyw==", "dev": true }, + "node_modules/@types/express": { + "version": "4.17.21", + "resolved": "https://registry.npmjs.org/@types/express/-/express-4.17.21.tgz", + "integrity": "sha512-ejlPM315qwLpaQlQDTjPdsUFSc6ZsP4AN6AlWnogPjQ7CVi7PYF3YVz+CY3jE2pwYf7E/7HlDAN0rV2GxTG0HQ==", + "dev": true, + "dependencies": { + "@types/body-parser": "*", + 
"@types/express-serve-static-core": "^4.17.33", + "@types/qs": "*", + "@types/serve-static": "*" + } + }, + "node_modules/@types/express-serve-static-core": { + "version": "4.17.43", + "resolved": "https://registry.npmjs.org/@types/express-serve-static-core/-/express-serve-static-core-4.17.43.tgz", + "integrity": "sha512-oaYtiBirUOPQGSWNGPWnzyAFJ0BP3cwvN4oWZQY+zUBwpVIGsKUkpBpSztp74drYcjavs7SKFZ4DX1V2QeN8rg==", + "dev": true, + "dependencies": { + "@types/node": "*", + "@types/qs": "*", + "@types/range-parser": "*", + "@types/send": "*" + } + }, "node_modules/@types/fs-extra": { "version": "9.0.13", "resolved": "https://registry.npmjs.org/@types/fs-extra/-/fs-extra-9.0.13.tgz", @@ -1192,11 +1333,64 @@ "@types/node": "*" } }, + "node_modules/@types/html": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@types/html/-/html-1.0.4.tgz", + "integrity": "sha512-Wb1ymSAftCLxhc3D6vS0Ike/0xg7W6c+DQxAkerU6pD7C8CMzTYwvrwnlcrTfsVO/nMelB9KOKIT7+N5lOeQUg==", + "dev": true + }, "node_modules/@types/http-cache-semantics": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/@types/http-cache-semantics/-/http-cache-semantics-4.0.1.tgz", "integrity": "sha512-SZs7ekbP8CN0txVG2xVRH6EgKmEm31BOxA07vkFaETzZz1xh+cbt8BcI0slpymvwhx5dlFnQG2rTlPVQn+iRPQ==" }, + "node_modules/@types/http-errors": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/@types/http-errors/-/http-errors-2.0.4.tgz", + "integrity": "sha512-D0CFMMtydbJAegzOyHjtiKPLlvnm3iTZyZRSZoLq2mRhDdmLfIWOCYPfQJ4cu2erKghU++QvjcUjp/5h7hESpA==", + "dev": true + }, + "node_modules/@types/ini": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/@types/ini/-/ini-4.1.0.tgz", + "integrity": "sha512-mTehMtc+xtnWBBvqizcqYCktKDBH2WChvx1GU3Sfe4PysFDXiNe+1YwtpVX1MDtCa4NQrSPw2+3HmvXHY3gt1w==", + "dev": true + }, + "node_modules/@types/jsdom": { + "version": "21.1.6", + "resolved": "https://registry.npmjs.org/@types/jsdom/-/jsdom-21.1.6.tgz", + "integrity": 
"sha512-/7kkMsC+/kMs7gAYmmBR9P0vGTnOoLhQhyhQJSlXGI5bzTHp6xdo0TtKWQAsz6pmSAeVqKSbqeyP6hytqr9FDw==", + "dev": true, + "dependencies": { + "@types/node": "*", + "@types/tough-cookie": "*", + "parse5": "^7.0.0" + } + }, + "node_modules/@types/jsdom/node_modules/entities": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/entities/-/entities-4.5.0.tgz", + "integrity": "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==", + "dev": true, + "engines": { + "node": ">=0.12" + }, + "funding": { + "url": "https://github.com/fb55/entities?sponsor=1" + } + }, + "node_modules/@types/jsdom/node_modules/parse5": { + "version": "7.1.2", + "resolved": "https://registry.npmjs.org/parse5/-/parse5-7.1.2.tgz", + "integrity": "sha512-Czj1WaSVpaoj0wbhMzLmWD69anp2WH7FXMB9n1Sy8/ZFF9jolSQVMu1Ij5WIyGmcBmhk7EOndpO4mIpihVqAXw==", + "dev": true, + "dependencies": { + "entities": "^4.4.0" + }, + "funding": { + "url": "https://github.com/inikulin/parse5?sponsor=1" + } + }, "node_modules/@types/json-schema": { "version": "7.0.9", "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.9.tgz", @@ -1241,6 +1435,18 @@ "integrity": "sha512-eC4U9MlIcu2q0KQmXszyn5Akca/0jrQmwDRgpAMJai7qBWq4amIQhZyNau4VYGtCeALvW1/NtjzJJ567aZxfKA==", "dev": true }, + "node_modules/@types/mime": { + "version": "1.3.5", + "resolved": "https://registry.npmjs.org/@types/mime/-/mime-1.3.5.tgz", + "integrity": "sha512-/pyBZWSLD2n0dcHE3hq8s8ZvcETHtEuF+3E7XVt0Ig2nvsVQXdghHVcEkIWjy9A0wKfTn97a/PSDYohKIlnP/w==", + "dev": true + }, + "node_modules/@types/mime-types": { + "version": "2.1.4", + "resolved": "https://registry.npmjs.org/@types/mime-types/-/mime-types-2.1.4.tgz", + "integrity": "sha512-lfU4b34HOri+kAY5UheuFMWPDOI+OPceBSHZKp69gEyTL/mmJ4cnU6Y/rlme3UL3GyOn6Y42hyIEw0/q8sWx5w==", + "dev": true + }, "node_modules/@types/minimatch": { "version": "3.0.3", "resolved": "https://registry.npmjs.org/@types/minimatch/-/minimatch-3.0.3.tgz", @@ -1253,9 
+1459,12 @@ "integrity": "sha512-xPSg0jm4mqgEkNhowKgZFBNtwoEwF6gJ4Dhww+GFpm3IgtNseHQZ5IqdNwnquZEoANxyDAKDRAdVo4Z72VvD/g==" }, "node_modules/@types/node": { - "version": "18.16.18", - "resolved": "https://registry.npmjs.org/@types/node/-/node-18.16.18.tgz", - "integrity": "sha512-/aNaQZD0+iSBAGnvvN2Cx92HqE5sZCPZtx2TsK+4nvV23fFe09jVDvpArXr2j9DnYlzuU9WuoykDDc6wqvpNcw==" + "version": "20.11.19", + "resolved": "https://registry.npmjs.org/@types/node/-/node-20.11.19.tgz", + "integrity": "sha512-7xMnVEcZFu0DikYjWOlRq7NTPETrm7teqUT2WkQjrTIkEgUyyGdWsj/Zg8bEJt5TNklzbPD1X3fqfsHw3SpapQ==", + "dependencies": { + "undici-types": "~5.26.4" + } }, "node_modules/@types/plist": { "version": "3.0.5", @@ -1268,6 +1477,27 @@ "xmlbuilder": ">=11.0.1" } }, + "node_modules/@types/qs": { + "version": "6.9.11", + "resolved": "https://registry.npmjs.org/@types/qs/-/qs-6.9.11.tgz", + "integrity": "sha512-oGk0gmhnEJK4Yyk+oI7EfXsLayXatCWPHary1MtcmbAifkobT9cM9yutG/hZKIseOU0MqbIwQ/u2nn/Gb+ltuQ==", + "dev": true + }, + "node_modules/@types/range-parser": { + "version": "1.2.7", + "resolved": "https://registry.npmjs.org/@types/range-parser/-/range-parser-1.2.7.tgz", + "integrity": "sha512-hKormJbkJqzQGhziax5PItDUTMAM9uE2XXQmM37dyd4hVM+5aVl7oVxMVUiVQn2oCQFN/LKCZdvSM0pFRqbSmQ==", + "dev": true + }, + "node_modules/@types/readdir-glob": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/@types/readdir-glob/-/readdir-glob-1.1.5.tgz", + "integrity": "sha512-raiuEPUYqXu+nvtY2Pe8s8FEmZ3x5yAH4VkLdihcPdalvsHltomrRC9BzuStrJ9yk06470hS0Crw0f1pXqD+Hg==", + "dev": true, + "dependencies": { + "@types/node": "*" + } + }, "node_modules/@types/responselike": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/@types/responselike/-/responselike-1.0.0.tgz", @@ -1276,6 +1506,140 @@ "@types/node": "*" } }, + "node_modules/@types/sanitize-html": { + "version": "2.11.0", + "resolved": "https://registry.npmjs.org/@types/sanitize-html/-/sanitize-html-2.11.0.tgz", + "integrity": 
"sha512-7oxPGNQHXLHE48r/r/qjn7q0hlrs3kL7oZnGj0Wf/h9tj/6ibFyRkNbsDxaBBZ4XUZ0Dx5LGCyDJ04ytSofacQ==", + "dev": true, + "dependencies": { + "htmlparser2": "^8.0.0" + } + }, + "node_modules/@types/sanitize-html/node_modules/dom-serializer": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/dom-serializer/-/dom-serializer-2.0.0.tgz", + "integrity": "sha512-wIkAryiqt/nV5EQKqQpo3SToSOV9J0DnbJqwK7Wv/Trc92zIAYZ4FlMu+JPFW1DfGFt81ZTCGgDEabffXeLyJg==", + "dev": true, + "dependencies": { + "domelementtype": "^2.3.0", + "domhandler": "^5.0.2", + "entities": "^4.2.0" + }, + "funding": { + "url": "https://github.com/cheeriojs/dom-serializer?sponsor=1" + } + }, + "node_modules/@types/sanitize-html/node_modules/domhandler": { + "version": "5.0.3", + "resolved": "https://registry.npmjs.org/domhandler/-/domhandler-5.0.3.tgz", + "integrity": "sha512-cgwlv/1iFQiFnU96XXgROh8xTeetsnJiDsTc7TYCLFd9+/WNkIqPTxiM/8pSd8VIrhXGTf1Ny1q1hquVqDJB5w==", + "dev": true, + "dependencies": { + "domelementtype": "^2.3.0" + }, + "engines": { + "node": ">= 4" + }, + "funding": { + "url": "https://github.com/fb55/domhandler?sponsor=1" + } + }, + "node_modules/@types/sanitize-html/node_modules/domutils": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/domutils/-/domutils-3.1.0.tgz", + "integrity": "sha512-H78uMmQtI2AhgDJjWeQmHwJJ2bLPD3GMmO7Zja/ZZh84wkm+4ut+IUnUdRa8uCGX88DiVx1j6FRe1XfxEgjEZA==", + "dev": true, + "dependencies": { + "dom-serializer": "^2.0.0", + "domelementtype": "^2.3.0", + "domhandler": "^5.0.3" + }, + "funding": { + "url": "https://github.com/fb55/domutils?sponsor=1" + } + }, + "node_modules/@types/sanitize-html/node_modules/entities": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/entities/-/entities-4.5.0.tgz", + "integrity": "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==", + "dev": true, + "engines": { + "node": ">=0.12" + }, + "funding": { + "url": 
"https://github.com/fb55/entities?sponsor=1" + } + }, + "node_modules/@types/sanitize-html/node_modules/htmlparser2": { + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/htmlparser2/-/htmlparser2-8.0.2.tgz", + "integrity": "sha512-GYdjWKDkbRLkZ5geuHs5NY1puJ+PXwP7+fHPRz06Eirsb9ugf6d8kkXav6ADhcODhFFPMIXyxkxSuMf3D6NCFA==", + "dev": true, + "funding": [ + "https://github.com/fb55/htmlparser2?sponsor=1", + { + "type": "github", + "url": "https://github.com/sponsors/fb55" + } + ], + "dependencies": { + "domelementtype": "^2.3.0", + "domhandler": "^5.0.3", + "domutils": "^3.0.1", + "entities": "^4.4.0" + } + }, + "node_modules/@types/sax": { + "version": "1.2.7", + "resolved": "https://registry.npmjs.org/@types/sax/-/sax-1.2.7.tgz", + "integrity": "sha512-rO73L89PJxeYM3s3pPPjiPgVVcymqU490g0YO5n5By0k2Erzj6tay/4lr1CHAAU4JyOWd1rpQ8bCf6cZfHU96A==", + "dev": true, + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/send": { + "version": "0.17.4", + "resolved": "https://registry.npmjs.org/@types/send/-/send-0.17.4.tgz", + "integrity": "sha512-x2EM6TJOybec7c52BX0ZspPodMsQUd5L6PRwOunVyVUhXiBSKf3AezDL8Dgvgt5o0UfKNfuA0eMLr2wLT4AiBA==", + "dev": true, + "dependencies": { + "@types/mime": "^1", + "@types/node": "*" + } + }, + "node_modules/@types/serve-static": { + "version": "1.15.5", + "resolved": "https://registry.npmjs.org/@types/serve-static/-/serve-static-1.15.5.tgz", + "integrity": "sha512-PDRk21MnK70hja/YF8AHfC7yIsiQHn1rcXx7ijCFBX/k+XQJhQT/gw3xekXKJvx+5SXaMMS8oqQy09Mzvz2TuQ==", + "dev": true, + "dependencies": { + "@types/http-errors": "*", + "@types/mime": "*", + "@types/node": "*" + } + }, + "node_modules/@types/stream-throttle": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/@types/stream-throttle/-/stream-throttle-0.1.4.tgz", + "integrity": "sha512-VxXIHGjVuK8tYsVm60rIQMmF/0xguCeen5OmK5S4Y6K64A+z+y4/GI6anRnVzaUZaJB9Ah9IfbDcO0o1gZCc/w==", + "dev": true, + "dependencies": { + "@types/node": "*" + } + }, + 
"node_modules/@types/tough-cookie": { + "version": "4.0.5", + "resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-4.0.5.tgz", + "integrity": "sha512-/Ad8+nIOV7Rl++6f1BdKxFSMgmoqEoYbHRpPcx3JEfv8VRsQe9Z4mCXeJBzxs7mbHY/XOZZuXlRNfhpVPbs6ZA==", + "dev": true + }, + "node_modules/@types/turndown": { + "version": "5.0.4", + "resolved": "https://registry.npmjs.org/@types/turndown/-/turndown-5.0.4.tgz", + "integrity": "sha512-28GI33lCCkU4SGH1GvjDhFgOVr+Tym4PXGBIU1buJUa6xQolniPArtUT+kv42RR2N9MsMLInkr904Aq+ESHBJg==", + "dev": true + }, "node_modules/@types/unist": { "version": "2.0.10", "resolved": "https://registry.npmjs.org/@types/unist/-/unist-2.0.10.tgz", @@ -1288,6 +1652,24 @@ "dev": true, "optional": true }, + "node_modules/@types/ws": { + "version": "8.5.10", + "resolved": "https://registry.npmjs.org/@types/ws/-/ws-8.5.10.tgz", + "integrity": "sha512-vmQSUcfalpIq0R9q7uTo2lXs6eGIpt9wtnLdMv9LVpIjCA/+ufZRozlVoVelIYixx1ugCBKDhn89vnsEGOCx9A==", + "dev": true, + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/xml2js": { + "version": "0.4.14", + "resolved": "https://registry.npmjs.org/@types/xml2js/-/xml2js-0.4.14.tgz", + "integrity": "sha512-4YnrRemBShWRO2QjvUin8ESA41rH+9nQGLUGZV/1IDhi3SL9OhdpNC/MrulTWuptXKwhx/aDxE7toV0f/ypIXQ==", + "dev": true, + "dependencies": { + "@types/node": "*" + } + }, "node_modules/@types/yauzl": { "version": "2.9.2", "resolved": "https://registry.npmjs.org/@types/yauzl/-/yauzl-2.9.2.tgz", @@ -1568,6 +1950,15 @@ "acorn": "^8" } }, + "node_modules/acorn-walk": { + "version": "8.3.2", + "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.3.2.tgz", + "integrity": "sha512-cjkyv4OtNCIeqhHrfS81QWXoCBPExR/J62oyEqepVw8WaQeSqpW2uhuLPh1m9eWhDuOo/jUXVTlifvesOWp/4A==", + "dev": true, + "engines": { + "node": ">=0.4.0" + } + }, "node_modules/agent-base": { "version": "6.0.2", "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz", @@ -2028,6 +2419,12 @@ "streamx": "^2.15.0" } }, + 
"node_modules/arg": { + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/arg/-/arg-4.1.3.tgz", + "integrity": "sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==", + "dev": true + }, "node_modules/argparse": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", @@ -3626,6 +4023,12 @@ "safe-buffer": "~5.2.0" } }, + "node_modules/create-require": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/create-require/-/create-require-1.1.1.tgz", + "integrity": "sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==", + "dev": true + }, "node_modules/cross-env": { "version": "7.0.3", "resolved": "https://registry.npmjs.org/cross-env/-/cross-env-7.0.3.tgz", @@ -5449,6 +5852,14 @@ "node": ">=8.0.0" } }, + "node_modules/electron/node_modules/@types/node": { + "version": "18.19.17", + "resolved": "https://registry.npmjs.org/@types/node/-/node-18.19.17.tgz", + "integrity": "sha512-SzyGKgwPzuWp2SHhlpXKzCX0pIOfcI4V2eF37nNBJOhwlegQ83omtVQ1XxZpDE06V/d6AQvfQdPfnw0tRC//Ng==", + "dependencies": { + "undici-types": "~5.26.4" + } + }, "node_modules/elkjs": { "version": "0.9.2", "resolved": "https://registry.npmjs.org/elkjs/-/elkjs-0.9.2.tgz", @@ -8140,6 +8551,12 @@ "node": ">= 6" } }, + "node_modules/make-error": { + "version": "1.3.6", + "resolved": "https://registry.npmjs.org/make-error/-/make-error-1.3.6.tgz", + "integrity": "sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==", + "dev": true + }, "node_modules/make-fetch-happen": { "version": "10.2.0", "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-10.2.0.tgz", @@ -11925,10 +12342,62 @@ "node": ">=6.10" } }, + "node_modules/ts-node": { + "version": "10.9.2", + "resolved": "https://registry.npmjs.org/ts-node/-/ts-node-10.9.2.tgz", + "integrity": 
"sha512-f0FFpIdcHgn8zcPSbf1dRevwt047YMnaiJM3u2w2RewrB+fob/zePZcrOyQoLMMO7aBIddLcQIEK5dYjkLnGrQ==", + "dev": true, + "dependencies": { + "@cspotcode/source-map-support": "^0.8.0", + "@tsconfig/node10": "^1.0.7", + "@tsconfig/node12": "^1.0.7", + "@tsconfig/node14": "^1.0.0", + "@tsconfig/node16": "^1.0.2", + "acorn": "^8.4.1", + "acorn-walk": "^8.1.1", + "arg": "^4.1.0", + "create-require": "^1.1.0", + "diff": "^4.0.1", + "make-error": "^1.1.1", + "v8-compile-cache-lib": "^3.0.1", + "yn": "3.1.1" + }, + "bin": { + "ts-node": "dist/bin.js", + "ts-node-cwd": "dist/bin-cwd.js", + "ts-node-esm": "dist/bin-esm.js", + "ts-node-script": "dist/bin-script.js", + "ts-node-transpile-only": "dist/bin-transpile.js", + "ts-script": "dist/bin-script-deprecated.js" + }, + "peerDependencies": { + "@swc/core": ">=1.2.50", + "@swc/wasm": ">=1.2.50", + "@types/node": "*", + "typescript": ">=2.7" + }, + "peerDependenciesMeta": { + "@swc/core": { + "optional": true + }, + "@swc/wasm": { + "optional": true + } + } + }, + "node_modules/ts-node/node_modules/diff": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz", + "integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==", + "dev": true, + "engines": { + "node": ">=0.3.1" + } + }, "node_modules/tslib": { - "version": "2.6.0", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.0.tgz", - "integrity": "sha512-7At1WUettjcSRHXCyYtTselblcHl9PJFFVKiCAy/bY97+BPZXSQ2wbq0P9s8tK2G7dFQfNnlJnPAiArVBVBsfA==" + "version": "2.6.2", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.2.tgz", + "integrity": "sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==" }, "node_modules/tsscmp": { "version": "1.0.6", @@ -12041,6 +12510,11 @@ "integrity": "sha512-ekY1NhRzq0B08g4bGuX4wd2jZx5GnKz6mKSqFL4nqBlfyMGiG10gDFhDTMEfYmDL6Jy0FUIZp7wiRB+0BP7J2g==", "dev": true }, + "node_modules/undici-types": { + "version": 
"5.26.5", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz", + "integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==" + }, "node_modules/unescape": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/unescape/-/unescape-1.0.1.tgz", @@ -12217,6 +12691,12 @@ "node": ">=8" } }, + "node_modules/v8-compile-cache-lib": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz", + "integrity": "sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg==", + "dev": true + }, "node_modules/validate-npm-package-license": { "version": "3.0.4", "resolved": "https://registry.npmjs.org/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz", @@ -12782,6 +13262,15 @@ "node": ">=12" } }, + "node_modules/yn": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz", + "integrity": "sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==", + "dev": true, + "engines": { + "node": ">=6" + } + }, "node_modules/zip-stream": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/zip-stream/-/zip-stream-6.0.0.tgz", @@ -12888,6 +13377,27 @@ "resolved": "https://registry.npmjs.org/@braintree/sanitize-url/-/sanitize-url-6.0.4.tgz", "integrity": "sha512-s3jaWicZd0pkP0jf5ysyHUI/RE7MHos6qlToFcGWXVp+ykHOy77OUMrfbgJ9it2C5bow7OIQwYYaHjk9XlBQ2A==" }, + "@cspotcode/source-map-support": { + "version": "0.8.1", + "resolved": "https://registry.npmjs.org/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz", + "integrity": "sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==", + "dev": true, + "requires": { + "@jridgewell/trace-mapping": "0.3.9" + }, + "dependencies": { + "@jridgewell/trace-mapping": { + "version": "0.3.9", + "resolved": 
"https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.9.tgz", + "integrity": "sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==", + "dev": true, + "requires": { + "@jridgewell/resolve-uri": "^3.0.3", + "@jridgewell/sourcemap-codec": "^1.4.10" + } + } + } + }, "@develar/schema-utils": { "version": "2.6.5", "resolved": "https://registry.npmjs.org/@develar/schema-utils/-/schema-utils-2.6.5.tgz", @@ -13608,11 +14118,63 @@ "integrity": "sha512-XCuKFP5PS55gnMVu3dty8KPatLqUoy/ZYzDzAGCQ8JNFCkLXzmI7vNHCR+XpbZaMWQK/vQubr7PkYq8g470J/A==", "dev": true }, + "@tsconfig/node10": { + "version": "1.0.9", + "resolved": "https://registry.npmjs.org/@tsconfig/node10/-/node10-1.0.9.tgz", + "integrity": "sha512-jNsYVVxU8v5g43Erja32laIDHXeoNvFEpX33OK4d6hljo3jDhCBDhx5dhCCTMWUojscpAagGiRkBKxpdl9fxqA==", + "dev": true + }, + "@tsconfig/node12": { + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/@tsconfig/node12/-/node12-1.0.11.tgz", + "integrity": "sha512-cqefuRsh12pWyGsIoBKJA9luFu3mRxCA+ORZvA4ktLSzIuCUtWVxGIuXigEwO5/ywWFMZ2QEGKWvkZG1zDMTag==", + "dev": true + }, + "@tsconfig/node14": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/@tsconfig/node14/-/node14-1.0.3.tgz", + "integrity": "sha512-ysT8mhdixWK6Hw3i1V2AeRqZ5WfXg1G43mqoYlM2nc6388Fq5jcXyr5mRsqViLx/GJYdoL0bfXD8nmF+Zn/Iow==", + "dev": true + }, + "@tsconfig/node16": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@tsconfig/node16/-/node16-1.0.4.tgz", + "integrity": "sha512-vxhUy4J8lyeyinH7Azl1pdd43GJhZH/tP2weN8TntQblOY+A0XbT8DJk1/oCPuOOyg/Ja757rG0CgHcWC8OfMA==", + "dev": true + }, "@tweenjs/tween.js": { "version": "21.0.0", "resolved": "https://registry.npmjs.org/@tweenjs/tween.js/-/tween.js-21.0.0.tgz", "integrity": "sha512-qVfOiFh0U8ZSkLgA6tf7kj2MciqRbSCWaJZRwftVO7UbtVDNsZAXpWXqvCDtIefvjC83UJB+vHTDOGm5ibXjEA==" }, + "@types/archiver": { + "version": "6.0.2", + "resolved": 
"https://registry.npmjs.org/@types/archiver/-/archiver-6.0.2.tgz", + "integrity": "sha512-KmROQqbQzKGuaAbmK+ZcytkJ51+YqDa7NmbXjmtC5YBLSyQYo21YaUnQ3HbaPFKL1ooo6RQ6OPYPIDyxfpDDXw==", + "dev": true, + "requires": { + "@types/readdir-glob": "*" + } + }, + "@types/better-sqlite3": { + "version": "7.6.9", + "resolved": "https://registry.npmjs.org/@types/better-sqlite3/-/better-sqlite3-7.6.9.tgz", + "integrity": "sha512-FvktcujPDj9XKMJQWFcl2vVl7OdRIqsSRX9b0acWwTmwLK9CF2eqo/FRcmMLNpugKoX/avA6pb7TorDLmpgTnQ==", + "dev": true, + "requires": { + "@types/node": "*" + } + }, + "@types/body-parser": { + "version": "1.19.5", + "resolved": "https://registry.npmjs.org/@types/body-parser/-/body-parser-1.19.5.tgz", + "integrity": "sha512-fB3Zu92ucau0iQ0JMCFQE7b/dv8Ot07NI3KaZIkIUNXq82k4eBAqUaneXfleGY9JWskeS9y+u0nXMyspcuQrCg==", + "dev": true, + "requires": { + "@types/connect": "*", + "@types/node": "*" + } + }, "@types/cacheable-request": { "version": "6.0.2", "resolved": "https://registry.npmjs.org/@types/cacheable-request/-/cacheable-request-6.0.2.tgz", @@ -13624,6 +14186,24 @@ "@types/responselike": "*" } }, + "@types/cls-hooked": { + "version": "4.3.8", + "resolved": "https://registry.npmjs.org/@types/cls-hooked/-/cls-hooked-4.3.8.tgz", + "integrity": "sha512-tf/7H883gFA6MPlWI15EQtfNZ+oPL0gLKkOlx9UHFrun1fC/FkuyNBpTKq1B5E3T4fbvjId6WifHUdSGsMMuPg==", + "dev": true, + "requires": { + "@types/node": "*" + } + }, + "@types/connect": { + "version": "3.4.38", + "resolved": "https://registry.npmjs.org/@types/connect/-/connect-3.4.38.tgz", + "integrity": "sha512-K6uROf1LD88uDQqJCktA4yzL1YYAK6NgfsI0v/mTgyPKWsX1CnJ0XPSDhViejru1GcRkLWb8RlzFYJRqGUbaug==", + "dev": true, + "requires": { + "@types/node": "*" + } + }, "@types/d3-scale": { "version": "4.0.8", "resolved": "https://registry.npmjs.org/@types/d3-scale/-/d3-scale-4.0.8.tgz", @@ -13650,6 +14230,12 @@ "@types/ms": "*" } }, + "@types/escape-html": { + "version": "1.0.4", + "resolved": 
"https://registry.npmjs.org/@types/escape-html/-/escape-html-1.0.4.tgz", + "integrity": "sha512-qZ72SFTgUAZ5a7Tj6kf2SHLetiH5S6f8G5frB2SPQ3EyF02kxdyBFf4Tz4banE3xCgGnKgWLt//a6VuYHKYJTg==", + "dev": true + }, "@types/eslint": { "version": "8.4.1", "resolved": "https://registry.npmjs.org/@types/eslint/-/eslint-8.4.1.tgz", @@ -13676,6 +14262,30 @@ "integrity": "sha512-/kYRxGDLWzHOB7q+wtSUQlFrtcdUccpfy+X+9iMBpHK8QLLhx2wIPYuS5DYtR9Wa/YlZAbIovy7qVdB1Aq6Lyw==", "dev": true }, + "@types/express": { + "version": "4.17.21", + "resolved": "https://registry.npmjs.org/@types/express/-/express-4.17.21.tgz", + "integrity": "sha512-ejlPM315qwLpaQlQDTjPdsUFSc6ZsP4AN6AlWnogPjQ7CVi7PYF3YVz+CY3jE2pwYf7E/7HlDAN0rV2GxTG0HQ==", + "dev": true, + "requires": { + "@types/body-parser": "*", + "@types/express-serve-static-core": "^4.17.33", + "@types/qs": "*", + "@types/serve-static": "*" + } + }, + "@types/express-serve-static-core": { + "version": "4.17.43", + "resolved": "https://registry.npmjs.org/@types/express-serve-static-core/-/express-serve-static-core-4.17.43.tgz", + "integrity": "sha512-oaYtiBirUOPQGSWNGPWnzyAFJ0BP3cwvN4oWZQY+zUBwpVIGsKUkpBpSztp74drYcjavs7SKFZ4DX1V2QeN8rg==", + "dev": true, + "requires": { + "@types/node": "*", + "@types/qs": "*", + "@types/range-parser": "*", + "@types/send": "*" + } + }, "@types/fs-extra": { "version": "9.0.13", "resolved": "https://registry.npmjs.org/@types/fs-extra/-/fs-extra-9.0.13.tgz", @@ -13695,11 +14305,57 @@ "@types/node": "*" } }, + "@types/html": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@types/html/-/html-1.0.4.tgz", + "integrity": "sha512-Wb1ymSAftCLxhc3D6vS0Ike/0xg7W6c+DQxAkerU6pD7C8CMzTYwvrwnlcrTfsVO/nMelB9KOKIT7+N5lOeQUg==", + "dev": true + }, "@types/http-cache-semantics": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/@types/http-cache-semantics/-/http-cache-semantics-4.0.1.tgz", "integrity": "sha512-SZs7ekbP8CN0txVG2xVRH6EgKmEm31BOxA07vkFaETzZz1xh+cbt8BcI0slpymvwhx5dlFnQG2rTlPVQn+iRPQ==" }, + 
"@types/http-errors": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/@types/http-errors/-/http-errors-2.0.4.tgz", + "integrity": "sha512-D0CFMMtydbJAegzOyHjtiKPLlvnm3iTZyZRSZoLq2mRhDdmLfIWOCYPfQJ4cu2erKghU++QvjcUjp/5h7hESpA==", + "dev": true + }, + "@types/ini": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/@types/ini/-/ini-4.1.0.tgz", + "integrity": "sha512-mTehMtc+xtnWBBvqizcqYCktKDBH2WChvx1GU3Sfe4PysFDXiNe+1YwtpVX1MDtCa4NQrSPw2+3HmvXHY3gt1w==", + "dev": true + }, + "@types/jsdom": { + "version": "21.1.6", + "resolved": "https://registry.npmjs.org/@types/jsdom/-/jsdom-21.1.6.tgz", + "integrity": "sha512-/7kkMsC+/kMs7gAYmmBR9P0vGTnOoLhQhyhQJSlXGI5bzTHp6xdo0TtKWQAsz6pmSAeVqKSbqeyP6hytqr9FDw==", + "dev": true, + "requires": { + "@types/node": "*", + "@types/tough-cookie": "*", + "parse5": "^7.0.0" + }, + "dependencies": { + "entities": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/entities/-/entities-4.5.0.tgz", + "integrity": "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==", + "dev": true + }, + "parse5": { + "version": "7.1.2", + "resolved": "https://registry.npmjs.org/parse5/-/parse5-7.1.2.tgz", + "integrity": "sha512-Czj1WaSVpaoj0wbhMzLmWD69anp2WH7FXMB9n1Sy8/ZFF9jolSQVMu1Ij5WIyGmcBmhk7EOndpO4mIpihVqAXw==", + "dev": true, + "requires": { + "entities": "^4.4.0" + } + } + } + }, "@types/json-schema": { "version": "7.0.9", "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.9.tgz", @@ -13744,6 +14400,18 @@ "integrity": "sha512-eC4U9MlIcu2q0KQmXszyn5Akca/0jrQmwDRgpAMJai7qBWq4amIQhZyNau4VYGtCeALvW1/NtjzJJ567aZxfKA==", "dev": true }, + "@types/mime": { + "version": "1.3.5", + "resolved": "https://registry.npmjs.org/@types/mime/-/mime-1.3.5.tgz", + "integrity": "sha512-/pyBZWSLD2n0dcHE3hq8s8ZvcETHtEuF+3E7XVt0Ig2nvsVQXdghHVcEkIWjy9A0wKfTn97a/PSDYohKIlnP/w==", + "dev": true + }, + "@types/mime-types": { + "version": "2.1.4", + "resolved": 
"https://registry.npmjs.org/@types/mime-types/-/mime-types-2.1.4.tgz", + "integrity": "sha512-lfU4b34HOri+kAY5UheuFMWPDOI+OPceBSHZKp69gEyTL/mmJ4cnU6Y/rlme3UL3GyOn6Y42hyIEw0/q8sWx5w==", + "dev": true + }, "@types/minimatch": { "version": "3.0.3", "resolved": "https://registry.npmjs.org/@types/minimatch/-/minimatch-3.0.3.tgz", @@ -13756,9 +14424,12 @@ "integrity": "sha512-xPSg0jm4mqgEkNhowKgZFBNtwoEwF6gJ4Dhww+GFpm3IgtNseHQZ5IqdNwnquZEoANxyDAKDRAdVo4Z72VvD/g==" }, "@types/node": { - "version": "18.16.18", - "resolved": "https://registry.npmjs.org/@types/node/-/node-18.16.18.tgz", - "integrity": "sha512-/aNaQZD0+iSBAGnvvN2Cx92HqE5sZCPZtx2TsK+4nvV23fFe09jVDvpArXr2j9DnYlzuU9WuoykDDc6wqvpNcw==" + "version": "20.11.19", + "resolved": "https://registry.npmjs.org/@types/node/-/node-20.11.19.tgz", + "integrity": "sha512-7xMnVEcZFu0DikYjWOlRq7NTPETrm7teqUT2WkQjrTIkEgUyyGdWsj/Zg8bEJt5TNklzbPD1X3fqfsHw3SpapQ==", + "requires": { + "undici-types": "~5.26.4" + } }, "@types/plist": { "version": "3.0.5", @@ -13771,6 +14442,27 @@ "xmlbuilder": ">=11.0.1" } }, + "@types/qs": { + "version": "6.9.11", + "resolved": "https://registry.npmjs.org/@types/qs/-/qs-6.9.11.tgz", + "integrity": "sha512-oGk0gmhnEJK4Yyk+oI7EfXsLayXatCWPHary1MtcmbAifkobT9cM9yutG/hZKIseOU0MqbIwQ/u2nn/Gb+ltuQ==", + "dev": true + }, + "@types/range-parser": { + "version": "1.2.7", + "resolved": "https://registry.npmjs.org/@types/range-parser/-/range-parser-1.2.7.tgz", + "integrity": "sha512-hKormJbkJqzQGhziax5PItDUTMAM9uE2XXQmM37dyd4hVM+5aVl7oVxMVUiVQn2oCQFN/LKCZdvSM0pFRqbSmQ==", + "dev": true + }, + "@types/readdir-glob": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/@types/readdir-glob/-/readdir-glob-1.1.5.tgz", + "integrity": "sha512-raiuEPUYqXu+nvtY2Pe8s8FEmZ3x5yAH4VkLdihcPdalvsHltomrRC9BzuStrJ9yk06470hS0Crw0f1pXqD+Hg==", + "dev": true, + "requires": { + "@types/node": "*" + } + }, "@types/responselike": { "version": "1.0.0", "resolved": 
"https://registry.npmjs.org/@types/responselike/-/responselike-1.0.0.tgz", @@ -13779,6 +14471,117 @@ "@types/node": "*" } }, + "@types/sanitize-html": { + "version": "2.11.0", + "resolved": "https://registry.npmjs.org/@types/sanitize-html/-/sanitize-html-2.11.0.tgz", + "integrity": "sha512-7oxPGNQHXLHE48r/r/qjn7q0hlrs3kL7oZnGj0Wf/h9tj/6ibFyRkNbsDxaBBZ4XUZ0Dx5LGCyDJ04ytSofacQ==", + "dev": true, + "requires": { + "htmlparser2": "^8.0.0" + }, + "dependencies": { + "dom-serializer": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/dom-serializer/-/dom-serializer-2.0.0.tgz", + "integrity": "sha512-wIkAryiqt/nV5EQKqQpo3SToSOV9J0DnbJqwK7Wv/Trc92zIAYZ4FlMu+JPFW1DfGFt81ZTCGgDEabffXeLyJg==", + "dev": true, + "requires": { + "domelementtype": "^2.3.0", + "domhandler": "^5.0.2", + "entities": "^4.2.0" + } + }, + "domhandler": { + "version": "5.0.3", + "resolved": "https://registry.npmjs.org/domhandler/-/domhandler-5.0.3.tgz", + "integrity": "sha512-cgwlv/1iFQiFnU96XXgROh8xTeetsnJiDsTc7TYCLFd9+/WNkIqPTxiM/8pSd8VIrhXGTf1Ny1q1hquVqDJB5w==", + "dev": true, + "requires": { + "domelementtype": "^2.3.0" + } + }, + "domutils": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/domutils/-/domutils-3.1.0.tgz", + "integrity": "sha512-H78uMmQtI2AhgDJjWeQmHwJJ2bLPD3GMmO7Zja/ZZh84wkm+4ut+IUnUdRa8uCGX88DiVx1j6FRe1XfxEgjEZA==", + "dev": true, + "requires": { + "dom-serializer": "^2.0.0", + "domelementtype": "^2.3.0", + "domhandler": "^5.0.3" + } + }, + "entities": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/entities/-/entities-4.5.0.tgz", + "integrity": "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==", + "dev": true + }, + "htmlparser2": { + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/htmlparser2/-/htmlparser2-8.0.2.tgz", + "integrity": "sha512-GYdjWKDkbRLkZ5geuHs5NY1puJ+PXwP7+fHPRz06Eirsb9ugf6d8kkXav6ADhcODhFFPMIXyxkxSuMf3D6NCFA==", + "dev": true, + "requires": { + 
"domelementtype": "^2.3.0", + "domhandler": "^5.0.3", + "domutils": "^3.0.1", + "entities": "^4.4.0" + } + } + } + }, + "@types/sax": { + "version": "1.2.7", + "resolved": "https://registry.npmjs.org/@types/sax/-/sax-1.2.7.tgz", + "integrity": "sha512-rO73L89PJxeYM3s3pPPjiPgVVcymqU490g0YO5n5By0k2Erzj6tay/4lr1CHAAU4JyOWd1rpQ8bCf6cZfHU96A==", + "dev": true, + "requires": { + "@types/node": "*" + } + }, + "@types/send": { + "version": "0.17.4", + "resolved": "https://registry.npmjs.org/@types/send/-/send-0.17.4.tgz", + "integrity": "sha512-x2EM6TJOybec7c52BX0ZspPodMsQUd5L6PRwOunVyVUhXiBSKf3AezDL8Dgvgt5o0UfKNfuA0eMLr2wLT4AiBA==", + "dev": true, + "requires": { + "@types/mime": "^1", + "@types/node": "*" + } + }, + "@types/serve-static": { + "version": "1.15.5", + "resolved": "https://registry.npmjs.org/@types/serve-static/-/serve-static-1.15.5.tgz", + "integrity": "sha512-PDRk21MnK70hja/YF8AHfC7yIsiQHn1rcXx7ijCFBX/k+XQJhQT/gw3xekXKJvx+5SXaMMS8oqQy09Mzvz2TuQ==", + "dev": true, + "requires": { + "@types/http-errors": "*", + "@types/mime": "*", + "@types/node": "*" + } + }, + "@types/stream-throttle": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/@types/stream-throttle/-/stream-throttle-0.1.4.tgz", + "integrity": "sha512-VxXIHGjVuK8tYsVm60rIQMmF/0xguCeen5OmK5S4Y6K64A+z+y4/GI6anRnVzaUZaJB9Ah9IfbDcO0o1gZCc/w==", + "dev": true, + "requires": { + "@types/node": "*" + } + }, + "@types/tough-cookie": { + "version": "4.0.5", + "resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-4.0.5.tgz", + "integrity": "sha512-/Ad8+nIOV7Rl++6f1BdKxFSMgmoqEoYbHRpPcx3JEfv8VRsQe9Z4mCXeJBzxs7mbHY/XOZZuXlRNfhpVPbs6ZA==", + "dev": true + }, + "@types/turndown": { + "version": "5.0.4", + "resolved": "https://registry.npmjs.org/@types/turndown/-/turndown-5.0.4.tgz", + "integrity": "sha512-28GI33lCCkU4SGH1GvjDhFgOVr+Tym4PXGBIU1buJUa6xQolniPArtUT+kv42RR2N9MsMLInkr904Aq+ESHBJg==", + "dev": true + }, "@types/unist": { "version": "2.0.10", "resolved": 
"https://registry.npmjs.org/@types/unist/-/unist-2.0.10.tgz", @@ -13791,6 +14594,24 @@ "dev": true, "optional": true }, + "@types/ws": { + "version": "8.5.10", + "resolved": "https://registry.npmjs.org/@types/ws/-/ws-8.5.10.tgz", + "integrity": "sha512-vmQSUcfalpIq0R9q7uTo2lXs6eGIpt9wtnLdMv9LVpIjCA/+ufZRozlVoVelIYixx1ugCBKDhn89vnsEGOCx9A==", + "dev": true, + "requires": { + "@types/node": "*" + } + }, + "@types/xml2js": { + "version": "0.4.14", + "resolved": "https://registry.npmjs.org/@types/xml2js/-/xml2js-0.4.14.tgz", + "integrity": "sha512-4YnrRemBShWRO2QjvUin8ESA41rH+9nQGLUGZV/1IDhi3SL9OhdpNC/MrulTWuptXKwhx/aDxE7toV0f/ypIXQ==", + "dev": true, + "requires": { + "@types/node": "*" + } + }, "@types/yauzl": { "version": "2.9.2", "resolved": "https://registry.npmjs.org/@types/yauzl/-/yauzl-2.9.2.tgz", @@ -14031,6 +14852,12 @@ "dev": true, "requires": {} }, + "acorn-walk": { + "version": "8.3.2", + "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.3.2.tgz", + "integrity": "sha512-cjkyv4OtNCIeqhHrfS81QWXoCBPExR/J62oyEqepVw8WaQeSqpW2uhuLPh1m9eWhDuOo/jUXVTlifvesOWp/4A==", + "dev": true + }, "agent-base": { "version": "6.0.2", "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz", @@ -14381,6 +15208,12 @@ } } }, + "arg": { + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/arg/-/arg-4.1.3.tgz", + "integrity": "sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==", + "dev": true + }, "argparse": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", @@ -15586,6 +16419,12 @@ } } }, + "create-require": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/create-require/-/create-require-1.1.1.tgz", + "integrity": "sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==", + "dev": true + }, "cross-env": { "version": "7.0.3", "resolved": 
"https://registry.npmjs.org/cross-env/-/cross-env-7.0.3.tgz", @@ -16410,6 +17249,16 @@ "@electron/get": "^2.0.0", "@types/node": "^18.11.18", "extract-zip": "^2.0.1" + }, + "dependencies": { + "@types/node": { + "version": "18.19.17", + "resolved": "https://registry.npmjs.org/@types/node/-/node-18.19.17.tgz", + "integrity": "sha512-SzyGKgwPzuWp2SHhlpXKzCX0pIOfcI4V2eF37nNBJOhwlegQ83omtVQ1XxZpDE06V/d6AQvfQdPfnw0tRC//Ng==", + "requires": { + "undici-types": "~5.26.4" + } + } } }, "electron-builder": { @@ -19033,6 +19882,12 @@ } } }, + "make-error": { + "version": "1.3.6", + "resolved": "https://registry.npmjs.org/make-error/-/make-error-1.3.6.tgz", + "integrity": "sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==", + "dev": true + }, "make-fetch-happen": { "version": "10.2.0", "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-10.2.0.tgz", @@ -21840,10 +22695,39 @@ "resolved": "https://registry.npmjs.org/ts-dedent/-/ts-dedent-2.2.0.tgz", "integrity": "sha512-q5W7tVM71e2xjHZTlgfTDoPF/SmqKG5hddq9SzR49CH2hayqRKJtQ4mtRlSxKaJlR/+9rEM+mnBHf7I2/BQcpQ==" }, + "ts-node": { + "version": "10.9.2", + "resolved": "https://registry.npmjs.org/ts-node/-/ts-node-10.9.2.tgz", + "integrity": "sha512-f0FFpIdcHgn8zcPSbf1dRevwt047YMnaiJM3u2w2RewrB+fob/zePZcrOyQoLMMO7aBIddLcQIEK5dYjkLnGrQ==", + "dev": true, + "requires": { + "@cspotcode/source-map-support": "^0.8.0", + "@tsconfig/node10": "^1.0.7", + "@tsconfig/node12": "^1.0.7", + "@tsconfig/node14": "^1.0.0", + "@tsconfig/node16": "^1.0.2", + "acorn": "^8.4.1", + "acorn-walk": "^8.1.1", + "arg": "^4.1.0", + "create-require": "^1.1.0", + "diff": "^4.0.1", + "make-error": "^1.1.1", + "v8-compile-cache-lib": "^3.0.1", + "yn": "3.1.1" + }, + "dependencies": { + "diff": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz", + "integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==", + 
"dev": true + } + } + }, "tslib": { - "version": "2.6.0", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.0.tgz", - "integrity": "sha512-7At1WUettjcSRHXCyYtTselblcHl9PJFFVKiCAy/bY97+BPZXSQ2wbq0P9s8tK2G7dFQfNnlJnPAiArVBVBsfA==" + "version": "2.6.2", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.2.tgz", + "integrity": "sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==" }, "tsscmp": { "version": "1.0.6", @@ -21931,6 +22815,11 @@ "integrity": "sha512-ekY1NhRzq0B08g4bGuX4wd2jZx5GnKz6mKSqFL4nqBlfyMGiG10gDFhDTMEfYmDL6Jy0FUIZp7wiRB+0BP7J2g==", "dev": true }, + "undici-types": { + "version": "5.26.5", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz", + "integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==" + }, "unescape": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/unescape/-/unescape-1.0.1.tgz", @@ -22057,6 +22946,12 @@ "sade": "^1.7.3" } }, + "v8-compile-cache-lib": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz", + "integrity": "sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg==", + "dev": true + }, "validate-npm-package-license": { "version": "3.0.4", "resolved": "https://registry.npmjs.org/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz", @@ -22463,6 +23358,12 @@ "pend": "~1.2.0" } }, + "yn": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz", + "integrity": "sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==", + "dev": true + }, "zip-stream": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/zip-stream/-/zip-stream-6.0.0.tgz", diff --git a/package.json b/package.json index 648ea69745..91e82dbcdf 100644 --- a/package.json +++ b/package.json @@ -109,6 +109,22 @@ 
"yauzl": "3.1.2" }, "devDependencies": { + "@types/archiver": "^6.0.2", + "@types/better-sqlite3": "^7.6.9", + "@types/cls-hooked": "^4.3.8", + "@types/escape-html": "^1.0.4", + "@types/express": "^4.17.21", + "@types/html": "^1.0.4", + "@types/ini": "^4.1.0", + "@types/jsdom": "^21.1.6", + "@types/mime-types": "^2.1.4", + "@types/node": "^20.11.19", + "@types/sanitize-html": "^2.11.0", + "@types/sax": "^1.2.7", + "@types/stream-throttle": "^0.1.4", + "@types/turndown": "^5.0.4", + "@types/ws": "^8.5.10", + "@types/xml2js": "^0.4.14", "cross-env": "7.0.3", "electron": "25.9.8", "electron-builder": "24.13.3", @@ -120,10 +136,13 @@ "lorem-ipsum": "2.0.8", "nodemon": "3.1.0", "rcedit": "4.0.1", + "ts-node": "^10.9.2", + "tslib": "^2.6.2", + "typescript": "^5.3.3", "webpack": "5.90.3", "webpack-cli": "5.1.4" }, "optionalDependencies": { "electron-installer-debian": "3.2.0" } -} +} \ No newline at end of file diff --git a/spec/search/lexer.spec.js b/spec/search/lexer.spec.js index 20221e9a0b..1533bf56ae 100644 --- a/spec/search/lexer.spec.js +++ b/spec/search/lexer.spec.js @@ -1,4 +1,4 @@ -const lex = require('../../src/services/search/services/lex.js'); +const lex = require('../../src/services/search/services/lex'); describe("Lexer fulltext", () => { it("simple lexing", () => { diff --git a/spec/search/parens.spec.js b/spec/search/parens.spec.js index d79cfd6ea2..ae1ac5b2fb 100644 --- a/spec/search/parens.spec.js +++ b/spec/search/parens.spec.js @@ -1,4 +1,4 @@ -const handleParens = require('../../src/services/search/services/handle_parens.js'); +const handleParens = require('../../src/services/search/services/handle_parens'); describe("Parens handler", () => { it("handles parens", () => { diff --git a/spec/search/parser.spec.js b/spec/search/parser.spec.js index 000948d473..be2e2bcaf1 100644 --- a/spec/search/parser.spec.js +++ b/spec/search/parser.spec.js @@ -1,5 +1,5 @@ -const SearchContext = require('../../src/services/search/search_context.js'); -const parse = 
require('../../src/services/search/services/parse.js'); +const SearchContext = require('../../src/services/search/search_context'); +const parse = require('../../src/services/search/services/parse'); function tokens(toks, cur = 0) { return toks.map(arg => { diff --git a/spec/search/search.spec.js b/spec/search/search.spec.js index 97b607afd4..c628ac2a0d 100644 --- a/spec/search/search.spec.js +++ b/spec/search/search.spec.js @@ -1,8 +1,8 @@ -const searchService = require('../../src/services/search/services/search.js'); +const searchService = require('../../src/services/search/services/search'); const BNote = require('../../src/becca/entities/bnote.js'); const BBranch = require('../../src/becca/entities/bbranch.js'); -const SearchContext = require('../../src/services/search/search_context.js'); -const dateUtils = require('../../src/services/date_utils.js'); +const SearchContext = require('../../src/services/search/search_context'); +const dateUtils = require('../../src/services/date_utils'); const becca = require('../../src/becca/becca.js'); const {NoteBuilder, findNoteByTitle, note} = require('./becca_mocking.js'); diff --git a/spec/search/value_extractor.spec.js b/spec/search/value_extractor.spec.js index 4e05380c2b..08508a4bf8 100644 --- a/spec/search/value_extractor.spec.js +++ b/spec/search/value_extractor.spec.js @@ -1,7 +1,7 @@ const {note} = require('./becca_mocking.js'); -const ValueExtractor = require('../../src/services/search/value_extractor.js'); +const ValueExtractor = require('../../src/services/search/value_extractor'); const becca = require('../../src/becca/becca.js'); -const SearchContext = require('../../src/services/search/search_context.js'); +const SearchContext = require('../../src/services/search/search_context'); const dsc = new SearchContext(); diff --git a/src/anonymize.js b/src/anonymize.ts similarity index 73% rename from src/anonymize.js rename to src/anonymize.ts index 5099b32b05..cd449e479c 100644 --- a/src/anonymize.js +++ 
b/src/anonymize.ts @@ -1,6 +1,6 @@ -const anonymizationService = require('./services/anonymization.js'); -const sqlInit = require('./services/sql_init.js'); -require('./becca/entity_constructor.js'); +import anonymizationService = require('./services/anonymization'); +import sqlInit = require('./services/sql_init'); +require('./becca/entity_constructor'); sqlInit.dbReady.then(async () => { try { @@ -16,7 +16,7 @@ sqlInit.dbReady.then(async () => { console.log("Anonymization failed."); } } - catch (e) { + catch (e: any) { console.error(e.message, e.stack); } diff --git a/src/app.js b/src/app.js index 45451f0cc4..49373545e4 100644 --- a/src/app.js +++ b/src/app.js @@ -5,10 +5,10 @@ const cookieParser = require('cookie-parser'); const helmet = require('helmet'); const compression = require('compression'); const sessionParser = require('./routes/session_parser.js'); -const utils = require('./services/utils.js'); +const utils = require('./services/utils'); require('./services/handlers.js'); -require('./becca/becca_loader.js'); +require('./becca/becca_loader'); const app = express(); @@ -26,10 +26,10 @@ app.use(helmet({ crossOriginEmbedderPolicy: false })); -app.use(express.text({limit: '500mb'})); -app.use(express.json({limit: '500mb'})); -app.use(express.raw({limit: '500mb'})); -app.use(express.urlencoded({extended: false})); +app.use(express.text({ limit: '500mb' })); +app.use(express.json({ limit: '500mb' })); +app.use(express.raw({ limit: '500mb' })); +app.use(express.urlencoded({ extended: false })); app.use(cookieParser()); app.use(express.static(path.join(__dirname, 'public/root'))); app.use(`/manifest.webmanifest`, express.static(path.join(__dirname, 'public/manifest.webmanifest'))); @@ -43,13 +43,13 @@ require('./routes/custom.js').register(app); require('./routes/error_handlers.js').register(app); // triggers sync timer -require('./services/sync.js'); +require('./services/sync'); // triggers backup timer -require('./services/backup.js'); 
+require('./services/backup'); // trigger consistency checks timer -require('./services/consistency_checks.js'); +require('./services/consistency_checks'); require('./services/scheduler.js'); diff --git a/src/becca/becca.js b/src/becca/becca-interface.ts similarity index 55% rename from src/becca/becca.js rename to src/becca/becca-interface.ts index d1e7c333e9..5a454335c6 100644 --- a/src/becca/becca.js +++ b/src/becca/becca-interface.ts @@ -1,32 +1,52 @@ -"use strict"; - -const sql = require('../services/sql.js'); -const NoteSet = require('../services/search/note_set.js'); -const NotFoundError = require('../errors/not_found_error.js'); +import sql = require('../services/sql'); +import NoteSet = require('../services/search/note_set'); +import NotFoundError = require('../errors/not_found_error'); +import BOption = require('./entities/boption'); +import BNote = require('./entities/bnote'); +import BEtapiToken = require('./entities/betapi_token'); +import BAttribute = require('./entities/battribute'); +import BBranch = require('./entities/bbranch'); +import BRevision = require('./entities/brevision'); +import BAttachment = require('./entities/battachment'); +import { AttachmentRow, RevisionRow } from './entities/rows'; +import BBlob = require('./entities/bblob'); +import BRecentNote = require('./entities/brecent_note'); +import AbstractBeccaEntity = require('./entities/abstract_becca_entity'); + +interface AttachmentOpts { + includeContentLength?: boolean; +} /** * Becca is a backend cache of all notes, branches, and attributes. * There's a similar frontend cache Froca, and share cache Shaca. 
*/ -class Becca { +export default class Becca { + loaded!: boolean; + + notes!: Record; + branches!: Record; + childParentToBranch!: Record; + attributes!: Record; + /** Points from attribute type-name to list of attributes */ + attributeIndex!: Record; + options!: Record; + etapiTokens!: Record; + + allNoteSetCache: NoteSet | null; + constructor() { this.reset(); + this.allNoteSetCache = null; } reset() { - /** @type {Object.} */ this.notes = {}; - /** @type {Object.} */ this.branches = {}; - /** @type {Object.} */ this.childParentToBranch = {}; - /** @type {Object.} */ - this.attributes = {}; - /** @type {Object.} Points from attribute type-name to list of attributes */ + this.attributes = {}; this.attributeIndex = {}; - /** @type {Object.} */ this.options = {}; - /** @type {Object.} */ this.etapiTokens = {}; this.dirtyNoteSetCache(); @@ -38,8 +58,7 @@ class Becca { return this.getNote('root'); } - /** @returns {BAttribute[]} */ - findAttributes(type, name) { + findAttributes(type: string, name: string): BAttribute[] { name = name.trim().toLowerCase(); if (name.startsWith('#') || name.startsWith('~')) { @@ -49,9 +68,8 @@ class Becca { return this.attributeIndex[`${type}-${name}`] || []; } - /** @returns {BAttribute[]} */ - findAttributesWithPrefix(type, name) { - const resArr = []; + findAttributesWithPrefix(type: string, name: string): BAttribute[] { + const resArr: BAttribute[][] = []; const key = `${type}-${name}`; for (const idx in this.attributeIndex) { @@ -69,18 +87,16 @@ class Becca { } } - addNote(noteId, note) { + addNote(noteId: string, note: BNote) { this.notes[noteId] = note; this.dirtyNoteSetCache(); } - /** @returns {BNote|null} */ - getNote(noteId) { + getNote(noteId: string): BNote | null { return this.notes[noteId]; } - /** @returns {BNote|null} */ - getNoteOrThrow(noteId) { + getNoteOrThrow(noteId: string): BNote { const note = this.notes[noteId]; if (!note) { throw new NotFoundError(`Note '${noteId}' doesn't exist.`); @@ -89,9 +105,8 @@ class 
Becca { return note; } - /** @returns {BNote[]} */ - getNotes(noteIds, ignoreMissing = false) { - const filteredNotes = []; + getNotes(noteIds: string[], ignoreMissing: boolean = false): BNote[] { + const filteredNotes: BNote[] = []; for (const noteId of noteIds) { const note = this.notes[noteId]; @@ -110,13 +125,11 @@ class Becca { return filteredNotes; } - /** @returns {BBranch|null} */ - getBranch(branchId) { + getBranch(branchId: string): BBranch | null { return this.branches[branchId]; } - /** @returns {BBranch|null} */ - getBranchOrThrow(branchId) { + getBranchOrThrow(branchId: string): BBranch | null { const branch = this.getBranch(branchId); if (!branch) { throw new NotFoundError(`Branch '${branchId}' was not found in becca.`); @@ -124,13 +137,11 @@ class Becca { return branch; } - /** @returns {BAttribute|null} */ - getAttribute(attributeId) { + getAttribute(attributeId: string): BAttribute | null { return this.attributes[attributeId]; } - /** @returns {BAttribute} */ - getAttributeOrThrow(attributeId) { + getAttributeOrThrow(attributeId: string): BAttribute { const attribute = this.getAttribute(attributeId); if (!attribute) { throw new NotFoundError(`Attribute '${attributeId}' does not exist.`); @@ -139,21 +150,18 @@ class Becca { return attribute; } - /** @returns {BBranch|null} */ - getBranchFromChildAndParent(childNoteId, parentNoteId) { + getBranchFromChildAndParent(childNoteId: string, parentNoteId: string): BBranch | null { return this.childParentToBranch[`${childNoteId}-${parentNoteId}`]; } - /** @returns {BRevision|null} */ - getRevision(revisionId) { + getRevision(revisionId: string): BRevision | null { const row = sql.getRow("SELECT * FROM revisions WHERE revisionId = ?", [revisionId]); - const BRevision = require('./entities/brevision.js'); // avoiding circular dependency problems + const BRevision = require('./entities/brevision'); // avoiding circular dependency problems return row ? 
new BRevision(row) : null; } - /** @returns {BAttachment|null} */ - getAttachment(attachmentId, opts = {}) { + getAttachment(attachmentId: string, opts: AttachmentOpts = {}): BAttachment | null { opts.includeContentLength = !!opts.includeContentLength; const query = opts.includeContentLength @@ -163,14 +171,13 @@ class Becca { WHERE attachmentId = ? AND isDeleted = 0` : `SELECT * FROM attachments WHERE attachmentId = ? AND isDeleted = 0`; - const BAttachment = require('./entities/battachment.js'); // avoiding circular dependency problems + const BAttachment = require('./entities/battachment'); // avoiding circular dependency problems return sql.getRows(query, [attachmentId]) .map(row => new BAttachment(row))[0]; } - /** @returns {BAttachment} */ - getAttachmentOrThrow(attachmentId, opts = {}) { + getAttachmentOrThrow(attachmentId: string, opts: AttachmentOpts = {}): BAttachment { const attachment = this.getAttachment(attachmentId, opts); if (!attachment) { throw new NotFoundError(`Attachment '${attachmentId}' has not been found.`); @@ -178,38 +185,36 @@ class Becca { return attachment; } - /** @returns {BAttachment[]} */ - getAttachments(attachmentIds) { - const BAttachment = require('./entities/battachment.js'); // avoiding circular dependency problems - return sql.getManyRows("SELECT * FROM attachments WHERE attachmentId IN (???) AND isDeleted = 0", attachmentIds) + getAttachments(attachmentIds: string[]): BAttachment[] { + const BAttachment = require('./entities/battachment'); // avoiding circular dependency problems + return sql.getManyRows("SELECT * FROM attachments WHERE attachmentId IN (???) 
AND isDeleted = 0", attachmentIds) .map(row => new BAttachment(row)); } - /** @returns {BBlob|null} */ - getBlob(entity) { + getBlob(entity: { blobId?: string }): BBlob | null { + if (!entity.blobId) { + return null; + } + const row = sql.getRow("SELECT *, LENGTH(content) AS contentLength FROM blobs WHERE blobId = ?", [entity.blobId]); - const BBlob = require('./entities/bblob.js'); // avoiding circular dependency problems + const BBlob = require('./entities/bblob'); // avoiding circular dependency problems return row ? new BBlob(row) : null; } - /** @returns {BOption|null} */ - getOption(name) { + getOption(name: string): BOption | null { return this.options[name]; } - /** @returns {BEtapiToken[]} */ - getEtapiTokens() { + getEtapiTokens(): BEtapiToken[] { return Object.values(this.etapiTokens); } - /** @returns {BEtapiToken|null} */ - getEtapiToken(etapiTokenId) { + getEtapiToken(etapiTokenId: string): BEtapiToken | null { return this.etapiTokens[etapiTokenId]; } - /** @returns {AbstractBeccaEntity|null} */ - getEntity(entityName, entityId) { + getEntity>(entityName: string, entityId: string): AbstractBeccaEntity | null { if (!entityName || !entityId) { return null; } @@ -231,22 +236,20 @@ class Becca { throw new Error(`Unknown entity name '${camelCaseEntityName}' (original argument '${entityName}')`); } - return this[camelCaseEntityName][entityId]; + return (this as any)[camelCaseEntityName][entityId]; } - /** @returns {BRecentNote[]} */ - getRecentNotesFromQuery(query, params = []) { + getRecentNotesFromQuery(query: string, params = []): BRecentNote[] { const rows = sql.getRows(query, params); - const BRecentNote = require('./entities/brecent_note.js'); // avoiding circular dependency problems + const BRecentNote = require('./entities/brecent_note'); // avoiding circular dependency problems return rows.map(row => new BRecentNote(row)); } - /** @returns {BRevision[]} */ - getRevisionsFromQuery(query, params = []) { - const rows = sql.getRows(query, params); + 
getRevisionsFromQuery(query: string, params = []): BRevision[] { + const rows = sql.getRows(query, params); - const BRevision = require('./entities/brevision.js'); // avoiding circular dependency problems + const BRevision = require('./entities/brevision'); // avoiding circular dependency problems return rows.map(row => new BRevision(row)); } @@ -260,8 +263,8 @@ class Becca { if (!this.allNoteSetCache) { const allNotes = []; - for (const noteId in becca.notes) { - const note = becca.notes[noteId]; + for (const noteId in this.notes) { + const note = this.notes[noteId]; // in the process of loading data sometimes we create "skeleton" note instances which are expected to be filled later // in case of inconsistent data this might not work and search will then crash on these @@ -277,6 +280,12 @@ class Becca { } } -const becca = new Becca(); - -module.exports = becca; +/** + * This interface contains the data that is shared across all the objects of a given derived class of {@link AbstractBeccaEntity}. + * For example, all BAttributes will share their content, but all BBranches will have another set of this data. 
+ */ +export interface ConstructorData> { + primaryKeyName: string; + entityName: string; + hashedProperties: (keyof T)[]; +} \ No newline at end of file diff --git a/src/becca/becca.ts b/src/becca/becca.ts new file mode 100644 index 0000000000..a66dc442d1 --- /dev/null +++ b/src/becca/becca.ts @@ -0,0 +1,7 @@ +"use strict"; + +import Becca from "./becca-interface"; + +const becca = new Becca(); + +export = becca; diff --git a/src/becca/becca_loader.js b/src/becca/becca_loader.ts similarity index 70% rename from src/becca/becca_loader.js rename to src/becca/becca_loader.ts index 5d6ce6f067..b7ea941c8a 100644 --- a/src/becca/becca_loader.js +++ b/src/becca/becca_loader.ts @@ -1,24 +1,26 @@ "use strict"; -const sql = require('../services/sql.js'); -const eventService = require('../services/events.js'); -const becca = require('./becca.js'); -const sqlInit = require('../services/sql_init.js'); -const log = require('../services/log.js'); -const BNote = require('./entities/bnote.js'); -const BBranch = require('./entities/bbranch.js'); -const BAttribute = require('./entities/battribute.js'); -const BOption = require('./entities/boption.js'); -const BEtapiToken = require('./entities/betapi_token.js'); -const cls = require('../services/cls.js'); -const entityConstructor = require('../becca/entity_constructor.js'); - -const beccaLoaded = new Promise((res, rej) => { +import sql = require('../services/sql'); +import eventService = require('../services/events'); +import becca = require('./becca'); +import sqlInit = require('../services/sql_init'); +import log = require('../services/log'); +import BNote = require('./entities/bnote'); +import BBranch = require('./entities/bbranch'); +import BAttribute = require('./entities/battribute'); +import BOption = require('./entities/boption'); +import BEtapiToken = require('./entities/betapi_token'); +import cls = require('../services/cls'); +import entityConstructor = require('../becca/entity_constructor'); +import { AttributeRow, 
BranchRow, EtapiTokenRow, NoteRow, OptionRow } from './entities/rows'; +import AbstractBeccaEntity = require('./entities/abstract_becca_entity'); + +const beccaLoaded = new Promise((res, rej) => { sqlInit.dbReady.then(() => { cls.init(() => { load(); - require('../services/options_init.js').initStartupOptions(); + require('../services/options_init').initStartupOptions(); res(); }); @@ -38,23 +40,23 @@ function load() { new BNote().update(row).init(); } - const branchRows = sql.getRawRows(`SELECT branchId, noteId, parentNoteId, prefix, notePosition, isExpanded, utcDateModified FROM branches WHERE isDeleted = 0`); + const branchRows = sql.getRawRows(`SELECT branchId, noteId, parentNoteId, prefix, notePosition, isExpanded, utcDateModified FROM branches WHERE isDeleted = 0`); // in-memory sort is faster than in the DB - branchRows.sort((a, b) => a.notePosition - b.notePosition); + branchRows.sort((a, b) => (a.notePosition || 0) - (b.notePosition || 0)); for (const row of branchRows) { new BBranch().update(row).init(); } - for (const row of sql.getRawRows(`SELECT attributeId, noteId, type, name, value, isInheritable, position, utcDateModified FROM attributes WHERE isDeleted = 0`)) { + for (const row of sql.getRawRows(`SELECT attributeId, noteId, type, name, value, isInheritable, position, utcDateModified FROM attributes WHERE isDeleted = 0`)) { new BAttribute().update(row).init(); } - for (const row of sql.getRows(`SELECT name, value, isSynced, utcDateModified FROM options`)) { + for (const row of sql.getRows(`SELECT name, value, isSynced, utcDateModified FROM options`)) { new BOption(row); } - for (const row of sql.getRows(`SELECT etapiTokenId, name, tokenHash, utcDateCreated, utcDateModified FROM etapi_tokens WHERE isDeleted = 0`)) { + for (const row of sql.getRows(`SELECT etapiTokenId, name, tokenHash, utcDateCreated, utcDateModified FROM etapi_tokens WHERE isDeleted = 0`)) { new BEtapiToken(row); } }); @@ -68,13 +70,13 @@ function load() { log.info(`Becca (note 
cache) load took ${Date.now() - start}ms`); } -function reload(reason) { +function reload(reason: string) { load(); - require('../services/ws.js').reloadFrontend(reason || "becca reloaded"); + require('../services/ws').reloadFrontend(reason || "becca reloaded"); } -eventService.subscribeBeccaLoader([eventService.ENTITY_CHANGE_SYNCED], ({entityName, entityRow}) => { +eventService.subscribeBeccaLoader([eventService.ENTITY_CHANGE_SYNCED], ({ entityName, entityRow }) => { if (!becca.loaded) { return; } @@ -88,7 +90,7 @@ eventService.subscribeBeccaLoader([eventService.ENTITY_CHANGE_SYNCED], ({entity if (beccaEntity) { beccaEntity.updateFromRow(entityRow); } else { - beccaEntity = new EntityClass(); + beccaEntity = new EntityClass() as AbstractBeccaEntity>; beccaEntity.updateFromRow(entityRow); beccaEntity.init(); } @@ -97,7 +99,7 @@ eventService.subscribeBeccaLoader([eventService.ENTITY_CHANGE_SYNCED], ({entity postProcessEntityUpdate(entityName, entityRow); }); -eventService.subscribeBeccaLoader(eventService.ENTITY_CHANGED, ({entityName, entity}) => { +eventService.subscribeBeccaLoader(eventService.ENTITY_CHANGED, ({ entityName, entity }) => { if (!becca.loaded) { return; } @@ -112,7 +114,7 @@ eventService.subscribeBeccaLoader(eventService.ENTITY_CHANGED, ({entityName, en * @param entityRow - can be a becca entity (change comes from this trilium instance) or just a row (from sync). * It should be therefore treated as a row. 
*/ -function postProcessEntityUpdate(entityName, entityRow) { +function postProcessEntityUpdate(entityName: string, entityRow: any) { if (entityName === 'notes') { noteUpdated(entityRow); } else if (entityName === 'branches') { @@ -124,7 +126,7 @@ function postProcessEntityUpdate(entityName, entityRow) { } } -eventService.subscribeBeccaLoader([eventService.ENTITY_DELETED, eventService.ENTITY_DELETE_SYNCED], ({entityName, entityId}) => { +eventService.subscribeBeccaLoader([eventService.ENTITY_DELETED, eventService.ENTITY_DELETE_SYNCED], ({ entityName, entityId }) => { if (!becca.loaded) { return; } @@ -140,13 +142,13 @@ eventService.subscribeBeccaLoader([eventService.ENTITY_DELETED, eventService.ENT } }); -function noteDeleted(noteId) { +function noteDeleted(noteId: string) { delete becca.notes[noteId]; becca.dirtyNoteSetCache(); } -function branchDeleted(branchId) { +function branchDeleted(branchId: string) { const branch = becca.branches[branchId]; if (!branch) { @@ -173,23 +175,26 @@ function branchDeleted(branchId) { } delete becca.childParentToBranch[`${branch.noteId}-${branch.parentNoteId}`]; - delete becca.branches[branch.branchId]; + if (branch.branchId) { + delete becca.branches[branch.branchId]; + } } -function noteUpdated(entityRow) { +function noteUpdated(entityRow: NoteRow) { const note = becca.notes[entityRow.noteId]; if (note) { + // TODO, this wouldn't have worked in the original implementation since the variable was named __flatTextCache. 
// type / mime could have been changed, and they are present in flatTextCache - note.flatTextCache = null; + note.__flatTextCache = null; } } -function branchUpdated(branchRow) { +function branchUpdated(branchRow: BranchRow) { const childNote = becca.notes[branchRow.noteId]; if (childNote) { - childNote.flatTextCache = null; + childNote.__flatTextCache = null; childNote.sortParents(); // notes in the subtree can get new inherited attributes @@ -204,7 +209,7 @@ function branchUpdated(branchRow) { } } -function attributeDeleted(attributeId) { +function attributeDeleted(attributeId: string) { const attribute = becca.attributes[attributeId]; if (!attribute) { @@ -239,8 +244,7 @@ function attributeDeleted(attributeId) { } } -/** @param {BAttribute} attributeRow */ -function attributeUpdated(attributeRow) { +function attributeUpdated(attributeRow: BAttribute) { const attribute = becca.attributes[attributeRow.attributeId]; const note = becca.notes[attributeRow.noteId]; @@ -253,7 +257,7 @@ function attributeUpdated(attributeRow) { } } -function noteReorderingUpdated(branchIdList) { +function noteReorderingUpdated(branchIdList: number[]) { const parentNoteIds = new Set(); for (const branchId in branchIdList) { @@ -267,7 +271,7 @@ function noteReorderingUpdated(branchIdList) { } } -function etapiTokenDeleted(etapiTokenId) { +function etapiTokenDeleted(etapiTokenId: string) { delete becca.etapiTokens[etapiTokenId]; } @@ -275,14 +279,14 @@ eventService.subscribeBeccaLoader(eventService.ENTER_PROTECTED_SESSION, () => { try { becca.decryptProtectedNotes(); } - catch (e) { + catch (e: any) { log.error(`Could not decrypt protected notes: ${e.message} ${e.stack}`); } }); eventService.subscribeBeccaLoader(eventService.LEAVE_PROTECTED_SESSION, load); -module.exports = { +export = { load, reload, beccaLoaded diff --git a/src/becca/becca_service.js b/src/becca/becca_service.ts similarity index 82% rename from src/becca/becca_service.js rename to src/becca/becca_service.ts index 
0ac5c25912..2a9eb27812 100644 --- a/src/becca/becca_service.js +++ b/src/becca/becca_service.ts @@ -1,10 +1,10 @@ "use strict"; -const becca = require('./becca.js'); -const cls = require('../services/cls.js'); -const log = require('../services/log.js'); +import becca = require('./becca'); +import cls = require('../services/cls'); +import log = require('../services/log'); -function isNotePathArchived(notePath) { +function isNotePathArchived(notePath: string[]) { const noteId = notePath[notePath.length - 1]; const note = becca.notes[noteId]; @@ -24,9 +24,9 @@ function isNotePathArchived(notePath) { return false; } -function getNoteTitle(childNoteId, parentNoteId) { +function getNoteTitle(childNoteId: string, parentNoteId?: string) { const childNote = becca.notes[childNoteId]; - const parentNote = becca.notes[parentNoteId]; + const parentNote = parentNoteId ? becca.notes[parentNoteId] : null; if (!childNote) { log.info(`Cannot find note '${childNoteId}'`); @@ -40,7 +40,7 @@ function getNoteTitle(childNoteId, parentNoteId) { return `${(branch && branch.prefix) ? 
`${branch.prefix} - ` : ''}${title}`; } -function getNoteTitleArrayForPath(notePathArray) { +function getNoteTitleArrayForPath(notePathArray: string[]) { if (!notePathArray || !Array.isArray(notePathArray)) { throw new Error(`${notePathArray} is not an array.`); } @@ -76,13 +76,13 @@ function getNoteTitleArrayForPath(notePathArray) { return titles; } -function getNoteTitleForPath(notePathArray) { +function getNoteTitleForPath(notePathArray: string[]) { const titles = getNoteTitleArrayForPath(notePathArray); return titles.join(' / '); } -module.exports = { +export = { getNoteTitle, getNoteTitleForPath, isNotePathArchived diff --git a/src/becca/entities/abstract_becca_entity.js b/src/becca/entities/abstract_becca_entity.ts similarity index 56% rename from src/becca/entities/abstract_becca_entity.js rename to src/becca/entities/abstract_becca_entity.ts index 3b2e6331b4..ca7446368d 100644 --- a/src/becca/entities/abstract_becca_entity.js +++ b/src/becca/entities/abstract_becca_entity.ts @@ -1,66 +1,77 @@ "use strict"; -const utils = require('../../services/utils.js'); -const sql = require('../../services/sql.js'); -const entityChangesService = require('../../services/entity_changes.js'); -const eventService = require('../../services/events.js'); -const dateUtils = require('../../services/date_utils.js'); -const cls = require('../../services/cls.js'); -const log = require('../../services/log.js'); -const protectedSessionService = require('../../services/protected_session.js'); -const blobService = require('../../services/blob.js'); - -let becca = null; +import utils = require('../../services/utils'); +import sql = require('../../services/sql'); +import entityChangesService = require('../../services/entity_changes'); +import eventService = require('../../services/events'); +import dateUtils = require('../../services/date_utils'); +import cls = require('../../services/cls'); +import log = require('../../services/log'); +import protectedSessionService = 
require('../../services/protected_session'); +import blobService = require('../../services/blob'); +import Becca, { ConstructorData } from '../becca-interface'; + +let becca: Becca; + +interface ContentOpts { + forceSave?: boolean; + forceFrontendReload?: boolean; +} /** * Base class for all backend entities. + * + * @type T the same entity type needed for self-reference in {@link ConstructorData}. */ -class AbstractBeccaEntity { - /** @protected */ - beforeSaving() { - if (!this[this.constructor.primaryKeyName]) { - this[this.constructor.primaryKeyName] = utils.newEntityId(); +abstract class AbstractBeccaEntity> { + + utcDateModified?: string; + protected dateCreated?: string; + protected dateModified?: string; + + utcDateCreated!: string; + + isProtected?: boolean; + isSynced?: boolean; + blobId?: string; + + protected beforeSaving() { + const constructorData = (this.constructor as unknown as ConstructorData); + if (!(this as any)[constructorData.primaryKeyName]) { + (this as any)[constructorData.primaryKeyName] = utils.newEntityId(); } } - /** @protected */ getUtcDateChanged() { return this.utcDateModified || this.utcDateCreated; } - /** - * @protected - * @returns {Becca} - */ - get becca() { + protected get becca(): Becca { if (!becca) { - becca = require('../becca.js'); + becca = require('../becca'); } - return becca; + return becca as Becca; } - /** @protected */ - putEntityChange(isDeleted) { + protected putEntityChange(isDeleted: boolean) { + const constructorData = (this.constructor as unknown as ConstructorData); entityChangesService.putEntityChange({ - entityName: this.constructor.entityName, - entityId: this[this.constructor.primaryKeyName], + entityName: constructorData.entityName, + entityId: (this as any)[constructorData.primaryKeyName], hash: this.generateHash(isDeleted), isErased: false, utcDateChanged: this.getUtcDateChanged(), - isSynced: this.constructor.entityName !== 'options' || !!this.isSynced + isSynced: constructorData.entityName !== 
'options' || !!this.isSynced }); } - /** - * @protected - * @returns {string} - */ - generateHash(isDeleted) { + generateHash(isDeleted?: boolean): string { + const constructorData = (this.constructor as unknown as ConstructorData); let contentToHash = ""; - for (const propertyName of this.constructor.hashedProperties) { - contentToHash += `|${this[propertyName]}`; + for (const propertyName of constructorData.hashedProperties) { + contentToHash += `|${(this as any)[propertyName]}`; } if (isDeleted) { @@ -70,31 +81,40 @@ class AbstractBeccaEntity { return utils.hash(contentToHash).substr(0, 10); } - /** @protected */ - getPojoToSave() { + protected getPojoToSave() { return this.getPojo(); } - /** - * @protected - * @abstract - */ - getPojo() { - throw new Error(`Unimplemented getPojo() for entity '${this.constructor.name}'`) + hasStringContent(): boolean { + // TODO: Not sure why some entities don't implement it. + return true; + } + + abstract getPojo(): {}; + + init() { + // Do nothing by default, can be overriden in derived classes. + } + + abstract updateFromRow(row: unknown): void; + + get isDeleted(): boolean { + // TODO: Not sure why some entities don't implement it. + return false; } /** * Saves entity - executes SQL, but doesn't commit the transaction on its own - * - * @returns {this} */ - save(opts = {}) { - const entityName = this.constructor.entityName; - const primaryKeyName = this.constructor.primaryKeyName; - - const isNewEntity = !this[primaryKeyName]; + // TODO: opts not used but called a few times, maybe should be used by derived classes or passed to beforeSaving. 
+ save(opts?: {}): this { + const constructorData = (this.constructor as unknown as ConstructorData); + const entityName = constructorData.entityName; + const primaryKeyName = constructorData.primaryKeyName; - this.beforeSaving(opts); + const isNewEntity = !(this as any)[primaryKeyName]; + + this.beforeSaving(); const pojo = this.getPojoToSave(); @@ -124,14 +144,14 @@ class AbstractBeccaEntity { return this; } - /** @protected */ - _setContent(content, opts = {}) { + protected _setContent(content: string | Buffer, opts: ContentOpts = {}) { // client code asks to save entity even if blobId didn't change (something else was changed) opts.forceSave = !!opts.forceSave; opts.forceFrontendReload = !!opts.forceFrontendReload; if (content === null || content === undefined) { - throw new Error(`Cannot set null content to ${this.constructor.primaryKeyName} '${this[this.constructor.primaryKeyName]}'`); + const constructorData = (this.constructor as unknown as ConstructorData); + throw new Error(`Cannot set null content to ${constructorData.primaryKeyName} '${(this as any)[constructorData.primaryKeyName]}'`); } if (this.hasStringContent()) { @@ -140,32 +160,36 @@ class AbstractBeccaEntity { content = Buffer.isBuffer(content) ? 
content : Buffer.from(content); } - const unencryptedContentForHashCalculation = this.#getUnencryptedContentForHashCalculation(content); + const unencryptedContentForHashCalculation = this.getUnencryptedContentForHashCalculation(content); if (this.isProtected) { if (protectedSessionService.isProtectedSessionAvailable()) { - content = protectedSessionService.encrypt(content); + const encryptedContent = protectedSessionService.encrypt(content); + if (!encryptedContent) { + throw new Error(`Unable to encrypt the content of the entity.`); + } + content = encryptedContent; } else { throw new Error(`Cannot update content of blob since protected session is not available.`); } } sql.transactional(() => { - const newBlobId = this.#saveBlob(content, unencryptedContentForHashCalculation, opts); + const newBlobId = this.saveBlob(content, unencryptedContentForHashCalculation, opts); const oldBlobId = this.blobId; if (newBlobId !== oldBlobId || opts.forceSave) { this.blobId = newBlobId; this.save(); - if (newBlobId !== oldBlobId) { - this.#deleteBlobIfNotUsed(oldBlobId); + if (oldBlobId && newBlobId !== oldBlobId) { + this.deleteBlobIfNotUsed(oldBlobId); } } }); } - #deleteBlobIfNotUsed(oldBlobId) { + private deleteBlobIfNotUsed(oldBlobId: string) { if (sql.getValue("SELECT 1 FROM notes WHERE blobId = ? 
LIMIT 1", [oldBlobId])) { return; } @@ -184,7 +208,7 @@ class AbstractBeccaEntity { sql.execute("DELETE FROM entity_changes WHERE entityName = 'blobs' AND entityId = ?", [oldBlobId]); } - #getUnencryptedContentForHashCalculation(unencryptedContent) { + private getUnencryptedContentForHashCalculation(unencryptedContent: Buffer | string) { if (this.isProtected) { // a "random" prefix makes sure that the calculated hash/blobId is different for a decrypted/encrypted content const encryptedPrefixSuffix = "t$[nvQg7q)&_ENCRYPTED_?M:Bf&j3jr_"; @@ -196,7 +220,7 @@ class AbstractBeccaEntity { } } - #saveBlob(content, unencryptedContentForHashCalculation, opts = {}) { + private saveBlob(content: string | Buffer, unencryptedContentForHashCalculation: string | Buffer, opts: ContentOpts = {}) { /* * We're using the unencrypted blob for the hash calculation, because otherwise the random IV would * cause every content blob to be unique which would balloon the database size (esp. with revisioning). @@ -243,41 +267,37 @@ class AbstractBeccaEntity { return newBlobId; } - /** - * @protected - * @returns {string|Buffer} - */ - _getContent() { - const row = sql.getRow(`SELECT content FROM blobs WHERE blobId = ?`, [this.blobId]); + protected _getContent(): string | Buffer { + const row = sql.getRow<{ content: string | Buffer }>(`SELECT content FROM blobs WHERE blobId = ?`, [this.blobId]); if (!row) { - throw new Error(`Cannot find content for ${this.constructor.primaryKeyName} '${this[this.constructor.primaryKeyName]}', blobId '${this.blobId}'`); + const constructorData = (this.constructor as unknown as ConstructorData); + throw new Error(`Cannot find content for ${constructorData.primaryKeyName} '${(this as any)[constructorData.primaryKeyName]}', blobId '${this.blobId}'`); } - return blobService.processContent(row.content, this.isProtected, this.hasStringContent()); + return blobService.processContent(row.content, this.isProtected || false, this.hasStringContent()); } /** * Mark the 
entity as (soft) deleted. It will be completely erased later. * * This is a low-level method, for notes and branches use `note.deleteNote()` and 'branch.deleteBranch()` instead. - * - * @param [deleteId=null] */ - markAsDeleted(deleteId = null) { - const entityId = this[this.constructor.primaryKeyName]; - const entityName = this.constructor.entityName; + markAsDeleted(deleteId: string | null = null) { + const constructorData = (this.constructor as unknown as ConstructorData); + const entityId = (this as any)[constructorData.primaryKeyName]; + const entityName = constructorData.entityName; this.utcDateModified = dateUtils.utcNowDateTime(); sql.execute(`UPDATE ${entityName} SET isDeleted = 1, deleteId = ?, utcDateModified = ? - WHERE ${this.constructor.primaryKeyName} = ?`, + WHERE ${constructorData.primaryKeyName} = ?`, [deleteId, this.utcDateModified, entityId]); if (this.dateModified) { this.dateModified = dateUtils.localNowDateTime(); - sql.execute(`UPDATE ${entityName} SET dateModified = ? WHERE ${this.constructor.primaryKeyName} = ?`, + sql.execute(`UPDATE ${entityName} SET dateModified = ? WHERE ${constructorData.primaryKeyName} = ?`, [this.dateModified, entityId]); } @@ -289,13 +309,14 @@ class AbstractBeccaEntity { } markAsDeletedSimple() { - const entityId = this[this.constructor.primaryKeyName]; - const entityName = this.constructor.entityName; + const constructorData = (this.constructor as unknown as ConstructorData); + const entityId = (this as any)[constructorData.primaryKeyName]; + const entityName = constructorData.entityName; this.utcDateModified = dateUtils.utcNowDateTime(); sql.execute(`UPDATE ${entityName} SET isDeleted = 1, utcDateModified = ? 
- WHERE ${this.constructor.primaryKeyName} = ?`, + WHERE ${constructorData.primaryKeyName} = ?`, [this.utcDateModified, entityId]); log.info(`Marking ${entityName} ${entityId} as deleted`); @@ -306,4 +327,4 @@ class AbstractBeccaEntity { } } -module.exports = AbstractBeccaEntity; +export = AbstractBeccaEntity; diff --git a/src/becca/entities/battachment.js b/src/becca/entities/battachment.ts similarity index 68% rename from src/becca/entities/battachment.js rename to src/becca/entities/battachment.ts index faeb131e75..7b203839aa 100644 --- a/src/becca/entities/battachment.js +++ b/src/becca/entities/battachment.ts @@ -1,31 +1,64 @@ "use strict"; -const utils = require('../../services/utils.js'); -const dateUtils = require('../../services/date_utils.js'); -const AbstractBeccaEntity = require('./abstract_becca_entity.js'); -const sql = require('../../services/sql.js'); -const protectedSessionService = require('../../services/protected_session.js'); -const log = require('../../services/log.js'); +import utils = require('../../services/utils'); +import dateUtils = require('../../services/date_utils'); +import AbstractBeccaEntity = require('./abstract_becca_entity'); +import sql = require('../../services/sql'); +import protectedSessionService = require('../../services/protected_session'); +import log = require('../../services/log'); +import { AttachmentRow } from './rows'; +import BNote = require('./bnote'); +import BBranch = require('./bbranch'); const attachmentRoleToNoteTypeMapping = { 'image': 'image', 'file': 'file' }; +interface ContentOpts { + // TODO: Found in bnote.ts, to check if it's actually used and not a typo. + forceSave?: boolean; + + /** will also save this BAttachment entity */ + forceFullSave?: boolean; + /** override frontend heuristics on when to reload, instruct to reload */ + forceFrontendReload?: boolean; +} + /** * Attachment represent data related/attached to the note. 
Conceptually similar to attributes, but intended for * larger amounts of data and generally not accessible to the user. - * - * @extends AbstractBeccaEntity */ -class BAttachment extends AbstractBeccaEntity { +class BAttachment extends AbstractBeccaEntity { static get entityName() { return "attachments"; } static get primaryKeyName() { return "attachmentId"; } static get hashedProperties() { return ["attachmentId", "ownerId", "role", "mime", "title", "blobId", "utcDateScheduledForErasureSince"]; } - constructor(row) { + noteId?: number; + attachmentId?: string; + /** either noteId or revisionId to which this attachment belongs */ + ownerId!: string; + role!: string; + mime!: string; + title!: string; + type?: keyof typeof attachmentRoleToNoteTypeMapping; + position?: number; + blobId?: string; + isProtected?: boolean; + dateModified?: string; + utcDateScheduledForErasureSince?: string | null; + /** optionally added to the entity */ + contentLength?: number; + isDecrypted?: boolean; + + constructor(row: AttachmentRow) { super(); + this.updateFromRow(row); + this.decrypt(); + } + + updateFromRow(row: AttachmentRow): void { if (!row.ownerId?.trim()) { throw new Error("'ownerId' must be given to initialize a Attachment entity"); } else if (!row.role?.trim()) { @@ -36,43 +69,21 @@ class BAttachment extends AbstractBeccaEntity { throw new Error("'title' must be given to initialize a Attachment entity"); } - /** @type {string} */ this.attachmentId = row.attachmentId; - /** - * either noteId or revisionId to which this attachment belongs - * @type {string} - */ this.ownerId = row.ownerId; - /** @type {string} */ this.role = row.role; - /** @type {string} */ this.mime = row.mime; - /** @type {string} */ this.title = row.title; - /** @type {int} */ this.position = row.position; - /** @type {string} */ this.blobId = row.blobId; - /** @type {boolean} */ this.isProtected = !!row.isProtected; - /** @type {string} */ this.dateModified = row.dateModified; - /** @type {string} */ 
this.utcDateModified = row.utcDateModified; - /** @type {string} */ this.utcDateScheduledForErasureSince = row.utcDateScheduledForErasureSince; - - /** - * optionally added to the entity - * @type {int} - */ this.contentLength = row.contentLength; - - this.decrypt(); } - /** @returns {BAttachment} */ - copy() { + copy(): BAttachment { return new BAttachment({ ownerId: this.ownerId, role: this.role, @@ -83,14 +94,13 @@ class BAttachment extends AbstractBeccaEntity { }); } - /** @returns {BNote} */ - getNote() { + getNote(): BNote { return this.becca.notes[this.ownerId]; } - /** @returns {boolean} true if the note has string content (not binary) */ - hasStringContent() { - return utils.isStringNote(this.type, this.mime); + /** @returns true if the note has string content (not binary) */ + hasStringContent(): boolean { + return this.type !== undefined && utils.isStringNote(this.type, this.mime); } isContentAvailable() { @@ -111,33 +121,26 @@ class BAttachment extends AbstractBeccaEntity { if (!this.isDecrypted && protectedSessionService.isProtectedSessionAvailable()) { try { - this.title = protectedSessionService.decryptString(this.title); + this.title = protectedSessionService.decryptString(this.title) || ""; this.isDecrypted = true; } - catch (e) { + catch (e: any) { log.error(`Could not decrypt attachment ${this.attachmentId}: ${e.message} ${e.stack}`); } } } - /** @returns {string|Buffer} */ - getContent() { - return this._getContent(); + getContent(): Buffer { + return this._getContent() as Buffer; } - /** - * @param content - * @param {object} [opts] - * @param {object} [opts.forceSave=false] - will also save this BAttachment entity - * @param {object} [opts.forceFrontendReload=false] - override frontend heuristics on when to reload, instruct to reload - */ - setContent(content, opts) { + setContent(content: string | Buffer, opts: ContentOpts) { this._setContent(content, opts); } - /** @returns {{note: BNote, branch: BBranch}} */ - convertToNote() { - if 
(this.type === 'search') { + convertToNote(): { note: BNote, branch: BBranch } { + // TODO: can this ever be "search"? + if (this.type as string === 'search') { throw new Error(`Note of type search cannot have child notes`); } @@ -154,12 +157,12 @@ class BAttachment extends AbstractBeccaEntity { throw new Error(`Cannot convert protected attachment outside of protected session`); } - const noteService = require('../../services/notes.js'); + const noteService = require('../../services/notes'); const { note, branch } = noteService.createNewNote({ parentNoteId: this.ownerId, title: this.title, - type: attachmentRoleToNoteTypeMapping[this.role], + type: (attachmentRoleToNoteTypeMapping as any)[this.role], mime: this.mime, content: this.getContent(), isProtected: this.isProtected @@ -171,6 +174,11 @@ class BAttachment extends AbstractBeccaEntity { if (this.role === 'image' && parentNote.type === 'text') { const origContent = parentNote.getContent(); + + if (typeof origContent !== "string") { + throw new Error(`Note with ID '${note.noteId} has a text type but non-string content.`); + } + const oldAttachmentUrl = `api/attachments/${this.attachmentId}/image/`; const newNoteUrl = `api/images/${note.noteId}/`; @@ -196,9 +204,9 @@ class BAttachment extends AbstractBeccaEntity { super.beforeSaving(); if (this.position === undefined || this.position === null) { - this.position = 10 + sql.getValue(`SELECT COALESCE(MAX(position), 0) - FROM attachments - WHERE ownerId = ?`, [this.noteId]); + this.position = 10 + sql.getValue(`SELECT COALESCE(MAX(position), 0) + FROM attachments + WHERE ownerId = ?`, [this.noteId]); } this.dateModified = dateUtils.localNowDateTime(); @@ -211,7 +219,7 @@ class BAttachment extends AbstractBeccaEntity { ownerId: this.ownerId, role: this.role, mime: this.mime, - title: this.title, + title: this.title || undefined, position: this.position, blobId: this.blobId, isProtected: !!this.isProtected, @@ -229,7 +237,7 @@ class BAttachment extends 
AbstractBeccaEntity { if (pojo.isProtected) { if (this.isDecrypted) { - pojo.title = protectedSessionService.encrypt(pojo.title); + pojo.title = protectedSessionService.encrypt(pojo.title || "") || undefined; } else { // updating protected note outside of protected session means we will keep original ciphertexts @@ -241,4 +249,4 @@ class BAttachment extends AbstractBeccaEntity { } } -module.exports = BAttachment; +export = BAttachment; diff --git a/src/becca/entities/battribute.js b/src/becca/entities/battribute.ts similarity index 84% rename from src/becca/entities/battribute.js rename to src/becca/entities/battribute.ts index 55e7db66d9..ebd9e74cfe 100644 --- a/src/becca/entities/battribute.js +++ b/src/becca/entities/battribute.ts @@ -1,30 +1,34 @@ "use strict"; -const BNote = require('./bnote.js'); -const AbstractBeccaEntity = require('./abstract_becca_entity.js'); -const sql = require('../../services/sql.js'); -const dateUtils = require('../../services/date_utils.js'); -const promotedAttributeDefinitionParser = require('../../services/promoted_attribute_definition_parser.js'); -const {sanitizeAttributeName} = require('../../services/sanitize_attribute_name.js'); - - -/** - * There are currently only two types of attributes, labels or relations. 
- * @typedef {"label" | "relation"} AttributeType - */ +import BNote = require('./bnote'); +import AbstractBeccaEntity = require('./abstract_becca_entity'); +import dateUtils = require('../../services/date_utils'); +import promotedAttributeDefinitionParser = require('../../services/promoted_attribute_definition_parser'); +import sanitizeAttributeName = require('../../services/sanitize_attribute_name'); +import { AttributeRow, AttributeType } from './rows'; + +interface SavingOpts { + skipValidation?: boolean; +} /** * Attribute is an abstract concept which has two real uses - label (key - value pair) * and relation (representing named relationship between source and target note) - * - * @extends AbstractBeccaEntity */ -class BAttribute extends AbstractBeccaEntity { +class BAttribute extends AbstractBeccaEntity { static get entityName() { return "attributes"; } static get primaryKeyName() { return "attributeId"; } static get hashedProperties() { return ["attributeId", "noteId", "type", "name", "value", "isInheritable"]; } - constructor(row) { + attributeId!: string; + noteId!: string; + type!: AttributeType; + name!: string; + position!: number; + value!: string; + isInheritable!: boolean; + + constructor(row?: AttributeRow) { super(); if (!row) { @@ -35,7 +39,7 @@ class BAttribute extends AbstractBeccaEntity { this.init(); } - updateFromRow(row) { + updateFromRow(row: AttributeRow) { this.update([ row.attributeId, row.noteId, @@ -48,22 +52,14 @@ class BAttribute extends AbstractBeccaEntity { ]); } - update([attributeId, noteId, type, name, value, isInheritable, position, utcDateModified]) { - /** @type {string} */ + update([attributeId, noteId, type, name, value, isInheritable, position, utcDateModified]: any) { this.attributeId = attributeId; - /** @type {string} */ this.noteId = noteId; - /** @type {AttributeType} */ this.type = type; - /** @type {string} */ this.name = name; - /** @type {int} */ this.position = position; - /** @type {string} */ this.value = 
value || ""; - /** @type {boolean} */ this.isInheritable = !!isInheritable; - /** @type {string} */ this.utcDateModified = utcDateModified; return this; @@ -182,12 +178,12 @@ class BAttribute extends AbstractBeccaEntity { return !(this.attributeId in this.becca.attributes); } - beforeSaving(opts = {}) { + beforeSaving(opts: SavingOpts = {}) { if (!opts.skipValidation) { this.validate(); } - this.name = sanitizeAttributeName(this.name); + this.name = sanitizeAttributeName.sanitizeAttributeName(this.name); if (!this.value) { // null value isn't allowed @@ -226,7 +222,7 @@ class BAttribute extends AbstractBeccaEntity { }; } - createClone(type, name, value, isInheritable) { + createClone(type: AttributeType, name: string, value: string, isInheritable?: boolean) { return new BAttribute({ noteId: this.noteId, type: type, @@ -239,4 +235,4 @@ class BAttribute extends AbstractBeccaEntity { } } -module.exports = BAttribute; +export = BAttribute; diff --git a/src/becca/entities/bblob.js b/src/becca/entities/bblob.ts similarity index 54% rename from src/becca/entities/bblob.js rename to src/becca/entities/bblob.ts index def5888eee..40d1c58850 100644 --- a/src/becca/entities/bblob.js +++ b/src/becca/entities/bblob.ts @@ -1,25 +1,35 @@ -class BBlob { +import AbstractBeccaEntity = require("./abstract_becca_entity"); +import { BlobRow } from "./rows"; + +// TODO: Why this does not extend the abstract becca? 
+class BBlob extends AbstractBeccaEntity { static get entityName() { return "blobs"; } static get primaryKeyName() { return "blobId"; } static get hashedProperties() { return ["blobId", "content"]; } - constructor(row) { - /** @type {string} */ + blobId!: string; + content!: string | Buffer; + contentLength!: number; + dateModified!: string; + utcDateModified!: string; + + constructor(row: BlobRow) { + super(); + this.updateFromRow(row); + } + + updateFromRow(row: BlobRow): void { this.blobId = row.blobId; - /** @type {string|Buffer} */ this.content = row.content; - /** @type {int} */ this.contentLength = row.contentLength; - /** @type {string} */ this.dateModified = row.dateModified; - /** @type {string} */ this.utcDateModified = row.utcDateModified; } getPojo() { return { blobId: this.blobId, - content: this.content, + content: this.content || null, contentLength: this.contentLength, dateModified: this.dateModified, utcDateModified: this.utcDateModified @@ -27,4 +37,4 @@ class BBlob { } } -module.exports = BBlob; +export = BBlob; diff --git a/src/becca/entities/bbranch.js b/src/becca/entities/bbranch.ts similarity index 74% rename from src/becca/entities/bbranch.js rename to src/becca/entities/bbranch.ts index 94cc1b802b..677bc75ef6 100644 --- a/src/becca/entities/bbranch.js +++ b/src/becca/entities/bbranch.ts @@ -1,12 +1,13 @@ "use strict"; -const BNote = require('./bnote.js'); -const AbstractBeccaEntity = require('./abstract_becca_entity.js'); -const dateUtils = require('../../services/date_utils.js'); -const utils = require('../../services/utils.js'); -const TaskContext = require('../../services/task_context.js'); -const cls = require('../../services/cls.js'); -const log = require('../../services/log.js'); +import BNote = require('./bnote'); +import AbstractBeccaEntity = require('./abstract_becca_entity'); +import dateUtils = require('../../services/date_utils'); +import utils = require('../../services/utils'); +import TaskContext = 
require('../../services/task_context'); +import cls = require('../../services/cls'); +import log = require('../../services/log'); +import { BranchRow } from './rows'; /** * Branch represents a relationship between a child note and its parent note. Trilium allows a note to have multiple @@ -14,16 +15,22 @@ const log = require('../../services/log.js'); * * Note that you should not rely on the branch's identity, since it can change easily with a note's move. * Always check noteId instead. - * - * @extends AbstractBeccaEntity */ -class BBranch extends AbstractBeccaEntity { +class BBranch extends AbstractBeccaEntity { static get entityName() { return "branches"; } static get primaryKeyName() { return "branchId"; } // notePosition is not part of hash because it would produce a lot of updates in case of reordering static get hashedProperties() { return ["branchId", "noteId", "parentNoteId", "prefix"]; } - constructor(row) { + branchId?: string; + noteId!: string; + parentNoteId!: string; + prefix!: string | null; + notePosition!: number; + isExpanded!: boolean; + utcDateModified?: string; + + constructor(row?: BranchRow) { super(); if (!row) { @@ -34,7 +41,7 @@ class BBranch extends AbstractBeccaEntity { this.init(); } - updateFromRow(row) { + updateFromRow(row: BranchRow) { this.update([ row.branchId, row.noteId, @@ -46,20 +53,13 @@ class BBranch extends AbstractBeccaEntity { ]); } - update([branchId, noteId, parentNoteId, prefix, notePosition, isExpanded, utcDateModified]) { - /** @type {string} */ + update([branchId, noteId, parentNoteId, prefix, notePosition, isExpanded, utcDateModified]: any) { this.branchId = branchId; - /** @type {string} */ this.noteId = noteId; - /** @type {string} */ this.parentNoteId = parentNoteId; - /** @type {string|null} */ this.prefix = prefix; - /** @type {int} */ this.notePosition = notePosition; - /** @type {boolean} */ this.isExpanded = !!isExpanded; - /** @type {string} */ this.utcDateModified = utcDateModified; return this; @@ -83,18 
+83,18 @@ class BBranch extends AbstractBeccaEntity { } const parentNote = this.parentNote; - - if (!childNote.parents.includes(parentNote)) { - childNote.parents.push(parentNote); - } - - if (!parentNote.children.includes(childNote)) { - parentNote.children.push(childNote); + if (parentNote) { + if (!childNote.parents.includes(parentNote)) { + childNote.parents.push(parentNote); + } + + if (!parentNote.children.includes(childNote)) { + parentNote.children.push(childNote); + } } } - /** @returns {BNote} */ - get childNote() { + get childNote(): BNote { if (!(this.noteId in this.becca.notes)) { // entities can come out of order in sync/import, create skeleton which will be filled later this.becca.addNote(this.noteId, new BNote({noteId: this.noteId})); @@ -103,13 +103,12 @@ class BBranch extends AbstractBeccaEntity { return this.becca.notes[this.noteId]; } - /** @returns {BNote} */ - getNote() { + getNote(): BNote { return this.childNote; } - /** @returns {BNote|undefined} - root branch will have undefined parent, all other branches have to have a parent note */ - get parentNote() { + /** @returns root branch will have undefined parent, all other branches have to have a parent note */ + get parentNote(): BNote | undefined { if (!(this.parentNoteId in this.becca.notes) && this.parentNoteId !== 'none') { // entities can come out of order in sync/import, create skeleton which will be filled later this.becca.addNote(this.parentNoteId, new BNote({noteId: this.parentNoteId})); @@ -119,7 +118,7 @@ class BBranch extends AbstractBeccaEntity { } get isDeleted() { - return !(this.branchId in this.becca.branches); + return (this.branchId == undefined || !(this.branchId in this.becca.branches)); } /** @@ -138,12 +137,11 @@ class BBranch extends AbstractBeccaEntity { /** * Delete a branch. If this is a last note's branch, delete the note as well. 
* - * @param {string} [deleteId] - optional delete identified - * @param {TaskContext} [taskContext] + * @param deleteId - optional delete identified * - * @returns {boolean} - true if note has been deleted, false otherwise + * @returns true if note has been deleted, false otherwise */ - deleteBranch(deleteId, taskContext) { + deleteBranch(deleteId: string, taskContext: TaskContext): boolean { if (!deleteId) { deleteId = utils.randomString(10); } @@ -161,7 +159,7 @@ class BBranch extends AbstractBeccaEntity { if (parentBranches.length === 1 && parentBranches[0] === this) { // needs to be run before branches and attributes are deleted and thus attached relations disappear - const handlers = require('../../services/handlers.js'); + const handlers = require('../../services/handlers'); handlers.runAttachedRelations(note, 'runOnNoteDeletion', note); } } @@ -182,7 +180,9 @@ class BBranch extends AbstractBeccaEntity { } for (const childBranch of note.getChildBranches()) { - childBranch.deleteBranch(deleteId, taskContext); + if (childBranch) { + childBranch.deleteBranch(deleteId, taskContext); + } } // first delete children and then parent - this will show up better in recent changes @@ -222,11 +222,17 @@ class BBranch extends AbstractBeccaEntity { if (this.notePosition === undefined || this.notePosition === null) { let maxNotePos = 0; - for (const childBranch of this.parentNote.getChildBranches()) { - if (maxNotePos < childBranch.notePosition - && childBranch.noteId !== '_hidden' // hidden has a very large notePosition to always stay last - ) { - maxNotePos = childBranch.notePosition; + if (this.parentNote) { + for (const childBranch of this.parentNote.getChildBranches()) { + if (!childBranch) { + continue; + } + + if (maxNotePos < childBranch.notePosition + && childBranch.noteId !== '_hidden' // hidden has a very large notePosition to always stay last + ) { + maxNotePos = childBranch.notePosition; + } } } @@ -261,17 +267,19 @@ class BBranch extends AbstractBeccaEntity { 
}; } - createClone(parentNoteId, notePosition) { + createClone(parentNoteId: string, notePosition?: number) { const existingBranch = this.becca.getBranchFromChildAndParent(this.noteId, parentNoteId); if (existingBranch) { - existingBranch.notePosition = notePosition; + if (notePosition) { + existingBranch.notePosition = notePosition; + } return existingBranch; } else { return new BBranch({ noteId: this.noteId, parentNoteId: parentNoteId, - notePosition: notePosition, + notePosition: notePosition || null, prefix: this.prefix, isExpanded: this.isExpanded }); @@ -279,4 +287,4 @@ class BBranch extends AbstractBeccaEntity { } } -module.exports = BBranch; +export = BBranch; diff --git a/src/becca/entities/betapi_token.js b/src/becca/entities/betapi_token.ts similarity index 72% rename from src/becca/entities/betapi_token.js rename to src/becca/entities/betapi_token.ts index 0fded9e637..390b580bd9 100644 --- a/src/becca/entities/betapi_token.js +++ b/src/becca/entities/betapi_token.ts @@ -1,7 +1,9 @@ "use strict"; -const dateUtils = require('../../services/date_utils.js'); -const AbstractBeccaEntity = require('./abstract_becca_entity.js'); +import { EtapiTokenRow } from "./rows"; + +import dateUtils = require('../../services/date_utils'); +import AbstractBeccaEntity = require('./abstract_becca_entity'); /** * EtapiToken is an entity representing token used to authenticate against Trilium REST API from client applications. @@ -11,15 +13,18 @@ const AbstractBeccaEntity = require('./abstract_becca_entity.js'); * * The format user is presented with is "_". This is also called "authToken" to distinguish it * from tokenHash and token. 
- * - * @extends AbstractBeccaEntity */ -class BEtapiToken extends AbstractBeccaEntity { +class BEtapiToken extends AbstractBeccaEntity { static get entityName() { return "etapi_tokens"; } static get primaryKeyName() { return "etapiTokenId"; } static get hashedProperties() { return ["etapiTokenId", "name", "tokenHash", "utcDateCreated", "utcDateModified", "isDeleted"]; } - constructor(row) { + etapiTokenId?: string; + name!: string; + tokenHash!: string; + private _isDeleted?: boolean; + + constructor(row?: EtapiTokenRow) { super(); if (!row) { @@ -30,19 +35,17 @@ class BEtapiToken extends AbstractBeccaEntity { this.init(); } - updateFromRow(row) { - /** @type {string} */ + get isDeleted() { + return !!this._isDeleted; + } + + updateFromRow(row: EtapiTokenRow) { this.etapiTokenId = row.etapiTokenId; - /** @type {string} */ this.name = row.name; - /** @type {string} */ this.tokenHash = row.tokenHash; - /** @type {string} */ this.utcDateCreated = row.utcDateCreated || dateUtils.utcNowDateTime(); - /** @type {string} */ this.utcDateModified = row.utcDateModified || this.utcDateCreated; - /** @type {boolean} */ - this.isDeleted = !!row.isDeleted; + this._isDeleted = !!row.isDeleted; if (this.etapiTokenId) { this.becca.etapiTokens[this.etapiTokenId] = this; @@ -71,8 +74,10 @@ class BEtapiToken extends AbstractBeccaEntity { super.beforeSaving(); - this.becca.etapiTokens[this.etapiTokenId] = this; + if (this.etapiTokenId) { + this.becca.etapiTokens[this.etapiTokenId] = this; + } } } -module.exports = BEtapiToken; +export = BEtapiToken; diff --git a/src/becca/entities/bnote.js b/src/becca/entities/bnote.ts similarity index 70% rename from src/becca/entities/bnote.js rename to src/becca/entities/bnote.ts index 36f8ed3d44..e72e43adf3 100644 --- a/src/becca/entities/bnote.js +++ b/src/becca/entities/bnote.ts @@ -1,48 +1,92 @@ "use strict"; -const protectedSessionService = require('../../services/protected_session.js'); -const log = require('../../services/log.js'); -const sql 
= require('../../services/sql.js'); -const utils = require('../../services/utils.js'); -const dateUtils = require('../../services/date_utils.js'); -const AbstractBeccaEntity = require('./abstract_becca_entity.js'); -const BRevision = require('./brevision.js'); -const BAttachment = require('./battachment.js'); -const TaskContext = require('../../services/task_context.js'); -const dayjs = require("dayjs"); -const utc = require('dayjs/plugin/utc'); -const eventService = require('../../services/events.js'); +import protectedSessionService = require('../../services/protected_session'); +import log = require('../../services/log'); +import sql = require('../../services/sql'); +import utils = require('../../services/utils'); +import dateUtils = require('../../services/date_utils'); +import AbstractBeccaEntity = require('./abstract_becca_entity'); +import BRevision = require('./brevision'); +import BAttachment = require('./battachment'); +import TaskContext = require('../../services/task_context'); +import dayjs = require("dayjs"); +import utc = require('dayjs/plugin/utc'); +import eventService = require('../../services/events'); +import { AttachmentRow, NoteRow, NoteType, RevisionRow } from './rows'; +import BBranch = require('./bbranch'); +import BAttribute = require('./battribute'); dayjs.extend(utc); const LABEL = 'label'; const RELATION = 'relation'; -/** - * There are many different Note types, some of which are entirely opaque to the - * end user. Those types should be used only for checking against, they are - * not for direct use. 
- * @typedef {"file" | "image" | "search" | "noteMap" | "launcher" | "doc" | "contentWidget" | "text" | "relationMap" | "render" | "canvas" | "mermaid" | "book" | "webView" | "code"} NoteType - */ +interface NotePathRecord { + isArchived: boolean; + isInHoistedSubTree: boolean; + notePath: string[]; + isHidden: boolean; +} -/** - * @typedef {Object} NotePathRecord - * @property {boolean} isArchived - * @property {boolean} isInHoistedSubTree - * @property {Array} notePath - * @property {boolean} isHidden - */ +interface ContentOpts { + /** will also save this BNote entity */ + forceSave?: boolean; + /** override frontend heuristics on when to reload, instruct to reload */ + forceFrontendReload?: boolean; +} + +interface AttachmentOpts { + includeContentLength?: boolean; +} + +interface Relationship { + parentNoteId: string; + childNoteId: string +} + +interface ConvertOpts { + /** if true, the action is not triggered by user, but e.g. by migration, and only perfect candidates will be migrated */ + autoConversion?: boolean; +} /** * Trilium's main entity, which can represent text note, image, code note, file attachment etc. - * - * @extends AbstractBeccaEntity */ -class BNote extends AbstractBeccaEntity { +class BNote extends AbstractBeccaEntity { static get entityName() { return "notes"; } static get primaryKeyName() { return "noteId"; } static get hashedProperties() { return ["noteId", "title", "isProtected", "type", "mime", "blobId"]; } - constructor(row) { + noteId!: string; + title!: string; + type!: NoteType; + mime!: string; + /** set during the deletion operation, before it is completed (removed from becca completely). 
*/ + isBeingDeleted!: boolean; + isDecrypted!: boolean; + + ownedAttributes!: BAttribute[]; + parentBranches!: BBranch[]; + parents!: BNote[]; + children!: BNote[]; + targetRelations!: BAttribute[]; + + __flatTextCache!: string | null; + + private __attributeCache!: BAttribute[] | null; + private __inheritableAttributeCache!: BAttribute[] | null; + private __ancestorCache!: BNote[] | null; + + // following attributes are filled during searching in the database + /** size of the content in bytes */ + contentSize!: number | null; + /** size of the note content, attachment contents in bytes */ + contentAndAttachmentsSize!: number | null; + /** size of the note content, attachment contents and revision contents in bytes */ + contentAndAttachmentsAndRevisionsSize!: number | null; + /** number of note revisions for this note */ + revisionCount!: number | null; + + constructor(row?: Partial) { super(); if (!row) { @@ -53,7 +97,7 @@ class BNote extends AbstractBeccaEntity { this.init(); } - updateFromRow(row) { + updateFromRow(row: Partial) { this.update([ row.noteId, row.title, @@ -68,104 +112,47 @@ class BNote extends AbstractBeccaEntity { ]); } - update([noteId, title, type, mime, isProtected, blobId, dateCreated, dateModified, utcDateCreated, utcDateModified]) { + update([noteId, title, type, mime, isProtected, blobId, dateCreated, dateModified, utcDateCreated, utcDateModified]: any) { // ------ Database persisted attributes ------ - /** @type {string} */ this.noteId = noteId; - /** @type {string} */ this.title = title; - /** @type {NoteType} */ this.type = type; - /** @type {string} */ this.mime = mime; - /** @type {boolean} */ this.isProtected = !!isProtected; - /** @type {string} */ this.blobId = blobId; - /** @type {string} */ this.dateCreated = dateCreated || dateUtils.localNowDateTime(); - /** @type {string} */ this.dateModified = dateModified; - /** @type {string} */ this.utcDateCreated = utcDateCreated || dateUtils.utcNowDateTime(); - /** @type {string} */ 
this.utcDateModified = utcDateModified; - /** - * set during the deletion operation, before it is completed (removed from becca completely) - * @type {boolean} - */ this.isBeingDeleted = false; // ------ Derived attributes ------ - /** @type {boolean} */ this.isDecrypted = !this.noteId || !this.isProtected; this.decrypt(); - /** @type {string|null} */ this.__flatTextCache = null; return this; } init() { - /** @type {BBranch[]} - * @private */ this.parentBranches = []; - /** @type {BNote[]} - * @private */ this.parents = []; - /** @type {BNote[]} - * @private */ this.children = []; - /** @type {BAttribute[]} - * @private */ this.ownedAttributes = []; - - /** @type {BAttribute[]|null} - * @private */ this.__attributeCache = null; - /** @type {BAttribute[]|null} - * @private */ this.__inheritableAttributeCache = null; - - /** @type {BAttribute[]} - * @private */ this.targetRelations = []; this.becca.addNote(this.noteId, this); - - /** @type {BNote[]|null} - * @private */ this.__ancestorCache = null; - // following attributes are filled during searching in the database - - /** - * size of the content in bytes - * @type {int|null} - * @private - */ this.contentSize = null; - /** - * size of the note content, attachment contents in bytes - * @type {int|null} - * @private - */ this.contentAndAttachmentsSize = null; - /** - * size of the note content, attachment contents and revision contents in bytes - * @type {int|null} - * @private - */ this.contentAndAttachmentsAndRevisionsSize = null; - /** - * number of note revisions for this note - * @type {int|null} - * @private - */ this.revisionCount = null; } @@ -216,9 +203,9 @@ class BNote extends AbstractBeccaEntity { return this.children && this.children.length > 0; } - /** @returns {BBranch[]} */ - getChildBranches() { - return this.children.map(childNote => this.becca.getBranchFromChildAndParent(childNote.noteId, this.noteId)); + getChildBranches(): (BBranch | null)[] { + return this.children + .map(childNote => 
this.becca.getBranchFromChildAndParent(childNote.noteId, this.noteId)); } /* @@ -229,19 +216,16 @@ class BNote extends AbstractBeccaEntity { * - changes in the note metadata or title should not trigger note content sync (so we keep separate utcDateModified and entity changes records) * - but to the user note content and title changes are one and the same - single dateModified (so all changes must go through Note and content is not a separate entity) */ - - /** @returns {string|Buffer} */ getContent() { return this._getContent(); } /** - * @returns {*} * @throws Error in case of invalid JSON */ - getJsonContent() { + getJsonContent(): {} | null { const content = this.getContent(); - if (!content || !content.trim()) { + if (typeof content !== "string" || !content || !content.trim()) { return null; } @@ -258,19 +242,13 @@ class BNote extends AbstractBeccaEntity { } } - /** - * @param content - * @param {object} [opts] - * @param {object} [opts.forceSave=false] - will also save this BNote entity - * @param {object} [opts.forceFrontendReload=false] - override frontend heuristics on when to reload, instruct to reload - */ - setContent(content, opts) { + setContent(content: Buffer | string, opts: ContentOpts = {}) { this._setContent(content, opts); eventService.emit(eventService.NOTE_CONTENT_CHANGE, { entity: this }); } - setJsonContent(content) { + setJsonContent(content: {}) { this.setContent(JSON.stringify(content, null, '\t')); } @@ -351,14 +329,18 @@ class BNote extends AbstractBeccaEntity { * Beware that the method must not create a copy of the array, but actually returns its internal array * (for performance reasons) * - * @param {string} [type] - (optional) attribute type to filter - * @param {string} [name] - (optional) attribute name to filter - * @returns {BAttribute[]} all note's attributes, including inherited ones + * @param type - (optional) attribute type to filter + * @param name - (optional) attribute name to filter + * @returns all note's attributes, 
including inherited ones */ - getAttributes(type, name) { + getAttributes(type?: string, name?: string): BAttribute[] { this.__validateTypeName(type, name); this.__ensureAttributeCacheIsAvailable(); + if (!this.__attributeCache) { + throw new Error("Attribute cache not available."); + } + if (type && name) { return this.__attributeCache.filter(attr => attr.name === name && attr.type === type); } @@ -373,15 +355,13 @@ class BNote extends AbstractBeccaEntity { } } - /** @private */ - __ensureAttributeCacheIsAvailable() { + private __ensureAttributeCacheIsAvailable() { if (!this.__attributeCache) { this.__getAttributes([]); } } - /** @private */ - __getAttributes(path) { + private __getAttributes(path: string[]) { if (path.includes(this.noteId)) { return []; } @@ -437,11 +417,7 @@ class BNote extends AbstractBeccaEntity { return this.__attributeCache; } - /** - * @private - * @returns {BAttribute[]} - */ - __getInheritableAttributes(path) { + private __getInheritableAttributes(path: string[]): BAttribute[] { if (path.includes(this.noteId)) { return []; } @@ -450,10 +426,10 @@ class BNote extends AbstractBeccaEntity { this.__getAttributes(path); // will refresh also this.__inheritableAttributeCache } - return this.__inheritableAttributeCache; + return this.__inheritableAttributeCache || []; } - __validateTypeName(type, name) { + __validateTypeName(type?: string | null, name?: string | null) { if (type && type !== 'label' && type !== 'relation') { throw new Error(`Unrecognized attribute type '${type}'. 
Only 'label' and 'relation' are possible values.`); } @@ -466,13 +442,7 @@ class BNote extends AbstractBeccaEntity { } } - /** - * @param type - * @param name - * @param [value] - * @returns {boolean} - */ - hasAttribute(type, name, value = null) { + hasAttribute(type: string, name: string, value: string | null = null): boolean { return !!this.getAttributes().find(attr => attr.name === name && (value === undefined || value === null || attr.value === value) @@ -480,7 +450,7 @@ class BNote extends AbstractBeccaEntity { ); } - getAttributeCaseInsensitive(type, name, value) { + getAttributeCaseInsensitive(type: string, name: string, value?: string | null) { name = name.toLowerCase(); value = value ? value.toLowerCase() : null; @@ -490,24 +460,26 @@ class BNote extends AbstractBeccaEntity { && attr.type === type); } - getRelationTarget(name) { + getRelationTarget(name: string) { const relation = this.getAttributes().find(attr => attr.name === name && attr.type === 'relation'); return relation ? relation.targetNote : null; } /** - * @param {string} name - label name - * @param {string} [value] - label value - * @returns {boolean} true if label exists (including inherited) + * @param name - label name + * @param value - label value + * @returns true if label exists (including inherited) */ - hasLabel(name, value) { return this.hasAttribute(LABEL, name, value); } + hasLabel(name: string, value?: string): boolean { + return this.hasAttribute(LABEL, name, value); + } /** - * @param {string} name - label name - * @returns {boolean} true if label exists (including inherited) and does not have "false" value. + * @param name - label name + * @returns true if label exists (including inherited) and does not have "false" value. 
*/ - isLabelTruthy(name) { + isLabelTruthy(name: string): boolean { const label = this.getLabel(name); if (!label) { @@ -518,163 +490,185 @@ class BNote extends AbstractBeccaEntity { } /** - * @param {string} name - label name - * @param {string} [value] - label value - * @returns {boolean} true if label exists (excluding inherited) + * @param name - label name + * @param value - label value + * @returns true if label exists (excluding inherited) */ - hasOwnedLabel(name, value) { return this.hasOwnedAttribute(LABEL, name, value); } + hasOwnedLabel(name: string, value?: string): boolean { + return this.hasOwnedAttribute(LABEL, name, value); + } /** - * @param {string} name - relation name - * @param {string} [value] - relation value - * @returns {boolean} true if relation exists (including inherited) + * @param name - relation name + * @param value - relation value + * @returns true if relation exists (including inherited) */ - hasRelation(name, value) { return this.hasAttribute(RELATION, name, value); } + hasRelation(name: string, value?: string): boolean { + return this.hasAttribute(RELATION, name, value); + } /** - * @param {string} name - relation name - * @param {string} [value] - relation value - * @returns {boolean} true if relation exists (excluding inherited) + * @param name - relation name + * @param value - relation value + * @returns true if relation exists (excluding inherited) */ - hasOwnedRelation(name, value) { return this.hasOwnedAttribute(RELATION, name, value); } + hasOwnedRelation(name: string, value?: string): boolean { + return this.hasOwnedAttribute(RELATION, name, value); + } /** * @param {string} name - label name * @returns {BAttribute|null} label if it exists, null otherwise */ - getLabel(name) { return this.getAttribute(LABEL, name); } + getLabel(name: string): BAttribute | null { + return this.getAttribute(LABEL, name); + } /** - * @param {string} name - label name - * @returns {BAttribute|null} label if it exists, null otherwise + * 
@param name - label name + * @returns label if it exists, null otherwise */ - getOwnedLabel(name) { return this.getOwnedAttribute(LABEL, name); } + getOwnedLabel(name: string): BAttribute | null { + return this.getOwnedAttribute(LABEL, name); + } /** - * @param {string} name - relation name - * @returns {BAttribute|null} relation if it exists, null otherwise + * @param name - relation name + * @returns relation if it exists, null otherwise */ - getRelation(name) { return this.getAttribute(RELATION, name); } + getRelation(name: string): BAttribute | null { + return this.getAttribute(RELATION, name); + } /** - * @param {string} name - relation name - * @returns {BAttribute|null} relation if it exists, null otherwise + * @param name - relation name + * @returns relation if it exists, null otherwise */ - getOwnedRelation(name) { return this.getOwnedAttribute(RELATION, name); } + getOwnedRelation(name: string): BAttribute | null { + return this.getOwnedAttribute(RELATION, name); + } /** - * @param {string} name - label name - * @returns {string|null} label value if label exists, null otherwise + * @param name - label name + * @returns label value if label exists, null otherwise */ - getLabelValue(name) { return this.getAttributeValue(LABEL, name); } + getLabelValue(name: string): string | null { + return this.getAttributeValue(LABEL, name); + } /** - * @param {string} name - label name - * @returns {string|null} label value if label exists, null otherwise + * @param name - label name + * @returns label value if label exists, null otherwise */ - getOwnedLabelValue(name) { return this.getOwnedAttributeValue(LABEL, name); } + getOwnedLabelValue(name: string): string | null { + return this.getOwnedAttributeValue(LABEL, name); + } /** - * @param {string} name - relation name - * @returns {string|null} relation value if relation exists, null otherwise + * @param name - relation name + * @returns relation value if relation exists, null otherwise */ - getRelationValue(name) { 
return this.getAttributeValue(RELATION, name); } + getRelationValue(name: string): string | null { + return this.getAttributeValue(RELATION, name); + } /** - * @param {string} name - relation name - * @returns {string|null} relation value if relation exists, null otherwise + * @param name - relation name + * @returns relation value if relation exists, null otherwise */ - getOwnedRelationValue(name) { return this.getOwnedAttributeValue(RELATION, name); } + getOwnedRelationValue(name: string): string | null { + return this.getOwnedAttributeValue(RELATION, name); + } /** - * @param {string} type - attribute type (label, relation, etc.) - * @param {string} name - attribute name - * @param {string} [value] - attribute value - * @returns {boolean} true if note has an attribute with given type and name (excluding inherited) + * @param attribute type (label, relation, etc.) + * @param name - attribute name + * @param value - attribute value + * @returns true if note has an attribute with given type and name (excluding inherited) */ - hasOwnedAttribute(type, name, value) { + hasOwnedAttribute(type: string, name: string, value?: string): boolean { return !!this.getOwnedAttribute(type, name, value); } /** - * @param {string} type - attribute type (label, relation, etc.) - * @param {string} name - attribute name - * @returns {BAttribute} attribute of the given type and name. If there are more such attributes, first is returned. - * Returns null if there's no such attribute belonging to this note. + * @param type - attribute type (label, relation, etc.) + * @param name - attribute name + * @returns attribute of the given type and name. If there are more such attributes, first is returned. + * Returns null if there's no such attribute belonging to this note. 
*/ - getAttribute(type, name) { + getAttribute(type: string, name: string): BAttribute | null { const attributes = this.getAttributes(); - return attributes.find(attr => attr.name === name && attr.type === type); + return attributes.find(attr => attr.name === name && attr.type === type) || null; } /** - * @param {string} type - attribute type (label, relation, etc.) - * @param {string} name - attribute name - * @returns {string|null} attribute value of given type and name or null if no such attribute exists. + * @param type - attribute type (label, relation, etc.) + * @param name - attribute name + * @returns attribute value of given type and name or null if no such attribute exists. */ - getAttributeValue(type, name) { + getAttributeValue(type: string, name: string): string | null { const attr = this.getAttribute(type, name); return attr ? attr.value : null; } /** - * @param {string} type - attribute type (label, relation, etc.) - * @param {string} name - attribute name - * @returns {string|null} attribute value of given type and name or null if no such attribute exists. + * @param type - attribute type (label, relation, etc.) + * @param name - attribute name + * @returns attribute value of given type and name or null if no such attribute exists. */ - getOwnedAttributeValue(type, name) { + getOwnedAttributeValue(type: string, name: string): string | null { const attr = this.getOwnedAttribute(type, name); return attr ? 
attr.value : null; } /** - * @param {string} [name] - label name to filter - * @returns {BAttribute[]} all note's labels (attributes with type label), including inherited ones + * @param name - label name to filter + * @returns all note's labels (attributes with type label), including inherited ones */ - getLabels(name) { + getLabels(name?: string): BAttribute[] { return this.getAttributes(LABEL, name); } /** - * @param {string} [name] - label name to filter - * @returns {string[]} all note's label values, including inherited ones + * @param name - label name to filter + * @returns all note's label values, including inherited ones */ - getLabelValues(name) { + getLabelValues(name: string): string[] { return this.getLabels(name).map(l => l.value); } /** - * @param {string} [name] - label name to filter - * @returns {BAttribute[]} all note's labels (attributes with type label), excluding inherited ones + * @param name - label name to filter + * @returns all note's labels (attributes with type label), excluding inherited ones */ - getOwnedLabels(name) { + getOwnedLabels(name: string): BAttribute[] { return this.getOwnedAttributes(LABEL, name); } /** - * @param {string} [name] - label name to filter - * @returns {string[]} all note's label values, excluding inherited ones + * @param name - label name to filter + * @returns all note's label values, excluding inherited ones */ - getOwnedLabelValues(name) { + getOwnedLabelValues(name: string): string[] { return this.getOwnedAttributes(LABEL, name).map(l => l.value); } /** - * @param {string} [name] - relation name to filter - * @returns {BAttribute[]} all note's relations (attributes with type relation), including inherited ones + * @param name - relation name to filter + * @returns all note's relations (attributes with type relation), including inherited ones */ - getRelations(name) { + getRelations(name?: string): BAttribute[] { return this.getAttributes(RELATION, name); } /** - * @param {string} [name] - relation name 
to filter - * @returns {BAttribute[]} all note's relations (attributes with type relation), excluding inherited ones + * @param name - relation name to filter + * @returns all note's relations (attributes with type relation), excluding inherited ones */ - getOwnedRelations(name) { + getOwnedRelations(name: string): BAttribute[] { return this.getOwnedAttributes(RELATION, name); } @@ -682,12 +676,12 @@ class BNote extends AbstractBeccaEntity { * Beware that the method must not create a copy of the array, but actually returns its internal array * (for performance reasons) * - * @param {string|null} [type] - (optional) attribute type to filter - * @param {string|null} [name] - (optional) attribute name to filter - * @param {string|null} [value] - (optional) attribute value to filter + * @param type - (optional) attribute type to filter + * @param name - (optional) attribute name to filter + * @param value - (optional) attribute value to filter * @returns {BAttribute[]} note's "owned" attributes - excluding inherited ones */ - getOwnedAttributes(type = null, name = null, value = null) { + getOwnedAttributes(type: string | null = null, name: string | null = null, value: string | null = null) { this.__validateTypeName(type, name); if (type && name && value !== undefined && value !== null) { @@ -712,7 +706,7 @@ class BNote extends AbstractBeccaEntity { * * This method can be significantly faster than the getAttribute() */ - getOwnedAttribute(type, name, value = null) { + getOwnedAttribute(type: string, name: string, value: string | null = null) { const attrs = this.getOwnedAttributes(type, name, value); return attrs.length > 0 ? 
attrs[0] : null; @@ -762,7 +756,7 @@ class BNote extends AbstractBeccaEntity { this.parents = this.parentBranches .map(branch => branch.parentNote) - .filter(note => !!note); + .filter(note => !!note) as BNote[]; } sortChildren() { @@ -776,7 +770,7 @@ class BNote extends AbstractBeccaEntity { const aBranch = becca.getBranchFromChildAndParent(a.noteId, this.noteId); const bBranch = becca.getBranchFromChildAndParent(b.noteId, this.noteId); - return (aBranch?.notePosition - bBranch?.notePosition) || 0; + return ((aBranch?.notePosition || 0) - (bBranch?.notePosition || 0)) || 0; }); } @@ -824,7 +818,7 @@ class BNote extends AbstractBeccaEntity { this.__ancestorCache = null; } - invalidateSubTree(path = []) { + invalidateSubTree(path: string[] = []) { if (path.includes(this.noteId)) { return; } @@ -864,11 +858,10 @@ class BNote extends AbstractBeccaEntity { return !!this.targetRelations.find(rel => rel.name === 'template' || rel.name === 'inherit'); } - /** @returns {BNote[]} */ - getSubtreeNotesIncludingTemplated() { - const set = new Set(); + getSubtreeNotesIncludingTemplated(): BNote[] { + const set = new Set(); - function inner(note) { + function inner(note: BNote) { // _hidden is not counted as subtree for the purpose of inheritance if (set.has(note) || note.noteId === '_hidden') { return; @@ -896,46 +889,45 @@ class BNote extends AbstractBeccaEntity { return Array.from(set); } - /** @returns {BNote[]} */ - getSearchResultNotes() { + getSearchResultNotes(): BNote[] { if (this.type !== 'search') { return []; } try { - const searchService = require('../../services/search/services/search.js'); + const searchService = require('../../services/search/services/search'); const {searchResultNoteIds} = searchService.searchFromNote(this); const becca = this.becca; - return searchResultNoteIds + return (searchResultNoteIds as string[]) // TODO: remove cast once search is converted .map(resultNoteId => becca.notes[resultNoteId]) .filter(note => !!note); } - catch (e) { + catch 
(e: any) { log.error(`Could not resolve search note ${this.noteId}: ${e.message}`); return []; } } - /** - * @returns {{notes: BNote[], relationships: Array.<{parentNoteId: string, childNoteId: string}>}} - */ - getSubtree({includeArchived = true, includeHidden = false, resolveSearch = false} = {}) { - const noteSet = new Set(); - const relationships = []; // list of tuples parentNoteId -> childNoteId + getSubtree({includeArchived = true, includeHidden = false, resolveSearch = false} = {}): { + notes: BNote[], + relationships: Relationship[] + } { + const noteSet = new Set(); + const relationships: Relationship[] = []; // list of tuples parentNoteId -> childNoteId - function resolveSearchNote(searchNote) { + function resolveSearchNote(searchNote: BNote) { try { for (const resultNote of searchNote.getSearchResultNotes()) { addSubtreeNotesInner(resultNote, searchNote); } } - catch (e) { + catch (e: any) { log.error(`Could not resolve search note ${searchNote?.noteId}: ${e.message}`); } } - function addSubtreeNotesInner(note, parentNote = null) { + function addSubtreeNotesInner(note: BNote, parentNote: BNote | null = null) { if (note.noteId === '_hidden' && !includeHidden) { return; } @@ -1064,13 +1056,11 @@ class BNote extends AbstractBeccaEntity { return this.__ancestorCache; } - /** @returns {string[]} */ - getAncestorNoteIds() { + getAncestorNoteIds(): string[] { return this.getAncestors().map(note => note.noteId); } - /** @returns {boolean} */ - hasAncestor(ancestorNoteId) { + hasAncestor(ancestorNoteId: string): boolean { for (const ancestorNote of this.getAncestors()) { if (ancestorNote.noteId === ancestorNoteId) { return true; @@ -1089,10 +1079,10 @@ class BNote extends AbstractBeccaEntity { return this.targetRelations; } - /** @returns {BNote[]} - returns only notes which are templated, does not include their subtrees - * in effect returns notes which are influenced by note's non-inheritable attributes */ - getInheritingNotes() { - const arr = [this]; + /** 
@returns returns only notes which are templated, does not include their subtrees + * in effect returns notes which are influenced by note's non-inheritable attributes */ + getInheritingNotes(): BNote[] { + const arr: BNote[] = [this]; for (const targetRelation of this.targetRelations) { if (targetRelation.name === 'template' || targetRelation.name === 'inherit') { @@ -1107,7 +1097,7 @@ class BNote extends AbstractBeccaEntity { return arr; } - getDistanceToAncestor(ancestorNoteId) { + getDistanceToAncestor(ancestorNoteId: string) { if (this.noteId === ancestorNoteId) { return 0; } @@ -1121,14 +1111,13 @@ class BNote extends AbstractBeccaEntity { return minDistance; } - /** @returns {BRevision[]} */ - getRevisions() { - return sql.getRows("SELECT * FROM revisions WHERE noteId = ?", [this.noteId]) + getRevisions(): BRevision[] { + return sql.getRows("SELECT * FROM revisions WHERE noteId = ?", [this.noteId]) .map(row => new BRevision(row)); } /** @returns {BAttachment[]} */ - getAttachments(opts = {}) { + getAttachments(opts: AttachmentOpts = {}) { opts.includeContentLength = !!opts.includeContentLength; // from testing, it looks like calculating length does not make a difference in performance even on large-ish DB // given that we're always fetching attachments only for a specific note, we might just do it always @@ -1141,12 +1130,12 @@ class BNote extends AbstractBeccaEntity { ORDER BY position` : `SELECT * FROM attachments WHERE ownerId = ? AND isDeleted = 0 ORDER BY position`; - return sql.getRows(query, [this.noteId]) + return sql.getRows(query, [this.noteId]) .map(row => new BAttachment(row)); } /** @returns {BAttachment|null} */ - getAttachmentById(attachmentId, opts = {}) { + getAttachmentById(attachmentId: string, opts: AttachmentOpts = {}) { opts.includeContentLength = !!opts.includeContentLength; const query = opts.includeContentLength @@ -1156,13 +1145,12 @@ class BNote extends AbstractBeccaEntity { WHERE ownerId = ? AND attachmentId = ? 
AND isDeleted = 0` : `SELECT * FROM attachments WHERE ownerId = ? AND attachmentId = ? AND isDeleted = 0`; - return sql.getRows(query, [this.noteId, attachmentId]) + return sql.getRows(query, [this.noteId, attachmentId]) .map(row => new BAttachment(row))[0]; } - /** @returns {BAttachment[]} */ - getAttachmentsByRole(role) { - return sql.getRows(` + getAttachmentsByRole(role: string): BAttachment[] { + return sql.getRows(` SELECT attachments.* FROM attachments WHERE ownerId = ? @@ -1172,8 +1160,7 @@ class BNote extends AbstractBeccaEntity { .map(row => new BAttachment(row)); } - /** @returns {BAttachment} */ - getAttachmentByTitle(title) { + getAttachmentByTitle(title: string): BAttachment { // cannot use SQL to filter by title since it can be encrypted return this.getAttachments().filter(attachment => attachment.title === title)[0]; } @@ -1181,9 +1168,9 @@ class BNote extends AbstractBeccaEntity { /** * Gives all possible note paths leading to this note. Paths containing search note are ignored (could form cycles) * - * @returns {string[][]} - array of notePaths (each represented by array of noteIds constituting the particular note path) + * @returns array of notePaths (each represented by array of noteIds constituting the particular note path) */ - getAllNotePaths() { + getAllNotePaths(): string[][] { if (this.noteId === 'root') { return [['root']]; } @@ -1201,11 +1188,7 @@ class BNote extends AbstractBeccaEntity { return notePaths; } - /** - * @param {string} [hoistedNoteId='root'] - * @return {Array} - */ - getSortedNotePathRecords(hoistedNoteId = 'root') { + getSortedNotePathRecords(hoistedNoteId: string = 'root'): NotePathRecord[] { const isHoistedRoot = hoistedNoteId === 'root'; const notePaths = this.getAllNotePaths().map(path => ({ @@ -1233,20 +1216,18 @@ class BNote extends AbstractBeccaEntity { /** * Returns a note path considered to be the "best" * - * @param {string} [hoistedNoteId='root'] - * @return {string[]} array of noteIds constituting the 
particular note path + * @return array of noteIds constituting the particular note path */ - getBestNotePath(hoistedNoteId = 'root') { + getBestNotePath(hoistedNoteId: string = 'root'): string[] { return this.getSortedNotePathRecords(hoistedNoteId)[0]?.notePath; } /** * Returns a note path considered to be the "best" * - * @param {string} [hoistedNoteId='root'] - * @return {string} serialized note path (e.g. 'root/a1h315/js725h') + * @return serialized note path (e.g. 'root/a1h315/js725h') */ - getBestNotePathString(hoistedNoteId = 'root') { + getBestNotePathString(hoistedNoteId: string = 'root'): string { const notePath = this.getBestNotePath(hoistedNoteId); return notePath?.join("/"); @@ -1274,10 +1255,9 @@ class BNote extends AbstractBeccaEntity { } /** - * @param ancestorNoteId - * @returns {boolean} - true if ancestorNoteId occurs in at least one of the note's paths + * @returns true if ancestorNoteId occurs in at least one of the note's paths */ - isDescendantOfNote(ancestorNoteId) { + isDescendantOfNote(ancestorNoteId: string): boolean { const notePaths = this.getAllNotePaths(); return notePaths.some(path => path.includes(ancestorNoteId)); @@ -1286,11 +1266,11 @@ class BNote extends AbstractBeccaEntity { /** * Update's given attribute's value or creates it if it doesn't exist * - * @param {string} type - attribute type (label, relation, etc.) - * @param {string} name - attribute name - * @param {string} [value] - attribute value (optional) + * @param type - attribute type (label, relation, etc.) 
+ * @param name - attribute name + * @param value - attribute value (optional) */ - setAttribute(type, name, value) { + setAttribute(type: string, name: string, value?: string) { const attributes = this.getOwnedAttributes(); const attr = attributes.find(attr => attr.type === type && attr.name === name); @@ -1303,7 +1283,7 @@ class BNote extends AbstractBeccaEntity { } } else { - const BAttribute = require('./battribute.js'); + const BAttribute = require('./battribute'); new BAttribute({ noteId: this.noteId, @@ -1317,11 +1297,11 @@ class BNote extends AbstractBeccaEntity { /** * Removes given attribute name-value pair if it exists. * - * @param {string} type - attribute type (label, relation, etc.) - * @param {string} name - attribute name - * @param {string} [value] - attribute value (optional) + * @param type - attribute type (label, relation, etc.) + * @param name - attribute name + * @param value - attribute value (optional) */ - removeAttribute(type, name, value) { + removeAttribute(type: string, name: string, value?: string) { const attributes = this.getOwnedAttributes(); for (const attribute of attributes) { @@ -1335,15 +1315,12 @@ class BNote extends AbstractBeccaEntity { * Adds a new attribute to this note. The attribute is saved and returned. * See addLabel, addRelation for more specific methods. * - * @param {string} type - attribute type (label / relation) - * @param {string} name - name of the attribute, not including the leading ~/# - * @param {string} [value] - value of the attribute - text for labels, target note ID for relations; optional. - * @param {boolean} [isInheritable=false] - * @param {int|null} [position] - * @returns {BAttribute} + * @param type - attribute type (label / relation) + * @param name - name of the attribute, not including the leading ~/# + * @param value - value of the attribute - text for labels, target note ID for relations; optional. 
*/ - addAttribute(type, name, value = "", isInheritable = false, position = null) { - const BAttribute = require('./battribute.js'); + addAttribute(type: string, name: string, value: string = "", isInheritable: boolean = false, position: number | null = null): BAttribute { + const BAttribute = require('./battribute'); return new BAttribute({ noteId: this.noteId, @@ -1358,12 +1335,10 @@ class BNote extends AbstractBeccaEntity { /** * Adds a new label to this note. The label attribute is saved and returned. * - * @param {string} name - name of the label, not including the leading # - * @param {string} [value] - text value of the label; optional - * @param {boolean} [isInheritable=false] - * @returns {BAttribute} + * @param name - name of the label, not including the leading # + * @param value - text value of the label; optional */ - addLabel(name, value = "", isInheritable = false) { + addLabel(name: string, value: string = "", isInheritable: boolean = false): BAttribute { return this.addAttribute(LABEL, name, value, isInheritable); } @@ -1371,24 +1346,21 @@ class BNote extends AbstractBeccaEntity { * Adds a new relation to this note. The relation attribute is saved and * returned. * - * @param {string} name - name of the relation, not including the leading ~ - * @param {string} targetNoteId - * @param {boolean} [isInheritable=false] - * @returns {BAttribute} + * @param name - name of the relation, not including the leading ~ */ - addRelation(name, targetNoteId, isInheritable = false) { + addRelation(name: string, targetNoteId: string, isInheritable: boolean = false): BAttribute { return this.addAttribute(RELATION, name, targetNoteId, isInheritable); } /** * Based on enabled, the attribute is either set or removed. * - * @param {string} type - attribute type ('relation', 'label' etc.) 
- * @param {boolean} enabled - toggle On or Off - * @param {string} name - attribute name - * @param {string} [value] - attribute value (optional) + * @param type - attribute type ('relation', 'label' etc.) + * @param enabled - toggle On or Off + * @param name - attribute name + * @param value - attribute value (optional) */ - toggleAttribute(type, enabled, name, value) { + toggleAttribute(type: string, enabled: boolean, name: string, value?: string) { if (enabled) { this.setAttribute(type, name, value); } @@ -1400,76 +1372,84 @@ class BNote extends AbstractBeccaEntity { /** * Based on enabled, label is either set or removed. * - * @param {boolean} enabled - toggle On or Off - * @param {string} name - label name - * @param {string} [value] - label value (optional) + * @param enabled - toggle On or Off + * @param name - label name + * @param value - label value (optional) */ - toggleLabel(enabled, name, value) { return this.toggleAttribute(LABEL, enabled, name, value); } + toggleLabel(enabled: boolean, name: string, value?: string) { + return this.toggleAttribute(LABEL, enabled, name, value); + } /** * Based on enabled, relation is either set or removed. 
* - * @param {boolean} enabled - toggle On or Off - * @param {string} name - relation name - * @param {string} [value] - relation value (noteId) + * @param enabled - toggle On or Off + * @param name - relation name + * @param value - relation value (noteId) */ - toggleRelation(enabled, name, value) { return this.toggleAttribute(RELATION, enabled, name, value); } + toggleRelation(enabled: boolean, name: string, value?: string) { + return this.toggleAttribute(RELATION, enabled, name, value); + } /** * Update's given label's value or creates it if it doesn't exist * - * @param {string} name - label name - * @param {string} [value] - label value + * @param name - label name + * @param value label value */ - setLabel(name, value) { return this.setAttribute(LABEL, name, value); } + setLabel(name: string, value?: string) { + return this.setAttribute(LABEL, name, value); + } /** * Update's given relation's value or creates it if it doesn't exist * - * @param {string} name - relation name - * @param {string} value - relation value (noteId) + * @param name - relation name + * @param value - relation value (noteId) */ - setRelation(name, value) { return this.setAttribute(RELATION, name, value); } + setRelation(name: string, value?: string) { + return this.setAttribute(RELATION, name, value); + } /** * Remove label name-value pair, if it exists. * - * @param {string} name - label name - * @param {string} [value] - label value + * @param name - label name + * @param value - label value */ - removeLabel(name, value) { return this.removeAttribute(LABEL, name, value); } + removeLabel(name: string, value?: string) { + return this.removeAttribute(LABEL, name, value); + } /** * Remove the relation name-value pair, if it exists. 
* - * @param {string} name - relation name - * @param {string} [value] - relation value (noteId) + * @param name - relation name + * @param value - relation value (noteId) */ - removeRelation(name, value) { return this.removeAttribute(RELATION, name, value); } + removeRelation(name: string, value?: string) { + return this.removeAttribute(RELATION, name, value); + } - searchNotesInSubtree(searchString) { - const searchService = require('../../services/search/services/search.js'); + searchNotesInSubtree(searchString: string) { + const searchService = require('../../services/search/services/search'); return searchService.searchNotes(searchString); } - searchNoteInSubtree(searchString) { + searchNoteInSubtree(searchString: string) { return this.searchNotesInSubtree(searchString)[0]; } - /** - * @param parentNoteId - * @returns {{success: boolean, message: string, branchId: string, notePath: string}} - */ - cloneTo(parentNoteId) { - const cloningService = require('../../services/cloning.js'); + cloneTo(parentNoteId: string) { + const cloningService = require('../../services/cloning'); - const branch = this.becca.getNote(parentNoteId).getParentBranches()[0]; + const branch = this.becca.getNote(parentNoteId)?.getParentBranches()[0]; - return cloningService.cloneNoteToBranch(this.noteId, branch.branchId); + return cloningService.cloneNoteToBranch(this.noteId, branch?.branchId); } - isEligibleForConversionToAttachment(opts = {autoConversion: false}) { + isEligibleForConversionToAttachment(opts: ConvertOpts = { autoConversion: false }) { if (this.type !== 'image' || !this.isContentAvailable() || this.hasChildren() || this.getParentBranches().length !== 1) { return false; } @@ -1507,13 +1487,9 @@ class BNote extends AbstractBeccaEntity { * * In the future, this functionality might get more generic and some of the requirements relaxed. * - * @params {Object} [opts] - * @params {bolean} [opts.autoConversion=false} if true, the action is not triggered by user, but e.g. 
by migration, - * and only perfect candidates will be migrated - * - * @returns {BAttachment|null} - null if note is not eligible for conversion + * @returns null if note is not eligible for conversion */ - convertToParentAttachment(opts = {autoConversion: false}) { + convertToParentAttachment(opts: ConvertOpts = { autoConversion: false }): BAttachment | null { if (!this.isEligibleForConversionToAttachment(opts)) { return null; } @@ -1533,11 +1509,15 @@ class BNote extends AbstractBeccaEntity { const oldNoteUrl = `api/images/${this.noteId}/`; const newAttachmentUrl = `api/attachments/${attachment.attachmentId}/image/`; + if (typeof parentContent !== "string") { + throw new Error("Unable to convert image note into attachment because parent note does not have a string content."); + } + const fixedContent = utils.replaceAll(parentContent, oldNoteUrl, newAttachmentUrl); parentNote.setContent(fixedContent); - const noteService = require('../../services/notes.js'); + const noteService = require('../../services/notes'); noteService.asyncPostProcessContent(parentNote, fixedContent); // to mark an unused attachment for deletion this.deleteNote(); @@ -1548,10 +1528,9 @@ class BNote extends AbstractBeccaEntity { /** * (Soft) delete a note and all its descendants. 
* - * @param {string} [deleteId=null] - optional delete identified - * @param {TaskContext} [taskContext] + * @param deleteId - optional delete identified */ - deleteNote(deleteId = null, taskContext = null) { + deleteNote(deleteId: string | null = null, taskContext: TaskContext | null = null) { if (this.isDeleted) { return; } @@ -1565,7 +1544,7 @@ class BNote extends AbstractBeccaEntity { } // needs to be run before branches and attributes are deleted and thus attached relations disappear - const handlers = require('../../services/handlers.js'); + const handlers = require('../../services/handlers'); handlers.runAttachedRelations(this, 'runOnNoteDeletion', this); taskContext.noteDeletionHandlerTriggered = true; @@ -1577,12 +1556,12 @@ class BNote extends AbstractBeccaEntity { decrypt() { if (this.isProtected && !this.isDecrypted && protectedSessionService.isProtectedSessionAvailable()) { try { - this.title = protectedSessionService.decryptString(this.title); + this.title = protectedSessionService.decryptString(this.title) || ""; this.__flatTextCache = null; this.isDecrypted = true; } - catch (e) { + catch (e: any) { log.error(`Could not decrypt note ${this.noteId}: ${e.message} ${e.stack}`); } } @@ -1627,10 +1606,15 @@ class BNote extends AbstractBeccaEntity { for (const noteAttachment of this.getAttachments()) { const revisionAttachment = noteAttachment.copy(); + + if (!revision.revisionId) { + throw new Error("Revision ID is missing."); + } + revisionAttachment.ownerId = revision.revisionId; - revisionAttachment.setContent(noteAttachment.getContent(), {forceSave: true}); + revisionAttachment.setContent(noteAttachment.getContent(), { forceSave: true }); - if (this.type === 'text') { + if (this.type === 'text' && typeof noteContent === "string") { // content is rewritten to point to the revision attachments noteContent = noteContent.replaceAll(`attachments/${noteAttachment.attachmentId}`, `attachments/${revisionAttachment.attachmentId}`); @@ -1651,14 +1635,14 @@ 
class BNote extends AbstractBeccaEntity { * Supported values are either 'attachmentId' (default) or 'title' * @returns {BAttachment} */ - saveAttachment({attachmentId, role, mime, title, content, position}, matchBy = 'attachmentId') { + saveAttachment({attachmentId, role, mime, title, content, position}: AttachmentRow, matchBy = 'attachmentId') { if (!['attachmentId', 'title'].includes(matchBy)) { throw new Error(`Unsupported value '${matchBy}' for matchBy param, has to be either 'attachmentId' or 'title'.`); } let attachment; - if (matchBy === 'title') { + if (matchBy === 'title' && title) { attachment = this.getAttachmentByTitle(title); } else if (matchBy === 'attachmentId' && attachmentId) { attachment = this.becca.getAttachmentOrThrow(attachmentId); @@ -1673,7 +1657,10 @@ class BNote extends AbstractBeccaEntity { position }); - content = content || ""; + if (!content) { + throw new Error("Attempted to save an attachment with no content."); + } + attachment.setContent(content, {forceSave: true}); return attachment; @@ -1695,7 +1682,7 @@ class BNote extends AbstractBeccaEntity { getPojo() { return { noteId: this.noteId, - title: this.title, + title: this.title || undefined, isProtected: this.isProtected, type: this.type, mime: this.mime, @@ -1712,8 +1699,8 @@ class BNote extends AbstractBeccaEntity { const pojo = this.getPojo(); if (pojo.isProtected) { - if (this.isDecrypted) { - pojo.title = protectedSessionService.encrypt(pojo.title); + if (this.isDecrypted && pojo.title) { + pojo.title = protectedSessionService.encrypt(pojo.title) || undefined; } else { // updating protected note outside of protected session means we will keep original ciphertexts @@ -1725,4 +1712,4 @@ class BNote extends AbstractBeccaEntity { } } -module.exports = BNote; +export = BNote; diff --git a/src/becca/entities/boption.js b/src/becca/entities/boption.ts similarity index 66% rename from src/becca/entities/boption.js rename to src/becca/entities/boption.ts index 6c2c3edab4..48abee024c 
100644 --- a/src/becca/entities/boption.js +++ b/src/becca/entities/boption.ts @@ -1,33 +1,34 @@ "use strict"; -const dateUtils = require('../../services/date_utils.js'); -const AbstractBeccaEntity = require('./abstract_becca_entity.js'); +import dateUtils = require('../../services/date_utils'); +import AbstractBeccaEntity = require('./abstract_becca_entity'); +import { OptionRow } from './rows'; /** * Option represents a name-value pair, either directly configurable by the user or some system property. - * - * @extends AbstractBeccaEntity */ -class BOption extends AbstractBeccaEntity { +class BOption extends AbstractBeccaEntity { static get entityName() { return "options"; } static get primaryKeyName() { return "name"; } static get hashedProperties() { return ["name", "value"]; } - constructor(row) { + name!: string; + value!: string; + isSynced!: boolean; + + constructor(row?: OptionRow) { super(); - this.updateFromRow(row); + if (row) { + this.updateFromRow(row); + } this.becca.options[this.name] = this; } - updateFromRow(row) { - /** @type {string} */ + updateFromRow(row: OptionRow) { this.name = row.name; - /** @type {string} */ this.value = row.value; - /** @type {boolean} */ this.isSynced = !!row.isSynced; - /** @type {string} */ this.utcDateModified = row.utcDateModified; } @@ -47,4 +48,4 @@ class BOption extends AbstractBeccaEntity { } } -module.exports = BOption; +export = BOption; diff --git a/src/becca/entities/brecent_note.js b/src/becca/entities/brecent_note.ts similarity index 50% rename from src/becca/entities/brecent_note.js rename to src/becca/entities/brecent_note.ts index 300945b9a0..c19a836034 100644 --- a/src/becca/entities/brecent_note.js +++ b/src/becca/entities/brecent_note.ts @@ -1,25 +1,31 @@ "use strict"; -const dateUtils = require('../../services/date_utils.js'); -const AbstractBeccaEntity = require('./abstract_becca_entity.js'); +import { RecentNoteRow } from "./rows"; + +import dateUtils = require('../../services/date_utils'); +import 
AbstractBeccaEntity = require('./abstract_becca_entity'); /** * RecentNote represents recently visited note. - * - * @extends AbstractBeccaEntity */ -class BRecentNote extends AbstractBeccaEntity { +class BRecentNote extends AbstractBeccaEntity { static get entityName() { return "recent_notes"; } static get primaryKeyName() { return "noteId"; } + static get hashedProperties() { return ["noteId", "notePath"]; } + + noteId!: string; + notePath!: string; + utcDateCreated!: string; - constructor(row) { + constructor(row: RecentNoteRow) { super(); - /** @type {string} */ + this.updateFromRow(row); + } + + updateFromRow(row: RecentNoteRow): void { this.noteId = row.noteId; - /** @type {string} */ this.notePath = row.notePath; - /** @type {string} */ this.utcDateCreated = row.utcDateCreated || dateUtils.utcNowDateTime(); } @@ -32,4 +38,4 @@ class BRecentNote extends AbstractBeccaEntity { } } -module.exports = BRecentNote; +export = BRecentNote; diff --git a/src/becca/entities/brevision.js b/src/becca/entities/brevision.ts similarity index 68% rename from src/becca/entities/brevision.js rename to src/becca/entities/brevision.ts index 5a79f3d8f0..101506858b 100644 --- a/src/becca/entities/brevision.js +++ b/src/becca/entities/brevision.ts @@ -1,68 +1,79 @@ "use strict"; -const protectedSessionService = require('../../services/protected_session.js'); -const utils = require('../../services/utils.js'); -const dateUtils = require('../../services/date_utils.js'); -const becca = require('../becca.js'); -const AbstractBeccaEntity = require('./abstract_becca_entity.js'); -const sql = require('../../services/sql.js'); -const BAttachment = require('./battachment.js'); +import protectedSessionService = require('../../services/protected_session'); +import utils = require('../../services/utils'); +import dateUtils = require('../../services/date_utils'); +import becca = require('../becca'); +import AbstractBeccaEntity = require('./abstract_becca_entity'); +import sql = 
require('../../services/sql'); +import BAttachment = require('./battachment'); +import { AttachmentRow, RevisionRow } from './rows'; + +interface ContentOpts { + /** will also save this BRevision entity */ + forceSave?: boolean; +} + +interface GetByIdOpts { + includeContentLength?: boolean; +} /** * Revision represents a snapshot of note's title and content at some point in the past. * It's used for seamless note versioning. - * - * @extends AbstractBeccaEntity */ -class BRevision extends AbstractBeccaEntity { +class BRevision extends AbstractBeccaEntity { static get entityName() { return "revisions"; } static get primaryKeyName() { return "revisionId"; } static get hashedProperties() { return ["revisionId", "noteId", "title", "isProtected", "dateLastEdited", "dateCreated", "utcDateLastEdited", "utcDateCreated", "utcDateModified", "blobId"]; } - constructor(row, titleDecrypted = false) { + revisionId?: string; + noteId!: string; + type!: string; + mime!: string; + isProtected!: boolean; + title!: string; + blobId?: string; + dateLastEdited?: string; + dateCreated!: string; + utcDateLastEdited?: string; + utcDateCreated!: string; + contentLength?: number; + content?: string; + + constructor(row: RevisionRow, titleDecrypted = false) { super(); - /** @type {string} */ + this.updateFromRow(row); + if (this.isProtected && !titleDecrypted) { + const decryptedTitle = protectedSessionService.isProtectedSessionAvailable() ? 
protectedSessionService.decryptString(this.title) : null; + this.title = decryptedTitle || "[protected]"; + } + } + + updateFromRow(row: RevisionRow) { this.revisionId = row.revisionId; - /** @type {string} */ this.noteId = row.noteId; - /** @type {string} */ this.type = row.type; - /** @type {string} */ this.mime = row.mime; - /** @type {boolean} */ this.isProtected = !!row.isProtected; - /** @type {string} */ this.title = row.title; - /** @type {string} */ this.blobId = row.blobId; - /** @type {string} */ this.dateLastEdited = row.dateLastEdited; - /** @type {string} */ this.dateCreated = row.dateCreated; - /** @type {string} */ this.utcDateLastEdited = row.utcDateLastEdited; - /** @type {string} */ this.utcDateCreated = row.utcDateCreated; - /** @type {string} */ this.utcDateModified = row.utcDateModified; - /** @type {int} */ this.contentLength = row.contentLength; - - if (this.isProtected && !titleDecrypted) { - this.title = protectedSessionService.isProtectedSessionAvailable() - ? protectedSessionService.decryptString(this.title) - : "[protected]"; - } } getNote() { return becca.notes[this.noteId]; } - /** @returns {boolean} true if the note has string content (not binary) */ - hasStringContent() { + /** @returns true if the note has string content (not binary) */ + hasStringContent(): boolean { return utils.isStringNote(this.type, this.mime); } @@ -80,16 +91,14 @@ class BRevision extends AbstractBeccaEntity { * * This is the same approach as is used for Note's content. */ - - /** @returns {string|Buffer} */ - getContent() { - return this._getContent(); + // TODO: initial declaration included Buffer, but everywhere it's treated as a string. 
+ getContent(): string { + return this._getContent() as string; } /** - * @returns {*} * @throws Error in case of invalid JSON */ - getJsonContent() { + getJsonContent(): {} | null { const content = this.getContent(); if (!content || !content.trim()) { @@ -99,8 +108,8 @@ class BRevision extends AbstractBeccaEntity { return JSON.parse(content); } - /** @returns {*|null} valid object or null if the content cannot be parsed as JSON */ - getJsonContentSafely() { + /** @returns valid object or null if the content cannot be parsed as JSON */ + getJsonContentSafely(): {} | null { try { return this.getJsonContent(); } @@ -109,18 +118,12 @@ class BRevision extends AbstractBeccaEntity { } } - /** - * @param content - * @param {object} [opts] - * @param {object} [opts.forceSave=false] - will also save this BRevision entity - */ - setContent(content, opts) { + setContent(content: string | Buffer, opts: ContentOpts = {}) { this._setContent(content, opts); } - /** @returns {BAttachment[]} */ - getAttachments() { - return sql.getRows(` + getAttachments(): BAttachment[] { + return sql.getRows(` SELECT attachments.* FROM attachments WHERE ownerId = ? @@ -128,8 +131,7 @@ class BRevision extends AbstractBeccaEntity { .map(row => new BAttachment(row)); } - /** @returns {BAttachment|null} */ - getAttachmentById(attachmentId, opts = {}) { + getAttachmentById(attachmentId: String, opts: GetByIdOpts = {}): BAttachment | null { opts.includeContentLength = !!opts.includeContentLength; const query = opts.includeContentLength @@ -139,13 +141,12 @@ class BRevision extends AbstractBeccaEntity { WHERE ownerId = ? AND attachmentId = ? AND isDeleted = 0` : `SELECT * FROM attachments WHERE ownerId = ? AND attachmentId = ? 
AND isDeleted = 0`; - return sql.getRows(query, [this.revisionId, attachmentId]) + return sql.getRows(query, [this.revisionId, attachmentId]) .map(row => new BAttachment(row))[0]; } - /** @returns {BAttachment[]} */ - getAttachmentsByRole(role) { - return sql.getRows(` + getAttachmentsByRole(role: string): BAttachment[] { + return sql.getRows(` SELECT attachments.* FROM attachments WHERE ownerId = ? @@ -155,8 +156,7 @@ class BRevision extends AbstractBeccaEntity { .map(row => new BAttachment(row)); } - /** @returns {BAttachment} */ - getAttachmentByTitle(title) { + getAttachmentByTitle(title: string): BAttachment { // cannot use SQL to filter by title since it can be encrypted return this.getAttachments().filter(attachment => attachment.title === title)[0]; } @@ -181,7 +181,7 @@ class BRevision extends AbstractBeccaEntity { type: this.type, mime: this.mime, isProtected: this.isProtected, - title: this.title, + title: this.title || undefined, blobId: this.blobId, dateLastEdited: this.dateLastEdited, dateCreated: this.dateCreated, @@ -200,7 +200,7 @@ class BRevision extends AbstractBeccaEntity { if (pojo.isProtected) { if (protectedSessionService.isProtectedSessionAvailable()) { - pojo.title = protectedSessionService.encrypt(this.title); + pojo.title = protectedSessionService.encrypt(this.title) || undefined; } else { // updating protected note outside of protected session means we will keep original ciphertexts @@ -212,4 +212,4 @@ class BRevision extends AbstractBeccaEntity { } } -module.exports = BRevision; +export = BRevision; diff --git a/src/becca/entities/rows.ts b/src/becca/entities/rows.ts new file mode 100644 index 0000000000..4428b6dde9 --- /dev/null +++ b/src/becca/entities/rows.ts @@ -0,0 +1,110 @@ +// TODO: Booleans should probably be numbers instead (as SQLite does not have booleans.); + +export interface AttachmentRow { + attachmentId?: string; + ownerId?: string; + role: string; + mime: string; + title: string; + position?: number; + blobId?: string; 
+ isProtected?: boolean; + dateModified?: string; + utcDateModified?: string; + utcDateScheduledForErasureSince?: string; + contentLength?: number; + content?: Buffer | string; +} + +export interface RevisionRow { + revisionId?: string; + noteId: string; + type: string; + mime: string; + isProtected?: boolean; + title: string; + blobId?: string; + dateLastEdited?: string; + dateCreated: string; + utcDateLastEdited?: string; + utcDateCreated: string; + utcDateModified: string; + contentLength?: number; +} + +export interface RecentNoteRow { + noteId: string; + notePath: string; + utcDateCreated?: string; +} + +export interface OptionRow { + name: string; + value: string; + isSynced: boolean; + utcDateModified: string; +} + +export interface EtapiTokenRow { + etapiTokenId?: string; + name: string; + tokenHash: string; + utcDateCreated?: string; + utcDateModified?: string; + isDeleted?: boolean; +} + +export interface BlobRow { + blobId: string; + content: string | Buffer; + contentLength: number; + dateModified: string; + utcDateModified: string; +} + +export type AttributeType = "label" | "relation" | "label-definition" | "relation-definition"; + +export interface AttributeRow { + attributeId?: string; + noteId?: string; + type: AttributeType; + name: string; + position?: number; + value?: string; + isInheritable?: boolean; + utcDateModified?: string; +} + +export interface BranchRow { + branchId?: string; + noteId: string; + parentNoteId: string; + prefix?: string | null; + notePosition?: number | null; + isExpanded?: boolean; + isDeleted?: boolean; + utcDateModified?: string; +} + +/** + * There are many different Note types, some of which are entirely opaque to the + * end user. Those types should be used only for checking against, they are + * not for direct use. 
+ */ +export type NoteType = ("file" | "image" | "search" | "noteMap" | "launcher" | "doc" | "contentWidget" | "text" | "relationMap" | "render" | "canvas" | "mermaid" | "book" | "webView" | "code"); + +export interface NoteRow { + noteId: string; + deleteId: string; + title: string; + type: NoteType; + mime: string; + isProtected: boolean; + isDeleted: boolean; + blobId: string; + dateCreated: string; + dateModified: string; + utcDateCreated: string; + utcDateModified: string; + content?: string; +} diff --git a/src/becca/entity_constructor.js b/src/becca/entity_constructor.js deleted file mode 100644 index 3d66ed4da4..0000000000 --- a/src/becca/entity_constructor.js +++ /dev/null @@ -1,33 +0,0 @@ -const BAttachment = require('./entities/battachment.js'); -const BAttribute = require('./entities/battribute.js'); -const BBlob = require('./entities/bblob.js'); -const BBranch = require('./entities/bbranch.js'); -const BEtapiToken = require('./entities/betapi_token.js'); -const BNote = require('./entities/bnote.js'); -const BOption = require('./entities/boption.js'); -const BRecentNote = require('./entities/brecent_note.js'); -const BRevision = require('./entities/brevision.js'); - -const ENTITY_NAME_TO_ENTITY = { - "attachments": BAttachment, - "attributes": BAttribute, - "blobs": BBlob, - "branches": BBranch, - "etapi_tokens": BEtapiToken, - "notes": BNote, - "options": BOption, - "recent_notes": BRecentNote, - "revisions": BRevision -}; - -function getEntityFromEntityName(entityName) { - if (!(entityName in ENTITY_NAME_TO_ENTITY)) { - throw new Error(`Entity for table '${entityName}' not found!`); - } - - return ENTITY_NAME_TO_ENTITY[entityName]; -} - -module.exports = { - getEntityFromEntityName -}; diff --git a/src/becca/entity_constructor.ts b/src/becca/entity_constructor.ts new file mode 100644 index 0000000000..01c51363ae --- /dev/null +++ b/src/becca/entity_constructor.ts @@ -0,0 +1,37 @@ +import { ConstructorData } from './becca-interface'; +import 
AbstractBeccaEntity = require('./entities/abstract_becca_entity'); +import BAttachment = require('./entities/battachment'); +import BAttribute = require('./entities/battribute'); +import BBlob = require('./entities/bblob'); +import BBranch = require('./entities/bbranch'); +import BEtapiToken = require('./entities/betapi_token'); +import BNote = require('./entities/bnote'); +import BOption = require('./entities/boption'); +import BRecentNote = require('./entities/brecent_note'); +import BRevision = require('./entities/brevision'); + +type EntityClass = new (row?: any) => AbstractBeccaEntity; + +const ENTITY_NAME_TO_ENTITY: Record & EntityClass> = { + "attachments": BAttachment, + "attributes": BAttribute, + "blobs": BBlob, + "branches": BBranch, + "etapi_tokens": BEtapiToken, + "notes": BNote, + "options": BOption, + "recent_notes": BRecentNote, + "revisions": BRevision +}; + +function getEntityFromEntityName(entityName: keyof typeof ENTITY_NAME_TO_ENTITY) { + if (!(entityName in ENTITY_NAME_TO_ENTITY)) { + throw new Error(`Entity for table '${entityName}' not found!`); + } + + return ENTITY_NAME_TO_ENTITY[entityName]; +} + +export = { + getEntityFromEntityName +}; diff --git a/src/becca/similarity.js b/src/becca/similarity.ts similarity index 91% rename from src/becca/similarity.js rename to src/becca/similarity.ts index 5be56804de..e6721d0df2 100644 --- a/src/becca/similarity.js +++ b/src/becca/similarity.ts @@ -1,8 +1,9 @@ -const becca = require('./becca.js'); -const log = require('../services/log.js'); -const beccaService = require('./becca_service.js'); -const dateUtils = require('../services/date_utils.js'); -const {JSDOM} = require("jsdom"); +import becca = require('./becca'); +import log = require('../services/log'); +import beccaService = require('./becca_service'); +import dateUtils = require('../services/date_utils'); +import { JSDOM } from "jsdom"; +import BNote = require('./entities/bnote'); const DEBUG = false; @@ -32,21 +33,25 @@ const 
IGNORED_ATTR_NAMES = [ "pageurl", ]; -function filterUrlValue(value) { +interface DateLimits { + minDate: string; + minExcludedDate: string; + maxExcludedDate: string; + maxDate: string; +} + +function filterUrlValue(value: string) { return value .replace(/https?:\/\//ig, "") .replace(/www.js\./ig, "") .replace(/(\.net|\.com|\.org|\.info|\.edu)/ig, ""); } -/** - * @param {BNote} note - */ -function buildRewardMap(note) { +function buildRewardMap(note: BNote) { // Need to use Map instead of object: https://github.com/zadam/trilium/issues/1895 const map = new Map(); - function addToRewardMap(text, rewardFactor) { + function addToRewardMap(text: string | undefined | null, rewardFactor: number) { if (!text) { return; } @@ -126,7 +131,7 @@ function buildRewardMap(note) { const content = note.getContent(); const dom = new JSDOM(content); - function addHeadingsToRewardMap(elName, rewardFactor) { + const addHeadingsToRewardMap = (elName: string, rewardFactor: number) => { for (const el of dom.window.document.querySelectorAll(elName)) { addToRewardMap(el.textContent, rewardFactor); } @@ -146,9 +151,9 @@ function buildRewardMap(note) { return map; } -const mimeCache = {}; +const mimeCache: Record = {}; -function trimMime(mime) { +function trimMime(mime: string) { if (!mime || mime === 'text/html') { return; } @@ -173,7 +178,7 @@ function trimMime(mime) { return mimeCache[mime]; } -function buildDateLimits(baseNote) { +function buildDateLimits(baseNote: BNote): DateLimits { const dateCreatedTs = dateUtils.parseDateTime(baseNote.utcDateCreated).getTime(); return { @@ -193,7 +198,7 @@ const WORD_BLACKLIST = [ "than", "then", "and", "either", "or", "neither", "nor", "both", "also" ]; -function splitToWords(text) { +function splitToWords(text: string) { let words = wordCache.get(text); if (!words) { @@ -221,13 +226,13 @@ function splitToWords(text) { * includeNoteLink and imageLink relation mean that notes are clearly related, but so clearly * that it doesn't actually need to be 
shown to the user. */ -function hasConnectingRelation(sourceNote, targetNote) { +function hasConnectingRelation(sourceNote: BNote, targetNote: BNote) { return sourceNote.getAttributes().find(attr => attr.type === 'relation' && ['includenotelink', 'imagelink'].includes(attr.name) && attr.value === targetNote.noteId); } -async function findSimilarNotes(noteId) { +async function findSimilarNotes(noteId: string) { const results = []; let i = 0; @@ -237,23 +242,23 @@ async function findSimilarNotes(noteId) { return []; } - let dateLimits; + let dateLimits: DateLimits; try { dateLimits = buildDateLimits(baseNote); } - catch (e) { + catch (e: any) { throw new Error(`Date limits failed with ${e.message}, entity: ${JSON.stringify(baseNote.getPojo())}`); } const rewardMap = buildRewardMap(baseNote); - let ancestorRewardCache = {}; + let ancestorRewardCache: Record = {}; const ancestorNoteIds = new Set(baseNote.getAncestors().map(note => note.noteId)); ancestorNoteIds.add(baseNote.noteId); let displayRewards = false; - function gatherRewards(text, factor = 1) { + function gatherRewards(text?: string | null, factor: number = 1) { if (!text) { return 0; } @@ -279,7 +284,7 @@ async function findSimilarNotes(noteId) { return counter; } - function gatherAncestorRewards(note) { + function gatherAncestorRewards(note?: BNote) { if (!note || ancestorNoteIds.has(note.noteId)) { return 0; } @@ -311,7 +316,7 @@ async function findSimilarNotes(noteId) { return ancestorRewardCache[note.noteId]; } - function computeScore(candidateNote) { + function computeScore(candidateNote: BNote) { let score = gatherRewards(trimMime(candidateNote.mime)) + gatherAncestorRewards(candidateNote); @@ -451,11 +456,11 @@ async function findSimilarNotes(noteId) { * see https://snyk.io/blog/nodejs-how-even-quick-async-functions-can-block-the-event-loop-starve-io/ */ function setImmediatePromise() { - return new Promise((resolve) => { + return new Promise((resolve) => { setTimeout(() => resolve(), 0); }); } 
-module.exports = { +export = { findSimilarNotes }; diff --git a/src/errors/not_found_error.js b/src/errors/not_found_error.js deleted file mode 100644 index af746b82cd..0000000000 --- a/src/errors/not_found_error.js +++ /dev/null @@ -1,7 +0,0 @@ -class NotFoundError { - constructor(message) { - this.message = message; - } -} - -module.exports = NotFoundError; \ No newline at end of file diff --git a/src/errors/not_found_error.ts b/src/errors/not_found_error.ts new file mode 100644 index 0000000000..f765e11f92 --- /dev/null +++ b/src/errors/not_found_error.ts @@ -0,0 +1,9 @@ +class NotFoundError { + message: string; + + constructor(message: string) { + this.message = message; + } +} + +export = NotFoundError; \ No newline at end of file diff --git a/src/errors/validation_error.js b/src/errors/validation_error.js deleted file mode 100644 index 1c9425669a..0000000000 --- a/src/errors/validation_error.js +++ /dev/null @@ -1,7 +0,0 @@ -class ValidationError { - constructor(message) { - this.message = message; - } -} - -module.exports = ValidationError; \ No newline at end of file diff --git a/src/errors/validation_error.ts b/src/errors/validation_error.ts new file mode 100644 index 0000000000..8b872bcbe1 --- /dev/null +++ b/src/errors/validation_error.ts @@ -0,0 +1,9 @@ +class ValidationError { + message: string; + + constructor(message: string) { + this.message = message; + } +} + +export = ValidationError; \ No newline at end of file diff --git a/src/etapi/app_info.js b/src/etapi/app_info.js index f82287170f..20c1381f09 100644 --- a/src/etapi/app_info.js +++ b/src/etapi/app_info.js @@ -1,5 +1,5 @@ -const appInfo = require('../services/app_info.js'); -const eu = require('./etapi_utils.js'); +const appInfo = require('../services/app_info'); +const eu = require('./etapi_utils'); function register(router) { eu.route(router, 'get', '/etapi/app-info', (req, res, next) => { diff --git a/src/etapi/attachments.js b/src/etapi/attachments.js index 586da7a21e..8a38020617 100644 
--- a/src/etapi/attachments.js +++ b/src/etapi/attachments.js @@ -1,8 +1,8 @@ -const becca = require('../becca/becca.js'); -const eu = require('./etapi_utils.js'); +const becca = require('../becca/becca'); +const eu = require('./etapi_utils'); const mappers = require('./mappers.js'); const v = require('./validators.js'); -const utils = require('../services/utils.js'); +const utils = require('../services/utils'); function register(router) { const ALLOWED_PROPERTIES_FOR_CREATE_ATTACHMENT = { diff --git a/src/etapi/attributes.js b/src/etapi/attributes.js index 131e85ae8b..17ca00c07e 100644 --- a/src/etapi/attributes.js +++ b/src/etapi/attributes.js @@ -1,7 +1,7 @@ -const becca = require('../becca/becca.js'); -const eu = require('./etapi_utils.js'); +const becca = require('../becca/becca'); +const eu = require('./etapi_utils'); const mappers = require('./mappers.js'); -const attributeService = require('../services/attributes.js'); +const attributeService = require('../services/attributes'); const v = require('./validators.js'); function register(router) { diff --git a/src/etapi/auth.js b/src/etapi/auth.js index 96e4e2fa07..7a3b258d8d 100644 --- a/src/etapi/auth.js +++ b/src/etapi/auth.js @@ -1,7 +1,7 @@ -const becca = require('../becca/becca.js'); -const eu = require('./etapi_utils.js'); -const passwordEncryptionService = require('../services/encryption/password_encryption.js'); -const etapiTokenService = require('../services/etapi_tokens.js'); +const becca = require('../becca/becca'); +const eu = require('./etapi_utils'); +const passwordEncryptionService = require('../services/encryption/password_encryption'); +const etapiTokenService = require('../services/etapi_tokens'); function register(router, loginMiddleware) { eu.NOT_AUTHENTICATED_ROUTE(router, 'post', '/etapi/auth/login', loginMiddleware, (req, res, next) => { diff --git a/src/etapi/backup.js b/src/etapi/backup.js index b9ca75204c..7900570c47 100644 --- a/src/etapi/backup.js +++ b/src/etapi/backup.js @@ -1,5 
+1,5 @@ -const eu = require('./etapi_utils.js'); -const backupService = require('../services/backup.js'); +const eu = require('./etapi_utils'); +const backupService = require('../services/backup'); function register(router) { eu.route(router, 'put', '/etapi/backup/:backupName', async (req, res, next) => { diff --git a/src/etapi/branches.js b/src/etapi/branches.js index 1d11144f96..e0337e5cb8 100644 --- a/src/etapi/branches.js +++ b/src/etapi/branches.js @@ -1,8 +1,8 @@ -const becca = require('../becca/becca.js'); -const eu = require('./etapi_utils.js'); +const becca = require('../becca/becca'); +const eu = require('./etapi_utils'); const mappers = require('./mappers.js'); -const BBranch = require('../becca/entities/bbranch.js'); -const entityChangesService = require('../services/entity_changes.js'); +const BBranch = require('../becca/entities/bbranch'); +const entityChangesService = require('../services/entity_changes'); const v = require('./validators.js'); function register(router) { diff --git a/src/etapi/etapi_utils.js b/src/etapi/etapi_utils.js index d3699b6648..b5928f4d75 100644 --- a/src/etapi/etapi_utils.js +++ b/src/etapi/etapi_utils.js @@ -1,9 +1,9 @@ -const cls = require('../services/cls.js'); -const sql = require('../services/sql.js'); -const log = require('../services/log.js'); -const becca = require('../becca/becca.js'); -const etapiTokenService = require('../services/etapi_tokens.js'); -const config = require('../services/config.js'); +const cls = require('../services/cls'); +const sql = require('../services/sql'); +const log = require('../services/log'); +const becca = require('../becca/becca'); +const etapiTokenService = require('../services/etapi_tokens'); +const config = require('../services/config'); const GENERIC_CODE = "GENERIC"; const noAuthentication = config.General && config.General.noAuthentication === true; diff --git a/src/etapi/notes.js b/src/etapi/notes.js index 69c00e795c..3ca0243672 100644 --- a/src/etapi/notes.js +++ 
b/src/etapi/notes.js @@ -1,14 +1,14 @@ -const becca = require('../becca/becca.js'); -const utils = require('../services/utils.js'); -const eu = require('./etapi_utils.js'); +const becca = require('../becca/becca'); +const utils = require('../services/utils'); +const eu = require('./etapi_utils'); const mappers = require('./mappers.js'); -const noteService = require('../services/notes.js'); -const TaskContext = require('../services/task_context.js'); +const noteService = require('../services/notes'); +const TaskContext = require('../services/task_context'); const v = require('./validators.js'); -const searchService = require('../services/search/services/search.js'); -const SearchContext = require('../services/search/search_context.js'); -const zipExportService = require('../services/export/zip.js'); -const zipImportService = require('../services/import/zip.js'); +const searchService = require('../services/search/services/search'); +const SearchContext = require('../services/search/search_context'); +const zipExportService = require('../services/export/zip'); +const zipImportService = require('../services/import/zip'); function register(router) { eu.route(router, 'get', '/etapi/notes', (req, res, next) => { diff --git a/src/etapi/special_notes.js b/src/etapi/special_notes.js index 64f97d07bc..68ef75fae4 100644 --- a/src/etapi/special_notes.js +++ b/src/etapi/special_notes.js @@ -1,6 +1,6 @@ -const specialNotesService = require('../services/special_notes.js'); -const dateNotesService = require('../services/date_notes.js'); -const eu = require('./etapi_utils.js'); +const specialNotesService = require('../services/special_notes'); +const dateNotesService = require('../services/date_notes'); +const eu = require('./etapi_utils'); const mappers = require('./mappers.js'); const getDateInvalidError = date => new eu.EtapiError(400, "DATE_INVALID", `Date "${date}" is not valid.`); @@ -17,7 +17,7 @@ function isValidDate(date) { function register(router) { eu.route(router, 
'get', '/etapi/inbox/:date', (req, res, next) => { - const {date} = req.params; + const { date } = req.params; if (!isValidDate(date)) { throw getDateInvalidError(date); @@ -28,7 +28,7 @@ function register(router) { }); eu.route(router, 'get', '/etapi/calendar/days/:date', (req, res, next) => { - const {date} = req.params; + const { date } = req.params; if (!isValidDate(date)) { throw getDateInvalidError(date); @@ -39,7 +39,7 @@ function register(router) { }); eu.route(router, 'get', '/etapi/calendar/weeks/:date', (req, res, next) => { - const {date} = req.params; + const { date } = req.params; if (!isValidDate(date)) { throw getDateInvalidError(date); @@ -50,7 +50,7 @@ function register(router) { }); eu.route(router, 'get', '/etapi/calendar/months/:month', (req, res, next) => { - const {month} = req.params; + const { month } = req.params; if (!/[0-9]{4}-[0-9]{2}/.test(month)) { throw getMonthInvalidError(month); @@ -61,7 +61,7 @@ function register(router) { }); eu.route(router, 'get', '/etapi/calendar/years/:year', (req, res, next) => { - const {year} = req.params; + const { year } = req.params; if (!/[0-9]{4}/.test(year)) { throw getYearInvalidError(year); diff --git a/src/etapi/validators.js b/src/etapi/validators.js index 44128b8122..bcca288ca3 100644 --- a/src/etapi/validators.js +++ b/src/etapi/validators.js @@ -1,5 +1,5 @@ -const noteTypeService = require('../services/note_types.js'); -const dateUtils = require('../services/date_utils.js'); +const noteTypeService = require('../services/note_types'); +const dateUtils = require('../services/date_utils'); function mandatory(obj) { if (obj === undefined ) { @@ -64,7 +64,7 @@ function isNoteId(obj) { return; } - const becca = require('../becca/becca.js'); + const becca = require('../becca/becca'); if (typeof obj !== 'string') { return `'${obj}' is not a valid noteId`; diff --git a/src/routes/api/app_info.js b/src/routes/api/app_info.js index cb1996656c..aec592909a 100644 --- a/src/routes/api/app_info.js +++ 
b/src/routes/api/app_info.js @@ -1,6 +1,6 @@ "use strict"; -const appInfo = require('../../services/app_info.js'); +const appInfo = require('../../services/app_info'); function getAppInfo() { return appInfo; diff --git a/src/routes/api/attachments.js b/src/routes/api/attachments.js index 7862534a76..4cb782cb2b 100644 --- a/src/routes/api/attachments.js +++ b/src/routes/api/attachments.js @@ -1,7 +1,7 @@ -const becca = require('../../becca/becca.js'); -const blobService = require('../../services/blob.js'); -const ValidationError = require('../../errors/validation_error.js'); -const imageService = require("../../services/image.js"); +const becca = require('../../becca/becca'); +const blobService = require('../../services/blob'); +const ValidationError = require('../../errors/validation_error'); +const imageService = require("../../services/image"); function getAttachmentBlob(req) { const preview = req.query.preview === 'true'; diff --git a/src/routes/api/attributes.js b/src/routes/api/attributes.js index 0d9b541f15..2c151fccf7 100644 --- a/src/routes/api/attributes.js +++ b/src/routes/api/attributes.js @@ -1,11 +1,11 @@ "use strict"; -const sql = require('../../services/sql.js'); -const log = require('../../services/log.js'); -const attributeService = require('../../services/attributes.js'); -const BAttribute = require('../../becca/entities/battribute.js'); -const becca = require('../../becca/becca.js'); -const ValidationError = require('../../errors/validation_error.js'); +const sql = require('../../services/sql'); +const log = require('../../services/log'); +const attributeService = require('../../services/attributes'); +const BAttribute = require('../../becca/entities/battribute'); +const becca = require('../../becca/becca'); +const ValidationError = require('../../errors/validation_error'); function getEffectiveNoteAttributes(req) { const note = becca.getNote(req.params.noteId); diff --git a/src/routes/api/autocomplete.js b/src/routes/api/autocomplete.js index 
4fe35a5640..9ed36f89e3 100644 --- a/src/routes/api/autocomplete.js +++ b/src/routes/api/autocomplete.js @@ -1,11 +1,11 @@ "use strict"; -const beccaService = require('../../becca/becca_service.js'); -const searchService = require('../../services/search/services/search.js'); -const log = require('../../services/log.js'); -const utils = require('../../services/utils.js'); -const cls = require('../../services/cls.js'); -const becca = require('../../becca/becca.js'); +const beccaService = require('../../becca/becca_service'); +const searchService = require('../../services/search/services/search'); +const log = require('../../services/log'); +const utils = require('../../services/utils'); +const cls = require('../../services/cls'); +const becca = require('../../becca/becca'); function getAutocomplete(req) { const query = req.query.query.trim(); diff --git a/src/routes/api/backend_log.js b/src/routes/api/backend_log.js index 51d0cc6a61..4a07a219eb 100644 --- a/src/routes/api/backend_log.js +++ b/src/routes/api/backend_log.js @@ -1,8 +1,8 @@ "use strict"; const fs = require('fs'); -const dateUtils = require('../../services/date_utils.js'); -const {LOG_DIR} = require('../../services/data_dir.js'); +const dateUtils = require('../../services/date_utils'); +const {LOG_DIR} = require('../../services/data_dir'); function getBackendLog() { const file = `${LOG_DIR}/trilium-${dateUtils.localNowDate()}.log`; diff --git a/src/routes/api/branches.js b/src/routes/api/branches.js index 17f11cd019..4638fb63e3 100644 --- a/src/routes/api/branches.js +++ b/src/routes/api/branches.js @@ -1,16 +1,16 @@ "use strict"; -const sql = require('../../services/sql.js'); -const utils = require('../../services/utils.js'); -const entityChangesService = require('../../services/entity_changes.js'); -const treeService = require('../../services/tree.js'); -const eraseService = require('../../services/erase.js'); -const becca = require('../../becca/becca.js'); -const TaskContext = 
require('../../services/task_context.js'); -const branchService = require('../../services/branches.js'); -const log = require('../../services/log.js'); -const ValidationError = require('../../errors/validation_error.js'); -const eventService = require("../../services/events.js"); +const sql = require('../../services/sql'); +const utils = require('../../services/utils'); +const entityChangesService = require('../../services/entity_changes'); +const treeService = require('../../services/tree'); +const eraseService = require('../../services/erase'); +const becca = require('../../becca/becca'); +const TaskContext = require('../../services/task_context'); +const branchService = require('../../services/branches'); +const log = require('../../services/log'); +const ValidationError = require('../../errors/validation_error'); +const eventService = require("../../services/events"); /** * Code in this file deals with moving and cloning branches. The relationship between note and parent note is unique diff --git a/src/routes/api/bulk_action.js b/src/routes/api/bulk_action.js index 08f59df494..27955cd358 100644 --- a/src/routes/api/bulk_action.js +++ b/src/routes/api/bulk_action.js @@ -1,5 +1,5 @@ -const becca = require('../../becca/becca.js'); -const bulkActionService = require('../../services/bulk_actions.js'); +const becca = require('../../becca/becca'); +const bulkActionService = require('../../services/bulk_actions'); function execute(req) { const {noteIds, includeDescendants} = req.body; diff --git a/src/routes/api/clipper.js b/src/routes/api/clipper.js index 1a7ac2e813..74cd4d8a76 100644 --- a/src/routes/api/clipper.js +++ b/src/routes/api/clipper.js @@ -1,18 +1,18 @@ "use strict"; -const attributeService = require('../../services/attributes.js'); -const cloneService = require('../../services/cloning.js'); -const noteService = require('../../services/notes.js'); -const dateNoteService = require('../../services/date_notes.js'); -const dateUtils = 
require('../../services/date_utils.js'); -const imageService = require('../../services/image.js'); -const appInfo = require('../../services/app_info.js'); -const ws = require('../../services/ws.js'); -const log = require('../../services/log.js'); -const utils = require('../../services/utils.js'); +const attributeService = require('../../services/attributes'); +const cloneService = require('../../services/cloning'); +const noteService = require('../../services/notes'); +const dateNoteService = require('../../services/date_notes'); +const dateUtils = require('../../services/date_utils'); +const imageService = require('../../services/image'); +const appInfo = require('../../services/app_info'); +const ws = require('../../services/ws'); +const log = require('../../services/log'); +const utils = require('../../services/utils'); const path = require('path'); -const htmlSanitizer = require('../../services/html_sanitizer.js'); -const {formatAttrForSearch} = require('../../services/attribute_formatter.js'); +const htmlSanitizer = require('../../services/html_sanitizer'); +const {formatAttrForSearch} = require('../../services/attribute_formatter'); const jsdom = require("jsdom"); const { JSDOM } = jsdom; diff --git a/src/routes/api/cloning.js b/src/routes/api/cloning.js index 384e769905..75a42e6755 100644 --- a/src/routes/api/cloning.js +++ b/src/routes/api/cloning.js @@ -1,6 +1,6 @@ "use strict"; -const cloningService = require('../../services/cloning.js'); +const cloningService = require('../../services/cloning'); function cloneNoteToBranch(req) { const {noteId, parentBranchId} = req.params; diff --git a/src/routes/api/database.js b/src/routes/api/database.js index 5b6b4739dc..d8d8cfa9a9 100644 --- a/src/routes/api/database.js +++ b/src/routes/api/database.js @@ -1,10 +1,10 @@ "use strict"; -const sql = require('../../services/sql.js'); -const log = require('../../services/log.js'); -const backupService = require('../../services/backup.js'); -const anonymizationService = 
require('../../services/anonymization.js'); -const consistencyChecksService = require('../../services/consistency_checks.js'); +const sql = require('../../services/sql'); +const log = require('../../services/log'); +const backupService = require('../../services/backup'); +const anonymizationService = require('../../services/anonymization'); +const consistencyChecksService = require('../../services/consistency_checks'); function getExistingBackups() { return backupService.getExistingBackups(); diff --git a/src/routes/api/etapi_tokens.js b/src/routes/api/etapi_tokens.js index 2c334643ab..b0d29db3ec 100644 --- a/src/routes/api/etapi_tokens.js +++ b/src/routes/api/etapi_tokens.js @@ -1,4 +1,4 @@ -const etapiTokenService = require('../../services/etapi_tokens.js'); +const etapiTokenService = require('../../services/etapi_tokens'); function getTokens() { const tokens = etapiTokenService.getTokens(); diff --git a/src/routes/api/export.js b/src/routes/api/export.js index 514fdf8619..4c098512be 100644 --- a/src/routes/api/export.js +++ b/src/routes/api/export.js @@ -1,12 +1,12 @@ "use strict"; -const zipExportService = require('../../services/export/zip.js'); -const singleExportService = require('../../services/export/single.js'); -const opmlExportService = require('../../services/export/opml.js'); -const becca = require('../../becca/becca.js'); -const TaskContext = require('../../services/task_context.js'); -const log = require('../../services/log.js'); -const NotFoundError = require('../../errors/not_found_error.js'); +const zipExportService = require('../../services/export/zip'); +const singleExportService = require('../../services/export/single'); +const opmlExportService = require('../../services/export/opml'); +const becca = require('../../becca/becca'); +const TaskContext = require('../../services/task_context'); +const log = require('../../services/log'); +const NotFoundError = require('../../errors/not_found_error'); function exportBranch(req, res) { const 
{branchId, type, format, version, taskId} = req.params; diff --git a/src/routes/api/files.js b/src/routes/api/files.js index 72bd0ee11f..f368383d5c 100644 --- a/src/routes/api/files.js +++ b/src/routes/api/files.js @@ -1,16 +1,16 @@ "use strict"; -const protectedSessionService = require('../../services/protected_session.js'); -const utils = require('../../services/utils.js'); -const log = require('../../services/log.js'); -const noteService = require('../../services/notes.js'); +const protectedSessionService = require('../../services/protected_session'); +const utils = require('../../services/utils'); +const log = require('../../services/log'); +const noteService = require('../../services/notes'); const tmp = require('tmp'); const fs = require('fs'); const { Readable } = require('stream'); const chokidar = require('chokidar'); -const ws = require('../../services/ws.js'); -const becca = require('../../becca/becca.js'); -const ValidationError = require('../../errors/validation_error.js'); +const ws = require('../../services/ws'); +const becca = require('../../becca/becca'); +const ValidationError = require('../../errors/validation_error'); function updateFile(req) { const note = becca.getNoteOrThrow(req.params.noteId); diff --git a/src/routes/api/fonts.js b/src/routes/api/fonts.js index 6e7cce87f4..d21db07c89 100644 --- a/src/routes/api/fonts.js +++ b/src/routes/api/fonts.js @@ -1,4 +1,4 @@ -const optionService = require('../../services/options.js'); +const optionService = require('../../services/options'); function getFontCss(req, res) { res.setHeader('Content-Type', 'text/css'); diff --git a/src/routes/api/image.js b/src/routes/api/image.js index 14b0341190..32bce601b6 100644 --- a/src/routes/api/image.js +++ b/src/routes/api/image.js @@ -1,8 +1,8 @@ "use strict"; -const imageService = require('../../services/image.js'); -const becca = require('../../becca/becca.js'); -const RESOURCE_DIR = require('../../services/resource_dir.js').RESOURCE_DIR; +const imageService 
= require('../../services/image'); +const becca = require('../../becca/becca'); +const RESOURCE_DIR = require('../../services/resource_dir').RESOURCE_DIR; const fs = require('fs'); function returnImageFromNote(req, res) { diff --git a/src/routes/api/import.js b/src/routes/api/import.js index 0e27ed5155..978fe9d7c5 100644 --- a/src/routes/api/import.js +++ b/src/routes/api/import.js @@ -1,20 +1,20 @@ "use strict"; -const enexImportService = require('../../services/import/enex.js'); -const opmlImportService = require('../../services/import/opml.js'); -const zipImportService = require('../../services/import/zip.js'); -const singleImportService = require('../../services/import/single.js'); -const cls = require('../../services/cls.js'); +const enexImportService = require('../../services/import/enex'); +const opmlImportService = require('../../services/import/opml'); +const zipImportService = require('../../services/import/zip'); +const singleImportService = require('../../services/import/single'); +const cls = require('../../services/cls'); const path = require('path'); -const becca = require('../../becca/becca.js'); -const beccaLoader = require('../../becca/becca_loader.js'); -const log = require('../../services/log.js'); -const TaskContext = require('../../services/task_context.js'); -const ValidationError = require('../../errors/validation_error.js'); +const becca = require('../../becca/becca'); +const beccaLoader = require('../../becca/becca_loader'); +const log = require('../../services/log'); +const TaskContext = require('../../services/task_context'); +const ValidationError = require('../../errors/validation_error'); async function importNotesToBranch(req) { - const {parentNoteId} = req.params; - const {taskId, last} = req.body; + const { parentNoteId } = req.params; + const { taskId, last } = req.body; const options = { safeImport: req.body.safeImport !== 'false', @@ -81,8 +81,8 @@ async function importNotesToBranch(req) { } async function 
importAttachmentsToNote(req) { - const {parentNoteId} = req.params; - const {taskId, last} = req.body; + const { parentNoteId } = req.params; + const { taskId, last } = req.body; const options = { shrinkImages: req.body.shrinkImages !== 'false', diff --git a/src/routes/api/keys.js b/src/routes/api/keys.js index 3f75f11a33..bc1b97d4a4 100644 --- a/src/routes/api/keys.js +++ b/src/routes/api/keys.js @@ -1,7 +1,7 @@ "use strict"; -const keyboardActions = require('../../services/keyboard_actions.js'); -const becca = require('../../becca/becca.js'); +const keyboardActions = require('../../services/keyboard_actions'); +const becca = require('../../becca/becca'); function getKeyboardActions() { return keyboardActions.getKeyboardActions(); diff --git a/src/routes/api/login.js b/src/routes/api/login.js index 9dbf058d46..9cb0ec8bc3 100644 --- a/src/routes/api/login.js +++ b/src/routes/api/login.js @@ -1,17 +1,17 @@ "use strict"; -const options = require('../../services/options.js'); -const utils = require('../../services/utils.js'); -const dateUtils = require('../../services/date_utils.js'); -const instanceId = require('../../services/instance_id.js'); -const passwordEncryptionService = require('../../services/encryption/password_encryption.js'); -const protectedSessionService = require('../../services/protected_session.js'); -const appInfo = require('../../services/app_info.js'); -const eventService = require('../../services/events.js'); -const sqlInit = require('../../services/sql_init.js'); -const sql = require('../../services/sql.js'); -const ws = require('../../services/ws.js'); -const etapiTokenService = require('../../services/etapi_tokens.js'); +const options = require('../../services/options'); +const utils = require('../../services/utils'); +const dateUtils = require('../../services/date_utils'); +const instanceId = require('../../services/instance_id'); +const passwordEncryptionService = require('../../services/encryption/password_encryption'); +const 
protectedSessionService = require('../../services/protected_session'); +const appInfo = require('../../services/app_info'); +const eventService = require('../../services/events'); +const sqlInit = require('../../services/sql_init'); +const sql = require('../../services/sql'); +const ws = require('../../services/ws'); +const etapiTokenService = require('../../services/etapi_tokens'); function loginSync(req) { if (!sqlInit.schemaExists()) { diff --git a/src/routes/api/note_map.js b/src/routes/api/note_map.js index 30ad771cab..2e13f61670 100644 --- a/src/routes/api/note_map.js +++ b/src/routes/api/note_map.js @@ -1,6 +1,6 @@ "use strict"; -const becca = require('../../becca/becca.js'); +const becca = require('../../becca/becca'); const { JSDOM } = require("jsdom"); function buildDescendantCountMap(noteIdsToCount) { diff --git a/src/routes/api/notes.js b/src/routes/api/notes.js index 54cd83330b..8787acbf8f 100644 --- a/src/routes/api/notes.js +++ b/src/routes/api/notes.js @@ -1,15 +1,15 @@ "use strict"; -const noteService = require('../../services/notes.js'); -const eraseService = require('../../services/erase.js'); -const treeService = require('../../services/tree.js'); -const sql = require('../../services/sql.js'); -const utils = require('../../services/utils.js'); -const log = require('../../services/log.js'); -const TaskContext = require('../../services/task_context.js'); -const becca = require('../../becca/becca.js'); -const ValidationError = require('../../errors/validation_error.js'); -const blobService = require('../../services/blob.js'); +const noteService = require('../../services/notes'); +const eraseService = require('../../services/erase'); +const treeService = require('../../services/tree'); +const sql = require('../../services/sql'); +const utils = require('../../services/utils'); +const log = require('../../services/log'); +const TaskContext = require('../../services/task_context'); +const becca = require('../../becca/becca'); +const ValidationError = 
require('../../errors/validation_error'); +const blobService = require('../../services/blob'); function getNote(req) { return becca.getNoteOrThrow(req.params.noteId); diff --git a/src/routes/api/options.js b/src/routes/api/options.js index efd70a07eb..88f72ae714 100644 --- a/src/routes/api/options.js +++ b/src/routes/api/options.js @@ -1,9 +1,9 @@ "use strict"; -const optionService = require('../../services/options.js'); -const log = require('../../services/log.js'); -const searchService = require('../../services/search/services/search.js'); -const ValidationError = require('../../errors/validation_error.js'); +const optionService = require('../../services/options'); +const log = require('../../services/log'); +const searchService = require('../../services/search/services/search'); +const ValidationError = require('../../errors/validation_error'); // options allowed to be updated directly in the Options dialog const ALLOWED_OPTIONS = new Set([ diff --git a/src/routes/api/other.js b/src/routes/api/other.js index ca92e15c6d..437636e8ed 100644 --- a/src/routes/api/other.js +++ b/src/routes/api/other.js @@ -1,5 +1,5 @@ -const becca = require('../../becca/becca.js'); -const markdownService = require('../../services/import/markdown.js'); +const becca = require('../../becca/becca'); +const markdownService = require('../../services/import/markdown'); function getIconUsage() { const iconClassToCountMap = {}; diff --git a/src/routes/api/password.js b/src/routes/api/password.js index a80d4151cb..42bd8b0e00 100644 --- a/src/routes/api/password.js +++ b/src/routes/api/password.js @@ -1,7 +1,7 @@ "use strict"; -const passwordService = require('../../services/encryption/password.js'); -const ValidationError = require('../../errors/validation_error.js'); +const passwordService = require('../../services/encryption/password'); +const ValidationError = require('../../errors/validation_error'); function changePassword(req) { if (passwordService.isPasswordSet()) { diff --git 
a/src/routes/api/recent_changes.js b/src/routes/api/recent_changes.js index 567abfb3ab..44e964ecf0 100644 --- a/src/routes/api/recent_changes.js +++ b/src/routes/api/recent_changes.js @@ -1,9 +1,9 @@ "use strict"; -const sql = require('../../services/sql.js'); -const protectedSessionService = require('../../services/protected_session.js'); -const noteService = require('../../services/notes.js'); -const becca = require('../../becca/becca.js'); +const sql = require('../../services/sql'); +const protectedSessionService = require('../../services/protected_session'); +const noteService = require('../../services/notes'); +const becca = require('../../becca/becca'); function getRecentChanges(req) { const {ancestorNoteId} = req.params; diff --git a/src/routes/api/recent_notes.js b/src/routes/api/recent_notes.js index 2ba5c5020a..40139477a9 100644 --- a/src/routes/api/recent_notes.js +++ b/src/routes/api/recent_notes.js @@ -1,8 +1,8 @@ "use strict"; -const BRecentNote = require('../../becca/entities/brecent_note.js'); -const sql = require('../../services/sql.js'); -const dateUtils = require('../../services/date_utils.js'); +const BRecentNote = require('../../becca/entities/brecent_note'); +const sql = require('../../services/sql'); +const dateUtils = require('../../services/date_utils'); function addRecentNote(req) { new BRecentNote({ diff --git a/src/routes/api/relation-map.js b/src/routes/api/relation-map.js index 0c3db4e6db..280ed7f672 100644 --- a/src/routes/api/relation-map.js +++ b/src/routes/api/relation-map.js @@ -1,5 +1,5 @@ -const becca = require('../../becca/becca.js'); -const sql = require('../../services/sql.js'); +const becca = require('../../becca/becca'); +const sql = require('../../services/sql'); function getRelationMap(req) { const {relationMapNoteId, noteIds} = req.body; diff --git a/src/routes/api/revisions.js b/src/routes/api/revisions.js index 72ab1ea033..e317fec951 100644 --- a/src/routes/api/revisions.js +++ b/src/routes/api/revisions.js @@ -1,14 
+1,14 @@ "use strict"; -const beccaService = require('../../becca/becca_service.js'); -const revisionService = require('../../services/revisions.js'); -const utils = require('../../services/utils.js'); -const sql = require('../../services/sql.js'); -const cls = require('../../services/cls.js'); +const beccaService = require('../../becca/becca_service'); +const revisionService = require('../../services/revisions'); +const utils = require('../../services/utils'); +const sql = require('../../services/sql'); +const cls = require('../../services/cls'); const path = require('path'); -const becca = require('../../becca/becca.js'); -const blobService = require('../../services/blob.js'); -const eraseService = require("../../services/erase.js"); +const becca = require('../../becca/becca'); +const blobService = require('../../services/blob'); +const eraseService = require("../../services/erase"); function getRevisionBlob(req) { const preview = req.query.preview === 'true'; diff --git a/src/routes/api/script.js b/src/routes/api/script.js index 833a57b9be..8bd7ba712e 100644 --- a/src/routes/api/script.js +++ b/src/routes/api/script.js @@ -1,17 +1,17 @@ "use strict"; -const scriptService = require('../../services/script.js'); -const attributeService = require('../../services/attributes.js'); -const becca = require('../../becca/becca.js'); -const syncService = require('../../services/sync.js'); -const sql = require('../../services/sql.js'); +const scriptService = require('../../services/script'); +const attributeService = require('../../services/attributes'); +const becca = require('../../becca/becca'); +const syncService = require('../../services/sync'); +const sql = require('../../services/sql'); // The async/await here is very confusing, because the body.script may, but may not be async. If it is async, then we // need to await it and make the complete response including metadata available in a Promise, so that the route detects // this and does result.then(). 
async function exec(req) { try { - const {body} = req; + const { body } = req; const execute = body => scriptService.executeScript( body.script, @@ -115,7 +115,7 @@ function getRelationBundles(req) { function getBundle(req) { const note = becca.getNote(req.params.noteId); - const {script, params} = req.body; + const { script, params } = req.body; return scriptService.getScriptBundleForFrontend(note, script, params); } diff --git a/src/routes/api/search.js b/src/routes/api/search.js index cf48875238..96428c6af9 100644 --- a/src/routes/api/search.js +++ b/src/routes/api/search.js @@ -1,12 +1,12 @@ "use strict"; -const becca = require('../../becca/becca.js'); -const SearchContext = require('../../services/search/search_context.js'); -const searchService = require('../../services/search/services/search.js'); -const bulkActionService = require('../../services/bulk_actions.js'); -const cls = require('../../services/cls.js'); -const {formatAttrForSearch} = require('../../services/attribute_formatter.js'); -const ValidationError = require('../../errors/validation_error.js'); +const becca = require('../../becca/becca'); +const SearchContext = require('../../services/search/search_context'); +const searchService = require('../../services/search/services/search'); +const bulkActionService = require('../../services/bulk_actions'); +const cls = require('../../services/cls'); +const {formatAttrForSearch} = require('../../services/attribute_formatter'); +const ValidationError = require('../../errors/validation_error'); function searchFromNote(req) { const note = becca.getNoteOrThrow(req.params.noteId); diff --git a/src/routes/api/sender.js b/src/routes/api/sender.js index 1e55d7a7ba..b2309d3c76 100644 --- a/src/routes/api/sender.js +++ b/src/routes/api/sender.js @@ -1,10 +1,10 @@ "use strict"; const imageType = require('image-type'); -const imageService = require('../../services/image.js'); -const noteService = require('../../services/notes.js'); -const {sanitizeAttributeName} = 
require('../../services/sanitize_attribute_name.js'); -const specialNotesService = require('../../services/special_notes.js'); +const imageService = require('../../services/image'); +const noteService = require('../../services/notes'); +const { sanitizeAttributeName } = require('../../services/sanitize_attribute_name'); +const specialNotesService = require('../../services/special_notes'); function uploadImage(req) { const file = req.file; @@ -17,14 +17,14 @@ function uploadImage(req) { const parentNote = specialNotesService.getInboxNote(req.headers['x-local-date']); - const {note, noteId} = imageService.saveImage(parentNote.noteId, file.buffer, originalName, true); + const { note, noteId } = imageService.saveImage(parentNote.noteId, file.buffer, originalName, true); const labelsStr = req.headers['x-labels']; if (labelsStr?.trim()) { const labels = JSON.parse(labelsStr); - for (const {name, value} of labels) { + for (const { name, value } of labels) { note.setLabel(sanitizeAttributeName(name), value); } } @@ -39,7 +39,7 @@ function uploadImage(req) { function saveNote(req) { const parentNote = specialNotesService.getInboxNote(req.headers['x-local-date']); - const {note, branch} = noteService.createNewNote({ + const { note, branch } = noteService.createNewNote({ parentNoteId: parentNote.noteId, title: req.body.title, content: req.body.content, @@ -49,7 +49,7 @@ function saveNote(req) { }); if (req.body.labels) { - for (const {name, value} of req.body.labels) { + for (const { name, value } of req.body.labels) { note.setLabel(sanitizeAttributeName(name), value); } } diff --git a/src/routes/api/setup.js b/src/routes/api/setup.js index 4ed672f9db..84f310725a 100644 --- a/src/routes/api/setup.js +++ b/src/routes/api/setup.js @@ -1,9 +1,9 @@ "use strict"; -const sqlInit = require('../../services/sql_init.js'); -const setupService = require('../../services/setup.js'); -const log = require('../../services/log.js'); -const appInfo = require('../../services/app_info.js'); 
+const sqlInit = require('../../services/sql_init'); +const setupService = require('../../services/setup'); +const log = require('../../services/log'); +const appInfo = require('../../services/app_info'); function getStatus() { return { @@ -24,7 +24,7 @@ function setupSyncFromServer(req) { } function saveSyncSeed(req) { - const {options, syncVersion} = req.body; + const { options, syncVersion } = req.body; if (appInfo.syncVersion !== syncVersion) { const message = `Could not setup sync since local sync protocol version is ${appInfo.syncVersion} while remote is ${syncVersion}. To fix this issue, use same Trilium version on all instances.`; diff --git a/src/routes/api/similar_notes.js b/src/routes/api/similar_notes.js index 781ba0efcd..555efd1b57 100644 --- a/src/routes/api/similar_notes.js +++ b/src/routes/api/similar_notes.js @@ -1,7 +1,7 @@ "use strict"; -const similarityService = require('../../becca/similarity.js'); -const becca = require('../../becca/becca.js'); +const similarityService = require('../../becca/similarity'); +const becca = require('../../becca/becca'); async function getSimilarNotes(req) { const noteId = req.params.noteId; diff --git a/src/routes/api/special_notes.js b/src/routes/api/special_notes.js index 5f4c7933bf..5ed6e0656f 100644 --- a/src/routes/api/special_notes.js +++ b/src/routes/api/special_notes.js @@ -1,10 +1,10 @@ "use strict"; -const dateNoteService = require('../../services/date_notes.js'); -const sql = require('../../services/sql.js'); -const cls = require('../../services/cls.js'); -const specialNotesService = require('../../services/special_notes.js'); -const becca = require('../../becca/becca.js'); +const dateNoteService = require('../../services/date_notes'); +const sql = require('../../services/sql'); +const cls = require('../../services/cls'); +const specialNotesService = require('../../services/special_notes'); +const becca = require('../../becca/becca'); function getInboxNote(req) { return 
specialNotesService.getInboxNote(req.params.date); diff --git a/src/routes/api/sql.js b/src/routes/api/sql.js index 6a97673179..4e06ed78e0 100644 --- a/src/routes/api/sql.js +++ b/src/routes/api/sql.js @@ -1,7 +1,7 @@ "use strict"; -const sql = require('../../services/sql.js'); -const becca = require('../../becca/becca.js'); +const sql = require('../../services/sql'); +const becca = require('../../becca/becca'); function getSchema() { const tableNames = sql.getColumn(`SELECT name FROM sqlite_master WHERE type='table' AND name NOT LIKE 'sqlite_%' ORDER BY name`); diff --git a/src/routes/api/stats.js b/src/routes/api/stats.js index d67d624d37..05d05d25cd 100644 --- a/src/routes/api/stats.js +++ b/src/routes/api/stats.js @@ -1,5 +1,5 @@ -const sql = require('../../services/sql.js'); -const becca = require('../../becca/becca.js'); +const sql = require('../../services/sql'); +const becca = require('../../becca/becca'); function getNoteSize(req) { const {noteId} = req.params; diff --git a/src/routes/api/sync.js b/src/routes/api/sync.js index 44da3e52ce..bd38e29053 100644 --- a/src/routes/api/sync.js +++ b/src/routes/api/sync.js @@ -1,16 +1,16 @@ "use strict"; -const syncService = require('../../services/sync.js'); -const syncUpdateService = require('../../services/sync_update.js'); -const entityChangesService = require('../../services/entity_changes.js'); -const sql = require('../../services/sql.js'); -const sqlInit = require('../../services/sql_init.js'); -const optionService = require('../../services/options.js'); -const contentHashService = require('../../services/content_hash.js'); -const log = require('../../services/log.js'); -const syncOptions = require('../../services/sync_options.js'); -const utils = require('../../services/utils.js'); -const ws = require('../../services/ws.js'); +const syncService = require('../../services/sync'); +const syncUpdateService = require('../../services/sync_update'); +const entityChangesService = 
require('../../services/entity_changes'); +const sql = require('../../services/sql'); +const sqlInit = require('../../services/sql_init'); +const optionService = require('../../services/options'); +const contentHashService = require('../../services/content_hash'); +const log = require('../../services/log'); +const syncOptions = require('../../services/sync_options'); +const utils = require('../../services/utils'); +const ws = require('../../services/ws'); async function testSync() { try { @@ -132,7 +132,7 @@ function getChanged(req) { const partialRequests = {}; function update(req) { - let {body} = req; + let { body } = req; const pageCount = parseInt(req.get('pageCount')); const pageIndex = parseInt(req.get('pageIndex')); @@ -164,7 +164,7 @@ function update(req) { } } - const {entities, instanceId} = body; + const { entities, instanceId } = body; sql.transactional(() => syncUpdateService.updateEntities(entities, instanceId)); } @@ -193,7 +193,7 @@ function queueSector(req) { } function checkEntityChanges() { - require('../../services/consistency_checks.js').runEntityChangesChecks(); + require('../../services/consistency_checks').runEntityChangesChecks(); } module.exports = { diff --git a/src/routes/api/tree.js b/src/routes/api/tree.js index a5b4ad6393..c8188068b9 100644 --- a/src/routes/api/tree.js +++ b/src/routes/api/tree.js @@ -1,8 +1,8 @@ "use strict"; -const becca = require('../../becca/becca.js'); -const log = require('../../services/log.js'); -const NotFoundError = require('../../errors/not_found_error.js'); +const becca = require('../../becca/becca'); +const log = require('../../services/log'); +const NotFoundError = require('../../errors/not_found_error'); function getNotesAndBranchesAndAttributes(noteIds) { noteIds = new Set(noteIds); diff --git a/src/routes/assets.js b/src/routes/assets.js index d152015576..b6b46332c0 100644 --- a/src/routes/assets.js +++ b/src/routes/assets.js @@ -1,7 +1,7 @@ -const assetPath = require('../services/asset_path.js'); 
+const assetPath = require('../services/asset_path'); const path = require("path"); const express = require("express"); -const env = require('../services/env.js'); +const env = require('../services/env'); const persistentCacheStatic = (root, options) => { if (!env.isDev()) { diff --git a/src/routes/custom.js b/src/routes/custom.js index c76240976e..fbf6c42c37 100644 --- a/src/routes/custom.js +++ b/src/routes/custom.js @@ -1,9 +1,9 @@ -const log = require('../services/log.js'); +const log = require('../services/log'); const fileService = require('./api/files.js'); -const scriptService = require('../services/script.js'); -const cls = require('../services/cls.js'); -const sql = require('../services/sql.js'); -const becca = require('../becca/becca.js'); +const scriptService = require('../services/script'); +const cls = require('../services/cls'); +const sql = require('../services/sql'); +const becca = require('../becca/becca'); function handleRequest(req, res) { // express puts content after first slash into 0 index element diff --git a/src/routes/error_handlers.js b/src/routes/error_handlers.js index 1e9715ed87..4d17b0d5d9 100644 --- a/src/routes/error_handlers.js +++ b/src/routes/error_handlers.js @@ -1,4 +1,4 @@ -const log = require('../services/log.js'); +const log = require('../services/log'); function register(app) { app.use((err, req, res, next) => { diff --git a/src/routes/index.js b/src/routes/index.js index d57563badd..44c6098341 100644 --- a/src/routes/index.js +++ b/src/routes/index.js @@ -1,16 +1,16 @@ "use strict"; -const sql = require('../services/sql.js'); -const attributeService = require('../services/attributes.js'); -const config = require('../services/config.js'); -const optionService = require('../services/options.js'); -const log = require('../services/log.js'); -const env = require('../services/env.js'); -const utils = require('../services/utils.js'); -const protectedSessionService = require('../services/protected_session.js'); +const sql = 
require('../services/sql'); +const attributeService = require('../services/attributes'); +const config = require('../services/config'); +const optionService = require('../services/options'); +const log = require('../services/log'); +const env = require('../services/env'); +const utils = require('../services/utils'); +const protectedSessionService = require('../services/protected_session'); const packageJson = require('../../package.json'); -const assetPath = require('../services/asset_path.js'); -const appPath = require('../services/app_path.js'); +const assetPath = require('../services/asset_path'); +const appPath = require('../services/app_path'); function index(req, res) { const options = optionService.getOptionMap(); diff --git a/src/routes/login.js b/src/routes/login.js index 6c4af5cdb6..649e9b854d 100644 --- a/src/routes/login.js +++ b/src/routes/login.js @@ -1,13 +1,13 @@ "use strict"; -const utils = require('../services/utils.js'); -const optionService = require('../services/options.js'); -const myScryptService = require('../services/encryption/my_scrypt.js'); -const log = require('../services/log.js'); -const passwordService = require('../services/encryption/password.js'); -const assetPath = require('../services/asset_path.js'); -const appPath = require('../services/app_path.js'); -const ValidationError = require('../errors/validation_error.js'); +const utils = require('../services/utils'); +const optionService = require('../services/options'); +const myScryptService = require('../services/encryption/my_scrypt'); +const log = require('../services/log'); +const passwordService = require('../services/encryption/password'); +const assetPath = require('../services/asset_path'); +const appPath = require('../services/app_path'); +const ValidationError = require('../errors/validation_error'); function loginPage(req, res) { res.render('login', { diff --git a/src/routes/routes.js b/src/routes/routes.js index fd3cf167a8..c782f5e9cf 100644 --- a/src/routes/routes.js 
+++ b/src/routes/routes.js @@ -1,20 +1,20 @@ "use strict"; -const utils = require('../services/utils.js'); +const utils = require('../services/utils'); const multer = require('multer'); -const log = require('../services/log.js'); +const log = require('../services/log'); const express = require('express'); const router = express.Router(); -const auth = require('../services/auth.js'); -const cls = require('../services/cls.js'); -const sql = require('../services/sql.js'); -const entityChangesService = require('../services/entity_changes.js'); +const auth = require('../services/auth'); +const cls = require('../services/cls'); +const sql = require('../services/sql'); +const entityChangesService = require('../services/entity_changes'); const csurf = require('csurf'); const { createPartialContentHandler } = require("express-partial-content"); const rateLimit = require("express-rate-limit"); -const AbstractBeccaEntity = require('../becca/entities/abstract_becca_entity.js'); -const NotFoundError = require('../errors/not_found_error.js'); -const ValidationError = require('../errors/validation_error.js'); +const AbstractBeccaEntity = require('../becca/entities/abstract_becca_entity'); +const NotFoundError = require('../errors/not_found_error'); +const ValidationError = require('../errors/validation_error'); // page routes const setupRoute = require('./setup.js'); @@ -27,28 +27,28 @@ const notesApiRoute = require('./api/notes.js'); const branchesApiRoute = require('./api/branches.js'); const attachmentsApiRoute = require('./api/attachments.js'); const autocompleteApiRoute = require('./api/autocomplete.js'); -const cloningApiRoute = require('./api/cloning.js'); -const revisionsApiRoute = require('./api/revisions.js'); +const cloningApiRoute = require('./api/cloning'); +const revisionsApiRoute = require('./api/revisions'); const recentChangesApiRoute = require('./api/recent_changes.js'); const optionsApiRoute = require('./api/options.js'); -const passwordApiRoute = 
require('./api/password.js'); -const syncApiRoute = require('./api/sync.js'); +const passwordApiRoute = require('./api/password'); +const syncApiRoute = require('./api/sync'); const loginApiRoute = require('./api/login.js'); const recentNotesRoute = require('./api/recent_notes.js'); -const appInfoRoute = require('./api/app_info.js'); +const appInfoRoute = require('./api/app_info'); const exportRoute = require('./api/export.js'); const importRoute = require('./api/import.js'); const setupApiRoute = require('./api/setup.js'); -const sqlRoute = require('./api/sql.js'); +const sqlRoute = require('./api/sql'); const databaseRoute = require('./api/database.js'); -const imageRoute = require('./api/image.js'); -const attributesRoute = require('./api/attributes.js'); +const imageRoute = require('./api/image'); +const attributesRoute = require('./api/attributes'); const scriptRoute = require('./api/script.js'); const senderRoute = require('./api/sender.js'); const filesRoute = require('./api/files.js'); -const searchRoute = require('./api/search.js'); +const searchRoute = require('./api/search'); const bulkActionRoute = require('./api/bulk_action.js'); -const specialNotesRoute = require('./api/special_notes.js'); +const specialNotesRoute = require('./api/special_notes'); const noteMapRoute = require('./api/note_map.js'); const clipperRoute = require('./api/clipper.js'); const similarNotesRoute = require('./api/similar_notes.js'); @@ -56,20 +56,20 @@ const keysRoute = require('./api/keys.js'); const backendLogRoute = require('./api/backend_log.js'); const statsRoute = require('./api/stats.js'); const fontsRoute = require('./api/fonts.js'); -const etapiTokensApiRoutes = require('./api/etapi_tokens.js'); +const etapiTokensApiRoutes = require('./api/etapi_tokens'); const relationMapApiRoute = require('./api/relation-map'); const otherRoute = require('./api/other.js'); const shareRoutes = require('../share/routes.js'); const etapiAuthRoutes = require('../etapi/auth.js'); -const 
etapiAppInfoRoutes = require('../etapi/app_info.js'); +const etapiAppInfoRoutes = require('../etapi/app_info'); const etapiAttachmentRoutes = require('../etapi/attachments.js'); -const etapiAttributeRoutes = require('../etapi/attributes.js'); +const etapiAttributeRoutes = require('../etapi/attributes'); const etapiBranchRoutes = require('../etapi/branches.js'); const etapiNoteRoutes = require('../etapi/notes.js'); -const etapiSpecialNoteRoutes = require('../etapi/special_notes.js'); +const etapiSpecialNoteRoutes = require('../etapi/special_notes'); const etapiSpecRoute = require('../etapi/spec.js'); -const etapiBackupRoute = require('../etapi/backup.js'); +const etapiBackupRoute = require('../etapi/backup'); const csrfMiddleware = csurf({ cookie: true, @@ -230,7 +230,7 @@ function register(app) { apiRoute(GET, '/api/app-info', appInfoRoute.getAppInfo); // docker health check - route(GET, '/api/health-check', [], () => ({"status": "ok"}), apiResultHandler); + route(GET, '/api/health-check', [], () => ({ "status": "ok" }), apiResultHandler); // group of the services below are meant to be executed from the outside route(GET, '/api/setup/status', [], setupApiRoute.getStatus, apiResultHandler); diff --git a/src/routes/session_parser.js b/src/routes/session_parser.js index 404159b628..afe99fa2f6 100644 --- a/src/routes/session_parser.js +++ b/src/routes/session_parser.js @@ -1,6 +1,6 @@ const session = require("express-session"); -const sessionSecret = require('../services/session_secret.js'); -const dataDir = require('../services/data_dir.js'); +const sessionSecret = require('../services/session_secret'); +const dataDir = require('../services/data_dir'); const FileStore = require('session-file-store')(session); const sessionParser = session({ diff --git a/src/routes/setup.js b/src/routes/setup.js index be14c2310f..f99bc8c51c 100644 --- a/src/routes/setup.js +++ b/src/routes/setup.js @@ -1,16 +1,16 @@ "use strict"; -const sqlInit = require('../services/sql_init.js'); 
-const setupService = require('../services/setup.js'); -const utils = require('../services/utils.js'); -const assetPath = require('../services/asset_path.js'); -const appPath = require('../services/app_path.js'); +const sqlInit = require('../services/sql_init'); +const setupService = require('../services/setup'); +const utils = require('../services/utils'); +const assetPath = require('../services/asset_path'); +const appPath = require('../services/app_path'); function setupPage(req, res) { if (sqlInit.isDbInitialized()) { if (utils.isElectron()) { - const windowService = require('../services/window.js'); - const {app} = require('electron'); + const windowService = require('../services/window'); + const { app } = require('electron'); windowService.createMainWindow(app); windowService.closeSetupWindow(); } diff --git a/src/services/anonymization.js b/src/services/anonymization.ts similarity index 90% rename from src/services/anonymization.js rename to src/services/anonymization.ts index 3beb1c1d67..4e17058a78 100644 --- a/src/services/anonymization.js +++ b/src/services/anonymization.ts @@ -1,10 +1,10 @@ -const BUILTIN_ATTRIBUTES = require('./builtin_attributes.js'); -const fs = require("fs-extra"); -const dataDir = require('./data_dir.js'); -const dateUtils = require('./date_utils.js'); -const Database = require("better-sqlite3"); -const sql = require('./sql.js'); -const path = require("path"); +import BUILTIN_ATTRIBUTES = require('./builtin_attributes'); +import fs = require("fs-extra"); +import dataDir = require('./data_dir'); +import dateUtils = require('./date_utils'); +import Database = require("better-sqlite3"); +import sql = require('./sql'); +import path = require("path"); function getFullAnonymizationScript() { // we want to delete all non-builtin attributes because they can contain sensitive names and values @@ -48,7 +48,7 @@ function getLightAnonymizationScript() { AND value != '';`; } -async function createAnonymizedCopy(type) { +async function 
createAnonymizedCopy(type: "full" | "light") { if (!['full', 'light'].includes(type)) { throw new Error(`Unrecognized anonymization type '${type}'`); } @@ -90,7 +90,7 @@ function getExistingAnonymizedDatabases() { })); } -module.exports = { +export = { getFullAnonymizationScript, createAnonymizedCopy, getExistingAnonymizedDatabases diff --git a/src/services/api-interface.ts b/src/services/api-interface.ts new file mode 100644 index 0000000000..30c2486936 --- /dev/null +++ b/src/services/api-interface.ts @@ -0,0 +1,17 @@ +import { OptionRow } from "../becca/entities/rows"; + +/** + * Response for /api/setup/status. + */ +export interface SetupStatusResponse { + syncVersion: number; + schemaExists: boolean; +} + +/** + * Response for /api/setup/sync-seed. + */ +export interface SetupSyncSeedResponse { + syncVersion: number; + options: OptionRow[] +} \ No newline at end of file diff --git a/src/services/app_icon.js b/src/services/app_icon.ts similarity index 75% rename from src/services/app_icon.js rename to src/services/app_icon.ts index 51e3191133..91f85d3e9d 100644 --- a/src/services/app_icon.js +++ b/src/services/app_icon.ts @@ -1,12 +1,12 @@ "use strict"; -const path = require('path'); -const {ELECTRON_APP_ROOT_DIR} = require('./resource_dir.js'); -const log = require('./log.js'); -const os = require('os'); -const fs = require('fs'); -const config = require('./config.js'); -const utils = require('./utils.js'); +import path = require('path'); +import resourceDir = require('./resource_dir'); +import log = require('./log'); +import os = require('os'); +import fs = require('fs'); +import config = require('./config'); +import utils = require('./utils'); const template = `[Desktop Entry] Type=Application @@ -28,7 +28,7 @@ function installLocalAppIcon() { return; } - if (!fs.existsSync(path.resolve(ELECTRON_APP_ROOT_DIR, "trilium-portable.sh"))) { + if (!fs.existsSync(path.resolve(resourceDir.ELECTRON_APP_ROOT_DIR, "trilium-portable.sh"))) { // simple heuristic to 
detect ".tar.xz" linux build (i.e., not flatpak, not debian) // only in such case it's necessary to create an icon return; @@ -56,16 +56,16 @@ function installLocalAppIcon() { function getDesktopFileContent() { return template - .replace("#APP_ROOT_DIR#", escapePath(ELECTRON_APP_ROOT_DIR)) + .replace("#APP_ROOT_DIR#", escapePath(resourceDir.ELECTRON_APP_ROOT_DIR)) .replace("#EXE_PATH#", escapePath(getExePath())); } -function escapePath(path) { +function escapePath(path: string) { return path.replace(/ /g, "\\ "); } function getExePath() { - return path.resolve(ELECTRON_APP_ROOT_DIR, 'trilium'); + return path.resolve(resourceDir.ELECTRON_APP_ROOT_DIR, 'trilium'); } module.exports = { diff --git a/src/services/app_info.js b/src/services/app_info.ts similarity index 69% rename from src/services/app_info.js rename to src/services/app_info.ts index 1d9d7f965b..92bf2dee1a 100644 --- a/src/services/app_info.js +++ b/src/services/app_info.ts @@ -1,21 +1,21 @@ "use strict"; -const build = require('./build.js'); -const packageJson = require('../../package.json'); -const {TRILIUM_DATA_DIR} = require('./data_dir.js'); +import build = require('./build'); +import packageJson = require('../../package.json'); +import dataDir = require('./data_dir'); const APP_DB_VERSION = 228; const SYNC_VERSION = 32; const CLIPPER_PROTOCOL_VERSION = "1.0"; -module.exports = { +export = { appVersion: packageJson.version, dbVersion: APP_DB_VERSION, nodeVersion: process.version, syncVersion: SYNC_VERSION, buildDate: build.buildDate, buildRevision: build.buildRevision, - dataDirectory: TRILIUM_DATA_DIR, + dataDirectory: dataDir.TRILIUM_DATA_DIR, clipperProtocolVersion: CLIPPER_PROTOCOL_VERSION, utcDateTime: new Date().toISOString() // for timezone inference }; diff --git a/src/services/app_path.js b/src/services/app_path.js deleted file mode 100644 index ab449495d7..0000000000 --- a/src/services/app_path.js +++ /dev/null @@ -1,6 +0,0 @@ -const assetPath = require('./asset_path.js'); -const env = 
require('./env.js'); - -module.exports = env.isDev() - ? assetPath + "/app" - : assetPath + "/app-dist"; diff --git a/src/services/app_path.ts b/src/services/app_path.ts new file mode 100644 index 0000000000..3bfa7de40d --- /dev/null +++ b/src/services/app_path.ts @@ -0,0 +1,6 @@ +import assetPath = require('./asset_path'); +import env = require('./env'); + +export = env.isDev() + ? assetPath + "/app" + : assetPath + "/app-dist"; diff --git a/src/services/asset_path.js b/src/services/asset_path.js deleted file mode 100644 index a32ebc553e..0000000000 --- a/src/services/asset_path.js +++ /dev/null @@ -1,3 +0,0 @@ -const packageJson = require('../../package.json'); - -module.exports = `assets/v${packageJson.version}`; diff --git a/src/services/asset_path.ts b/src/services/asset_path.ts new file mode 100644 index 0000000000..53ffebba97 --- /dev/null +++ b/src/services/asset_path.ts @@ -0,0 +1,3 @@ +import packageJson = require('../../package.json'); + +export = `assets/v${packageJson.version}`; diff --git a/src/services/attribute_formatter.js b/src/services/attribute_formatter.ts similarity index 83% rename from src/services/attribute_formatter.js rename to src/services/attribute_formatter.ts index c8a9c1de61..a7b4b75007 100644 --- a/src/services/attribute_formatter.js +++ b/src/services/attribute_formatter.ts @@ -1,6 +1,8 @@ "use strict"; -function formatAttrForSearch(attr, searchWithValue) { +import { AttributeRow } from "../becca/entities/rows"; + +function formatAttrForSearch(attr: AttributeRow, searchWithValue: boolean) { let searchStr = ''; if (attr.type === 'label') { @@ -27,7 +29,7 @@ function formatAttrForSearch(attr, searchWithValue) { return searchStr; } -function formatValue(val) { +function formatValue(val: string) { if (!/[^\w]/.test(val)) { return val; } @@ -45,6 +47,6 @@ function formatValue(val) { } } -module.exports = { +export = { formatAttrForSearch }; diff --git a/src/services/attributes.js b/src/services/attributes.ts similarity index 67% rename 
from src/services/attributes.js rename to src/services/attributes.ts index 632a6e09a7..0ae77dc362 100644 --- a/src/services/attributes.js +++ b/src/services/attributes.ts @@ -1,17 +1,18 @@ "use strict"; -const searchService = require('./search/services/search.js'); -const sql = require('./sql.js'); -const becca = require('../becca/becca.js'); -const BAttribute = require('../becca/entities/battribute.js'); -const {formatAttrForSearch} = require('./attribute_formatter.js'); -const BUILTIN_ATTRIBUTES = require('./builtin_attributes.js'); +import searchService = require('./search/services/search'); +import sql = require('./sql'); +import becca = require('../becca/becca'); +import BAttribute = require('../becca/entities/battribute'); +import attributeFormatter = require('./attribute_formatter'); +import BUILTIN_ATTRIBUTES = require('./builtin_attributes'); +import BNote = require('../becca/entities/bnote'); +import { AttributeRow } from '../becca/entities/rows'; const ATTRIBUTE_TYPES = ['label', 'relation']; -/** @returns {BNote[]} */ -function getNotesWithLabel(name, value = undefined) { - const query = formatAttrForSearch({type: 'label', name, value}, value !== undefined); +function getNotesWithLabel(name: string, value?: string): BNote[] { + const query = attributeFormatter.formatAttrForSearch({type: 'label', name, value}, value !== undefined); return searchService.searchNotes(query, { includeArchivedNotes: true, ignoreHoistedNote: true @@ -19,8 +20,7 @@ function getNotesWithLabel(name, value = undefined) { } // TODO: should be in search service -/** @returns {BNote|null} */ -function getNoteWithLabel(name, value = undefined) { +function getNoteWithLabel(name: string, value?: string): BNote | null { // optimized version (~20 times faster) without using normal search, useful for e.g., finding date notes const attrs = becca.findAttributes('label', name); @@ -39,7 +39,7 @@ function getNoteWithLabel(name, value = undefined) { return null; } -function createLabel(noteId, 
name, value = "") { +function createLabel(noteId: string, name: string, value: string = "") { return createAttribute({ noteId: noteId, type: 'label', @@ -48,7 +48,7 @@ function createLabel(noteId, name, value = "") { }); } -function createRelation(noteId, name, targetNoteId) { +function createRelation(noteId: string, name: string, targetNoteId: string) { return createAttribute({ noteId: noteId, type: 'relation', @@ -57,14 +57,14 @@ function createRelation(noteId, name, targetNoteId) { }); } -function createAttribute(attribute) { +function createAttribute(attribute: AttributeRow) { return new BAttribute(attribute).save(); } -function getAttributeNames(type, nameLike) { +function getAttributeNames(type: string, nameLike: string) { nameLike = nameLike.toLowerCase(); - let names = sql.getColumn( + let names = sql.getColumn( `SELECT DISTINCT name FROM attributes WHERE isDeleted = 0 @@ -98,11 +98,11 @@ function getAttributeNames(type, nameLike) { return names; } -function isAttributeType(type) { +function isAttributeType(type: string): boolean { return ATTRIBUTE_TYPES.includes(type); } -function isAttributeDangerous(type, name) { +function isAttributeDangerous(type: string, name: string): boolean { return BUILTIN_ATTRIBUTES.some(attr => attr.type === type && attr.name.toLowerCase() === name.trim().toLowerCase() && @@ -110,7 +110,7 @@ function isAttributeDangerous(type, name) { ); } -module.exports = { +export = { getNotesWithLabel, getNoteWithLabel, createLabel, diff --git a/src/services/auth.js b/src/services/auth.ts similarity index 65% rename from src/services/auth.js rename to src/services/auth.ts index adb5651342..d54a1ea969 100644 --- a/src/services/auth.js +++ b/src/services/auth.ts @@ -1,16 +1,27 @@ "use strict"; -const etapiTokenService = require('./etapi_tokens.js'); -const log = require('./log.js'); -const sqlInit = require('./sql_init.js'); -const utils = require('./utils.js'); -const passwordEncryptionService = require('./encryption/password_encryption.js'); 
-const config = require('./config.js'); -const passwordService = require('./encryption/password.js'); +import etapiTokenService = require('./etapi_tokens'); +import log = require('./log'); +import sqlInit = require('./sql_init'); +import utils = require('./utils'); +import passwordEncryptionService = require('./encryption/password_encryption'); +import config = require('./config'); +import passwordService = require('./encryption/password'); +import type { NextFunction, Request, Response } from 'express'; const noAuthentication = config.General && config.General.noAuthentication === true; -function checkAuth(req, res, next) { +interface AppRequest extends Request { + headers: { + authorization?: string; + "trilium-cred"?: string; + } + session: { + loggedIn: boolean; + } +} + +function checkAuth(req: AppRequest, res: Response, next: NextFunction) { if (!sqlInit.isDbInitialized()) { res.redirect("setup"); } @@ -24,7 +35,7 @@ function checkAuth(req, res, next) { // for electron things which need network stuff // currently, we're doing that for file upload because handling form data seems to be difficult -function checkApiAuthOrElectron(req, res, next) { +function checkApiAuthOrElectron(req: AppRequest, res: Response, next: NextFunction) { if (!req.session.loggedIn && !utils.isElectron() && !noAuthentication) { reject(req, res, "Logged in session not found"); } @@ -33,7 +44,7 @@ function checkApiAuthOrElectron(req, res, next) { } } -function checkApiAuth(req, res, next) { +function checkApiAuth(req: AppRequest, res: Response, next: NextFunction) { if (!req.session.loggedIn && !noAuthentication) { reject(req, res, "Logged in session not found"); } @@ -42,7 +53,7 @@ function checkApiAuth(req, res, next) { } } -function checkAppInitialized(req, res, next) { +function checkAppInitialized(req: AppRequest, res: Response, next: NextFunction) { if (!sqlInit.isDbInitialized()) { res.redirect("setup"); } @@ -51,7 +62,7 @@ function checkAppInitialized(req, res, next) { } } 
-function checkPasswordSet(req, res, next) { +function checkPasswordSet(req: AppRequest, res: Response, next: NextFunction) { if (!utils.isElectron() && !passwordService.isPasswordSet()) { res.redirect("set-password"); } else { @@ -59,7 +70,7 @@ function checkPasswordSet(req, res, next) { } } -function checkPasswordNotSet(req, res, next) { +function checkPasswordNotSet(req: AppRequest, res: Response, next: NextFunction) { if (!utils.isElectron() && passwordService.isPasswordSet()) { res.redirect("login"); } else { @@ -67,7 +78,7 @@ function checkPasswordNotSet(req, res, next) { } } -function checkAppNotInitialized(req, res, next) { +function checkAppNotInitialized(req: AppRequest, res: Response, next: NextFunction) { if (sqlInit.isDbInitialized()) { reject(req, res, "App already initialized."); } @@ -76,7 +87,7 @@ function checkAppNotInitialized(req, res, next) { } } -function checkEtapiToken(req, res, next) { +function checkEtapiToken(req: AppRequest, res: Response, next: NextFunction) { if (etapiTokenService.isValidAuthHeader(req.headers.authorization)) { next(); } @@ -85,7 +96,7 @@ function checkEtapiToken(req, res, next) { } } -function reject(req, res, message) { +function reject(req: AppRequest, res: Response, message: string) { log.info(`${req.method} ${req.path} rejected with 401 ${message}`); res.setHeader("Content-Type", "text/plain") @@ -93,7 +104,7 @@ function reject(req, res, message) { .send(message); } -function checkCredentials(req, res, next) { +function checkCredentials(req: AppRequest, res: Response, next: NextFunction) { if (!sqlInit.isDbInitialized()) { res.setHeader("Content-Type", "text/plain") .status(400) @@ -109,7 +120,7 @@ function checkCredentials(req, res, next) { } const header = req.headers['trilium-cred'] || ''; - const auth = new Buffer.from(header, 'base64').toString(); + const auth = Buffer.from(header, 'base64').toString(); const colonIndex = auth.indexOf(':'); const password = colonIndex === -1 ? 
"" : auth.substr(colonIndex + 1); // username is ignored @@ -124,7 +135,7 @@ function checkCredentials(req, res, next) { } } -module.exports = { +export = { checkAuth, checkApiAuth, checkAppInitialized, diff --git a/src/services/backend_script_api.js b/src/services/backend_script_api.ts similarity index 57% rename from src/services/backend_script_api.js rename to src/services/backend_script_api.ts index 5abf704edc..34767ae75d 100644 --- a/src/services/backend_script_api.js +++ b/src/services/backend_script_api.ts @@ -1,27 +1,39 @@ -const log = require('./log.js'); -const noteService = require('./notes.js'); -const sql = require('./sql.js'); -const utils = require('./utils.js'); -const attributeService = require('./attributes.js'); -const dateNoteService = require('./date_notes.js'); -const treeService = require('./tree.js'); -const config = require('./config.js'); -const axios = require('axios'); -const dayjs = require('dayjs'); -const xml2js = require('xml2js'); -const cloningService = require('./cloning.js'); -const appInfo = require('./app_info.js'); -const searchService = require('./search/services/search.js'); -const SearchContext = require('./search/search_context.js'); -const becca = require('../becca/becca.js'); -const ws = require('./ws.js'); -const SpacedUpdate = require('./spaced_update.js'); -const specialNotesService = require('./special_notes.js'); -const branchService = require('./branches.js'); -const exportService = require('./export/zip.js'); -const syncMutex = require('./sync_mutex.js'); -const backupService = require('./backup.js'); -const optionsService = require('./options.js'); +import log = require('./log'); +import noteService = require('./notes'); +import sql = require('./sql'); +import utils = require('./utils'); +import attributeService = require('./attributes'); +import dateNoteService = require('./date_notes'); +import treeService = require('./tree'); +import config = require('./config'); +import axios = require('axios'); +import dayjs 
= require('dayjs'); +import xml2js = require('xml2js'); +import cloningService = require('./cloning'); +import appInfo = require('./app_info'); +import searchService = require('./search/services/search'); +import SearchContext = require('./search/search_context'); +import becca = require('../becca/becca'); +import ws = require('./ws'); +import SpacedUpdate = require('./spaced_update'); +import specialNotesService = require('./special_notes'); +import branchService = require('./branches'); +import exportService = require('./export/zip'); +import syncMutex = require('./sync_mutex'); +import backupService = require('./backup'); +import optionsService = require('./options'); +import BNote = require('../becca/entities/bnote'); +import AbstractBeccaEntity = require('../becca/entities/abstract_becca_entity'); +import BBranch = require('../becca/entities/bbranch'); +import BAttribute = require('../becca/entities/battribute'); +import BAttachment = require('../becca/entities/battachment'); +import BRevision = require('../becca/entities/brevision'); +import BEtapiToken = require('../becca/entities/betapi_token'); +import BOption = require('../becca/entities/boption'); +import { AttributeRow, AttributeType, NoteType } from '../becca/entities/rows'; +import Becca from '../becca/becca-interface'; +import { NoteParams } from './note-interface'; +import { ApiParams } from './backend_script_api_interface'; /** @@ -35,231 +47,399 @@ const optionsService = require('./options.js'); * @var {BackendScriptApi} api */ -/** - *

This is the main backend API interface for scripts. All the properties and methods are published in the "api" object - * available in the JS backend notes. You can use e.g. api.log(api.startNote.title);

- * - * @constructor - */ -function BackendScriptApi(currentNote, apiParams) { +interface SearchParams { + includeArchivedNotes?: boolean; + ignoreHoistedNote?: boolean; +} + +interface NoteAndBranch { + note: BNote; + /** object having "note" and "branch" keys representing respective objects */ + branch: BBranch; +} + +interface Api { /** * Note where the script started executing (entrypoint). * As an analogy, in C this would be the file which contains the main() function of the current process. - * @type {BNote} */ - this.startNote = apiParams.startNote; + startNote?: BNote; + /** * Note where the script is currently executing. This comes into play when your script is spread in multiple code * notes, the script starts in "startNote", but then through function calls may jump into another note (currentNote). * A similar concept in C would be __FILE__ * Don't mix this up with the concept of active note. - * @type {BNote} */ - this.currentNote = currentNote; + currentNote: BNote; + /** * Entity whose event triggered this execution - * @type {AbstractBeccaEntity} */ - this.originEntity = apiParams.originEntity; - - for (const key in apiParams) { - this[key] = apiParams[key]; - } - + originEntity?: AbstractBeccaEntity; + /** * Axios library for HTTP requests. See {@link https://axios-http.com} for documentation * @type {axios} * @deprecated use native (browser compatible) fetch() instead */ - this.axios = axios; + axios: typeof axios; + /** * day.js library for date manipulation. See {@link https://day.js.org} for documentation - * @type {dayjs} */ - this.dayjs = dayjs; + dayjs: typeof dayjs; + /** * xml2js library for XML parsing. See {@link https://github.com/Leonidas-from-XIV/node-xml2js} for documentation - * @type {xml2js} */ - this.xml2js = xml2js; + xml2js: typeof xml2js; + /** * Instance name identifies particular Trilium instance. It can be useful for scripts * if some action needs to happen on only one specific instance. 
+ */ + getInstanceName(): string | null; + + getNote(noteId: string): BNote | null; + getBranch(branchId: string): BBranch | null; + getAttribute(attachmentId: string): BAttribute | null; + getAttachment(attachmentId: string): BAttachment | null; + getRevision(revisionId: string): BRevision | null; + getEtapiToken(etapiTokenId: string): BEtapiToken | null; + getEtapiTokens(): BEtapiToken[]; + getOption(optionName: string): BOption | null; + getOptions(): BOption[]; + getAttribute(attributeId: string): BAttribute | null; + + /** + * This is a powerful search method - you can search by attributes and their values, e.g.: + * "#dateModified =* MONTH AND #log". See {@link https://github.com/zadam/trilium/wiki/Search} for full documentation for all options + */ + searchForNotes(query: string, searchParams: SearchParams): BNote[]; + + /** + * This is a powerful search method - you can search by attributes and their values, e.g.: + * "#dateModified =* MONTH AND #log". See {@link https://github.com/zadam/trilium/wiki/Search} for full documentation for all options * - * @returns {string|null} + * @param {string} query + * @param {Object} [searchParams] */ - this.getInstanceName = () => config.General ? config.General.instanceName : null; + searchForNote(query: string, searchParams: SearchParams): BNote | null; + + /** + * Retrieves notes with given label name & value + * + * @param name - attribute name + * @param value - attribute value + */ + getNotesWithLabel(name: string, value?: string): BNote[]; /** - * @method - * @param {string} noteId - * @returns {BNote|null} + * Retrieves first note with given label name & value + * + * @param name - attribute name + * @param value - attribute value */ - this.getNote = noteId => becca.getNote(noteId); + getNoteWithLabel(name: string, value?: string): BNote | null; /** - * @method - * @param {string} branchId - * @returns {BBranch|null} + * If there's no branch between note and parent note, create one. Otherwise, do nothing. 
Returns the new or existing branch.
+     *
+     * @param prefix - if branch is created between note and parent note, set this prefix
      */
-    this.getBranch = branchId => becca.getBranch(branchId);
+    ensureNoteIsPresentInParent(noteId: string, parentNoteId: string, prefix: string): {
+        branch: BBranch | null
+    };
 
     /**
-     * @method
-     * @param {string} attributeId
-     * @returns {BAttribute|null}
+     * If there's a branch between note and parent note, remove it. Otherwise, do nothing.
      */
-    this.getAttribute = attributeId => becca.getAttribute(attributeId);
+    ensureNoteIsAbsentFromParent(noteId: string, parentNoteId: string): void;
 
     /**
-     * @method
-     * @param {string} attachmentId
-     * @returns {BAttachment|null}
+     * Based on the value, either create or remove branch between note and parent note.
+     *
+     * @param present - true if we want the branch to exist, false if we want it gone
+     * @param prefix - if branch is created between note and parent note, set this prefix
      */
-    this.getAttachment = attachmentId => becca.getAttachment(attachmentId);
+    toggleNoteInParent(present: boolean, noteId: string, parentNoteId: string, prefix: string): void;
 
     /**
-     * @method
-     * @param {string} revisionId
-     * @returns {BRevision|null}
+     * Create text note. See also createNewNote() for more options.
      */
-    this.getRevision = revisionId => becca.getRevision(revisionId);
+    createTextNote(parentNoteId: string, title: string, content: string): NoteAndBranch;
 
     /**
-     * @method
-     * @param {string} etapiTokenId
-     * @returns {BEtapiToken|null}
+     * Create data note - data in this context means object serializable to JSON. Created note will be of type 'code' and
+     * JSON MIME type. See also createNewNote() for more options.
 */
-    this.getEtapiToken = etapiTokenId => becca.getEtapiToken(etapiTokenId);
+    createDataNote(parentNoteId: string, title: string, content: {}): NoteAndBranch;
 
     /**
-     * @method
-     * @returns {BEtapiToken[]}
+     * @returns object contains newly created entities note and branch
      */
-    this.getEtapiTokens = () => becca.getEtapiTokens();
+    createNewNote(params: NoteParams): NoteAndBranch;
 
     /**
-     * @method
-     * @param {string} optionName
-     * @returns {BOption|null}
+     * @deprecated please use createTextNote() with similar API for simpler use cases or createNewNote() for more complex needs
+     * @param parentNoteId - create new note under this parent
+     * @returns object contains newly created entities note and branch
      */
-    this.getOption = optionName => becca.getOption(optionName);
+    createNote(parentNoteId: string, title: string, content: string, extraOptions: Omit<NoteParams, "title" | "content" | "type" | "parentNoteId"> & {
+        /** should the note be JSON */
+        json?: boolean;
+        attributes?: AttributeRow[]
+    }): NoteAndBranch;
+
+    logMessages: Record<string, string[]>;
+    logSpacedUpdates: Record<string, SpacedUpdate>;
 
     /**
-     * @method
-     * @returns {BOption[]}
+     * Log given message to trilium logs and log pane in UI
      */
-    this.getOptions = () => optionsService.getOptions();
+    log(message: string): void;
 
     /**
-     * @method
-     * @param {string} attributeId
-     * @returns {BAttribute|null}
+     * Returns root note of the calendar.
      */
-    this.getAttribute = attributeId => becca.getAttribute(attributeId);
+    getRootCalendarNote(): BNote | null;
 
     /**
-     * This is a powerful search method - you can search by attributes and their values, e.g.:
-     * "#dateModified =* MONTH AND #log". See {@link https://github.com/zadam/trilium/wiki/Search} for full documentation for all options
+     * Returns day note for given date. If such note doesn't exist, it is created.
* * @method - * @param {string} query - * @param {Object} [searchParams] - * @returns {BNote[]} + * @param date in YYYY-MM-DD format + * @param rootNote - specify calendar root note, normally leave empty to use the default calendar */ - this.searchForNotes = (query, searchParams = {}) => { - if (searchParams.includeArchivedNotes === undefined) { - searchParams.includeArchivedNotes = true; - } + getDayNote(date: string, rootNote?: BNote): BNote | null; - if (searchParams.ignoreHoistedNote === undefined) { - searchParams.ignoreHoistedNote = true; - } + /** + * Returns today's day note. If such note doesn't exist, it is created. + * + * @param rootNote specify calendar root note, normally leave empty to use the default calendar + */ + getTodayNote(rootNote?: BNote): BNote | null; - const noteIds = searchService.findResultsWithQuery(query, new SearchContext(searchParams)) - .map(sr => sr.noteId); + /** + * Returns note for the first date of the week of the given date. + * + * @param date in YYYY-MM-DD format + * @param rootNote - specify calendar root note, normally leave empty to use the default calendar + */ + getWeekNote(date: string, options: { + // TODO: Deduplicate type with date_notes.ts once ES modules are added. + /** either "monday" (default) or "sunday" */ + startOfTheWeek: "monday" | "sunday"; + }, rootNote: BNote): BNote | null; - return becca.getNotes(noteIds); - }; + /** + * Returns month note for given date. If such a note doesn't exist, it is created. + * + * @param date in YYYY-MM format + * @param rootNote - specify calendar root note, normally leave empty to use the default calendar + */ + getMonthNote(date: string, rootNote: BNote): BNote | null; /** - * This is a powerful search method - you can search by attributes and their values, e.g.: - * "#dateModified =* MONTH AND #log". See {@link https://github.com/zadam/trilium/wiki/Search} for full documentation for all options + * Returns year note for given year. 
If such a note doesn't exist, it is created. * - * @method - * @param {string} query - * @param {Object} [searchParams] - * @returns {BNote|null} + * @param year in YYYY format + * @param rootNote - specify calendar root note, normally leave empty to use the default calendar */ - this.searchForNote = (query, searchParams = {}) => { - const notes = this.searchForNotes(query, searchParams); + getYearNote(year: string, rootNote?: BNote): BNote | null; - return notes.length > 0 ? notes[0] : null; - }; + /** + * Sort child notes of a given note. + */ + sortNotes(parentNoteId: string, sortConfig: { + /** 'title', 'dateCreated', 'dateModified' or a label name + * See {@link https://github.com/zadam/trilium/wiki/Sorting} for details. */ + sortBy?: string; + reverse?: boolean; + foldersFirst?: boolean; + }): void; /** - * Retrieves notes with given label name & value + * This method finds note by its noteId and prefix and either sets it to the given parentNoteId + * or removes the branch (if parentNoteId is not given). * - * @method - * @param {string} name - attribute name - * @param {string} [value] - attribute value - * @returns {BNote[]} + * This method looks similar to toggleNoteInParent() but differs because we're looking up branch by prefix. + * + * @deprecated this method is pretty confusing and serves specialized purpose only */ - this.getNotesWithLabel = attributeService.getNotesWithLabel; + setNoteToParent(noteId: string, prefix: string, parentNoteId: string | null): void; /** - * Retrieves first note with given label name & value + * This functions wraps code which is supposed to be running in transaction. If transaction already + * exists, then we'll use that transaction. 
* - * @method - * @param {string} name - attribute name - * @param {string} [value] - attribute value - * @returns {BNote|null} + * @param func + * @returns result of func callback */ - this.getNoteWithLabel = attributeService.getNoteWithLabel; + transactional(func: () => void): any; /** - * If there's no branch between note and parent note, create one. Otherwise, do nothing. Returns the new or existing branch. + * Return randomly generated string of given length. This random string generation is NOT cryptographically secure. * - * @method - * @param {string} noteId - * @param {string} parentNoteId - * @param {string} prefix - if branch is created between note and parent note, set this prefix - * @returns {{branch: BBranch|null}} + * @param length of the string + * @returns random string */ - this.ensureNoteIsPresentInParent = cloningService.ensureNoteIsPresentInParent; + randomString(length: number): string; /** - * If there's a branch between note and parent note, remove it. Otherwise, do nothing. - * - * @method - * @param {string} noteId - * @param {string} parentNoteId - * @returns {void} + * @param to escape + * @returns escaped string */ - this.ensureNoteIsAbsentFromParent = cloningService.ensureNoteIsAbsentFromParent; + escapeHtml(string: string): string; /** - * Based on the value, either create or remove branch between note and parent note. + * @param string to unescape + * @returns unescaped string + */ + unescapeHtml(string: string): string; + + /** + * sql + * @type {module:sql} + */ + sql: any; + + getAppInfo(): typeof appInfo; + + /** + * Creates a new launcher to the launchbar. If the launcher (id) already exists, it will be updated. 
+ */ + createOrUpdateLauncher(opts: { + /** id of the launcher, only alphanumeric at least 6 characters long */ + id: string; + /** one of + * - "note" - activating the launcher will navigate to the target note (specified in targetNoteId param) + * - "script" - activating the launcher will execute the script (specified in scriptNoteId param) + * - "customWidget" - the launcher will be rendered with a custom widget (specified in widgetNoteId param) + */ + type: "note" | "script" | "customWidget"; + title: string; + /** if true, will be created in the "Visible launchers", otherwise in "Available launchers" */ + isVisible: boolean; + /** name of the boxicon to be used (e.g. "bx-time") */ + icon: string; + /** will activate the target note/script upon pressing, e.g. "ctrl+e" */ + keyboardShortcut: string; + /** for type "note" */ + targetNoteId: string; + /** for type "script" */ + scriptNoteId: string; + /** for type "customWidget" */ + widgetNoteId?: string; + }): { note: BNote }; + + /** + * @param format - either 'html' or 'markdown' + */ + exportSubtreeToZipFile(noteId: string, format: "markdown" | "html", zipFilePath: string): Promise; + + /** + * Executes given anonymous function on the frontend(s). + * Internally, this serializes the anonymous function into string and sends it to frontend(s) via WebSocket. + * Note that there can be multiple connected frontend instances (e.g. in different tabs). In such case, all + * instances execute the given function. * - * @method - * @param {boolean} present - true if we want the branch to exist, false if we want it gone - * @param {string} noteId - * @param {string} parentNoteId - * @param {string} prefix - if branch is created between note and parent note, set this prefix - * @returns {void} + * @param script - script to be executed on the frontend + * @param params - list of parameters to the anonymous function to be sent to frontend + * @returns no return value is provided. 
*/ - this.toggleNoteInParent = cloningService.toggleNoteInParent; + runOnFrontend(script: () => void | string, params: []): void; /** - * Create text note. See also createNewNote() for more options. + * Sync process can make data intermittently inconsistent. Scripts which require strong data consistency + * can use this function to wait for a possible sync process to finish and prevent new sync process from starting + * while it is running. * - * @method - * @param {string} parentNoteId - * @param {string} title - * @param {string} content - * @returns {{note: BNote, branch: BBranch}} - object having "note" and "branch" keys representing respective objects + * Because this is an async process, the inner callback doesn't have automatic transaction handling, so in case + * you need to make some DB changes, you need to surround your call with api.transactional(...) + * + * @param callback - function to be executed while sync process is not running + * @returns resolves once the callback is finished (callback is awaited) + */ + runOutsideOfSync(callback: () => void): Promise; + + /** + * @param backupName - If the backupName is e.g. "now", then the backup will be written to "backup-now.db" file + * @returns resolves once the backup is finished */ + backupNow(backupName: string): Promise; + + /** + * This object contains "at your risk" and "no BC guarantees" objects for advanced use cases. + */ + __private: { + /** provides access to the backend in-memory object graph, see {@link https://github.com/zadam/trilium/blob/master/src/becca/becca.js} */ + becca: Becca; + }; +} + +// TODO: Convert to class. +/** + *

This is the main backend API interface for scripts. All the properties and methods are published in the "api" object + * available in the JS backend notes. You can use e.g. api.log(api.startNote.title);

+ * + * @constructor + */ +function BackendScriptApi(this: Api, currentNote: BNote, apiParams: ApiParams) { + this.startNote = apiParams.startNote; + + this.currentNote = currentNote; + + this.originEntity = apiParams.originEntity; + + for (const key in apiParams) { + (this as any)[key] = apiParams[key as keyof ApiParams]; + } + + this.axios = axios; + this.dayjs = dayjs; + this.xml2js = xml2js; + this.getInstanceName = () => config.General ? config.General.instanceName : null; + this.getNote = noteId => becca.getNote(noteId); + this.getBranch = branchId => becca.getBranch(branchId); + this.getAttribute = attributeId => becca.getAttribute(attributeId); + this.getAttachment = attachmentId => becca.getAttachment(attachmentId); + this.getRevision = revisionId => becca.getRevision(revisionId); + this.getEtapiToken = etapiTokenId => becca.getEtapiToken(etapiTokenId); + this.getEtapiTokens = () => becca.getEtapiTokens(); + this.getOption = optionName => becca.getOption(optionName); + this.getOptions = () => optionsService.getOptions(); + this.getAttribute = attributeId => becca.getAttribute(attributeId); + + this.searchForNotes = (query, searchParams = {}) => { + if (searchParams.includeArchivedNotes === undefined) { + searchParams.includeArchivedNotes = true; + } + + if (searchParams.ignoreHoistedNote === undefined) { + searchParams.ignoreHoistedNote = true; + } + + const noteIds = searchService.findResultsWithQuery(query, new SearchContext(searchParams)) + .map(sr => sr.noteId); + + return becca.getNotes(noteIds); + }; + + + this.searchForNote = (query, searchParams = {}) => { + const notes = this.searchForNotes(query, searchParams); + + return notes.length > 0 ? 
notes[0] : null; + }; + + this.getNotesWithLabel = attributeService.getNotesWithLabel; + this.getNoteWithLabel = attributeService.getNoteWithLabel; + this.ensureNoteIsPresentInParent = cloningService.ensureNoteIsPresentInParent; + this.ensureNoteIsAbsentFromParent = cloningService.ensureNoteIsAbsentFromParent; + this.toggleNoteInParent = cloningService.toggleNoteInParent; this.createTextNote = (parentNoteId, title, content = '') => noteService.createNewNote({ parentNoteId, title, @@ -267,16 +447,6 @@ function BackendScriptApi(currentNote, apiParams) { type: 'text' }); - /** - * Create data note - data in this context means object serializable to JSON. Created note will be of type 'code' and - * JSON MIME type. See also createNewNote() for more options. - * - * @method - * @param {string} parentNoteId - * @param {string} title - * @param {object} content - * @returns {{note: BNote, branch: BBranch}} object having "note" and "branch" keys representing respective objects - */ this.createDataNote = (parentNoteId, title, content = {}) => noteService.createNewNote({ parentNoteId, title, @@ -284,53 +454,28 @@ function BackendScriptApi(currentNote, apiParams) { type: 'code', mime: 'application/json' }); - - /** - * @method - * - * @param {object} params - * @param {string} params.parentNoteId - * @param {string} params.title - * @param {string|Buffer} params.content - * @param {NoteType} params.type - text, code, file, image, search, book, relationMap, canvas, webView - * @param {string} [params.mime] - value is derived from default mimes for type - * @param {boolean} [params.isProtected=false] - * @param {boolean} [params.isExpanded=false] - * @param {string} [params.prefix=''] - * @param {int} [params.notePosition] - default is last existing notePosition in a parent + 10 - * @returns {{note: BNote, branch: BBranch}} object contains newly created entities note and branch - */ + this.createNewNote = noteService.createNewNote; - - /** - * @method - * @deprecated please use 
createTextNote() with similar API for simpler use cases or createNewNote() for more complex needs
-     *
-     * @param {string} parentNoteId - create new note under this parent
-     * @param {string} title
-     * @param {string} [content=""]
-     * @param {object} [extraOptions={}]
-     * @param {boolean} [extraOptions.json=false] - should the note be JSON
-     * @param {boolean} [extraOptions.isProtected=false] - should the note be protected
-     * @param {string} [extraOptions.type='text'] - note type
-     * @param {string} [extraOptions.mime='text/html'] - MIME type of the note
-     * @param {object[]} [extraOptions.attributes=[]] - attributes to be created for this note
-     * @param {AttributeType} extraOptions.attributes.type - attribute type - label, relation etc.
-     * @param {string} extraOptions.attributes.name - attribute name
-     * @param {string} [extraOptions.attributes.value] - attribute value
-     * @returns {{note: BNote, branch: BBranch}} object contains newly created entities note and branch
-     */
-    this.createNote = (parentNoteId, title, content = "", extraOptions= {}) => {
-        extraOptions.parentNoteId = parentNoteId;
-        extraOptions.title = title;
-
+
+    this.createNote = (parentNoteId, title, content = "", _extraOptions = {}) => {
         const parentNote = becca.getNote(parentNoteId);
+        if (!parentNote) {
+            throw new Error(`Unable to find parent note with ID ${parentNoteId}.`);
+        }
+
+        let extraOptions: NoteParams = {
+            ..._extraOptions,
+            content: "",
+            type: "text",
+            parentNoteId,
+            title
+        };
 
         // code note type can be inherited, otherwise "text" is the default
         extraOptions.type = parentNote.type === 'code' ? 'code' : 'text';
         extraOptions.mime = parentNote.type === 'code' ? 
parentNote.mime : 'text/html'; - if (extraOptions.json) { + if (_extraOptions.json) { extraOptions.content = JSON.stringify(content || {}, null, '\t'); extraOptions.type = 'code'; extraOptions.mime = 'application/json'; @@ -340,9 +485,9 @@ function BackendScriptApi(currentNote, apiParams) { } return sql.transactional(() => { - const {note, branch} = noteService.createNewNote(extraOptions); + const { note, branch } = noteService.createNewNote(extraOptions); - for (const attr of extraOptions.attributes || []) { + for (const attr of _extraOptions.attributes || []) { attributeService.createAttribute({ noteId: note.noteId, type: attr.type, @@ -352,24 +497,21 @@ function BackendScriptApi(currentNote, apiParams) { }); } - return {note, branch}; + return { note, branch }; }); }; this.logMessages = {}; this.logSpacedUpdates = {}; - - /** - * Log given message to trilium logs and log pane in UI - * - * @method - * @param message - * @returns {void} - */ + this.log = message => { log.info(message); - const {noteId} = this.startNote; + if (!this.startNote) { + return; + } + + const { noteId } = this.startNote; this.logMessages[noteId] = this.logMessages[noteId] || []; this.logSpacedUpdates[noteId] = this.logSpacedUpdates[noteId] || new SpacedUpdate(() => { @@ -387,77 +529,13 @@ function BackendScriptApi(currentNote, apiParams) { this.logSpacedUpdates[noteId].scheduleUpdate(); }; - /** - * Returns root note of the calendar. - * - * @method - * @returns {BNote|null} - */ this.getRootCalendarNote = dateNoteService.getRootCalendarNote; - - /** - * Returns day note for given date. If such note doesn't exist, it is created. - * - * @method - * @param {string} date in YYYY-MM-DD format - * @param {BNote} [rootNote] - specify calendar root note, normally leave empty to use the default calendar - * @returns {BNote|null} - */ this.getDayNote = dateNoteService.getDayNote; - - /** - * Returns today's day note. If such note doesn't exist, it is created. 
- * - * @method - * @param {BNote} [rootNote] - specify calendar root note, normally leave empty to use the default calendar - * @returns {BNote|null} - */ this.getTodayNote = dateNoteService.getTodayNote; - - /** - * Returns note for the first date of the week of the given date. - * - * @method - * @param {string} date in YYYY-MM-DD format - * @param {object} [options] - * @param {string} [options.startOfTheWeek=monday] - either "monday" (default) or "sunday" - * @param {BNote} [rootNote] - specify calendar root note, normally leave empty to use the default calendar - * @returns {BNote|null} - */ this.getWeekNote = dateNoteService.getWeekNote; - - /** - * Returns month note for given date. If such a note doesn't exist, it is created. - * - * @method - * @param {string} date in YYYY-MM format - * @param {BNote} [rootNote] - specify calendar root note, normally leave empty to use the default calendar - * @returns {BNote|null} - */ this.getMonthNote = dateNoteService.getMonthNote; - - /** - * Returns year note for given year. If such a note doesn't exist, it is created. - * - * @method - * @param {string} year in YYYY format - * @param {BNote} [rootNote] - specify calendar root note, normally leave empty to use the default calendar - * @returns {BNote|null} - */ this.getYearNote = dateNoteService.getYearNote; - /** - * Sort child notes of a given note. - * - * @method - * @param {string} parentNoteId - this note's child notes will be sorted - * @param {object} [sortConfig] - * @param {string} [sortConfig.sortBy=title] - 'title', 'dateCreated', 'dateModified' or a label name - * See {@link https://github.com/zadam/trilium/wiki/Sorting} for details. 
- * @param {boolean} [sortConfig.reverse=false] - * @param {boolean} [sortConfig.foldersFirst=false] - * @returns {void} - */ this.sortNotes = (parentNoteId, sortConfig = {}) => treeService.sortNotes( parentNoteId, sortConfig.sortBy || "title", @@ -465,85 +543,15 @@ function BackendScriptApi(currentNote, apiParams) { !!sortConfig.foldersFirst ); - /** - * This method finds note by its noteId and prefix and either sets it to the given parentNoteId - * or removes the branch (if parentNoteId is not given). - * - * This method looks similar to toggleNoteInParent() but differs because we're looking up branch by prefix. - * - * @method - * @deprecated this method is pretty confusing and serves specialized purpose only - * @param {string} noteId - * @param {string} prefix - * @param {string|null} parentNoteId - * @returns {void} - */ this.setNoteToParent = treeService.setNoteToParent; - - /** - * This functions wraps code which is supposed to be running in transaction. If transaction already - * exists, then we'll use that transaction. - * - * @method - * @param {function} func - * @returns {any} result of func callback - */ this.transactional = sql.transactional; - - /** - * Return randomly generated string of given length. This random string generation is NOT cryptographically secure. 
- * - * @method - * @param {int} length of the string - * @returns {string} random string - */ this.randomString = utils.randomString; - - /** - * @method - * @param {string} string to escape - * @returns {string} escaped string - */ this.escapeHtml = utils.escapeHtml; - - /** - * @method - * @param {string} string to unescape - * @returns {string} unescaped string - */ this.unescapeHtml = utils.unescapeHtml; - - /** - * sql - * @type {module:sql} - */ this.sql = sql; - - /** - * @method - * @returns {{syncVersion, appVersion, buildRevision, dbVersion, dataDirectory, buildDate}|*} - object representing basic info about running Trilium version - */ this.getAppInfo = () => appInfo; - /** - * Creates a new launcher to the launchbar. If the launcher (id) already exists, it will be updated. - * - * @method - * @param {object} opts - * @param {string} opts.id - id of the launcher, only alphanumeric at least 6 characters long - * @param {"note" | "script" | "customWidget"} opts.type - one of - * * "note" - activating the launcher will navigate to the target note (specified in targetNoteId param) - * * "script" - activating the launcher will execute the script (specified in scriptNoteId param) - * * "customWidget" - the launcher will be rendered with a custom widget (specified in widgetNoteId param) - * @param {string} opts.title - * @param {boolean} [opts.isVisible=false] - if true, will be created in the "Visible launchers", otherwise in "Available launchers" - * @param {string} [opts.icon] - name of the boxicon to be used (e.g. "bx-time") - * @param {string} [opts.keyboardShortcut] - will activate the target note/script upon pressing, e.g. 
"ctrl+e" - * @param {string} [opts.targetNoteId] - for type "note" - * @param {string} [opts.scriptNoteId] - for type "script" - * @param {string} [opts.widgetNoteId] - for type "customWidget" - * @returns {{note: BNote}} - */ + this.createOrUpdateLauncher = opts => { if (!opts.id) { throw new Error("ID is a mandatory parameter for api.createOrUpdateLauncher(opts)"); } if (!opts.id.match(/[a-z0-9]{6,1000}/i)) { throw new Error(`ID must be an alphanumeric string at least 6 characters long.`); } @@ -600,45 +608,30 @@ function BackendScriptApi(currentNote, apiParams) { launcherNote.removeLabel('iconClass'); } - return {note: launcherNote}; + return { note: launcherNote }; }; - /** - * @method - * @param {string} noteId - * @param {string} format - either 'html' or 'markdown' - * @param {string} zipFilePath - * @returns {Promise} - */ this.exportSubtreeToZipFile = async (noteId, format, zipFilePath) => await exportService.exportToZipFile(noteId, format, zipFilePath); - /** - * Executes given anonymous function on the frontend(s). - * Internally, this serializes the anonymous function into string and sends it to frontend(s) via WebSocket. - * Note that there can be multiple connected frontend instances (e.g. in different tabs). In such case, all - * instances execute the given function. - * - * @method - * @param {string} script - script to be executed on the frontend - * @param {Array.} params - list of parameters to the anonymous function to be sent to frontend - * @returns {undefined} - no return value is provided. 
- */ - this.runOnFrontend = async (script, params = []) => { - if (typeof script === "function") { - script = script.toString(); + this.runOnFrontend = async (_script, params = []) => { + let script: string; + if (typeof _script === "string") { + script = _script; + } else { + script = _script.toString(); } ws.sendMessageToAllClients({ type: 'execute-script', script: script, params: prepareParams(params), - startNoteId: this.startNote.noteId, + startNoteId: this.startNote?.noteId, currentNoteId: this.currentNote.noteId, originEntityName: "notes", // currently there's no other entity on the frontend which can trigger event - originEntityId: this.originEntity?.noteId || null + originEntityId: (this.originEntity && "noteId" in this.originEntity && (this.originEntity as BNote)?.noteId) || null }); - function prepareParams(params) { + function prepareParams(params: any[]) { if (!params) { return params; } @@ -653,36 +646,15 @@ function BackendScriptApi(currentNote, apiParams) { }); } }; - - /** - * Sync process can make data intermittently inconsistent. Scripts which require strong data consistency - * can use this function to wait for a possible sync process to finish and prevent new sync process from starting - * while it is running. - * - * Because this is an async process, the inner callback doesn't have automatic transaction handling, so in case - * you need to make some DB changes, you need to surround your call with api.transactional(...) - * - * @method - * @param {function} callback - function to be executed while sync process is not running - * @returns {Promise} - resolves once the callback is finished (callback is awaited) - */ + this.runOutsideOfSync = syncMutex.doExclusively; - - /** - * @method - * @param {string} backupName - If the backupName is e.g. 
"now", then the backup will be written to "backup-now.db" file - * @returns {Promise} - resolves once the backup is finished - */ this.backupNow = backupService.backupNow; - - /** - * This object contains "at your risk" and "no BC guarantees" objects for advanced use cases. - * - * @property {Becca} becca - provides access to the backend in-memory object graph, see {@link https://github.com/zadam/trilium/blob/master/src/becca/becca.js} - */ + this.__private = { becca } } -module.exports = BackendScriptApi; +export = BackendScriptApi as any as { + new (currentNote: BNote, apiParams: ApiParams): Api +}; diff --git a/src/services/backend_script_api_interface.ts b/src/services/backend_script_api_interface.ts new file mode 100644 index 0000000000..7031d1bc3f --- /dev/null +++ b/src/services/backend_script_api_interface.ts @@ -0,0 +1,7 @@ +import AbstractBeccaEntity = require("../becca/entities/abstract_becca_entity"); +import BNote = require("../becca/entities/bnote"); + +export interface ApiParams { + startNote?: BNote; + originEntity?: AbstractBeccaEntity; +} \ No newline at end of file diff --git a/src/services/backup.js b/src/services/backup.ts similarity index 74% rename from src/services/backup.js rename to src/services/backup.ts index ef36d91cf1..3000c9c9a8 100644 --- a/src/services/backup.js +++ b/src/services/backup.ts @@ -1,14 +1,16 @@ "use strict"; -const dateUtils = require('./date_utils.js'); -const optionService = require('./options.js'); -const fs = require('fs-extra'); -const dataDir = require('./data_dir.js'); -const log = require('./log.js'); -const syncMutexService = require('./sync_mutex.js'); -const cls = require('./cls.js'); -const sql = require('./sql.js'); -const path = require('path'); +import dateUtils = require('./date_utils'); +import optionService = require('./options'); +import fs = require('fs-extra'); +import dataDir = require('./data_dir'); +import log = require('./log'); +import syncMutexService = require('./sync_mutex'); +import cls = 
require('./cls'); +import sql = require('./sql'); +import path = require('path'); + +type BackupType = ("daily" | "weekly" | "monthly"); function getExistingBackups() { if (!fs.existsSync(dataDir.BACKUP_DIR)) { @@ -35,13 +37,13 @@ function regularBackup() { }); } -function isBackupEnabled(backupType) { +function isBackupEnabled(backupType: BackupType) { const optionName = `${backupType}BackupEnabled`; return optionService.getOptionBool(optionName); } -function periodBackup(optionName, backupType, periodInSeconds) { +function periodBackup(optionName: string, backupType: BackupType, periodInSeconds: number) { if (!isBackupEnabled(backupType)) { return; } @@ -56,7 +58,7 @@ function periodBackup(optionName, backupType, periodInSeconds) { } } -async function backupNow(name) { +async function backupNow(name: string) { // we don't want to back up DB in the middle of sync with potentially inconsistent DB state return await syncMutexService.doExclusively(async () => { const backupFile = `${dataDir.BACKUP_DIR}/backup-${name}.db`; @@ -73,7 +75,7 @@ if (!fs.existsSync(dataDir.BACKUP_DIR)) { fs.mkdirSync(dataDir.BACKUP_DIR, 0o700); } -module.exports = { +export = { getExistingBackups, backupNow, regularBackup diff --git a/src/services/blob-interface.ts b/src/services/blob-interface.ts new file mode 100644 index 0000000000..8bfcf13225 --- /dev/null +++ b/src/services/blob-interface.ts @@ -0,0 +1,5 @@ +export interface Blob { + blobId: string; + content: string | Buffer; + utcDateModified: string; +} \ No newline at end of file diff --git a/src/services/blob.js b/src/services/blob.ts similarity index 68% rename from src/services/blob.js rename to src/services/blob.ts index 73f972a36c..fac1adfadc 100644 --- a/src/services/blob.js +++ b/src/services/blob.ts @@ -1,9 +1,10 @@ -const becca = require('../becca/becca.js'); -const NotFoundError = require('../errors/not_found_error.js'); -const protectedSessionService = require('./protected_session.js'); -const utils = 
require('./utils.js'); +import becca = require('../becca/becca'); +import NotFoundError = require('../errors/not_found_error'); +import protectedSessionService = require('./protected_session'); +import utils = require('./utils'); +import type { Blob } from "./blob-interface"; -function getBlobPojo(entityName, entityId) { +function getBlobPojo(entityName: string, entityId: string) { const entity = becca.getEntity(entityName, entityId); if (!entity) { throw new NotFoundError(`Entity ${entityName} '${entityId}' was not found.`); @@ -19,13 +20,13 @@ function getBlobPojo(entityName, entityId) { if (!entity.hasStringContent()) { pojo.content = null; } else { - pojo.content = processContent(pojo.content, entity.isProtected, true); + pojo.content = processContent(pojo.content, !!entity.isProtected, true); } return pojo; } -function processContent(content, isProtected, isStringContent) { +function processContent(content: Buffer | string | null, isProtected: boolean, isStringContent: boolean) { if (isProtected) { if (protectedSessionService.isProtectedSessionAvailable()) { content = content === null ? 
null : protectedSessionService.decrypt(content); @@ -48,11 +49,11 @@ function processContent(content, isProtected, isStringContent) { } } -function calculateContentHash({blobId, content}) { +function calculateContentHash({blobId, content}: Blob) { return utils.hash(`${blobId}|${content.toString()}`); } -module.exports = { +export = { getBlobPojo, processContent, calculateContentHash diff --git a/src/services/branches.js b/src/services/branches.ts similarity index 61% rename from src/services/branches.js rename to src/services/branches.ts index 644bde80e8..7ee32c9498 100644 --- a/src/services/branches.js +++ b/src/services/branches.ts @@ -1,7 +1,8 @@ -const treeService = require('./tree.js'); -const sql = require('./sql.js'); +import treeService = require('./tree'); +import sql = require('./sql'); +import BBranch = require('../becca/entities/bbranch.js'); -function moveBranchToNote(branchToMove, targetParentNoteId) { +function moveBranchToNote(branchToMove: BBranch, targetParentNoteId: string) { if (branchToMove.parentNoteId === targetParentNoteId) { return {success: true}; // no-op } @@ -12,8 +13,8 @@ function moveBranchToNote(branchToMove, targetParentNoteId) { return [200, validationResult]; } - const maxNotePos = sql.getValue('SELECT MAX(notePosition) FROM branches WHERE parentNoteId = ? AND isDeleted = 0', [targetParentNoteId]); - const newNotePos = maxNotePos === null ? 0 : maxNotePos + 10; + const maxNotePos = sql.getValue('SELECT MAX(notePosition) FROM branches WHERE parentNoteId = ? AND isDeleted = 0', [targetParentNoteId]); + const newNotePos = !maxNotePos ? 
0 : maxNotePos + 10; const newBranch = branchToMove.createClone(targetParentNoteId, newNotePos); newBranch.save(); @@ -26,10 +27,10 @@ function moveBranchToNote(branchToMove, targetParentNoteId) { }; } -function moveBranchToBranch(branchToMove, targetParentBranch) { +function moveBranchToBranch(branchToMove: BBranch, targetParentBranch: BBranch) { const res = moveBranchToNote(branchToMove, targetParentBranch.noteId); - if (!res.success) { + if (!("success" in res) || !res.success) { return res; } @@ -42,7 +43,7 @@ function moveBranchToBranch(branchToMove, targetParentBranch) { return res; } -module.exports = { +export = { moveBranchToBranch, moveBranchToNote }; diff --git a/src/services/build.js b/src/services/build.js deleted file mode 100644 index aaae9be510..0000000000 --- a/src/services/build.js +++ /dev/null @@ -1 +0,0 @@ -module.exports = { buildDate:"2024-03-28T07:11:39+01:00", buildRevision: "399458b52f250b22be22d980a78de0b3390d7521" }; diff --git a/src/services/build.ts b/src/services/build.ts new file mode 100644 index 0000000000..b392adba08 --- /dev/null +++ b/src/services/build.ts @@ -0,0 +1 @@ +export = { buildDate:"2024-03-28T07:11:39+01:00", buildRevision: "399458b52f250b22be22d980a78de0b3390d7521" }; diff --git a/src/services/builtin_attributes.js b/src/services/builtin_attributes.ts similarity index 99% rename from src/services/builtin_attributes.js rename to src/services/builtin_attributes.ts index eb21f04f37..6ccabb4e1a 100644 --- a/src/services/builtin_attributes.js +++ b/src/services/builtin_attributes.ts @@ -1,4 +1,4 @@ -module.exports = [ +export = [ // label names { type: 'label', name: 'inbox' }, { type: 'label', name: 'disableVersioning' }, diff --git a/src/services/bulk_actions.js b/src/services/bulk_actions.ts similarity index 80% rename from src/services/bulk_actions.js rename to src/services/bulk_actions.ts index 3513705787..c91b1c0c97 100644 --- a/src/services/bulk_actions.js +++ b/src/services/bulk_actions.ts @@ -1,12 +1,30 @@ -const 
log = require('./log.js'); -const revisionService = require('./revisions.js'); -const becca = require('../becca/becca.js'); -const cloningService = require('./cloning.js'); -const branchService = require('./branches.js'); -const utils = require('./utils.js'); -const eraseService = require("./erase.js"); - -const ACTION_HANDLERS = { +import log = require('./log'); +import becca = require('../becca/becca'); +import cloningService = require('./cloning'); +import branchService = require('./branches'); +import utils = require('./utils'); +import eraseService = require("./erase"); +import BNote = require('../becca/entities/bnote'); + +interface Action { + labelName: string; + labelValue: string; + oldLabelName: string; + newLabelName: string; + + + relationName: string; + oldRelationName: string; + newRelationName: string; + + targetNoteId: string; + targetParentNoteId: string; + newTitle: string; + script: string; +} +type ActionHandler = (action: Action, note: BNote) => void; + +const ACTION_HANDLERS: Record = { addLabel: (action, note) => { note.addLabel(action.labelName, action.labelValue); }, @@ -19,7 +37,10 @@ const ACTION_HANDLERS = { note.deleteNote(deleteId); }, deleteRevisions: (action, note) => { - eraseService.eraseRevisions(note.getRevisions().map(rev => rev.revisionId)); + const revisionIds = note.getRevisions() + .map(rev => rev.revisionId) + .filter((rev) => !!rev) as string[]; + eraseService.eraseRevisions(revisionIds); }, deleteLabel: (action, note) => { for (const label of note.getOwnedLabels(action.labelName)) { @@ -107,7 +128,7 @@ const ACTION_HANDLERS = { } }; -function getActions(note) { +function getActions(note: BNote) { return note.getLabels('action') .map(actionLabel => { let action; @@ -129,7 +150,7 @@ function getActions(note) { .filter(a => !!a); } -function executeActions(note, searchResultNoteIds) { +function executeActions(note: BNote, searchResultNoteIds: string[]) { const actions = getActions(note); for (const resultNoteId of 
searchResultNoteIds) { @@ -144,13 +165,13 @@ function executeActions(note, searchResultNoteIds) { log.info(`Applying action handler to note ${resultNote.noteId}: ${JSON.stringify(action)}`); ACTION_HANDLERS[action.name](action, resultNote); - } catch (e) { + } catch (e: any) { log.error(`ExecuteScript search action failed with ${e.message}`); } } } } -module.exports = { +export = { executeActions }; diff --git a/src/services/cloning.js b/src/services/cloning.ts similarity index 87% rename from src/services/cloning.js rename to src/services/cloning.ts index 4baae5de7e..bb21425bcf 100644 --- a/src/services/cloning.js +++ b/src/services/cloning.ts @@ -1,13 +1,13 @@ "use strict"; -const sql = require('./sql.js'); -const eventChangesService = require('./entity_changes.js'); -const treeService = require('./tree.js'); -const BBranch = require('../becca/entities/bbranch.js'); -const becca = require('../becca/becca.js'); -const log = require('./log.js'); - -function cloneNoteToParentNote(noteId, parentNoteId, prefix = null) { +const sql = require('./sql'); +const eventChangesService = require('./entity_changes'); +const treeService = require('./tree'); +const BBranch = require('../becca/entities/bbranch'); +const becca = require('../becca/becca'); +const log = require('./log'); + +function cloneNoteToParentNote(noteId: string, parentNoteId: string, prefix: string | null = null) { if (!(noteId in becca.notes) || !(parentNoteId in becca.notes)) { return { success: false, message: 'Note cannot be cloned because either the cloned note or the intended parent is deleted.' 
}; } @@ -43,7 +43,7 @@ function cloneNoteToParentNote(noteId, parentNoteId, prefix = null) { }; } -function cloneNoteToBranch(noteId, parentBranchId, prefix) { +function cloneNoteToBranch(noteId: string, parentBranchId: string, prefix: string) { const parentBranch = becca.getBranch(parentBranchId); if (!parentBranch) { @@ -58,7 +58,7 @@ function cloneNoteToBranch(noteId, parentBranchId, prefix) { return ret; } -function ensureNoteIsPresentInParent(noteId, parentNoteId, prefix) { +function ensureNoteIsPresentInParent(noteId: string, parentNoteId: string, prefix: string) { if (!(noteId in becca.notes)) { return { branch: null, success: false, message: `Note '${noteId}' is deleted.` }; } else if (!(parentNoteId in becca.notes)) { @@ -89,7 +89,7 @@ function ensureNoteIsPresentInParent(noteId, parentNoteId, prefix) { return { branch: branch, success: true }; } -function ensureNoteIsAbsentFromParent(noteId, parentNoteId) { +function ensureNoteIsAbsentFromParent(noteId: string, parentNoteId: string) { const branchId = sql.getValue(`SELECT branchId FROM branches WHERE noteId = ? AND parentNoteId = ? 
AND isDeleted = 0`, [noteId, parentNoteId]); const branch = becca.getBranch(branchId); @@ -109,7 +109,7 @@ function ensureNoteIsAbsentFromParent(noteId, parentNoteId) { } } -function toggleNoteInParent(present, noteId, parentNoteId, prefix) { +function toggleNoteInParent(present: boolean, noteId: string, parentNoteId: string, prefix: string) { if (present) { return ensureNoteIsPresentInParent(noteId, parentNoteId, prefix); } @@ -118,7 +118,7 @@ function toggleNoteInParent(present, noteId, parentNoteId, prefix) { } } -function cloneNoteAfter(noteId, afterBranchId) { +function cloneNoteAfter(noteId: string, afterBranchId: string) { if (['_hidden', 'root'].includes(noteId)) { return { success: false, message: `Cloning the note '${noteId}' is forbidden.` }; } @@ -175,7 +175,7 @@ function cloneNoteAfter(noteId, afterBranchId) { return { success: true, branchId: branch.branchId }; } -module.exports = { +export = { cloneNoteToBranch, cloneNoteToParentNote, ensureNoteIsPresentInParent, diff --git a/src/services/cls.js b/src/services/cls.ts similarity index 81% rename from src/services/cls.js rename to src/services/cls.ts index 8e2c2870c0..d119c8a936 100644 --- a/src/services/cls.js +++ b/src/services/cls.ts @@ -1,26 +1,29 @@ -const clsHooked = require('cls-hooked'); +import clsHooked = require('cls-hooked'); +import { EntityChange } from './entity_changes_interface'; const namespace = clsHooked.createNamespace("trilium"); -function init(callback) { +type Callback = (...args: any[]) => any; + +function init(callback: Callback) { return namespace.runAndReturn(callback); } -function wrap(callback) { +function wrap(callback: Callback) { return () => { try { init(callback); } - catch (e) { + catch (e: any) { console.log(`Error occurred: ${e.message}: ${e.stack}`); } } } -function get(key) { +function get(key: string) { return namespace.get(key); } -function set(key, value) { +function set(key: string, value: any) { namespace.set(key, value); } @@ -48,7 +51,7 @@ function 
isEntityEventsDisabled() { return !!namespace.get('disableEntityEvents'); } -function setMigrationRunning(running) { +function setMigrationRunning(running: boolean) { namespace.set('migrationRunning', !!running); } @@ -56,7 +59,7 @@ function isMigrationRunning() { return !!namespace.get('migrationRunning'); } -function disableSlowQueryLogging(disable) { +function disableSlowQueryLogging(disable: boolean) { namespace.set('disableSlowQueryLogging', disable); } @@ -72,7 +75,7 @@ function getAndClearEntityChangeIds() { return entityChangeIds; } -function putEntityChange(entityChange) { +function putEntityChange(entityChange: EntityChange) { if (namespace.get('ignoreEntityChangeIds')) { return; } @@ -93,7 +96,7 @@ function ignoreEntityChangeIds() { namespace.set('ignoreEntityChangeIds', true); } -module.exports = { +export = { init, wrap, get, diff --git a/src/services/config.js b/src/services/config.ts similarity index 65% rename from src/services/config.js rename to src/services/config.ts index 2968f12480..f0437a2d15 100644 --- a/src/services/config.js +++ b/src/services/config.ts @@ -1,10 +1,10 @@ "use strict"; -const ini = require('ini'); -const fs = require('fs'); -const dataDir = require('./data_dir.js'); -const path = require('path'); -const resourceDir = require('./resource_dir.js'); +import ini = require('ini'); +import fs = require('fs'); +import dataDir = require('./data_dir'); +import path = require('path'); +import resourceDir = require('./resource_dir'); const configSampleFilePath = path.resolve(resourceDir.RESOURCE_DIR, "config-sample.ini"); @@ -16,4 +16,4 @@ if (!fs.existsSync(dataDir.CONFIG_INI_PATH)) { const config = ini.parse(fs.readFileSync(dataDir.CONFIG_INI_PATH, 'utf-8')); -module.exports = config; +export = config; diff --git a/src/services/consistency_checks.js b/src/services/consistency_checks.ts similarity index 87% rename from src/services/consistency_checks.js rename to src/services/consistency_checks.ts index 63f934d4f0..81ce810ca3 100644 
--- a/src/services/consistency_checks.js +++ b/src/services/consistency_checks.ts @@ -1,33 +1,42 @@ "use strict"; -const sql = require('./sql.js'); -const sqlInit = require('./sql_init.js'); -const log = require('./log.js'); -const ws = require('./ws.js'); -const syncMutexService = require('./sync_mutex.js'); -const cls = require('./cls.js'); -const entityChangesService = require('./entity_changes.js'); -const optionsService = require('./options.js'); -const BBranch = require('../becca/entities/bbranch.js'); -const revisionService = require('./revisions.js'); -const becca = require('../becca/becca.js'); -const utils = require('../services/utils.js'); -const eraseService = require('../services/erase.js'); -const {sanitizeAttributeName} = require('./sanitize_attribute_name.js'); -const noteTypes = require('../services/note_types.js').getNoteTypeNames(); +import sql = require('./sql'); +import sqlInit = require('./sql_init'); +import log = require('./log'); +import ws = require('./ws'); +import syncMutexService = require('./sync_mutex'); +import cls = require('./cls'); +import entityChangesService = require('./entity_changes'); +import optionsService = require('./options'); +import BBranch = require('../becca/entities/bbranch'); +import revisionService = require('./revisions'); +import becca = require('../becca/becca'); +import utils = require('../services/utils'); +import eraseService = require('../services/erase'); +import sanitizeAttributeName = require('./sanitize_attribute_name'); +import noteTypesService = require('../services/note_types'); +import { BranchRow, NoteRow } from '../becca/entities/rows'; +import { EntityChange, EntityRow } from './entity_changes_interface'; +const noteTypes = noteTypesService.getNoteTypeNames(); class ConsistencyChecks { + + private autoFix: boolean; + private unrecoveredConsistencyErrors: boolean; + private fixedIssues: boolean; + private reloadNeeded: boolean; + /** * @param autoFix - automatically fix all encountered problems. 
False is only for debugging during development (fail fast) */ - constructor(autoFix) { + constructor(autoFix: boolean) { this.autoFix = autoFix; this.unrecoveredConsistencyErrors = false; this.fixedIssues = false; this.reloadNeeded = false; } - findAndFixIssues(query, fixerCb) { + findAndFixIssues(query: string, fixerCb: (res: any) => void) { const results = sql.getRows(query); for (const res of results) { @@ -39,7 +48,7 @@ class ConsistencyChecks { } else { this.unrecoveredConsistencyErrors = true; } - } catch (e) { + } catch (e: any) { logError(`Fixer failed with ${e.message} ${e.stack}`); this.unrecoveredConsistencyErrors = true; } @@ -49,8 +58,8 @@ class ConsistencyChecks { } checkTreeCycles() { - const childToParents = {}; - const rows = sql.getRows("SELECT noteId, parentNoteId FROM branches WHERE isDeleted = 0"); + const childToParents: Record = {}; + const rows = sql.getRows("SELECT noteId, parentNoteId FROM branches WHERE isDeleted = 0"); for (const row of rows) { const childNoteId = row.noteId; @@ -61,7 +70,7 @@ class ConsistencyChecks { } /** @returns {boolean} true if cycle was found and we should try again */ - const checkTreeCycle = (noteId, path) => { + const checkTreeCycle = (noteId: string, path: string[]) => { if (noteId === 'root') { return false; } @@ -70,8 +79,10 @@ class ConsistencyChecks { if (path.includes(parentNoteId)) { if (this.autoFix) { const branch = becca.getBranchFromChildAndParent(noteId, parentNoteId); - branch.markAsDeleted('cycle-autofix'); - logFix(`Branch '${branch.branchId}' between child '${noteId}' and parent '${parentNoteId}' has been deleted since it was causing a tree cycle.`); + if (branch) { + branch.markAsDeleted('cycle-autofix'); + logFix(`Branch '${branch.branchId}' between child '${noteId}' and parent '${parentNoteId}' has been deleted since it was causing a tree cycle.`); + } return true; } @@ -133,6 +144,9 @@ class ConsistencyChecks { ({branchId, noteId}) => { if (this.autoFix) { const branch = 
becca.getBranch(branchId); + if (!branch) { + return; + } branch.markAsDeleted(); this.reloadNeeded = true; @@ -154,12 +168,21 @@ class ConsistencyChecks { if (this.autoFix) { // Delete the old branch and recreate it with root as parent. const oldBranch = becca.getBranch(branchId); + if (!oldBranch) { + return; + } + const noteId = oldBranch.noteId; oldBranch.markAsDeleted("missing-parent"); let message = `Branch '${branchId}' was missing parent note '${parentNoteId}', so it was deleted. `; - if (becca.getNote(noteId).getParentBranches().length === 0) { + const note = becca.getNote(noteId); + if (!note) { + return; + } + + if (note.getParentBranches().length === 0) { const newBranch = new BBranch({ parentNoteId: 'root', noteId: noteId, @@ -188,6 +211,9 @@ class ConsistencyChecks { ({attributeId, noteId}) => { if (this.autoFix) { const attribute = becca.getAttribute(attributeId); + if (!attribute) { + return; + } attribute.markAsDeleted(); this.reloadNeeded = true; @@ -208,6 +234,9 @@ class ConsistencyChecks { ({attributeId, noteId}) => { if (this.autoFix) { const attribute = becca.getAttribute(attributeId); + if (!attribute) { + return; + } attribute.markAsDeleted(); this.reloadNeeded = true; @@ -230,6 +259,9 @@ class ConsistencyChecks { ({attachmentId, ownerId}) => { if (this.autoFix) { const attachment = becca.getAttachment(attachmentId); + if (!attachment) { + return; + } attachment.markAsDeleted(); this.reloadNeeded = false; @@ -258,6 +290,7 @@ class ConsistencyChecks { ({branchId, noteId}) => { if (this.autoFix) { const branch = becca.getBranch(branchId); + if (!branch) return; branch.markAsDeleted(); this.reloadNeeded = true; @@ -278,6 +311,9 @@ class ConsistencyChecks { `, ({branchId, parentNoteId}) => { if (this.autoFix) { const branch = becca.getBranch(branchId); + if (!branch) { + return; + } branch.markAsDeleted(); this.reloadNeeded = true; @@ -321,7 +357,7 @@ class ConsistencyChecks { HAVING COUNT(1) > 1`, ({noteId, parentNoteId}) => { if (this.autoFix) 
{ - const branchIds = sql.getColumn( + const branchIds = sql.getColumn( `SELECT branchId FROM branches WHERE noteId = ? @@ -333,9 +369,17 @@ class ConsistencyChecks { // it's not necessarily "original" branch, it's just the only one which will survive const origBranch = branches[0]; + if (!origBranch) { + logError(`Unable to find original branch.`); + return; + } // delete all but the first branch for (const branch of branches.slice(1)) { + if (!branch) { + continue; + } + branch.markAsDeleted(); logFix(`Removing branch '${branch.branchId}' since it's a parent-child duplicate of branch '${origBranch.branchId}'`); @@ -357,6 +401,7 @@ class ConsistencyChecks { ({attachmentId, noteId}) => { if (this.autoFix) { const attachment = becca.getAttachment(attachmentId); + if (!attachment) return; attachment.markAsDeleted(); this.reloadNeeded = false; @@ -379,6 +424,7 @@ class ConsistencyChecks { ({noteId, type}) => { if (this.autoFix) { const note = becca.getNote(noteId); + if (!note) return; note.type = 'file'; // file is a safe option to recover notes if the type is not known note.save(); @@ -404,6 +450,10 @@ class ConsistencyChecks { const fakeDate = "2000-01-01 00:00:00Z"; const blankContent = getBlankContent(isProtected, type, mime); + if (!blankContent) { + logError(`Unable to recover note ${noteId} since it's content could not be retrieved (might be protected note).`); + return; + } const blobId = utils.hashedBlobId(blankContent); const blobAlreadyExists = !!sql.getValue("SELECT 1 FROM blobs WHERE blobId = ?", [blobId]); @@ -452,7 +502,11 @@ class ConsistencyChecks { if (this.autoFix) { const note = becca.getNote(noteId); const blankContent = getBlankContent(false, type, mime); - note.setContent(blankContent); + if (!note) return; + + if (blankContent) { + note.setContent(blankContent); + } this.reloadNeeded = true; @@ -506,7 +560,7 @@ class ConsistencyChecks { AND branches.isDeleted = 0`, ({parentNoteId}) => { if (this.autoFix) { - const branchIds = sql.getColumn(` + 
const branchIds = sql.getColumn(` SELECT branchId FROM branches WHERE isDeleted = 0 @@ -515,6 +569,8 @@ class ConsistencyChecks { const branches = branchIds.map(branchId => becca.getBranch(branchId)); for (const branch of branches) { + if (!branch) continue; + // delete the old wrong branch branch.markAsDeleted("parent-is-search"); @@ -543,6 +599,7 @@ class ConsistencyChecks { ({attributeId}) => { if (this.autoFix) { const relation = becca.getAttribute(attributeId); + if (!relation) return; relation.markAsDeleted(); this.reloadNeeded = true; @@ -563,6 +620,7 @@ class ConsistencyChecks { ({attributeId, type}) => { if (this.autoFix) { const attribute = becca.getAttribute(attributeId); + if (!attribute) return; attribute.type = 'label'; attribute.save(); @@ -584,6 +642,7 @@ class ConsistencyChecks { ({attributeId, noteId}) => { if (this.autoFix) { const attribute = becca.getAttribute(attributeId); + if (!attribute) return; attribute.markAsDeleted(); this.reloadNeeded = true; @@ -605,6 +664,7 @@ class ConsistencyChecks { ({attributeId, targetNoteId}) => { if (this.autoFix) { const attribute = becca.getAttribute(attributeId); + if (!attribute) return; attribute.markAsDeleted(); this.reloadNeeded = true; @@ -616,14 +676,14 @@ class ConsistencyChecks { }); } - runEntityChangeChecks(entityName, key) { + runEntityChangeChecks(entityName: string, key: string) { this.findAndFixIssues(` SELECT ${key} as entityId FROM ${entityName} LEFT JOIN entity_changes ec ON ec.entityName = '${entityName}' AND ec.entityId = ${entityName}.${key} WHERE ec.id IS NULL`, ({entityId}) => { - const entityRow = sql.getRow(`SELECT * FROM ${entityName} WHERE ${key} = ?`, [entityId]); + const entityRow = sql.getRow(`SELECT * FROM ${entityName} WHERE ${key} = ?`, [entityId]); if (this.autoFix) { entityChangesService.putEntityChange({ @@ -691,10 +751,10 @@ class ConsistencyChecks { } findWronglyNamedAttributes() { - const attrNames = sql.getColumn(`SELECT DISTINCT name FROM attributes`); + const 
attrNames = sql.getColumn(`SELECT DISTINCT name FROM attributes`); for (const origName of attrNames) { - const fixedName = sanitizeAttributeName(origName); + const fixedName = sanitizeAttributeName.sanitizeAttributeName(origName); if (fixedName !== origName) { if (this.autoFix) { @@ -721,7 +781,7 @@ class ConsistencyChecks { findSyncIssues() { const lastSyncedPush = parseInt(sql.getValue("SELECT value FROM options WHERE name = 'lastSyncedPush'")); - const maxEntityChangeId = sql.getValue("SELECT MAX(id) FROM entity_changes"); + const maxEntityChangeId = sql.getValue("SELECT MAX(id) FROM entity_changes"); if (lastSyncedPush > maxEntityChangeId) { if (this.autoFix) { @@ -766,15 +826,15 @@ class ConsistencyChecks { } if (this.reloadNeeded) { - require('../becca/becca_loader.js').reload("consistency checks need becca reload"); + require('../becca/becca_loader').reload("consistency checks need becca reload"); } return !this.unrecoveredConsistencyErrors; } runDbDiagnostics() { - function getTableRowCount(tableName) { - const count = sql.getValue(`SELECT COUNT(1) FROM ${tableName}`); + function getTableRowCount(tableName: string) { + const count = sql.getValue(`SELECT COUNT(1) FROM ${tableName}`); return `${tableName}: ${count}`; } @@ -810,7 +870,7 @@ class ConsistencyChecks { } } -function getBlankContent(isProtected, type, mime) { +function getBlankContent(isProtected: boolean, type: string, mime: string) { if (isProtected) { return null; // this is wrong for protected non-erased notes, but we cannot create a valid value without a password } @@ -822,11 +882,11 @@ function getBlankContent(isProtected, type, mime) { return ''; // empty string might be a wrong choice for some note types, but it's the best guess } -function logFix(message) { +function logFix(message: string) { log.info(`Consistency issue fixed: ${message}`); } -function logError(message) { +function logError(message: string) { log.info(`Consistency error: ${message}`); } @@ -837,7 +897,7 @@ function 
runPeriodicChecks() { consistencyChecks.runChecks(); } -async function runOnDemandChecks(autoFix) { +async function runOnDemandChecks(autoFix: boolean) { const consistencyChecks = new ConsistencyChecks(autoFix); await consistencyChecks.runChecks(); } diff --git a/src/services/content_hash.js b/src/services/content_hash.ts similarity index 79% rename from src/services/content_hash.js rename to src/services/content_hash.ts index c9ca7fef6e..2067e177fa 100644 --- a/src/services/content_hash.js +++ b/src/services/content_hash.ts @@ -1,9 +1,11 @@ "use strict"; -const sql = require('./sql.js'); -const utils = require('./utils.js'); -const log = require('./log.js'); -const eraseService = require('./erase.js'); +import sql = require('./sql'); +import utils = require('./utils'); +import log = require('./log'); +import eraseService = require('./erase'); + +type SectorHash = Record; function getEntityHashes() { // blob erasure is not synced, we should check before each sync if there's some blob to erase @@ -12,8 +14,9 @@ function getEntityHashes() { const startTime = new Date(); // we know this is slow and the total content hash calculation time is logged + type HashRow = [ string, string, string, boolean ]; const hashRows = sql.disableSlowQueryLogging( - () => sql.getRawRows(` + () => sql.getRawRows(` SELECT entityName, entityId, hash, @@ -27,7 +30,7 @@ function getEntityHashes() { // sorting by entityId is enough, hashes will be segmented by entityName later on anyway hashRows.sort((a, b) => a[1] < b[1] ? 
-1 : 1); - const hashMap = {}; + const hashMap: Record = {}; for (const [entityName, entityId, hash, isErased] of hashRows) { const entityHashMap = hashMap[entityName] = hashMap[entityName] || {}; @@ -51,13 +54,13 @@ function getEntityHashes() { return hashMap; } -function checkContentHashes(otherHashes) { +function checkContentHashes(otherHashes: Record) { const entityHashes = getEntityHashes(); const failedChecks = []; for (const entityName in entityHashes) { - const thisSectorHashes = entityHashes[entityName] || {}; - const otherSectorHashes = otherHashes[entityName] || {}; + const thisSectorHashes: SectorHash = entityHashes[entityName] || {}; + const otherSectorHashes: SectorHash = otherHashes[entityName] || {}; const sectors = new Set(Object.keys(thisSectorHashes).concat(Object.keys(otherSectorHashes))); @@ -77,7 +80,7 @@ function checkContentHashes(otherHashes) { return failedChecks; } -module.exports = { +export = { getEntityHashes, checkContentHashes }; diff --git a/src/services/data_dir.js b/src/services/data_dir.ts similarity index 93% rename from src/services/data_dir.js rename to src/services/data_dir.ts index 7971eabf4c..1b267850ab 100644 --- a/src/services/data_dir.js +++ b/src/services/data_dir.ts @@ -8,14 +8,14 @@ * - as a fallback if the previous step fails, we'll use home dir */ -const os = require('os'); -const fs = require('fs'); -const path = require('path'); +import os = require('os'); +import fs = require('fs'); +import path = require('path'); function getAppDataDir() { let appDataDir = os.homedir(); // fallback if OS is not recognized - if (os.platform() === 'win32') { + if (os.platform() === 'win32' && process.env.APPDATA) { appDataDir = process.env.APPDATA; } else if (os.platform() === 'linux') { @@ -68,7 +68,7 @@ const LOG_DIR = process.env.TRILIUM_LOG_DIR || `${DIR_SEP}log`; const ANONYMIZED_DB_DIR = process.env.TRILIUM_ANONYMIZED_DB_DIR || `${DIR_SEP}anonymized-db`; const CONFIG_INI_PATH = process.env.TRILIUM_CONFIG_INI_PATH || 
`${DIR_SEP}config.ini`; -module.exports = { +export = { TRILIUM_DATA_DIR, DOCUMENT_PATH, BACKUP_DIR, diff --git a/src/services/date_notes.js b/src/services/date_notes.ts similarity index 77% rename from src/services/date_notes.js rename to src/services/date_notes.ts index 69e2a06a0a..9891069396 100644 --- a/src/services/date_notes.js +++ b/src/services/date_notes.ts @@ -1,13 +1,14 @@ "use strict"; -const noteService = require('./notes.js'); -const attributeService = require('./attributes.js'); -const dateUtils = require('./date_utils.js'); -const sql = require('./sql.js'); -const protectedSessionService = require('./protected_session.js'); -const searchService = require('../services/search/services/search.js'); -const SearchContext = require('../services/search/search_context.js'); -const hoistedNoteService = require('./hoisted_note.js'); +import noteService = require('./notes'); +import attributeService = require('./attributes'); +import dateUtils = require('./date_utils'); +import sql = require('./sql'); +import protectedSessionService = require('./protected_session'); +import searchService = require('../services/search/services/search'); +import SearchContext = require('../services/search/search_context'); +import hoistedNoteService = require('./hoisted_note'); +import BNote = require('../becca/entities/bnote'); const CALENDAR_ROOT_LABEL = 'calendarRoot'; const YEAR_LABEL = 'yearNote'; @@ -17,7 +18,9 @@ const DATE_LABEL = 'dateNote'; const DAYS = ['Sunday','Monday','Tuesday','Wednesday','Thursday','Friday','Saturday']; const MONTHS = ['January','February','March','April','May','June','July','August','September','October','November','December']; -function createNote(parentNote, noteTitle) { +type StartOfWeek = "monday" | "sunday"; + +function createNote(parentNote: BNote, noteTitle: string) { return noteService.createNewNote({ parentNoteId: parentNote.noteId, title: noteTitle, @@ -27,13 +30,12 @@ function createNote(parentNote, noteTitle) { }).note; } -/** 
@returns {BNote} */ -function getRootCalendarNote() { +function getRootCalendarNote(): BNote { let rootNote; const workspaceNote = hoistedNoteService.getWorkspaceNote(); - if (!workspaceNote.isRoot()) { + if (!workspaceNote || !workspaceNote.isRoot()) { rootNote = searchService.findFirstNoteWithQuery('#workspaceCalendarRoot', new SearchContext({ignoreHoistedNote: false})); } @@ -57,14 +59,11 @@ function getRootCalendarNote() { }); } - return rootNote; + return rootNote as BNote; } -/** @returns {BNote} */ -function getYearNote(dateStr, rootNote = null) { - if (!rootNote) { - rootNote = getRootCalendarNote(); - } +function getYearNote(dateStr: string, _rootNote: BNote | null = null): BNote { + const rootNote = _rootNote || getRootCalendarNote(); const yearStr = dateStr.trim().substr(0, 4); @@ -88,10 +87,10 @@ function getYearNote(dateStr, rootNote = null) { } }); - return yearNote; + return yearNote as unknown as BNote; } -function getMonthNoteTitle(rootNote, monthNumber, dateObj) { +function getMonthNoteTitle(rootNote: BNote, monthNumber: string, dateObj: Date) { const pattern = rootNote.getOwnedLabelValue("monthPattern") || "{monthNumberPadded} - {month}"; const monthName = MONTHS[dateObj.getMonth()]; @@ -102,11 +101,8 @@ function getMonthNoteTitle(rootNote, monthNumber, dateObj) { .replace(/{month}/g, monthName); } -/** @returns {BNote} */ -function getMonthNote(dateStr, rootNote = null) { - if (!rootNote) { - rootNote = getRootCalendarNote(); - } +function getMonthNote(dateStr: string, _rootNote: BNote | null = null): BNote { + const rootNote = _rootNote || getRootCalendarNote(); const monthStr = dateStr.substr(0, 7); const monthNumber = dateStr.substr(5, 2); @@ -137,10 +133,10 @@ function getMonthNote(dateStr, rootNote = null) { } }); - return monthNote; + return monthNote as unknown as BNote; } -function getDayNoteTitle(rootNote, dayNumber, dateObj) { +function getDayNoteTitle(rootNote: BNote, dayNumber: string, dateObj: Date) { const pattern = 
rootNote.getOwnedLabelValue("datePattern") || "{dayInMonthPadded} - {weekDay}"; const weekDay = DAYS[dateObj.getDay()]; @@ -154,18 +150,15 @@ function getDayNoteTitle(rootNote, dayNumber, dateObj) { } /** produces 1st, 2nd, 3rd, 4th, 21st, 31st for 1, 2, 3, 4, 21, 31 */ -function ordinal(dayNumber) { +function ordinal(dayNumber: number) { const suffixes = ["th", "st", "nd", "rd"]; const suffix = suffixes[(dayNumber - 20) % 10] || suffixes[dayNumber] || suffixes[0]; return `${dayNumber}${suffix}`; } -/** @returns {BNote} */ -function getDayNote(dateStr, rootNote = null) { - if (!rootNote) { - rootNote = getRootCalendarNote(); - } +function getDayNote(dateStr: string, _rootNote: BNote | null = null): BNote { + const rootNote = _rootNote || getRootCalendarNote(); dateStr = dateStr.trim().substr(0, 10); @@ -195,14 +188,14 @@ function getDayNote(dateStr, rootNote = null) { } }); - return dateNote; + return dateNote as unknown as BNote; } -function getTodayNote(rootNote = null) { +function getTodayNote(rootNote: BNote | null = null) { return getDayNote(dateUtils.localNowDate(), rootNote); } -function getStartOfTheWeek(date, startOfTheWeek) { +function getStartOfTheWeek(date: Date, startOfTheWeek: StartOfWeek) { const day = date.getDay(); let diff; @@ -219,7 +212,11 @@ function getStartOfTheWeek(date, startOfTheWeek) { return new Date(date.setDate(diff)); } -function getWeekNote(dateStr, options = {}, rootNote = null) { +interface WeekNoteOpts { + startOfTheWeek?: StartOfWeek +} + +function getWeekNote(dateStr: string, options: WeekNoteOpts = {}, rootNote: BNote | null = null) { const startOfTheWeek = options.startOfTheWeek || "monday"; const dateObj = getStartOfTheWeek(dateUtils.parseLocalDate(dateStr), startOfTheWeek); @@ -229,7 +226,7 @@ function getWeekNote(dateStr, options = {}, rootNote = null) { return getDayNote(dateStr, rootNote); } -module.exports = { +export = { getRootCalendarNote, getYearNote, getMonthNote, diff --git a/src/services/date_utils.js 
b/src/services/date_utils.ts similarity index 87% rename from src/services/date_utils.js rename to src/services/date_utils.ts index 4eb47bdb74..88b6ecb69e 100644 --- a/src/services/date_utils.js +++ b/src/services/date_utils.ts @@ -1,5 +1,5 @@ -const dayjs = require('dayjs'); -const cls = require('./cls.js'); +import dayjs = require('dayjs'); +import cls = require('./cls'); const LOCAL_DATETIME_FORMAT = 'YYYY-MM-DD HH:mm:ss.SSSZZ'; const UTC_DATETIME_FORMAT = 'YYYY-MM-DD HH:mm:ssZ'; @@ -29,15 +29,15 @@ function localNowDate() { } } -function pad(num) { +function pad(num: number) { return num <= 9 ? `0${num}` : `${num}`; } -function utcDateStr(date) { +function utcDateStr(date: Date) { return date.toISOString().split('T')[0]; } -function utcDateTimeStr(date) { +function utcDateTimeStr(date: Date) { return date.toISOString().replace('T', ' '); } @@ -45,16 +45,16 @@ function utcDateTimeStr(date) { * @param str - needs to be in the ISO 8601 format "YYYY-MM-DDTHH:MM:SS.sssZ" format as outputted by dateStr(). 
* also is assumed to be GMT time (as indicated by the "Z" at the end), *not* local time */ -function parseDateTime(str) { +function parseDateTime(str: string) { try { return new Date(Date.parse(str)); } - catch (e) { + catch (e: any) { throw new Error(`Can't parse date from '${str}': ${e.stack}`); } } -function parseLocalDate(str) { +function parseLocalDate(str: string) { const datePart = str.substr(0, 10); // not specifying the timezone and specifying the time means Date.parse() will use the local timezone @@ -65,7 +65,7 @@ function getDateTimeForFile() { return new Date().toISOString().substr(0, 19).replace(/:/g, ''); } -function validateLocalDateTime(str) { +function validateLocalDateTime(str: string | null | undefined) { if (!str) { return; } @@ -80,7 +80,7 @@ function validateLocalDateTime(str) { } } -function validateUtcDateTime(str) { +function validateUtcDateTime(str: string | undefined) { if (!str) { return; } @@ -95,7 +95,7 @@ function validateUtcDateTime(str) { } } -module.exports = { +export = { utcNowDateTime, localNowDateTime, localNowDate, diff --git a/src/services/encryption/data_encryption.js b/src/services/encryption/data_encryption.ts similarity index 85% rename from src/services/encryption/data_encryption.js rename to src/services/encryption/data_encryption.ts index 6b81686ed4..b82a0e8c6a 100644 --- a/src/services/encryption/data_encryption.js +++ b/src/services/encryption/data_encryption.ts @@ -1,9 +1,9 @@ "use strict"; -const crypto = require('crypto'); -const log = require('../log.js'); +import crypto = require('crypto'); +import log = require('../log'); -function arraysIdentical(a, b) { +function arraysIdentical(a: any[] | Buffer, b: any[] | Buffer) { let i = a.length; if (i !== b.length) return false; while (i--) { @@ -12,12 +12,12 @@ function arraysIdentical(a, b) { return true; } -function shaArray(content) { +function shaArray(content: crypto.BinaryLike) { // we use this as a simple checksum and don't rely on its security, so SHA-1 is 
good enough return crypto.createHash('sha1').update(content).digest(); } -function pad(data) { +function pad(data: Buffer): Buffer { if (data.length > 16) { data = data.slice(0, 16); } @@ -30,7 +30,7 @@ function pad(data) { return Buffer.from(data); } -function encrypt(key, plainText) { +function encrypt(key: Buffer, plainText: Buffer | string) { if (!key) { throw new Error("No data key!"); } @@ -51,10 +51,7 @@ function encrypt(key, plainText) { return encryptedDataWithIv.toString('base64'); } -/** - * @returns {Buffer|false|null} - */ -function decrypt(key, cipherText) { +function decrypt(key: Buffer, cipherText: string | Buffer): Buffer | false | null { if (cipherText === null) { return null; } @@ -88,12 +85,12 @@ function decrypt(key, cipherText) { return payload; } - catch (e) { + catch (e: any) { // recovery from https://github.com/zadam/trilium/issues/510 if (e.message?.includes("WRONG_FINAL_BLOCK_LENGTH") || e.message?.includes("wrong final block length")) { log.info("Caught WRONG_FINAL_BLOCK_LENGTH, returning cipherText instead"); - return cipherText; + return Buffer.from(cipherText); } else { throw e; @@ -101,7 +98,7 @@ function decrypt(key, cipherText) { } } -function decryptString(dataKey, cipherText) { +function decryptString(dataKey: Buffer, cipherText: string) { const buffer = decrypt(dataKey, cipherText); if (buffer === null) { @@ -115,7 +112,7 @@ function decryptString(dataKey, cipherText) { return buffer.toString('utf-8'); } -module.exports = { +export = { encrypt, decrypt, decryptString diff --git a/src/services/encryption/my_scrypt.js b/src/services/encryption/my_scrypt.ts similarity index 58% rename from src/services/encryption/my_scrypt.js rename to src/services/encryption/my_scrypt.ts index bc01cdde7e..c80632bc06 100644 --- a/src/services/encryption/my_scrypt.js +++ b/src/services/encryption/my_scrypt.ts @@ -1,28 +1,28 @@ "use strict"; -const optionService = require('../options.js'); -const crypto = require('crypto'); +import optionService = 
require('../options'); +import crypto = require('crypto'); -function getVerificationHash(password) { +function getVerificationHash(password: crypto.BinaryLike) { const salt = optionService.getOption('passwordVerificationSalt'); return getScryptHash(password, salt); } -function getPasswordDerivedKey(password) { +function getPasswordDerivedKey(password: crypto.BinaryLike) { const salt = optionService.getOption('passwordDerivedKeySalt'); return getScryptHash(password, salt); } -function getScryptHash(password, salt) { +function getScryptHash(password: crypto.BinaryLike, salt: crypto.BinaryLike) { const hashed = crypto.scryptSync(password, salt, 32, {N: 16384, r:8, p:1}); return hashed; } -module.exports = { +export = { getVerificationHash, getPasswordDerivedKey }; diff --git a/src/services/encryption/password.js b/src/services/encryption/password.ts similarity index 78% rename from src/services/encryption/password.js rename to src/services/encryption/password.ts index 563e86c02b..4d6bf66a3f 100644 --- a/src/services/encryption/password.js +++ b/src/services/encryption/password.ts @@ -1,16 +1,16 @@ "use strict"; -const sql = require('../sql.js'); -const optionService = require('../options.js'); -const myScryptService = require('./my_scrypt.js'); -const utils = require('../utils.js'); -const passwordEncryptionService = require('./password_encryption.js'); +import sql = require('../sql'); +import optionService = require('../options'); +import myScryptService = require('./my_scrypt'); +import utils = require('../utils'); +import passwordEncryptionService = require('./password_encryption'); function isPasswordSet() { return !!sql.getValue("SELECT value FROM options WHERE name = 'passwordVerificationHash'"); } -function changePassword(currentPassword, newPassword) { +function changePassword(currentPassword: string, newPassword: string) { if (!isPasswordSet()) { throw new Error("Password has not been set yet, so it cannot be changed. 
Use 'setPassword' instead."); } @@ -29,8 +29,11 @@ function changePassword(currentPassword, newPassword) { optionService.setOption('passwordDerivedKeySalt', utils.randomSecureToken(32)); const newPasswordVerificationKey = utils.toBase64(myScryptService.getVerificationHash(newPassword)); - - passwordEncryptionService.setDataKey(newPassword, decryptedDataKey); + + if (decryptedDataKey) { + // TODO: what should happen if the decrypted data key is null? + passwordEncryptionService.setDataKey(newPassword, decryptedDataKey); + } optionService.setOption('passwordVerificationHash', newPasswordVerificationKey); }); @@ -40,7 +43,7 @@ function changePassword(currentPassword, newPassword) { }; } -function setPassword(password) { +function setPassword(password: string) { if (isPasswordSet()) { throw new Error("Password is set already. Either change it or perform 'reset password' first."); } @@ -48,13 +51,13 @@ function setPassword(password) { optionService.createOption('passwordVerificationSalt', utils.randomSecureToken(32), true); optionService.createOption('passwordDerivedKeySalt', utils.randomSecureToken(32), true); - const passwordVerificationKey = utils.toBase64(myScryptService.getVerificationHash(password), true); + const passwordVerificationKey = utils.toBase64(myScryptService.getVerificationHash(password)); optionService.createOption('passwordVerificationHash', passwordVerificationKey, true); // passwordEncryptionService expects these options to already exist optionService.createOption('encryptedDataKey', '', true); - passwordEncryptionService.setDataKey(password, utils.randomSecureToken(16), true); + passwordEncryptionService.setDataKey(password, utils.randomSecureToken(16)); return { success: true @@ -75,7 +78,7 @@ function resetPassword() { }; } -module.exports = { +export = { isPasswordSet, changePassword, setPassword, diff --git a/src/services/encryption/password_encryption.js b/src/services/encryption/password_encryption.ts similarity index 71% rename from 
src/services/encryption/password_encryption.js rename to src/services/encryption/password_encryption.ts index 336be8f2f3..36420d03d2 100644 --- a/src/services/encryption/password_encryption.js +++ b/src/services/encryption/password_encryption.ts @@ -1,9 +1,9 @@ -const optionService = require('../options.js'); -const myScryptService = require('./my_scrypt.js'); -const utils = require('../utils.js'); -const dataEncryptionService = require('./data_encryption.js'); +import optionService = require('../options'); +import myScryptService = require('./my_scrypt'); +import utils = require('../utils'); +import dataEncryptionService = require('./data_encryption'); -function verifyPassword(password) { +function verifyPassword(password: string) { const givenPasswordHash = utils.toBase64(myScryptService.getVerificationHash(password)); const dbPasswordHash = optionService.getOptionOrNull('passwordVerificationHash'); @@ -15,7 +15,7 @@ function verifyPassword(password) { return givenPasswordHash === dbPasswordHash; } -function setDataKey(password, plainTextDataKey) { +function setDataKey(password: string, plainTextDataKey: string | Buffer) { const passwordDerivedKey = myScryptService.getPasswordDerivedKey(password); const newEncryptedDataKey = dataEncryptionService.encrypt(passwordDerivedKey, plainTextDataKey); @@ -23,8 +23,7 @@ function setDataKey(password, plainTextDataKey) { optionService.setOption('encryptedDataKey', newEncryptedDataKey); } -/** @return {Buffer} */ -function getDataKey(password) { +function getDataKey(password: string) { const passwordDerivedKey = myScryptService.getPasswordDerivedKey(password); const encryptedDataKey = optionService.getOption('encryptedDataKey'); @@ -34,7 +33,7 @@ function getDataKey(password) { return decryptedDataKey; } -module.exports = { +export = { verifyPassword, getDataKey, setDataKey diff --git a/src/services/entity_changes.js b/src/services/entity_changes.ts similarity index 74% rename from src/services/entity_changes.js rename to 
src/services/entity_changes.ts index 0e0a8ddb3f..91f84f834b 100644 --- a/src/services/entity_changes.js +++ b/src/services/entity_changes.ts @@ -1,27 +1,29 @@ -const sql = require('./sql.js'); -const dateUtils = require('./date_utils.js'); -const log = require('./log.js'); -const cls = require('./cls.js'); -const utils = require('./utils.js'); -const instanceId = require('./instance_id.js'); -const becca = require('../becca/becca.js'); -const blobService = require('../services/blob.js'); +import sql = require('./sql'); +import dateUtils = require('./date_utils'); +import log = require('./log'); +import cls = require('./cls'); +import utils = require('./utils'); +import instanceId = require('./instance_id'); +import becca = require('../becca/becca'); +import blobService = require('../services/blob'); +import { EntityChange } from './entity_changes_interface'; +import type { Blob } from "./blob-interface"; let maxEntityChangeId = 0; -function putEntityChangeWithInstanceId(origEntityChange, instanceId) { +function putEntityChangeWithInstanceId(origEntityChange: EntityChange, instanceId: string) { const ec = {...origEntityChange, instanceId}; putEntityChange(ec); } -function putEntityChangeWithForcedChange(origEntityChange) { +function putEntityChangeWithForcedChange(origEntityChange: EntityChange) { const ec = {...origEntityChange, changeId: null}; putEntityChange(ec); } -function putEntityChange(origEntityChange) { +function putEntityChange(origEntityChange: EntityChange) { const ec = {...origEntityChange}; delete ec.id; @@ -36,12 +38,14 @@ function putEntityChange(origEntityChange) { ec.isErased = ec.isErased ? 
1 : 0; ec.id = sql.replace("entity_changes", ec); - maxEntityChangeId = Math.max(maxEntityChangeId, ec.id); + if (ec.id) { + maxEntityChangeId = Math.max(maxEntityChangeId, ec.id); + } cls.putEntityChange(ec); } -function putNoteReorderingEntityChange(parentNoteId, componentId) { +function putNoteReorderingEntityChange(parentNoteId: string, componentId?: string) { putEntityChange({ entityName: "note_reordering", entityId: parentNoteId, @@ -53,7 +57,7 @@ function putNoteReorderingEntityChange(parentNoteId, componentId) { instanceId }); - const eventService = require('./events.js'); + const eventService = require('./events'); eventService.emit(eventService.ENTITY_CHANGED, { entityName: 'note_reordering', @@ -61,7 +65,7 @@ function putNoteReorderingEntityChange(parentNoteId, componentId) { }); } -function putEntityChangeForOtherInstances(ec) { +function putEntityChangeForOtherInstances(ec: EntityChange) { putEntityChange({ ...ec, changeId: null, @@ -69,8 +73,8 @@ function putEntityChangeForOtherInstances(ec) { }); } -function addEntityChangesForSector(entityName, sector) { - const entityChanges = sql.getRows(`SELECT * FROM entity_changes WHERE entityName = ? AND SUBSTR(entityId, 1, 1) = ?`, [entityName, sector]); +function addEntityChangesForSector(entityName: string, sector: string) { + const entityChanges = sql.getRows(`SELECT * FROM entity_changes WHERE entityName = ? 
AND SUBSTR(entityId, 1, 1) = ?`, [entityName, sector]); let entitiesInserted = entityChanges.length; @@ -89,9 +93,9 @@ function addEntityChangesForSector(entityName, sector) { log.info(`Added sector ${sector} of '${entityName}' (${entitiesInserted} entities) to the sync queue.`); } -function addEntityChangesForDependingEntity(sector, tableName, primaryKeyColumn) { +function addEntityChangesForDependingEntity(sector: string, tableName: string, primaryKeyColumn: string) { // problem in blobs might be caused by problem in entity referencing the blob - const dependingEntityChanges = sql.getRows(` + const dependingEntityChanges = sql.getRows(` SELECT dep_change.* FROM entity_changes orig_sector JOIN ${tableName} ON ${tableName}.blobId = orig_sector.entityId @@ -105,7 +109,7 @@ function addEntityChangesForDependingEntity(sector, tableName, primaryKeyColumn) return dependingEntityChanges.length; } -function cleanupEntityChangesForMissingEntities(entityName, entityPrimaryKey) { +function cleanupEntityChangesForMissingEntities(entityName: string, entityPrimaryKey: string) { sql.execute(` DELETE FROM entity_changes @@ -115,11 +119,11 @@ function cleanupEntityChangesForMissingEntities(entityName, entityPrimaryKey) { AND entityId NOT IN (SELECT ${entityPrimaryKey} FROM ${entityName})`); } -function fillEntityChanges(entityName, entityPrimaryKey, condition = '') { +function fillEntityChanges(entityName: string, entityPrimaryKey: string, condition = '') { cleanupEntityChangesForMissingEntities(entityName, entityPrimaryKey); sql.transactional(() => { - const entityIds = sql.getColumn(`SELECT ${entityPrimaryKey} FROM ${entityName} ${condition}`); + const entityIds = sql.getColumn(`SELECT ${entityPrimaryKey} FROM ${entityName} ${condition}`); let createdCount = 0; @@ -133,14 +137,14 @@ function fillEntityChanges(entityName, entityPrimaryKey, condition = '') { createdCount++; - const ec = { + const ec: Partial = { entityName, entityId, isErased: false }; if (entityName === 'blobs') 
{ - const blob = sql.getRow("SELECT blobId, content, utcDateModified FROM blobs WHERE blobId = ?", [entityId]); + const blob = sql.getRow("SELECT blobId, content, utcDateModified FROM blobs WHERE blobId = ?", [entityId]); ec.hash = blobService.calculateContentHash(blob); ec.utcDateChanged = blob.utcDateModified; ec.isSynced = true; // blobs are always synced @@ -161,7 +165,7 @@ function fillEntityChanges(entityName, entityPrimaryKey, condition = '') { } } - putEntityChange(ec); + putEntityChange(ec as EntityChange); } if (createdCount > 0) { @@ -186,10 +190,10 @@ function fillAllEntityChanges() { } function recalculateMaxEntityChangeId() { - maxEntityChangeId = sql.getValue("SELECT COALESCE(MAX(id), 0) FROM entity_changes"); + maxEntityChangeId = sql.getValue("SELECT COALESCE(MAX(id), 0) FROM entity_changes"); } -module.exports = { +export = { putNoteReorderingEntityChange, putEntityChangeForOtherInstances, putEntityChangeWithForcedChange, diff --git a/src/services/entity_changes_interface.ts b/src/services/entity_changes_interface.ts new file mode 100644 index 0000000000..11eb69ccb6 --- /dev/null +++ b/src/services/entity_changes_interface.ts @@ -0,0 +1,27 @@ +export interface EntityChange { + id?: number | null; + noteId?: string; + entityName: string; + entityId: string; + entity?: any; + positions?: Record; + hash: string; + utcDateChanged?: string; + utcDateModified?: string; + utcDateCreated?: string; + isSynced: boolean | 1 | 0; + isErased: boolean | 1 | 0; + componentId?: string | null; + changeId?: string | null; + instanceId?: string | null; +} + +export interface EntityRow { + isDeleted?: boolean; + content?: Buffer | string; +} + +export interface EntityChangeRecord { + entityChange: EntityChange; + entity?: EntityRow; +} diff --git a/src/services/env.js b/src/services/env.js deleted file mode 100644 index e7fa6caf81..0000000000 --- a/src/services/env.js +++ /dev/null @@ -1,5 +0,0 @@ -module.exports = { - isDev: function () { - return 
!!(process.env.TRILIUM_ENV && process.env.TRILIUM_ENV === 'dev'); - } -}; \ No newline at end of file diff --git a/src/services/env.ts b/src/services/env.ts new file mode 100644 index 0000000000..23c94ee0f3 --- /dev/null +++ b/src/services/env.ts @@ -0,0 +1,7 @@ +function isDev() { + return !!(process.env.TRILIUM_ENV && process.env.TRILIUM_ENV === 'dev'); +} + +export = { + isDev +}; \ No newline at end of file diff --git a/src/services/erase.js b/src/services/erase.ts similarity index 67% rename from src/services/erase.js rename to src/services/erase.ts index da65c3ebbb..6e6804f3d2 100644 --- a/src/services/erase.js +++ b/src/services/erase.ts @@ -1,13 +1,14 @@ -const sql = require("./sql.js"); -const revisionService = require("./revisions.js"); -const log = require("./log.js"); -const entityChangesService = require("./entity_changes.js"); -const optionService = require("./options.js"); -const dateUtils = require("./date_utils.js"); -const sqlInit = require("./sql_init.js"); -const cls = require("./cls.js"); - -function eraseNotes(noteIdsToErase) { +import sql = require("./sql"); +import revisionService = require("./revisions"); +import log = require("./log"); +import entityChangesService = require("./entity_changes"); +import optionService = require("./options"); +import dateUtils = require("./date_utils"); +import sqlInit = require("./sql_init"); +import cls = require("./cls"); +import { EntityChange } from "./entity_changes_interface"; + +function eraseNotes(noteIdsToErase: string[]) { if (noteIdsToErase.length === 0) { return; } @@ -16,17 +17,17 @@ function eraseNotes(noteIdsToErase) { setEntityChangesAsErased(sql.getManyRows(`SELECT * FROM entity_changes WHERE entityName = 'notes' AND entityId IN (???)`, noteIdsToErase)); // we also need to erase all "dependent" entities of the erased notes - const branchIdsToErase = sql.getManyRows(`SELECT branchId FROM branches WHERE noteId IN (???)`, noteIdsToErase) + const branchIdsToErase = sql.getManyRows<{ branchId: 
string }>(`SELECT branchId FROM branches WHERE noteId IN (???)`, noteIdsToErase) .map(row => row.branchId); eraseBranches(branchIdsToErase); - const attributeIdsToErase = sql.getManyRows(`SELECT attributeId FROM attributes WHERE noteId IN (???)`, noteIdsToErase) + const attributeIdsToErase = sql.getManyRows<{ attributeId: string }>(`SELECT attributeId FROM attributes WHERE noteId IN (???)`, noteIdsToErase) .map(row => row.attributeId); eraseAttributes(attributeIdsToErase); - const revisionIdsToErase = sql.getManyRows(`SELECT revisionId FROM revisions WHERE noteId IN (???)`, noteIdsToErase) + const revisionIdsToErase = sql.getManyRows<{ revisionId: string }>(`SELECT revisionId FROM revisions WHERE noteId IN (???)`, noteIdsToErase) .map(row => row.revisionId); eraseRevisions(revisionIdsToErase); @@ -34,7 +35,7 @@ function eraseNotes(noteIdsToErase) { log.info(`Erased notes: ${JSON.stringify(noteIdsToErase)}`); } -function setEntityChangesAsErased(entityChanges) { +function setEntityChangesAsErased(entityChanges: EntityChange[]) { for (const ec of entityChanges) { ec.isErased = true; // we're not changing hash here, not sure if good or not @@ -45,7 +46,7 @@ function setEntityChangesAsErased(entityChanges) { } } -function eraseBranches(branchIdsToErase) { +function eraseBranches(branchIdsToErase: string[]) { if (branchIdsToErase.length === 0) { return; } @@ -57,7 +58,7 @@ function eraseBranches(branchIdsToErase) { log.info(`Erased branches: ${JSON.stringify(branchIdsToErase)}`); } -function eraseAttributes(attributeIdsToErase) { +function eraseAttributes(attributeIdsToErase: string[]) { if (attributeIdsToErase.length === 0) { return; } @@ -69,7 +70,7 @@ function eraseAttributes(attributeIdsToErase) { log.info(`Erased attributes: ${JSON.stringify(attributeIdsToErase)}`); } -function eraseAttachments(attachmentIdsToErase) { +function eraseAttachments(attachmentIdsToErase: string[]) { if (attachmentIdsToErase.length === 0) { return; } @@ -81,7 +82,7 @@ function 
eraseAttachments(attachmentIdsToErase) { log.info(`Erased attachments: ${JSON.stringify(attachmentIdsToErase)}`); } -function eraseRevisions(revisionIdsToErase) { +function eraseRevisions(revisionIdsToErase: string[]) { if (revisionIdsToErase.length === 0) { return; } @@ -116,7 +117,7 @@ function eraseUnusedBlobs() { log.info(`Erased unused blobs: ${JSON.stringify(unusedBlobIds)}`); } -function eraseDeletedEntities(eraseEntitiesAfterTimeInSeconds = null) { +function eraseDeletedEntities(eraseEntitiesAfterTimeInSeconds: number | null = null) { // this is important also so that the erased entity changes are sent to the connected clients sql.transactional(() => { if (eraseEntitiesAfterTimeInSeconds === null) { @@ -125,41 +126,33 @@ function eraseDeletedEntities(eraseEntitiesAfterTimeInSeconds = null) { const cutoffDate = new Date(Date.now() - eraseEntitiesAfterTimeInSeconds * 1000); - const noteIdsToErase = sql.getColumn("SELECT noteId FROM notes WHERE isDeleted = 1 AND utcDateModified <= ?", [dateUtils.utcDateTimeStr(cutoffDate)]); - + const noteIdsToErase = sql.getColumn("SELECT noteId FROM notes WHERE isDeleted = 1 AND utcDateModified <= ?", [dateUtils.utcDateTimeStr(cutoffDate)]); eraseNotes(noteIdsToErase); - const branchIdsToErase = sql.getColumn("SELECT branchId FROM branches WHERE isDeleted = 1 AND utcDateModified <= ?", [dateUtils.utcDateTimeStr(cutoffDate)]); - + const branchIdsToErase = sql.getColumn("SELECT branchId FROM branches WHERE isDeleted = 1 AND utcDateModified <= ?", [dateUtils.utcDateTimeStr(cutoffDate)]); eraseBranches(branchIdsToErase); - const attributeIdsToErase = sql.getColumn("SELECT attributeId FROM attributes WHERE isDeleted = 1 AND utcDateModified <= ?", [dateUtils.utcDateTimeStr(cutoffDate)]); - + const attributeIdsToErase = sql.getColumn("SELECT attributeId FROM attributes WHERE isDeleted = 1 AND utcDateModified <= ?", [dateUtils.utcDateTimeStr(cutoffDate)]); eraseAttributes(attributeIdsToErase); - const attachmentIdsToErase = 
sql.getColumn("SELECT attachmentId FROM attachments WHERE isDeleted = 1 AND utcDateModified <= ?", [dateUtils.utcDateTimeStr(cutoffDate)]); - + const attachmentIdsToErase = sql.getColumn("SELECT attachmentId FROM attachments WHERE isDeleted = 1 AND utcDateModified <= ?", [dateUtils.utcDateTimeStr(cutoffDate)]); eraseAttachments(attachmentIdsToErase); eraseUnusedBlobs(); }); } -function eraseNotesWithDeleteId(deleteId) { - const noteIdsToErase = sql.getColumn("SELECT noteId FROM notes WHERE isDeleted = 1 AND deleteId = ?", [deleteId]); - +function eraseNotesWithDeleteId(deleteId: string) { + const noteIdsToErase = sql.getColumn("SELECT noteId FROM notes WHERE isDeleted = 1 AND deleteId = ?", [deleteId]); eraseNotes(noteIdsToErase); - const branchIdsToErase = sql.getColumn("SELECT branchId FROM branches WHERE isDeleted = 1 AND deleteId = ?", [deleteId]); - + const branchIdsToErase = sql.getColumn("SELECT branchId FROM branches WHERE isDeleted = 1 AND deleteId = ?", [deleteId]); eraseBranches(branchIdsToErase); - const attributeIdsToErase = sql.getColumn("SELECT attributeId FROM attributes WHERE isDeleted = 1 AND deleteId = ?", [deleteId]); - + const attributeIdsToErase = sql.getColumn("SELECT attributeId FROM attributes WHERE isDeleted = 1 AND deleteId = ?", [deleteId]); eraseAttributes(attributeIdsToErase); - const attachmentIdsToErase = sql.getColumn("SELECT attachmentId FROM attachments WHERE isDeleted = 1 AND deleteId = ?", [deleteId]); - + const attachmentIdsToErase = sql.getColumn("SELECT attachmentId FROM attachments WHERE isDeleted = 1 AND deleteId = ?", [deleteId]); eraseAttachments(attachmentIdsToErase); eraseUnusedBlobs(); @@ -173,13 +166,13 @@ function eraseUnusedAttachmentsNow() { eraseScheduledAttachments(0); } -function eraseScheduledAttachments(eraseUnusedAttachmentsAfterSeconds = null) { +function eraseScheduledAttachments(eraseUnusedAttachmentsAfterSeconds: number | null = null) { if (eraseUnusedAttachmentsAfterSeconds === null) { 
eraseUnusedAttachmentsAfterSeconds = optionService.getOptionInt('eraseUnusedAttachmentsAfterSeconds'); } const cutOffDate = dateUtils.utcDateTimeStr(new Date(Date.now() - (eraseUnusedAttachmentsAfterSeconds * 1000))); - const attachmentIdsToErase = sql.getColumn('SELECT attachmentId FROM attachments WHERE utcDateScheduledForErasureSince < ?', [cutOffDate]); + const attachmentIdsToErase = sql.getColumn('SELECT attachmentId FROM attachments WHERE utcDateScheduledForErasureSince < ?', [cutOffDate]); eraseAttachments(attachmentIdsToErase); } @@ -193,7 +186,7 @@ sqlInit.dbReady.then(() => { setInterval(cls.wrap(() => eraseScheduledAttachments()), 3600 * 1000); }); -module.exports = { +export = { eraseDeletedNotesNow, eraseUnusedAttachmentsNow, eraseNotesWithDeleteId, diff --git a/src/services/etapi_tokens.js b/src/services/etapi_tokens.ts similarity index 83% rename from src/services/etapi_tokens.js rename to src/services/etapi_tokens.ts index 4033b80166..2989d3923b 100644 --- a/src/services/etapi_tokens.js +++ b/src/services/etapi_tokens.ts @@ -1,17 +1,17 @@ -const becca = require('../becca/becca.js'); -const utils = require('./utils.js'); -const BEtapiToken = require('../becca/entities/betapi_token.js'); -const crypto = require("crypto"); +import becca = require('../becca/becca'); +import utils = require('./utils'); +import BEtapiToken = require('../becca/entities/betapi_token'); +import crypto = require("crypto"); function getTokens() { return becca.getEtapiTokens(); } -function getTokenHash(token) { +function getTokenHash(token: crypto.BinaryLike) { return crypto.createHash('sha256').update(token).digest('base64'); } -function createToken(tokenName) { +function createToken(tokenName: string) { const token = utils.randomSecureToken(32); const tokenHash = getTokenHash(token); @@ -25,7 +25,7 @@ function createToken(tokenName) { }; } -function parseAuthToken(auth) { +function parseAuthToken(auth: string | undefined) { if (!auth) { return null; } @@ -64,7 +64,7 @@ 
function parseAuthToken(auth) { } } -function isValidAuthHeader(auth) { +function isValidAuthHeader(auth: string | undefined) { const parsed = parseAuthToken(auth); if (!parsed) { @@ -93,7 +93,7 @@ function isValidAuthHeader(auth) { } } -function renameToken(etapiTokenId, newName) { +function renameToken(etapiTokenId: string, newName: string) { const etapiToken = becca.getEtapiToken(etapiTokenId); if (!etapiToken) { @@ -104,7 +104,7 @@ function renameToken(etapiTokenId, newName) { etapiToken.save(); } -function deleteToken(etapiTokenId) { +function deleteToken(etapiTokenId: string) { const etapiToken = becca.getEtapiToken(etapiTokenId); if (!etapiToken) { @@ -114,7 +114,7 @@ function deleteToken(etapiTokenId) { etapiToken.markAsDeletedSimple(); } -module.exports = { +export = { getTokens, createToken, renameToken, diff --git a/src/services/events.js b/src/services/events.ts similarity index 82% rename from src/services/events.js rename to src/services/events.ts index acb4091075..fffcc3982a 100644 --- a/src/services/events.js +++ b/src/services/events.ts @@ -1,4 +1,4 @@ -const log = require('./log.js'); +const log = require('./log'); const NOTE_TITLE_CHANGED = "NOTE_TITLE_CHANGED"; const ENTER_PROTECTED_SESSION = "ENTER_PROTECTED_SESSION"; @@ -11,13 +11,16 @@ const ENTITY_DELETE_SYNCED = "ENTITY_DELETE_SYNCED"; const CHILD_NOTE_CREATED = "CHILD_NOTE_CREATED"; const NOTE_CONTENT_CHANGE = "NOTE_CONTENT_CHANGED"; -const eventListeners = {}; +type EventType = string | string[]; +type EventListener = (data: any) => void; + +const eventListeners: Record = {}; /** * @param {string|string[]}eventTypes - can be either single event or an array of events * @param listener */ -function subscribe(eventTypes, listener) { +function subscribe(eventTypes: EventType, listener: EventListener) { if (!Array.isArray(eventTypes)) { eventTypes = [ eventTypes ]; } @@ -28,7 +31,7 @@ function subscribe(eventTypes, listener) { } } -function subscribeBeccaLoader(eventTypes, listener) { 
+function subscribeBeccaLoader(eventTypes: EventType, listener: EventListener) { if (!Array.isArray(eventTypes)) { eventTypes = [ eventTypes ]; } @@ -41,7 +44,7 @@ function subscribeBeccaLoader(eventTypes, listener) { } } -function emit(eventType, data) { +function emit(eventType: string, data: any) { const listeners = eventListeners[eventType]; if (listeners) { @@ -49,7 +52,7 @@ function emit(eventType, data) { try { listener(data); } - catch (e) { + catch (e: any) { log.error(`Listener threw error: ${e.message}, stack: ${e.stack}`); // we won't stop execution because of listener } @@ -57,7 +60,7 @@ function emit(eventType, data) { } } -module.exports = { +export = { subscribe, subscribeBeccaLoader, emit, diff --git a/src/services/export/md.js b/src/services/export/md.ts similarity index 52% rename from src/services/export/md.js rename to src/services/export/md.ts index 32022bee04..3e6754c1c7 100644 --- a/src/services/export/md.js +++ b/src/services/export/md.ts @@ -1,11 +1,11 @@ "use strict"; -const TurndownService = require('turndown'); -const turndownPluginGfm = require('joplin-turndown-plugin-gfm'); +import TurndownService = require('turndown'); +import turndownPluginGfm = require('joplin-turndown-plugin-gfm'); -let instance = null; +let instance: TurndownService | null = null; -function toMarkdown(content) { +function toMarkdown(content: string) { if (instance === null) { instance = new TurndownService({ codeBlockStyle: 'fenced' }); instance.use(turndownPluginGfm.gfm); @@ -14,6 +14,6 @@ function toMarkdown(content) { return instance.turndown(content); } -module.exports = { +export = { toMarkdown }; diff --git a/src/services/export/opml.js b/src/services/export/opml.ts similarity index 71% rename from src/services/export/opml.js rename to src/services/export/opml.ts index 976fb5b24a..63bdf14fd5 100644 --- a/src/services/export/opml.js +++ b/src/services/export/opml.ts @@ -1,9 +1,12 @@ "use strict"; -const utils = require('../utils.js'); -const becca = 
require('../../becca/becca.js'); +import utils = require('../utils'); +import becca = require('../../becca/becca'); +import TaskContext = require('../task_context'); +import BBranch = require('../../becca/entities/bbranch'); +import { Response } from 'express'; -function exportToOpml(taskContext, branch, version, res) { +function exportToOpml(taskContext: TaskContext, branch: BBranch, version: string, res: Response) { if (!['1.0', '2.0'].includes(version)) { throw new Error(`Unrecognized OPML version ${version}`); } @@ -12,9 +15,12 @@ function exportToOpml(taskContext, branch, version, res) { const note = branch.getNote(); - function exportNoteInner(branchId) { + function exportNoteInner(branchId: string) { const branch = becca.getBranch(branchId); + if (!branch) { throw new Error("Unable to find branch."); } + const note = branch.getNote(); + if (!note) { throw new Error("Unable to find note."); } if (note.hasOwnedLabel('excludeFromExport')) { return; @@ -24,13 +30,13 @@ function exportToOpml(taskContext, branch, version, res) { if (opmlVersion === 1) { const preparedTitle = escapeXmlAttribute(title); - const preparedContent = note.hasStringContent() ? prepareText(note.getContent()) : ''; + const preparedContent = note.hasStringContent() ? prepareText(note.getContent() as string) : ''; res.write(`\n`); } else if (opmlVersion === 2) { const preparedTitle = escapeXmlAttribute(title); - const preparedContent = note.hasStringContent() ? escapeXmlAttribute(note.getContent()) : ''; + const preparedContent = note.hasStringContent() ? 
escapeXmlAttribute(note.getContent() as string) : ''; res.write(`\n`); } @@ -41,7 +47,9 @@ function exportToOpml(taskContext, branch, version, res) { taskContext.increaseProgressCount(); for (const child of note.getChildBranches()) { - exportNoteInner(child.branchId); + if (child?.branchId) { + exportNoteInner(child.branchId); + } } res.write(''); @@ -60,7 +68,9 @@ function exportToOpml(taskContext, branch, version, res) { `); - exportNoteInner(branch.branchId); + if (branch.branchId) { + exportNoteInner(branch.branchId); + } res.write(` `); @@ -69,7 +79,7 @@ function exportToOpml(taskContext, branch, version, res) { taskContext.taskSucceeded(); } -function prepareText(text) { +function prepareText(text: string) { const newLines = text.replace(/(]*>|)/g, '\n') .replace(/ /g, ' '); // nbsp isn't in XML standard (only HTML) @@ -80,7 +90,7 @@ function prepareText(text) { return escaped.replace(/\n/g, ' '); } -function escapeXmlAttribute(text) { +function escapeXmlAttribute(text: string) { return text.replace(/&/g, '&') .replace(//g, '>') @@ -88,6 +98,6 @@ function escapeXmlAttribute(text) { .replace(/'/g, '''); } -module.exports = { +export = { exportToOpml }; diff --git a/src/services/export/single.js b/src/services/export/single.ts similarity index 85% rename from src/services/export/single.js rename to src/services/export/single.ts index a3bc03335a..f4ac586929 100644 --- a/src/services/export/single.js +++ b/src/services/export/single.ts @@ -1,12 +1,15 @@ "use strict"; -const mimeTypes = require('mime-types'); -const html = require('html'); -const utils = require('../utils.js'); -const mdService = require('./md.js'); -const becca = require('../../becca/becca.js'); - -function exportSingleNote(taskContext, branch, format, res) { +import mimeTypes = require('mime-types'); +import html = require('html'); +import utils = require('../utils'); +import mdService = require('./md'); +import becca = require('../../becca/becca'); +import TaskContext = 
require('../task_context'); +import BBranch = require('../../becca/entities/bbranch'); +import { Response } from 'express'; + +function exportSingleNote(taskContext: TaskContext, branch: BBranch, format: "html" | "markdown", res: Response) { const note = branch.getNote(); if (note.type === 'image' || note.type === 'file') { @@ -20,6 +23,9 @@ function exportSingleNote(taskContext, branch, format, res) { let payload, extension, mime; let content = note.getContent(); + if (typeof content !== "string") { + throw new Error("Unsupported context type for export."); + } if (note.type === 'text') { if (format === 'html') { @@ -64,7 +70,7 @@ function exportSingleNote(taskContext, branch, format, res) { taskContext.taskSucceeded(); } -function inlineAttachments(content) { +function inlineAttachments(content: string) { content = content.replace(/src="[^"]*api\/images\/([a-zA-Z0-9_]+)\/?[^"]+"/g, (match, noteId) => { const note = becca.getNote(noteId); if (!note || !note.mime.startsWith('image/')) { @@ -119,6 +125,6 @@ function inlineAttachments(content) { return content; } -module.exports = { +export = { exportSingleNote }; diff --git a/src/services/export/zip.js b/src/services/export/zip.ts similarity index 71% rename from src/services/export/zip.js rename to src/services/export/zip.ts index dba7338d30..9d369e593b 100644 --- a/src/services/export/zip.js +++ b/src/services/export/zip.ts @@ -1,33 +1,28 @@ "use strict"; -const html = require('html'); -const dateUtils = require('../date_utils.js'); -const path = require('path'); -const mimeTypes = require('mime-types'); -const mdService = require('./md.js'); -const packageInfo = require('../../../package.json'); -const utils = require('../utils.js'); -const protectedSessionService = require('../protected_session.js'); -const sanitize = require("sanitize-filename"); -const fs = require("fs"); -const becca = require('../../becca/becca.js'); -const RESOURCE_DIR = require('../../services/resource_dir.js').RESOURCE_DIR; -const 
archiver = require('archiver'); -const log = require('../log.js'); -const TaskContext = require('../task_context.js'); -const ValidationError = require('../../errors/validation_error.js'); -const NoteMeta = require('../meta/note_meta.js'); -const AttachmentMeta = require('../meta/attachment_meta.js'); -const AttributeMeta = require('../meta/attribute_meta.js'); - -/** - * @param {TaskContext} taskContext - * @param {BBranch} branch - * @param {string} format - 'html' or 'markdown' - * @param {object} res - express response - * @param {boolean} setHeaders - */ -async function exportToZip(taskContext, branch, format, res, setHeaders = true) { +import html = require('html'); +import dateUtils = require('../date_utils'); +import path = require('path'); +import mimeTypes = require('mime-types'); +import mdService = require('./md'); +import packageInfo = require('../../../package.json'); +import utils = require('../utils'); +import protectedSessionService = require('../protected_session'); +import sanitize = require("sanitize-filename"); +import fs = require("fs"); +import becca = require('../../becca/becca'); +const RESOURCE_DIR = require('../../services/resource_dir').RESOURCE_DIR; +import archiver = require('archiver'); +import log = require('../log'); +import TaskContext = require('../task_context'); +import ValidationError = require('../../errors/validation_error'); +import NoteMeta = require('../meta/note_meta'); +import AttachmentMeta = require('../meta/attachment_meta'); +import AttributeMeta = require('../meta/attribute_meta'); +import BBranch = require('../../becca/entities/bbranch'); +import { Response } from 'express'; + +async function exportToZip(taskContext: TaskContext, branch: BBranch, format: "html" | "markdown", res: Response | fs.WriteStream, setHeaders = true) { if (!['html', 'markdown'].includes(format)) { throw new ValidationError(`Only 'html' and 'markdown' allowed as export format, '${format}' given`); } @@ -36,15 +31,9 @@ async function 
exportToZip(taskContext, branch, format, res, setHeaders = true) zlib: { level: 9 } // Sets the compression level. }); - /** @type {Object.} */ - const noteIdToMeta = {}; + const noteIdToMeta: Record = {}; - /** - * @param {Object.} existingFileNames - * @param {string} fileName - * @returns {string} - */ - function getUniqueFilename(existingFileNames, fileName) { + function getUniqueFilename(existingFileNames: Record, fileName: string) { const lcFileName = fileName.toLowerCase(); if (lcFileName in existingFileNames) { @@ -67,14 +56,7 @@ async function exportToZip(taskContext, branch, format, res, setHeaders = true) } } - /** - * @param {string|null} type - * @param {string} mime - * @param {string} baseFileName - * @param {Object.} existingFileNames - * @return {string} - */ - function getDataFileName(type, mime, baseFileName, existingFileNames) { + function getDataFileName(type: string | null, mime: string, baseFileName: string, existingFileNames: Record): string { let fileName = baseFileName.trim(); if (fileName.length > 30) { fileName = fileName.substr(0, 30).trim(); @@ -115,13 +97,7 @@ async function exportToZip(taskContext, branch, format, res, setHeaders = true) return getUniqueFilename(existingFileNames, fileName); } - /** - * @param {BBranch} branch - * @param {NoteMeta} parentMeta - * @param {Object.} existingFileNames - * @returns {NoteMeta|null} - */ - function createNoteMeta(branch, parentMeta, existingFileNames) { + function createNoteMeta(branch: BBranch, parentMeta: Partial, existingFileNames: Record): NoteMeta | null { const note = branch.getNote(); if (note.hasOwnedLabel('excludeFromExport')) { @@ -136,24 +112,26 @@ async function exportToZip(taskContext, branch, format, res, setHeaders = true) baseFileName = baseFileName.substr(0, 200); } + if (!parentMeta.notePath) { throw new Error("Missing parent note path."); } const notePath = parentMeta.notePath.concat([note.noteId]); if (note.noteId in noteIdToMeta) { const fileName = 
getUniqueFilename(existingFileNames, `${baseFileName}.clone.${format === 'html' ? 'html' : 'md'}`); - const meta = new NoteMeta(); - meta.isClone = true; - meta.noteId = note.noteId; - meta.notePath = notePath; - meta.title = note.getTitleOrProtected(); - meta.prefix = branch.prefix; - meta.dataFileName = fileName; - meta.type = 'text'; // export will have text description - meta.format = format; + const meta: NoteMeta = { + isClone: true, + noteId: note.noteId, + notePath: notePath, + title: note.getTitleOrProtected(), + prefix: branch.prefix, + dataFileName: fileName, + type: 'text', // export will have text description + format: format + }; return meta; } - const meta = new NoteMeta(); + const meta: Partial = {}; meta.isClone = false; meta.noteId = note.noteId; meta.notePath = notePath; @@ -164,12 +142,14 @@ async function exportToZip(taskContext, branch, format, res, setHeaders = true) meta.type = note.type; meta.mime = note.mime; meta.attributes = note.getOwnedAttributes().map(attribute => { - const attrMeta = new AttributeMeta(); - attrMeta.type = attribute.type; - attrMeta.name = attribute.name; - attrMeta.value = attribute.value; - attrMeta.isInheritable = attribute.isInheritable; - attrMeta.position = attribute.position; + const attrMeta = { + type: attribute.type, + name: attribute.name, + value: attribute.value, + isInheritable: attribute.isInheritable, + position: attribute.position + }; + return attrMeta; }); @@ -179,12 +159,12 @@ async function exportToZip(taskContext, branch, format, res, setHeaders = true) meta.format = format; } - noteIdToMeta[note.noteId] = meta; + noteIdToMeta[note.noteId] = meta as NoteMeta; // sort children for having a stable / reproducible export format note.sortChildren(); const childBranches = note.getChildBranches() - .filter(branch => branch.noteId !== '_hidden'); + .filter(branch => branch?.noteId !== '_hidden'); const available = !note.isProtected || protectedSessionService.isProtectedSessionAvailable(); @@ -196,18 
+176,19 @@ async function exportToZip(taskContext, branch, format, res, setHeaders = true) const attachments = note.getAttachments(); meta.attachments = attachments .map(attachment => { - const attMeta = new AttachmentMeta(); - attMeta.attachmentId = attachment.attachmentId; - attMeta.title = attachment.title; - attMeta.role = attachment.role; - attMeta.mime = attachment.mime; - attMeta.position = attachment.position; - attMeta.dataFileName = getDataFileName( - null, - attachment.mime, - baseFileName + "_" + attachment.title, - existingFileNames - ); + const attMeta: AttachmentMeta = { + attachmentId: attachment.attachmentId, + title: attachment.title, + role: attachment.role, + mime: attachment.mime, + position: attachment.position, + dataFileName: getDataFileName( + null, + attachment.mime, + baseFileName + "_" + attachment.title, + existingFileNames + ) + }; return attMeta; }); @@ -219,7 +200,9 @@ async function exportToZip(taskContext, branch, format, res, setHeaders = true) const childExistingNames = {}; for (const childBranch of childBranches) { - const note = createNoteMeta(childBranch, meta, childExistingNames); + if (!childBranch) { continue; } + + const note = createNoteMeta(childBranch, meta as NoteMeta, childExistingNames); // can be undefined if export is disabled for this note if (note) { @@ -228,18 +211,13 @@ async function exportToZip(taskContext, branch, format, res, setHeaders = true) } } - return meta; + return meta as NoteMeta; } - /** - * @param {string} targetNoteId - * @param {NoteMeta} sourceMeta - * @return {string|null} - */ - function getNoteTargetUrl(targetNoteId, sourceMeta) { + function getNoteTargetUrl(targetNoteId: string, sourceMeta: NoteMeta): string | null { const targetMeta = noteIdToMeta[targetNoteId]; - if (!targetMeta) { + if (!targetMeta || !targetMeta.notePath || !sourceMeta.notePath) { return null; } @@ -256,24 +234,20 @@ async function exportToZip(taskContext, branch, format, res, setHeaders = true) for (let i = 0; i < 
targetPath.length - 1; i++) { const meta = noteIdToMeta[targetPath[i]]; - - url += `${encodeURIComponent(meta.dirFileName)}/`; + if (meta.dirFileName) { + url += `${encodeURIComponent(meta.dirFileName)}/`; + } } const meta = noteIdToMeta[targetPath[targetPath.length - 1]]; // link can target note which is only "folder-note" and as such, will not have a file in an export - url += encodeURIComponent(meta.dataFileName || meta.dirFileName); + url += encodeURIComponent(meta.dataFileName || meta.dirFileName || ""); return url; } - /** - * @param {string} content - * @param {NoteMeta} noteMeta - * @return {string} - */ - function rewriteLinks(content, noteMeta) { + function rewriteLinks(content: string, noteMeta: NoteMeta): string { content = content.replace(/src="[^"]*api\/images\/([a-zA-Z0-9_]+)\/[^"]*"/g, (match, targetNoteId) => { const url = getNoteTargetUrl(targetNoteId, noteMeta); @@ -300,10 +274,10 @@ async function exportToZip(taskContext, branch, format, res, setHeaders = true) return content; - function findAttachment(targetAttachmentId) { + function findAttachment(targetAttachmentId: string) { let url; - const attachmentMeta = noteMeta.attachments.find(attMeta => attMeta.attachmentId === targetAttachmentId); + const attachmentMeta = (noteMeta.attachments || []).find(attMeta => attMeta.attachmentId === targetAttachmentId); if (attachmentMeta) { // easy job here, because attachment will be in the same directory as the note's data file. 
url = attachmentMeta.dataFileName; @@ -314,21 +288,17 @@ async function exportToZip(taskContext, branch, format, res, setHeaders = true) } } - /** - * @param {string} title - * @param {string|Buffer} content - * @param {NoteMeta} noteMeta - * @return {string|Buffer} - */ - function prepareContent(title, content, noteMeta) { - if (['html', 'markdown'].includes(noteMeta.format)) { + function prepareContent(title: string, content: string | Buffer, noteMeta: NoteMeta): string | Buffer { + if (['html', 'markdown'].includes(noteMeta?.format || "")) { content = content.toString(); content = rewriteLinks(content, noteMeta); } - if (noteMeta.format === 'html') { + if (noteMeta.format === 'html' && typeof content === "string") { if (!content.substr(0, 100).toLowerCase().includes(" 0 && !markdownContent.startsWith("# ")) { @@ -368,17 +338,17 @@ ${markdownContent}`; } } - /** - * @param {NoteMeta} noteMeta - * @param {string} filePathPrefix - */ - function saveNote(noteMeta, filePathPrefix) { + function saveNote(noteMeta: NoteMeta, filePathPrefix: string) { log.info(`Exporting note '${noteMeta.noteId}'`); + if (!noteMeta.noteId || !noteMeta.title) { + throw new Error("Missing note meta."); + } + if (noteMeta.isClone) { const targetUrl = getNoteTargetUrl(noteMeta.noteId, noteMeta); - let content = `

This is a clone of a note. Go to its primary location.

`; + let content: string | Buffer = `

This is a clone of a note. Go to its primary location.

`; content = prepareContent(noteMeta.title, content, noteMeta); @@ -388,6 +358,8 @@ ${markdownContent}`; } const note = becca.getNote(noteMeta.noteId); + if (!note) { throw new Error("Unable to find note."); } + if (!note.utcDateModified) { throw new Error("Unable to find modification date."); } if (noteMeta.dataFileName) { const content = prepareContent(noteMeta.title, note.getContent(), noteMeta); @@ -400,7 +372,9 @@ ${markdownContent}`; taskContext.increaseProgressCount(); - for (const attachmentMeta of noteMeta.attachments) { + for (const attachmentMeta of noteMeta.attachments || []) { + if (!attachmentMeta.attachmentId) { continue; } + const attachment = note.getAttachmentById(attachmentMeta.attachmentId); const content = attachment.getContent(); @@ -410,29 +384,25 @@ ${markdownContent}`; }); } - if (noteMeta.children?.length > 0) { + if (noteMeta.children?.length || 0 > 0) { const directoryPath = filePathPrefix + noteMeta.dirFileName; // create directory archive.append('', { name: `${directoryPath}/`, date: dateUtils.parseDateTime(note.utcDateModified) }); - for (const childMeta of noteMeta.children) { + for (const childMeta of noteMeta.children || []) { saveNote(childMeta, `${directoryPath}/`); } } } - /** - * @param {NoteMeta} rootMeta - * @param {NoteMeta} navigationMeta - */ - function saveNavigation(rootMeta, navigationMeta) { - function saveNavigationInner(meta) { + function saveNavigation(rootMeta: NoteMeta, navigationMeta: NoteMeta) { + function saveNavigationInner(meta: NoteMeta) { let html = '
  • '; const escapedTitle = utils.escapeHtml(`${meta.prefix ? `${meta.prefix} - ` : ''}${meta.title}`); - if (meta.dataFileName) { + if (meta.dataFileName && meta.noteId) { const targetUrl = getNoteTargetUrl(meta.noteId, rootMeta); html += `${escapedTitle}`; @@ -470,16 +440,12 @@ ${markdownContent}`; archive.append(prettyHtml, { name: navigationMeta.dataFileName }); } - /** - * @param {NoteMeta} rootMeta - * @param {NoteMeta} indexMeta - */ - function saveIndex(rootMeta, indexMeta) { + function saveIndex(rootMeta: NoteMeta, indexMeta: NoteMeta) { let firstNonEmptyNote; let curMeta = rootMeta; while (!firstNonEmptyNote) { - if (curMeta.dataFileName) { + if (curMeta.dataFileName && curMeta.noteId) { firstNonEmptyNote = getNoteTargetUrl(curMeta.noteId, rootMeta); } @@ -506,17 +472,13 @@ ${markdownContent}`; archive.append(fullHtml, { name: indexMeta.dataFileName }); } - /** - * @param {NoteMeta} rootMeta - * @param {NoteMeta} cssMeta - */ - function saveCss(rootMeta, cssMeta) { + function saveCss(rootMeta: NoteMeta, cssMeta: NoteMeta) { const cssContent = fs.readFileSync(`${RESOURCE_DIR}/libraries/ckeditor/ckeditor-content.css`); archive.append(cssContent, { name: cssMeta.dataFileName }); } - const existingFileNames = format === 'html' ? ['navigation', 'index'] : []; + const existingFileNames: Record = format === 'html' ? 
{'navigation': 0, 'index': 1} : {}; const rootMeta = createNoteMeta(branch, { notePath: [] }, existingFileNames); const metaFile = { @@ -525,7 +487,9 @@ ${markdownContent}`; files: [ rootMeta ] }; - let navigationMeta, indexMeta, cssMeta; + let navigationMeta: NoteMeta | null = null; + let indexMeta: NoteMeta | null = null; + let cssMeta: NoteMeta | null = null; if (format === 'html') { navigationMeta = { @@ -552,7 +516,7 @@ ${markdownContent}`; for (const noteMeta of Object.values(noteIdToMeta)) { // filter out relations which are not inside this export - noteMeta.attributes = noteMeta.attributes.filter(attr => { + noteMeta.attributes = (noteMeta.attributes || []).filter(attr => { if (attr.type !== 'relation') { return true; } else if (attr.value in noteIdToMeta) { @@ -567,7 +531,9 @@ ${markdownContent}`; } if (!rootMeta) { // corner case of disabled export for exported note - res.sendStatus(400); + if ("sendStatus" in res) { + res.sendStatus(400); + } return; } @@ -578,6 +544,10 @@ ${markdownContent}`; saveNote(rootMeta, ''); if (format === 'html') { + if (!navigationMeta || !indexMeta || !cssMeta) { + throw new Error("Missing meta."); + } + saveNavigation(rootMeta, navigationMeta); saveIndex(rootMeta, indexMeta); saveCss(rootMeta, cssMeta); @@ -586,7 +556,7 @@ ${markdownContent}`; const note = branch.getNote(); const zipFileName = `${branch.prefix ? 
`${branch.prefix} - ` : ""}${note.getTitleOrProtected()}.zip`; - if (setHeaders) { + if (setHeaders && "setHeader" in res) { res.setHeader('Content-Disposition', utils.getContentDisposition(zipFileName)); res.setHeader('Content-Type', 'application/zip'); } @@ -597,7 +567,7 @@ ${markdownContent}`; taskContext.taskSucceeded(); } -async function exportToZipFile(noteId, format, zipFilePath) { +async function exportToZipFile(noteId: string, format: "markdown" | "html", zipFilePath: string) { const fileOutputStream = fs.createWriteStream(zipFilePath); const taskContext = new TaskContext('no-progress-reporting'); @@ -612,7 +582,7 @@ async function exportToZipFile(noteId, format, zipFilePath) { log.info(`Exported '${noteId}' with format '${format}' to '${zipFilePath}'`); } -module.exports = { +export = { exportToZip, exportToZipFile }; diff --git a/src/services/handlers.js b/src/services/handlers.js index 769c69485a..5a54a67ac5 100644 --- a/src/services/handlers.js +++ b/src/services/handlers.js @@ -1,11 +1,11 @@ -const eventService = require('./events.js'); -const scriptService = require('./script.js'); -const treeService = require('./tree.js'); -const noteService = require('./notes.js'); -const becca = require('../becca/becca.js'); -const BAttribute = require('../becca/entities/battribute.js'); -const hiddenSubtreeService = require('./hidden_subtree.js'); -const oneTimeTimer = require('./one_time_timer.js'); +const eventService = require('./events'); +const scriptService = require('./script'); +const treeService = require('./tree'); +const noteService = require('./notes'); +const becca = require('../becca/becca'); +const BAttribute = require('../becca/entities/battribute'); +const hiddenSubtreeService = require('./hidden_subtree'); +const oneTimeTimer = require('./one_time_timer'); function runAttachedRelations(note, relationName, originEntity) { if (!note) { @@ -42,7 +42,7 @@ eventService.subscribe(eventService.NOTE_TITLE_CHANGED, note => { } }); 
-eventService.subscribe([ eventService.ENTITY_CHANGED, eventService.ENTITY_DELETED ], ({ entityName, entity }) => { +eventService.subscribe([eventService.ENTITY_CHANGED, eventService.ENTITY_DELETED], ({ entityName, entity }) => { if (entityName === 'attributes') { runAttachedRelations(entity.getNote(), 'runOnAttributeChange', entity); @@ -58,7 +58,7 @@ eventService.subscribe([ eventService.ENTITY_CHANGED, eventService.ENTITY_DELETE } }); -eventService.subscribe(eventService.ENTITY_CHANGED, ({entityName, entity}) => { +eventService.subscribe(eventService.ENTITY_CHANGED, ({ entityName, entity }) => { if (entityName === 'branches') { const parentNote = becca.getNote(entity.parentNoteId); @@ -74,7 +74,7 @@ eventService.subscribe(eventService.ENTITY_CHANGED, ({entityName, entity}) => { } }); -eventService.subscribe(eventService.NOTE_CONTENT_CHANGE, ({entity}) => { +eventService.subscribe(eventService.NOTE_CONTENT_CHANGE, ({ entity }) => { runAttachedRelations(entity, 'runOnNoteContentChange', entity); }); diff --git a/src/services/hidden_subtree.js b/src/services/hidden_subtree.ts similarity index 93% rename from src/services/hidden_subtree.js rename to src/services/hidden_subtree.ts index 9c2820943b..5478aed1cf 100644 --- a/src/services/hidden_subtree.js +++ b/src/services/hidden_subtree.ts @@ -1,8 +1,10 @@ -const becca = require('../becca/becca.js'); -const noteService = require('./notes.js'); -const BAttribute = require('../becca/entities/battribute.js'); -const log = require('./log.js'); -const migrationService = require('./migration.js'); +import BAttribute = require("../becca/entities/battribute"); +import { AttributeType, NoteType } from "../becca/entities/rows"; + +import becca = require('../becca/becca'); +import noteService = require('./notes'); +import log = require('./log'); +import migrationService = require('./migration'); const LBTPL_ROOT = "_lbTplRoot"; const LBTPL_BASE = "_lbTplBase"; @@ -13,13 +15,36 @@ const LBTPL_BUILTIN_WIDGET = 
"_lbTplBuiltinWidget"; const LBTPL_SPACER = "_lbTplSpacer"; const LBTPL_CUSTOM_WIDGET = "_lbTplCustomWidget"; +interface Attribute { + type: AttributeType; + name: string; + isInheritable?: boolean; + value?: string +} + +interface Item { + notePosition?: number; + id: string; + title: string; + type: NoteType; + icon?: string; + attributes?: Attribute[]; + children?: Item[]; + isExpanded?: boolean; + baseSize?: string; + growthFactor?: string; + targetNoteId?: "_backendLog" | "_globalNoteMap"; + builtinWidget?: "bookmarks" | "spacer" | "backInHistoryButton" | "forwardInHistoryButton" | "syncStatus" | "protectedSession" | "todayInJournal" | "calendar"; + command?: "jumpToNote" | "searchNotes" | "createNoteIntoInbox" | "showRecentChanges"; +} + /* * Hidden subtree is generated as a "predictable structure" which means that it avoids generating random IDs to always * produce the same structure. This is needed because it is run on multiple instances in the sync cluster which might produce * duplicate subtrees. This way, all instances will generate the same structure with the same IDs. 
*/ -const HIDDEN_SUBTREE_DEFINITION = { +const HIDDEN_SUBTREE_DEFINITION: Item = { id: '_hidden', title: 'Hidden Notes', type: 'doc', @@ -244,7 +269,7 @@ function checkHiddenSubtree(force = false) { checkHiddenSubtreeRecursively('root', HIDDEN_SUBTREE_DEFINITION); } -function checkHiddenSubtreeRecursively(parentNoteId, item) { +function checkHiddenSubtreeRecursively(parentNoteId: string, item: Item) { if (!item.id || !item.type || !item.title) { throw new Error(`Item does not contain mandatory properties: ${JSON.stringify(item)}`); } @@ -337,7 +362,7 @@ function checkHiddenSubtreeRecursively(parentNoteId, item) { } } -module.exports = { +export = { checkHiddenSubtree, LBTPL_ROOT, LBTPL_BASE, diff --git a/src/services/hoisted_note.js b/src/services/hoisted_note.ts similarity index 81% rename from src/services/hoisted_note.js rename to src/services/hoisted_note.ts index f3f4e56756..fca4232695 100644 --- a/src/services/hoisted_note.js +++ b/src/services/hoisted_note.ts @@ -1,5 +1,5 @@ -const cls = require('./cls.js'); -const becca = require('../becca/becca.js'); +import cls = require('./cls'); +import becca = require('../becca/becca'); function getHoistedNoteId() { return cls.getHoistedNoteId(); @@ -26,14 +26,14 @@ function isHoistedInHiddenSubtree() { function getWorkspaceNote() { const hoistedNote = becca.getNote(cls.getHoistedNoteId()); - if (hoistedNote.isRoot() || hoistedNote.hasLabel('workspace')) { + if (hoistedNote && (hoistedNote.isRoot() || hoistedNote.hasLabel('workspace'))) { return hoistedNote; } else { return becca.getRoot(); } } -module.exports = { +export = { getHoistedNoteId, getWorkspaceNote, isHoistedInHiddenSubtree diff --git a/src/services/host.js b/src/services/host.js deleted file mode 100644 index 2430d45c2d..0000000000 --- a/src/services/host.js +++ /dev/null @@ -1,3 +0,0 @@ -const config = require('./config.js'); - -module.exports = process.env.TRILIUM_HOST || config['Network']['host'] || '0.0.0.0'; diff --git a/src/services/host.ts 
b/src/services/host.ts new file mode 100644 index 0000000000..3daef1504b --- /dev/null +++ b/src/services/host.ts @@ -0,0 +1,3 @@ +import config = require('./config'); + +export = process.env.TRILIUM_HOST || config['Network']['host'] || '0.0.0.0'; diff --git a/src/services/html_sanitizer.js b/src/services/html_sanitizer.ts similarity index 87% rename from src/services/html_sanitizer.js rename to src/services/html_sanitizer.ts index 99bb567ad8..8e82edd3cc 100644 --- a/src/services/html_sanitizer.js +++ b/src/services/html_sanitizer.ts @@ -1,18 +1,18 @@ -const sanitizeHtml = require('sanitize-html'); -const sanitizeUrl = require('@braintree/sanitize-url').sanitizeUrl; +import sanitizeHtml = require('sanitize-html'); +import sanitizeUrl = require('@braintree/sanitize-url'); // intended mainly as protection against XSS via import // secondarily, it (partly) protects against "CSS takeover" // sanitize also note titles, label values etc. - there are so many usages which make it difficult // to guarantee all of them are properly handled -function sanitize(dirtyHtml) { +function sanitize(dirtyHtml: string) { if (!dirtyHtml) { return dirtyHtml; } // avoid H1 per https://github.com/zadam/trilium/issues/1552 // demote H1, and if that conflicts with existing H2, demote that, etc - const transformTags = {}; + const transformTags: Record = {}; const lowercasedHtml = dirtyHtml.toLowerCase(); for (let i = 1; i < 6; ++i) { if (lowercasedHtml.includes(` { - return sanitizeUrl(url).trim(); + sanitizeUrl: (url: string) => { + return sanitizeUrl.sanitizeUrl(url).trim(); } }; diff --git a/src/services/image.js b/src/services/image.ts similarity index 79% rename from src/services/image.js rename to src/services/image.ts index 5a45e85caf..0a95b87d6c 100644 --- a/src/services/image.js +++ b/src/services/image.ts @@ -1,19 +1,19 @@ "use strict"; -const becca = require('../becca/becca.js'); -const log = require('./log.js'); -const protectedSessionService = require('./protected_session.js'); 
-const noteService = require('./notes.js'); -const optionService = require('./options.js'); -const sql = require('./sql.js'); -const jimp = require('jimp'); -const imageType = require('image-type'); -const sanitizeFilename = require('sanitize-filename'); -const isSvg = require('is-svg'); -const isAnimated = require('is-animated'); -const htmlSanitizer = require('./html_sanitizer.js'); - -async function processImage(uploadBuffer, originalName, shrinkImageSwitch) { +import becca = require('../becca/becca'); +import log = require('./log'); +import protectedSessionService = require('./protected_session'); +import noteService = require('./notes'); +import optionService = require('./options'); +import sql = require('./sql'); +import jimp = require('jimp'); +import imageType = require('image-type'); +import sanitizeFilename = require('sanitize-filename'); +import isSvg = require('is-svg'); +import isAnimated = require('is-animated'); +import htmlSanitizer = require('./html_sanitizer'); + +async function processImage(uploadBuffer: Buffer, originalName: string, shrinkImageSwitch: boolean) { const compressImages = optionService.getOptionBool("compressImages"); const origImageFormat = getImageType(uploadBuffer); @@ -44,7 +44,7 @@ async function processImage(uploadBuffer, originalName, shrinkImageSwitch) { }; } -function getImageType(buffer) { +function getImageType(buffer: Buffer) { if (isSvg(buffer)) { return { ext: 'svg' @@ -57,18 +57,19 @@ function getImageType(buffer) { } } -function getImageMimeFromExtension(ext) { +function getImageMimeFromExtension(ext: string) { ext = ext.toLowerCase(); return `image/${ext === 'svg' ? 
'svg+xml' : ext}`; } -function updateImage(noteId, uploadBuffer, originalName) { +function updateImage(noteId: string, uploadBuffer: Buffer, originalName: string) { log.info(`Updating image ${noteId}: ${originalName}`); originalName = htmlSanitizer.sanitize(originalName); const note = becca.getNote(noteId); + if (!note) { throw new Error("Unable to find note."); } note.saveRevision(); @@ -85,7 +86,7 @@ function updateImage(noteId, uploadBuffer, originalName) { }); } -function saveImage(parentNoteId, uploadBuffer, originalName, shrinkImageSwitch, trimFilename = false) { +function saveImage(parentNoteId: string, uploadBuffer: Buffer, originalName: string, shrinkImageSwitch: boolean, trimFilename = false) { log.info(`Saving image ${originalName} into parent ${parentNoteId}`); if (trimFilename && originalName.length > 40) { @@ -95,6 +96,7 @@ function saveImage(parentNoteId, uploadBuffer, originalName, shrinkImageSwitch, const fileName = sanitizeFilename(originalName); const parentNote = becca.getNote(parentNoteId); + if (!parentNote) { throw new Error("Unable to find parent note."); } const {note} = noteService.createNewNote({ parentNoteId, @@ -131,7 +133,7 @@ function saveImage(parentNoteId, uploadBuffer, originalName, shrinkImageSwitch, }; } -function saveImageToAttachment(noteId, uploadBuffer, originalName, shrinkImageSwitch, trimFilename = false) { +function saveImageToAttachment(noteId: string, uploadBuffer: Buffer, originalName: string, shrinkImageSwitch?: boolean, trimFilename = false) { log.info(`Saving image '${originalName}' as attachment into note '${noteId}'`); if (trimFilename && originalName.length > 40) { @@ -154,15 +156,16 @@ function saveImageToAttachment(noteId, uploadBuffer, originalName, shrinkImageSw setTimeout(() => { sql.transactional(() => { const note = becca.getNoteOrThrow(noteId); - const noteService = require('../services/notes.js'); + const noteService = require('../services/notes'); noteService.asyncPostProcessContent(note, 
note.getContent()); // to mark an unused attachment for deletion }); }, 5000); // resizing images asynchronously since JIMP does not support sync operation - processImage(uploadBuffer, originalName, shrinkImageSwitch).then(({buffer, imageFormat}) => { + processImage(uploadBuffer, originalName, !!shrinkImageSwitch).then(({buffer, imageFormat}) => { sql.transactional(() => { // re-read, might be changed in the meantime + if (!attachment.attachmentId) { throw new Error("Missing attachment ID."); } attachment = becca.getAttachmentOrThrow(attachment.attachmentId); attachment.mime = getImageMimeFromExtension(imageFormat.ext); @@ -179,7 +182,7 @@ function saveImageToAttachment(noteId, uploadBuffer, originalName, shrinkImageSw return attachment; } -async function shrinkImage(buffer, originalName) { +async function shrinkImage(buffer: Buffer, originalName: string) { let jpegQuality = optionService.getOptionInt('imageJpegQuality', 0); if (jpegQuality < 10 || jpegQuality > 100) { @@ -190,7 +193,7 @@ async function shrinkImage(buffer, originalName) { try { finalImageBuffer = await resize(buffer, jpegQuality); } - catch (e) { + catch (e: any) { log.error(`Failed to resize image '${originalName}', stack: ${e.stack}`); finalImageBuffer = buffer; @@ -205,7 +208,7 @@ async function shrinkImage(buffer, originalName) { return finalImageBuffer; } -async function resize(buffer, quality) { +async function resize(buffer: Buffer, quality: number) { const imageMaxWidthHeight = optionService.getOptionInt('imageMaxWidthHeight'); const start = Date.now(); @@ -231,7 +234,7 @@ async function resize(buffer, quality) { return resultBuffer; } -module.exports = { +export = { saveImage, saveImageToAttachment, updateImage diff --git a/src/services/import/common.ts b/src/services/import/common.ts new file mode 100644 index 0000000000..75c61fb35e --- /dev/null +++ b/src/services/import/common.ts @@ -0,0 +1,5 @@ +export interface File { + originalname: string; + mimetype: string; + buffer: string | 
Buffer; +} \ No newline at end of file diff --git a/src/services/import/enex.js b/src/services/import/enex.ts similarity index 81% rename from src/services/import/enex.js rename to src/services/import/enex.ts index db0dc9d6ca..8277ae1bde 100644 --- a/src/services/import/enex.js +++ b/src/services/import/enex.ts @@ -1,20 +1,23 @@ -const sax = require("sax"); -const stream = require('stream'); -const {Throttle} = require('stream-throttle'); -const log = require('../log.js'); -const utils = require('../utils.js'); -const sql = require('../sql.js'); -const noteService = require('../notes.js'); -const imageService = require('../image.js'); -const protectedSessionService = require('../protected_session.js'); -const htmlSanitizer = require('../html_sanitizer.js'); -const {sanitizeAttributeName} = require('../sanitize_attribute_name.js'); +import sax = require("sax"); +import stream = require('stream'); +import { Throttle } from 'stream-throttle'; +import log = require('../log'); +import utils = require('../utils'); +import sql = require('../sql'); +import noteService = require('../notes'); +import imageService = require('../image'); +import protectedSessionService = require('../protected_session'); +import htmlSanitizer = require('../html_sanitizer'); +import sanitizeAttributeName = require('../sanitize_attribute_name'); +import TaskContext = require("../task_context"); +import BNote = require("../../becca/entities/bnote"); +import { File } from "./common"; /** * date format is e.g. 20181121T193703Z or 2013-04-14T16:19:00.000Z (Mac evernote, see #3496) * @returns trilium date format, e.g. 
2013-04-14 16:19:00.000Z */ -function parseDate(text) { +function parseDate(text: string) { // convert ISO format to the "20181121T193703Z" format text = text.replace(/[-:]/g, ""); @@ -25,10 +28,34 @@ function parseDate(text) { return text; } -let note = {}; -let resource; +interface Attribute { + type: string; + name: string; + value: string; +} + +interface Resource { + title: string; + content?: Buffer | string; + mime?: string; + attributes: Attribute[]; +} + +interface Note { + title: string; + attributes: Attribute[]; + utcDateCreated: string; + utcDateModified: string; + noteId: string; + blobId: string; + content: string; + resources: Resource[] +} + +let note: Partial = {}; +let resource: Resource; -function importEnex(taskContext, file, parentNote) { +function importEnex(taskContext: TaskContext, file: File, parentNote: BNote) { const saxStream = sax.createStream(true); const rootNoteTitle = file.originalname.toLowerCase().endsWith(".enex") @@ -45,7 +72,7 @@ function importEnex(taskContext, file, parentNote) { isProtected: parentNote.isProtected && protectedSessionService.isProtectedSessionAvailable(), }).note; - function extractContent(content) { + function extractContent(content: string) { const openingNoteIndex = content.indexOf(''); if (openingNoteIndex !== -1) { @@ -90,7 +117,7 @@ function importEnex(taskContext, file, parentNote) { } - const path = []; + const path: string[] = []; function getCurrentTag() { if (path.length >= 1) { @@ -108,8 +135,8 @@ function importEnex(taskContext, file, parentNote) { // unhandled errors will throw, since this is a proper node event emitter. 
log.error(`error when parsing ENEX file: ${e}`); // clear the error - this._parser.error = null; - this._parser.resume(); + (saxStream._parser as any).error = null; + saxStream._parser.resume(); }); saxStream.on("text", text => { @@ -123,13 +150,15 @@ function importEnex(taskContext, file, parentNote) { labelName = 'pageUrl'; } - labelName = sanitizeAttributeName(labelName); + labelName = sanitizeAttributeName.sanitizeAttributeName(labelName || ""); - note.attributes.push({ - type: 'label', - name: labelName, - value: text - }); + if (note.attributes) { + note.attributes.push({ + type: 'label', + name: labelName, + value: text + }); + } } else if (previousTag === 'resource-attributes') { if (currentTag === 'file-name') { @@ -169,10 +198,10 @@ function importEnex(taskContext, file, parentNote) { note.utcDateCreated = parseDate(text); } else if (currentTag === 'updated') { note.utcDateModified = parseDate(text); - } else if (currentTag === 'tag') { + } else if (currentTag === 'tag' && note.attributes) { note.attributes.push({ type: 'label', - name: sanitizeAttributeName(text), + name: sanitizeAttributeName.sanitizeAttributeName(text), value: '' }) } @@ -201,11 +230,13 @@ function importEnex(taskContext, file, parentNote) { attributes: [] }; - note.resources.push(resource); + if (note.resources) { + note.resources.push(resource); + } } }); - function updateDates(note, utcDateCreated, utcDateModified) { + function updateDates(note: BNote, utcDateCreated?: string, utcDateModified?: string) { // it's difficult to force custom dateCreated and dateModified to Note entity, so we do it post-creation with SQL sql.execute(` UPDATE notes @@ -227,6 +258,10 @@ function importEnex(taskContext, file, parentNote) { // make a copy because stream continues with the next call and note gets overwritten let {title, content, attributes, resources, utcDateCreated, utcDateModified} = note; + if (!title || !content) { + throw new Error("Missing title or content for note."); + } + content = 
extractContent(content); const noteEntity = noteService.createNewNote({ @@ -239,7 +274,7 @@ function importEnex(taskContext, file, parentNote) { isProtected: parentNote.isProtected && protectedSessionService.isProtectedSessionAvailable(), }).note; - for (const attr of attributes) { + for (const attr of attributes || []) { noteEntity.addAttribute(attr.type, attr.name, attr.value); } @@ -249,12 +284,14 @@ function importEnex(taskContext, file, parentNote) { taskContext.increaseProgressCount(); - for (const resource of resources) { + for (const resource of resources || []) { if (!resource.content) { continue; } - resource.content = utils.fromBase64(resource.content); + if (typeof resource.content === "string") { + resource.content = utils.fromBase64(resource.content); + } const hash = utils.md5(resource.content); @@ -273,6 +310,10 @@ function importEnex(taskContext, file, parentNote) { resource.mime = resource.mime || "application/octet-stream"; const createFileNote = () => { + if (typeof resource.content !== "string") { + throw new Error("Missing or wrong content type for resource."); + } + const resourceNote = noteService.createNewNote({ parentNoteId: noteEntity.noteId, title: resource.title, @@ -292,7 +333,7 @@ function importEnex(taskContext, file, parentNote) { const resourceLink = `${utils.escapeHtml(resource.title)}`; - content = content.replace(mediaRegex, resourceLink); + content = (content || "").replace(mediaRegex, resourceLink); }; if (resource.mime && resource.mime.startsWith('image/')) { @@ -301,7 +342,7 @@ function importEnex(taskContext, file, parentNote) { ? 
resource.title : `image.${resource.mime.substr(6)}`; // default if real name is not present - const attachment = imageService.saveImageToAttachment(noteEntity.noteId, resource.content, originalName, taskContext.data.shrinkImages); + const attachment = imageService.saveImageToAttachment(noteEntity.noteId, resource.content, originalName, !!taskContext.data?.shrinkImages); const encodedTitle = encodeURIComponent(attachment.title); const url = `api/attachments/${attachment.attachmentId}/image/${encodedTitle}`; @@ -314,7 +355,7 @@ function importEnex(taskContext, file, parentNote) { // otherwise the image would be removed since no note would include it content += imageLink; } - } catch (e) { + } catch (e: any) { log.error(`error when saving image from ENEX file: ${e.message}`); createFileNote(); } @@ -368,4 +409,4 @@ function importEnex(taskContext, file, parentNote) { }); } -module.exports = { importEnex }; +export = { importEnex }; diff --git a/src/services/import/markdown.js b/src/services/import/markdown.js deleted file mode 100644 index 90b6d1f43f..0000000000 --- a/src/services/import/markdown.js +++ /dev/null @@ -1,18 +0,0 @@ -"use strict"; - -const marked = require("marked"); -const htmlSanitizer = require('../html_sanitizer.js'); -const importUtils = require('./utils.js'); - -function renderToHtml(content, title) { - const html = marked.parse(content, { - mangle: false, - headerIds: false - }); - const h1Handled = importUtils.handleH1(html, title); // h1 handling needs to come before sanitization - return htmlSanitizer.sanitize(h1Handled); -} - -module.exports = { - renderToHtml -}; diff --git a/src/services/import/markdown.ts b/src/services/import/markdown.ts new file mode 100644 index 0000000000..47e37df130 --- /dev/null +++ b/src/services/import/markdown.ts @@ -0,0 +1,17 @@ +"use strict"; + +import marked = require("marked"); +import htmlSanitizer = require('../html_sanitizer'); +import importUtils = require('./utils'); + +function renderToHtml(content: 
string, title: string) { + const html = marked.parse(content, { + async: false + }) as string; // FIXME: mangle and headerIds does not seem to exist in marked + const h1Handled = importUtils.handleH1(html, title); // h1 handling needs to come before sanitization + return htmlSanitizer.sanitize(h1Handled); +} + +export = { + renderToHtml +}; diff --git a/src/services/import/mime.js b/src/services/import/mime.ts similarity index 81% rename from src/services/import/mime.js rename to src/services/import/mime.ts index 179bffebd4..dd4c962441 100644 --- a/src/services/import/mime.js +++ b/src/services/import/mime.ts @@ -1,9 +1,10 @@ "use strict"; -const mimeTypes = require('mime-types'); -const path = require('path'); +import mimeTypes = require('mime-types'); +import path = require('path'); +import { TaskData } from '../task_context_interface'; -const CODE_MIME_TYPES = { +const CODE_MIME_TYPES: Record = { 'text/plain': true, 'text/x-csrc': true, 'text/x-c++src': true, @@ -44,7 +45,7 @@ const CODE_MIME_TYPES = { }; // extensions missing in mime-db -const EXTENSION_TO_MIME = { +const EXTENSION_TO_MIME: Record = { ".c": "text/x-csrc", ".cs": "text/x-csharp", ".clj": "text/x-clojure", @@ -65,7 +66,7 @@ const EXTENSION_TO_MIME = { }; /** @returns false if MIME is not detected */ -function getMime(fileName) { +function getMime(fileName: string) { if (fileName.toLowerCase() === 'dockerfile') { return "text/x-dockerfile"; } @@ -79,7 +80,7 @@ function getMime(fileName) { return mimeTypes.lookup(fileName); } -function getType(options, mime) { +function getType(options: TaskData, mime: string) { mime = mime ? mime.toLowerCase() : ''; if (options.textImportedAsText && (mime === 'text/html' || ['text/markdown', 'text/x-markdown'].includes(mime))) { @@ -96,18 +97,20 @@ function getType(options, mime) { } } -function normalizeMimeType(mime) { +function normalizeMimeType(mime: string) { mime = mime ? 
mime.toLowerCase() : ''; + const mappedMime = CODE_MIME_TYPES[mime]; - if (!(mime in CODE_MIME_TYPES) || CODE_MIME_TYPES[mime] === true) { + if (mappedMime === true) { return mime; + } else if (typeof mappedMime === "string") { + return mappedMime; } - else { - return CODE_MIME_TYPES[mime]; - } + + return undefined; } -module.exports = { +export = { getMime, getType, normalizeMimeType diff --git a/src/services/import/opml.js b/src/services/import/opml.ts similarity index 67% rename from src/services/import/opml.js rename to src/services/import/opml.ts index eb7b891d62..66d2e944b5 100644 --- a/src/services/import/opml.js +++ b/src/services/import/opml.ts @@ -1,20 +1,37 @@ "use strict"; -const noteService = require('../../services/notes.js'); -const parseString = require('xml2js').parseString; -const protectedSessionService = require('../protected_session.js'); -const htmlSanitizer = require('../html_sanitizer.js'); - -/** - * @param {TaskContext} taskContext - * @param {Buffer} fileBuffer - * @param {BNote} parentNote - * @returns {Promise<*[]|*>} - */ -async function importOpml(taskContext, fileBuffer, parentNote) { - const xml = await new Promise(function(resolve, reject) +import noteService = require('../../services/notes'); +import xml2js = require("xml2js"); +import protectedSessionService = require('../protected_session'); +import htmlSanitizer = require('../html_sanitizer'); +import TaskContext = require('../task_context'); +import BNote = require('../../becca/entities/bnote'); +const parseString = xml2js.parseString; + +interface OpmlXml { + opml: OpmlBody; +} + +interface OpmlBody { + $: { + version: string + } + body: OpmlOutline[] +} + +interface OpmlOutline { + $: { + title: string; + text: string; + _note: string; + }; + outline: OpmlOutline[]; +} + +async function importOpml(taskContext: TaskContext, fileBuffer: Buffer, parentNote: BNote) { + const xml = await new Promise(function(resolve, reject) { - parseString(fileBuffer, function (err, result) { + 
parseString(fileBuffer, function (err: any, result: OpmlXml) { if (err) { reject(err); } @@ -30,7 +47,7 @@ async function importOpml(taskContext, fileBuffer, parentNote) { const opmlVersion = parseInt(xml.opml.$.version); - function importOutline(outline, parentNoteId) { + function importOutline(outline: OpmlOutline, parentNoteId: string) { let title, content; if (opmlVersion === 1) { @@ -83,7 +100,7 @@ async function importOpml(taskContext, fileBuffer, parentNote) { return returnNote; } -function toHtml(text) { +function toHtml(text: string) { if (!text) { return ''; } @@ -91,6 +108,6 @@ function toHtml(text) { return `

    ${text.replace(/(?:\r\n|\r|\n)/g, '

    ')}

    `; } -module.exports = { +export = { importOpml }; diff --git a/src/services/import/single.js b/src/services/import/single.ts similarity index 63% rename from src/services/import/single.js rename to src/services/import/single.ts index 7d3ea164f3..abf66c529b 100644 --- a/src/services/import/single.js +++ b/src/services/import/single.ts @@ -1,18 +1,27 @@ "use strict"; -const noteService = require('../../services/notes.js'); -const imageService = require('../../services/image.js'); -const protectedSessionService = require('../protected_session.js'); -const markdownService = require('./markdown.js'); -const mimeService = require('./mime.js'); -const utils = require('../../services/utils.js'); -const importUtils = require('./utils.js'); -const htmlSanitizer = require('../html_sanitizer.js'); - -function importSingleFile(taskContext, file, parentNote) { +import BNote = require("../../becca/entities/bnote"); +import TaskContext = require("../task_context"); + +import noteService = require('../../services/notes'); +import imageService = require('../../services/image'); +import protectedSessionService = require('../protected_session'); +import markdownService = require('./markdown'); +import mimeService = require('./mime'); +import utils = require('../../services/utils'); +import importUtils = require('./utils'); +import htmlSanitizer = require('../html_sanitizer'); + +interface File { + originalname: string; + mimetype: string; + buffer: string | Buffer; +} + +function importSingleFile(taskContext: TaskContext, file: File, parentNote: BNote) { const mime = mimeService.getMime(file.originalname) || file.mimetype; - if (taskContext.data.textImportedAsText) { + if (taskContext?.data?.textImportedAsText) { if (mime === 'text/html') { return importHtml(taskContext, file, parentNote); } else if (['text/markdown', 'text/x-markdown'].includes(mime)) { @@ -22,7 +31,7 @@ function importSingleFile(taskContext, file, parentNote) { } } - if (taskContext.data.codeImportedAsCode && 
mimeService.getType(taskContext.data, mime) === 'code') { + if (taskContext?.data?.codeImportedAsCode && mimeService.getType(taskContext.data, mime) === 'code') { return importCodeNote(taskContext, file, parentNote); } @@ -33,15 +42,21 @@ function importSingleFile(taskContext, file, parentNote) { return importFile(taskContext, file, parentNote); } -function importImage(file, parentNote, taskContext) { - const {note} = imageService.saveImage(parentNote.noteId, file.buffer, file.originalname, taskContext.data.shrinkImages); +function importImage(file: File, parentNote: BNote, taskContext: TaskContext) { + if (typeof file.buffer === "string") { + throw new Error("Invalid file content for image."); + } + const {note} = imageService.saveImage(parentNote.noteId, file.buffer, file.originalname, !!taskContext.data?.shrinkImages); taskContext.increaseProgressCount(); return note; } -function importFile(taskContext, file, parentNote) { +function importFile(taskContext: TaskContext, file: File, parentNote: BNote) { + if (typeof file.buffer !== "string") { + throw new Error("Invalid file content for text."); + } const originalName = file.originalname; const {note} = noteService.createNewNote({ @@ -60,8 +75,8 @@ function importFile(taskContext, file, parentNote) { return note; } -function importCodeNote(taskContext, file, parentNote) { - const title = utils.getNoteTitle(file.originalname, taskContext.data.replaceUnderscoresWithSpaces); +function importCodeNote(taskContext: TaskContext, file: File, parentNote: BNote) { + const title = utils.getNoteTitle(file.originalname, !!taskContext.data?.replaceUnderscoresWithSpaces); const content = file.buffer.toString("utf-8"); const detectedMime = mimeService.getMime(file.originalname) || file.mimetype; const mime = mimeService.normalizeMimeType(detectedMime); @@ -80,8 +95,8 @@ function importCodeNote(taskContext, file, parentNote) { return note; } -function importPlainText(taskContext, file, parentNote) { - const title = 
utils.getNoteTitle(file.originalname, taskContext.data.replaceUnderscoresWithSpaces); +function importPlainText(taskContext: TaskContext, file: File, parentNote: BNote) { + const title = utils.getNoteTitle(file.originalname, !!taskContext.data?.replaceUnderscoresWithSpaces); const plainTextContent = file.buffer.toString("utf-8"); const htmlContent = convertTextToHtml(plainTextContent); @@ -99,7 +114,7 @@ function importPlainText(taskContext, file, parentNote) { return note; } -function convertTextToHtml(text) { +function convertTextToHtml(text: string) { // 1: Plain Text Search text = text.replace(/&/g, "&"). replace(/([^<]*)<\/h1>/gi, (match, text) => { if (title.trim() === text.trim()) { return ""; // remove whole H1 tag @@ -11,6 +11,6 @@ function handleH1(content, title) { return content; } -module.exports = { +export = { handleH1 }; diff --git a/src/services/import/zip.js b/src/services/import/zip.ts similarity index 72% rename from src/services/import/zip.js rename to src/services/import/zip.ts index 5097ba925d..cda7347aad 100644 --- a/src/services/import/zip.js +++ b/src/services/import/zip.ts @@ -1,43 +1,45 @@ "use strict"; -const BAttribute = require('../../becca/entities/battribute.js'); -const utils = require('../../services/utils.js'); -const log = require('../../services/log.js'); -const noteService = require('../../services/notes.js'); -const attributeService = require('../../services/attributes.js'); -const BBranch = require('../../becca/entities/bbranch.js'); -const path = require('path'); -const protectedSessionService = require('../protected_session.js'); -const mimeService = require('./mime.js'); -const treeService = require('../tree.js'); -const yauzl = require("yauzl"); -const htmlSanitizer = require('../html_sanitizer.js'); -const becca = require('../../becca/becca.js'); -const BAttachment = require('../../becca/entities/battachment.js'); -const markdownService = require('./markdown.js'); - -/** - * @param {TaskContext} taskContext - * @param 
{Buffer} fileBuffer - * @param {BNote} importRootNote - * @returns {Promise} - */ -async function importZip(taskContext, fileBuffer, importRootNote) { - /** @type {Object.} maps from original noteId (in ZIP file) to newly generated noteId */ - const noteIdMap = {}; - /** @type {Object.} maps from original attachmentId (in ZIP file) to newly generated attachmentId */ - const attachmentIdMap = {}; - const attributes = []; +import BAttribute = require('../../becca/entities/battribute'); +import utils = require('../../services/utils'); +import log = require('../../services/log'); +import noteService = require('../../services/notes'); +import attributeService = require('../../services/attributes'); +import BBranch = require('../../becca/entities/bbranch'); +import path = require('path'); +import protectedSessionService = require('../protected_session'); +import mimeService = require('./mime'); +import treeService = require('../tree'); +import yauzl = require("yauzl"); +import htmlSanitizer = require('../html_sanitizer'); +import becca = require('../../becca/becca'); +import BAttachment = require('../../becca/entities/battachment'); +import markdownService = require('./markdown'); +import TaskContext = require('../task_context'); +import BNote = require('../../becca/entities/bnote'); +import NoteMeta = require('../meta/note_meta'); +import AttributeMeta = require('../meta/attribute_meta'); +import { Stream } from 'stream'; +import { NoteType } from '../../becca/entities/rows'; + +interface MetaFile { + files: NoteMeta[] +} + +async function importZip(taskContext: TaskContext, fileBuffer: Buffer, importRootNote: BNote): Promise { + /** maps from original noteId (in ZIP file) to newly generated noteId */ + const noteIdMap: Record = {}; + /** type maps from original attachmentId (in ZIP file) to newly generated attachmentId */ + const attachmentIdMap: Record = {}; + const attributes: AttributeMeta[] = []; // path => noteId, used only when meta file is not available - /** 
@type {Object.} path => noteId | attachmentId */ - const createdPaths = { '/': importRootNote.noteId, '\\': importRootNote.noteId }; - let metaFile = null; - /** @type {BNote} */ - let firstNote = null; - /** @type {Set.} */ - const createdNoteIds = new Set(); - - function getNewNoteId(origNoteId) { + /** path => noteId | attachmentId */ + const createdPaths: Record = { '/': importRootNote.noteId, '\\': importRootNote.noteId }; + let metaFile!: MetaFile; + let firstNote!: BNote; + const createdNoteIds = new Set(); + + function getNewNoteId(origNoteId: string) { if (!origNoteId.trim()) { // this probably shouldn't happen, but still good to have this precaution return "empty_note_id"; @@ -55,7 +57,7 @@ async function importZip(taskContext, fileBuffer, importRootNote) { return noteIdMap[origNoteId]; } - function getNewAttachmentId(origAttachmentId) { + function getNewAttachmentId(origAttachmentId: string) { if (!origAttachmentId.trim()) { // this probably shouldn't happen, but still good to have this precaution return "empty_attachment_id"; @@ -68,12 +70,8 @@ async function importZip(taskContext, fileBuffer, importRootNote) { return attachmentIdMap[origAttachmentId]; } - /** - * @param {NoteMeta} parentNoteMeta - * @param {string} dataFileName - */ - function getAttachmentMeta(parentNoteMeta, dataFileName) { - for (const noteMeta of parentNoteMeta.children) { + function getAttachmentMeta(parentNoteMeta: NoteMeta, dataFileName: string) { + for (const noteMeta of parentNoteMeta.children || []) { for (const attachmentMeta of noteMeta.attachments || []) { if (attachmentMeta.dataFileName === dataFileName) { return { @@ -88,22 +86,20 @@ async function importZip(taskContext, fileBuffer, importRootNote) { return {}; } - /** @returns {{noteMeta: NoteMeta|undefined, parentNoteMeta: NoteMeta|undefined, attachmentMeta: AttachmentMeta|undefined}} */ - function getMeta(filePath) { + function getMeta(filePath: string) { if (!metaFile) { return {}; } const pathSegments = 
filePath.split(/[\/\\]/g); - /** @type {NoteMeta} */ - let cursor = { + let cursor: NoteMeta | undefined = { isImportRoot: true, - children: metaFile.files + children: metaFile.files, + dataFileName: "" }; - /** @type {NoteMeta} */ - let parent; + let parent!: NoteMeta; for (const segment of pathSegments) { if (!cursor?.children?.length) { @@ -111,7 +107,9 @@ async function importZip(taskContext, fileBuffer, importRootNote) { } parent = cursor; - cursor = parent.children.find(file => file.dataFileName === segment || file.dirFileName === segment); + if (parent.children) { + cursor = parent.children.find(file => file.dataFileName === segment || file.dirFileName === segment); + } if (!cursor) { return getAttachmentMeta(parent, segment); @@ -120,19 +118,15 @@ async function importZip(taskContext, fileBuffer, importRootNote) { return { parentNoteMeta: parent, - noteMeta: cursor + noteMeta: cursor, + attachmentMeta: null }; } - /** - * @param {string} filePath - * @param {NoteMeta} parentNoteMeta - * @return {string} - */ - function getParentNoteId(filePath, parentNoteMeta) { + function getParentNoteId(filePath: string, parentNoteMeta?: NoteMeta) { let parentNoteId; - if (parentNoteMeta) { + if (parentNoteMeta?.noteId) { parentNoteId = parentNoteMeta.isImportRoot ? 
importRootNote.noteId : getNewNoteId(parentNoteMeta.noteId); } else { @@ -151,13 +145,8 @@ async function importZip(taskContext, fileBuffer, importRootNote) { return parentNoteId; } - /** - * @param {NoteMeta} noteMeta - * @param {string} filePath - * @return {string} - */ - function getNoteId(noteMeta, filePath) { - if (noteMeta) { + function getNoteId(noteMeta: NoteMeta | undefined, filePath: string): string { + if (noteMeta?.noteId) { return getNewNoteId(noteMeta.noteId); } @@ -176,23 +165,19 @@ async function importZip(taskContext, fileBuffer, importRootNote) { return noteId; } - function detectFileTypeAndMime(taskContext, filePath) { + function detectFileTypeAndMime(taskContext: TaskContext, filePath: string) { const mime = mimeService.getMime(filePath) || "application/octet-stream"; - const type = mimeService.getType(taskContext.data, mime); + const type = mimeService.getType(taskContext.data || {}, mime); return { mime, type }; } - /** - * @param {BNote} note - * @param {NoteMeta} noteMeta - */ - function saveAttributes(note, noteMeta) { + function saveAttributes(note: BNote, noteMeta: NoteMeta | undefined) { if (!noteMeta) { return; } - for (const attr of noteMeta.attributes) { + for (const attr of noteMeta.attributes || []) { attr.noteId = note.noteId; if (attr.type === 'label-definition') { @@ -218,11 +203,11 @@ async function importZip(taskContext, fileBuffer, importRootNote) { attr.value = getNewNoteId(attr.value); } - if (taskContext.data.safeImport && attributeService.isAttributeDangerous(attr.type, attr.name)) { + if (taskContext.data?.safeImport && attributeService.isAttributeDangerous(attr.type, attr.name)) { attr.name = `disabled:${attr.name}`; } - if (taskContext.data.safeImport) { + if (taskContext.data?.safeImport) { attr.name = htmlSanitizer.sanitize(attr.name); attr.value = htmlSanitizer.sanitize(attr.value); } @@ -231,7 +216,7 @@ async function importZip(taskContext, fileBuffer, importRootNote) { } } - function saveDirectory(filePath) { + 
function saveDirectory(filePath: string) { const { parentNoteMeta, noteMeta } = getMeta(filePath); const noteId = getNoteId(noteMeta, filePath); @@ -240,17 +225,21 @@ async function importZip(taskContext, fileBuffer, importRootNote) { return; } - const noteTitle = utils.getNoteTitle(filePath, taskContext.data.replaceUnderscoresWithSpaces, noteMeta); + const noteTitle = utils.getNoteTitle(filePath, !!taskContext.data?.replaceUnderscoresWithSpaces, noteMeta); const parentNoteId = getParentNoteId(filePath, parentNoteMeta); + if (!parentNoteId) { + throw new Error("Missing parent note ID."); + } + const {note} = noteService.createNewNote({ parentNoteId: parentNoteId, - title: noteTitle, + title: noteTitle || "", content: '', noteId: noteId, type: resolveNoteType(noteMeta?.type), mime: noteMeta ? noteMeta.mime : 'text/html', - prefix: noteMeta ? noteMeta.prefix : '', + prefix: noteMeta ? noteMeta.prefix || "" : '', isExpanded: noteMeta ? noteMeta.isExpanded : false, notePosition: (noteMeta && firstNote) ? 
noteMeta.notePosition : undefined, isProtected: importRootNote.isProtected && protectedSessionService.isProtectedSessionAvailable(), @@ -265,8 +254,7 @@ async function importZip(taskContext, fileBuffer, importRootNote) { return noteId; } - /** @returns {{attachmentId: string}|{noteId: string}} */ - function getEntityIdFromRelativeUrl(url, filePath) { + function getEntityIdFromRelativeUrl(url: string, filePath: string) { while (url.startsWith("./")) { url = url.substr(2); } @@ -287,7 +275,7 @@ async function importZip(taskContext, fileBuffer, importRootNote) { const { noteMeta, attachmentMeta } = getMeta(absUrl); - if (attachmentMeta) { + if (attachmentMeta && attachmentMeta.attachmentId && noteMeta.noteId) { return { attachmentId: getNewAttachmentId(attachmentMeta.attachmentId), noteId: getNewNoteId(noteMeta.noteId) @@ -299,15 +287,8 @@ async function importZip(taskContext, fileBuffer, importRootNote) { } } - /** - * @param {string} content - * @param {string} noteTitle - * @param {string} filePath - * @param {NoteMeta} noteMeta - * @return {string} - */ - function processTextNoteContent(content, noteTitle, filePath, noteMeta) { - function isUrlAbsolute(url) { + function processTextNoteContent(content: string, noteTitle: string, filePath: string, noteMeta?: NoteMeta) { + function isUrlAbsolute(url: string) { return /^(?:[a-z]+:)?\/\//i.test(url); } @@ -321,7 +302,7 @@ async function importZip(taskContext, fileBuffer, importRootNote) { } }); - if (taskContext.data.safeImport) { + if (taskContext.data?.safeImport) { content = htmlSanitizer.sanitize(content); } @@ -336,7 +317,7 @@ async function importZip(taskContext, fileBuffer, importRootNote) { try { url = decodeURIComponent(url).trim(); - } catch (e) { + } catch (e: any) { log.error(`Cannot parse image URL '${url}', keeping original. 
Error: ${e.message}.`); return `src="${url}"`; } @@ -359,7 +340,7 @@ async function importZip(taskContext, fileBuffer, importRootNote) { content = content.replace(/href="([^"]*)"/g, (match, url) => { try { url = decodeURIComponent(url).trim(); - } catch (e) { + } catch (e: any) { log.error(`Cannot parse link URL '${url}', keeping original. Error: ${e.message}.`); return `href="${url}"`; } @@ -395,7 +376,7 @@ async function importZip(taskContext, fileBuffer, importRootNote) { return content; } - function removeTriliumTags(content) { + function removeTriliumTags(content: string) { const tagsToRemove = [ '

    ([^<]*)<\/h1>', '([^<]*)<\/title>' @@ -407,26 +388,18 @@ async function importZip(taskContext, fileBuffer, importRootNote) { return content; } - /** - * @param {NoteMeta} noteMeta - * @param {string} type - * @param {string} mime - * @param {string|Buffer} content - * @param {string} noteTitle - * @param {string} filePath - * @return {string} - */ - function processNoteContent(noteMeta, type, mime, content, noteTitle, filePath) { - if (noteMeta?.format === 'markdown' - || (!noteMeta && taskContext.data.textImportedAsText && ['text/markdown', 'text/x-markdown'].includes(mime))) { + function processNoteContent(noteMeta: NoteMeta | undefined, type: string, mime: string, content: string | Buffer, noteTitle: string, filePath: string) { + if ((noteMeta?.format === 'markdown' + || (!noteMeta && taskContext.data?.textImportedAsText && ['text/markdown', 'text/x-markdown'].includes(mime))) + && typeof content === "string") { content = markdownService.renderToHtml(content, noteTitle); } - if (type === 'text') { + if (type === 'text' && typeof content === "string") { content = processTextNoteContent(content, noteTitle, filePath, noteMeta); } - if (type === 'relationMap' && noteMeta) { + if (type === 'relationMap' && noteMeta && typeof content === "string") { const relationMapLinks = (noteMeta.attributes || []) .filter(attr => attr.type === 'relation' && attr.name === 'relationMapLink'); @@ -440,11 +413,7 @@ async function importZip(taskContext, fileBuffer, importRootNote) { return content; } - /** - * @param {string} filePath - * @param {Buffer} content - */ - function saveNote(filePath, content) { + function saveNote(filePath: string, content: string | Buffer) { const { parentNoteMeta, noteMeta, attachmentMeta } = getMeta(filePath); if (noteMeta?.noImport) { @@ -453,7 +422,7 @@ async function importZip(taskContext, fileBuffer, importRootNote) { const noteId = getNoteId(noteMeta, filePath); - if (attachmentMeta) { + if (attachmentMeta && attachmentMeta.attachmentId) { 
const attachment = new BAttachment({ attachmentId: getNewAttachmentId(attachmentMeta.attachmentId), ownerId: noteId, @@ -487,16 +456,20 @@ async function importZip(taskContext, fileBuffer, importRootNote) { return; } - let { type, mime } = noteMeta ? noteMeta : detectFileTypeAndMime(taskContext, filePath); - type = resolveNoteType(type); + let { mime } = noteMeta ? noteMeta : detectFileTypeAndMime(taskContext, filePath); + if (!mime) { + throw new Error("Unable to resolve mime type."); + } + + let type = resolveNoteType(noteMeta?.type); if (type !== 'file' && type !== 'image') { content = content.toString("utf-8"); } - const noteTitle = utils.getNoteTitle(filePath, taskContext.data.replaceUnderscoresWithSpaces, noteMeta); + const noteTitle = utils.getNoteTitle(filePath, taskContext.data?.replaceUnderscoresWithSpaces || false, noteMeta); - content = processNoteContent(noteMeta, type, mime, content, noteTitle, filePath); + content = processNoteContent(noteMeta, type, mime, content, noteTitle || "", filePath); let note = becca.getNote(noteId); @@ -508,7 +481,7 @@ async function importZip(taskContext, fileBuffer, importRootNote) { if (note.type === undefined) { note.type = type; note.mime = mime; - note.title = noteTitle; + note.title = noteTitle || ""; note.isProtected = isProtected; note.save(); } @@ -519,21 +492,25 @@ async function importZip(taskContext, fileBuffer, importRootNote) { new BBranch({ noteId, parentNoteId, - isExpanded: noteMeta.isExpanded, - prefix: noteMeta.prefix, - notePosition: noteMeta.notePosition + isExpanded: noteMeta?.isExpanded, + prefix: noteMeta?.prefix, + notePosition: noteMeta?.notePosition }).save(); } } else { + if (typeof content !== "string") { + throw new Error("Incorrect content type."); + } + ({note} = noteService.createNewNote({ parentNoteId: parentNoteId, - title: noteTitle, + title: noteTitle || "", content: content, noteId, type, mime, - prefix: noteMeta ? noteMeta.prefix : '', + prefix: noteMeta ? 
noteMeta.prefix || "" : '', isExpanded: noteMeta ? noteMeta.isExpanded : false, // root notePosition should be ignored since it relates to the original document // now import root should be placed after existing notes into new parent @@ -560,7 +537,7 @@ async function importZip(taskContext, fileBuffer, importRootNote) { // we're running two passes to make sure that the meta file is loaded before the rest of the files is processed. - await readZipFile(fileBuffer, async (zipfile, entry) => { + await readZipFile(fileBuffer, async (zipfile: yauzl.ZipFile, entry: yauzl.Entry) => { const filePath = normalizeFilePath(entry.fileName); if (filePath === '!!!meta.json') { @@ -572,7 +549,7 @@ async function importZip(taskContext, fileBuffer, importRootNote) { zipfile.readEntry(); }); - await readZipFile(fileBuffer, async (zipfile, entry) => { + await readZipFile(fileBuffer, async (zipfile: yauzl.ZipFile, entry: yauzl.Entry) => { const filePath = normalizeFilePath(entry.fileName); if (/\/$/.test(entry.fileName)) { @@ -590,6 +567,7 @@ async function importZip(taskContext, fileBuffer, importRootNote) { for (const noteId of createdNoteIds) { const note = becca.getNote(noteId); + if (!note) continue; await noteService.asyncPostProcessContent(note, note.getContent()); if (!metaFile) { @@ -615,8 +593,8 @@ async function importZip(taskContext, fileBuffer, importRootNote) { return firstNote; } -/** @returns {string} path without leading or trailing slash and backslashes converted to forward ones */ -function normalizeFilePath(filePath) { +/** @returns path without leading or trailing slash and backslashes converted to forward ones */ +function normalizeFilePath(filePath: string): string { filePath = filePath.replace(/\\/g, "/"); if (filePath.startsWith("/")) { @@ -630,29 +608,30 @@ function normalizeFilePath(filePath) { return filePath; } -/** @returns {Promise<Buffer>} */ -function streamToBuffer(stream) { - const chunks = []; +function streamToBuffer(stream: Stream): Promise<Buffer> 
{ + const chunks: Uint8Array[] = []; stream.on('data', chunk => chunks.push(chunk)); return new Promise((res, rej) => stream.on('end', () => res(Buffer.concat(chunks)))); } -/** @returns {Promise<Buffer>} */ -function readContent(zipfile, entry) { +function readContent(zipfile: yauzl.ZipFile, entry: yauzl.Entry): Promise<Buffer> { return new Promise((res, rej) => { zipfile.openReadStream(entry, function(err, readStream) { if (err) rej(err); + if (!readStream) throw new Error("Unable to read content."); streamToBuffer(readStream).then(res); }); }); } -function readZipFile(buffer, processEntryCallback) { +function readZipFile(buffer: Buffer, processEntryCallback: (zipfile: yauzl.ZipFile, entry: yauzl.Entry) => void) { return new Promise((res, rej) => { yauzl.fromBuffer(buffer, {lazyEntries: true, validateEntrySizes: false}, function(err, zipfile) { if (err) throw err; + if (!zipfile) throw new Error("Unable to read zip file."); + zipfile.readEntry(); zipfile.on("entry", entry => processEntryCallback(zipfile, entry)); zipfile.on("end", res); @@ -660,20 +639,19 @@ function readZipFile(buffer, processEntryCallback) { }); } -function resolveNoteType(type) { +function resolveNoteType(type: string | undefined): NoteType { // BC for ZIPs created in Triliun 0.57 and older if (type === 'relation-map') { - type = 'relationMap'; + return 'relationMap'; } else if (type === 'note-map') { - type = 'noteMap'; + return 'noteMap'; } else if (type === 'web-view') { - type = 'webView'; + return 'webView'; } - return type || "text"; + return "text"; } - -module.exports = { +export = { importZip }; diff --git a/src/services/instance_id.js b/src/services/instance_id.js deleted file mode 100644 index 49f0a42095..0000000000 --- a/src/services/instance_id.js +++ /dev/null @@ -1,5 +0,0 @@ -const utils = require('./utils.js'); - -const instanceId = utils.randomString(12); - -module.exports = instanceId; diff --git a/src/services/instance_id.ts b/src/services/instance_id.ts new file mode 100644 
index 0000000000..6e0eb503c3 --- /dev/null +++ b/src/services/instance_id.ts @@ -0,0 +1,5 @@ +import utils = require('./utils'); + +const instanceId = utils.randomString(12); + +export = instanceId; diff --git a/src/services/keyboard_actions.js b/src/services/keyboard_actions.ts similarity index 97% rename from src/services/keyboard_actions.js rename to src/services/keyboard_actions.ts index 694b7a1d7f..715c626849 100644 --- a/src/services/keyboard_actions.js +++ b/src/services/keyboard_actions.ts @@ -1,8 +1,9 @@ "use strict"; -const optionService = require('./options.js'); -const log = require('./log.js'); -const utils = require('./utils.js'); +import optionService = require('./options'); +import log = require('./log'); +import utils = require('./utils'); +import { KeyboardShortcut } from './keyboard_actions_interface'; const isMac = process.platform === "darwin"; const isElectron = utils.isElectron(); @@ -16,7 +17,7 @@ const isElectron = utils.isElectron(); * e.g. CTRL-C in note tree does something a bit different from CTRL-C in the text editor. */ -const DEFAULT_KEYBOARD_ACTIONS = [ +const DEFAULT_KEYBOARD_ACTIONS: KeyboardShortcut[] = [ { separator: "Note navigation" }, @@ -606,15 +607,15 @@ for (const action of DEFAULT_KEYBOARD_ACTIONS) { } function getKeyboardActions() { - const actions = JSON.parse(JSON.stringify(DEFAULT_KEYBOARD_ACTIONS)); + const actions: KeyboardShortcut[] = JSON.parse(JSON.stringify(DEFAULT_KEYBOARD_ACTIONS)); for (const action of actions) { - action.effectiveShortcuts = action.effectiveShortcuts ? action.defaultShortcuts.slice() : []; + action.effectiveShortcuts = action.defaultShortcuts ? 
action.defaultShortcuts.slice() : []; } for (const option of optionService.getOptions()) { if (option.name.startsWith('keyboardShortcuts')) { - let actionName = option.name.substr(17); + let actionName = option.name.substring(17); actionName = actionName.charAt(0).toLowerCase() + actionName.slice(1); const action = actions.find(ea => ea.actionName === actionName); @@ -636,7 +637,7 @@ function getKeyboardActions() { return actions; } -module.exports = { +export = { DEFAULT_KEYBOARD_ACTIONS, getKeyboardActions }; diff --git a/src/services/keyboard_actions_interface.ts b/src/services/keyboard_actions_interface.ts new file mode 100644 index 0000000000..3e3621ecd6 --- /dev/null +++ b/src/services/keyboard_actions_interface.ts @@ -0,0 +1,12 @@ +export interface KeyboardShortcut { + separator?: string; + actionName?: string; + description?: string; + defaultShortcuts?: string[]; + effectiveShortcuts?: string[]; + scope?: string; +} + +export interface KeyboardShortcutWithRequiredActionName extends KeyboardShortcut { + actionName: string; +} \ No newline at end of file diff --git a/src/services/log.js b/src/services/log.ts similarity index 80% rename from src/services/log.js rename to src/services/log.ts index 935d695ef4..742fdcdb18 100644 --- a/src/services/log.js +++ b/src/services/log.ts @@ -1,14 +1,15 @@ "use strict"; -const fs = require('fs'); -const dataDir = require('./data_dir.js'); -const cls = require('./cls.js'); +import { Request, Response } from "express"; +import fs = require("fs"); +import dataDir = require('./data_dir'); +import cls = require('./cls'); if (!fs.existsSync(dataDir.LOG_DIR)) { fs.mkdirSync(dataDir.LOG_DIR, 0o700); } -let logFile = null; +let logFile!: fs.WriteStream; const SECOND = 1000; const MINUTE = 60 * SECOND; @@ -17,7 +18,7 @@ const DAY = 24 * HOUR; const NEW_LINE = process.platform === "win32" ? 
'\r\n' : '\n'; -let todaysMidnight = null; +let todaysMidnight!: Date; initLogFile(); @@ -39,7 +40,7 @@ function initLogFile() { logFile = fs.createWriteStream(path, {flags: 'a'}); } -function checkDate(millisSinceMidnight) { +function checkDate(millisSinceMidnight: number) { if (millisSinceMidnight >= DAY) { initLogFile(); @@ -49,7 +50,7 @@ function checkDate(millisSinceMidnight) { return millisSinceMidnight; } -function log(str) { +function log(str: string) { const bundleNoteId = cls.get("bundleNoteId"); if (bundleNoteId) { @@ -65,17 +66,17 @@ function log(str) { console.log(str); } -function info(message) { +function info(message: string) { log(message); } -function error(message) { +function error(message: string) { log(`ERROR: ${message}`); } const requestBlacklist = [ "/libraries", "/app", "/images", "/stylesheets", "/api/recent-notes" ]; -function request(req, res, timeMs, responseLength = "?") { +function request(req: Request, res: Response, timeMs: number, responseLength = "?") { for (const bl of requestBlacklist) { if (req.url.startsWith(bl)) { return; @@ -90,13 +91,13 @@ function request(req, res, timeMs, responseLength = "?") { `${res.statusCode} ${req.method} ${req.url} with ${responseLength} bytes took ${timeMs}ms`); } -function pad(num) { +function pad(num: number) { num = Math.floor(num); return num < 10 ? 
(`0${num}`) : num.toString(); } -function padMilli(num) { +function padMilli(num: number) { if (num < 10) { return `00${num}`; } @@ -108,7 +109,7 @@ function padMilli(num) { } } -function formatTime(millisSinceMidnight) { +function formatTime(millisSinceMidnight: number) { return `${pad(millisSinceMidnight / HOUR)}:${pad((millisSinceMidnight % HOUR) / MINUTE)}:${pad((millisSinceMidnight % MINUTE) / SECOND)}.${padMilli(millisSinceMidnight % SECOND)}`; } @@ -116,7 +117,7 @@ function formatDate() { return `${pad(todaysMidnight.getFullYear())}-${pad(todaysMidnight.getMonth() + 1)}-${pad(todaysMidnight.getDate())}`; } -module.exports = { +export = { info, error, request diff --git a/src/services/meta/attachment_meta.js b/src/services/meta/attachment_meta.js deleted file mode 100644 index 067a4a3363..0000000000 --- a/src/services/meta/attachment_meta.js +++ /dev/null @@ -1,16 +0,0 @@ -class AttachmentMeta { - /** @type {string} */ - attachmentId; - /** @type {string} */ - title; - /** @type {string} */ - role; - /** @type {string} */ - mime; - /** @type {int} */ - position; - /** @type {string} */ - dataFileName; -} - -module.exports = AttachmentMeta; diff --git a/src/services/meta/attachment_meta.ts b/src/services/meta/attachment_meta.ts new file mode 100644 index 0000000000..8c237ab21c --- /dev/null +++ b/src/services/meta/attachment_meta.ts @@ -0,0 +1,10 @@ +interface AttachmentMeta { + attachmentId?: string; + title: string; + role: string; + mime: string; + position?: number; + dataFileName: string; +} + +export = AttachmentMeta; diff --git a/src/services/meta/attribute_meta.js b/src/services/meta/attribute_meta.js deleted file mode 100644 index 3d50cb5855..0000000000 --- a/src/services/meta/attribute_meta.js +++ /dev/null @@ -1,14 +0,0 @@ -class AttributeMeta { - /** @type {string} */ - type; - /** @type {string} */ - name; - /** @type {string} */ - value; - /** @type {boolean} */ - isInheritable; - /** @type {int} */ - position; -} - -module.exports = 
AttributeMeta; diff --git a/src/services/meta/attribute_meta.ts b/src/services/meta/attribute_meta.ts new file mode 100644 index 0000000000..de79df9131 --- /dev/null +++ b/src/services/meta/attribute_meta.ts @@ -0,0 +1,12 @@ +import { AttributeType } from "../../becca/entities/rows"; + +interface AttributeMeta { + noteId?: string; + type: AttributeType; + name: string; + value: string; + isInheritable?: boolean; + position?: number; +} + +export = AttributeMeta; diff --git a/src/services/meta/note_meta.js b/src/services/meta/note_meta.js deleted file mode 100644 index fd24381d65..0000000000 --- a/src/services/meta/note_meta.js +++ /dev/null @@ -1,36 +0,0 @@ -class NoteMeta { - /** @type {string} */ - noteId; - /** @type {string} */ - notePath; - /** @type {boolean} */ - isClone; - /** @type {string} */ - title; - /** @type {int} */ - notePosition; - /** @type {string} */ - prefix; - /** @type {boolean} */ - isExpanded; - /** @type {string} */ - type; - /** @type {string} */ - mime; - /** @type {string} - 'html' or 'markdown', applicable to text notes only */ - format; - /** @type {string} */ - dataFileName; - /** @type {string} */ - dirFileName; - /** @type {boolean} - this file should not be imported (e.g., HTML navigation) */ - noImport = false; - /** @type {AttributeMeta[]} */ - attributes; - /** @type {AttachmentMeta[]} */ - attachments; - /** @type {NoteMeta[]|undefined} */ - children; -} - -module.exports = NoteMeta; diff --git a/src/services/meta/note_meta.ts b/src/services/meta/note_meta.ts new file mode 100644 index 0000000000..b3012f29ae --- /dev/null +++ b/src/services/meta/note_meta.ts @@ -0,0 +1,26 @@ +import AttachmentMeta = require("./attachment_meta"); +import AttributeMeta = require("./attribute_meta"); + +interface NoteMeta { + noteId?: string; + notePath?: string[]; + isClone?: boolean; + title?: string; + notePosition?: number; + prefix?: string | null; + isExpanded?: boolean; + type?: string; + mime?: string; + /** 'html' or 'markdown', 
applicable to text notes only */ + format?: "html" | "markdown"; + dataFileName: string; + dirFileName?: string; + /** this file should not be imported (e.g., HTML navigation) */ + noImport?: boolean; + isImportRoot?: boolean; + attributes?: AttributeMeta[]; + attachments?: AttachmentMeta[]; + children?: NoteMeta[]; +} + +export = NoteMeta; diff --git a/src/services/migration.js b/src/services/migration.ts similarity index 84% rename from src/services/migration.js rename to src/services/migration.ts index aeb7ba2172..ca647c5397 100644 --- a/src/services/migration.js +++ b/src/services/migration.ts @@ -1,11 +1,18 @@ -const backupService = require('./backup.js'); -const sql = require('./sql.js'); -const fs = require('fs-extra'); -const log = require('./log.js'); -const utils = require('./utils.js'); -const resourceDir = require('./resource_dir.js'); -const appInfo = require('./app_info.js'); -const cls = require('./cls.js'); +import backupService = require('./backup'); +import sql = require('./sql'); +import fs = require('fs-extra'); +import log = require('./log'); +import utils = require('./utils'); +import resourceDir = require('./resource_dir'); +import appInfo = require('./app_info'); +import cls = require('./cls'); + +interface MigrationInfo { + dbVersion: number; + name: string; + file: string; + type: string; +} async function migrate() { const currentDbVersion = getDbVersion(); @@ -25,7 +32,12 @@ async function migrate() { : 'before-migration' ); - const migrations = fs.readdirSync(resourceDir.MIGRATIONS_DIR).map(file => { + const migrationFiles = fs.readdirSync(resourceDir.MIGRATIONS_DIR); + if (migrationFiles == null) { + return; + } + + const migrations = migrationFiles.map(file => { const match = file.match(/^([0-9]{4})__([a-zA-Z0-9_ ]+)\.(sql|js)$/); if (!match) { return null; @@ -45,7 +57,7 @@ async function migrate() { } else { return null; } - }).filter(el => !!el); + }).filter((el): el is MigrationInfo => !!el); migrations.sort((a, b) => a.dbVersion 
- b.dbVersion); @@ -67,7 +79,7 @@ async function migrate() { WHERE name = ?`, [mig.dbVersion.toString(), "dbVersion"]); log.info(`Migration to version ${mig.dbVersion} has been successful.`); - } catch (e) { + } catch (e: any) { log.error(`error during migration to version ${mig.dbVersion}: ${e.stack}`); log.error("migration failed, crashing hard"); // this is not very user-friendly :-/ @@ -84,7 +96,7 @@ async function migrate() { } } -function executeMigration(mig) { +function executeMigration(mig: MigrationInfo) { if (mig.type === 'sql') { const migrationSql = fs.readFileSync(`${resourceDir.MIGRATIONS_DIR}/${mig.file}`).toString('utf8'); @@ -131,7 +143,7 @@ async function migrateIfNecessary() { } } -module.exports = { +export = { migrateIfNecessary, isDbUpToDate }; diff --git a/src/services/note-interface.ts b/src/services/note-interface.ts new file mode 100644 index 0000000000..0b99c31ef3 --- /dev/null +++ b/src/services/note-interface.ts @@ -0,0 +1,27 @@ +import { NoteType } from "../becca/entities/rows"; + +export interface NoteParams { + /** optionally can force specific noteId */ + noteId?: string; + branchId?: string; + parentNoteId: string; + templateNoteId?: string; + title: string; + content: string; + /** text, code, file, image, search, book, relationMap, canvas, webView */ + type: NoteType; + /** default value is derived from default mimes for type */ + mime?: string; + /** default is false */ + isProtected?: boolean; + /** default is false */ + isExpanded?: boolean; + /** default is empty string */ + prefix?: string; + /** default is the last existing notePosition in a parent + 10 */ + notePosition?: number; + dateCreated?: string; + utcDateCreated?: string; + ignoreForbiddenParents?: boolean; + target?: "into"; +} \ No newline at end of file diff --git a/src/services/note_types.js b/src/services/note_types.ts similarity index 93% rename from src/services/note_types.js rename to src/services/note_types.ts index f29bf8ab4d..54d9d1f442 100644 --- 
a/src/services/note_types.js +++ b/src/services/note_types.ts @@ -16,7 +16,7 @@ const noteTypes = [ { type: 'contentWidget', defaultMime: '' } ]; -function getDefaultMimeForNoteType(typeName) { +function getDefaultMimeForNoteType(typeName: string) { const typeRec = noteTypes.find(nt => nt.type === typeName); if (!typeRec) { @@ -26,7 +26,7 @@ function getDefaultMimeForNoteType(typeName) { return typeRec.defaultMime; } -module.exports = { +export = { getNoteTypeNames: () => noteTypes.map(nt => nt.type), getDefaultMimeForNoteType }; diff --git a/src/services/notes.js b/src/services/notes.ts similarity index 82% rename from src/services/notes.js rename to src/services/notes.ts index 80a3b8e841..6dd68dc73e 100644 --- a/src/services/notes.js +++ b/src/services/notes.ts @@ -1,52 +1,63 @@ -const sql = require('./sql.js'); -const optionService = require('./options.js'); -const dateUtils = require('./date_utils.js'); -const entityChangesService = require('./entity_changes.js'); -const eventService = require('./events.js'); -const cls = require('../services/cls.js'); -const protectedSessionService = require('../services/protected_session.js'); -const log = require('../services/log.js'); -const utils = require('../services/utils.js'); -const revisionService = require('./revisions.js'); -const request = require('./request.js'); -const path = require('path'); -const url = require('url'); -const becca = require('../becca/becca.js'); -const BBranch = require('../becca/entities/bbranch.js'); -const BNote = require('../becca/entities/bnote.js'); -const BAttribute = require('../becca/entities/battribute.js'); -const BAttachment = require('../becca/entities/battachment.js'); -const dayjs = require("dayjs"); -const htmlSanitizer = require('./html_sanitizer.js'); -const ValidationError = require('../errors/validation_error.js'); -const noteTypesService = require('./note_types.js'); -const fs = require("fs"); -const ws = require('./ws.js'); -const html2plaintext = 
require('html2plaintext') - -/** @param {BNote} parentNote */ -function getNewNotePosition(parentNote) { +import sql = require('./sql'); +import optionService = require('./options'); +import dateUtils = require('./date_utils'); +import entityChangesService = require('./entity_changes'); +import eventService = require('./events'); +import cls = require('../services/cls'); +import protectedSessionService = require('../services/protected_session'); +import log = require('../services/log'); +import utils = require('../services/utils'); +import revisionService = require('./revisions'); +import request = require('./request'); +import path = require('path'); +import url = require('url'); +import becca = require('../becca/becca'); +import BBranch = require('../becca/entities/bbranch'); +import BNote = require('../becca/entities/bnote'); +import BAttribute = require('../becca/entities/battribute'); +import BAttachment = require('../becca/entities/battachment'); +import dayjs = require("dayjs"); +import htmlSanitizer = require('./html_sanitizer'); +import ValidationError = require('../errors/validation_error'); +import noteTypesService = require('./note_types'); +import fs = require("fs"); +import ws = require('./ws'); +import html2plaintext = require('html2plaintext'); +import { AttachmentRow, AttributeRow, BranchRow, NoteRow, NoteType } from '../becca/entities/rows'; +import TaskContext = require('./task_context'); +import { NoteParams } from './note-interface'; + +interface FoundLink { + name: "imageLink" | "internalLink" | "includeNoteLink" | "relationMapLink", + value: string +} + +interface Attachment { + attachmentId?: string; + title: string; +} + +function getNewNotePosition(parentNote: BNote) { if (parentNote.isLabelTruthy('newNotesOnTop')) { const minNotePos = parentNote.getChildBranches() - .filter(branch => branch.noteId !== '_hidden') // has "always last" note position - .reduce((min, note) => Math.min(min, note.notePosition), 0); + .filter(branch => 
branch?.noteId !== '_hidden') // has "always last" note position + .reduce((min, note) => Math.min(min, note?.notePosition || 0), 0); return minNotePos - 10; } else { const maxNotePos = parentNote.getChildBranches() - .filter(branch => branch.noteId !== '_hidden') // has "always last" note position - .reduce((max, note) => Math.max(max, note.notePosition), 0); + .filter(branch => branch?.noteId !== '_hidden') // has "always last" note position + .reduce((max, note) => Math.max(max, note?.notePosition || 0), 0); return maxNotePos + 10; } } -/** @param {BNote} note */ -function triggerNoteTitleChanged(note) { +function triggerNoteTitleChanged(note: BNote) { eventService.emit(eventService.NOTE_TITLE_CHANGED, note); } -function deriveMime(type, mime) { +function deriveMime(type: string, mime?: string) { if (!type) { throw new Error(`Note type is a required param`); } @@ -58,11 +69,7 @@ function deriveMime(type, mime) { return noteTypesService.getDefaultMimeForNoteType(type); } -/** - * @param {BNote} parentNote - * @param {BNote} childNote - */ -function copyChildAttributes(parentNote, childNote) { +function copyChildAttributes(parentNote: BNote, childNote: BNote) { for (const attr of parentNote.getAttributes()) { if (attr.name.startsWith("child:")) { const name = attr.name.substr(6); @@ -86,8 +93,7 @@ function copyChildAttributes(parentNote, childNote) { } } -/** @param {BNote} parentNote */ -function getNewNoteTitle(parentNote) { +function getNewNoteTitle(parentNote: BNote) { let title = "new note"; const titleTemplate = parentNote.getLabelValue('titleTemplate'); @@ -101,7 +107,7 @@ function getNewNoteTitle(parentNote) { // - parentNote title = eval(`\`${titleTemplate}\``); - } catch (e) { + } catch (e: any) { log.error(`Title template of note '${parentNote.noteId}' failed with: ${e.message}`); } } @@ -114,7 +120,13 @@ function getNewNoteTitle(parentNote) { return title; } -function getAndValidateParent(params) { +interface GetValidateParams { + parentNoteId: string; 
+ type: string; + ignoreForbiddenParents?: boolean; +} + +function getAndValidateParent(params: GetValidateParams) { const parentNote = becca.notes[params.parentNoteId]; if (!parentNote) { @@ -141,24 +153,10 @@ function getAndValidateParent(params) { return parentNote; } -/** - * Following object properties are mandatory: - * - {string} parentNoteId - * - {string} title - * - {*} content - * - {string} type - text, code, file, image, search, book, relationMap, canvas, render - * - * The following are optional (have defaults) - * - {string} mime - value is derived from default mimes for type - * - {boolean} isProtected - default is false - * - {boolean} isExpanded - default is false - * - {string} prefix - default is empty string - * - {int} notePosition - default is the last existing notePosition in a parent + 10 - * - * @param params - * @returns {{note: BNote, branch: BBranch}} - */ -function createNewNote(params) { +function createNewNote(params: NoteParams): { + note: BNote; + branch: BBranch; +} { const parentNote = getAndValidateParent(params); if (params.title === null || params.title === undefined) { @@ -209,7 +207,7 @@ function createNewNote(params) { noteId: note.noteId, parentNoteId: params.parentNoteId, notePosition: params.notePosition !== undefined ? 
params.notePosition : getNewNotePosition(parentNote), - prefix: params.prefix, + prefix: params.prefix || "", isExpanded: !!params.isExpanded }).save(); } @@ -253,7 +251,7 @@ function createNewNote(params) { }); } -function createNewNoteWithTarget(target, targetBranchId, params) { +function createNewNoteWithTarget(target: ("into" | "after"), targetBranchId: string, params: NoteParams) { if (!params.type) { const parentNote = becca.notes[params.parentNoteId]; @@ -285,13 +283,7 @@ function createNewNoteWithTarget(target, targetBranchId, params) { } } -/** - * @param {BNote} note - * @param {boolean} protect - * @param {boolean} includingSubTree - * @param {TaskContext} taskContext - */ -function protectNoteRecursively(note, protect, includingSubTree, taskContext) { +function protectNoteRecursively(note: BNote, protect: boolean, includingSubTree: boolean, taskContext: TaskContext) { protectNote(note, protect); taskContext.increaseProgressCount(); @@ -303,11 +295,7 @@ function protectNoteRecursively(note, protect, includingSubTree, taskContext) { } } -/** - * @param {BNote} note - * @param {boolean} protect - */ -function protectNote(note, protect) { +function protectNote(note: BNote, protect: boolean) { if (!protectedSessionService.isProtectedSessionAvailable()) { throw new Error(`Cannot (un)protect note '${note.noteId}' with protect flag '${protect}' without active protected session`); } @@ -345,8 +333,8 @@ function protectNote(note, protect) { } } -function checkImageAttachments(note, content) { - const foundAttachmentIds = new Set(); +function checkImageAttachments(note: BNote, content: string) { + const foundAttachmentIds = new Set<string>(); let match; const imgRegExp = /src="[^"]*api\/attachments\/([a-zA-Z0-9_]+)\/image/g; @@ -362,7 +350,7 @@ function checkImageAttachments(note, content) { const attachments = note.getAttachments(); for (const attachment of attachments) { - const attachmentInContent = foundAttachmentIds.has(attachment.attachmentId); + const 
attachmentInContent = attachment.attachmentId && foundAttachmentIds.has(attachment.attachmentId); if (attachment.utcDateScheduledForErasureSince && attachmentInContent) { attachment.utcDateScheduledForErasureSince = null; @@ -373,7 +361,7 @@ function checkImageAttachments(note, content) { } } - const existingAttachmentIds = new Set(attachments.map(att => att.attachmentId)); + const existingAttachmentIds = new Set<string | undefined>(attachments.map(att => att.attachmentId)); const unknownAttachmentIds = Array.from(foundAttachmentIds).filter(foundAttId => !existingAttachmentIds.has(foundAttId)); const unknownAttachments = becca.getAttachments(unknownAttachmentIds); @@ -412,7 +400,7 @@ function checkImageAttachments(note, content) { }; } -function findImageLinks(content, foundLinks) { +function findImageLinks(content: string, foundLinks: FoundLink[]) { const re = /src="[^"]*api\/images\/([a-zA-Z0-9_]+)\//g; let match; @@ -428,7 +416,7 @@ function findImageLinks(content, foundLinks) { return content.replace(/src="[^"]*\/api\/images\//g, 'src="api/images/'); } -function findInternalLinks(content, foundLinks) { +function findInternalLinks(content: string, foundLinks: FoundLink[]) { const re = /href="[^"]*#root[a-zA-Z0-9_\/]*\/([a-zA-Z0-9_]+)\/?"/g; let match; @@ -443,7 +431,7 @@ function findInternalLinks(content, foundLinks) { return content.replace(/href="[^"]*#root/g, 'href="#root'); } -function findIncludeNoteLinks(content, foundLinks) { +function findIncludeNoteLinks(content: string, foundLinks: FoundLink[]) { const re = /<section class="include-note[^>]+data-note-id="([a-zA-Z0-9_]+)"[^>]*>/g; let match; @@ -457,7 +445,7 @@ function findIncludeNoteLinks(content, foundLinks) { return content; } -function findRelationMapLinks(content, foundLinks) { +function findRelationMapLinks(content: string, foundLinks: FoundLink[]) { const obj = JSON.parse(content); for (const note of obj.notes) { @@ -468,9 +456,9 @@ function findRelationMapLinks(content, foundLinks) { } } 
-const imageUrlToAttachmentIdMapping = {}; +const imageUrlToAttachmentIdMapping: Record<string, string> = {}; -async function downloadImage(noteId, imageUrl) { +async function downloadImage(noteId: string, imageUrl: string) { const unescapedUrl = utils.unescapeHtml(imageUrl); try { @@ -493,30 +481,30 @@ async function downloadImage(noteId, imageUrl) { } const parsedUrl = url.parse(unescapedUrl); - const title = path.basename(parsedUrl.pathname); + const title = path.basename(parsedUrl.pathname || ""); - const imageService = require('../services/image.js'); + const imageService = require('../services/image'); const attachment = imageService.saveImageToAttachment(noteId, imageBuffer, title, true, true); imageUrlToAttachmentIdMapping[imageUrl] = attachment.attachmentId; log.info(`Download of '${imageUrl}' succeeded and was saved as image attachment '${attachment.attachmentId}' of note '${noteId}'`); } - catch (e) { + catch (e: any) { log.error(`Download of '${imageUrl}' for note '${noteId}' failed with error: ${e.message} ${e.stack}`); } } /** url => download promise */ -const downloadImagePromises = {}; +const downloadImagePromises: Record<string, Promise<void>> = {}; -function replaceUrl(content, url, attachment) { +function replaceUrl(content: string, url: string, attachment: Attachment) { const quotedUrl = utils.quoteRegex(url); return content.replace(new RegExp(`\\s+src=[\"']${quotedUrl}[\"']`, "ig"), ` src="api/attachments/${attachment.attachmentId}/image/${encodeURIComponent(attachment.title)}"`); } -function downloadImages(noteId, content) { +function downloadImages(noteId: string, content: string) { const imageRe = /<img[^>]*?\ssrc=['"]([^'">]+)['"]/ig; let imageMatch; @@ -528,7 +516,7 @@ function downloadImages(noteId, content) { const imageBase64 = url.substr(inlineImageMatch[0].length); const imageBuffer = Buffer.from(imageBase64, 'base64'); - const imageService = require('../services/image.js'); + const imageService = require('../services/image'); const 
attachment = imageService.saveImageToAttachment(noteId, imageBuffer, "inline image", true, true); const encodedTitle = encodeURIComponent(attachment.title); @@ -589,6 +577,11 @@ function downloadImages(noteId, content) { const origContent = origNote.getContent(); let updatedContent = origContent; + if (typeof updatedContent !== "string") { + log.error(`Note '${noteId}' has a non-string content, cannot replace image link.`); + return; + } + for (const url in imageUrlToAttachmentIdMapping) { const imageNote = imageNotes.find(note => note.noteId === imageUrlToAttachmentIdMapping[url]); @@ -612,11 +605,7 @@ function downloadImages(noteId, content) { return content; } -/** - * @param {BNote} note - * @param {string} content - */ -function saveAttachments(note, content) { +function saveAttachments(note: BNote, content: string) { const inlineAttachmentRe = /<a[^>]*?\shref=['"]data:([^;'">]+);base64,([^'">]+)['"][^>]*>(.*?)<\/a>/igm; let attachmentMatch; @@ -645,11 +634,7 @@ function saveAttachments(note, content) { return content; } -/** - * @param {BNote} note - * @param {string} content - */ -function saveLinks(note, content) { +function saveLinks(note: BNote, content: string | Buffer) { if ((note.type !== 'text' && note.type !== 'relationMap') || (note.isProtected && !protectedSessionService.isProtectedSessionAvailable())) { return { @@ -658,10 +643,10 @@ function saveLinks(note, content) { }; } - const foundLinks = []; + const foundLinks: FoundLink[] = []; let forceFrontendReload = false; - if (note.type === 'text') { + if (note.type === 'text' && typeof content === "string") { content = downloadImages(note.noteId, content); content = saveAttachments(note, content); @@ -671,7 +656,7 @@ function saveLinks(note, content) { ({forceFrontendReload, content} = checkImageAttachments(note, content)); } - else if (note.type === 'relationMap') { + else if (note.type === 'relationMap' && typeof content === "string") { findRelationMapLinks(content, foundLinks); } else { @@ -716,8 
+701,7 @@ function saveLinks(note, content) { return { forceFrontendReload, content }; } -/** @param {BNote} note */ -function saveRevisionIfNeeded(note) { +function saveRevisionIfNeeded(note: BNote) { // files and images are versioned separately if (note.type === 'file' || note.type === 'image' || note.isLabelTruthy('disableVersioning')) { return; @@ -738,10 +722,10 @@ function saveRevisionIfNeeded(note) { } } -function updateNoteData(noteId, content, attachments = []) { +function updateNoteData(noteId: string, content: string, attachments: AttachmentRow[] = []) { const note = becca.getNote(noteId); - if (!note.isContentAvailable()) { + if (!note || !note.isContentAvailable()) { throw new Error(`Note '${noteId}' is not available for change!`); } @@ -752,10 +736,9 @@ function updateNoteData(noteId, content, attachments = []) { note.setContent(newContent, { forceFrontendReload }); if (attachments?.length > 0) { - /** @var {Object<string, BAttachment>} */ const existingAttachmentsByTitle = utils.toMap(note.getAttachments({includeContentLength: false}), 'title'); - for (const {attachmentId, role, mime, title, content, position} of attachments) { + for (const {attachmentId, role, mime, title, position, content} of attachments) { if (attachmentId || !(title in existingAttachmentsByTitle)) { note.saveAttachment({attachmentId, role, mime, title, content, position}); } else { @@ -763,18 +746,16 @@ function updateNoteData(noteId, content, attachments = []) { existingAttachment.role = role; existingAttachment.mime = mime; existingAttachment.position = position; - existingAttachment.setContent(content, {forceSave: true}); + if (content) { + existingAttachment.setContent(content, {forceSave: true}); + } } } } } -/** - * @param {string} noteId - * @param {TaskContext} taskContext - */ -function undeleteNote(noteId, taskContext) { - const noteRow = sql.getRow("SELECT * FROM notes WHERE noteId = ?", [noteId]); +function undeleteNote(noteId: string, taskContext: TaskContext) { + 
const noteRow = sql.getRow<NoteRow>("SELECT * FROM notes WHERE noteId = ?", [noteId]); if (!noteRow.isDeleted) { log.error(`Note '${noteId}' is not deleted and thus cannot be undeleted.`); @@ -793,19 +774,14 @@ function undeleteNote(noteId, taskContext) { } } -/** - * @param {string} branchId - * @param {string} deleteId - * @param {TaskContext} taskContext - */ -function undeleteBranch(branchId, deleteId, taskContext) { - const branchRow = sql.getRow("SELECT * FROM branches WHERE branchId = ?", [branchId]) +function undeleteBranch(branchId: string, deleteId: string, taskContext: TaskContext) { + const branchRow = sql.getRow<BranchRow>("SELECT * FROM branches WHERE branchId = ?", [branchId]) if (!branchRow.isDeleted) { return; } - const noteRow = sql.getRow("SELECT * FROM notes WHERE noteId = ?", [branchRow.noteId]); + const noteRow = sql.getRow<NoteRow>("SELECT * FROM notes WHERE noteId = ?", [branchRow.noteId]); if (noteRow.isDeleted && noteRow.deleteId !== deleteId) { return; @@ -818,10 +794,14 @@ function undeleteBranch(branchId, deleteId, taskContext) { if (noteRow.isDeleted && noteRow.deleteId === deleteId) { // becca entity was already created as skeleton in "new Branch()" above const noteEntity = becca.getNote(noteRow.noteId); + if (!noteEntity) { + throw new Error("Unable to find the just restored branch."); + } + noteEntity.updateFromRow(noteRow); noteEntity.save(); - const attributeRows = sql.getRows(` + const attributeRows = sql.getRows<AttributeRow>(` SELECT * FROM attributes WHERE isDeleted = 1 AND deleteId = ? @@ -830,10 +810,11 @@ function undeleteBranch(branchId, deleteId, taskContext) { for (const attributeRow of attributeRows) { // relation might point to a note which hasn't been undeleted yet and would thus throw up + // TODO: skipValidation is not used. 
new BAttribute(attributeRow).save({skipValidation: true}); } - const attachmentRows = sql.getRows(` + const attachmentRows = sql.getRows<AttachmentRow>(` SELECT * FROM attachments WHERE isDeleted = 1 AND deleteId = ? @@ -843,7 +824,7 @@ function undeleteBranch(branchId, deleteId, taskContext) { new BAttachment(attachmentRow).save(); } - const childBranchIds = sql.getColumn(` + const childBranchIds = sql.getColumn<string>(` SELECT branches.branchId FROM branches WHERE branches.isDeleted = 1 @@ -859,8 +840,8 @@ function undeleteBranch(branchId, deleteId, taskContext) { /** * @returns return deleted branchIds of an undeleted parent note */ -function getUndeletedParentBranchIds(noteId, deleteId) { - return sql.getColumn(` +function getUndeletedParentBranchIds(noteId: string, deleteId: string) { + return sql.getColumn<string>(` SELECT branches.branchId FROM branches JOIN notes AS parentNote ON parentNote.noteId = branches.parentNoteId @@ -870,7 +851,7 @@ function getUndeletedParentBranchIds(noteId, deleteId) { AND parentNote.isDeleted = 0`, [noteId, deleteId]); } -function scanForLinks(note, content) { +function scanForLinks(note: BNote, content: string | Buffer) { if (!note || !['text', 'relationMap'].includes(note.type)) { return; } @@ -884,17 +865,15 @@ function scanForLinks(note, content) { } }); } - catch (e) { + catch (e: any) { log.error(`Could not scan for links note '${note.noteId}': ${e.message} ${e.stack}`); } } /** - * @param {BNote} note - * @param {string} content * Things which have to be executed after updating content, but asynchronously (separate transaction) */ -async function asyncPostProcessContent(note, content) { +async function asyncPostProcessContent(note: BNote, content: string | Buffer) { if (cls.isMigrationRunning()) { // this is rarely needed for migrations, but can cause trouble by e.g. 
triggering downloads return; @@ -908,7 +887,7 @@ async function asyncPostProcessContent(note, content) { } // all keys should be replaced by the corresponding values -function replaceByMap(str, mapObj) { +function replaceByMap(str: string, mapObj: Record<string, string>) { if (!mapObj) { return str; } @@ -918,7 +897,7 @@ function replaceByMap(str, mapObj) { return str.replace(re, matched => mapObj[matched]); } -function duplicateSubtree(origNoteId, newParentNoteId) { +function duplicateSubtree(origNoteId: string, newParentNoteId: string) { if (origNoteId === 'root') { throw new Error('Duplicating root is not possible'); } @@ -931,6 +910,10 @@ function duplicateSubtree(origNoteId, newParentNoteId) { const noteIdMapping = getNoteIdMapping(origNote); + if (!origBranch) { + throw new Error("Unable to find original branch to duplicate."); + } + const res = duplicateSubtreeInner(origNote, origBranch, newParentNoteId, noteIdMapping); if (!res.note.title.endsWith('(dup)')) { @@ -942,20 +925,25 @@ function duplicateSubtree(origNoteId, newParentNoteId) { return res; } -function duplicateSubtreeWithoutRoot(origNoteId, newNoteId) { +function duplicateSubtreeWithoutRoot(origNoteId: string, newNoteId: string) { if (origNoteId === 'root') { throw new Error('Duplicating root is not possible'); } const origNote = becca.getNote(origNoteId); + if (origNote == null) { + throw new Error("Unable to find note to duplicate."); + } + const noteIdMapping = getNoteIdMapping(origNote); - for (const childBranch of origNote.getChildBranches()) { - duplicateSubtreeInner(childBranch.getNote(), childBranch, newNoteId, noteIdMapping); + if (childBranch) { + duplicateSubtreeInner(childBranch.getNote(), childBranch, newNoteId, noteIdMapping); + } } } -function duplicateSubtreeInner(origNote, origBranch, newParentNoteId, noteIdMapping) { +function duplicateSubtreeInner(origNote: BNote, origBranch: BBranch, newParentNoteId: string, noteIdMapping: Record<string, string>) { if (origNote.isProtected && 
!protectedSessionService.isProtectedSessionAvailable()) { throw new Error(`Cannot duplicate note '${origNote.noteId}' because it is protected and protected session is not available. Enter protected session and try again.`); } @@ -981,7 +969,7 @@ function duplicateSubtreeInner(origNote, origBranch, newParentNoteId, noteIdMapp let content = origNote.getContent(); - if (['text', 'relationMap', 'search'].includes(origNote.type)) { + if (typeof content === "string" && ['text', 'relationMap', 'search'].includes(origNote.type)) { // fix links in the content content = replaceByMap(content, noteIdMapping); } @@ -1002,11 +990,14 @@ function duplicateSubtreeInner(origNote, origBranch, newParentNoteId, noteIdMapp } // the relation targets may not be created yet, the mapping is pre-generated - attr.save({skipValidation: true}); + // TODO: This used to be `attr.save({skipValidation: true});`, but skipValidation is in beforeSaving. + attr.save(); } for (const childBranch of origNote.getChildBranches()) { - duplicateSubtreeInner(childBranch.getNote(), childBranch, newNote.noteId, noteIdMapping); + if (childBranch) { + duplicateSubtreeInner(childBranch.getNote(), childBranch, newNote.noteId, noteIdMapping); + } } return newNote; @@ -1031,8 +1022,8 @@ function duplicateSubtreeInner(origNote, origBranch, newParentNoteId, noteIdMapp } } -function getNoteIdMapping(origNote) { - const noteIdMapping = {}; +function getNoteIdMapping(origNote: BNote) { + const noteIdMapping: Record<string, string> = {}; // pregenerate new noteIds since we'll need to fix relation references even for not yet created notes for (const origNoteId of origNote.getDescendantNoteIds()) { @@ -1042,7 +1033,7 @@ function getNoteIdMapping(origNote) { return noteIdMapping; } -module.exports = { +export = { createNewNote, createNewNoteWithTarget, updateNoteData, diff --git a/src/services/one_time_timer.js b/src/services/one_time_timer.ts similarity index 77% rename from src/services/one_time_timer.js rename to 
src/services/one_time_timer.ts index 648c250bbe..033a8b3c16 100644 --- a/src/services/one_time_timer.js +++ b/src/services/one_time_timer.ts @@ -1,4 +1,4 @@ -const scheduledExecutions = {}; +const scheduledExecutions: Record<string, boolean> = {}; /** * Subsequent calls will not move the timer to the future. The first caller determines the time of execution. @@ -6,7 +6,7 @@ const scheduledExecutions = {}; * The good thing about synchronous better-sqlite3 is that this cannot interrupt transaction. The execution will be called * only outside of a transaction. */ -function scheduleExecution(name, milliseconds, cb) { +function scheduleExecution(name: string, milliseconds: number, cb: () => void) { if (name in scheduledExecutions) { return; } @@ -20,6 +20,6 @@ function scheduleExecution(name, milliseconds, cb) { }, milliseconds); } -module.exports = { +export = { scheduleExecution }; diff --git a/src/services/options.js b/src/services/options.ts similarity index 65% rename from src/services/options.js rename to src/services/options.ts index 6c33b9478f..b658e4ff71 100644 --- a/src/services/options.js +++ b/src/services/options.ts @@ -1,22 +1,21 @@ -const becca = require('../becca/becca.js'); -const sql = require('./sql.js'); +import becca = require('../becca/becca'); +import { OptionRow } from '../becca/entities/rows'; +import sql = require('./sql'); -/** @returns {string|null} */ -function getOptionOrNull(name) { +function getOptionOrNull(name: string): string | null { let option; if (becca.loaded) { option = becca.getOption(name); } else { // e.g. in initial sync becca is not loaded because DB is not initialized - option = sql.getRow("SELECT * FROM options WHERE name = ?", [name]); + option = sql.getRow<OptionRow>("SELECT * FROM options WHERE name = ?", [name]); } return option ? 
option.value : null; } -/** @returns {string} */ -function getOption(name) { +function getOption(name: string) { const val = getOptionOrNull(name); if (val === null) { @@ -26,8 +25,7 @@ function getOption(name) { return val; } -/** @returns {int} */ -function getOptionInt(name, defaultValue = undefined) { +function getOptionInt(name: string, defaultValue?: number): number { const val = getOption(name); const intVal = parseInt(val); @@ -43,19 +41,18 @@ function getOptionInt(name, defaultValue = undefined) { return intVal; } -/** @returns {boolean} */ -function getOptionBool(name) { +function getOptionBool(name: string): boolean { const val = getOption(name); - if (!['true', 'false'].includes(val)) { + if (typeof val !== "string" || !['true', 'false'].includes(val)) { throw new Error(`Could not parse '${val}' into boolean for option '${name}'`); } return val === 'true'; } -function setOption(name, value) { - if (value === true || value === false) { +function setOption(name: string, value: string | number | boolean) { + if (value === true || value === false || typeof value === "number") { value = value.toString(); } @@ -71,9 +68,9 @@ function setOption(name, value) { } } -function createOption(name, value, isSynced) { +function createOption(name: string, value: string | number, isSynced: boolean) { // to avoid circular dependency, need to find a better solution - const BOption = require('../becca/entities/boption.js'); + const BOption = require('../becca/entities/boption'); new BOption({ name: name, @@ -87,7 +84,7 @@ function getOptions() { } function getOptionMap() { - const map = {}; + const map: Record<string | number, string> = {}; for (const option of Object.values(becca.options)) { map[option.name] = option.value; @@ -96,7 +93,7 @@ function getOptionMap() { return map; } -module.exports = { +export = { getOption, getOptionInt, getOptionBool, diff --git a/src/services/options_init.js b/src/services/options_init.ts similarity index 89% rename from 
src/services/options_init.js rename to src/services/options_init.ts index 95514d6d6b..e2ba1520a1 100644 --- a/src/services/options_init.js +++ b/src/services/options_init.ts @@ -1,16 +1,22 @@ -const optionService = require('./options.js'); -const appInfo = require('./app_info.js'); -const utils = require('./utils.js'); -const log = require('./log.js'); -const dateUtils = require('./date_utils.js'); -const keyboardActions = require('./keyboard_actions.js'); +import optionService = require('./options'); +import appInfo = require('./app_info'); +import utils = require('./utils'); +import log = require('./log'); +import dateUtils = require('./date_utils'); +import keyboardActions = require('./keyboard_actions'); +import { KeyboardShortcutWithRequiredActionName } from './keyboard_actions_interface'; function initDocumentOptions() { optionService.createOption('documentId', utils.randomSecureToken(16), false); optionService.createOption('documentSecret', utils.randomSecureToken(16), false); } -function initNotSyncedOptions(initialized, opts = {}) { +interface NotSyncedOpts { + syncServerHost?: string; + syncProxy?: string; +} + +function initNotSyncedOptions(initialized: boolean, opts: NotSyncedOpts = {}) { optionService.createOption('openNoteContexts', JSON.stringify([ { notePath: 'root', @@ -21,7 +27,7 @@ function initNotSyncedOptions(initialized, opts = {}) { optionService.createOption('lastDailyBackupDate', dateUtils.utcNowDateTime(), false); optionService.createOption('lastWeeklyBackupDate', dateUtils.utcNowDateTime(), false); optionService.createOption('lastMonthlyBackupDate', dateUtils.utcNowDateTime(), false); - optionService.createOption('dbVersion', appInfo.dbVersion, false); + optionService.createOption('dbVersion', appInfo.dbVersion.toString(), false); optionService.createOption('initialized', initialized ? 
'true' : 'false', false); @@ -117,8 +123,8 @@ function initStartupOptions() { } function getKeyboardDefaultOptions() { - return keyboardActions.DEFAULT_KEYBOARD_ACTIONS - .filter(ka => !!ka.actionName) + return (keyboardActions.DEFAULT_KEYBOARD_ACTIONS + .filter(ka => !!ka.actionName) as KeyboardShortcutWithRequiredActionName[]) .map(ka => ({ name: `keyboardShortcuts${ka.actionName.charAt(0).toUpperCase()}${ka.actionName.slice(1)}`, value: JSON.stringify(ka.defaultShortcuts), @@ -126,7 +132,7 @@ function getKeyboardDefaultOptions() { })); } -module.exports = { +export = { initDocumentOptions, initNotSyncedOptions, initStartupOptions diff --git a/src/services/port.js b/src/services/port.ts similarity index 71% rename from src/services/port.js rename to src/services/port.ts index 938eb33248..c37992d424 100644 --- a/src/services/port.js +++ b/src/services/port.ts @@ -1,9 +1,9 @@ -const config = require('./config.js'); -const utils = require('./utils.js'); -const env = require('./env.js'); -const dataDir = require('./data_dir.js'); +import config = require('./config'); +import utils = require('./utils'); +import env = require('./env'); +import dataDir = require('./data_dir'); -function parseAndValidate(portStr, source) { +function parseAndValidate(portStr: string, source: string) { const portNum = parseInt(portStr); if (isNaN(portNum) || portNum < 0 || portNum >= 65536) { @@ -14,7 +14,7 @@ function parseAndValidate(portStr, source) { return portNum; } -let port; +let port: number; if (process.env.TRILIUM_PORT) { port = parseAndValidate(process.env.TRILIUM_PORT, "environment variable TRILIUM_PORT"); @@ -24,4 +24,4 @@ if (process.env.TRILIUM_PORT) { port = parseAndValidate(config['Network']['port'] || '3000', `Network.port in ${dataDir.CONFIG_INI_PATH}`); } -module.exports = port; +export = port; diff --git a/src/services/promoted_attribute_definition_parser.js b/src/services/promoted_attribute_definition_parser.ts similarity index 78% rename from 
src/services/promoted_attribute_definition_parser.js rename to src/services/promoted_attribute_definition_parser.ts index 937dae1de2..3efe16f4d9 100644 --- a/src/services/promoted_attribute_definition_parser.js +++ b/src/services/promoted_attribute_definition_parser.ts @@ -1,6 +1,15 @@ -function parse(value) { +interface DefinitionObject { + isPromoted?: boolean; + labelType?: string; + multiplicity?: string; + numberPrecision?: number; + promotedAlias?: string; + inverseRelation?: string; +} + +function parse(value: string): DefinitionObject { const tokens = value.split(',').map(t => t.trim()); - const defObj = {}; + const defObj: DefinitionObject = {}; for (const token of tokens) { if (token === 'promoted') { @@ -35,6 +44,6 @@ function parse(value) { return defObj; } -module.exports = { +export = { parse }; diff --git a/src/services/protected_session.js b/src/services/protected_session.js deleted file mode 100644 index 2ade5e338a..0000000000 --- a/src/services/protected_session.js +++ /dev/null @@ -1,76 +0,0 @@ -"use strict"; - -const log = require('./log.js'); -const dataEncryptionService = require('./encryption/data_encryption.js'); - -let dataKey = null; - -function setDataKey(decryptedDataKey) { - dataKey = Array.from(decryptedDataKey); -} - -function getDataKey() { - return dataKey; -} - -function resetDataKey() { - dataKey = null; -} - -function isProtectedSessionAvailable() { - return !!dataKey; -} - -function encrypt(plainText) { - if (plainText === null) { - return null; - } - - return dataEncryptionService.encrypt(getDataKey(), plainText); -} - -function decrypt(cipherText) { - if (cipherText === null) { - return null; - } - - return dataEncryptionService.decrypt(getDataKey(), cipherText); -} - -function decryptString(cipherText) { - return dataEncryptionService.decryptString(getDataKey(), cipherText); -} - -let lastProtectedSessionOperationDate = null; - -function touchProtectedSession() { - if (isProtectedSessionAvailable()) { - 
lastProtectedSessionOperationDate = Date.now(); - } -} - -function checkProtectedSessionExpiration() { - const options = require('./options.js'); - const protectedSessionTimeout = options.getOptionInt('protectedSessionTimeout'); - if (isProtectedSessionAvailable() - && lastProtectedSessionOperationDate - && Date.now() - lastProtectedSessionOperationDate > protectedSessionTimeout * 1000) { - - resetDataKey(); - - log.info("Expiring protected session"); - - require('./ws.js').reloadFrontend("leaving protected session"); - } -} - -module.exports = { - setDataKey, - resetDataKey, - isProtectedSessionAvailable, - encrypt, - decrypt, - decryptString, - touchProtectedSession, - checkProtectedSessionExpiration -}; diff --git a/src/services/protected_session.ts b/src/services/protected_session.ts new file mode 100644 index 0000000000..6c1d6c2239 --- /dev/null +++ b/src/services/protected_session.ts @@ -0,0 +1,82 @@ +"use strict"; + +import log = require('./log'); +import dataEncryptionService = require('./encryption/data_encryption'); + +let dataKey: Buffer | null = null; + +function setDataKey(decryptedDataKey: Buffer) { + dataKey = Buffer.from(decryptedDataKey); +} + +function getDataKey() { + return dataKey; +} + +function resetDataKey() { + dataKey = null; +} + +function isProtectedSessionAvailable() { + return !!dataKey; +} + +function encrypt(plainText: string | Buffer) { + const dataKey = getDataKey(); + if (plainText === null || dataKey === null) { + return null; + } + + return dataEncryptionService.encrypt(dataKey, plainText); +} + +function decrypt(cipherText: string | Buffer): Buffer | null { + const dataKey = getDataKey(); + if (cipherText === null || dataKey === null) { + return null; + } + + return dataEncryptionService.decrypt(dataKey, cipherText) || null; +} + +function decryptString(cipherText: string): string | null { + const dataKey = getDataKey(); + if (dataKey === null) { + return null; + } + return dataEncryptionService.decryptString(dataKey, 
cipherText); +} + +let lastProtectedSessionOperationDate: number | null = null; + +function touchProtectedSession() { + if (isProtectedSessionAvailable()) { + lastProtectedSessionOperationDate = Date.now(); + } +} + +function checkProtectedSessionExpiration() { + const options = require('./options'); + const protectedSessionTimeout = options.getOptionInt('protectedSessionTimeout'); + if (isProtectedSessionAvailable() + && lastProtectedSessionOperationDate + && Date.now() - lastProtectedSessionOperationDate > protectedSessionTimeout * 1000) { + + resetDataKey(); + + log.info("Expiring protected session"); + + require('./ws').reloadFrontend("leaving protected session"); + } +} + +export = { + setDataKey, + resetDataKey, + isProtectedSessionAvailable, + encrypt, + decrypt, + decryptString, + touchProtectedSession, + checkProtectedSessionExpiration +}; diff --git a/src/services/request.js b/src/services/request.ts similarity index 74% rename from src/services/request.js rename to src/services/request.ts index 610e44f515..d4a17ad4e5 100644 --- a/src/services/request.js +++ b/src/services/request.ts @@ -1,40 +1,63 @@ "use strict"; -const utils = require('./utils.js'); -const log = require('./log.js'); -const url = require('url'); -const syncOptions = require('./sync_options.js'); +import utils = require('./utils'); +import log = require('./log'); +import url = require('url'); +import syncOptions = require('./sync_options'); +import { ExecOpts } from './request_interface'; // this service provides abstraction over node's HTTP/HTTPS and electron net.client APIs // this allows supporting system proxy -function exec(opts) { - const client = getClient(opts); +interface ClientOpts { + method: string; + url: string; + protocol?: string | null; + host?: string | null; + port?: string | null; + path?: string | null; + timeout?: number; + headers?: Record<string, string | number>; + agent?: any; + proxy?: string | null; +} + +type RequestEvent = ("error" | "response" | "abort"); + 
+interface Request { + on(event: RequestEvent, cb: (e: any) => void): void; + end(payload?: string): void; +} +interface Client { + request(opts: ClientOpts): Request; +} + +function exec<T>(opts: ExecOpts): Promise<T> { + const client = getClient(opts); + // hack for cases where electron.net does not work, but we don't want to set proxy if (opts.proxy === 'noproxy') { opts.proxy = null; } - if (!opts.paging) { - opts.paging = { - pageCount: 1, - pageIndex: 0, - requestId: 'n/a' - }; - } + const paging = opts.paging || { + pageCount: 1, + pageIndex: 0, + requestId: 'n/a' + }; const proxyAgent = getProxyAgent(opts); const parsedTargetUrl = url.parse(opts.url); return new Promise((resolve, reject) => { try { - const headers = { + const headers: Record<string, string | number> = { Cookie: (opts.cookieJar && opts.cookieJar.header) || "", - 'Content-Type': opts.paging.pageCount === 1 ? 'application/json' : 'text/plain', - pageCount: opts.paging.pageCount, - pageIndex: opts.paging.pageIndex, - requestId: opts.paging.requestId + 'Content-Type': paging.pageCount === 1 ? 'application/json' : 'text/plain', + pageCount: paging.pageCount, + pageIndex: paging.pageIndex, + requestId: paging.requestId }; if (opts.auth) { @@ -63,9 +86,9 @@ function exec(opts) { } let responseStr = ''; - let chunks = []; + let chunks: Buffer[] = []; - response.on('data', chunk => chunks.push(chunk)); + response.on('data', (chunk: Buffer) => chunks.push(chunk)); response.on('end', () => { // use Buffer instead of string concatenation to avoid implicit decoding for each chunk @@ -77,7 +100,7 @@ function exec(opts) { const jsonObj = responseStr.trim() ? 
JSON.parse(responseStr) : null; resolve(jsonObj); - } catch (e) { + } catch (e: any) { log.error(`Failed to deserialize sync response: ${responseStr}`); reject(generateError(opts, e.message)); @@ -89,7 +112,7 @@ function exec(opts) { const jsonObj = JSON.parse(responseStr); errorMessage = jsonObj?.message || ''; - } catch (e) { + } catch (e: any) { errorMessage = responseStr.substr(0, Math.min(responseStr.length, 100)); } @@ -106,17 +129,17 @@ function exec(opts) { : opts.body; } - request.end(payload); + request.end(payload as string); } - catch (e) { + catch (e: any) { reject(generateError(opts, e.message)); } }); } -function getImage(imageUrl) { +function getImage(imageUrl: string) { const proxyConf = syncOptions.getSyncProxy(); - const opts = { + const opts: ClientOpts = { method: 'GET', url: imageUrl, proxy: proxyConf !== "noproxy" ? proxyConf : null @@ -151,15 +174,15 @@ function getImage(imageUrl) { reject(generateError(opts, `${response.statusCode} ${response.statusMessage}`)); } - const chunks = [] + const chunks: Buffer[] = [] - response.on('data', chunk => chunks.push(chunk)); + response.on('data', (chunk: Buffer) => chunks.push(chunk)); response.on('end', () => resolve(Buffer.concat(chunks))); }); request.end(undefined); } - catch (e) { + catch (e: any) { reject(generateError(opts, e.message)); } }); @@ -167,14 +190,14 @@ function getImage(imageUrl) { const HTTP = 'http:', HTTPS = 'https:'; -function getProxyAgent(opts) { +function getProxyAgent(opts: ClientOpts) { if (!opts.proxy) { return null; } const {protocol} = url.parse(opts.url); - if (![HTTP, HTTPS].includes(protocol)) { + if (!protocol || ![HTTP, HTTPS].includes(protocol)) { return null; } @@ -185,11 +208,11 @@ function getProxyAgent(opts) { return new AgentClass(opts.proxy); } -function getClient(opts) { +function getClient(opts: ClientOpts): Client { // it's not clear how to explicitly configure proxy (as opposed to system proxy), // so in that case, we always use node's modules if 
(utils.isElectron() && !opts.proxy) { - return require('electron').net; + return require('electron').net as Client; } else { const {protocol} = url.parse(opts.url); @@ -203,11 +226,14 @@ function getClient(opts) { } } -function generateError(opts, message) { +function generateError(opts: { + method: string; + url: string; +}, message: string) { return new Error(`Request to ${opts.method} ${opts.url} failed, error: ${message}`); } -module.exports = { +export = { exec, getImage }; diff --git a/src/services/request_interface.ts b/src/services/request_interface.ts new file mode 100644 index 0000000000..45f9defa10 --- /dev/null +++ b/src/services/request_interface.ts @@ -0,0 +1,20 @@ +export interface CookieJar { + header?: string; +} + +export interface ExecOpts { + proxy: "noproxy" | string | null; + method: string; + url: string; + paging?: { + pageCount: number; + pageIndex: number; + requestId: string; + }; + cookieJar?: CookieJar; + auth?: { + password?: string; + }, + timeout: number; + body?: string | {}; +} \ No newline at end of file diff --git a/src/services/resource_dir.js b/src/services/resource_dir.ts similarity index 85% rename from src/services/resource_dir.js rename to src/services/resource_dir.ts index 088a6d7d30..cba351ac84 100644 --- a/src/services/resource_dir.js +++ b/src/services/resource_dir.ts @@ -1,6 +1,6 @@ -const log = require('./log.js'); -const path = require('path'); -const fs = require('fs'); +import log = require('./log'); +import path = require('path'); +import fs = require('fs'); const RESOURCE_DIR = path.resolve(__dirname, "../.."); @@ -20,7 +20,7 @@ if (!fs.existsSync(MIGRATIONS_DIR)) { process.exit(1); } -module.exports = { +export = { RESOURCE_DIR, MIGRATIONS_DIR, DB_INIT_DIR, diff --git a/src/services/revisions.js b/src/services/revisions.ts similarity index 78% rename from src/services/revisions.js rename to src/services/revisions.ts index 7432847890..9cd281c13a 100644 --- a/src/services/revisions.js +++ 
b/src/services/revisions.ts @@ -1,14 +1,12 @@ "use strict"; -const log = require('./log.js'); -const sql = require('./sql.js'); -const protectedSessionService = require('./protected_session.js'); -const dateUtils = require('./date_utils.js'); - -/** - * @param {BNote} note - */ -function protectRevisions(note) { +import log = require('./log'); +import sql = require('./sql'); +import protectedSessionService = require('./protected_session'); +import dateUtils = require('./date_utils'); +import BNote = require('../becca/entities/bnote'); + +function protectRevisions(note: BNote) { if (!protectedSessionService.isProtectedSessionAvailable()) { throw new Error(`Cannot (un)protect revisions of note '${note.noteId}' without active protected session`); } @@ -18,7 +16,7 @@ function protectRevisions(note) { try { const content = revision.getContent(); - revision.isProtected = note.isProtected; + revision.isProtected = !!note.isProtected; // this will force de/encryption revision.setContent(content, {forceSave: true}); @@ -46,6 +44,6 @@ function protectRevisions(note) { } } -module.exports = { +export = { protectRevisions }; diff --git a/src/services/sanitize_attribute_name.js b/src/services/sanitize_attribute_name.ts similarity index 75% rename from src/services/sanitize_attribute_name.js rename to src/services/sanitize_attribute_name.ts index 3a7c9ff67b..62b2b03b34 100644 --- a/src/services/sanitize_attribute_name.js +++ b/src/services/sanitize_attribute_name.ts @@ -1,5 +1,5 @@ -function sanitizeAttributeName(origName) { - let fixedName; +function sanitizeAttributeName(origName: string) { + let fixedName: string; if (origName === '') { fixedName = "unnamed"; @@ -13,6 +13,6 @@ function sanitizeAttributeName(origName) { } -module.exports = { +export = { sanitizeAttributeName }; diff --git a/src/services/scheduler.js b/src/services/scheduler.js index 48a19815a7..99503ff59a 100644 --- a/src/services/scheduler.js +++ b/src/services/scheduler.js @@ -1,11 +1,11 @@ -const 
scriptService = require('./script.js'); -const cls = require('./cls.js'); -const sqlInit = require('./sql_init.js'); -const config = require('./config.js'); -const log = require('./log.js'); -const attributeService = require('../services/attributes.js'); -const protectedSessionService = require('../services/protected_session.js'); -const hiddenSubtreeService = require('./hidden_subtree.js'); +const scriptService = require('./script'); +const cls = require('./cls'); +const sqlInit = require('./sql_init'); +const config = require('./config'); +const log = require('./log'); +const attributeService = require('../services/attributes'); +const protectedSessionService = require('../services/protected_session'); +const hiddenSubtreeService = require('./hidden_subtree'); /** * @param {BNote} note @@ -34,7 +34,7 @@ function runNotesWithLabel(runAttrValue) { if ((runOnInstances.length === 0 || runOnInstances.includes(instanceName)) && (runAtHours.length === 0 || runAtHours.includes(currentHours)) ) { - scriptService.executeNoteNoException(note, {originEntity: note}); + scriptService.executeNoteNoException(note, { originEntity: note }); } } } diff --git a/src/services/script.js b/src/services/script.ts similarity index 67% rename from src/services/script.js rename to src/services/script.ts index 6119075e3a..1df79290e1 100644 --- a/src/services/script.js +++ b/src/services/script.ts @@ -1,9 +1,22 @@ -const ScriptContext = require('./script_context.js'); -const cls = require('./cls.js'); -const log = require('./log.js'); -const becca = require('../becca/becca.js'); +import ScriptContext = require('./script_context'); +import cls = require('./cls'); +import log = require('./log'); +import becca = require('../becca/becca'); +import BNote = require('../becca/entities/bnote'); +import { ApiParams } from './backend_script_api_interface'; + +interface Bundle { + note?: BNote; + noteId?: string; + script: string; + html: string; + allNotes?: BNote[]; + allNoteIds?: string[]; +} + +type 
ScriptParams = any[]; -function executeNote(note, apiParams) { +function executeNote(note: BNote, apiParams: ApiParams) { if (!note.isJavaScript() || note.getScriptEnv() !== 'backend' || !note.isContentAvailable()) { log.info(`Cannot execute note ${note.noteId} "${note.title}", note must be of type "Code: JS backend"`); @@ -11,11 +24,14 @@ function executeNote(note, apiParams) { } const bundle = getScriptBundle(note, true, 'backend'); - + if (!bundle) { + throw new Error("Unable to determine bundle."); + } + return executeBundle(bundle, apiParams); } -function executeNoteNoException(note, apiParams) { +function executeNoteNoException(note: BNote, apiParams: ApiParams) { try { executeNote(note, apiParams); } @@ -24,7 +40,7 @@ function executeNoteNoException(note, apiParams) { } } -function executeBundle(bundle, apiParams = {}) { +function executeBundle(bundle: Bundle, apiParams: ApiParams = {}) { if (!apiParams.startNote) { // this is the default case, the only exception is when we want to preserve frontend startNote apiParams.startNote = bundle.note; @@ -33,19 +49,19 @@ function executeBundle(bundle, apiParams = {}) { const originalComponentId = cls.get('componentId'); cls.set('componentId', 'script'); - cls.set('bundleNoteId', bundle.note.noteId); + cls.set('bundleNoteId', bundle.note?.noteId); // last \r\n is necessary if the script contains line comment on its last line const script = `function() {\r ${bundle.script}\r }`; - const ctx = new ScriptContext(bundle.allNotes, apiParams); + const ctx = new ScriptContext(bundle.allNotes || [], apiParams); try { return execute(ctx, script); } - catch (e) { - log.error(`Execution of script "${bundle.note.title}" (${bundle.note.noteId}) failed with error: ${e.message}`); + catch (e: any) { + log.error(`Execution of script "${bundle.note?.title}" (${bundle.note?.noteId}) failed with error: ${e.message}`); throw e; } @@ -61,25 +77,36 @@ ${bundle.script}\r * This method preserves frontend startNode - that's why we start 
execution from currentNote and override * bundle's startNote. */ -function executeScript(script, params, startNoteId, currentNoteId, originEntityName, originEntityId) { +function executeScript(script: string, params: ScriptParams, startNoteId: string, currentNoteId: string, originEntityName: string, originEntityId: string) { const startNote = becca.getNote(startNoteId); const currentNote = becca.getNote(currentNoteId); const originEntity = becca.getEntity(originEntityName, originEntityId); + if (!currentNote) { + throw new Error("Cannot find note."); + } + // we're just executing an excerpt of the original frontend script in the backend context, so we must // override normal note's content, and it's mime type / script environment const overrideContent = `return (${script}\r\n)(${getParams(params)})`; const bundle = getScriptBundle(currentNote, true, 'backend', [], overrideContent); + if (!bundle) { + throw new Error("Unable to determine script bundle."); + } + + if (!startNote || !originEntity) { + throw new Error("Missing start note or origin entity."); + } return executeBundle(bundle, { startNote, originEntity }); } -function execute(ctx, script) { - return function() { return eval(`const apiContext = this;\r\n(${script}\r\n)()`); }.call(ctx); +function execute(ctx: any, script: string) { + return function () { return eval(`const apiContext = this;\r\n(${script}\r\n)()`); }.call(ctx); } -function getParams(params) { +function getParams(params: ScriptParams) { if (!params) { return params; } @@ -94,12 +121,7 @@ function getParams(params) { }).join(","); } -/** - * @param {BNote} note - * @param {string} [script] - * @param {Array} [params] - */ -function getScriptBundleForFrontend(note, script, params) { +function getScriptBundleForFrontend(note: BNote, script: string, params: ScriptParams) { let overrideContent = null; if (script) { @@ -113,23 +135,16 @@ function getScriptBundleForFrontend(note, script, params) { } // for frontend, we return just noteIds because 
frontend needs to use its own entity instances - bundle.noteId = bundle.note.noteId; + bundle.noteId = bundle.note?.noteId; delete bundle.note; - bundle.allNoteIds = bundle.allNotes.map(note => note.noteId); + bundle.allNoteIds = bundle.allNotes?.map(note => note.noteId); delete bundle.allNotes; return bundle; } -/** - * @param {BNote} note - * @param {boolean} [root=true] - * @param {string|null} [scriptEnv] - * @param {string[]} [includedNoteIds] - * @param {string|null} [overrideContent] - */ -function getScriptBundle(note, root = true, scriptEnv = null, includedNoteIds = [], overrideContent = null) { +function getScriptBundle(note: BNote, root: boolean = true, scriptEnv: string | null = null, includedNoteIds: string[] = [], overrideContent: string | null = null): Bundle | undefined { if (!note.isContentAvailable()) { return; } @@ -146,7 +161,7 @@ function getScriptBundle(note, root = true, scriptEnv = null, includedNoteIds = return; } - const bundle = { + const bundle: Bundle = { note: note, script: '', html: '', @@ -165,10 +180,14 @@ function getScriptBundle(note, root = true, scriptEnv = null, includedNoteIds = const childBundle = getScriptBundle(child, false, scriptEnv, includedNoteIds); if (childBundle) { - modules.push(childBundle.note); + if (childBundle.note) { + modules.push(childBundle.note); + } bundle.script += childBundle.script; bundle.html += childBundle.html; - bundle.allNotes = bundle.allNotes.concat(childBundle.allNotes); + if (bundle.allNotes && childBundle.allNotes) { + bundle.allNotes = bundle.allNotes.concat(childBundle.allNotes); + } } } @@ -196,11 +215,11 @@ return module.exports; return bundle; } -function sanitizeVariableName(str) { +function sanitizeVariableName(str: string) { return str.replace(/[^a-z0-9_]/gim, ""); } -module.exports = { +export = { executeNote, executeNoteNoException, executeScript, diff --git a/src/services/script_context.js b/src/services/script_context.js deleted file mode 100644 index 67400d6307..0000000000 --- 
a/src/services/script_context.js +++ /dev/null @@ -1,22 +0,0 @@ -const utils = require('./utils.js'); -const BackendScriptApi = require('./backend_script_api.js'); - -function ScriptContext(allNotes, apiParams = {}) { - this.modules = {}; - this.notes = utils.toObject(allNotes, note => [note.noteId, note]); - this.apis = utils.toObject(allNotes, note => [note.noteId, new BackendScriptApi(note, apiParams)]); - this.require = moduleNoteIds => { - return moduleName => { - const candidates = allNotes.filter(note => moduleNoteIds.includes(note.noteId)); - const note = candidates.find(c => c.title === moduleName); - - if (!note) { - return require(moduleName); - } - - return this.modules[note.noteId].exports; - } - }; -} - -module.exports = ScriptContext; diff --git a/src/services/script_context.ts b/src/services/script_context.ts new file mode 100644 index 0000000000..4de4863d04 --- /dev/null +++ b/src/services/script_context.ts @@ -0,0 +1,37 @@ +import utils = require('./utils'); +import BackendScriptApi = require('./backend_script_api'); +import BNote = require('../becca/entities/bnote'); +import { ApiParams } from './backend_script_api_interface'; + +type Module = { + exports: any[]; +}; + +class ScriptContext { + modules: Record<string, Module>; + notes: {}; + apis: {}; + allNotes: BNote[]; + + constructor(allNotes: BNote[], apiParams: ApiParams) { + this.allNotes = allNotes; + this.modules = {}; + this.notes = utils.toObject(allNotes, note => [note.noteId, note]); + this.apis = utils.toObject(allNotes, note => [note.noteId, new BackendScriptApi(note, apiParams)]); + } + + require(moduleNoteIds: string[]) { + return (moduleName: string) => { + const candidates = this.allNotes.filter(note => moduleNoteIds.includes(note.noteId)); + const note = candidates.find(c => c.title === moduleName); + + if (!note) { + return require(moduleName); + } + + return this.modules[note.noteId].exports; + } + }; +} + +export = ScriptContext; diff --git 
a/src/services/search/expressions/ancestor.js b/src/services/search/expressions/ancestor.ts similarity index 74% rename from src/services/search/expressions/ancestor.js rename to src/services/search/expressions/ancestor.ts index 57c3adb5f5..4e73807957 100644 --- a/src/services/search/expressions/ancestor.js +++ b/src/services/search/expressions/ancestor.ts @@ -1,12 +1,19 @@ "use strict"; -const Expression = require('./expression.js'); -const NoteSet = require('../note_set.js'); -const log = require('../../log.js'); -const becca = require('../../../becca/becca.js'); +import Expression = require('./expression'); +import NoteSet = require('../note_set'); +import log = require('../../log'); +import becca = require('../../../becca/becca'); +import SearchContext = require('../search_context'); class AncestorExp extends Expression { - constructor(ancestorNoteId, ancestorDepth) { + + private ancestorNoteId: string; + private ancestorDepthComparator; + + ancestorDepth?: string; + + constructor(ancestorNoteId: string, ancestorDepth?: string) { super(); this.ancestorNoteId = ancestorNoteId; @@ -14,7 +21,7 @@ class AncestorExp extends Expression { this.ancestorDepthComparator = this.getComparator(ancestorDepth); } - execute(inputNoteSet, executionContext, searchContext) { + execute(inputNoteSet: NoteSet, executionContext: {}, searchContext: SearchContext) { const ancestorNote = becca.notes[this.ancestorNoteId]; if (!ancestorNote) { @@ -44,7 +51,7 @@ class AncestorExp extends Expression { return depthConformingNoteSet; } - getComparator(depthCondition) { + getComparator(depthCondition?: string): ((depth: number) => boolean) | null { if (!depthCondition) { return null; } @@ -67,4 +74,4 @@ class AncestorExp extends Expression { } } -module.exports = AncestorExp; +export = AncestorExp; diff --git a/src/services/search/expressions/and.js b/src/services/search/expressions/and.ts similarity index 51% rename from src/services/search/expressions/and.js rename to 
src/services/search/expressions/and.ts index a0dd350475..82c73fe70c 100644 --- a/src/services/search/expressions/and.js +++ b/src/services/search/expressions/and.ts @@ -1,11 +1,15 @@ "use strict"; -const Expression = require('./expression.js'); -const TrueExp = require('./true.js'); +import NoteSet = require('../note_set'); +import SearchContext = require('../search_context'); +import Expression = require('./expression'); +import TrueExp = require('./true'); class AndExp extends Expression { - static of(subExpressions) { - subExpressions = subExpressions.filter(exp => !!exp); + private subExpressions: Expression[]; + + static of(_subExpressions: (Expression | null | undefined)[]) { + const subExpressions = _subExpressions.filter((exp) => !!exp) as Expression[]; if (subExpressions.length === 1) { return subExpressions[0]; @@ -16,12 +20,12 @@ class AndExp extends Expression { } } - constructor(subExpressions) { + constructor(subExpressions: Expression[]) { super(); this.subExpressions = subExpressions; } - execute(inputNoteSet, executionContext, searchContext) { + execute(inputNoteSet: NoteSet, executionContext: {}, searchContext: SearchContext) { for (const subExpression of this.subExpressions) { inputNoteSet = subExpression.execute(inputNoteSet, executionContext, searchContext); } @@ -30,4 +34,4 @@ class AndExp extends Expression { } } -module.exports = AndExp; +export = AndExp; diff --git a/src/services/search/expressions/attribute_exists.js b/src/services/search/expressions/attribute_exists.ts similarity index 69% rename from src/services/search/expressions/attribute_exists.js rename to src/services/search/expressions/attribute_exists.ts index f9be656894..b1f01a1c07 100644 --- a/src/services/search/expressions/attribute_exists.js +++ b/src/services/search/expressions/attribute_exists.ts @@ -1,11 +1,19 @@ "use strict"; -const NoteSet = require('../note_set.js'); -const becca = require('../../../becca/becca.js'); -const Expression = require('./expression.js'); 
+import NoteSet = require("../note_set"); +import SearchContext = require("../search_context"); + +import becca = require('../../../becca/becca'); +import Expression = require('./expression'); class AttributeExistsExp extends Expression { - constructor(attributeType, attributeName, prefixMatch) { + + private attributeType: string; + private attributeName: string; + private isTemplateLabel: boolean; + private prefixMatch: boolean; + + constructor(attributeType: string, attributeName: string, prefixMatch: boolean) { super(); this.attributeType = attributeType; @@ -15,7 +23,7 @@ class AttributeExistsExp extends Expression { this.prefixMatch = prefixMatch; } - execute(inputNoteSet, executionContext, searchContext) { + execute(inputNoteSet: NoteSet, executionContext: {}, searchContext: SearchContext) { const attrs = this.prefixMatch ? becca.findAttributesWithPrefix(this.attributeType, this.attributeName) : becca.findAttributes(this.attributeType, this.attributeName); @@ -40,4 +48,4 @@ class AttributeExistsExp extends Expression { } } -module.exports = AttributeExistsExp; +export = AttributeExistsExp; diff --git a/src/services/search/expressions/child_of.js b/src/services/search/expressions/child_of.ts similarity index 68% rename from src/services/search/expressions/child_of.js rename to src/services/search/expressions/child_of.ts index d53b49c20f..10d31c00a6 100644 --- a/src/services/search/expressions/child_of.js +++ b/src/services/search/expressions/child_of.ts @@ -1,16 +1,20 @@ "use strict"; -const Expression = require('./expression.js'); -const NoteSet = require('../note_set.js'); +import Expression = require('./expression'); +import NoteSet = require('../note_set'); +import SearchContext = require('../search_context'); class ChildOfExp extends Expression { - constructor(subExpression) { + + private subExpression: Expression; + + constructor(subExpression: Expression) { super(); this.subExpression = subExpression; } - execute(inputNoteSet, executionContext, 
searchContext) { + execute(inputNoteSet: NoteSet, executionContext: {}, searchContext: SearchContext) { const subInputNoteSet = new NoteSet(); for (const note of inputNoteSet.notes) { @@ -33,4 +37,4 @@ class ChildOfExp extends Expression { } } -module.exports = ChildOfExp; +export = ChildOfExp; diff --git a/src/services/search/expressions/descendant_of.js b/src/services/search/expressions/descendant_of.ts similarity index 58% rename from src/services/search/expressions/descendant_of.js rename to src/services/search/expressions/descendant_of.ts index fa0e50dd3f..6994056c8d 100644 --- a/src/services/search/expressions/descendant_of.js +++ b/src/services/search/expressions/descendant_of.ts @@ -1,17 +1,20 @@ "use strict"; -const Expression = require('./expression.js'); -const NoteSet = require('../note_set.js'); -const becca = require('../../../becca/becca.js'); +import Expression = require('./expression'); +import NoteSet = require('../note_set'); +import becca = require('../../../becca/becca'); +import SearchContext = require('../search_context'); class DescendantOfExp extends Expression { - constructor(subExpression) { + private subExpression: Expression; + + constructor(subExpression: Expression) { super(); this.subExpression = subExpression; } - execute(inputNoteSet, executionContext, searchContext) { + execute(inputNoteSet: NoteSet, executionContext: {}, searchContext: SearchContext) { const subInputNoteSet = new NoteSet(Object.values(becca.notes)); const subResNoteSet = this.subExpression.execute(subInputNoteSet, executionContext, searchContext); @@ -25,4 +28,4 @@ class DescendantOfExp extends Expression { } } -module.exports = DescendantOfExp; +export = DescendantOfExp; diff --git a/src/services/search/expressions/expression.js b/src/services/search/expressions/expression.js deleted file mode 100644 index 6ffbc290e0..0000000000 --- a/src/services/search/expressions/expression.js +++ /dev/null @@ -1,17 +0,0 @@ -"use strict"; - -class Expression { - constructor() 
{ - this.name = this.constructor.name; // for DEBUG mode to have expression name as part of dumped JSON - } - - /** - * @param {NoteSet} inputNoteSet - * @param {object} executionContext - * @param {SearchContext} searchContext - * @returns {NoteSet} - */ - execute(inputNoteSet, executionContext, searchContext) {} -} - -module.exports = Expression; diff --git a/src/services/search/expressions/expression.ts b/src/services/search/expressions/expression.ts new file mode 100644 index 0000000000..c74341ba29 --- /dev/null +++ b/src/services/search/expressions/expression.ts @@ -0,0 +1,16 @@ +"use strict"; + +import NoteSet = require("../note_set"); +import SearchContext = require("../search_context"); + +abstract class Expression { + name: string; + + constructor() { + this.name = this.constructor.name; // for DEBUG mode to have expression name as part of dumped JSON + } + + abstract execute(inputNoteSet: NoteSet, executionContext: {}, searchContext: SearchContext): NoteSet; +} + +export = Expression; diff --git a/src/services/search/expressions/is_hidden.js b/src/services/search/expressions/is_hidden.ts similarity index 62% rename from src/services/search/expressions/is_hidden.js rename to src/services/search/expressions/is_hidden.ts index 32f33b5122..81bef22b3e 100644 --- a/src/services/search/expressions/is_hidden.js +++ b/src/services/search/expressions/is_hidden.ts @@ -1,13 +1,14 @@ "use strict"; -const Expression = require('./expression.js'); -const NoteSet = require('../note_set.js'); +import Expression = require('./expression'); +import NoteSet = require('../note_set'); +import SearchContext = require('../search_context'); /** * Note is hidden when all its note paths start in hidden subtree (i.e., the note is not cloned into visible tree) */ class IsHiddenExp extends Expression { - execute(inputNoteSet, executionContext, searchContext) { + execute(inputNoteSet: NoteSet, executionContext: {}, searchContext: SearchContext) { const resultNoteSet = new NoteSet(); for 
(const note of inputNoteSet.notes) { @@ -20,4 +21,4 @@ class IsHiddenExp extends Expression { } } -module.exports = IsHiddenExp; +export = IsHiddenExp; diff --git a/src/services/search/expressions/label_comparison.js b/src/services/search/expressions/label_comparison.ts similarity index 64% rename from src/services/search/expressions/label_comparison.js rename to src/services/search/expressions/label_comparison.ts index 0bc27ff0c6..b455f82ca2 100644 --- a/src/services/search/expressions/label_comparison.js +++ b/src/services/search/expressions/label_comparison.ts @@ -1,11 +1,19 @@ "use strict"; -const Expression = require('./expression.js'); -const NoteSet = require('../note_set.js'); -const becca = require('../../../becca/becca.js'); +import Expression = require('./expression'); +import NoteSet = require('../note_set'); +import becca = require('../../../becca/becca'); +import SearchContext = require('../search_context'); + +type Comparator = (value: string) => boolean; class LabelComparisonExp extends Expression { - constructor(attributeType, attributeName, comparator) { + + private attributeType: string; + private attributeName: string; + private comparator: Comparator; + + constructor(attributeType: string, attributeName: string, comparator: Comparator) { super(); this.attributeType = attributeType; @@ -13,7 +21,7 @@ class LabelComparisonExp extends Expression { this.comparator = comparator; } - execute(inputNoteSet, executionContext, searchContext) { + execute(inputNoteSet: NoteSet, executionContext: {}, searchContext: SearchContext) { const attrs = becca.findAttributes(this.attributeType, this.attributeName); const resultNoteSet = new NoteSet(); @@ -38,4 +46,4 @@ class LabelComparisonExp extends Expression { } } -module.exports = LabelComparisonExp; +export = LabelComparisonExp; diff --git a/src/services/search/expressions/not.js b/src/services/search/expressions/not.js deleted file mode 100644 index 2efd741e26..0000000000 --- 
a/src/services/search/expressions/not.js +++ /dev/null @@ -1,19 +0,0 @@ -"use strict"; - -const Expression = require('./expression.js'); - -class NotExp extends Expression { - constructor(subExpression) { - super(); - - this.subExpression = subExpression; - } - - execute(inputNoteSet, executionContext, searchContext) { - const subNoteSet = this.subExpression.execute(inputNoteSet, executionContext, searchContext); - - return inputNoteSet.minus(subNoteSet); - } -} - -module.exports = NotExp; diff --git a/src/services/search/expressions/not.ts b/src/services/search/expressions/not.ts new file mode 100644 index 0000000000..e1b31c84e7 --- /dev/null +++ b/src/services/search/expressions/not.ts @@ -0,0 +1,23 @@ +"use strict"; + +import NoteSet = require('../note_set'); +import SearchContext = require('../search_context'); +import Expression = require('./expression'); + +class NotExp extends Expression { + private subExpression: Expression; + + constructor(subExpression: Expression) { + super(); + + this.subExpression = subExpression; + } + + execute(inputNoteSet: NoteSet, executionContext: {}, searchContext: SearchContext) { + const subNoteSet = this.subExpression.execute(inputNoteSet, executionContext, searchContext); + + return inputNoteSet.minus(subNoteSet); + } +} + +export = NotExp; diff --git a/src/services/search/expressions/note_content_fulltext.js b/src/services/search/expressions/note_content_fulltext.ts similarity index 73% rename from src/services/search/expressions/note_content_fulltext.js rename to src/services/search/expressions/note_content_fulltext.ts index 1b607b308f..ada9705a27 100644 --- a/src/services/search/expressions/note_content_fulltext.js +++ b/src/services/search/expressions/note_content_fulltext.ts @@ -1,18 +1,22 @@ "use strict"; -const Expression = require('./expression.js'); -const NoteSet = require('../note_set.js'); -const log = require('../../log.js'); -const becca = require('../../../becca/becca.js'); -const protectedSessionService = 
require('../../protected_session.js'); -const striptags = require('striptags'); -const utils = require('../../utils.js'); +import { NoteRow } from "../../../becca/entities/rows"; +import SearchContext = require("../search_context"); + +import Expression = require('./expression'); +import NoteSet = require('../note_set'); +import log = require('../../log'); +import becca = require('../../../becca/becca'); +import protectedSessionService = require('../../protected_session'); +import striptags = require('striptags'); +import utils = require('../../utils'); +import sql = require("../../sql"); const ALLOWED_OPERATORS = ['=', '!=', '*=*', '*=', '=*', '%=']; -const cachedRegexes = {}; +const cachedRegexes: Record<string, RegExp> = {}; -function getRegex(str) { +function getRegex(str: string): RegExp { if (!(str in cachedRegexes)) { cachedRegexes[str] = new RegExp(str, 'ms'); // multiline, dot-all } @@ -20,8 +24,22 @@ function getRegex(str) { return cachedRegexes[str]; } +interface ConstructorOpts { + tokens: string[]; + raw?: boolean; + flatText?: boolean; +} + +type SearchRow = Pick<NoteRow, "noteId" | "type" | "mime" | "content" | "isProtected">; + class NoteContentFulltextExp extends Expression { - constructor(operator, {tokens, raw, flatText}) { + + private operator: string; + private tokens: string[]; + private raw: boolean; + private flatText: boolean; + + constructor(operator: string, {tokens, raw, flatText}: ConstructorOpts) { super(); this.operator = operator; @@ -30,7 +48,7 @@ class NoteContentFulltextExp extends Expression { this.flatText = !!flatText; } - execute(inputNoteSet, executionContext, searchContext) { + execute(inputNoteSet: NoteSet, executionContext: {}, searchContext: SearchContext) { if (!ALLOWED_OPERATORS.includes(this.operator)) { searchContext.addError(`Note content can be searched only with operators: ${ALLOWED_OPERATORS.join(", ")}, operator ${this.operator} given.`); @@ -38,9 +56,8 @@ class NoteContentFulltextExp extends Expression { } const 
resultNoteSet = new NoteSet(); - const sql = require('../../sql.js'); - - for (const row of sql.iterateRows(` + + for (const row of sql.iterateRows<SearchRow>(` SELECT noteId, type, mime, content, isProtected FROM notes JOIN blobs USING (blobId) WHERE type IN ('text', 'code', 'mermaid') AND isDeleted = 0`)) { @@ -51,18 +68,18 @@ class NoteContentFulltextExp extends Expression { return resultNoteSet; } - findInText({noteId, isProtected, content, type, mime}, inputNoteSet, resultNoteSet) { + findInText({noteId, isProtected, content, type, mime}: SearchRow, inputNoteSet: NoteSet, resultNoteSet: NoteSet) { if (!inputNoteSet.hasNoteId(noteId) || !(noteId in becca.notes)) { return; } if (isProtected) { - if (!protectedSessionService.isProtectedSessionAvailable()) { + if (!protectedSessionService.isProtectedSessionAvailable() || !content) { return; } try { - content = protectedSessionService.decryptString(content); + content = protectedSessionService.decryptString(content) || undefined; } catch (e) { log.info(`Cannot decrypt content of note ${noteId}`); return; @@ -89,7 +106,7 @@ class NoteContentFulltextExp extends Expression { } } else { const nonMatchingToken = this.tokens.find(token => - !content.includes(token) && + !content?.includes(token) && ( // in case of default fulltext search, we should consider both title, attrs and content // so e.g. 
"hello world" should match when "hello" is in title and "world" in content @@ -106,7 +123,7 @@ class NoteContentFulltextExp extends Expression { return content; } - preprocessContent(content, type, mime) { + preprocessContent(content: string, type: string, mime: string) { content = utils.normalize(content.toString()); if (type === 'text' && mime === 'text/html') { @@ -120,7 +137,7 @@ class NoteContentFulltextExp extends Expression { return content.trim(); } - stripTags(content) { + stripTags(content: string) { // we want to allow link to preserve URLs: https://github.com/zadam/trilium/issues/2412 // we want to insert space in place of block tags (because they imply text separation) // but we don't want to insert text for typical formatting inline tags which can occur within one word @@ -138,4 +155,4 @@ class NoteContentFulltextExp extends Expression { } } -module.exports = NoteContentFulltextExp; +export = NoteContentFulltextExp; diff --git a/src/services/search/expressions/note_flat_text.js b/src/services/search/expressions/note_flat_text.ts similarity index 84% rename from src/services/search/expressions/note_flat_text.js rename to src/services/search/expressions/note_flat_text.ts index 11cc466ec1..f163e8a7a9 100644 --- a/src/services/search/expressions/note_flat_text.js +++ b/src/services/search/expressions/note_flat_text.ts @@ -1,29 +1,34 @@ "use strict"; -const Expression = require('./expression.js'); -const NoteSet = require('../note_set.js'); -const becca = require('../../../becca/becca.js'); -const utils = require('../../utils.js'); +import BNote = require("../../../becca/entities/bnote"); +import SearchContext = require("../search_context"); + +import Expression = require('./expression'); +import NoteSet = require('../note_set'); +import becca = require('../../../becca/becca'); +import utils = require('../../utils'); class NoteFlatTextExp extends Expression { - constructor(tokens) { + private tokens: string[]; + + constructor(tokens: string[]) { super(); 
this.tokens = tokens; } - execute(inputNoteSet, executionContext, searchContext) { + execute(inputNoteSet: NoteSet, executionContext: any, searchContext: SearchContext) { // has deps on SQL which breaks unit test so needs to be dynamically required - const beccaService = require('../../../becca/becca_service.js'); + const beccaService = require('../../../becca/becca_service'); const resultNoteSet = new NoteSet(); /** - * @param {BNote} note - * @param {string[]} remainingTokens - tokens still needed to be found in the path towards root - * @param {string[]} takenPath - path so far taken towards from candidate note towards the root. - * It contains the suffix fragment of the full note path. + * @param note + * @param remainingTokens - tokens still needed to be found in the path towards root + * @param takenPath - path so far taken towards from candidate note towards the root. + * It contains the suffix fragment of the full note path. */ - const searchPathTowardsRoot = (note, remainingTokens, takenPath) => { + const searchPathTowardsRoot = (note: BNote, remainingTokens: string[], takenPath: string[]) => { if (remainingTokens.length === 0) { // we're done, just build the result const resultPath = this.getNotePath(note, takenPath); @@ -134,12 +139,7 @@ class NoteFlatTextExp extends Expression { return resultNoteSet; } - /** - * @param {BNote} note - * @param {string[]} takenPath - * @returns {string[]} - */ - getNotePath(note, takenPath) { + getNotePath(note: BNote, takenPath: string[]): string[] { if (takenPath.length === 0) { throw new Error("Path is not expected to be empty."); } else if (takenPath.length === 1 && takenPath[0] === note.noteId) { @@ -147,7 +147,7 @@ class NoteFlatTextExp extends Expression { } else { // this note is the closest to root containing the last matching token(s), thus completing the requirements // what's in this note's predecessors does not matter, thus we'll choose the best note path - const topMostMatchingTokenNotePath = 
becca.getNote(takenPath[0]).getBestNotePath(); + const topMostMatchingTokenNotePath = becca.getNote(takenPath[0])?.getBestNotePath() || []; return [...topMostMatchingTokenNotePath, ...takenPath.slice(1)]; } @@ -155,11 +155,8 @@ class NoteFlatTextExp extends Expression { /** * Returns noteIds which have at least one matching tokens - * - * @param {NoteSet} noteSet - * @returns {BNote[]} */ - getCandidateNotes(noteSet) { + getCandidateNotes(noteSet: NoteSet): BNote[] { const candidateNotes = []; for (const note of noteSet.notes) { @@ -175,4 +172,4 @@ class NoteFlatTextExp extends Expression { } } -module.exports = NoteFlatTextExp; +export = NoteFlatTextExp; diff --git a/src/services/search/expressions/or.js b/src/services/search/expressions/or.ts similarity index 62% rename from src/services/search/expressions/or.js rename to src/services/search/expressions/or.ts index 1704b5c60d..f89e9070e8 100644 --- a/src/services/search/expressions/or.js +++ b/src/services/search/expressions/or.ts @@ -1,11 +1,14 @@ "use strict"; -const Expression = require('./expression.js'); -const NoteSet = require('../note_set.js'); -const TrueExp = require('./true.js'); +import Expression = require('./expression'); +import NoteSet = require('../note_set'); +import TrueExp = require('./true'); +import SearchContext = require('../search_context'); class OrExp extends Expression { - static of(subExpressions) { + private subExpressions: Expression[]; + + static of(subExpressions: Expression[]) { subExpressions = subExpressions.filter(exp => !!exp); if (subExpressions.length === 1) { @@ -19,13 +22,13 @@ class OrExp extends Expression { } } - constructor(subExpressions) { + constructor(subExpressions: Expression[]) { super(); this.subExpressions = subExpressions; } - execute(inputNoteSet, executionContext, searchContext) { + execute(inputNoteSet: NoteSet, executionContext: {}, searchContext: SearchContext) { const resultNoteSet = new NoteSet(); for (const subExpression of this.subExpressions) { @@ 
-36,4 +39,4 @@ class OrExp extends Expression { } } -module.exports = OrExp; +export = OrExp; diff --git a/src/services/search/expressions/order_by_and_limit.js b/src/services/search/expressions/order_by_and_limit.ts similarity index 66% rename from src/services/search/expressions/order_by_and_limit.js rename to src/services/search/expressions/order_by_and_limit.ts index c00f361f19..a3a37496f6 100644 --- a/src/services/search/expressions/order_by_and_limit.js +++ b/src/services/search/expressions/order_by_and_limit.ts @@ -1,13 +1,31 @@ "use strict"; -const Expression = require('./expression.js'); -const NoteSet = require('../note_set.js'); +import BNote = require("../../../becca/entities/bnote"); +import NoteSet = require("../note_set"); +import SearchContext = require("../search_context"); +import Expression = require("./expression"); + +interface ValueExtractor { + extract: (note: BNote) => number | string | null; +} + +interface OrderDefinition { + direction?: string; + smaller: number; + larger: number; + valueExtractor: ValueExtractor; +} class OrderByAndLimitExp extends Expression { - constructor(orderDefinitions, limit) { + + private orderDefinitions: OrderDefinition[]; + private limit: number; + subExpression: Expression | null; + + constructor(orderDefinitions: Pick<OrderDefinition, "direction" | "valueExtractor">[], limit?: number) { super(); - this.orderDefinitions = orderDefinitions; + this.orderDefinitions = orderDefinitions as OrderDefinition[]; for (const od of this.orderDefinitions) { od.smaller = od.direction === "asc" ? 
-1 : 1; @@ -16,11 +34,14 @@ class OrderByAndLimitExp extends Expression { this.limit = limit || 0; - /** @type {Expression} */ this.subExpression = null; // it's expected to be set after construction } - execute(inputNoteSet, executionContext, searchContext) { + execute(inputNoteSet: NoteSet, executionContext: {}, searchContext: SearchContext) { + if (!this.subExpression) { + throw new Error("Missing subexpression"); + } + let {notes} = this.subExpression.execute(inputNoteSet, executionContext, searchContext); notes.sort((a, b) => { @@ -48,7 +69,8 @@ class OrderByAndLimitExp extends Expression { } // if both are numbers, then parse them for numerical comparison - if (this.isNumber(valA) && this.isNumber(valB)) { + if (typeof valA === "string" && this.isNumber(valA) && + typeof valB === "string" && this.isNumber(valB)) { valA = parseFloat(valA); valB = parseFloat(valB); } @@ -77,16 +99,16 @@ class OrderByAndLimitExp extends Expression { return noteSet; } - isNumber(x) { + isNumber(x: number | string) { if (typeof x === 'number') { return true; } else if (typeof x === 'string') { // isNaN will return false for blank string - return x.trim() !== "" && !isNaN(x); + return x.trim() !== "" && !isNaN(parseInt(x, 10)); } else { return false; } } } -module.exports = OrderByAndLimitExp; +export = OrderByAndLimitExp; diff --git a/src/services/search/expressions/parent_of.js b/src/services/search/expressions/parent_of.ts similarity index 68% rename from src/services/search/expressions/parent_of.js rename to src/services/search/expressions/parent_of.ts index 2243d35401..bd7b9d3046 100644 --- a/src/services/search/expressions/parent_of.js +++ b/src/services/search/expressions/parent_of.ts @@ -1,16 +1,19 @@ "use strict"; -const Expression = require('./expression.js'); -const NoteSet = require('../note_set.js'); +import Expression = require('./expression'); +import NoteSet = require('../note_set'); +import SearchContext = require('../search_context'); class ParentOfExp extends 
Expression { - constructor(subExpression) { + private subExpression: Expression; + + constructor(subExpression: Expression) { super(); this.subExpression = subExpression; } - execute(inputNoteSet, executionContext, searchContext) { + execute(inputNoteSet: NoteSet, executionContext: {}, searchContext: SearchContext) { const subInputNoteSet = new NoteSet(); for (const note of inputNoteSet.notes) { @@ -33,4 +36,4 @@ class ParentOfExp extends Expression { } } -module.exports = ParentOfExp; +export = ParentOfExp; diff --git a/src/services/search/expressions/property_comparison.js b/src/services/search/expressions/property_comparison.ts similarity index 74% rename from src/services/search/expressions/property_comparison.js rename to src/services/search/expressions/property_comparison.ts index 9b014a642b..843b5a862e 100644 --- a/src/services/search/expressions/property_comparison.js +++ b/src/services/search/expressions/property_comparison.ts @@ -1,14 +1,14 @@ "use strict"; -const Expression = require('./expression.js'); -const NoteSet = require('../note_set.js'); -const buildComparator = require('../services/build_comparator.js'); +import Expression = require('./expression'); +import NoteSet = require('../note_set'); +import buildComparator = require('../services/build_comparator'); /** * Search string is lower cased for case-insensitive comparison. But when retrieving properties, * we need the case-sensitive form, so we have this translation object. 
*/ -const PROP_MAPPING = { +const PROP_MAPPING: Record<string, string> = { "noteid": "noteId", "title": "title", "type": "type", @@ -36,12 +36,22 @@ const PROP_MAPPING = { "revisioncount": "revisionCount" }; +interface SearchContext { + dbLoadNeeded?: boolean; +} + class PropertyComparisonExp extends Expression { - static isProperty(name) { + + private propertyName: string; + private operator: string; + private comparedValue: string; + private comparator; + + static isProperty(name: string) { return name in PROP_MAPPING; } - constructor(searchContext, propertyName, operator, comparedValue) { + constructor(searchContext: SearchContext, propertyName: string, operator: string, comparedValue: string) { super(); this.propertyName = PROP_MAPPING[propertyName]; @@ -54,11 +64,11 @@ class PropertyComparisonExp extends Expression { } } - execute(inputNoteSet, executionContext, searchContext) { + execute(inputNoteSet: NoteSet, executionContext: {}, searchContext: SearchContext) { const resNoteSet = new NoteSet(); for (const note of inputNoteSet.notes) { - let value = note[this.propertyName]; + let value = (note as any)[this.propertyName]; if (value !== undefined && value !== null && typeof value !== 'string') { value = value.toString(); @@ -68,7 +78,7 @@ class PropertyComparisonExp extends Expression { value = value.toLowerCase(); } - if (this.comparator(value)) { + if (this.comparator && this.comparator(value)) { resNoteSet.add(note); } } @@ -77,4 +87,4 @@ class PropertyComparisonExp extends Expression { } } -module.exports = PropertyComparisonExp; +export = PropertyComparisonExp; diff --git a/src/services/search/expressions/relation_where.js b/src/services/search/expressions/relation_where.ts similarity index 72% rename from src/services/search/expressions/relation_where.js rename to src/services/search/expressions/relation_where.ts index dee56b1dda..9f33dcecf7 100644 --- a/src/services/search/expressions/relation_where.js +++ 
b/src/services/search/expressions/relation_where.ts @@ -1,18 +1,22 @@ "use strict"; -const Expression = require('./expression.js'); -const NoteSet = require('../note_set.js'); -const becca = require('../../../becca/becca.js'); +import Expression = require('./expression'); +import NoteSet = require('../note_set'); +import becca = require('../../../becca/becca'); +import SearchContext = require('../search_context'); class RelationWhereExp extends Expression { - constructor(relationName, subExpression) { + private relationName: string; + private subExpression: Expression; + + constructor(relationName: string, subExpression: Expression) { super(); this.relationName = relationName; this.subExpression = subExpression; } - execute(inputNoteSet, executionContext, searchContext) { + execute(inputNoteSet: NoteSet, executionContext: {}, searchContext: SearchContext) { const candidateNoteSet = new NoteSet(); for (const attr of becca.findAttributes('relation', this.relationName)) { @@ -38,4 +42,4 @@ class RelationWhereExp extends Expression { } } -module.exports = RelationWhereExp; +export = RelationWhereExp; diff --git a/src/services/search/expressions/true.js b/src/services/search/expressions/true.js deleted file mode 100644 index 7dbf35c27c..0000000000 --- a/src/services/search/expressions/true.js +++ /dev/null @@ -1,11 +0,0 @@ -"use strict"; - -const Expression = require('./expression.js'); - -class TrueExp extends Expression { - execute(inputNoteSet, executionContext, searchContext) { - return inputNoteSet; - } -} - -module.exports = TrueExp; diff --git a/src/services/search/expressions/true.ts b/src/services/search/expressions/true.ts new file mode 100644 index 0000000000..a53d1afde3 --- /dev/null +++ b/src/services/search/expressions/true.ts @@ -0,0 +1,14 @@ +"use strict"; + +import NoteSet = require("../note_set"); +import SearchContext = require("../search_context"); + +import Expression = require('./expression'); + +class TrueExp extends Expression { + 
execute(inputNoteSet: NoteSet, executionContext: {}, searchContext: SearchContext): NoteSet { + return inputNoteSet; + } +} + +export = TrueExp; diff --git a/src/services/search/note_set.js b/src/services/search/note_set.ts similarity index 69% rename from src/services/search/note_set.js rename to src/services/search/note_set.ts index 82c236cf67..47c644c38c 100644 --- a/src/services/search/note_set.js +++ b/src/services/search/note_set.ts @@ -1,40 +1,46 @@ "use strict"; +import BNote = require("../../becca/entities/bnote"); + class NoteSet { - constructor(notes = []) { - /** @type {BNote[]} */ + + private noteIdSet: Set<string>; + + notes: BNote[]; + sorted: boolean; + + constructor(notes: BNote[] = []) { this.notes = notes; this.noteIdSet = new Set(notes.map(note => note.noteId)); - /** @type {boolean} */ this.sorted = false; } - add(note) { + add(note: BNote) { if (!this.hasNote(note)) { this.notes.push(note); this.noteIdSet.add(note.noteId); } } - addAll(notes) { + addAll(notes: BNote[]) { for (const note of notes) { this.add(note); } } - hasNote(note) { + hasNote(note: BNote) { return this.hasNoteId(note.noteId); } - hasNoteId(noteId) { + hasNoteId(noteId: string) { return this.noteIdSet.has(noteId); } - mergeIn(anotherNoteSet) { + mergeIn(anotherNoteSet: NoteSet) { this.addAll(anotherNoteSet.notes); } - minus(anotherNoteSet) { + minus(anotherNoteSet: NoteSet) { const newNoteSet = new NoteSet(); for (const note of this.notes) { @@ -46,7 +52,7 @@ class NoteSet { return newNoteSet; } - intersection(anotherNoteSet) { + intersection(anotherNoteSet: NoteSet) { const newNoteSet = new NoteSet(); for (const note of this.notes) { @@ -59,4 +65,4 @@ class NoteSet { } } -module.exports = NoteSet; +export = NoteSet; diff --git a/src/services/search/search_context.js b/src/services/search/search_context.ts similarity index 69% rename from src/services/search/search_context.js rename to src/services/search/search_context.ts index 6827e38415..f7c1e2198b 100644 --- 
a/src/services/search/search_context.js +++ b/src/services/search/search_context.ts @@ -1,9 +1,29 @@ "use strict"; -const hoistedNoteService = require('../hoisted_note.js'); +import hoistedNoteService = require('../hoisted_note'); +import { SearchParams } from './services/types'; class SearchContext { - constructor(params = {}) { + + fastSearch: boolean; + includeArchivedNotes: boolean; + includeHiddenNotes: boolean; + ignoreHoistedNote: boolean; + ancestorNoteId?: string; + ancestorDepth?: string; + orderBy?: string; + orderDirection?: string; + limit?: number | null; + debug?: boolean; + debugInfo: {} | null; + fuzzyAttributeSearch: boolean; + highlightedTokens: string[]; + originalQuery: string; + fulltextQuery: string; + dbLoadNeeded: boolean; + private error: string | null; + + constructor(params: SearchParams = {}) { this.fastSearch = !!params.fastSearch; this.includeArchivedNotes = !!params.includeArchivedNotes; this.includeHiddenNotes = !!params.includeHiddenNotes; @@ -32,7 +52,7 @@ class SearchContext { this.error = null; } - addError(error) { + addError(error: string) { // we record only the first error, subsequent ones are usually a consequence of the first if (!this.error) { this.error = error; @@ -48,4 +68,4 @@ class SearchContext { } } -module.exports = SearchContext; +export = SearchContext; diff --git a/src/services/search/search_result.js b/src/services/search/search_result.ts similarity index 77% rename from src/services/search/search_result.js rename to src/services/search/search_result.ts index 3a094bc8ce..cf651d58ed 100644 --- a/src/services/search/search_result.js +++ b/src/services/search/search_result.ts @@ -1,12 +1,18 @@ "use strict"; -const beccaService = require('../../becca/becca_service.js'); -const becca = require('../../becca/becca.js'); +import beccaService = require('../../becca/becca_service'); +import becca = require('../../becca/becca'); class SearchResult { - constructor(notePathArray) { + notePathArray: string[]; + score: 
number; + notePathTitle: string; + highlightedNotePathTitle?: string; + + constructor(notePathArray: string[]) { this.notePathArray = notePathArray; this.notePathTitle = beccaService.getNoteTitleForPath(notePathArray); + this.score = 0; } get notePath() { @@ -17,7 +23,7 @@ class SearchResult { return this.notePathArray[this.notePathArray.length - 1]; } - computeScore(fulltextQuery, tokens) { + computeScore(fulltextQuery: string, tokens: string[]) { this.score = 0; const note = becca.notes[this.noteId]; @@ -42,9 +48,11 @@ class SearchResult { } } - addScoreForStrings(tokens, str, factor) { + addScoreForStrings(tokens: string[], str: string, factor: number) { const chunks = str.toLowerCase().split(" "); + this.score = 0; + for (const chunk of chunks) { for (const token of tokens) { if (chunk === token) { @@ -59,4 +67,4 @@ class SearchResult { } } -module.exports = SearchResult; +export = SearchResult; diff --git a/src/services/search/services/build_comparator.js b/src/services/search/services/build_comparator.ts similarity index 51% rename from src/services/search/services/build_comparator.js rename to src/services/search/services/build_comparator.ts index 6d3ba463a4..426bfa1959 100644 --- a/src/services/search/services/build_comparator.js +++ b/src/services/search/services/build_comparator.ts @@ -1,6 +1,6 @@ -const cachedRegexes = {}; +const cachedRegexes: Record<string, RegExp> = {}; -function getRegex(str) { +function getRegex(str: string) { if (!(str in cachedRegexes)) { cachedRegexes[str] = new RegExp(str); } @@ -8,31 +8,36 @@ function getRegex(str) { return cachedRegexes[str]; } -const stringComparators = { +type Comparator<T> = (comparedValue: T) => ((val: string) => boolean); + +const stringComparators: Record<string, Comparator<string>> = { "=": comparedValue => (val => val === comparedValue), "!=": comparedValue => (val => val !== comparedValue), ">": comparedValue => (val => val > comparedValue), ">=": comparedValue => (val => val >= comparedValue), "<": 
comparedValue => (val => val < comparedValue), "<=": comparedValue => (val => val <= comparedValue), - "*=": comparedValue => (val => val && val.endsWith(comparedValue)), - "=*": comparedValue => (val => val && val.startsWith(comparedValue)), - "*=*": comparedValue => (val => val && val.includes(comparedValue)), - "%=": comparedValue => (val => val && !!getRegex(comparedValue).test(val)), + "*=": comparedValue => (val => !!val && val.endsWith(comparedValue)), + "=*": comparedValue => (val => !!val && val.startsWith(comparedValue)), + "*=*": comparedValue => (val => !!val && val.includes(comparedValue)), + "%=": comparedValue => (val => !!val && !!getRegex(comparedValue).test(val)), }; -const numericComparators = { +const numericComparators: Record<string, Comparator<number>> = { ">": comparedValue => (val => parseFloat(val) > comparedValue), ">=": comparedValue => (val => parseFloat(val) >= comparedValue), "<": comparedValue => (val => parseFloat(val) < comparedValue), "<=": comparedValue => (val => parseFloat(val) <= comparedValue) }; -function buildComparator(operator, comparedValue) { +function buildComparator(operator: string, comparedValue: string) { comparedValue = comparedValue.toLowerCase(); - if (operator in numericComparators && !isNaN(comparedValue)) { - return numericComparators[operator](parseFloat(comparedValue)); + if (operator in numericComparators) { + const floatValue = parseFloat(comparedValue); + if (!isNaN(floatValue)) { + return numericComparators[operator](floatValue); + } } if (operator in stringComparators) { @@ -40,4 +45,4 @@ function buildComparator(operator, comparedValue) { } } -module.exports = buildComparator; +export = buildComparator; diff --git a/src/services/search/services/handle_parens.js b/src/services/search/services/handle_parens.ts similarity index 64% rename from src/services/search/services/handle_parens.js rename to src/services/search/services/handle_parens.ts index 14ff58b0ad..30d7e03fcc 100644 --- 
a/src/services/search/services/handle_parens.js +++ b/src/services/search/services/handle_parens.ts @@ -1,13 +1,15 @@ +import { TokenData } from "./types"; + /** * This will create a recursive object from a list of tokens - tokens between parenthesis are grouped in a single array */ -function handleParens(tokens) { +function handleParens(tokens: (TokenData | TokenData[])[]) { if (tokens.length === 0) { return []; } while (true) { - const leftIdx = tokens.findIndex(token => token.token === '('); + const leftIdx = tokens.findIndex(token => "token" in token && token.token === '('); if (leftIdx === -1) { return tokens; @@ -17,13 +19,18 @@ function handleParens(tokens) { let parensLevel = 0 for (rightIdx = leftIdx; rightIdx < tokens.length; rightIdx++) { - if (tokens[rightIdx].token === ')') { + const token = tokens[rightIdx]; + if (!("token" in token)) { + continue; + } + + if (token.token === ')') { parensLevel--; if (parensLevel === 0) { break; } - } else if (tokens[rightIdx].token === '(') { + } else if (token.token === '(') { parensLevel++; } } @@ -36,8 +43,8 @@ function handleParens(tokens) { ...tokens.slice(0, leftIdx), handleParens(tokens.slice(leftIdx + 1, rightIdx)), ...tokens.slice(rightIdx + 1) - ]; + ] as (TokenData | TokenData[])[]; } } -module.exports = handleParens; +export = handleParens; diff --git a/src/services/search/services/lex.js b/src/services/search/services/lex.ts similarity index 89% rename from src/services/search/services/lex.js rename to src/services/search/services/lex.ts index ddee0840f7..516cf71df6 100644 --- a/src/services/search/services/lex.js +++ b/src/services/search/services/lex.ts @@ -1,16 +1,17 @@ -function lex(str) { +import { TokenData } from "./types"; + +function lex(str: string) { str = str.toLowerCase(); let fulltextQuery = ""; - const fulltextTokens = []; - const expressionTokens = []; + const fulltextTokens: TokenData[] = []; + const expressionTokens: TokenData[] = []; - /** @type {boolean|string} */ - let quotes = 
false; // otherwise contains used quote - ', " or ` + let quotes: boolean | string = false; // otherwise contains used quote - ', " or ` let fulltextEnded = false; let currentWord = ''; - function isSymbolAnOperator(chr) { + function isSymbolAnOperator(chr: string) { return ['=', '*', '>', '<', '!', "-", "+", '%', ','].includes(chr); } @@ -23,12 +24,12 @@ function lex(str) { } } - function finishWord(endIndex, createAlsoForEmptyWords = false) { + function finishWord(endIndex: number, createAlsoForEmptyWords = false) { if (currentWord === '' && !createAlsoForEmptyWords) { return; } - const rec = { + const rec: TokenData = { token: currentWord, inQuotes: !!quotes, startIndex: endIndex - currentWord.length + 1, @@ -146,4 +147,4 @@ function lex(str) { } } -module.exports = lex; +export = lex; diff --git a/src/services/search/services/parse.js b/src/services/search/services/parse.ts similarity index 72% rename from src/services/search/services/parse.js rename to src/services/search/services/parse.ts index b31ea265d4..de943fdd22 100644 --- a/src/services/search/services/parse.js +++ b/src/services/search/services/parse.ts @@ -1,28 +1,31 @@ "use strict"; -const dayjs = require("dayjs"); -const AndExp = require('../expressions/and.js'); -const OrExp = require('../expressions/or.js'); -const NotExp = require('../expressions/not.js'); -const ChildOfExp = require('../expressions/child_of.js'); -const DescendantOfExp = require('../expressions/descendant_of.js'); -const ParentOfExp = require('../expressions/parent_of.js'); -const RelationWhereExp = require('../expressions/relation_where.js'); -const PropertyComparisonExp = require('../expressions/property_comparison.js'); -const AttributeExistsExp = require('../expressions/attribute_exists.js'); -const LabelComparisonExp = require('../expressions/label_comparison.js'); -const NoteFlatTextExp = require('../expressions/note_flat_text.js'); -const NoteContentFulltextExp = require('../expressions/note_content_fulltext.js'); -const 
OrderByAndLimitExp = require('../expressions/order_by_and_limit.js'); -const AncestorExp = require('../expressions/ancestor.js'); -const buildComparator = require('./build_comparator.js'); -const ValueExtractor = require('../value_extractor.js'); -const utils = require('../../utils.js'); -const TrueExp = require('../expressions/true.js'); -const IsHiddenExp = require('../expressions/is_hidden.js'); - -function getFulltext(tokens, searchContext) { - tokens = tokens.map(t => utils.removeDiacritic(t.token)); +import dayjs = require("dayjs"); +import AndExp = require('../expressions/and'); +import OrExp = require('../expressions/or'); +import NotExp = require('../expressions/not'); +import ChildOfExp = require('../expressions/child_of'); +import DescendantOfExp = require('../expressions/descendant_of'); +import ParentOfExp = require('../expressions/parent_of'); +import RelationWhereExp = require('../expressions/relation_where'); +import PropertyComparisonExp = require('../expressions/property_comparison'); +import AttributeExistsExp = require('../expressions/attribute_exists'); +import LabelComparisonExp = require('../expressions/label_comparison'); +import NoteFlatTextExp = require('../expressions/note_flat_text'); +import NoteContentFulltextExp = require('../expressions/note_content_fulltext'); +import OrderByAndLimitExp = require('../expressions/order_by_and_limit'); +import AncestorExp = require('../expressions/ancestor'); +import buildComparator = require('./build_comparator'); +import ValueExtractor = require('../value_extractor'); +import utils = require('../../utils'); +import TrueExp = require('../expressions/true'); +import IsHiddenExp = require('../expressions/is_hidden'); +import SearchContext = require("../search_context"); +import { TokenData } from "./types"; +import Expression = require("../expressions/expression"); + +function getFulltext(_tokens: TokenData[], searchContext: SearchContext) { + const tokens: string[] = _tokens.map(t => 
utils.removeDiacritic(t.token)); searchContext.highlightedTokens.push(...tokens); @@ -54,7 +57,7 @@ const OPERATORS = [ "%=" ]; -function isOperator(token) { +function isOperator(token: TokenData) { if (Array.isArray(token)) { return false; } @@ -62,20 +65,20 @@ function isOperator(token) { return OPERATORS.includes(token.token); } -function getExpression(tokens, searchContext, level = 0) { +function getExpression(tokens: TokenData[], searchContext: SearchContext, level = 0) { if (tokens.length === 0) { return null; } - const expressions = []; - let op = null; + const expressions: Expression[] = []; + let op: string | null = null; - let i; + let i: number; - function context(i) { + function context(i: number) { let {startIndex, endIndex} = tokens[i]; - startIndex = Math.max(0, startIndex - 20); - endIndex = Math.min(searchContext.originalQuery.length, endIndex + 20); + startIndex = Math.max(0, (startIndex || 0) - 20); + endIndex = Math.min(searchContext.originalQuery.length, (endIndex || Number.MAX_SAFE_INTEGER) + 20); return `"${startIndex !== 0 ? "..." : ""}${searchContext.originalQuery.substr(startIndex, endIndex - startIndex)}${endIndex !== searchContext.originalQuery.length ? "..." : ""}"`; } @@ -133,7 +136,7 @@ function getExpression(tokens, searchContext, level = 0) { return date.format(format); } - function parseNoteProperty() { + function parseNoteProperty(): Expression | undefined | null { if (tokens[i].token !== '.') { searchContext.addError('Expected "." 
to separate field path'); return; @@ -161,19 +164,25 @@ function getExpression(tokens, searchContext, level = 0) { if (tokens[i].token === 'parents') { i += 1; - return new ChildOfExp(parseNoteProperty()); + const expression = parseNoteProperty(); + if (!expression) { return; } + return new ChildOfExp(expression); } if (tokens[i].token === 'children') { i += 1; - return new ParentOfExp(parseNoteProperty()); + const expression = parseNoteProperty(); + if (!expression) { return; } + return new ParentOfExp(expression); } if (tokens[i].token === 'ancestors') { i += 1; - return new DescendantOfExp(parseNoteProperty()); + const expression = parseNoteProperty(); + if (!expression) { return; } + return new DescendantOfExp(expression); } if (tokens[i].token === 'labels') { @@ -219,6 +228,10 @@ function getExpression(tokens, searchContext, level = 0) { i += 2; const comparedValue = resolveConstantOperand(); + if (!comparedValue) { + searchContext.addError(`Unresolved constant operand.`); + return; + } return new PropertyComparisonExp(searchContext, propertyName, operator, comparedValue); } @@ -226,7 +239,7 @@ function getExpression(tokens, searchContext, level = 0) { searchContext.addError(`Unrecognized note property "${tokens[i].token}" in ${context(i)}`); } - function parseAttribute(name) { + function parseAttribute(name: string) { const isLabel = name.startsWith('#'); name = name.substr(1); @@ -239,10 +252,10 @@ function getExpression(tokens, searchContext, level = 0) { const subExp = isLabel ? parseLabel(name) : parseRelation(name); - return isNegated ? new NotExp(subExp) : subExp; + return subExp && isNegated ? 
new NotExp(subExp) : subExp; } - function parseLabel(labelName) { + function parseLabel(labelName: string) { searchContext.highlightedTokens.push(labelName); if (i < tokens.length - 2 && isOperator(tokens[i + 1])) { @@ -274,13 +287,15 @@ function getExpression(tokens, searchContext, level = 0) { } } - function parseRelation(relationName) { + function parseRelation(relationName: string) { searchContext.highlightedTokens.push(relationName); if (i < tokens.length - 2 && tokens[i + 1].token === '.') { i += 1; - return new RelationWhereExp(relationName, parseNoteProperty()); + const expression = parseNoteProperty(); + if (!expression) { return; } + return new RelationWhereExp(relationName, expression); } else if (i < tokens.length - 2 && isOperator(tokens[i + 1])) { searchContext.addError(`Relation can be compared only with property, e.g. ~relation.title=hello in ${context(i)}`); @@ -293,7 +308,10 @@ function getExpression(tokens, searchContext, level = 0) { } function parseOrderByAndLimit() { - const orderDefinitions = []; + const orderDefinitions: { + valueExtractor: ValueExtractor, + direction: string + }[] = []; let limit; if (tokens[i].token === 'orderby') { @@ -316,8 +334,9 @@ function getExpression(tokens, searchContext, level = 0) { const valueExtractor = new ValueExtractor(searchContext, propertyPath); - if (valueExtractor.validate()) { - searchContext.addError(valueExtractor.validate()); + const validationError = valueExtractor.validate(); + if (validationError) { + searchContext.addError(validationError); } orderDefinitions.push({ @@ -348,7 +367,10 @@ function getExpression(tokens, searchContext, level = 0) { for (i = 0; i < tokens.length; i++) { if (Array.isArray(tokens[i])) { - expressions.push(getExpression(tokens[i], searchContext, level++)); + const expression = getExpression(tokens[i] as unknown as TokenData[], searchContext, level++); + if (expression) { + expressions.push(expression); + } continue; } @@ -359,7 +381,10 @@ function getExpression(tokens, 
searchContext, level = 0) { } if (token.startsWith('#') || token.startsWith('~')) { - expressions.push(parseAttribute(token)); + const attribute = parseAttribute(token); + if (attribute) { + expressions.push(attribute); + } } else if (['orderby', 'limit'].includes(token)) { if (level !== 0) { @@ -384,12 +409,17 @@ function getExpression(tokens, searchContext, level = 0) { continue; } - expressions.push(new NotExp(getExpression(tokens[i], searchContext, level++))); + const tokenArray = tokens[i] as unknown as TokenData[]; + const expression = getExpression(tokenArray, searchContext, level++); + if (!expression) { return; } + expressions.push(new NotExp(expression)); } else if (token === 'note') { i++; - expressions.push(parseNoteProperty()); + const expression = parseNoteProperty(); + if (!expression) { return; } + expressions.push(expression); continue; } @@ -416,13 +446,18 @@ function getExpression(tokens, searchContext, level = 0) { return getAggregateExpression(); } -function parse({fulltextTokens, expressionTokens, searchContext}) { - let expression; +function parse({fulltextTokens, expressionTokens, searchContext}: { + fulltextTokens: TokenData[], + expressionTokens: (TokenData | TokenData[])[], + searchContext: SearchContext, + originalQuery: string +}) { + let expression: Expression | undefined | null; try { - expression = getExpression(expressionTokens, searchContext); + expression = getExpression(expressionTokens as TokenData[], searchContext); } - catch (e) { + catch (e: any) { searchContext.addError(e.message); expression = new TrueExp(); @@ -441,15 +476,15 @@ function parse({fulltextTokens, expressionTokens, searchContext}) { exp = new OrderByAndLimitExp([{ valueExtractor: new ValueExtractor(searchContext, ['note', searchContext.orderBy]), direction: searchContext.orderDirection - }], searchContext.limit); + }], searchContext.limit || undefined); - exp.subExpression = filterExp; + (exp as any).subExpression = filterExp; } return exp; } -function 
getAncestorExp({ancestorNoteId, ancestorDepth, includeHiddenNotes}) { +function getAncestorExp({ancestorNoteId, ancestorDepth, includeHiddenNotes}: SearchContext) { if (ancestorNoteId && ancestorNoteId !== 'root') { return new AncestorExp(ancestorNoteId, ancestorDepth); } else if (!includeHiddenNotes) { @@ -459,4 +494,4 @@ function getAncestorExp({ancestorNoteId, ancestorDepth, includeHiddenNotes}) { } } -module.exports = parse; +export = parse; diff --git a/src/services/search/services/search.js b/src/services/search/services/search.ts similarity index 78% rename from src/services/search/services/search.js rename to src/services/search/services/search.ts index e1ceed0860..7461d5b4ee 100644 --- a/src/services/search/services/search.js +++ b/src/services/search/services/search.ts @@ -1,22 +1,28 @@ "use strict"; -const normalizeString = require("normalize-strings"); -const lex = require('./lex.js'); -const handleParens = require('./handle_parens.js'); -const parse = require('./parse.js'); -const SearchResult = require('../search_result.js'); -const SearchContext = require('../search_context.js'); -const becca = require('../../../becca/becca.js'); -const beccaService = require('../../../becca/becca_service.js'); -const utils = require('../../utils.js'); -const log = require('../../log.js'); -const hoistedNoteService = require('../../hoisted_note.js'); - -function searchFromNote(note) { - let searchResultNoteIds, highlightedTokens; +import normalizeString = require("normalize-strings"); +import lex = require('./lex'); +import handleParens = require('./handle_parens'); +import parse = require('./parse'); +import SearchResult = require('../search_result'); +import SearchContext = require('../search_context'); +import becca = require('../../../becca/becca'); +import beccaService = require('../../../becca/becca_service'); +import utils = require('../../utils'); +import log = require('../../log'); +import hoistedNoteService = require('../../hoisted_note'); +import BNote = 
require("../../../becca/entities/bnote"); +import BAttribute = require("../../../becca/entities/battribute"); +import { SearchParams, TokenData } from "./types"; +import Expression = require("../expressions/expression"); +import sql = require("../../sql"); + +function searchFromNote(note: BNote) { + let searchResultNoteIds; + let highlightedTokens: string[]; const searchScript = note.getRelationValue('searchScript'); - const searchString = note.getLabelValue('searchString'); + const searchString = note.getLabelValue('searchString') || ""; let error = null; if (searchScript) { @@ -25,12 +31,12 @@ function searchFromNote(note) { } else { const searchContext = new SearchContext({ fastSearch: note.hasLabel('fastSearch'), - ancestorNoteId: note.getRelationValue('ancestor'), - ancestorDepth: note.getLabelValue('ancestorDepth'), + ancestorNoteId: note.getRelationValue('ancestor') || undefined, + ancestorDepth: note.getLabelValue('ancestorDepth') || undefined, includeArchivedNotes: note.hasLabel('includeArchivedNotes'), - orderBy: note.getLabelValue('orderBy'), - orderDirection: note.getLabelValue('orderDirection'), - limit: note.getLabelValue('limit'), + orderBy: note.getLabelValue('orderBy') || undefined, + orderDirection: note.getLabelValue('orderDirection') || undefined, + limit: parseInt(note.getLabelValue('limit') || "0", 10), debug: note.hasLabel('debug'), fuzzyAttributeSearch: false }); @@ -51,7 +57,7 @@ function searchFromNote(note) { }; } -function searchFromRelation(note, relationName) { +function searchFromRelation(note: BNote, relationName: string) { const scriptNote = note.getRelationTarget(relationName); if (!scriptNote) { @@ -72,7 +78,7 @@ function searchFromRelation(note, relationName) { return []; } - const scriptService = require('../../script.js'); // to avoid circular dependency + const scriptService = require('../../script'); // TODO: to avoid circular dependency const result = scriptService.executeNote(scriptNote, {originEntity: note}); if 
(!Array.isArray(result)) { @@ -90,18 +96,21 @@ function searchFromRelation(note, relationName) { } function loadNeededInfoFromDatabase() { - const sql = require('../../sql.js'); - /** * This complex structure is needed to calculate total occupied space by a note. Several object instances * (note, revisions, attachments) can point to a single blobId, and thus the blob size should count towards the total * only once. * - * @var {Object.<string, Object.<string, int>>} - noteId => { blobId => blobSize } + * noteId => { blobId => blobSize } */ - const noteBlobs = {}; + const noteBlobs: Record<string, Record<string, number>> = {}; - const noteContentLengths = sql.getRows(` + type NoteContentLengthsRow = { + noteId: string; + blobId: string; + length: number; + }; + const noteContentLengths = sql.getRows<NoteContentLengthsRow>(` SELECT noteId, blobId, @@ -122,7 +131,12 @@ function loadNeededInfoFromDatabase() { noteBlobs[noteId] = { [blobId]: length }; } - const attachmentContentLengths = sql.getRows(` + type AttachmentContentLengthsRow = { + noteId: string; + blobId: string; + length: number; + }; + const attachmentContentLengths = sql.getRows<AttachmentContentLengthsRow>(` SELECT ownerId AS noteId, attachments.blobId, @@ -151,7 +165,13 @@ function loadNeededInfoFromDatabase() { becca.notes[noteId].contentAndAttachmentsSize = Object.values(noteBlobs[noteId]).reduce((acc, size) => acc + size, 0); } - const revisionContentLengths = sql.getRows(` + type RevisionRow = { + noteId: string; + blobId: string; + length: number; + isNoteRevision: true; + }; + const revisionContentLengths = sql.getRows<RevisionRow>(` SELECT noteId, revisions.blobId, @@ -186,8 +206,11 @@ function loadNeededInfoFromDatabase() { noteBlobs[noteId][blobId] = length; - if (isNoteRevision) { - becca.notes[noteId].revisionCount++; + if (isNoteRevision) { + const noteRevision = becca.notes[noteId]; + if (noteRevision && noteRevision.revisionCount) { + noteRevision.revisionCount++; + } } } @@ -196,20 +219,16 
@@ function loadNeededInfoFromDatabase() { } } -/** - * @param {Expression} expression - * @param {SearchContext} searchContext - * @returns {SearchResult[]} - */ -function findResultsWithExpression(expression, searchContext) { +function findResultsWithExpression(expression: Expression, searchContext: SearchContext): SearchResult[] { if (searchContext.dbLoadNeeded) { loadNeededInfoFromDatabase(); } const allNoteSet = becca.getAllNoteSet(); + const noteIdToNotePath: Record<string, string[]> = {}; const executionContext = { - noteIdToNotePath: {} + noteIdToNotePath }; const noteSet = expression.execute(allNoteSet, executionContext, searchContext); @@ -250,16 +269,16 @@ function findResultsWithExpression(expression, searchContext) { return searchResults; } -function parseQueryToExpression(query, searchContext) { +function parseQueryToExpression(query: string, searchContext: SearchContext) { const {fulltextQuery, fulltextTokens, expressionTokens} = lex(query); searchContext.fulltextQuery = fulltextQuery; - let structuredExpressionTokens; + let structuredExpressionTokens: (TokenData | TokenData[])[]; try { structuredExpressionTokens = handleParens(expressionTokens); } - catch (e) { + catch (e: any) { structuredExpressionTokens = []; searchContext.addError(e.message); } @@ -284,23 +303,13 @@ function parseQueryToExpression(query, searchContext) { return expression; } -/** - * @param {string} query - * @param {object} params - see SearchContext - * @returns {BNote[]} - */ -function searchNotes(query, params = {}) { +function searchNotes(query: string, params: SearchParams = {}): BNote[] { const searchResults = findResultsWithQuery(query, new SearchContext(params)); return searchResults.map(sr => becca.notes[sr.noteId]); } -/** - * @param {string} query - * @param {SearchContext} searchContext - * @returns {SearchResult[]} - */ -function findResultsWithQuery(query, searchContext) { +function findResultsWithQuery(query: string, searchContext: SearchContext): SearchResult[] 
{ query = query || ""; searchContext.originalQuery = query; @@ -313,18 +322,13 @@ function findResultsWithQuery(query, searchContext) { return findResultsWithExpression(expression, searchContext); } -/** - * @param {string} query - * @param {SearchContext} searchContext - * @returns {BNote|null} - */ -function findFirstNoteWithQuery(query, searchContext) { +function findFirstNoteWithQuery(query: string, searchContext: SearchContext): BNote | null { const searchResults = findResultsWithQuery(query, searchContext); return searchResults.length > 0 ? becca.notes[searchResults[0].noteId] : null; } -function searchNotesForAutocomplete(query) { +function searchNotesForAutocomplete(query: string) { const searchContext = new SearchContext({ fastSearch: true, includeArchivedNotes: false, @@ -351,7 +355,7 @@ function searchNotesForAutocomplete(query) { }); } -function highlightSearchResults(searchResults, highlightedTokens) { +function highlightSearchResults(searchResults: SearchResult[], highlightedTokens: string[]) { highlightedTokens = Array.from(new Set(highlightedTokens)); // we remove < signs because they can cause trouble in matching and overwriting existing highlighted chunks @@ -387,7 +391,7 @@ function highlightSearchResults(searchResults, highlightedTokens) { } } - function wrapText(text, start, length, prefix, suffix) { + function wrapText(text: string, start: number, length: number, prefix: string, suffix: string) { return text.substring(0, start) + prefix + text.substr(start, length) + suffix + text.substring(start + length); } @@ -403,6 +407,7 @@ function highlightSearchResults(searchResults, highlightedTokens) { let match; // Find all matches + if (!result.highlightedNotePathTitle) { continue; } while ((match = tokenRegex.exec(normalizeString(result.highlightedNotePathTitle))) !== null) { result.highlightedNotePathTitle = wrapText(result.highlightedNotePathTitle, match.index, token.length, "{", "}"); @@ -413,6 +418,7 @@ function 
highlightSearchResults(searchResults, highlightedTokens) { } for (const result of searchResults) { + if (!result.highlightedNotePathTitle) { continue; } result.highlightedNotePathTitle = result.highlightedNotePathTitle .replace(/"/g, "<small>") .replace(/'/g, "</small>") @@ -421,7 +427,7 @@ function highlightSearchResults(searchResults, highlightedTokens) { } } -function formatAttribute(attr) { +function formatAttribute(attr: BAttribute) { if (attr.type === 'relation') { return `~${utils.escapeHtml(attr.name)}=…`; } @@ -438,7 +444,7 @@ function formatAttribute(attr) { } } -module.exports = { +export = { searchFromNote, searchNotesForAutocomplete, findResultsWithQuery, diff --git a/src/services/search/services/types.ts b/src/services/search/services/types.ts new file mode 100644 index 0000000000..09450f7606 --- /dev/null +++ b/src/services/search/services/types.ts @@ -0,0 +1,20 @@ +export interface TokenData { + token: string; + inQuotes?: boolean; + startIndex?: number; + endIndex?: number; +} + +export interface SearchParams { + fastSearch?: boolean; + includeArchivedNotes?: boolean; + includeHiddenNotes?: boolean; + ignoreHoistedNote?: boolean; + ancestorNoteId?: string; + ancestorDepth?: string; + orderBy?: string; + orderDirection?: string; + limit?: number | null; + debug?: boolean; + fuzzyAttributeSearch?: boolean; +} \ No newline at end of file diff --git a/src/services/search/value_extractor.js b/src/services/search/value_extractor.ts similarity index 89% rename from src/services/search/value_extractor.js rename to src/services/search/value_extractor.ts index 27aff0e6c4..4f2466f111 100644 --- a/src/services/search/value_extractor.js +++ b/src/services/search/value_extractor.ts @@ -1,10 +1,12 @@ "use strict"; +import BNote = require("../../becca/entities/bnote"); + /** * Search string is lower cased for case-insensitive comparison. But when retrieving properties, * we need a case-sensitive form, so we have this translation object. 
*/ -const PROP_MAPPING = { +const PROP_MAPPING: Record<string, string> = { "noteid": "noteId", "title": "title", "type": "type", @@ -32,8 +34,14 @@ const PROP_MAPPING = { "revisioncount": "revisionCount" }; +interface SearchContext { + dbLoadNeeded: boolean; +} + class ValueExtractor { - constructor(searchContext, propertyPath) { + private propertyPath: string[]; + + constructor(searchContext: SearchContext, propertyPath: string[]) { this.propertyPath = propertyPath.map(pathEl => pathEl.toLowerCase()); if (this.propertyPath[0].startsWith('#')) { @@ -81,10 +89,10 @@ class ValueExtractor { } } - extract(note) { - let cursor = note; + extract(note: BNote) { + let cursor: BNote | null = note; - let i; + let i: number = 0; const cur = () => this.propertyPath[i]; @@ -105,8 +113,7 @@ class ValueExtractor { i++; const attr = cursor.getAttributeCaseInsensitive('relation', cur()); - - cursor = attr ? attr.targetNote : null; + cursor = attr?.targetNote || null; } else if (cur() === 'parents') { cursor = cursor.parents[0]; @@ -118,7 +125,7 @@ class ValueExtractor { return Math.random().toString(); // string is expected for comparison } else if (cur() in PROP_MAPPING) { - return cursor[PROP_MAPPING[cur()]]; + return (cursor as any)[PROP_MAPPING[cur()]]; } else { // FIXME @@ -127,4 +134,4 @@ class ValueExtractor { } } -module.exports = ValueExtractor; +export = ValueExtractor; diff --git a/src/services/session_secret.js b/src/services/session_secret.ts similarity index 55% rename from src/services/session_secret.js rename to src/services/session_secret.ts index 90eaa65bc7..3721cf63fa 100644 --- a/src/services/session_secret.js +++ b/src/services/session_secret.ts @@ -1,15 +1,17 @@ "use strict"; -const fs = require('fs'); -const crypto = require('crypto'); -const dataDir = require('./data_dir.js'); -const log = require('./log.js'); +import fs = require('fs'); +import crypto = require('crypto'); +import dataDir = require('./data_dir'); +import log = require('./log'); const 
sessionSecretPath = `${dataDir.TRILIUM_DATA_DIR}/session_secret.txt`; let sessionSecret; -function randomValueHex(len) { +const ENCODING = "ascii"; + +function randomValueHex(len: number) { return crypto.randomBytes(Math.ceil(len / 2)) .toString('hex') // convert to hexadecimal format .slice(0, len).toUpperCase(); // return required number of characters @@ -20,10 +22,10 @@ if (!fs.existsSync(sessionSecretPath)) { log.info("Generated session secret"); - fs.writeFileSync(sessionSecretPath, sessionSecret, 'ASCII'); + fs.writeFileSync(sessionSecretPath, sessionSecret, ENCODING); } else { - sessionSecret = fs.readFileSync(sessionSecretPath, 'ASCII'); + sessionSecret = fs.readFileSync(sessionSecretPath, ENCODING); } -module.exports = sessionSecret; +export = sessionSecret; diff --git a/src/services/setup.js b/src/services/setup.ts similarity index 75% rename from src/services/setup.js rename to src/services/setup.ts index 67eb7f3ce2..3d7706dbac 100644 --- a/src/services/setup.js +++ b/src/services/setup.ts @@ -1,15 +1,16 @@ -const syncService = require('./sync.js'); -const log = require('./log.js'); -const sqlInit = require('./sql_init.js'); -const optionService = require('./options.js'); -const syncOptions = require('./sync_options.js'); -const request = require('./request.js'); -const appInfo = require('./app_info.js'); -const utils = require('./utils.js'); -const becca = require('../becca/becca.js'); +import syncService = require('./sync'); +import log = require('./log'); +import sqlInit = require('./sql_init'); +import optionService = require('./options'); +import syncOptions = require('./sync_options'); +import request = require('./request'); +import appInfo = require('./app_info'); +import utils = require('./utils'); +import becca = require('../becca/becca'); +import { SetupStatusResponse, SetupSyncSeedResponse } from './api-interface'; async function hasSyncServerSchemaAndSeed() { - const response = await requestToSyncServer('GET', '/api/setup/status'); + const 
response = await requestToSyncServer<SetupStatusResponse>('GET', '/api/setup/status'); if (response.syncVersion !== appInfo.syncVersion) { throw new Error(`Could not setup sync since local sync protocol version is ${appInfo.syncVersion} while remote is ${response.syncVersion}. To fix this issue, use same Trilium version on all instances.`); @@ -32,7 +33,7 @@ function triggerSync() { async function sendSeedToSyncServer() { log.info("Initiating sync to server"); - await requestToSyncServer('POST', '/api/setup/sync-seed', { + await requestToSyncServer<void>('POST', '/api/setup/sync-seed', { options: getSyncSeedOptions(), syncVersion: appInfo.syncVersion }); @@ -43,7 +44,7 @@ async function sendSeedToSyncServer() { optionService.setOption('lastSyncedPull', 0); } -async function requestToSyncServer(method, path, body = null) { +async function requestToSyncServer<T>(method: string, path: string, body?: string | {}): Promise<T> { const timeout = syncOptions.getSyncTimeout(); return await utils.timeLimit(request.exec({ @@ -52,10 +53,10 @@ async function requestToSyncServer(method, path, body = null) { body, proxy: syncOptions.getSyncProxy(), timeout: timeout - }), timeout); + }), timeout) as T; } -async function setupSyncFromSyncServer(syncServerHost, syncProxy, password) { +async function setupSyncFromSyncServer(syncServerHost: string, syncProxy: string, password: string) { if (sqlInit.isDbInitialized()) { return { result: 'failure', @@ -67,7 +68,7 @@ async function setupSyncFromSyncServer(syncServerHost, syncProxy, password) { log.info("Getting document options FROM sync server."); // the response is expected to contain documentId and documentSecret options - const resp = await request.exec({ + const resp = await request.exec<SetupSyncSeedResponse>({ method: 'get', url: `${syncServerHost}/api/setup/sync-seed`, auth: { password }, @@ -92,7 +93,7 @@ async function setupSyncFromSyncServer(syncServerHost, syncProxy, password) { return { result: 'success' }; } - catch (e) { + 
catch (e: any) { log.error(`Sync failed: '${e.message}', stack: ${e.stack}`); return { diff --git a/src/services/spaced_update.js b/src/services/spaced_update.ts similarity index 78% rename from src/services/spaced_update.js rename to src/services/spaced_update.ts index 9518df908b..24417d18c4 100644 --- a/src/services/spaced_update.js +++ b/src/services/spaced_update.ts @@ -1,9 +1,19 @@ +type Updater = () => void; + class SpacedUpdate { - constructor(updater, updateInterval = 1000) { + + private updater: Updater; + private lastUpdated: number; + private changed: boolean; + private updateInterval: number; + private changeForbidden: boolean; + + constructor(updater: Updater, updateInterval = 1000) { this.updater = updater; this.lastUpdated = Date.now(); this.changed = false; this.updateInterval = updateInterval; + this.changeForbidden = false; } scheduleUpdate() { @@ -52,7 +62,7 @@ class SpacedUpdate { } } - async allowUpdateWithoutChange(callback) { + async allowUpdateWithoutChange(callback: () => void) { this.changeForbidden = true; try { @@ -64,4 +74,4 @@ class SpacedUpdate { } } -module.exports = SpacedUpdate; +export = SpacedUpdate; diff --git a/src/services/special_notes.js b/src/services/special_notes.ts similarity index 79% rename from src/services/special_notes.js rename to src/services/special_notes.ts index 60d202df0a..8a75873ec4 100644 --- a/src/services/special_notes.js +++ b/src/services/special_notes.ts @@ -1,16 +1,20 @@ -const attributeService = require('./attributes.js'); -const dateNoteService = require('./date_notes.js'); -const becca = require('../becca/becca.js'); -const noteService = require('./notes.js'); -const dateUtils = require('./date_utils.js'); -const log = require('./log.js'); -const hoistedNoteService = require('./hoisted_note.js'); -const searchService = require('./search/services/search.js'); -const SearchContext = require('./search/search_context.js'); -const {LBTPL_NOTE_LAUNCHER, LBTPL_CUSTOM_WIDGET, LBTPL_SPACER, LBTPL_SCRIPT} = 
require('./hidden_subtree.js'); - -function getInboxNote(date) { +import attributeService = require('./attributes'); +import dateNoteService = require('./date_notes'); +import becca = require('../becca/becca'); +import noteService = require('./notes'); +import dateUtils = require('./date_utils'); +import log = require('./log'); +import hoistedNoteService = require('./hoisted_note'); +import searchService = require('./search/services/search'); +import SearchContext = require('./search/search_context'); +import hiddenSubtree = require('./hidden_subtree'); +const { LBTPL_NOTE_LAUNCHER, LBTPL_CUSTOM_WIDGET, LBTPL_SPACER, LBTPL_SCRIPT } = hiddenSubtree; + +function getInboxNote(date: string) { const workspaceNote = hoistedNoteService.getWorkspaceNote(); + if (!workspaceNote) { + throw new Error("Unable to find workspace note"); + } let inbox; @@ -48,8 +52,9 @@ function createSqlConsole() { return note; } -function saveSqlConsole(sqlConsoleNoteId) { +function saveSqlConsole(sqlConsoleNoteId: string) { const sqlConsoleNote = becca.getNote(sqlConsoleNoteId); + if (!sqlConsoleNote) throw new Error(`Unable to find SQL console note ID: ${sqlConsoleNoteId}`); const today = dateUtils.localNowDate(); const sqlConsoleHome = @@ -59,7 +64,7 @@ function saveSqlConsole(sqlConsoleNoteId) { const result = sqlConsoleNote.cloneTo(sqlConsoleHome.noteId); for (const parentBranch of sqlConsoleNote.getParentBranches()) { - if (parentBranch.parentNote.hasAncestor('_hidden')) { + if (parentBranch.parentNote?.hasAncestor('_hidden')) { parentBranch.markAsDeleted(); } } @@ -67,7 +72,7 @@ function saveSqlConsole(sqlConsoleNoteId) { return result; } -function createSearchNote(searchString, ancestorNoteId) { +function createSearchNote(searchString: string, ancestorNoteId: string) { const {note} = noteService.createNewNote({ parentNoteId: getMonthlyParentNoteId('_search', 'search'), title: `Search: ${searchString}`, @@ -88,6 +93,9 @@ function createSearchNote(searchString, ancestorNoteId) { function 
getSearchHome() { const workspaceNote = hoistedNoteService.getWorkspaceNote(); + if (!workspaceNote) { + throw new Error("Unable to find workspace note"); + } if (!workspaceNote.isRoot()) { return workspaceNote.searchNoteInSubtree('#workspaceSearchHome') @@ -101,14 +109,18 @@ function getSearchHome() { } } -function saveSearchNote(searchNoteId) { +function saveSearchNote(searchNoteId: string) { const searchNote = becca.getNote(searchNoteId); + if (!searchNote) { + throw new Error("Unable to find search note"); + } + const searchHome = getSearchHome(); const result = searchNote.cloneTo(searchHome.noteId); for (const parentBranch of searchNote.getParentBranches()) { - if (parentBranch.parentNote.hasAncestor('_hidden')) { + if (parentBranch.parentNote?.hasAncestor('_hidden')) { parentBranch.markAsDeleted(); } } @@ -116,7 +128,7 @@ function saveSearchNote(searchNoteId) { return result; } -function getMonthlyParentNoteId(rootNoteId, prefix) { +function getMonthlyParentNoteId(rootNoteId: string, prefix: string) { const month = dateUtils.localNowDate().substring(0, 7); const labelName = `${prefix}MonthNote`; @@ -138,7 +150,7 @@ function getMonthlyParentNoteId(rootNoteId, prefix) { return monthNote.noteId; } -function createScriptLauncher(parentNoteId, forceNoteId = null) { +function createScriptLauncher(parentNoteId: string, forceNoteId?: string) { const note = noteService.createNewNote({ noteId: forceNoteId, title: "Script Launcher", @@ -151,7 +163,13 @@ function createScriptLauncher(parentNoteId, forceNoteId = null) { return note; } -function createLauncher({parentNoteId, launcherType, noteId}) { +interface LauncherConfig { + parentNoteId: string; + launcherType: string; + noteId: string; +} + +function createLauncher({ parentNoteId, launcherType, noteId }: LauncherConfig) { let note; if (launcherType === 'note') { @@ -197,10 +215,10 @@ function createLauncher({parentNoteId, launcherType, noteId}) { }; } -function resetLauncher(noteId) { +function resetLauncher(noteId: 
string) { const note = becca.getNote(noteId); - if (note.isLaunchBarConfig()) { + if (note?.isLaunchBarConfig()) { if (note) { if (noteId === '_lbRoot') { // deleting hoisted notes are not allowed, so we just reset the children @@ -228,7 +246,13 @@ function resetLauncher(noteId) { * Another use case was for script-packages (e.g. demo Task manager) which could this way register automatically/easily * into the launchbar - for this it's recommended to use backend API's createOrUpdateLauncher() */ -function createOrUpdateScriptLauncherFromApi(opts) { +function createOrUpdateScriptLauncherFromApi(opts: { + id: string; + title: string; + action: string; + icon?: string; + shortcut?: string; +}) { if (opts.id && !/^[a-z0-9]+$/i.test(opts.id)) { throw new Error(`Launcher ID can be alphanumeric only, '${opts.id}' given`); } @@ -263,7 +287,7 @@ function createOrUpdateScriptLauncherFromApi(opts) { return launcherNote; } -module.exports = { +export = { getInboxNote, createSqlConsole, saveSqlConsole, diff --git a/src/services/sql.js b/src/services/sql.ts similarity index 72% rename from src/services/sql.js rename to src/services/sql.ts index 38cbabe193..4aa367fbe7 100644 --- a/src/services/sql.js +++ b/src/services/sql.ts @@ -4,17 +4,20 @@ * @module sql */ -const log = require('./log.js'); -const Database = require('better-sqlite3'); -const dataDir = require('./data_dir.js'); -const cls = require('./cls.js'); -const fs = require("fs-extra"); - -const dbConnection = new Database(dataDir.DOCUMENT_PATH); +import log = require('./log'); +import type { Statement, Database as DatabaseType, RunResult } from "better-sqlite3"; +import dataDir = require('./data_dir'); +import cls = require('./cls'); +import fs = require("fs-extra"); +import Database = require('better-sqlite3'); + +const dbConnection: DatabaseType = new Database(dataDir.DOCUMENT_PATH); dbConnection.pragma('journal_mode = WAL'); const LOG_ALL_QUERIES = false; +type Params = any; + [`exit`, `SIGINT`, `SIGUSR1`, `SIGUSR2`, 
`SIGTERM`].forEach(eventType => { process.on(eventType, () => { if (dbConnection) { @@ -25,7 +28,7 @@ const LOG_ALL_QUERIES = false; }); }); -function insert(tableName, rec, replace = false) { +function insert<T extends {}>(tableName: string, rec: T, replace = false) { const keys = Object.keys(rec || {}); if (keys.length === 0) { log.error(`Can't insert empty object into table ${tableName}`); @@ -48,11 +51,11 @@ function insert(tableName, rec, replace = false) { return res ? res.lastInsertRowid : null; } -function replace(tableName, rec) { - return insert(tableName, rec, true); +function replace<T extends {}>(tableName: string, rec: T): number | null { + return insert(tableName, rec, true) as number | null; } -function upsert(tableName, primaryKey, rec) { +function upsert<T extends {}>(tableName: string, primaryKey: string, rec: T) { const keys = Object.keys(rec || {}); if (keys.length === 0) { log.error(`Can't upsert empty object into table ${tableName}`); @@ -70,16 +73,16 @@ function upsert(tableName, primaryKey, rec) { for (const idx in rec) { if (rec[idx] === true || rec[idx] === false) { - rec[idx] = rec[idx] ? 1 : 0; + (rec as any)[idx] = rec[idx] ? 1 : 0; } } execute(query, rec); } -const statementCache = {}; +const statementCache: Record<string, Statement> = {}; -function stmt(sql) { +function stmt(sql: string) { if (!(sql in statementCache)) { statementCache[sql] = dbConnection.prepare(sql); } @@ -87,31 +90,34 @@ function stmt(sql) { return statementCache[sql]; } -function getRow(query, params = []) { - return wrap(query, s => s.get(params)); +function getRow<T>(query: string, params: Params = []): T { + return wrap(query, s => s.get(params)) as T; } -function getRowOrNull(query, params = []) { +function getRowOrNull<T>(query: string, params: Params = []): T | null { const all = getRows(query, params); + if (!all) { + return null; + } - return all.length > 0 ? all[0] : null; + return (all.length > 0 ? 
all[0] : null) as (T | null); } -function getValue(query, params = []) { - return wrap(query, s => s.pluck().get(params)); +function getValue<T>(query: string, params: Params = []): T { + return wrap(query, s => s.pluck().get(params)) as T; } // smaller values can result in better performance due to better usage of statement cache const PARAM_LIMIT = 100; -function getManyRows(query, params) { - let results = []; +function getManyRows<T>(query: string, params: Params): T[] { + let results: unknown[] = []; while (params.length > 0) { const curParams = params.slice(0, Math.min(params.length, PARAM_LIMIT)); params = params.slice(curParams.length); - const curParamsObj = {}; + const curParamsObj: Record<string, any> = {}; let j = 1; for (const param of curParams) { @@ -130,45 +136,45 @@ function getManyRows(query, params) { results = results.concat(subResults); } - return results; + return (results as (T[] | null) || []); } -function getRows(query, params = []) { - return wrap(query, s => s.all(params)); +function getRows<T>(query: string, params: Params = []): T[] { + return wrap(query, s => s.all(params)) as T[]; } -function getRawRows(query, params = []) { - return wrap(query, s => s.raw().all(params)); +function getRawRows<T extends {} | unknown[]>(query: string, params: Params = []): T[] { + return (wrap(query, s => s.raw().all(params)) as T[]) || []; } -function iterateRows(query, params = []) { +function iterateRows<T>(query: string, params: Params = []): IterableIterator<T> { if (LOG_ALL_QUERIES) { console.log(query); } - return stmt(query).iterate(params); + return stmt(query).iterate(params) as IterableIterator<T>; } -function getMap(query, params = []) { - const map = {}; - const results = getRawRows(query, params); +function getMap<K extends string | number | symbol, V>(query: string, params: Params = []) { + const map: Record<K, V> = {} as Record<K, V>; + const results = getRawRows<[K, V]>(query, params); - for (const row of results) { + for (const row of 
results || []) { map[row[0]] = row[1]; } return map; } -function getColumn(query, params = []) { - return wrap(query, s => s.pluck().all(params)); +function getColumn<T>(query: string, params: Params = []): T[] { + return wrap(query, s => s.pluck().all(params)) as T[]; } -function execute(query, params = []) { - return wrap(query, s => s.run(params)); +function execute(query: string, params: Params = []): RunResult { + return wrap(query, s => s.run(params)) as RunResult; } -function executeMany(query, params) { +function executeMany(query: string, params: Params) { if (LOG_ALL_QUERIES) { console.log(query); } @@ -177,7 +183,7 @@ function executeMany(query, params) { const curParams = params.slice(0, Math.min(params.length, PARAM_LIMIT)); params = params.slice(curParams.length); - const curParamsObj = {}; + const curParamsObj: Record<string, any> = {}; let j = 1; for (const param of curParams) { @@ -192,7 +198,7 @@ function executeMany(query, params) { } } -function executeScript(query) { +function executeScript(query: string): DatabaseType { if (LOG_ALL_QUERIES) { console.log(query); } @@ -200,7 +206,7 @@ function executeScript(query) { return dbConnection.exec(query); } -function wrap(query, func) { +function wrap(query: string, func: (statement: Statement) => unknown): unknown { const startTimestamp = Date.now(); let result; @@ -211,7 +217,7 @@ function wrap(query, func) { try { result = func(stmt(query)); } - catch (e) { + catch (e: any) { if (e.message.includes("The database connection is not open")) { // this often happens on killing the app which puts these alerts in front of user // in these cases error should be simply ignored. @@ -237,12 +243,12 @@ function wrap(query, func) { return result; } -function transactional(func) { +function transactional<T>(func: (statement: Statement) => T) { try { - const ret = dbConnection.transaction(func).deferred(); + const ret = (dbConnection.transaction(func) as any).deferred(); if (!dbConnection.inTransaction) { // i.e. 
transaction was really committed (and not just savepoint released) - require('./ws.js').sendTransactionEntityChangesToAllClients(); + require('./ws').sendTransactionEntityChangesToAllClients(); } return ret; @@ -253,17 +259,17 @@ function transactional(func) { if (entityChangeIds.length > 0) { log.info("Transaction rollback dirtied the becca, forcing reload."); - require('../becca/becca_loader.js').load(); + require('../becca/becca_loader').load(); } // the maxEntityChangeId has been incremented during failed transaction, need to recalculate - require('./entity_changes.js').recalculateMaxEntityChangeId(); + require('./entity_changes').recalculateMaxEntityChangeId(); throw e; } } -function fillParamList(paramIds, truncate = true) { +function fillParamList(paramIds: string[], truncate = true) { if (paramIds.length === 0) { return; } @@ -286,7 +292,7 @@ function fillParamList(paramIds, truncate = true) { s.run(paramIds); } -async function copyDatabase(targetFilePath) { +async function copyDatabase(targetFilePath: string) { try { fs.unlinkSync(targetFilePath); } catch (e) { @@ -295,7 +301,7 @@ async function copyDatabase(targetFilePath) { await dbConnection.backup(targetFilePath); } -function disableSlowQueryLogging(cb) { +function disableSlowQueryLogging<T>(cb: () => T) { const orig = cls.isSlowQueryLoggingDisabled(); try { @@ -308,7 +314,7 @@ function disableSlowQueryLogging(cb) { } } -module.exports = { +export = { dbConnection, insert, replace, diff --git a/src/services/sql_init.js b/src/services/sql_init.ts similarity index 74% rename from src/services/sql_init.js rename to src/services/sql_init.ts index f1b4e9152f..5c9937daa8 100644 --- a/src/services/sql_init.js +++ b/src/services/sql_init.ts @@ -1,17 +1,18 @@ -const log = require('./log.js'); -const fs = require('fs'); -const resourceDir = require('./resource_dir.js'); -const sql = require('./sql.js'); -const utils = require('./utils.js'); -const optionService = require('./options.js'); -const port = 
require('./port.js'); -const BOption = require('../becca/entities/boption.js'); -const TaskContext = require('./task_context.js'); -const migrationService = require('./migration.js'); -const cls = require('./cls.js'); -const config = require('./config.js'); - -const dbReady = utils.deferred(); +import log = require('./log'); +import fs = require('fs'); +import resourceDir = require('./resource_dir'); +import sql = require('./sql'); +import utils = require('./utils'); +import optionService = require('./options'); +import port = require('./port'); +import BOption = require('../becca/entities/boption'); +import TaskContext = require('./task_context'); +import migrationService = require('./migration'); +import cls = require('./cls'); +import config = require('./config'); +import { OptionRow } from '../becca/entities/rows'; + +const dbReady = utils.deferred<void>(); cls.init(initDbConnection); @@ -50,7 +51,7 @@ async function createInitialDatabase() { throw new Error("DB is already initialized"); } - const schema = fs.readFileSync(`${resourceDir.DB_INIT_DIR}/schema.sql`, 'UTF-8'); + const schema = fs.readFileSync(`${resourceDir.DB_INIT_DIR}/schema.sql`, "utf-8"); const demoFile = fs.readFileSync(`${resourceDir.DB_INIT_DIR}/demo.zip`); let rootNote; @@ -60,10 +61,10 @@ async function createInitialDatabase() { sql.executeScript(schema); - require('../becca/becca_loader.js').load(); + require('../becca/becca_loader').load(); - const BNote = require('../becca/entities/bnote.js'); - const BBranch = require('../becca/entities/bbranch.js'); + const BNote = require('../becca/entities/bnote'); + const BBranch = require('../becca/entities/bbranch'); log.info("Creating root note ..."); @@ -83,19 +84,19 @@ async function createInitialDatabase() { notePosition: 10 }).save(); - const optionsInitService = require('./options_init.js'); + const optionsInitService = require('./options_init'); optionsInitService.initDocumentOptions(); optionsInitService.initNotSyncedOptions(true, {}); 
optionsInitService.initStartupOptions(); - require('./encryption/password.js').resetPassword(); + require('./encryption/password').resetPassword(); }); log.info("Importing demo content ..."); const dummyTaskContext = new TaskContext("no-progress-reporting", 'import', false); - const zipImportService = require('./import/zip.js'); + const zipImportService = require('./import/zip'); await zipImportService.importZip(dummyTaskContext, demoFile, rootNote); sql.transactional(() => { @@ -105,7 +106,7 @@ async function createInitialDatabase() { const startNoteId = sql.getValue("SELECT noteId FROM branches WHERE parentNoteId = 'root' AND isDeleted = 0 ORDER BY notePosition"); - const optionService = require('./options.js'); + const optionService = require('./options'); optionService.setOption('openNoteContexts', JSON.stringify([ { notePath: startNoteId, @@ -119,19 +120,19 @@ async function createInitialDatabase() { initDbConnection(); } -function createDatabaseForSync(options, syncServerHost = '', syncProxy = '') { +function createDatabaseForSync(options: OptionRow[], syncServerHost = '', syncProxy = '') { log.info("Creating database for sync"); if (isDbInitialized()) { throw new Error("DB is already initialized"); } - const schema = fs.readFileSync(`${resourceDir.DB_INIT_DIR}/schema.sql`, 'UTF-8'); + const schema = fs.readFileSync(`${resourceDir.DB_INIT_DIR}/schema.sql`, "utf8"); sql.transactional(() => { sql.executeScript(schema); - require('./options_init.js').initNotSyncedOptions(false, { syncServerHost, syncProxy }); + require('./options_init').initNotSyncedOptions(false, { syncServerHost, syncProxy }); // document options required for sync to kick off for (const opt of options) { @@ -166,10 +167,10 @@ dbReady.then(() => { return; } - setInterval(() => require('./backup.js').regularBackup(), 4 * 60 * 60 * 1000); + setInterval(() => require('./backup').regularBackup(), 4 * 60 * 60 * 1000); // kickoff first backup soon after start up - setTimeout(() => 
require('./backup.js').regularBackup(), 5 * 60 * 1000); + setTimeout(() => require('./backup').regularBackup(), 5 * 60 * 1000); // optimize is usually inexpensive no-op, so running it semi-frequently is not a big deal setTimeout(() => optimize(), 60 * 60 * 1000); @@ -178,12 +179,12 @@ dbReady.then(() => { }); function getDbSize() { - return sql.getValue("SELECT page_count * page_size / 1000 as size FROM pragma_page_count(), pragma_page_size()"); + return sql.getValue<number>("SELECT page_count * page_size / 1000 as size FROM pragma_page_count(), pragma_page_size()"); } log.info(`DB size: ${getDbSize()} KB`); -module.exports = { +export = { dbReady, schemaExists, isDbInitialized, diff --git a/src/services/sync.js b/src/services/sync.ts similarity index 74% rename from src/services/sync.js rename to src/services/sync.ts index fbafdfcfa2..8c81839843 100644 --- a/src/services/sync.js +++ b/src/services/sync.ts @@ -1,27 +1,50 @@ "use strict"; -const log = require('./log.js'); -const sql = require('./sql.js'); -const optionService = require('./options.js'); -const utils = require('./utils.js'); -const instanceId = require('./instance_id.js'); -const dateUtils = require('./date_utils.js'); -const syncUpdateService = require('./sync_update.js'); -const contentHashService = require('./content_hash.js'); -const appInfo = require('./app_info.js'); -const syncOptions = require('./sync_options.js'); -const syncMutexService = require('./sync_mutex.js'); -const cls = require('./cls.js'); -const request = require('./request.js'); -const ws = require('./ws.js'); -const entityChangesService = require('./entity_changes.js'); -const entityConstructor = require('../becca/entity_constructor.js'); -const becca = require('../becca/becca.js'); +import log = require('./log'); +import sql = require('./sql'); +import optionService = require('./options'); +import utils = require('./utils'); +import instanceId = require('./instance_id'); +import dateUtils = require('./date_utils'); +import 
syncUpdateService = require('./sync_update'); +import contentHashService = require('./content_hash'); +import appInfo = require('./app_info'); +import syncOptions = require('./sync_options'); +import syncMutexService = require('./sync_mutex'); +import cls = require('./cls'); +import request = require('./request'); +import ws = require('./ws'); +import entityChangesService = require('./entity_changes'); +import entityConstructor = require('../becca/entity_constructor'); +import becca = require('../becca/becca'); +import { EntityChange, EntityChangeRecord, EntityRow } from './entity_changes_interface'; +import { CookieJar, ExecOpts } from './request_interface'; let proxyToggle = true; let outstandingPullCount = 0; +interface CheckResponse { + maxEntityChangeId: number; + entityHashes: Record<string, Record<string, string>> +} + +interface SyncResponse { + instanceId: string; + maxEntityChangeId: number; +} + +interface ChangesResponse { + entityChanges: EntityChangeRecord[]; + lastEntityChangeId: number; + outstandingPullCount: number; +} + +interface SyncContext { + cookieJar: CookieJar; + instanceId?: string; +} + async function sync() { try { return await syncMutexService.doExclusively(async () => { @@ -53,7 +76,7 @@ async function sync() { }; }); } - catch (e) { + catch (e: any) { // we're dynamically switching whether we're using proxy or not based on whether we encountered error with the current method proxyToggle = !proxyToggle; @@ -84,7 +107,7 @@ async function sync() { } async function login() { - const setupService = require('./setup.js'); // circular dependency issue + const setupService = require('./setup'); // circular dependency issue if (!await setupService.hasSyncServerSchemaAndSeed()) { await setupService.sendSeedToSyncServer(); @@ -93,19 +116,23 @@ async function login() { return await doLogin(); } -async function doLogin() { +async function doLogin(): Promise<SyncContext> { const timestamp = dateUtils.utcNowDateTime(); const documentSecret = 
optionService.getOption('documentSecret'); const hash = utils.hmac(documentSecret, timestamp); - const syncContext = { cookieJar: {} }; - const resp = await syncRequest(syncContext, 'POST', '/api/login/sync', { + const syncContext: SyncContext = { cookieJar: {} }; + const resp = await syncRequest<SyncResponse>(syncContext, 'POST', '/api/login/sync', { timestamp: timestamp, syncVersion: appInfo.syncVersion, hash: hash }); + if (!resp) { + throw new Error("Got no response."); + } + if (resp.instanceId === instanceId) { throw new Error(`Sync server has instance ID '${resp.instanceId}' which is also local. This usually happens when the sync client is (mis)configured to sync with itself (URL points back to client) instead of the correct sync server.`); } @@ -125,7 +152,7 @@ async function doLogin() { return syncContext; } -async function pullChanges(syncContext) { +async function pullChanges(syncContext: SyncContext) { while (true) { const lastSyncedPull = getLastSyncedPull(); const logMarkerId = utils.randomString(10); // to easily pair sync events between client and server logs @@ -133,7 +160,10 @@ async function pullChanges(syncContext) { const startDate = Date.now(); - const resp = await syncRequest(syncContext, 'GET', changesUri); + const resp = await syncRequest<ChangesResponse>(syncContext, 'GET', changesUri); + if (!resp) { + throw new Error("Request failed."); + } const {entityChanges, lastEntityChangeId} = resp; outstandingPullCount = resp.outstandingPullCount; @@ -141,7 +171,9 @@ async function pullChanges(syncContext) { const pulledDate = Date.now(); sql.transactional(() => { - syncUpdateService.updateEntities(entityChanges, syncContext.instanceId); + if (syncContext.instanceId) { + syncUpdateService.updateEntities(entityChanges, syncContext.instanceId); + } if (lastSyncedPull !== lastEntityChangeId) { setLastSyncedPull(lastEntityChangeId); @@ -156,7 +188,7 @@ async function pullChanges(syncContext) { log.info(`Sync ${logMarkerId}: Pulled 
${entityChanges.length} changes in ${sizeInKb} KB, starting at entityChangeId=${lastSyncedPull} in ${pulledDate - startDate}ms and applied them in ${Date.now() - pulledDate}ms, ${outstandingPullCount} outstanding pulls`); } - catch (e) { + catch (e: any) { log.error(`Error occurred ${e.message} ${e.stack}`); } } @@ -165,11 +197,11 @@ async function pullChanges(syncContext) { log.info("Finished pull"); } -async function pushChanges(syncContext) { - let lastSyncedPush = getLastSyncedPush(); +async function pushChanges(syncContext: SyncContext) { + let lastSyncedPush: number | null | undefined = getLastSyncedPush(); while (true) { - const entityChanges = sql.getRows('SELECT * FROM entity_changes WHERE isSynced = 1 AND id > ? LIMIT 1000', [lastSyncedPush]); + const entityChanges = sql.getRows<EntityChange>('SELECT * FROM entity_changes WHERE isSynced = 1 AND id > ? LIMIT 1000', [lastSyncedPush]); if (entityChanges.length === 0) { log.info("Nothing to push"); @@ -190,7 +222,7 @@ async function pushChanges(syncContext) { } }); - if (filteredEntityChanges.length === 0) { + if (filteredEntityChanges.length === 0 && lastSyncedPush) { // there still might be more sync changes (because of batch limit), just all the current batch // has been filtered out setLastSyncedPush(lastSyncedPush); @@ -214,16 +246,22 @@ async function pushChanges(syncContext) { lastSyncedPush = entityChangesRecords[entityChangesRecords.length - 1].entityChange.id; - setLastSyncedPush(lastSyncedPush); + if (lastSyncedPush) { + setLastSyncedPush(lastSyncedPush); + } } } -async function syncFinished(syncContext) { +async function syncFinished(syncContext: SyncContext) { await syncRequest(syncContext, 'POST', '/api/sync/finished'); } -async function checkContentHash(syncContext) { - const resp = await syncRequest(syncContext, 'GET', '/api/sync/check'); +async function checkContentHash(syncContext: SyncContext) { + const resp = await syncRequest<CheckResponse>(syncContext, 'GET', '/api/sync/check'); + if 
(!resp) { + throw new Error("Got no response."); + } + const lastSyncedPullId = getLastSyncedPull(); if (lastSyncedPullId < resp.maxEntityChangeId) { @@ -244,7 +282,7 @@ async function checkContentHash(syncContext) { if (failedChecks.length > 0) { // before re-queuing sectors, make sure the entity changes are correct - const consistencyChecks = require('./consistency_checks.js'); + const consistencyChecks = require('./consistency_checks'); consistencyChecks.runEntityChangesChecks(); await syncRequest(syncContext, 'POST', `/api/sync/check-entity-changes`); @@ -261,8 +299,12 @@ async function checkContentHash(syncContext) { const PAGE_SIZE = 1000000; -async function syncRequest(syncContext, method, requestPath, body) { - body = body ? JSON.stringify(body) : ''; +interface SyncContext { + cookieJar: CookieJar +} + +async function syncRequest<T extends {}>(syncContext: SyncContext, method: string, requestPath: string, _body?: {}) { + const body = _body ? JSON.stringify(_body) : ''; const timeout = syncOptions.getSyncTimeout(); @@ -272,7 +314,7 @@ async function syncRequest(syncContext, method, requestPath, body) { const pageCount = Math.max(1, Math.ceil(body.length / PAGE_SIZE)); for (let pageIndex = 0; pageIndex < pageCount; pageIndex++) { - const opts = { + const opts: ExecOpts = { method, url: syncOptions.getSyncServerHost() + requestPath, cookieJar: syncContext.cookieJar, @@ -286,13 +328,13 @@ async function syncRequest(syncContext, method, requestPath, body) { proxy: proxyToggle ? 
syncOptions.getSyncProxy() : null }; - response = await utils.timeLimit(request.exec(opts), timeout); + response = await utils.timeLimit(request.exec(opts), timeout) as T; } return response; } -function getEntityChangeRow(entityChange) { +function getEntityChangeRow(entityChange: EntityChange) { const {entityName, entityId} = entityChange; if (entityName === 'note_reordering') { @@ -305,7 +347,7 @@ function getEntityChangeRow(entityChange) { throw new Error(`Unknown entity for entity change ${JSON.stringify(entityChange)}`); } - const entityRow = sql.getRow(`SELECT * FROM ${entityName} WHERE ${primaryKey} = ?`, [entityId]); + const entityRow = sql.getRow<EntityRow>(`SELECT * FROM ${entityName} WHERE ${primaryKey} = ?`, [entityId]); if (!entityRow) { log.error(`Cannot find entity for entity change ${JSON.stringify(entityChange)}`); @@ -317,15 +359,17 @@ function getEntityChangeRow(entityChange) { entityRow.content = Buffer.from(entityRow.content, 'utf-8'); } - entityRow.content = entityRow.content.toString("base64"); + if (entityRow.content) { + entityRow.content = entityRow.content.toString("base64"); + } } return entityRow; } } -function getEntityChangeRecords(entityChanges) { - const records = []; +function getEntityChangeRecords(entityChanges: EntityChange[]) { + const records: EntityChangeRecord[] = []; let length = 0; for (const entityChange of entityChanges) { @@ -340,7 +384,7 @@ function getEntityChangeRecords(entityChanges) { continue; } - const record = { entityChange, entity }; + const record: EntityChangeRecord = { entityChange, entity }; records.push(record); @@ -359,7 +403,7 @@ function getLastSyncedPull() { return parseInt(optionService.getOption('lastSyncedPull')); } -function setLastSyncedPull(entityChangeId) { +function setLastSyncedPull(entityChangeId: number) { const lastSyncedPullOption = becca.getOption('lastSyncedPull'); if (lastSyncedPullOption) { // might be null in initial sync when becca is not loaded @@ -378,7 +422,7 @@ function 
getLastSyncedPush() { return lastSyncedPush; } -function setLastSyncedPush(entityChangeId) { +function setLastSyncedPush(entityChangeId: number) { ws.setLastSyncedPush(entityChangeId); const lastSyncedPushOption = becca.getOption('lastSyncedPush'); @@ -399,7 +443,7 @@ function getOutstandingPullCount() { return outstandingPullCount; } -require('../becca/becca_loader.js').beccaLoaded.then(() => { +require('../becca/becca_loader').beccaLoaded.then(() => { setInterval(cls.wrap(sync), 60000); // kickoff initial sync immediately, but should happen after initial consistency checks @@ -409,7 +453,7 @@ require('../becca/becca_loader.js').beccaLoaded.then(() => { getLastSyncedPush(); }); -module.exports = { +export = { sync, login, getEntityChangeRecords, diff --git a/src/services/sync_mutex.js b/src/services/sync_mutex.ts similarity index 88% rename from src/services/sync_mutex.js rename to src/services/sync_mutex.ts index fb95d03c43..9ad0fd08c8 100644 --- a/src/services/sync_mutex.js +++ b/src/services/sync_mutex.ts @@ -6,7 +6,7 @@ const Mutex = require('async-mutex').Mutex; const instance = new Mutex(); -async function doExclusively(func) { +async function doExclusively<T>(func: () => T) { const releaseMutex = await instance.acquire(); try { @@ -17,6 +17,6 @@ async function doExclusively(func) { } } -module.exports = { +export = { doExclusively }; diff --git a/src/services/sync_options.js b/src/services/sync_options.ts similarity index 89% rename from src/services/sync_options.js rename to src/services/sync_options.ts index 7cf44c06f0..92aea6c3f4 100644 --- a/src/services/sync_options.js +++ b/src/services/sync_options.ts @@ -1,7 +1,7 @@ "use strict"; -const optionService = require('./options.js'); -const config = require('./config.js'); +import optionService = require('./options'); +import config = require('./config'); /* * Primary configuration for sync is in the options (document), but we allow to override @@ -10,11 +10,11 @@ const config = require('./config.js'); * 
to live sync server. */ -function get(name) { +function get(name: string) { return (config['Sync'] && config['Sync'][name]) || optionService.getOption(name); } -module.exports = { +export = { // env variable is the easiest way to guarantee we won't overwrite prod data during development // after copying prod document/data directory getSyncServerHost: () => process.env.TRILIUM_SYNC_SERVER_HOST || get('syncServerHost'), diff --git a/src/services/sync_update.js b/src/services/sync_update.ts similarity index 69% rename from src/services/sync_update.js rename to src/services/sync_update.ts index 94091e9eff..888947b8b0 100644 --- a/src/services/sync_update.js +++ b/src/services/sync_update.ts @@ -1,11 +1,18 @@ -const sql = require('./sql.js'); -const log = require('./log.js'); -const entityChangesService = require('./entity_changes.js'); -const eventService = require('./events.js'); -const entityConstructor = require('../becca/entity_constructor.js'); -const ws = require('./ws.js'); - -function updateEntities(entityChanges, instanceId) { +import sql = require('./sql'); +import log = require('./log'); +import entityChangesService = require('./entity_changes'); +import eventService = require('./events'); +import entityConstructor = require('../becca/entity_constructor'); +import ws = require('./ws'); +import { EntityChange, EntityChangeRecord, EntityRow } from './entity_changes_interface'; + +interface UpdateContext { + alreadyErased: number; + erased: number; + updated: Record<string, string[]> +} + +function updateEntities(entityChanges: EntityChangeRecord[], instanceId: string) { if (entityChanges.length === 0) { return; } @@ -34,13 +41,15 @@ function updateEntities(entityChanges, instanceId) { atLeastOnePullApplied = true; } - updateEntity(entityChange, entity, instanceId, updateContext); + if (entity) { + updateEntity(entityChange, entity, instanceId, updateContext); + } } logUpdateContext(updateContext); } -function updateEntity(remoteEC, remoteEntityRow, instanceId, 
updateContext) { +function updateEntity(remoteEC: EntityChange, remoteEntityRow: EntityRow, instanceId: string, updateContext: UpdateContext) { if (!remoteEntityRow && remoteEC.entityName === 'options') { return; // can be undefined for options with isSynced=false } @@ -65,8 +74,12 @@ function updateEntity(remoteEC, remoteEntityRow, instanceId, updateContext) { } } -function updateNormalEntity(remoteEC, remoteEntityRow, instanceId, updateContext) { - const localEC = sql.getRow(`SELECT * FROM entity_changes WHERE entityName = ? AND entityId = ?`, [remoteEC.entityName, remoteEC.entityId]); +function updateNormalEntity(remoteEC: EntityChange, remoteEntityRow: EntityRow, instanceId: string, updateContext: UpdateContext) { + const localEC = sql.getRow<EntityChange>(`SELECT * FROM entity_changes WHERE entityName = ? AND entityId = ?`, [remoteEC.entityName, remoteEC.entityId]); + + if (!localEC.utcDateChanged || !remoteEC.utcDateChanged) { + throw new Error("Missing date changed."); + } if (!localEC || localEC.utcDateChanged <= remoteEC.utcDateChanged) { if (remoteEC.isErased) { @@ -110,28 +123,30 @@ function updateNormalEntity(remoteEC, remoteEntityRow, instanceId, updateContext return false; } -function preProcessContent(remoteEC, remoteEntityRow) { +function preProcessContent(remoteEC: EntityChange, remoteEntityRow: EntityRow) { if (remoteEC.entityName === 'blobs' && remoteEntityRow.content !== null) { // we always use a Buffer object which is different from normal saving - there we use a simple string type for // "string notes". 
The problem is that in general, it's not possible to detect whether a blob content // is string note or note (syncs can arrive out of order) - remoteEntityRow.content = Buffer.from(remoteEntityRow.content, 'base64'); + if (typeof remoteEntityRow.content === "string") { + remoteEntityRow.content = Buffer.from(remoteEntityRow.content, 'base64'); - if (remoteEntityRow.content.byteLength === 0) { - // there seems to be a bug which causes empty buffer to be stored as NULL which is then picked up as inconsistency - // (possibly not a problem anymore with the newer better-sqlite3) - remoteEntityRow.content = ""; + if (remoteEntityRow.content.byteLength === 0) { + // there seems to be a bug which causes empty buffer to be stored as NULL which is then picked up as inconsistency + // (possibly not a problem anymore with the newer better-sqlite3) + remoteEntityRow.content = ""; + } } } } -function updateNoteReordering(remoteEC, remoteEntityRow, instanceId) { +function updateNoteReordering(remoteEC: EntityChange, remoteEntityRow: EntityRow, instanceId: string) { if (!remoteEntityRow) { throw new Error(`Empty note_reordering body for: ${JSON.stringify(remoteEC)}`); } for (const key in remoteEntityRow) { - sql.execute("UPDATE branches SET notePosition = ? WHERE branchId = ?", [remoteEntityRow[key], key]); + sql.execute("UPDATE branches SET notePosition = ? 
WHERE branchId = ?", [remoteEntityRow[key as keyof EntityRow], key]); } entityChangesService.putEntityChangeWithInstanceId(remoteEC, instanceId); @@ -139,7 +154,7 @@ function updateNoteReordering(remoteEC, remoteEntityRow, instanceId) { return true; } -function eraseEntity(entityChange) { +function eraseEntity(entityChange: EntityChange) { const {entityName, entityId} = entityChange; const entityNames = [ @@ -161,7 +176,7 @@ function eraseEntity(entityChange) { sql.execute(`DELETE FROM ${entityName} WHERE ${primaryKeyName} = ?`, [entityId]); } -function logUpdateContext(updateContext) { +function logUpdateContext(updateContext: UpdateContext) { const message = JSON.stringify(updateContext) .replaceAll('"', '') .replaceAll(":", ": ") @@ -170,6 +185,6 @@ function logUpdateContext(updateContext) { log.info(message.substr(1, message.length - 2)); } -module.exports = { +export = { updateEntities }; diff --git a/src/services/task_context.js b/src/services/task_context.ts similarity index 75% rename from src/services/task_context.js rename to src/services/task_context.ts index 58530ffece..bacf3e8f81 100644 --- a/src/services/task_context.js +++ b/src/services/task_context.ts @@ -1,12 +1,21 @@ "use strict"; -const ws = require('./ws.js'); +import { TaskData } from './task_context_interface'; +import ws = require('./ws'); // taskId => TaskContext -const taskContexts = {}; +const taskContexts: Record<string, TaskContext> = {}; class TaskContext { - constructor(taskId, taskType = null, data = {}) { + + private taskId: string; + private taskType: string | null; + private progressCount: number; + private lastSentCountTs: number; + data: TaskData | null; + noteDeletionHandlerTriggered: boolean; + + constructor(taskId: string, taskType: string | null = null, data: {} | null = {}) { this.taskId = taskId; this.taskType = taskType; this.data = data; @@ -23,8 +32,7 @@ class TaskContext { this.increaseProgressCount(); } - /** @returns {TaskContext} */ - static getInstance(taskId, 
taskType, data = null) { + static getInstance(taskId: string, taskType: string, data: {} | null = null): TaskContext { if (!taskContexts[taskId]) { taskContexts[taskId] = new TaskContext(taskId, taskType, data); } @@ -48,7 +56,7 @@ class TaskContext { } } - reportError(message) { + reportError(message: string) { ws.sendMessageToAllClients({ type: 'taskError', taskId: this.taskId, @@ -58,7 +66,7 @@ class TaskContext { }); } - taskSucceeded(result) { + taskSucceeded(result?: string) { ws.sendMessageToAllClients({ type: 'taskSucceeded', taskId: this.taskId, @@ -69,4 +77,4 @@ class TaskContext { } } -module.exports = TaskContext; +export = TaskContext; diff --git a/src/services/task_context_interface.ts b/src/services/task_context_interface.ts new file mode 100644 index 0000000000..3c359d7423 --- /dev/null +++ b/src/services/task_context_interface.ts @@ -0,0 +1,7 @@ +export interface TaskData { + safeImport?: boolean; + textImportedAsText?: boolean; + codeImportedAsCode?: boolean; + shrinkImages?: boolean; + replaceUnderscoresWithSpaces?: boolean; +} diff --git a/src/services/tray.js b/src/services/tray.ts similarity index 84% rename from src/services/tray.js rename to src/services/tray.ts index bb32a78bb2..48a8ebaae3 100644 --- a/src/services/tray.js +++ b/src/services/tray.ts @@ -1,13 +1,9 @@ -const { Menu, Tray } = require('electron'); -const path = require('path'); -const windowService = require('./window.js'); -const optionService = require('./options.js'); +import { Menu, Tray } from 'electron'; +import path = require('path'); +import windowService = require('./window'); +import optionService = require('./options'); -const UPDATE_TRAY_EVENTS = [ - 'minimize', 'maximize', 'show', 'hide' -] - -let tray = null; +let tray: Tray; // `mainWindow.isVisible` doesn't work with `mainWindow.show` and `mainWindow.hide` - it returns `false` when the window // is minimized let isVisible = true; @@ -37,22 +33,25 @@ const getIconPath = () => { } const registerVisibilityListener 
= () => { const mainWindow = windowService.getMainWindow(); + if (!mainWindow) { return; } // They need to be registered before the tray updater is registered mainWindow.on('show', () => { isVisible = true; + updateTrayMenu(); }); mainWindow.on('hide', () => { isVisible = false; + updateTrayMenu(); }); - UPDATE_TRAY_EVENTS.forEach(eventName => { - mainWindow.on(eventName, updateTrayMenu) - }); + mainWindow.on("minimize", updateTrayMenu); + mainWindow.on("maximize", updateTrayMenu); } const updateTrayMenu = () => { const mainWindow = windowService.getMainWindow(); + if (!mainWindow) { return; } const contextMenu = Menu.buildFromTemplate([ { @@ -83,6 +82,7 @@ const updateTrayMenu = () => { } const changeVisibility = () => { const window = windowService.getMainWindow(); + if (!window) { return; } if (isVisible) { window.hide(); @@ -106,6 +106,6 @@ function createTray() { registerVisibilityListener(); } -module.exports = { +export = { createTray } diff --git a/src/services/tree.js b/src/services/tree.ts similarity index 79% rename from src/services/tree.js rename to src/services/tree.ts index 3973b0f6ee..d731fe6b0c 100644 --- a/src/services/tree.js +++ b/src/services/tree.ts @@ -1,12 +1,13 @@ "use strict"; -const sql = require('./sql.js'); -const log = require('./log.js'); -const BBranch = require('../becca/entities/bbranch.js'); -const entityChangesService = require('./entity_changes.js'); -const becca = require('../becca/becca.js'); - -function validateParentChild(parentNoteId, childNoteId, branchId = null) { +import sql = require('./sql'); +import log = require('./log'); +import BBranch = require('../becca/entities/bbranch'); +import entityChangesService = require('./entity_changes'); +import becca = require('../becca/becca'); +import BNote = require('../becca/entities/bnote'); + +function validateParentChild(parentNoteId: string, childNoteId: string, branchId: string | null = null) { if (['root', '_hidden', '_share', '_lbRoot', '_lbAvailableLaunchers', 
'_lbVisibleLaunchers'].includes(childNoteId)) { return { branch: null, success: false, message: `Cannot change this note's location.` }; } @@ -25,7 +26,7 @@ function validateParentChild(parentNoteId, childNoteId, branchId = null) { return { branch: existingBranch, success: false, - message: `Note "${childNote.title}" note already exists in the "${parentNote.title}".` + message: `Note "${childNote?.title}" note already exists in the "${parentNote?.title}".` }; } @@ -37,7 +38,7 @@ function validateParentChild(parentNoteId, childNoteId, branchId = null) { }; } - if (parentNoteId !== '_lbBookmarks' && becca.getNote(parentNoteId).type === 'launcher') { + if (parentNoteId !== '_lbBookmarks' && becca.getNote(parentNoteId)?.type === 'launcher') { return { branch: null, success: false, @@ -51,7 +52,7 @@ function validateParentChild(parentNoteId, childNoteId, branchId = null) { /** * Tree cycle can be created when cloning or when moving existing clone. This method should detect both cases. */ -function wouldAddingBranchCreateCycle(parentNoteId, childNoteId) { +function wouldAddingBranchCreateCycle(parentNoteId: string, childNoteId: string) { if (parentNoteId === childNoteId) { return true; } @@ -70,20 +71,22 @@ function wouldAddingBranchCreateCycle(parentNoteId, childNoteId) { return parentAncestorNoteIds.some(parentAncestorNoteId => childSubtreeNoteIds.has(parentAncestorNoteId)); } -function sortNotes(parentNoteId, customSortBy = 'title', reverse = false, foldersFirst = false, sortNatural = false, sortLocale) { +function sortNotes(parentNoteId: string, customSortBy: string = 'title', reverse = false, foldersFirst = false, sortNatural = false, _sortLocale?: string | null) { if (!customSortBy) { customSortBy = 'title'; } - if (!sortLocale) { - // sortLocale can not be empty string or null value, default value must be set to undefined. - sortLocale = undefined; - } + // sortLocale can not be empty string or null value, default value must be set to undefined. 
+ const sortLocale = (_sortLocale || undefined); sql.transactional(() => { - const notes = becca.getNote(parentNoteId).getChildNotes(); + const note = becca.getNote(parentNoteId); + if (!note) { + throw new Error("Unable to find note"); + } - const normalize = obj => (obj && typeof obj === 'string') ? obj.toLowerCase() : obj; + const notes = note.getChildNotes(); + const normalize = (obj: any) => (obj && typeof obj === 'string') ? obj.toLowerCase() : obj; notes.sort((a, b) => { if (foldersFirst) { @@ -96,7 +99,7 @@ function sortNotes(parentNoteId, customSortBy = 'title', reverse = false, folder } } - function fetchValue(note, key) { + function fetchValue(note: BNote, key: string) { let rawValue; if (key === 'title') { @@ -105,14 +108,14 @@ function sortNotes(parentNoteId, customSortBy = 'title', reverse = false, folder rawValue = prefix ? `${prefix} - ${note.title}` : note.title; } else { rawValue = ['dateCreated', 'dateModified'].includes(key) - ? note[key] + ? (note as any)[key] : note.getLabelValue(key); } return normalize(rawValue); } - function compare(a, b) { + function compare(a: string, b: string) { if (!sortNatural) { // alphabetical sort return b === null || b === undefined || a < b ? 
-1 : 1; @@ -160,6 +163,7 @@ function sortNotes(parentNoteId, customSortBy = 'title', reverse = false, folder for (const note of notes) { const branch = note.getParentBranches().find(b => b.parentNoteId === parentNoteId); + if (!branch) { continue; } if (branch.noteId === '_hidden') { position = 999_999_999; @@ -182,9 +186,8 @@ function sortNotes(parentNoteId, customSortBy = 'title', reverse = false, folder }); } -function sortNotesIfNeeded(parentNoteId) { +function sortNotesIfNeeded(parentNoteId: string) { const parentNote = becca.getNote(parentNoteId); - if (!parentNote) { return; } @@ -206,7 +209,7 @@ function sortNotesIfNeeded(parentNoteId) { /** * @deprecated this will be removed in the future */ -function setNoteToParent(noteId, prefix, parentNoteId) { +function setNoteToParent(noteId: string, prefix: string, parentNoteId: string) { const parentNote = becca.getNote(parentNoteId); if (parentNoteId && !parentNote) { @@ -215,7 +218,7 @@ function setNoteToParent(noteId, prefix, parentNoteId) { } // case where there might be more such branches is ignored. It's expected there should be just one - const branchId = sql.getValue("SELECT branchId FROM branches WHERE isDeleted = 0 AND noteId = ? AND prefix = ?", [noteId, prefix]); + const branchId = sql.getValue<string>("SELECT branchId FROM branches WHERE isDeleted = 0 AND noteId = ? AND prefix = ?", [noteId, prefix]); const branch = becca.getBranch(branchId); if (branch) { @@ -233,12 +236,15 @@ function setNoteToParent(noteId, prefix, parentNoteId) { } else if (parentNoteId) { const note = becca.getNote(noteId); + if (!note) { + throw new Error(`Cannot find note '${noteId}.`); + } if (note.isDeleted) { throw new Error(`Cannot create a branch for '${noteId}' which is deleted.`); } - const branchId = sql.getValue('SELECT branchId FROM branches WHERE isDeleted = 0 AND noteId = ? 
AND parentNoteId = ?', [noteId, parentNoteId]); + const branchId = sql.getValue<string>('SELECT branchId FROM branches WHERE isDeleted = 0 AND noteId = ? AND parentNoteId = ?', [noteId, parentNoteId]); const branch = becca.getBranch(branchId); if (branch) { @@ -255,7 +261,7 @@ function setNoteToParent(noteId, prefix, parentNoteId) { } } -module.exports = { +export = { validateParentChild, sortNotes, sortNotesIfNeeded, diff --git a/src/services/utils.js b/src/services/utils.ts similarity index 72% rename from src/services/utils.js rename to src/services/utils.ts index 2e99c6ef05..6b2974df0c 100644 --- a/src/services/utils.js +++ b/src/services/utils.ts @@ -1,18 +1,18 @@ "use strict"; -const crypto = require('crypto'); +import crypto = require('crypto'); const randtoken = require('rand-token').generator({source: 'crypto'}); -const unescape = require('unescape'); -const escape = require('escape-html'); -const sanitize = require("sanitize-filename"); -const mimeTypes = require('mime-types'); -const path = require('path'); +import unescape = require('unescape'); +import escape = require('escape-html'); +import sanitize = require("sanitize-filename"); +import mimeTypes = require('mime-types'); +import path = require('path'); function newEntityId() { return randomString(12); } -function randomString(length) { +function randomString(length: number): string { return randtoken.generate(length); } @@ -20,11 +20,11 @@ function randomSecureToken(bytes = 32) { return crypto.randomBytes(bytes).toString('base64'); } -function md5(content) { +function md5(content: crypto.BinaryLike) { return crypto.createHash('md5').update(content).digest('hex'); } -function hashedBlobId(content) { +function hashedBlobId(content: string | Buffer) { if (content === null || content === undefined) { content = ""; } @@ -41,19 +41,16 @@ function hashedBlobId(content) { return kindaBase62Hash.substr(0, 20); } -function toBase64(plainText) { +function toBase64(plainText: string | Buffer) { return 
Buffer.from(plainText).toString('base64'); } -/** - * @returns {Buffer} - */ -function fromBase64(encodedText) { +function fromBase64(encodedText: string) { return Buffer.from(encodedText, 'base64'); } -function hmac(secret, value) { - const hmac = crypto.createHmac('sha256', Buffer.from(secret.toString(), 'ASCII')); +function hmac(secret: any, value: any) { + const hmac = crypto.createHmac('sha256', Buffer.from(secret.toString(), 'ascii')); hmac.update(value.toString()); return hmac.digest('base64'); } @@ -62,30 +59,30 @@ function isElectron() { return !!process.versions['electron']; } -function hash(text) { +function hash(text: string) { text = text.normalize(); return crypto.createHash('sha1').update(text).digest('base64'); } -function isEmptyOrWhitespace(str) { +function isEmptyOrWhitespace(str: string) { return str === null || str.match(/^ *$/) !== null; } -function sanitizeSqlIdentifier(str) { +function sanitizeSqlIdentifier(str: string) { return str.replace(/[^A-Za-z0-9_]/g, ""); } -function escapeHtml(str) { +function escapeHtml(str: string) { return escape(str); } -function unescapeHtml(str) { +function unescapeHtml(str: string) { return unescape(str); } -function toObject(array, fn) { - const obj = {}; +function toObject<T, K extends string | number | symbol, V>(array: T[], fn: (item: T) => [K, V]): Record<K, V> { + const obj: Record<K, V> = {} as Record<K, V>; // TODO: unsafe? for (const item of array) { const ret = fn(item); @@ -96,12 +93,12 @@ function toObject(array, fn) { return obj; } -function stripTags(text) { +function stripTags(text: string) { return text.replace(/<(?:.|\n)*?>/gm, ''); } -function union(a, b) { - const obj = {}; +function union<T extends string | number | symbol>(a: T[], b: T[]): T[] { + const obj: Record<T, T> = {} as Record<T, T>; // TODO: unsafe? 
for (let i = a.length-1; i >= 0; i--) { obj[a[i]] = a[i]; @@ -111,7 +108,7 @@ function union(a, b) { obj[b[i]] = b[i]; } - const res = []; + const res: T[] = []; for (const k in obj) { if (obj.hasOwnProperty(k)) { // <-- optional @@ -122,7 +119,7 @@ function union(a, b) { return res; } -function escapeRegExp(str) { +function escapeRegExp(str: string) { return str.replace(/([.*+?^=!:${}()|\[\]\/\\])/g, "\\$1"); } @@ -135,7 +132,7 @@ function crash() { } } -function sanitizeFilenameForHeader(filename) { +function sanitizeFilenameForHeader(filename: string) { let sanitizedFilename = sanitize(filename); if (sanitizedFilename.trim().length === 0) { @@ -145,7 +142,7 @@ function sanitizeFilenameForHeader(filename) { return encodeURIComponent(sanitizedFilename); } -function getContentDisposition(filename) { +function getContentDisposition(filename: string) { const sanitizedFilename = sanitizeFilenameForHeader(filename); return `file; filename="${sanitizedFilename}"; filename*=UTF-8''${sanitizedFilename}`; @@ -159,24 +156,24 @@ const STRING_MIME_TYPES = [ "image/svg+xml" ]; -function isStringNote(type, mime) { +function isStringNote(type: string, mime: string) { // render and book are string note in the sense that they are expected to contain empty string return ["text", "code", "relationMap", "search", "render", "book", "mermaid", "canvas"].includes(type) || mime.startsWith('text/') || STRING_MIME_TYPES.includes(mime); } -function quoteRegex(url) { +function quoteRegex(url: string) { return url.replace(/[.*+\-?^${}()|[\]\\]/g, '\\$&'); } -function replaceAll(string, replaceWhat, replaceWith) { +function replaceAll(string: string, replaceWhat: string, replaceWith: string) { const quotedReplaceWhat = quoteRegex(replaceWhat); return string.replace(new RegExp(quotedReplaceWhat, "g"), replaceWith); } -function formatDownloadTitle(fileName, type, mime) { +function formatDownloadTitle(fileName: string, type: string, mime: string) { if (!fileName) { fileName = "untitled"; } @@ 
-218,7 +215,7 @@ function formatDownloadTitle(fileName, type, mime) { } } -function removeTextFileExtension(filePath) { +function removeTextFileExtension(filePath: string) { const extension = path.extname(filePath).toLowerCase(); if (extension === '.md' || extension === '.markdown' || extension === '.html') { @@ -229,8 +226,8 @@ function removeTextFileExtension(filePath) { } } -function getNoteTitle(filePath, replaceUnderscoresWithSpaces, noteMeta) { - if (noteMeta) { +function getNoteTitle(filePath: string, replaceUnderscoresWithSpaces: boolean, noteMeta?: { title?: string }) { + if (noteMeta?.title) { return noteMeta.title; } else { const basename = path.basename(removeTextFileExtension(filePath)); @@ -241,7 +238,7 @@ function getNoteTitle(filePath, replaceUnderscoresWithSpaces, noteMeta) { } } -function timeLimit(promise, limitMs, errorMessage) { +function timeLimit<T>(promise: Promise<T>, limitMs: number, errorMessage?: string): Promise<T> { if (!promise || !promise.then) { // it's not actually a promise return promise; } @@ -267,23 +264,28 @@ function timeLimit(promise, limitMs, errorMessage) { }); } -function deferred() { +interface DeferredPromise<T> extends Promise<T> { + resolve: (value: T | PromiseLike<T>) => void, + reject: (reason?: any) => void +} + +function deferred<T>(): DeferredPromise<T> { return (() => { - let resolve, reject; + let resolve!: (value: T | PromiseLike<T>) => void; + let reject!: (reason?: any) => void; - let promise = new Promise((res, rej) => { + let promise = new Promise<T>((res, rej) => { resolve = res; reject = rej; - }); + }) as DeferredPromise<T>; promise.resolve = resolve; promise.reject = reject; - - return promise; + return promise as DeferredPromise<T>; })(); } -function removeDiacritic(str) { +function removeDiacritic(str: string) { if (!str) { return ""; } @@ -291,12 +293,12 @@ function removeDiacritic(str) { return str.normalize("NFD").replace(/\p{Diacritic}/gu, ""); } -function normalize(str) { +function 
normalize(str: string) { return removeDiacritic(str).toLowerCase(); } -function toMap(list, key) { - const map = {}; +function toMap<T extends Record<string, any>>(list: T[], key: keyof T): Record<string, T> { + const map: Record<string, T> = {}; for (const el of list) { map[el[key]] = el; @@ -305,11 +307,11 @@ function toMap(list, key) { return map; } -function isString(x) { +function isString(x: any) { return Object.prototype.toString.call(x) === "[object String]"; } -module.exports = { +export = { randomSecureToken, randomString, md5, diff --git a/src/services/window.js b/src/services/window.ts similarity index 79% rename from src/services/window.js rename to src/services/window.ts index 68f70010ab..80f7f7e834 100644 --- a/src/services/window.js +++ b/src/services/window.ts @@ -1,31 +1,29 @@ -const path = require('path'); -const url = require("url"); -const port = require('./port.js'); -const optionService = require('./options.js'); -const env = require('./env.js'); -const log = require('./log.js'); -const sqlInit = require('./sql_init.js'); -const cls = require('./cls.js'); -const keyboardActionsService = require('./keyboard_actions.js'); -const {ipcMain} = require('electron'); +import path = require('path'); +import url = require("url"); +import port = require('./port'); +import optionService = require('./options'); +import env = require('./env'); +import log = require('./log'); +import sqlInit = require('./sql_init'); +import cls = require('./cls'); +import keyboardActionsService = require('./keyboard_actions'); +import remoteMain = require("@electron/remote/main") +import { App, BrowserWindow, WebContents, ipcMain } from 'electron'; // Prevent the window being garbage collected -/** @type {Electron.BrowserWindow} */ -let mainWindow; -/** @type {Electron.BrowserWindow} */ -let setupWindow; +let mainWindow: BrowserWindow | null; +let setupWindow: BrowserWindow | null; -async function createExtraWindow(extraWindowHash) { +async function 
createExtraWindow(extraWindowHash: string) { const spellcheckEnabled = optionService.getOptionBool('spellCheckEnabled'); - const {BrowserWindow} = require('electron'); + const { BrowserWindow } = require('electron'); const win = new BrowserWindow({ width: 1000, height: 800, title: 'Trilium Notes', webPreferences: { - enableRemoteModule: true, nodeIntegration: true, contextIsolation: false, spellcheck: spellcheckEnabled @@ -44,7 +42,7 @@ ipcMain.on('create-extra-window', (event, arg) => { createExtraWindow(arg.extraWindowHash); }); -async function createMainWindow(app) { +async function createMainWindow(app: App) { const windowStateKeeper = require('electron-window-state'); // should not be statically imported const mainWindowState = windowStateKeeper({ @@ -55,7 +53,7 @@ async function createMainWindow(app) { const spellcheckEnabled = optionService.getOptionBool('spellCheckEnabled'); - const {BrowserWindow} = require('electron'); // should not be statically imported + const { BrowserWindow } = require('electron'); // should not be statically imported mainWindow = new BrowserWindow({ x: mainWindowState.x, @@ -64,7 +62,6 @@ async function createMainWindow(app) { height: mainWindowState.height, title: 'Trilium Notes', webPreferences: { - enableRemoteModule: true, nodeIntegration: true, contextIsolation: false, spellcheck: spellcheckEnabled, @@ -95,8 +92,12 @@ async function createMainWindow(app) { }); } -function configureWebContents(webContents, spellcheckEnabled) { - require("@electron/remote/main").enable(webContents); +function configureWebContents(webContents: WebContents, spellcheckEnabled: boolean) { + if (!mainWindow) { + return; + } + + remoteMain.enable(webContents); mainWindow.webContents.setWindowOpenHandler((details) => { require("electron").shell.openExternal(details.url); @@ -108,8 +109,7 @@ function configureWebContents(webContents, spellcheckEnabled) { const parsedUrl = url.parse(targetUrl); // we still need to allow internal redirects from setup and 
migration pages - if (!['localhost', '127.0.0.1'].includes(parsedUrl.hostname) || (parsedUrl.path && parsedUrl.path !== '/' && parsedUrl.path !== '/?')) { - + if (!['localhost', '127.0.0.1'].includes(parsedUrl.hostname || "") || (parsedUrl.path && parsedUrl.path !== '/' && parsedUrl.path !== '/?')) { ev.preventDefault(); } }); @@ -128,7 +128,7 @@ function getIcon() { } async function createSetupWindow() { - const {BrowserWindow} = require('electron'); // should not be statically imported + const { BrowserWindow } = require('electron'); // should not be statically imported setupWindow = new BrowserWindow({ width: 800, height: 800, @@ -152,7 +152,7 @@ function closeSetupWindow() { } async function registerGlobalShortcuts() { - const {globalShortcut} = require('electron'); + const { globalShortcut } = require('electron'); await sqlInit.dbReady; @@ -168,6 +168,10 @@ async function registerGlobalShortcuts() { const translatedShortcut = shortcut.substr(7); const result = globalShortcut.register(translatedShortcut, cls.wrap(() => { + if (!mainWindow) { + return; + } + // window may be hidden / not in focus mainWindow.focus(); @@ -189,8 +193,7 @@ function getMainWindow() { return mainWindow; } - -module.exports = { +export = { createMainWindow, createSetupWindow, closeSetupWindow, diff --git a/src/services/ws.js b/src/services/ws.ts similarity index 71% rename from src/services/ws.js rename to src/services/ws.ts index 9dead1866d..6ff81e1db2 100644 --- a/src/services/ws.js +++ b/src/services/ws.ts @@ -1,15 +1,19 @@ -const WebSocket = require('ws'); -const utils = require('./utils.js'); -const log = require('./log.js'); -const sql = require('./sql.js'); -const cls = require('./cls.js'); -const config = require('./config.js'); -const syncMutexService = require('./sync_mutex.js'); -const protectedSessionService = require('./protected_session.js'); -const becca = require('../becca/becca.js'); -const AbstractBeccaEntity = require('../becca/entities/abstract_becca_entity.js'); - 
-const env = require('./env.js'); +import WebSocket = require('ws'); +import utils = require('./utils'); +import log = require('./log'); +import sql = require('./sql'); +import cls = require('./cls'); +import config = require('./config'); +import syncMutexService = require('./sync_mutex'); +import protectedSessionService = require('./protected_session'); +import becca = require('../becca/becca'); +import AbstractBeccaEntity = require('../becca/entities/abstract_becca_entity'); + +import env = require('./env'); +import { IncomingMessage, Server } from 'http'; +import { EntityChange } from './entity_changes_interface'; +import { TaskData } from './task_context_interface'; + if (env.isDev()) { const chokidar = require('chokidar'); const debounce = require('debounce'); @@ -21,15 +25,42 @@ if (env.isDev()) { .on('unlink', debouncedReloadFrontend); } -let webSocketServer; -let lastSyncedPush = null; +let webSocketServer!: WebSocket.Server; +let lastSyncedPush: number | null = null; + +interface Message { + type: string; + data?: { + lastSyncedPush?: number | null, + entityChanges?: any[], + shrinkImages?: boolean + } | null, + lastSyncedPush?: number | null, + + progressCount?: number; + taskId?: string; + taskType?: string | null; + message?: string; + reason?: string; + result?: string; + + script?: string; + params?: any[]; + noteId?: string; + messages?: string[]; + startNoteId?: string; + currentNoteId?: string; + originEntityName?: "notes"; + originEntityId?: string | null; +} -function init(httpServer, sessionParser) { +type SessionParser = (req: IncomingMessage, params: {}, cb: () => void) => void; +function init(httpServer: Server, sessionParser: SessionParser) { webSocketServer = new WebSocket.Server({ verifyClient: (info, done) => { sessionParser(info.req, {}, () => { const allowed = utils.isElectron() - || info.req.session.loggedIn + || (info.req as any).session.loggedIn || (config.General && config.General.noAuthentication); if (!allowed) { @@ -43,12 +74,12 
@@ function init(httpServer, sessionParser) { }); webSocketServer.on('connection', (ws, req) => { - ws.id = utils.randomString(10); + (ws as any).id = utils.randomString(10); console.log(`websocket client connected`); ws.on('message', async messageJson => { - const message = JSON.parse(messageJson); + const message = JSON.parse(messageJson as any); if (message.type === 'log-error') { log.info(`JS Error: ${message.error}\r @@ -73,7 +104,7 @@ Stack: ${message.stack}`); }); } -function sendMessage(client, message) { +function sendMessage(client: WebSocket, message: Message) { const jsonStr = JSON.stringify(message); if (client.readyState === WebSocket.OPEN) { @@ -81,7 +112,7 @@ function sendMessage(client, message) { } } -function sendMessageToAllClients(message) { +function sendMessageToAllClients(message: Message) { const jsonStr = JSON.stringify(message); if (webSocketServer) { @@ -97,7 +128,7 @@ function sendMessageToAllClients(message) { } } -function fillInAdditionalProperties(entityChange) { +function fillInAdditionalProperties(entityChange: EntityChange) { if (entityChange.isErased) { return; } @@ -123,14 +154,14 @@ function fillInAdditionalProperties(entityChange) { if (!entityChange.entity) { entityChange.entity = sql.getRow(`SELECT * FROM notes WHERE noteId = ?`, [entityChange.entityId]); - if (entityChange.entity.isProtected) { - entityChange.entity.title = protectedSessionService.decryptString(entityChange.entity.title); + if (entityChange.entity?.isProtected) { + entityChange.entity.title = protectedSessionService.decryptString(entityChange.entity.title || ""); } } } else if (entityChange.entityName === 'revisions') { - entityChange.noteId = sql.getValue(`SELECT noteId - FROM revisions - WHERE revisionId = ?`, [entityChange.entityId]); + entityChange.noteId = sql.getValue<string>(`SELECT noteId + FROM revisions + WHERE revisionId = ?`, [entityChange.entityId]); } else if (entityChange.entityName === 'note_reordering') { entityChange.positions = {}; @@ 
-138,7 +169,9 @@ function fillInAdditionalProperties(entityChange) { if (parentNote) { for (const childBranch of parentNote.getChildBranches()) { - entityChange.positions[childBranch.branchId] = childBranch.notePosition; + if (childBranch?.branchId) { + entityChange.positions[childBranch.branchId] = childBranch.notePosition; + } } } } else if (entityChange.entityName === 'options') { @@ -160,7 +193,7 @@ function fillInAdditionalProperties(entityChange) { } // entities with higher number can reference the entities with lower number -const ORDERING = { +const ORDERING: Record<string, number> = { "etapi_tokens": 0, "attributes": 2, "branches": 2, @@ -172,14 +205,17 @@ const ORDERING = { "options": 0 }; -function sendPing(client, entityChangeIds = []) { +function sendPing(client: WebSocket, entityChangeIds = []) { if (entityChangeIds.length === 0) { sendMessage(client, { type: 'ping' }); return; } - const entityChanges = sql.getManyRows(`SELECT * FROM entity_changes WHERE id IN (???)`, entityChangeIds); + const entityChanges = sql.getManyRows<EntityChange>(`SELECT * FROM entity_changes WHERE id IN (???)`, entityChangeIds); + if (!entityChanges) { + return; + } // sort entity changes since froca expects "referential order", i.e. referenced entities should already exist // in froca. 
@@ -190,7 +226,7 @@ function sendPing(client, entityChangeIds = []) { try { fillInAdditionalProperties(entityChange); } - catch (e) { + catch (e: any) { log.error(`Could not fill additional properties for entity change ${JSON.stringify(entityChange)} because of error: ${e.message}: ${e.stack}`); } } @@ -228,15 +264,15 @@ function syncFailed() { sendMessageToAllClients({ type: 'sync-failed', lastSyncedPush }); } -function reloadFrontend(reason) { +function reloadFrontend(reason: string) { sendMessageToAllClients({ type: 'reload-frontend', reason }); } -function setLastSyncedPush(entityChangeId) { +function setLastSyncedPush(entityChangeId: number) { lastSyncedPush = entityChangeId; } -module.exports = { +export = { init, sendMessageToAllClients, syncPushInProgress, diff --git a/src/share/content_renderer.js b/src/share/content_renderer.js index f29ca9307a..9065fec08e 100644 --- a/src/share/content_renderer.js +++ b/src/share/content_renderer.js @@ -1,6 +1,6 @@ const {JSDOM} = require("jsdom"); const shaca = require('./shaca/shaca.js'); -const assetPath = require('../services/asset_path.js'); +const assetPath = require('../services/asset_path'); const shareRoot = require('./share_root.js'); const escapeHtml = require('escape-html'); diff --git a/src/share/routes.js b/src/share/routes.js index 2805933a8b..4ad5a15c6c 100644 --- a/src/share/routes.js +++ b/src/share/routes.js @@ -7,11 +7,11 @@ const shaca = require('./shaca/shaca.js'); const shacaLoader = require('./shaca/shaca_loader.js'); const shareRoot = require('./share_root.js'); const contentRenderer = require('./content_renderer.js'); -const assetPath = require('../services/asset_path.js'); -const appPath = require('../services/app_path.js'); -const searchService = require('../services/search/services/search.js'); -const SearchContext = require('../services/search/search_context.js'); -const log = require('../services/log.js'); +const assetPath = require('../services/asset_path'); +const appPath = 
require('../services/app_path'); +const searchService = require('../services/search/services/search'); +const SearchContext = require('../services/search/search_context'); +const log = require('../services/log'); /** * @param {SNote} note @@ -236,7 +236,7 @@ function register(router) { addNoIndexHeader(note, res); - const utils = require('../services/utils.js'); + const utils = require('../services/utils'); const filename = utils.formatDownloadTitle(note.title, note.type, note.mime); @@ -304,7 +304,7 @@ function register(router) { addNoIndexHeader(attachment.note, res); - const utils = require('../services/utils.js'); + const utils = require('../services/utils'); const filename = utils.formatDownloadTitle(attachment.title, null, attachment.mime); diff --git a/src/share/shaca/entities/sattachment.js b/src/share/shaca/entities/sattachment.js index 46504ba14d..4b76fea2c9 100644 --- a/src/share/shaca/entities/sattachment.js +++ b/src/share/shaca/entities/sattachment.js @@ -1,7 +1,7 @@ "use strict"; -const sql = require('../../sql.js'); -const utils = require('../../../services/utils.js'); +const sql = require('../../sql'); +const utils = require('../../../services/utils'); const AbstractShacaEntity = require('./abstract_shaca_entity.js'); class SAttachment extends AbstractShacaEntity { diff --git a/src/share/shaca/entities/snote.js b/src/share/shaca/entities/snote.js index 167f238d02..fd889c23fa 100644 --- a/src/share/shaca/entities/snote.js +++ b/src/share/shaca/entities/snote.js @@ -1,7 +1,7 @@ "use strict"; -const sql = require('../../sql.js'); -const utils = require('../../../services/utils.js'); +const sql = require('../../sql'); +const utils = require('../../../services/utils'); const AbstractShacaEntity = require('./abstract_shaca_entity.js'); const escape = require('escape-html'); diff --git a/src/share/shaca/shaca_loader.js b/src/share/shaca/shaca_loader.js index 00ba631823..5d1648f495 100644 --- a/src/share/shaca/shaca_loader.js +++ 
b/src/share/shaca/shaca_loader.js @@ -1,14 +1,14 @@ "use strict"; -const sql = require('../sql.js'); +const sql = require('../sql'); const shaca = require('./shaca.js'); -const log = require('../../services/log.js'); +const log = require('../../services/log'); const SNote = require('./entities/snote.js'); const SBranch = require('./entities/sbranch.js'); const SAttribute = require('./entities/sattribute.js'); const SAttachment = require('./entities/sattachment.js'); const shareRoot = require('../share_root.js'); -const eventService = require('../../services/events.js'); +const eventService = require('../../services/events'); function load() { const start = Date.now(); diff --git a/src/share/sql.js b/src/share/sql.js index 07dd2fd85f..485c879212 100644 --- a/src/share/sql.js +++ b/src/share/sql.js @@ -1,7 +1,7 @@ "use strict"; const Database = require('better-sqlite3'); -const dataDir = require('../services/data_dir.js'); +const dataDir = require('../services/data_dir'); const dbConnection = new Database(dataDir.DOCUMENT_PATH, { readonly: true }); diff --git a/src/tools/generate_document.js b/src/tools/generate_document.js index 67f202a2bf..e716e7e016 100644 --- a/src/tools/generate_document.js +++ b/src/tools/generate_document.js @@ -3,12 +3,12 @@ * will create 1000 new notes and some clones into the current document.db */ -require('../becca/entity_constructor.js'); -const sqlInit = require('../services/sql_init.js'); -const noteService = require('../services/notes.js'); -const attributeService = require('../services/attributes.js'); -const cls = require('../services/cls.js'); -const cloningService = require('../services/cloning.js'); +require('../becca/entity_constructor'); +const sqlInit = require('../services/sql_init'); +const noteService = require('../services/notes'); +const attributeService = require('../services/attributes'); +const cls = require('../services/cls'); +const cloningService = require('../services/cloning'); const loremIpsum = 
require('lorem-ipsum').loremIpsum; const noteCount = parseInt(process.argv[2]); diff --git a/src/types.d.ts b/src/types.d.ts new file mode 100644 index 0000000000..9fe04a204e --- /dev/null +++ b/src/types.d.ts @@ -0,0 +1,32 @@ +/* + * This file contains type definitions for libraries that did not have one + * in its library or in `@types/*` packages. + */ + +declare module 'unescape' { + function unescape(str: string, type?: string): string; + export = unescape; +} + +declare module 'html2plaintext' { + function html2plaintext(htmlText: string): string; + export = html2plaintext; +} + +declare module 'normalize-strings' { + function normalizeString(string: string): string; + export = normalizeString; +} + +declare module 'joplin-turndown-plugin-gfm' { + import TurndownService = require("turndown"); + namespace gfm { + function gfm(service: TurndownService): void; + } + export = gfm; +} + +declare module 'is-animated' { + function isAnimated(buffer: Buffer): boolean; + export = isAnimated; +} \ No newline at end of file diff --git a/src/www.js b/src/www.js index 84f3abaa97..2059a3a7cb 100644 --- a/src/www.js +++ b/src/www.js @@ -45,7 +45,7 @@ function startTrilium() { * instead of the new one. This is complicated by the fact that it is possible to run multiple instances of Trilium * if port and data dir are configured separately. This complication is the source of the following weird usage. * - * The line below makes sure that the "second-instance" (process in window.js) is fired. Normally it returns a boolean + * The line below makes sure that the "second-instance" (process in window.ts) is fired. Normally it returns a boolean * indicating whether another instance is running or not, but we ignore that and kill the app only based on the port conflict. 
* * A bit weird is that "second-instance" is triggered also on the valid usecases (different port/data dir) and @@ -126,26 +126,26 @@ function startHttpServer() { } httpServer.on('error', error => { - if (!listenOnTcp || error.syscall !== 'listen') { + if (!listenOnTcp || error.syscall !== 'listen') { + throw error; + } + + // handle specific listen errors with friendly messages + switch (error.code) { + case 'EACCES': + console.error(`Port ${port} requires elevated privileges. It's recommended to use port above 1024.`); + process.exit(1); + break; + + case 'EADDRINUSE': + console.error(`Port ${port} is already in use. Most likely, another Trilium process is already running. You might try to find it, kill it, and try again.`); + process.exit(1); + break; + + default: throw error; - } - - // handle specific listen errors with friendly messages - switch (error.code) { - case 'EACCES': - console.error(`Port ${port} requires elevated privileges. It's recommended to use port above 1024.`); - process.exit(1); - break; - - case 'EADDRINUSE': - console.error(`Port ${port} is already in use. Most likely, another Trilium process is already running. 
You might try to find it, kill it, and try again.`); - process.exit(1); - break; - - default: - throw error; - } } + } ) httpServer.on('listening', () => { diff --git a/tsconfig.json b/tsconfig.json new file mode 100644 index 0000000000..31aa526d23 --- /dev/null +++ b/tsconfig.json @@ -0,0 +1,24 @@ +{ + "compilerOptions": { + "moduleResolution": "Node", + "declaration": false, + "sourceMap": true, + "outDir": "./dist", + "strict": true, + "noImplicitAny": true, + "resolveJsonModule": true, + "lib": ["ES2022"], + "downlevelIteration": true + }, + "include": [ + "./src/**/*.js", + "./src/**/*.ts" + ], + "exclude": ["./node_modules/**/*"], + "ts-node": { + "files": true + }, + "files": [ + "src/types.d.ts" + ] + } diff --git a/webpack.config.js b/webpack.config.js index 639642f529..2e3bd7b17c 100644 --- a/webpack.config.js +++ b/webpack.config.js @@ -1,10 +1,10 @@ const path = require('path'); -const assetPath = require('./src/services/asset_path.js'); +const assetPath = require('./src/services/asset_path'); module.exports = { mode: 'production', entry: { - setup: './src/public/app/setup.js', + setup: './src/public/app/setup.ts', mobile: './src/public/app/mobile.js', desktop: './src/public/app/desktop.js', },