From 11f7add94521fade32a7c500dbeadd7076be6f56 Mon Sep 17 00:00:00 2001 From: Emma Zhu Date: Tue, 6 Jun 2023 16:36:33 +0800 Subject: [PATCH] Filter blob by tags --- .vscode/launch.json | 105 +- src/blob/handlers/ContainerHandler.ts | 43 +- src/blob/handlers/ServiceHandler.ts | 42 +- src/blob/persistence/FilterBlobPage.ts | 128 + src/blob/persistence/IBlobMetadataStore.ts | 22 +- .../persistence/IBlobMetadataStore.ts.bak | 1132 ++++++ src/blob/persistence/LokiBlobMetadataStore.ts | 106 +- .../persistence/LokiBlobMetadataStore.ts.bak | 3423 ++++++++++++++++ .../QueryInterpreter/IQueryContext.ts | 1 + .../QueryInterpreter/QueryInterpreter.ts | 75 + .../QueryInterpreter/QueryNodes/AndNode.ts | 20 + .../QueryNodes/BinaryOperatorNode.ts | 14 + .../QueryNodes/ConstantNode.ts | 20 + .../QueryInterpreter/QueryNodes/EqualsNode.ts | 26 + .../QueryNodes/ExpressionNode.ts | 18 + .../QueryNodes/GreaterThanEqualNode.ts | 27 + .../QueryNodes/GreaterThanNode.ts | 28 + .../QueryInterpreter/QueryNodes/IQueryNode.ts | 14 + .../QueryInterpreter/QueryNodes/KeyNode.ts | 21 + .../QueryNodes/LessThanEqualNode.ts | 27 + .../QueryNodes/LessThanNode.ts | 27 + .../QueryNodes/NotEqualsNode.ts | 28 + .../QueryInterpreter/QueryNodes/OrNode.ts | 20 + .../QueryInterpreter/QueryParser.ts | 517 +++ src/blob/persistence/SqlBlobMetadataStore.ts | 123 +- .../persistence/SqlBlobMetadataStore.ts.bak | 3446 +++++++++++++++++ src/blob/utils/utils.ts | 74 +- tests/blob/apis/container.test.ts | 68 +- tests/blob/apis/service.test.ts | 92 +- 29 files changed, 9582 insertions(+), 105 deletions(-) create mode 100644 src/blob/persistence/FilterBlobPage.ts create mode 100644 src/blob/persistence/IBlobMetadataStore.ts.bak create mode 100644 src/blob/persistence/LokiBlobMetadataStore.ts.bak create mode 100644 src/blob/persistence/QueryInterpreter/IQueryContext.ts create mode 100644 src/blob/persistence/QueryInterpreter/QueryInterpreter.ts create mode 100644 src/blob/persistence/QueryInterpreter/QueryNodes/AndNode.ts create mode 100644 src/blob/persistence/QueryInterpreter/QueryNodes/BinaryOperatorNode.ts create mode 100644 src/blob/persistence/QueryInterpreter/QueryNodes/ConstantNode.ts create mode 100644 src/blob/persistence/QueryInterpreter/QueryNodes/EqualsNode.ts create mode 100644 src/blob/persistence/QueryInterpreter/QueryNodes/ExpressionNode.ts create mode 100644 src/blob/persistence/QueryInterpreter/QueryNodes/GreaterThanEqualNode.ts create mode 100644 src/blob/persistence/QueryInterpreter/QueryNodes/GreaterThanNode.ts create mode 100644 src/blob/persistence/QueryInterpreter/QueryNodes/IQueryNode.ts create mode 100644 src/blob/persistence/QueryInterpreter/QueryNodes/KeyNode.ts create mode 100644 src/blob/persistence/QueryInterpreter/QueryNodes/LessThanEqualNode.ts create mode 100644 src/blob/persistence/QueryInterpreter/QueryNodes/LessThanNode.ts create mode 100644 src/blob/persistence/QueryInterpreter/QueryNodes/NotEqualsNode.ts create mode 100644 src/blob/persistence/QueryInterpreter/QueryNodes/OrNode.ts create mode 100644 src/blob/persistence/QueryInterpreter/QueryParser.ts create mode 100644 src/blob/persistence/SqlBlobMetadataStore.ts.bak diff --git a/.vscode/launch.json b/.vscode/launch.json index 02d73d813..cc9c9d687 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -9,12 +9,22 @@ "request": "launch", "name": "Azurite Service - Loki", "cwd": "${workspaceFolder}", - "runtimeArgs": ["-r", "ts-node/register"], - "args": ["${workspaceFolder}/src/azurite.ts", "-d", "debug.log"], + "runtimeArgs": [ + "-r", + 
"ts-node/register" + ], + "args": [ + "${workspaceFolder}/src/azurite.ts", + "-d", + "debug.log" + ], "env": { "AZURITE_ACCOUNTS": "" }, - "skipFiles": ["node_modules/*/**", "/*/**"], + "skipFiles": [ + "node_modules/*/**", + "/*/**" + ], "outputCapture": "std" }, { @@ -35,8 +45,16 @@ "request": "launch", "name": "Azurite Service - Loki, Loose", "cwd": "${workspaceFolder}", - "runtimeArgs": ["-r", "ts-node/register"], - "args": ["${workspaceFolder}/src/azurite.ts", "-d", "debug.log", "-L"], + "runtimeArgs": [ + "-r", + "ts-node/register" + ], + "args": [ + "${workspaceFolder}/src/azurite.ts", + "-d", + "debug.log", + "-L" + ], "env": { "AZURITE_ACCOUNTS": "" }, @@ -47,7 +65,10 @@ "request": "launch", "name": "Azurite Service - Loki, Loose, HTTPS, OAuth", "cwd": "${workspaceFolder}", - "runtimeArgs": ["-r", "ts-node/register"], + "runtimeArgs": [ + "-r", + "ts-node/register" + ], "args": [ "${workspaceFolder}/src/azurite.ts", "-L", @@ -70,7 +91,10 @@ "request": "launch", "name": "Azurite Queue Service - Loki, HTTPS, OAuth", "cwd": "${workspaceFolder}", - "runtimeArgs": ["-r", "ts-node/register"], + "runtimeArgs": [ + "-r", + "ts-node/register" + ], "args": [ "${workspaceFolder}/src/queue/main.ts", "-d", @@ -92,7 +116,10 @@ "request": "launch", "name": "Azurite Blob Service - Loki, HTTPS, OAuth", "cwd": "${workspaceFolder}", - "runtimeArgs": ["-r", "ts-node/register"], + "runtimeArgs": [ + "-r", + "ts-node/register" + ], "args": [ "${workspaceFolder}/src/blob/main.ts", "-d", @@ -114,8 +141,15 @@ "request": "launch", "name": "Azurite Blob Service - SQL", "cwd": "${workspaceFolder}", - "runtimeArgs": ["-r", "ts-node/register"], - "args": ["${workspaceFolder}/src/blob/main.ts", "-d", "debug.log"], + "runtimeArgs": [ + "-r", + "ts-node/register" + ], + "args": [ + "${workspaceFolder}/src/blob/main.ts", + "-d", + "debug.log" + ], "env": { "AZURITE_DB": "mysql://root:my-secret-pw@127.0.0.1:3306/azurite_blob", "AZURITE_ACCOUNTS": "" @@ -127,8 +161,15 @@ "request": "launch", "name": "Azurite Table Service - Loki", "cwd": "${workspaceFolder}", - "runtimeArgs": ["-r", "ts-node/register"], - "args": ["${workspaceFolder}/src/table/main.ts", "-d", "debug.log"], + "runtimeArgs": [ + "-r", + "ts-node/register" + ], + "args": [ + "${workspaceFolder}/src/table/main.ts", + "-d", + "debug.log" + ], "env": { "AZURITE_ACCOUNTS": "" }, @@ -139,8 +180,13 @@ "request": "launch", "name": "Current TS File", "cwd": "${workspaceFolder}", - "runtimeArgs": ["-r", "ts-node/register"], - "args": ["${workspaceFolder}/${relativeFile}"], + "runtimeArgs": [ + "-r", + "ts-node/register" + ], + "args": [ + "${workspaceFolder}/${relativeFile}" + ], "outputCapture": "std" }, { @@ -148,7 +194,10 @@ "request": "launch", "name": "Current Mocha TS File - Loki", "cwd": "${workspaceFolder}", - "runtimeArgs": ["-r", "ts-node/register"], + "runtimeArgs": [ + "-r", + "ts-node/register" + ], "args": [ "${workspaceFolder}/node_modules/mocha/bin/_mocha", "-u", @@ -162,7 +211,7 @@ "AZURITE_ACCOUNTS": "", "AZURE_TABLE_STORAGE": "", "DATATABLES_ACCOUNT_NAME": "", - "DATATABLES_ACCOUNT_KEY" : "", + "DATATABLES_ACCOUNT_KEY": "", "AZURE_DATATABLES_STORAGE_STRING": "https://.table.core.windows.net", "AZURE_DATATABLES_SAS": "?", "NODE_TLS_REJECT_UNAUTHORIZED": "0" @@ -175,7 +224,10 @@ "request": "launch", "name": "EXE Mocha TS File - Loki", "cwd": "${workspaceFolder}", - "runtimeArgs": ["-r", "ts-node/register"], + "runtimeArgs": [ + "-r", + "ts-node/register" + ], "args": [ "${workspaceFolder}/node_modules/mocha/bin/_mocha", "-u", @@ -183,7 +235,7 
@@ "--timeout", "999999", "--colors", - "${workspaceFolder}/tests/exe.test.ts", + "${workspaceFolder}/tests/exe.test.ts", "--exit" ], "env": { @@ -199,7 +251,10 @@ "request": "launch", "name": "Current Mocha TS File - SQL", "cwd": "${workspaceFolder}", - "runtimeArgs": ["-r", "ts-node/register"], + "runtimeArgs": [ + "-r", + "ts-node/register" + ], "args": [ "${workspaceFolder}/node_modules/mocha/bin/_mocha", "-u", @@ -211,7 +266,7 @@ ], "env": { "AZURITE_ACCOUNTS": "", - "AZURITE_TEST_DB": "mysql://root:my-secret-pw@127.0.0.1:3306/azurite_blob_test", + "AZURITE_TEST_DB": "mysql://root:!!123abc@127.0.0.1:3306/azurite_blob", "NODE_TLS_REJECT_UNAUTHORIZED": "0" }, "internalConsoleOptions": "openOnSessionStart", @@ -250,9 +305,13 @@ "type": "extensionHost", "request": "launch", "runtimeExecutable": "${execPath}", - "args": ["--extensionDevelopmentPath=${workspaceFolder}"], - "outFiles": ["${workspaceFolder}/dist/**/*.js"], + "args": [ + "--extensionDevelopmentPath=${workspaceFolder}" + ], + "outFiles": [ + "${workspaceFolder}/dist/**/*.js" + ], "preLaunchTask": "npm: watch" } ] -} +} \ No newline at end of file diff --git a/src/blob/handlers/ContainerHandler.ts b/src/blob/handlers/ContainerHandler.ts index 9fde0d7b4..450dc8049 100644 --- a/src/blob/handlers/ContainerHandler.ts +++ b/src/blob/handlers/ContainerHandler.ts @@ -368,7 +368,48 @@ export default class ContainerHandler extends BaseHandler public async filterBlobs(options: Models.ContainerFilterBlobsOptionalParams, context: Context ): Promise { - throw new NotImplementedError(context.contextId!); + const blobCtx = new BlobStorageContext(context); + const accountName = blobCtx.account!; + const containerName = blobCtx.container!; + await this.metadataStore.checkContainerExist( + context, + accountName, + containerName + ); + + const request = context.request!; + const marker = options.marker; + options.marker = options.marker || ""; + if ( + options.maxresults === undefined || + options.maxresults > DEFAULT_LIST_BLOBS_MAX_RESULTS + ) { + options.maxresults = DEFAULT_LIST_BLOBS_MAX_RESULTS; + } + + const [blobs, nextMarker] = await this.metadataStore.filterBlobs( + context, + accountName, + containerName, + options.where, + options.maxresults, + marker, + ); + + const serviceEndpoint = `${request.getEndpoint()}/${accountName}`; + const response: Models.ContainerFilterBlobsResponse = { + statusCode: 200, + requestId: context.contextId, + version: BLOB_API_VERSION, + date: context.startTime, + serviceEndpoint, + where: options.where!, + blobs: blobs, + clientRequestId: options.requestId, + nextMarker: `${nextMarker || ""}` + }; + + return response; } /** diff --git a/src/blob/handlers/ServiceHandler.ts b/src/blob/handlers/ServiceHandler.ts index 8a968f72e..bd8bd9f79 100644 --- a/src/blob/handlers/ServiceHandler.ts +++ b/src/blob/handlers/ServiceHandler.ts @@ -6,6 +6,7 @@ import IServiceHandler from "../generated/handlers/IServiceHandler"; import { parseXML } from "../generated/utils/xml"; import { BLOB_API_VERSION, + DEFAULT_LIST_BLOBS_MAX_RESULTS, DEFAULT_LIST_CONTAINERS_MAX_RESULTS, EMULATOR_ACCOUNT_ISHIERARCHICALNAMESPACEENABLED, EMULATOR_ACCOUNT_KIND, @@ -22,8 +23,7 @@ import { Readable } from "stream"; import { OAuthLevel } from "../../common/models"; import { BEARER_TOKEN_PREFIX } from "../../common/utils/constants"; import { decode } from "jsonwebtoken"; -import { getUserDelegationKeyValue } from "../utils/utils"; -import NotImplementedError from "../errors/NotImplementedError"; +import { getUserDelegationKeyValue } from 
"../utils/utils" /** * ServiceHandler handles Azure Storage Blob service related requests. @@ -373,10 +373,44 @@ export default class ServiceHandler extends BaseHandler return this.getAccountInfo(context); } - public filterBlobs( + public async filterBlobs( options: Models.ServiceFilterBlobsOptionalParams, context: Context ): Promise { - throw new NotImplementedError(context.contextId); + const blobCtx = new BlobStorageContext(context); + const accountName = blobCtx.account!; + + const request = context.request!; + const marker = options.marker; + options.marker = options.marker || ""; + if ( + options.maxresults === undefined || + options.maxresults > DEFAULT_LIST_BLOBS_MAX_RESULTS + ) { + options.maxresults = DEFAULT_LIST_BLOBS_MAX_RESULTS; + } + + const [blobs, nextMarker] = await this.metadataStore.filterBlobs( + context, + accountName, + undefined, + options.where, + options.maxresults, + marker, + ); + + const serviceEndpoint = `${request.getEndpoint()}/${accountName}`; + const response: Models.ServiceFilterBlobsResponse = { + statusCode: 200, + requestId: context.contextId, + version: BLOB_API_VERSION, + date: context.startTime, + serviceEndpoint, + where: options.where!, + blobs: blobs, + clientRequestId: options.requestId, + nextMarker: `${nextMarker || ""}` + }; + return response; } } diff --git a/src/blob/persistence/FilterBlobPage.ts b/src/blob/persistence/FilterBlobPage.ts new file mode 100644 index 000000000..5fda2a210 --- /dev/null +++ b/src/blob/persistence/FilterBlobPage.ts @@ -0,0 +1,128 @@ + +/** + * This implements a page of blob results. + * When maxResults is smaller than the number of prefixed items in the metadata source, multiple reads from + * the source may be necessary. + * + * @export + * @class FilterBlobPage + */ +export default class FilterBlobPage { + readonly maxResults: number; + + filterBlobItems: FilterBlobType[] = []; + latestMarker: string = ""; + + // isFull indicates we could only (maybe) add a prefix + private isFull: boolean = false; + + // isExhausted indicates nothing more should be added + private isExhausted: boolean = false; + + constructor(maxResults: number) { + this.maxResults = maxResults; + } + + /** + * Empty the page (useful in unit tests) + * + */ + public reset() { + this.filterBlobItems.splice(0); + this.isFull = false; + this.isExhausted = false; + this.latestMarker = ""; + } + + private updateFull() { + this.isFull = (this.filterBlobItems.length === this.maxResults); + } + + /** + * addItem will add to the blob list if possible and update the full/exhausted state of the page + */ + private addItem(item: FilterBlobType): boolean { + if (this.isExhausted) { + return false; + } + let added: boolean = false; + if (! this.isFull) { + this.filterBlobItems.push(item); + added = true; + } + this.updateFull(); + + // if a blob causes fullness the next item read cannot be squashed only duplicate prefixes can + this.isExhausted = this.isFull; + return added; + } + + /** + * Add a BlobType item to the appropriate collection, update the marker + * + * When the page becomes full, items may still be added iff the item is existing prefix + * + * Return the number of items added + */ + private add(name: string, item: FilterBlobType): boolean { + if (this.isExhausted) { + return false; + } + if (name < this.latestMarker) { + throw new Error("add received unsorted item. add must be called on sorted data"); + } + const marker = (name > this.latestMarker) ? 
name : this.latestMarker; + let added: boolean = false; + added = this.addItem(item); + if (added) { + this.latestMarker = marker; + } + return added; + } + + /** + * Iterate over an array blobs read from a source and add them until the page cannot accept new items + */ + private processList(docs: FilterBlobType[], nameFn: (item: FilterBlobType) => string): number { + let added: number = 0; + for (const item of docs) { + if (this.add(nameFn(item), item)) { + added++; + } + if (this.isExhausted) break; + } + return added; + } + + /** + * Fill the page if possible by using the provided reader function. + * + * For any BlobType, the name is used with delimiter to treat the item as a blob or + * a BlobPrefix for the list blobs result. + * + * This function will use the reader for BlobType to keep reading from a metadata + * data source until the source has no more items or the page cannot add any more items. + * + * Return the contents of the page, blobs, prefixes, and a continuation token if applicable + */ + public async fill( + reader: (offset: number) => Promise, + namer: (item: FilterBlobType) => string, + ): Promise<[FilterBlobType[], string]> { + let offset: number = 0; + let docs = await reader(offset); + let added: number = 0; + while (docs.length) { + added = this.processList(docs, namer); + offset += added; + if (added < this.maxResults) { + break; + } + docs = await reader(offset); + } + return [ + this.filterBlobItems, + added < docs.length ? this.latestMarker : "" + ]; + } +} diff --git a/src/blob/persistence/IBlobMetadataStore.ts b/src/blob/persistence/IBlobMetadataStore.ts index 12d91ca7d..186a9a536 100644 --- a/src/blob/persistence/IBlobMetadataStore.ts +++ b/src/blob/persistence/IBlobMetadataStore.ts @@ -4,6 +4,7 @@ import IDataStore from "../../common/IDataStore"; import IGCExtentProvider from "../../common/IGCExtentProvider"; import * as Models from "../generated/artifacts/models"; import Context from "../generated/Context"; +import { FilterBlobItem } from "../generated/artifacts/models"; /** * This model describes a chunk inside a persistency extent for a given extent ID. @@ -153,6 +154,8 @@ interface IGetBlobPropertiesRes { } export type GetBlobPropertiesRes = IGetBlobPropertiesRes; +export type FilterBlobModel = FilterBlobItem; + // The response model for each lease-related request. interface IBlobLeaseResponse { properties: Models.BlobPropertiesInternal; @@ -212,8 +215,8 @@ export type BlockModel = IBlockAdditionalProperties & PersistencyBlockModel; */ export interface IBlobMetadataStore extends IGCExtentProvider, - IDataStore, - ICleaner { + IDataStore, + ICleaner { /** * Update blob service properties. Create service properties if not exists in persistency layer. * @@ -502,6 +505,15 @@ export interface IBlobMetadataStore includeUncommittedBlobs?: boolean ): Promise<[BlobModel[], string | undefined]>; + filterBlobs( + context: Context, + account: string, + container?: string, + where?: string, + maxResults?: number, + marker?: string, + ): Promise<[FilterBlobModel[], string | undefined]>; + /** * Create blob item in persistency layer. Will replace if blob exists. * @@ -1078,7 +1090,7 @@ export interface IBlobMetadataStore listUncommittedBlockPersistencyChunks( marker?: string, maxResults?: number - ): Promise<[IExtentChunk[], string | undefined]>; + ): Promise<[IExtentChunk[], string | undefined]>; /** * Set blob tags. 
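// A minimal usage sketch for the filterBlobs method declared above, assuming the
// Azurite default emulator account name and an example tag expression; the exact
// filter grammar accepted by the `where` parameter is defined by QueryParser.ts,
// and the helper below (findTaggedBlobs) is hypothetical. Import paths assume the
// sketch lives next to the store, in src/blob/persistence.
import Context from "../generated/Context";
import IBlobMetadataStore, { FilterBlobModel } from "./IBlobMetadataStore";

async function findTaggedBlobs(
  store: IBlobMetadataStore,
  context: Context
): Promise<FilterBlobModel[]> {
  // Assumed example expression: double-quoted tag keys, single-quoted values, AND.
  const where = `"project" = 'azurite' AND "stage" = 'test'`;
  const results: FilterBlobModel[] = [];

  let marker: string | undefined = "";
  do {
    // Passing `undefined` for container searches the whole account; leaving
    // maxResults undefined lets the Loki implementation fall back to
    // DEFAULT_LIST_BLOBS_MAX_RESULTS.
    const [items, nextMarker] = await store.filterBlobs(
      context,
      "devstoreaccount1", // assumed: Azurite's built-in emulator account
      undefined,
      where,
      undefined,
      marker
    );
    results.push(...items); // each FilterBlobModel carries name, containerName and matching tags
    marker = nextMarker;
  } while (marker);

  return results;
}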
@@ -1103,7 +1115,7 @@ export interface IBlobMetadataStore leaseAccessConditions: Models.LeaseAccessConditions | undefined, tags: Models.BlobTags | undefined, modifiedAccessConditions?: Models.ModifiedAccessConditions - ): Promise; + ): Promise; /** * Get blob tags. @@ -1125,7 +1137,7 @@ export interface IBlobMetadataStore blob: string, snapshot: string | undefined, leaseAccessConditions: Models.LeaseAccessConditions | undefined, - modifiedAccessConditions?: Models.ModifiedAccessConditions + modifiedAccessConditions?: Models.ModifiedAccessConditions, ): Promise; } diff --git a/src/blob/persistence/IBlobMetadataStore.ts.bak b/src/blob/persistence/IBlobMetadataStore.ts.bak new file mode 100644 index 000000000..12d91ca7d --- /dev/null +++ b/src/blob/persistence/IBlobMetadataStore.ts.bak @@ -0,0 +1,1132 @@ +import { BlobTags } from "@azure/storage-blob"; +import ICleaner from "../../common/ICleaner"; +import IDataStore from "../../common/IDataStore"; +import IGCExtentProvider from "../../common/IGCExtentProvider"; +import * as Models from "../generated/artifacts/models"; +import Context from "../generated/Context"; + +/** + * This model describes a chunk inside a persistency extent for a given extent ID. + * A chunk points to a sub-range of an extent. + * + * @export + * @interface IPersistencyChunk + */ +export interface IExtentChunk { + id: string; // The persistency layer storage extent ID where the chunk belongs to + offset: number; // Chunk offset inside the extent where chunk starts in bytes + count: number; // Chunk length in bytes +} + +export const ZERO_EXTENT_ID = "*ZERO*"; + +/** MODELS FOR SERVICE */ +interface IServiceAdditionalProperties { + accountName: string; +} + +export type ServicePropertiesModel = Models.StorageServiceProperties & + IServiceAdditionalProperties; + +/** MODELS FOR CONTAINER */ +interface IContainerAdditionalProperties { + accountName: string; + leaseDurationSeconds?: number; + leaseId?: string; + leaseExpireTime?: Date; + leaseBreakTime?: Date; + containerAcl?: Models.SignedIdentifier[]; +} + +export type ContainerModel = Models.ContainerItem & + IContainerAdditionalProperties; + +export interface IContainerMetadata { + [propertyName: string]: string; +} + +// The response model for getContainerProperties. +export type GetContainerPropertiesResponse = Models.ContainerItem; + +// The response for getContainerAccessPolicy. +interface IGetContainerAccessPolicyResponse { + properties: Models.ContainerProperties; + containerAcl?: Models.SignedIdentifier[]; +} +export type GetContainerAccessPolicyResponse = IGetContainerAccessPolicyResponse; + +// The params for setContainerAccessPolicy. +interface ISetContainerAccessPolicyOptions { + lastModified: Date; + etag: string; + containerAcl?: Models.SignedIdentifier[]; + publicAccess?: Models.PublicAccessType; + leaseAccessConditions?: Models.LeaseAccessConditions; + modifiedAccessConditions?: Models.ModifiedAccessConditions; +} +export type SetContainerAccessPolicyOptions = ISetContainerAccessPolicyOptions; + +// The response model for each lease-related request. 
+interface IContainerLeaseResponse { + properties: Models.ContainerProperties; + leaseId?: string; + leaseTime?: number; +} +export type AcquireContainerLeaseResponse = IContainerLeaseResponse; +export type ReleaseContainerLeaseResponse = Models.ContainerProperties; +export type RenewContainerLeaseResponse = IContainerLeaseResponse; +export type BreakContainerLeaseResponse = IContainerLeaseResponse; +export type ChangeContainerLeaseResponse = IContainerLeaseResponse; + +/** MODELS FOR BLOBS */ +interface IPersistencyPropertiesRequired { + /** + * A reference to persistency layer chunk of data. + * + * @type {IExtentChunk} + * @memberof IPersistencyProperties + */ + persistency: IExtentChunk; +} + +interface IPersistencyPropertiesOptional { + /** + * A reference to persistency layer chunk of data. + * + * @type {IExtentChunk} + * @memberof IPersistencyProperties + */ + persistency?: IExtentChunk; +} + +interface IBlockBlobAdditionalProperties { + /** + * False for uncommitted block blob, otherwise true. + * + * @type {boolean} + * @memberof IBlobAdditionalProperties + */ + isCommitted: boolean; + + /** + * Committed blocks for block blob. + * + * @type {PersistencyBlockModel[]} + * @memberof IBlobAdditionalProperties + */ + committedBlocksInOrder?: PersistencyBlockModel[]; +} + +/** + * PageRange model with pointers to persistency chunk. + */ +export type PersistencyPageRange = IPersistencyPropertiesRequired & + Models.PageRange; + +interface IPageBlobAdditionalProperties { + pageRangesInOrder?: PersistencyPageRange[]; +} + +interface IBlobAdditionalProperties { + accountName: string; + containerName: string; + leaseDurationSeconds?: number; + leaseId?: string; + leaseExpireTime?: Date; + leaseBreakTime?: Date; +} + +export type BlobModel = IBlobAdditionalProperties & + IPageBlobAdditionalProperties & + IBlockBlobAdditionalProperties & + Models.BlobItemInternal & + IPersistencyPropertiesOptional; + +export type BlobPrefixModel = IPersistencyPropertiesOptional & + Models.BlobPrefix; + +// The response model for getContainerProperties. +interface IGetBlobPropertiesRes { + properties: Models.BlobPropertiesInternal; + metadata?: Models.BlobMetadata; + blobCommittedBlockCount?: number; // AppendBlobOnly +} +export type GetBlobPropertiesRes = IGetBlobPropertiesRes; + +// The response model for each lease-related request. +interface IBlobLeaseResponse { + properties: Models.BlobPropertiesInternal; + leaseId?: string; + leaseTime?: number; +} +export type AcquireBlobLeaseResponse = IBlobLeaseResponse; +export type ReleaseBlobLeaseResponse = Models.ContainerProperties; +export type RenewBlobLeaseResponse = IBlobLeaseResponse; +export type BreakBlobLeaseResponse = IBlobLeaseResponse; +export type ChangeBlobLeaseResponse = IBlobLeaseResponse; + +// The response model for create snapshot. +interface ICreateSnapshotResponse { + properties: Models.BlobPropertiesInternal; + snapshot: string; +} +export type CreateSnapshotResponse = ICreateSnapshotResponse; + +// The model contain account name, container name, blob name and snapshot for blob. +interface IBlobId { + account: string; + container: string; + blob: string; + snapshot?: string; +} +export type BlobId = IBlobId; + +// The model contain required attributes of pageblob for request getPageRanges. 
+interface IGetPageRangeResponse { + pageRangesInOrder?: PersistencyPageRange[]; + properties: Models.BlobPropertiesInternal; +} +export type GetPageRangeResponse = IGetPageRangeResponse; + +/** MODELS FOR BLOCKS */ +interface IBlockAdditionalProperties { + accountName: string; + containerName: string; + blobName: string; + isCommitted: boolean; +} + +export type PersistencyBlockModel = Models.Block & + IPersistencyPropertiesRequired; + +export type BlockModel = IBlockAdditionalProperties & PersistencyBlockModel; + +/** + * Persistency layer metadata storage interface. + * + * TODO: Integrate cache layer to cache account, container & blob metadata. + * + * @export + * @interface IBlobMetadataStore + * @extends {IDataStore} + */ +export interface IBlobMetadataStore + extends IGCExtentProvider, + IDataStore, + ICleaner { + /** + * Update blob service properties. Create service properties if not exists in persistency layer. + * + * TODO: Account's service property should be created when storage account is created or metadata + * storage initialization. This method should only be responsible for updating existing record. + * In this way, we can reduce one I/O call to get account properties. + * + * @param {ServicePropertiesModel} serviceProperties + * @returns {Promise} undefined properties will be ignored during properties setup + * @memberof IBlobMetadataStore + */ + setServiceProperties( + context: Context, + serviceProperties: ServicePropertiesModel + ): Promise; + + /** + * Get service properties for specific storage account. + * + * @param {string} account + * @returns {Promise} + * @memberof IBlobMetadataStore + */ + getServiceProperties( + context: Context, + account: string + ): Promise; + + /** + * List containers with query conditions specified. + * + * @param {string} account + * @param {string} [prefix] + * @param {number} [maxResults] + * @param {number} [marker] + * @returns {(Promise<[ContainerModel[], number | undefined]>)} A tuple including containers and next marker + * @memberof IBlobMetadataStore + */ + listContainers( + context: Context, + account: string, + prefix?: string, + maxResults?: number, + marker?: string + ): Promise<[ContainerModel[], string | undefined]>; + + /** + * Create a container. + * + * @param {ContainerModel} container + * @param {Context} [context] + * @returns {Promise} + * @memberof IBlobMetadataStore + */ + createContainer( + context: Context, + container: ContainerModel + ): Promise; + + /** + * Get container properties. + * + * @param {string} account + * @param {string} container + * @param {Context} context + * @param {Models.LeaseAccessConditions} [leaseAccessConditions] + * @returns {Promise} + * @memberof IBlobMetadataStore + */ + getContainerProperties( + context: Context, + account: string, + container: string, + leaseAccessConditions?: Models.LeaseAccessConditions + ): Promise; + + /** + * Delete container item if exists from persistency layer. + * Note that this method will mark the specific container with "deleting" tag. Container item + * will be removed only if all blobs under that container has been removed with GC. During + * "deleting" status, container and blobs under that container cannot be accessed. + * + * TODO: Make sure all metadata interface implementation follow up above assumption. + * TODO: GC for async container deletion. 
+ * + * @param {string} account + * @param {string} container + * @param {Context} context + * @param {Models.ContainerDeleteMethodOptionalParams} [options] + * @returns {Promise} + * @memberof IBlobMetadataStore + */ + deleteContainer( + context: Context, + account: string, + container: string, + options?: Models.ContainerDeleteMethodOptionalParams + ): Promise; + + /** + * Set container metadata. + * + * @param {Context} context + * @param {string} account + * @param {string} container + * @param {Date} lastModified + * @param {string} etag + * @param {IContainerMetadata} [metadata] + * @param {Models.LeaseAccessConditions} [leaseAccessConditions] + * @param {Models.ModifiedAccessConditions} [modifiedAccessConditions] + * @returns {Promise} + * @memberof IBlobMetadataStore + */ + setContainerMetadata( + context: Context, + account: string, + container: string, + lastModified: Date, + etag: string, + metadata?: IContainerMetadata, + leaseAccessConditions?: Models.LeaseAccessConditions, + modifiedAccessConditions?: Models.ModifiedAccessConditions + ): Promise; + + /** + * Get container access policy. + * + * @param {string} account + * @param {string} container + * @param {Context} context + * @param {Models.LeaseAccessConditions} [leaseAccessConditions] + * @returns {Promise} + * @memberof IBlobMetadataStore + */ + getContainerACL( + context: Context, + account: string, + container: string, + leaseAccessConditions?: Models.LeaseAccessConditions + ): Promise; + + /** + * Set container access policy. + * + * @param {string} account + * @param {string} container + * @param {SetContainerAccessPolicyOptions} setAclModel + * @param {Context} context + * @returns {Promise} + * @memberof IBlobMetadataStore + */ + setContainerACL( + context: Context, + account: string, + container: string, + setAclModel: SetContainerAccessPolicyOptions + ): Promise; + + /** + * Acquire container lease + * + * @param {string} account + * @param {string} container + * @param {Models.ContainerAcquireLeaseOptionalParams} [options] + * @param {Context} context + * @returns {Promise} + * @memberof IBlobMetadataStore + */ + acquireContainerLease( + context: Context, + account: string, + container: string, + options?: Models.ContainerAcquireLeaseOptionalParams + ): Promise; + + /** + * Release container lease. + * + * @param {Context} context + * @param {string} account + * @param {string} container + * @param {string} leaseId + * @param {Models.ContainerReleaseLeaseOptionalParams} [options] + * @returns {Promise} + * @memberof IBlobMetadataStore + */ + releaseContainerLease( + context: Context, + account: string, + container: string, + leaseId: string, + options?: Models.ContainerReleaseLeaseOptionalParams + ): Promise; + + /** + * Renew container lease. + * + * @param {Context} context + * @param {string} account + * @param {string} container + * @param {string} leaseId + * @param {Models.ContainerRenewLeaseOptionalParams} [options] + * @returns {Promise} + * @memberof IBlobMetadataStore + */ + renewContainerLease( + context: Context, + account: string, + container: string, + leaseId: string, + options?: Models.ContainerRenewLeaseOptionalParams + ): Promise; + + /** + * Break container lease. 
+ * + * @param {Context} context + * @param {string} account + * @param {string} container + * @param {(number | undefined)} breakPeriod + * @param {Models.ContainerBreakLeaseOptionalParams} [options] + * @returns {Promise} + * @memberof IBlobMetadataStore + */ + breakContainerLease( + context: Context, + account: string, + container: string, + breakPeriod: number | undefined, + options?: Models.ContainerBreakLeaseOptionalParams + ): Promise; + + /** + * Change container lease. + * + * @param {Context} context + * @param {string} account + * @param {string} container + * @param {string} leaseId + * @param {string} proposedLeaseId + * @param {Models.ContainerChangeLeaseOptionalParams} [options] + * @returns {Promise} + * @memberof IBlobMetadataStore + */ + changeContainerLease( + context: Context, + account: string, + container: string, + leaseId: string, + proposedLeaseId: string, + options?: Models.ContainerChangeLeaseOptionalParams + ): Promise; + + /** + * Check the existence of a container. + * + * @param {string} account + * @param {string} container + * @param {Context} [context] + * @returns {Promise} + * @memberof IBlobMetadataStore + */ + checkContainerExist( + context: Context, + account: string, + container: string + ): Promise; + + listBlobs( + context: Context, + account: string, + container: string, + delimiter?: string, + blob?: string, + prefix?: string, + maxResults?: number, + marker?: string, + includeSnapshots?: boolean, + includeUncommittedBlobs?: boolean + ): Promise<[BlobModel[], BlobPrefixModel[], string | undefined]>; + + listAllBlobs( + maxResults?: number, + marker?: string, + includeSnapshots?: boolean, + includeUncommittedBlobs?: boolean + ): Promise<[BlobModel[], string | undefined]>; + + /** + * Create blob item in persistency layer. Will replace if blob exists. + * + * @param {Context} context + * @param {BlobModel} blob + * @param {Models.LeaseAccessConditions} [leaseAccessConditions] Optional. Will validate lease if provided + * @param {Models.ModifiedAccessConditions} [modifiedAccessConditions] + * @returns {Promise} + * @memberof IBlobMetadataStore + */ + createBlob( + context: Context, + blob: BlobModel, + leaseAccessConditions?: Models.LeaseAccessConditions, + modifiedAccessConditions?: Models.ModifiedAccessConditions + ): Promise; + + /** + * Create snapshot. + * + * @param {Context} context + * @param {string} account + * @param {string} container + * @param {string} blob + * @param {Models.LeaseAccessConditions} [leaseAccessConditions] Optional. Will validate lease if provided + * @param {Models.BlobMetadata} [metadata] + * @param {Models.ModifiedAccessConditions} [modifiedAccessConditions] + * @returns {Promise} + * @memberof IBlobMetadataStore + */ + createSnapshot( + context: Context, + account: string, + container: string, + blob: string, + leaseAccessConditions?: Models.LeaseAccessConditions, + metadata?: Models.BlobMetadata, + modifiedAccessConditions?: Models.ModifiedAccessConditions + ): Promise; + + /** + * Gets a blob item from metadata store by account name, container name and blob name. + * Will return block list or page list as well for downloading. + * + * @param {Context} context + * @param {string} account + * @param {string} container + * @param {string} blob + * @param {(string | undefined)} snapshot + * @param {Models.LeaseAccessConditions} [leaseAccessConditions] Optional. 
Will validate lease if provided + * @param {Models.ModifiedAccessConditions} [modifiedAccessConditions] + * @returns {Promise} + * @memberof IBlobMetadataStore + */ + downloadBlob( + context: Context, + account: string, + container: string, + blob: string, + snapshot: string | undefined, + leaseAccessConditions?: Models.LeaseAccessConditions, + modifiedAccessConditions?: Models.ModifiedAccessConditions + ): Promise; + + /** + * Get blob properties. + * + * @param {Context} context + * @param {string} account + * @param {string} container + * @param {string} blob + * @param {(string | undefined)} snapshot + * @param {(Models.LeaseAccessConditions | undefined)} leaseAccessConditions + * @param {Models.ModifiedAccessConditions} [modifiedAccessConditions] + * @returns {Promise} + * @memberof IBlobMetadataStore + */ + getBlobProperties( + context: Context, + account: string, + container: string, + blob: string, + snapshot: string | undefined, + leaseAccessConditions: Models.LeaseAccessConditions | undefined, + modifiedAccessConditions?: Models.ModifiedAccessConditions + ): Promise; + + /** + * Delete blob or its snapshots. + * + * @param {Context} context + * @param {string} account + * @param {string} container + * @param {string} blob + * @param {Models.BlobDeleteMethodOptionalParams} options + * @returns {Promise} + * @memberof IBlobMetadataStore + */ + deleteBlob( + context: Context, + account: string, + container: string, + blob: string, + options: Models.BlobDeleteMethodOptionalParams + ): Promise; + + /** + * Set blob HTTP headers. + * + * @param {Context} context + * @param {string} account + * @param {string} container + * @param {string} blob + * @param {(Models.LeaseAccessConditions | undefined)} leaseAccessConditions + * @param {(Models.BlobHTTPHeaders | undefined)} blobHTTPHeaders + * @param {Models.ModifiedAccessConditions} [modifiedAccessConditions] + * @returns {Promise} + * @memberof IBlobMetadataStore + */ + setBlobHTTPHeaders( + context: Context, + account: string, + container: string, + blob: string, + leaseAccessConditions: Models.LeaseAccessConditions | undefined, + blobHTTPHeaders: Models.BlobHTTPHeaders | undefined, + modifiedAccessConditions?: Models.ModifiedAccessConditions + ): Promise; + + /** + * Set blob metadata. + * + * @param {Context} context + * @param {string} account + * @param {string} container + * @param {string} blob + * @param {(Models.LeaseAccessConditions | undefined)} leaseAccessConditions + * @param {(Models.BlobMetadata | undefined)} metadata + * @param {Models.ModifiedAccessConditions} [modifiedAccessConditions] + * @returns {Promise} + * @memberof IBlobMetadataStore + */ + setBlobMetadata( + context: Context, + account: string, + container: string, + blob: string, + leaseAccessConditions: Models.LeaseAccessConditions | undefined, + metadata: Models.BlobMetadata | undefined, + modifiedAccessConditions?: Models.ModifiedAccessConditions + ): Promise; + + /** + * Acquire blob lease. + * + * @param {Context} context + * @param {string} account + * @param {string} container + * @param {string} blob + * @param {number} duration + * @param {string} [proposedLeaseId] + * @param {Models.BlobAcquireLeaseOptionalParams} [options] + * @returns {Promise} + * @memberof IBlobMetadataStore + */ + acquireBlobLease( + context: Context, + account: string, + container: string, + blob: string, + duration: number, + proposedLeaseId?: string, + options?: Models.BlobAcquireLeaseOptionalParams + ): Promise; + + /** + * Release blob. 
+ * + * @param {Context} context + * @param {string} account + * @param {string} container + * @param {string} blob + * @param {string} leaseId + * @param {Models.BlobReleaseLeaseOptionalParams} [options] + * @returns {Promise} + * @memberof IBlobMetadataStore + */ + releaseBlobLease( + context: Context, + account: string, + container: string, + blob: string, + leaseId: string, + options?: Models.BlobReleaseLeaseOptionalParams + ): Promise; + + /** + * Renew blob lease. + * + * @param {Context} context + * @param {string} account + * @param {string} container + * @param {string} blob + * @param {string} leaseId + * @param {Models.BlobRenewLeaseOptionalParams} [options] + * @returns {Promise} + * @memberof IBlobMetadataStore + */ + renewBlobLease( + context: Context, + account: string, + container: string, + blob: string, + leaseId: string, + options?: Models.BlobRenewLeaseOptionalParams + ): Promise; + + /** + * Change blob lease. + * + * @param {Context} context + * @param {string} account + * @param {string} container + * @param {string} blob + * @param {string} leaseId + * @param {string} proposedLeaseId + * @param {Models.BlobChangeLeaseOptionalParams} [option] + * @returns {Promise} + * @memberof IBlobMetadataStore + */ + changeBlobLease( + context: Context, + account: string, + container: string, + blob: string, + leaseId: string, + proposedLeaseId: string, + option?: Models.BlobChangeLeaseOptionalParams + ): Promise; + + /** + * Break blob lease. + * + * @param {Context} context + * @param {string} account + * @param {string} container + * @param {string} blob + * @param {number} [breakPeriod] + * @param {Models.BlobBreakLeaseOptionalParams} [option] + * @returns {Promise} + * @memberof IBlobMetadataStore + */ + breakBlobLease( + context: Context, + account: string, + container: string, + blob: string, + breakPeriod?: number, + option?: Models.BlobBreakLeaseOptionalParams + ): Promise; + + /** + * Check the existence of a blob. + * + * @param {Context} context + * @param {string} account + * @param {string} container + * @param {string} blob + * @param {string} [snapshot] + * @returns {Promise} + * @memberof IBlobMetadataStore + */ + checkBlobExist( + context: Context, + account: string, + container: string, + blob: string, + snapshot?: string + ): Promise; + + /** + * Get blobType and committed status for SAS authentication. + * + * @param {string} account + * @param {string} container + * @param {string} blob + * @param {string} [snapshot] + * @returns {(Promise< + * { blobType: Models.BlobType | undefined; isCommitted: boolean } | undefined + * >)} + * @memberof IBlobMetadataStore + */ + getBlobType( + account: string, + container: string, + blob: string, + snapshot?: string + ): Promise< + { blobType: Models.BlobType | undefined; isCommitted: boolean } | undefined + >; + + /** + * Start copy from Url. + * + * @param {Context} context + * @param {BlobId} source + * @param {BlobId} destination + * @param {string} copySource + * @param {(Models.BlobMetadata | undefined)} metadata + * @param {(Models.AccessTier | undefined)} tier + * @param {Models.BlobStartCopyFromURLOptionalParams} [leaseAccessConditions] + * @returns {Promise} + * @memberof IBlobMetadataStore + */ + startCopyFromURL( + context: Context, + source: BlobId, + destination: BlobId, + copySource: string, + metadata: Models.BlobMetadata | undefined, + tier: Models.AccessTier | undefined, + leaseAccessConditions?: Models.BlobStartCopyFromURLOptionalParams + ): Promise; + + /** + * Sync copy from Url. 
+ * + * @param {Context} context + * @param {BlobId} source + * @param {BlobId} destination + * @param {string} copySource + * @param {(Models.BlobMetadata | undefined)} metadata + * @param {(Models.AccessTier | undefined)} tier + * @param {Models.BlobCopyFromURLOptionalParams} [leaseAccessConditions] + * @returns {Promise} + * @memberof IBlobMetadataStore + */ + copyFromURL( + context: Context, + source: BlobId, + destination: BlobId, + copySource: string, + metadata: Models.BlobMetadata | undefined, + tier: Models.AccessTier | undefined, + leaseAccessConditions?: Models.BlobCopyFromURLOptionalParams + ): Promise; + + /** + * Update Tier for a blob. + * + * @param {Context} context + * @param {string} account + * @param {string} container + * @param {string} blob + * @param {Models.AccessTier} tier + * @param {(Models.LeaseAccessConditions | undefined)} leaseAccessConditions + * @returns {(Promise<200 | 202>)} + * @memberof IBlobMetadataStore + */ + setTier( + context: Context, + account: string, + container: string, + blob: string, + tier: Models.AccessTier, + leaseAccessConditions: Models.LeaseAccessConditions | undefined + ): Promise<200 | 202>; + + /** + * Update blob block item in persistency layer. Will create if block doesn't exist. + * Will also create a uncommitted block blob. + * + * @param {BlockModel} block + * @param {Context} context + * @returns {Promise} + * @memberof IBlobMetadataStore + */ + stageBlock( + context: Context, + block: BlockModel, + leaseAccessConditions: Models.LeaseAccessConditions | undefined + ): Promise; + + /** + * Append block to an append blob. + * + * @param {Context} context + * @param {BlockModel} block + * @param {Models.LeaseAccessConditions} [leaseAccessConditions] + * @param {Models.ModifiedAccessConditions} [modifiedAccessConditions] + * @param {Models.AppendPositionAccessConditions} [appendPositionAccessConditions] + * @returns {Promise} + * @memberof IBlobMetadataStore + */ + appendBlock( + context: Context, + block: BlockModel, + leaseAccessConditions?: Models.LeaseAccessConditions, + modifiedAccessConditions?: Models.ModifiedAccessConditions, + appendPositionAccessConditions?: Models.AppendPositionAccessConditions + ): Promise; + + /** + * Commit block list for a blob. + * + * @param {Context} context + * @param {BlobModel} blob + * @param {{ blockName: string; blockCommitType: string }[]} blockList + * @param {Models.LeaseAccessConditions} [leaseAccessConditions] + * @param {Models.ModifiedAccessConditions} [modifiedAccessConditions] + * @returns {Promise} + * @memberof IBlobMetadataStore + */ + commitBlockList( + context: Context, + blob: BlobModel, + blockList: { blockName: string; blockCommitType: string }[], + leaseAccessConditions?: Models.LeaseAccessConditions, + modifiedAccessConditions?: Models.ModifiedAccessConditions + ): Promise; + + /** + * Gets blocks list for a blob from persistency layer by account, container and blob names. 
+ * + * @param {string} account + * @param {string} container + * @param {string} blob + * @param {string} [snapshot] + * @param {(boolean | undefined)} isCommitted + * @param {Context} context + * @returns {Promise<{ + * properties: Models.BlobProperties; + * uncommittedBlocks: Models.Block[]; + * committedBlocks: Models.Block[]; + * }>} + * @memberof IBlobMetadataStore + */ + getBlockList( + context: Context, + account: string, + container: string, + blob: string, + snapshot: string | undefined, + isCommitted: boolean | undefined, + leaseAccessConditions: Models.LeaseAccessConditions | undefined + ): Promise<{ + properties: Models.BlobPropertiesInternal; + uncommittedBlocks: Models.Block[]; + committedBlocks: Models.Block[]; + }>; + + /** + * Upload new pages for page blob. + * + * @param {Context} context + * @param {BlobModel} blob + * @param {number} start + * @param {number} end + * @param {IExtentChunk} persistency + * @param {Models.LeaseAccessConditions} [leaseAccessConditions] + * @param {Models.ModifiedAccessConditions} [modifiedAccessConditions] + * @param {Models.SequenceNumberAccessConditions} [sequenceNumberAccessConditions] + * @returns {Promise} + * @memberof IBlobMetadataStore + */ + uploadPages( + context: Context, + blob: BlobModel, + start: number, + end: number, + persistency: IExtentChunk, + leaseAccessConditions?: Models.LeaseAccessConditions, + modifiedAccessConditions?: Models.ModifiedAccessConditions, + sequenceNumberAccessConditions?: Models.SequenceNumberAccessConditions + ): Promise; + + /** + * Clear range for a page blob. + * + * @param {Context} context + * @param {BlobModel} blob + * @param {number} start + * @param {number} end + * @param {Models.LeaseAccessConditions} [leaseAccessConditions] + * @param {Models.ModifiedAccessConditions} [modifiedAccessConditions] + * @param {Models.SequenceNumberAccessConditions} [sequenceNumberAccessConditions] + * @returns {Promise} + * @memberof IBlobMetadataStore + */ + clearRange( + context: Context, + blob: BlobModel, + start: number, + end: number, + leaseAccessConditions?: Models.LeaseAccessConditions, + modifiedAccessConditions?: Models.ModifiedAccessConditions, + sequenceNumberAccessConditions?: Models.SequenceNumberAccessConditions + ): Promise; + + /** + * Returns the list of valid page ranges for a page blob or snapshot of a page blob. + * + * @param {Context} context + * @param {string} account + * @param {string} container + * @param {string} blob + * @param {string} [snapshot] + * @param {Models.LeaseAccessConditions} [leaseAccessConditions] + * @param {Models.ModifiedAccessConditions} [modifiedAccessConditions] + * @returns {Promise} + * @memberof IBlobMetadataStore + */ + getPageRanges( + context: Context, + account: string, + container: string, + blob: string, + snapshot?: string, + leaseAccessConditions?: Models.LeaseAccessConditions, + modifiedAccessConditions?: Models.ModifiedAccessConditions + ): Promise; + + /** + * Resize a page blob. 
+ * + * @param {Context} context + * @param {string} account + * @param {string} container + * @param {string} blob + * @param {number} blobContentLength + * @param {Models.LeaseAccessConditions} [leaseAccessConditions] + * @param {Models.ModifiedAccessConditions} [modifiedAccessConditions] + * @returns {Promise} + * @memberof IBlobMetadataStore + */ + resizePageBlob( + context: Context, + account: string, + container: string, + blob: string, + blobContentLength: number, + leaseAccessConditions?: Models.LeaseAccessConditions, + modifiedAccessConditions?: Models.ModifiedAccessConditions + ): Promise; + + /** + * Update the sequence number of a page blob. + * + * @param {Context} context + * @param {string} account + * @param {string} container + * @param {string} blob + * @param {Models.SequenceNumberActionType} sequenceNumberAction + * @param {(number | undefined)} blobSequenceNumber + * @param {Models.LeaseAccessConditions} [leaseAccessConditions] + * @param {Models.ModifiedAccessConditions} [modifiedAccessConditions] + * @returns {Promise} + * @memberof IBlobMetadataStore + */ + updateSequenceNumber( + context: Context, + account: string, + container: string, + blob: string, + sequenceNumberAction: Models.SequenceNumberActionType, + blobSequenceNumber: number | undefined, + leaseAccessConditions?: Models.LeaseAccessConditions, + modifiedAccessConditions?: Models.ModifiedAccessConditions + ): Promise; + + /** + * Gets uncommitted blocks list for a blob from persistency layer. + */ + listUncommittedBlockPersistencyChunks( + marker?: string, + maxResults?: number + ): Promise<[IExtentChunk[], string | undefined]>; + + /** + * Set blob tags. + * + * @param {Context} context + * @param {string} account + * @param {string} container + * @param {string} blob + * @param {(string | undefined)} snapshot + * @param {(Models.LeaseAccessConditions | undefined)} leaseAccessConditions + * @param {(Models.BlobMetadata | undefined)} metadata + * @param {Models.ModifiedAccessConditions} [modifiedAccessConditions] + * @returns {Promise} + * @memberof IBlobMetadataStore + */ + setBlobTag( + context: Context, + account: string, + container: string, + blob: string, + snapshot: string | undefined, + leaseAccessConditions: Models.LeaseAccessConditions | undefined, + tags: Models.BlobTags | undefined, + modifiedAccessConditions?: Models.ModifiedAccessConditions + ): Promise; + + /** + * Get blob tags. 
+ * + * @param {Context} context + * @param {string} account + * @param {string} container + * @param {string} blob + * @param {(string | undefined)} snapshot + * @param {(Models.LeaseAccessConditions | undefined)} leaseAccessConditions + * @param {Models.ModifiedAccessConditions} [modifiedAccessConditions] + * @returns {Promise} + * @memberof IBlobMetadataStore + */ + getBlobTag( + context: Context, + account: string, + container: string, + blob: string, + snapshot: string | undefined, + leaseAccessConditions: Models.LeaseAccessConditions | undefined, + modifiedAccessConditions?: Models.ModifiedAccessConditions + ): Promise; +} + +export default IBlobMetadataStore; diff --git a/src/blob/persistence/LokiBlobMetadataStore.ts b/src/blob/persistence/LokiBlobMetadataStore.ts index bae54c07e..7f5b72c89 100644 --- a/src/blob/persistence/LokiBlobMetadataStore.ts +++ b/src/blob/persistence/LokiBlobMetadataStore.ts @@ -47,6 +47,7 @@ import IBlobMetadataStore, { ChangeContainerLeaseResponse, ContainerModel, CreateSnapshotResponse, + FilterBlobModel, GetBlobPropertiesRes, GetContainerAccessPolicyResponse, GetContainerPropertiesResponse, @@ -62,7 +63,9 @@ import IBlobMetadataStore, { SetContainerAccessPolicyOptions } from "./IBlobMetadataStore"; import PageWithDelimiter from "./PageWithDelimiter"; -import { getBlobTagsCount, getTagsFromString } from "../utils/utils"; +import FilterBlobPage from "./FilterBlobPage"; +import { generateQueryBlobWithTagsWhereFunction } from "./QueryInterpreter/QueryInterpreter"; +import { getBlobTagsCount, getTagsFromString, toBlobTags } from "../utils/utils"; /** * This is a metadata source implementation for blob based on loki DB. @@ -326,9 +329,9 @@ export default class LokiBlobMetadataStore prefix === "" ? { name: { $gt: marker }, accountName: account } : { - name: { $regex: `^${this.escapeRegex(prefix)}`, $gt: marker }, - accountName: account - }; + name: { $regex: `^${this.escapeRegex(prefix)}`, $gt: marker }, + accountName: account + }; // Workaround for loki which will ignore $gt when providing $regex const query2 = { name: { $gt: marker } }; @@ -750,10 +753,10 @@ export default class LokiBlobMetadataStore const leaseTimeSeconds: number = doc.properties.leaseState === Models.LeaseStateType.Breaking && - doc.leaseBreakTime + doc.leaseBreakTime ? 
Math.round( - (doc.leaseBreakTime.getTime() - context.startTime!.getTime()) / 1000 - ) + (doc.leaseBreakTime.getTime() - context.startTime!.getTime()) / 1000 + ) : 0; coll.update(doc); @@ -821,6 +824,77 @@ export default class LokiBlobMetadataStore } } + public async filterBlobs( + context: Context, + account: string, + container?: string, + where?: string, + maxResults: number = DEFAULT_LIST_BLOBS_MAX_RESULTS, + marker: string = "", + ): Promise<[FilterBlobModel[], string | undefined]> { + const query: any = {}; + if (account !== undefined) { + query.accountName = account; + } + if (container !== undefined) { + query.containerName = container; + await this.checkContainerExist( + context, + account, + container + ); + } + + const filterFunction = generateQueryBlobWithTagsWhereFunction(where!); + + const coll = this.db.getCollection(this.BLOBS_COLLECTION); + const page = new FilterBlobPage(maxResults); + const readPage = async (offset: number): Promise => { + const doc = await coll + .chain() + .find(query) + .where((obj) => { + return obj.name > marker!; + }) + .sort((obj1, obj2) => { + if (obj1.name === obj2.name) return 0; + if (obj1.name > obj2.name) return 1; + return -1; + }) + .offset(offset) + .limit(maxResults) + .data(); + + return doc.map((item) => { + let blobItem: FilterBlobModel; + blobItem = { + name: item.name, + containerName: item.containerName, + tags: item.blobTags + }; + return blobItem; + }).filter((blobItem) => { + const tagsMeetConditions = filterFunction(blobItem); + if (tagsMeetConditions.length !== 0) { + blobItem.tags = { blobTagSet: toBlobTags(tagsMeetConditions) }; + return true; + } + return false; + }); + }; + + const nameItem = (item: FilterBlobModel) => { + return item.name; + }; + + const [blobItems, nextMarker] = await page.fill(readPage, nameItem); + + return [ + blobItems, + nextMarker + ]; + } + public async listBlobs( context: Context, account: string, @@ -1689,10 +1763,10 @@ export default class LokiBlobMetadataStore const leaseTimeSeconds: number = doc.properties.leaseState === Models.LeaseStateType.Breaking && - doc.leaseBreakTime + doc.leaseBreakTime ? Math.round( - (doc.leaseBreakTime.getTime() - context.startTime!.getTime()) / 1000 - ) + (doc.leaseBreakTime.getTime() - context.startTime!.getTime()) / 1000 + ) : 0; coll.update(doc); @@ -1919,7 +1993,7 @@ export default class LokiBlobMetadataStore leaseBreakTime: destBlob !== undefined ? destBlob.leaseBreakTime : undefined, committedBlocksInOrder: sourceBlob.committedBlocksInOrder, - persistency: sourceBlob.persistency, + persistency: sourceBlob.persistency, blobTags: options.blobTagsString === undefined ? undefined : getTagsFromString(options.blobTagsString, context.contextId!) }; @@ -2106,7 +2180,7 @@ export default class LokiBlobMetadataStore leaseBreakTime: destBlob !== undefined ? destBlob.leaseBreakTime : undefined, committedBlocksInOrder: sourceBlob.committedBlocksInOrder, - persistency: sourceBlob.persistency, + persistency: sourceBlob.persistency, blobTags: options.blobTagsString === undefined ? undefined : getTagsFromString(options.blobTagsString, context.contextId!) 
}; @@ -3398,6 +3472,14 @@ export default class LokiBlobMetadataStore context ); + if (modifiedAccessConditions?.ifTags) { + const validateFunction = generateQueryBlobWithTagsWhereFunction(modifiedAccessConditions?.ifTags, true); + if (modifiedAccessConditions?.ifTags !== undefined + && validateFunction(doc).length === 0) { + throw new Error("412"); + } + } + return doc.blobTags; } diff --git a/src/blob/persistence/LokiBlobMetadataStore.ts.bak b/src/blob/persistence/LokiBlobMetadataStore.ts.bak new file mode 100644 index 000000000..a425badbc --- /dev/null +++ b/src/blob/persistence/LokiBlobMetadataStore.ts.bak @@ -0,0 +1,3423 @@ +import { stat } from "fs"; +import Loki from "lokijs"; +import uuid from "uuid/v4"; + +import IGCExtentProvider from "../../common/IGCExtentProvider"; +import { + convertDateTimeStringMsTo7Digital, + rimrafAsync +} from "../../common/utils/utils"; +import { newEtag } from "../../common/utils/utils"; +import { validateReadConditions } from "../conditions/ReadConditionalHeadersValidator"; +import { + validateSequenceNumberWriteConditions, + validateWriteConditions +} from "../conditions/WriteConditionalHeadersValidator"; +import StorageErrorFactory from "../errors/StorageErrorFactory"; +import * as Models from "../generated/artifacts/models"; +import Context from "../generated/Context"; +import PageBlobRangesManager from "../handlers/PageBlobRangesManager"; +import BlobLeaseAdapter from "../lease/BlobLeaseAdapter"; +import BlobLeaseSyncer from "../lease/BlobLeaseSyncer"; +import BlobReadLeaseValidator from "../lease/BlobReadLeaseValidator"; +import BlobWriteLeaseSyncer from "../lease/BlobWriteLeaseSyncer"; +import BlobWriteLeaseValidator from "../lease/BlobWriteLeaseValidator"; +import ContainerDeleteLeaseValidator from "../lease/ContainerDeleteLeaseValidator"; +import ContainerLeaseAdapter from "../lease/ContainerLeaseAdapter"; +import ContainerLeaseSyncer from "../lease/ContainerLeaseSyncer"; +import ContainerReadLeaseValidator from "../lease/ContainerReadLeaseValidator"; +import { ILease } from "../lease/ILeaseState"; +import LeaseFactory from "../lease/LeaseFactory"; +import { + DEFAULT_LIST_BLOBS_MAX_RESULTS, + DEFAULT_LIST_CONTAINERS_MAX_RESULTS, + MAX_APPEND_BLOB_BLOCK_COUNT +} from "../utils/constants"; +import BlobReferredExtentsAsyncIterator from "./BlobReferredExtentsAsyncIterator"; +import IBlobMetadataStore, { + AcquireBlobLeaseResponse, + AcquireContainerLeaseResponse, + BlobId, + BlobModel, + BlobPrefixModel, + BlockModel, + BreakBlobLeaseResponse, + BreakContainerLeaseResponse, + ChangeBlobLeaseResponse, + ChangeContainerLeaseResponse, + ContainerModel, + CreateSnapshotResponse, + GetBlobPropertiesRes, + GetContainerAccessPolicyResponse, + GetContainerPropertiesResponse, + GetPageRangeResponse, + IContainerMetadata, + IExtentChunk, + PersistencyBlockModel, + ReleaseBlobLeaseResponse, + ReleaseContainerLeaseResponse, + RenewBlobLeaseResponse, + RenewContainerLeaseResponse, + ServicePropertiesModel, + SetContainerAccessPolicyOptions +} from "./IBlobMetadataStore"; +import PageWithDelimiter from "./PageWithDelimiter"; +import { getBlobTagsCount, getTagsFromString } from "../utils/utils"; + +/** + * This is a metadata source implementation for blob based on loki DB. + * + * Notice that, following design is for emulator purpose only, and doesn't design for best performance. + * We may want to optimize the persistency layer performance in the future. 
Such as by distributing metadata + * into different collections, or make binary payload write as an append-only pattern. + * + * Loki DB includes following collections and documents: + * + * -- SERVICE_PROPERTIES_COLLECTION // Collection contains service properties + * // Default collection name is $SERVICES_COLLECTION$ + * // Each document maps to 1 account blob service + * // Unique document properties: accountName + * -- CONTAINERS_COLLECTION // Collection contains all containers + * // Default collection name is $CONTAINERS_COLLECTION$ + * // Each document maps to 1 container + * // Unique document properties: accountName, (container)name + * -- BLOBS_COLLECTION // Collection contains all blobs + * // Default collection name is $BLOBS_COLLECTION$ + * // Each document maps to a blob + * // Unique document properties: accountName, containerName, (blob)name, snapshot + * -- BLOCKS_COLLECTION // Block blob blocks collection includes all UNCOMMITTED blocks + * // Unique document properties: accountName, containerName, blobName, name, isCommitted + * + * @export + * @class LokiBlobMetadataStore + */ +export default class LokiBlobMetadataStore + implements IBlobMetadataStore, IGCExtentProvider { + private readonly db: Loki; + + private initialized: boolean = false; + private closed: boolean = true; + + private readonly SERVICES_COLLECTION = "$SERVICES_COLLECTION$"; + private readonly CONTAINERS_COLLECTION = "$CONTAINERS_COLLECTION$"; + private readonly BLOBS_COLLECTION = "$BLOBS_COLLECTION$"; + private readonly BLOCKS_COLLECTION = "$BLOCKS_COLLECTION$"; + + private readonly pageBlobRangesManager = new PageBlobRangesManager(); + + public constructor(public readonly lokiDBPath: string) { + this.db = new Loki(lokiDBPath, { + autosave: true, + autosaveInterval: 5000 + }); + } + + public isInitialized(): boolean { + return this.initialized; + } + + public isClosed(): boolean { + return this.closed; + } + + public async init(): Promise { + await new Promise((resolve, reject) => { + stat(this.lokiDBPath, (statError, stats) => { + if (!statError) { + this.db.loadDatabase({}, (dbError) => { + if (dbError) { + reject(dbError); + } else { + resolve(); + } + }); + } else { + // when DB file doesn't exist, ignore the error because following will re-create the file + resolve(); + } + }); + }); + + // In loki DB implementation, these operations are all sync. 
Doesn't need an async lock + + // Create service properties collection if not exists + let servicePropertiesColl = this.db.getCollection(this.SERVICES_COLLECTION); + if (servicePropertiesColl === null) { + servicePropertiesColl = this.db.addCollection(this.SERVICES_COLLECTION, { + unique: ["accountName"] + }); + } + + // Create containers collection if not exists + if (this.db.getCollection(this.CONTAINERS_COLLECTION) === null) { + this.db.addCollection(this.CONTAINERS_COLLECTION, { + // Optimization for indexing and searching + // https://rawgit.com/techfort/LokiJS/master/jsdoc/tutorial-Indexing%20and%20Query%20performance.html + indices: ["accountName", "name"] + }); // Optimize for find operation + } + + // Create containers collection if not exists + if (this.db.getCollection(this.BLOBS_COLLECTION) === null) { + this.db.addCollection(this.BLOBS_COLLECTION, { + indices: ["accountName", "containerName", "name", "snapshot"] // Optimize for find operation + }); + } + + // Create blocks collection if not exists + if (this.db.getCollection(this.BLOCKS_COLLECTION) === null) { + this.db.addCollection(this.BLOCKS_COLLECTION, { + indices: ["accountName", "containerName", "blobName", "name"] // Optimize for find operation + }); + } + + await new Promise((resolve, reject) => { + this.db.saveDatabase((err) => { + if (err) { + reject(err); + } else { + resolve(); + } + }); + }); + + this.initialized = true; + this.closed = false; + } + + /** + * Close loki DB. + * + * @returns {Promise} + * @memberof LokiBlobMetadataStore + */ + public async close(): Promise { + await new Promise((resolve, reject) => { + this.db.close((err) => { + if (err) { + reject(err); + } else { + resolve(); + } + }); + }); + + this.closed = true; + } + + /** + * Clean LokiBlobMetadataStore. + * + * @returns {Promise} + * @memberof LokiBlobMetadataStore + */ + public async clean(): Promise { + if (this.isClosed()) { + await rimrafAsync(this.lokiDBPath); + + return; + } + throw new Error(`Cannot clean LokiBlobMetadataStore, it's not closed.`); + } + + public iteratorExtents(): AsyncIterator { + return new BlobReferredExtentsAsyncIterator(this); + } + + /** + * Update blob service properties. Create service properties if not exists in persistency layer. + * + * TODO: Account's service property should be created when storage account is created or metadata + * storage initialization. This method should only be responsible for updating existing record. + * In this way, we can reduce one I/O call to get account properties. + * + * @param {ServicePropertiesModel} serviceProperties + * @returns {Promise} undefined properties will be ignored during properties setup + * @memberof LokiBlobMetadataStore + */ + public async setServiceProperties( + context: Context, + serviceProperties: ServicePropertiesModel + ): Promise { + const coll = this.db.getCollection(this.SERVICES_COLLECTION); + const doc = coll.by("accountName", serviceProperties.accountName); + + if (doc) { + doc.cors = + serviceProperties.cors === undefined + ? doc.cors + : serviceProperties.cors; + + doc.hourMetrics = + serviceProperties.hourMetrics === undefined + ? doc.hourMetrics + : serviceProperties.hourMetrics; + + doc.logging = + serviceProperties.logging === undefined + ? doc.logging + : serviceProperties.logging; + + doc.minuteMetrics = + serviceProperties.minuteMetrics === undefined + ? doc.minuteMetrics + : serviceProperties.minuteMetrics; + + doc.defaultServiceVersion = + serviceProperties.defaultServiceVersion === undefined + ? 
doc.defaultServiceVersion + : serviceProperties.defaultServiceVersion; + + doc.deleteRetentionPolicy = + serviceProperties.deleteRetentionPolicy === undefined + ? doc.deleteRetentionPolicy + : serviceProperties.deleteRetentionPolicy; + + doc.staticWebsite = + serviceProperties.staticWebsite === undefined + ? doc.staticWebsite + : serviceProperties.staticWebsite; + + return coll.update(doc); + } else { + return coll.insert(serviceProperties); + } + } + + /** + * Get service properties for specific storage account. + * + * @param {string} account + * @returns {Promise} + * @memberof LokiBlobMetadataStore + */ + public async getServiceProperties( + context: Context, + account: string + ): Promise { + const coll = this.db.getCollection(this.SERVICES_COLLECTION); + const doc = coll.by("accountName", account); + return doc ? doc : undefined; + } + + /** + * List containers with query conditions specified. + * + * @param {string} account + * @param {string} [prefix=""] + * @param {number} [maxResults=5000] + * @param {string} [marker=""] + * @returns {(Promise<[ContainerModel[], string | undefined]>)} + * @memberof LokiBlobMetadataStore + */ + public async listContainers( + context: Context, + account: string, + prefix: string = "", + maxResults: number = DEFAULT_LIST_CONTAINERS_MAX_RESULTS, + marker: string = "" + ): Promise<[ContainerModel[], string | undefined]> { + const coll = this.db.getCollection(this.CONTAINERS_COLLECTION); + + const query = + prefix === "" + ? { name: { $gt: marker }, accountName: account } + : { + name: { $regex: `^${this.escapeRegex(prefix)}`, $gt: marker }, + accountName: account + }; + + // Workaround for loki which will ignore $gt when providing $regex + const query2 = { name: { $gt: marker } }; + + const docs = coll + .chain() + .find(query) + .find(query2) + .simplesort("name") + .limit(maxResults + 1) + .data(); + + if (docs.length <= maxResults) { + return [ + docs.map((doc) => { + return LeaseFactory.createLeaseState( + new ContainerLeaseAdapter(doc), + context + ).sync(new ContainerLeaseSyncer(doc)); + }), + undefined + ]; + } else { + // In this case, the last item is the one we get in addition, should set the Marker before it. + const nextMarker = docs[docs.length - 2].name; + docs.pop(); + return [ + docs.map((doc) => { + return LeaseFactory.createLeaseState( + new ContainerLeaseAdapter(doc), + context + ).sync(new ContainerLeaseSyncer(doc)); + }), + nextMarker + ]; + } + } + + /** + * Create a container. + * + * @param {ContainerModel} container + * @returns {Promise} + * @memberof LokiBlobMetadataStore + */ + public async createContainer( + context: Context, + container: ContainerModel + ): Promise { + const coll = this.db.getCollection(this.CONTAINERS_COLLECTION); + const doc = coll.findOne({ + accountName: container.accountName, + name: container.name + }); + + if (doc) { + const requestId = context ? context.contextId : undefined; + throw StorageErrorFactory.getContainerAlreadyExists(requestId); + } + + return coll.insert(container); + } + + /** + * Get container properties. 
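listContainers above pages by an exclusive name marker: it fetches maxResults + 1 documents and, when there is overflow, hands back the name of the last item it actually returns as the continuation marker. A hedged caller loop (store and ctx are assumed placeholders):

  let marker = "";
  while (true) {
    const [containers, next] = await store.listContainers(ctx, "devstoreaccount1", "", 5000, marker);
    containers.forEach((c) => console.log(c.name));
    if (next === undefined) break; // last page reached
    marker = next;
  }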
+ * + * @param {string} account + * @param {string} container + * @param {Context} context + * @param {Models.LeaseAccessConditions} [leaseAccessConditions] + * @returns {Promise} + * @memberof LokiBlobMetadataStore + */ + public async getContainerProperties( + context: Context, + account: string, + container: string, + leaseAccessConditions?: Models.LeaseAccessConditions + ): Promise { + const doc = await this.getContainerWithLeaseUpdated( + account, + container, + context + ); + + new ContainerReadLeaseValidator(leaseAccessConditions).validate( + new ContainerLeaseAdapter(doc), + context + ); + + const res: GetContainerPropertiesResponse = { + name: container, + properties: doc.properties, + metadata: doc.metadata + }; + + return res; + } + + /** + * Delete container item if exists from persistency layer. + * + * Loki based implementation will delete container documents from Containers collection, + * blob documents from Blobs collection, and blocks documents from Blocks collection immediately. + * + * Persisted extents data will be deleted by GC. + * + * @param {string} account + * @param {string} container + * @param {Context} context + * @param {Models.ContainerDeleteMethodOptionalParams} [options] + * @returns {Promise} + * @memberof LokiBlobMetadataStore + */ + public async deleteContainer( + context: Context, + account: string, + container: string, + options: Models.ContainerDeleteMethodOptionalParams = {} + ): Promise { + const coll = this.db.getCollection(this.CONTAINERS_COLLECTION); + const doc = await this.getContainerWithLeaseUpdated( + account, + container, + context, + false + ); + + validateWriteConditions(context, options.modifiedAccessConditions, doc); + + if (!doc) { + throw StorageErrorFactory.getContainerNotFound(context.contextId); + } + + new ContainerDeleteLeaseValidator(options.leaseAccessConditions).validate( + new ContainerLeaseAdapter(doc), + context + ); + + coll.remove(doc); + + const blobColl = this.db.getCollection(this.BLOBS_COLLECTION); + blobColl.findAndRemove({ + accountName: account, + containerName: container + }); + + const blockColl = this.db.getCollection(this.BLOCKS_COLLECTION); + blockColl.findAndRemove({ + accountName: account, + containerName: container + }); + } + + /** + * Set container metadata. 
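deleteContainer above removes the container document and, in the same call, every blob and block document under it; only persisted extent data is left for the garbage collector. An illustrative sequence (names assumed):

  await store.deleteContainer(ctx, "devstoreaccount1", "container1", {});
  // Metadata is gone immediately, so a follow-up existence check throws ContainerNotFound:
  await store.checkBlobExist(ctx, "devstoreaccount1", "container1", "blob1");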
+ * + * @param {Context} context + * @param {string} account + * @param {string} container + * @param {Date} lastModified + * @param {string} etag + * @param {IContainerMetadata} [metadata] + * @param {Models.LeaseAccessConditions} [leaseAccessConditions] + * @param {Models.ModifiedAccessConditions} [modifiedAccessConditions] + * @returns {Promise} + * @memberof LokiBlobMetadataStore + */ + public async setContainerMetadata( + context: Context, + account: string, + container: string, + lastModified: Date, + etag: string, + metadata?: IContainerMetadata, + leaseAccessConditions?: Models.LeaseAccessConditions, + modifiedAccessConditions?: Models.ModifiedAccessConditions + ): Promise { + const coll = this.db.getCollection(this.CONTAINERS_COLLECTION); + const doc = await this.getContainerWithLeaseUpdated( + account, + container, + context, + false + ); + + validateWriteConditions(context, modifiedAccessConditions, doc); + + if (!doc) { + throw StorageErrorFactory.getContainerNotFound(context.contextId); + } + + new ContainerReadLeaseValidator(leaseAccessConditions).validate( + new ContainerLeaseAdapter(doc), + context + ); + + doc.properties.lastModified = lastModified; + doc.properties.etag = etag; + doc.metadata = metadata; + + return coll.update(doc); + } + + /** + * Get container access policy. + * + * @param {string} account + * @param {string} container + * @param {Context} context + * @param {Models.LeaseAccessConditions} [leaseAccessConditions] + * @returns {Promise} + * @memberof LokiBlobMetadataStore + */ + public async getContainerACL( + context: Context, + account: string, + container: string, + leaseAccessConditions?: Models.LeaseAccessConditions + ): Promise { + const doc = await this.getContainerWithLeaseUpdated( + account, + container, + context + ); + + new ContainerReadLeaseValidator(leaseAccessConditions).validate( + new ContainerLeaseAdapter(doc), + context + ); + + const res: GetContainerAccessPolicyResponse = { + properties: doc.properties, + containerAcl: doc.containerAcl + }; + + return res; + } + + /** + * Set container access policy. + * + * @param {string} account + * @param {string} container + * @param {SetContainerAccessPolicyOptions} setAclModel + * @param {Context} context + * @returns {Promise} + * @memberof LokiBlobMetadataStore + */ + public async setContainerACL( + context: Context, + account: string, + container: string, + setAclModel: SetContainerAccessPolicyOptions + ): Promise { + const coll = this.db.getCollection(this.CONTAINERS_COLLECTION); + const doc = await this.getContainerWithLeaseUpdated( + account, + container, + context, + false + ); + + validateWriteConditions(context, setAclModel.modifiedAccessConditions, doc); + + if (!doc) { + throw StorageErrorFactory.getContainerNotFound(context.contextId); + } + + new ContainerReadLeaseValidator(setAclModel.leaseAccessConditions).validate( + new ContainerLeaseAdapter(doc), + context + ); + + doc.properties.publicAccess = setAclModel.publicAccess; + doc.containerAcl = setAclModel.containerAcl; + doc.properties.lastModified = setAclModel.lastModified; + doc.properties.etag = setAclModel.etag; + + return coll.update(doc); + } + + /** + * Acquire container lease. 
+ * + * @param {string} account + * @param {string} container + * @param {Models.ContainerAcquireLeaseOptionalParams} options + * @param {Context} context + * @returns {Promise} + * @memberof LokiBlobMetadataStore + */ + public async acquireContainerLease( + context: Context, + account: string, + container: string, + options: Models.ContainerAcquireLeaseOptionalParams + ): Promise { + const coll = this.db.getCollection(this.CONTAINERS_COLLECTION); + const doc = await this.getContainer(account, container, context, false); + + validateWriteConditions(context, options.modifiedAccessConditions, doc); + + if (!doc) { + throw StorageErrorFactory.getContainerNotFound(context.contextId); + } + + LeaseFactory.createLeaseState(new ContainerLeaseAdapter(doc), context) + .acquire(options.duration!, options.proposedLeaseId) + .sync(new ContainerLeaseSyncer(doc)); + + coll.update(doc); + + return { properties: doc.properties, leaseId: doc.leaseId }; + } + + /** + * Release container lease. + * + * @param {Context} context + * @param {string} account + * @param {string} container + * @param {string} leaseId + * @param {Models.ContainerReleaseLeaseOptionalParams} [options={}] + * @returns {Promise} + * @memberof LokiBlobMetadataStore + */ + public async releaseContainerLease( + context: Context, + account: string, + container: string, + leaseId: string, + options: Models.ContainerReleaseLeaseOptionalParams = {} + ): Promise { + const coll = this.db.getCollection(this.CONTAINERS_COLLECTION); + const doc = await this.getContainer(account, container, context, false); + + validateWriteConditions(context, options.modifiedAccessConditions, doc); + + if (!doc) { + throw StorageErrorFactory.getContainerNotFound(context.contextId); + } + + LeaseFactory.createLeaseState(new ContainerLeaseAdapter(doc), context) + .release(leaseId) + .sync(new ContainerLeaseSyncer(doc)); + + coll.update(doc); + + return doc.properties; + } + + /** + * Renew container lease. + * + * @param {Context} context + * @param {string} account + * @param {string} container + * @param {string} leaseId + * @param {Models.ContainerRenewLeaseOptionalParams} [options={}] + * @returns {Promise} + * @memberof LokiBlobMetadataStore + */ + public async renewContainerLease( + context: Context, + account: string, + container: string, + leaseId: string, + options: Models.ContainerRenewLeaseOptionalParams = {} + ): Promise { + const coll = this.db.getCollection(this.CONTAINERS_COLLECTION); + const doc = await this.getContainer(account, container, context, false); + + validateWriteConditions(context, options.modifiedAccessConditions, doc); + + if (!doc) { + throw StorageErrorFactory.getContainerNotFound(context.contextId); + } + + LeaseFactory.createLeaseState(new ContainerLeaseAdapter(doc), context) + .renew(leaseId) + .sync(new ContainerLeaseSyncer(doc)); + + coll.update(doc); + + return { properties: doc.properties, leaseId: doc.leaseId }; + } + + /** + * Break container lease. 
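The container lease methods above wrap LeaseFactory state transitions and persist the result through ContainerLeaseSyncer. A hypothetical acquire/release round trip (store and ctx assumed):

  const { leaseId } = await store.acquireContainerLease(ctx, "devstoreaccount1", "container1", { duration: 30 });
  // ... operate on the container while holding the lease ...
  await store.releaseContainerLease(ctx, "devstoreaccount1", "container1", leaseId!);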
+ * + * @param {Context} context + * @param {string} account + * @param {string} container + * @param {(number | undefined)} breakPeriod + * @param {Models.ContainerBreakLeaseOptionalParams} [options={}] + * @returns {Promise} + * @memberof LokiBlobMetadataStore + */ + public async breakContainerLease( + context: Context, + account: string, + container: string, + breakPeriod: number | undefined, + options: Models.ContainerBreakLeaseOptionalParams = {} + ): Promise { + const coll = this.db.getCollection(this.CONTAINERS_COLLECTION); + const doc = await this.getContainer(account, container, context, false); + + validateWriteConditions(context, options.modifiedAccessConditions, doc); + + if (!doc) { + throw StorageErrorFactory.getContainerNotFound(context.contextId); + } + + LeaseFactory.createLeaseState(new ContainerLeaseAdapter(doc), context) + .break(breakPeriod) + .sync(new ContainerLeaseSyncer(doc)); + + const leaseTimeSeconds: number = + doc.properties.leaseState === Models.LeaseStateType.Breaking && + doc.leaseBreakTime + ? Math.round( + (doc.leaseBreakTime.getTime() - context.startTime!.getTime()) / 1000 + ) + : 0; + + coll.update(doc); + + return { properties: doc.properties, leaseTime: leaseTimeSeconds }; + } + + /** + * Change container lease. + * + * @param {Context} context + * @param {string} account + * @param {string} container + * @param {string} leaseId + * @param {string} proposedLeaseId + * @param {Models.ContainerChangeLeaseOptionalParams} [options={}] + * @returns {Promise} + * @memberof LokiBlobMetadataStore + */ + public async changeContainerLease( + context: Context, + account: string, + container: string, + leaseId: string, + proposedLeaseId: string, + options: Models.ContainerChangeLeaseOptionalParams = {} + ): Promise { + const coll = this.db.getCollection(this.CONTAINERS_COLLECTION); + const doc = await this.getContainer(account, container, context, false); + + validateWriteConditions(context, options.modifiedAccessConditions, doc); + + if (!doc) { + throw StorageErrorFactory.getContainerNotFound(context.contextId); + } + + LeaseFactory.createLeaseState(new ContainerLeaseAdapter(doc), context) + .change(leaseId, proposedLeaseId) + .sync(new ContainerLeaseSyncer(doc)); + + coll.update(doc); + + return { properties: doc.properties, leaseId: doc.leaseId }; + } + + /** + * Check the existence of a container. + * + * @param {string} account + * @param {string} container + * @param {Context} [context] + * @returns {Promise} + * @memberof LokiBlobMetadataStore + */ + public async checkContainerExist( + context: Context, + account: string, + container: string + ): Promise { + const coll = this.db.getCollection(this.CONTAINERS_COLLECTION); + const doc = coll.findOne({ accountName: account, name: container }); + if (!doc) { + const requestId = context ? 
context.contextId : undefined; + throw StorageErrorFactory.getContainerNotFound(requestId); + } + } + + public async listBlobs( + context: Context, + account: string, + container: string, + delimiter?: string, + blob?: string, + prefix: string = "", + maxResults: number = DEFAULT_LIST_BLOBS_MAX_RESULTS, + marker: string = "", + includeSnapshots?: boolean, + includeUncommittedBlobs?: boolean + ): Promise<[BlobModel[], BlobPrefixModel[], string | undefined]> { + const query: any = {}; + if (prefix !== "") { + query.name = { $regex: `^${this.escapeRegex(prefix)}` }; + } + if (blob !== undefined) { + query.name = blob; + } + if (account !== undefined) { + query.accountName = account; + } + if (container !== undefined) { + query.containerName = container; + } + + const coll = this.db.getCollection(this.BLOBS_COLLECTION); + const page = new PageWithDelimiter(maxResults, delimiter, prefix); + const readPage = async (offset: number): Promise => { + return await coll + .chain() + .find(query) + .where((obj) => { + return obj.name > marker!; + }) + .where((obj) => { + return includeSnapshots ? true : obj.snapshot.length === 0; + }) + .where((obj) => { + return includeUncommittedBlobs ? true : obj.isCommitted; + }) + .sort((obj1, obj2) => { + if (obj1.name === obj2.name) return 0; + if (obj1.name > obj2.name) return 1; + return -1; + }) + .offset(offset) + .limit(maxResults) + .data(); + }; + + const nameItem = (item: BlobModel) => { + return item.name; + }; + + const [blobItems, blobPrefixes, nextMarker] = await page.fill(readPage, nameItem); + + return [ + blobItems.map((doc) => { + doc.properties.contentMD5 = this.restoreUint8Array( + doc.properties.contentMD5 + ); + return LeaseFactory.createLeaseState( + new BlobLeaseAdapter(doc), + context + ).sync(new BlobLeaseSyncer(doc)); + }), + blobPrefixes, + nextMarker + ]; + } + + public async listAllBlobs( + maxResults: number = DEFAULT_LIST_BLOBS_MAX_RESULTS, + marker: string = "", + includeSnapshots?: boolean, + includeUncommittedBlobs?: boolean + ): Promise<[BlobModel[], string | undefined]> { + const coll = this.db.getCollection(this.BLOBS_COLLECTION); + + const docs = await coll + .chain() + .where((obj) => { + return obj.name > marker!; + }) + .where((obj) => { + return includeSnapshots ? true : obj.snapshot.length === 0; + }) + .where((obj) => { + return includeUncommittedBlobs ? true : obj.isCommitted; + }) + .simplesort("name") + .limit(maxResults + 1) + .data(); + + for (const doc of docs) { + const blobDoc = doc as BlobModel; + blobDoc.properties.contentMD5 = this.restoreUint8Array( + blobDoc.properties.contentMD5 + ); + } + + if (docs.length <= maxResults) { + return [docs, undefined]; + } else { + const nextMarker = docs[docs.length - 2].name; + docs.pop(); + return [docs, nextMarker]; + } + } + + /** + * Create blob item in persistency layer. Will replace if blob exists. 
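listBlobs above delegates hierarchy handling to PageWithDelimiter, so a delimiter of "/" folds names that share a segment into blob prefixes, much like the REST listing API. A hypothetical caller sketch (store/ctx assumed):

  const [blobs, prefixes, next] = await store.listBlobs(
    ctx, "devstoreaccount1", "container1",
    "/",         // delimiter: collapse "virtual directories" into prefixes
    undefined,   // blob: no exact-name filter
    "photos/",   // prefix
    1000, "",    // maxResults, marker
    false, false // includeSnapshots, includeUncommittedBlobs
  );
  console.log(blobs.map((b) => b.name), prefixes, next);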
+ * + * @param {Context} context + * @param {BlobModel} blob + * @param {Models.LeaseAccessConditions} [leaseAccessConditions] + * @param {Models.ModifiedAccessConditions} [modifiedAccessConditions] + * @returns {Promise} + * @memberof LokiBlobMetadataStore + */ + public async createBlob( + context: Context, + blob: BlobModel, + leaseAccessConditions?: Models.LeaseAccessConditions, + modifiedAccessConditions?: Models.ModifiedAccessConditions + ): Promise { + await this.checkContainerExist( + context, + blob.accountName, + blob.containerName + ); + const coll = this.db.getCollection(this.BLOBS_COLLECTION); + const blobDoc = coll.findOne({ + accountName: blob.accountName, + containerName: blob.containerName, + name: blob.name, + snapshot: blob.snapshot + }); + + validateWriteConditions(context, modifiedAccessConditions, blobDoc); + + // Create if not exists + if ( + modifiedAccessConditions && + modifiedAccessConditions.ifNoneMatch === "*" && + blobDoc + ) { + throw StorageErrorFactory.getBlobAlreadyExists(context.contextId); + } + + if (blobDoc) { + LeaseFactory.createLeaseState(new BlobLeaseAdapter(blobDoc), context) + .validate(new BlobWriteLeaseValidator(leaseAccessConditions)) + .sync(new BlobLeaseSyncer(blob)); // Keep original blob lease + + if ( + blobDoc.properties !== undefined && + blobDoc.properties.accessTier === Models.AccessTier.Archive + ) { + throw StorageErrorFactory.getBlobArchived(context.contextId); + } + coll.remove(blobDoc); + } + delete (blob as any).$loki; + return coll.insert(blob); + } + + /** + * Create snapshot. + * + * @param {Context} context + * @param {string} account + * @param {string} container + * @param {string} blob + * @param {Models.LeaseAccessConditions} [leaseAccessConditions] Optional. Will validate lease if provided + * @returns {Promise} + * @memberof LokiBlobMetadataStore + */ + public async createSnapshot( + context: Context, + account: string, + container: string, + blob: string, + leaseAccessConditions?: Models.LeaseAccessConditions, + metadata?: Models.BlobMetadata, + modifiedAccessConditions?: Models.ModifiedAccessConditions + ): Promise { + const coll = this.db.getCollection(this.BLOBS_COLLECTION); + const doc = await this.getBlobWithLeaseUpdated( + account, + container, + blob, + undefined, + context, + false, + true + ); + + validateReadConditions(context, modifiedAccessConditions, doc); + + if (!doc) { + throw StorageErrorFactory.getBlobNotFound(context.contextId); + } + + new BlobReadLeaseValidator(leaseAccessConditions).validate( + new BlobLeaseAdapter(doc), + context + ); + + const snapshotTime = convertDateTimeStringMsTo7Digital( + context.startTime!.toISOString() + ); + + const snapshotBlob: BlobModel = { + name: doc.name, + deleted: false, + snapshot: snapshotTime, + properties: { ...doc.properties }, + metadata: metadata ? { ...metadata } : { ...doc.metadata }, + blobTags: doc.blobTags, + accountName: doc.accountName, + containerName: doc.containerName, + pageRangesInOrder: + doc.pageRangesInOrder === undefined + ? undefined + : doc.pageRangesInOrder.slice(), + isCommitted: doc.isCommitted, + committedBlocksInOrder: + doc.committedBlocksInOrder === undefined + ? undefined + : doc.committedBlocksInOrder.slice(), + persistency: + doc.persistency === undefined ? 
undefined : { ...doc.persistency } + }; + + new BlobLeaseSyncer(snapshotBlob).sync({ + leaseId: undefined, + leaseExpireTime: undefined, + leaseDurationSeconds: undefined, + leaseBreakTime: undefined, + leaseDurationType: undefined, + leaseState: undefined, + leaseStatus: undefined + }); + + coll.insert(snapshotBlob); + + return { + properties: snapshotBlob.properties, + snapshot: snapshotTime + }; + } + + /** + * Gets a blob item from persistency layer by container name and blob name. + * Will return block list or page list as well for downloading. + * + * @param {Context} context + * @param {string} account + * @param {string} container + * @param {string} blob + * @param {string} [snapshot=""] + * @param {Models.LeaseAccessConditions} [leaseAccessConditions] + * @param {Models.ModifiedAccessConditions} [modifiedAccessConditions] + * @returns {Promise} + * @memberof LokiBlobMetadataStore + */ + public async downloadBlob( + context: Context, + account: string, + container: string, + blob: string, + snapshot: string = "", + leaseAccessConditions?: Models.LeaseAccessConditions, + modifiedAccessConditions?: Models.ModifiedAccessConditions + ): Promise { + const doc = await this.getBlobWithLeaseUpdated( + account, + container, + blob, + snapshot, + context, + false, + true + ); + + validateReadConditions(context, modifiedAccessConditions, doc); + + if (!doc) { + throw StorageErrorFactory.getBlobNotFound(context.contextId); + } + + new BlobReadLeaseValidator(leaseAccessConditions).validate( + new BlobLeaseAdapter(doc), + context + ); + + return doc; + } + + /** + * Gets a blob item from persistency layer by container name and blob name. + * Will return block list or page list as well for downloading. + * + * @param {string} account + * @param {string} container + * @param {string} blob + * @param {string} [snapshot] + * @returns {(Promise)} + * @memberof LokiBlobMetadataStore + */ + public async getBlob( + context: Context, + account: string, + container: string, + blob: string, + snapshot: string = "" + ): Promise { + const coll = this.db.getCollection(this.BLOBS_COLLECTION); + const blobDoc = coll.findOne({ + accountName: account, + containerName: container, + name: blob, + snapshot + }); + + if (blobDoc) { + const blobModel = blobDoc as BlobModel; + blobModel.properties.contentMD5 = this.restoreUint8Array( + blobModel.properties.contentMD5 + ); + return blobDoc; + } else { + return undefined; + } + } + + /** + * Get blob properties. 
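createSnapshot above stamps the snapshot with the request start time (7-digit precision) and returns that string; downloadBlob accepts it to read the frozen version. A minimal assumed-caller sketch:

  const { snapshot } = await store.createSnapshot(ctx, "devstoreaccount1", "container1", "blob1");
  const snapshotDoc = await store.downloadBlob(ctx, "devstoreaccount1", "container1", "blob1", snapshot);
  // snapshotDoc reflects properties, metadata and block list as they were at snapshot time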
+ * + * @param {Context} context + * @param {string} account + * @param {string} container + * @param {string} blob + * @param {string} [snapshot=""] + * @param {(Models.LeaseAccessConditions | undefined)} leaseAccessConditions + * @param {Models.ModifiedAccessConditions} [modifiedAccessConditions] + * @returns {Promise} + * @memberof LokiBlobMetadataStore + */ + public async getBlobProperties( + context: Context, + account: string, + container: string, + blob: string, + snapshot: string = "", + leaseAccessConditions: Models.LeaseAccessConditions | undefined, + modifiedAccessConditions?: Models.ModifiedAccessConditions + ): Promise { + const doc = await this.getBlobWithLeaseUpdated( + account, + container, + blob, + snapshot, + context, + false, + true + ); + + validateReadConditions(context, modifiedAccessConditions, doc); + + // When block blob don't have commited block, should return 404 + if (!doc) { + throw StorageErrorFactory.getBlobNotFound(context.contextId); + } + + new BlobReadLeaseValidator(leaseAccessConditions).validate( + new BlobLeaseAdapter(doc), + context + ); + + doc.properties.tagCount = getBlobTagsCount(doc.blobTags); + + return { + properties: doc.properties, + metadata: doc.metadata, + blobCommittedBlockCount: + doc.properties.blobType === Models.BlobType.AppendBlob + ? (doc.committedBlocksInOrder || []).length + : undefined + }; + } + + /** + * Delete blob or its snapshots. + * + * @param {Context} context + * @param {string} account + * @param {string} container + * @param {string} blob + * @param {Models.BlobDeleteMethodOptionalParams} options + * @returns {Promise} + * @memberof LokiBlobMetadataStore + */ + public async deleteBlob( + context: Context, + account: string, + container: string, + blob: string, + options: Models.BlobDeleteMethodOptionalParams + ): Promise { + const coll = this.db.getCollection(this.BLOBS_COLLECTION); + await this.checkContainerExist(context, account, container); + + const doc = await this.getBlobWithLeaseUpdated( + account, + container, + blob, + options.snapshot, + context, + false + ); + + validateWriteConditions(context, options.modifiedAccessConditions, doc); + + if (!doc) { + throw StorageErrorFactory.getBlobNotFound(context.contextId); + } + + const againstBaseBlob = doc.snapshot === ""; + + // Check bad requests + if (!againstBaseBlob && options.deleteSnapshots !== undefined) { + throw StorageErrorFactory.getInvalidOperation( + context.contextId!, + "Invalid operation against a blob snapshot." 
+ ); + } + + new BlobWriteLeaseValidator(options.leaseAccessConditions).validate( + new BlobLeaseAdapter(doc), + context + ); + + // Scenario: Delete base blob only + if (againstBaseBlob && options.deleteSnapshots === undefined) { + const count = coll.count({ + accountName: account, + containerName: container, + name: blob + }); + if (count > 1) { + throw StorageErrorFactory.getSnapshotsPresent(context.contextId!); + } else { + coll.findAndRemove({ + accountName: account, + containerName: container, + name: blob + }); + } + } + + // Scenario: Delete one snapshot only + if (!againstBaseBlob) { + coll.findAndRemove({ + accountName: account, + containerName: container, + name: blob, + snapshot: doc.snapshot + }); + } + + // Scenario: Delete base blob and snapshots + if ( + againstBaseBlob && + options.deleteSnapshots === Models.DeleteSnapshotsOptionType.Include + ) { + coll.findAndRemove({ + accountName: account, + containerName: container, + name: blob + }); + } + + // Scenario: Delete all snapshots only + if ( + againstBaseBlob && + options.deleteSnapshots === Models.DeleteSnapshotsOptionType.Only + ) { + const query = { + accountName: account, + containerName: container, + name: blob, + snapshot: { $gt: "" } + }; + coll.findAndRemove(query); + } + } + + /** + * Set blob HTTP headers. + * + * @param {Context} context + * @param {string} account + * @param {string} container + * @param {string} blob + * @param {(Models.LeaseAccessConditions | undefined)} leaseAccessConditions + * @param {(Models.BlobHTTPHeaders | undefined)} blobHTTPHeaders + * @param {Models.ModifiedAccessConditions} [modifiedAccessConditions] + * @returns {Promise} + * @memberof LokiBlobMetadataStore + */ + public async setBlobHTTPHeaders( + context: Context, + account: string, + container: string, + blob: string, + leaseAccessConditions: Models.LeaseAccessConditions | undefined, + blobHTTPHeaders: Models.BlobHTTPHeaders | undefined, + modifiedAccessConditions?: Models.ModifiedAccessConditions + ): Promise { + const coll = this.db.getCollection(this.BLOBS_COLLECTION); + const doc = await this.getBlobWithLeaseUpdated( + account, + container, + blob, + undefined, + context, + false, + true + ); + + validateWriteConditions(context, modifiedAccessConditions, doc); + + if (!doc) { + throw StorageErrorFactory.getBlobNotFound(context.contextId); + } + + const lease = new BlobLeaseAdapter(doc); + new BlobWriteLeaseValidator(leaseAccessConditions).validate(lease, context); + + const blobHeaders = blobHTTPHeaders; + const blobProps = doc.properties; + // as per https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-properties#remarks + // If any one or more of the following properties is set in the request, + // then all of these properties are set together. + // If a value is not provided for a given property when at least one + // of the properties listed below is set, then that property will + // be cleared for the blob. + if (blobHeaders !== undefined) { + blobProps.cacheControl = blobHeaders.blobCacheControl; + blobProps.contentType = blobHeaders.blobContentType; + blobProps.contentMD5 = blobHeaders.blobContentMD5; + blobProps.contentEncoding = blobHeaders.blobContentEncoding; + blobProps.contentLanguage = blobHeaders.blobContentLanguage; + blobProps.contentDisposition = blobHeaders.blobContentDisposition; + } + doc.properties = blobProps; + doc.properties.etag = newEtag(); + blobProps.lastModified = context.startTime ? 
context.startTime : new Date(); + + new BlobWriteLeaseSyncer(doc).sync(lease); + + coll.update(doc); + return doc.properties; + } + + /** + * Set blob metadata. + * + * @param {Context} context + * @param {string} account + * @param {string} container + * @param {string} blob + * @param {(Models.LeaseAccessConditions | undefined)} leaseAccessConditions + * @param {(Models.BlobMetadata | undefined)} metadata + * @param {Models.ModifiedAccessConditions} [modifiedAccessConditions] + * @returns {Promise} + * @memberof LokiBlobMetadataStore + */ + public async setBlobMetadata( + context: Context, + account: string, + container: string, + blob: string, + leaseAccessConditions: Models.LeaseAccessConditions | undefined, + metadata: Models.BlobMetadata | undefined, + modifiedAccessConditions?: Models.ModifiedAccessConditions + ): Promise { + const coll = this.db.getCollection(this.BLOBS_COLLECTION); + const doc = await this.getBlobWithLeaseUpdated( + account, + container, + blob, + undefined, + context, + false, + true + ); + + validateWriteConditions(context, modifiedAccessConditions, doc); + + if (!doc) { + throw StorageErrorFactory.getBlobNotFound(context.contextId); + } + + const lease = new BlobLeaseAdapter(doc); + new BlobWriteLeaseValidator(leaseAccessConditions).validate(lease, context); + new BlobWriteLeaseSyncer(doc).sync(lease); + doc.metadata = metadata; + doc.properties.etag = newEtag(); + doc.properties.lastModified = context.startTime || new Date(); + coll.update(doc); + return doc.properties; + } + + /** + * Acquire blob lease. + * + * @param {Context} context + * @param {string} account + * @param {string} container + * @param {string} blob + * @param {number} duration + * @param {string} [proposedLeaseId] + * @param {Models.BlobAcquireLeaseOptionalParams} [options={}] + * @returns {Promise} + * @memberof LokiBlobMetadataStore + */ + public async acquireBlobLease( + context: Context, + account: string, + container: string, + blob: string, + duration: number, + proposedLeaseId?: string, + options: Models.BlobAcquireLeaseOptionalParams = {} + ): Promise { + const coll = this.db.getCollection(this.BLOBS_COLLECTION); + const doc = await this.getBlobWithLeaseUpdated( + account, + container, + blob, + undefined, + context, + false + ); // This may return an uncommitted blob, or undefined for an unexist blob + + validateWriteConditions(context, options.modifiedAccessConditions, doc); + + // Azure Storage allows lease for a uncommitted blob + if (!doc) { + throw StorageErrorFactory.getBlobNotFound(context.contextId); + } + + if (doc.snapshot !== "") { + throw StorageErrorFactory.getBlobSnapshotsPresent(context.contextId); + } + + LeaseFactory.createLeaseState(new BlobLeaseAdapter(doc), context) + .acquire(duration, proposedLeaseId) + .sync(new BlobLeaseSyncer(doc)); + + coll.update(doc); + + return { properties: doc.properties, leaseId: doc.leaseId }; + } + + /** + * Release blob. 
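Per the remark quoted in setBlobHTTPHeaders earlier in this file, sending any one of the HTTP headers rewrites the whole group, clearing the ones that were omitted. A hedged illustration (store/ctx assumed):

  // Only blobContentType is provided, so cache control, MD5, encoding, language and
  // disposition are reset to undefined on the stored properties.
  await store.setBlobHTTPHeaders(ctx, "devstoreaccount1", "container1", "blob1", undefined, {
    blobContentType: "application/json"
  });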
+ * + * @param {Context} context + * @param {string} account + * @param {string} container + * @param {string} blob + * @param {string} leaseId + * @param {Models.BlobReleaseLeaseOptionalParams} [options={}] + * @returns {Promise} + * @memberof LokiBlobMetadataStore + */ + public async releaseBlobLease( + context: Context, + account: string, + container: string, + blob: string, + leaseId: string, + options: Models.BlobReleaseLeaseOptionalParams = {} + ): Promise { + const coll = this.db.getCollection(this.BLOBS_COLLECTION); + const doc = await this.getBlobWithLeaseUpdated( + account, + container, + blob, + undefined, + context, + false + ); // This may return an uncommitted blob, or undefined for an unexist blob + + validateWriteConditions(context, options.modifiedAccessConditions, doc); + + // Azure Storage allows lease for a uncommitted blob + if (!doc) { + throw StorageErrorFactory.getBlobNotFound(context.contextId); + } + + if (doc.snapshot !== "") { + throw StorageErrorFactory.getBlobSnapshotsPresent(context.contextId!); + } + + LeaseFactory.createLeaseState(new BlobLeaseAdapter(doc), context) + .release(leaseId) + .sync(new BlobLeaseSyncer(doc)); + + coll.update(doc); + + return doc.properties; + } + + /** + * Renew blob lease. + * + * @param {Context} context + * @param {string} account + * @param {string} container + * @param {string} blob + * @param {string} leaseId + * @param {Models.BlobRenewLeaseOptionalParams} [options={}] + * @returns {Promise} + * @memberof LokiBlobMetadataStore + */ + public async renewBlobLease( + context: Context, + account: string, + container: string, + blob: string, + leaseId: string, + options: Models.BlobRenewLeaseOptionalParams = {} + ): Promise { + const coll = this.db.getCollection(this.BLOBS_COLLECTION); + const doc = await this.getBlobWithLeaseUpdated( + account, + container, + blob, + undefined, + context, + false + ); // This may return an uncommitted blob, or undefined for an unexist blob + + validateWriteConditions(context, options.modifiedAccessConditions, doc); + + // Azure Storage allows lease for a uncommitted blob + if (!doc) { + throw StorageErrorFactory.getBlobNotFound(context.contextId); + } + + if (doc.snapshot !== "") { + throw StorageErrorFactory.getBlobSnapshotsPresent(context.contextId!); + } + + LeaseFactory.createLeaseState(new BlobLeaseAdapter(doc), context) + .renew(leaseId) + .sync(new BlobLeaseSyncer(doc)); + + coll.update(doc); + + return { properties: doc.properties, leaseId: doc.leaseId }; + } + + /** + * Change blob lease. 
+ * + * @param {Context} context + * @param {string} account + * @param {string} container + * @param {string} blob + * @param {string} leaseId + * @param {string} proposedLeaseId + * @param {Models.BlobChangeLeaseOptionalParams} [option={}] + * @returns {Promise} + * @memberof LokiBlobMetadataStore + */ + public async changeBlobLease( + context: Context, + account: string, + container: string, + blob: string, + leaseId: string, + proposedLeaseId: string, + options: Models.BlobChangeLeaseOptionalParams = {} + ): Promise { + const coll = this.db.getCollection(this.BLOBS_COLLECTION); + const doc = await this.getBlobWithLeaseUpdated( + account, + container, + blob, + undefined, + context, + false + ); // This may return an uncommitted blob, or undefined for an unexist blob + + validateWriteConditions(context, options.modifiedAccessConditions, doc); + + // Azure Storage allows lease for a uncommitted blob + if (!doc) { + throw StorageErrorFactory.getBlobNotFound(context.contextId); + } + + if (doc.snapshot !== "") { + throw StorageErrorFactory.getBlobSnapshotsPresent(context.contextId!); + } + + LeaseFactory.createLeaseState(new BlobLeaseAdapter(doc), context) + .change(leaseId, proposedLeaseId) + .sync(new BlobLeaseSyncer(doc)); + + coll.update(doc); + + return { properties: doc.properties, leaseId: doc.leaseId }; + } + + /** + * Break blob lease. + * + * @param {Context} context + * @param {string} account + * @param {string} container + * @param {string} blob + * @param {(number | undefined)} breakPeriod + * @param {Models.BlobBreakLeaseOptionalParams} [options={}] + * @returns {Promise} + * @memberof LokiBlobMetadataStore + */ + public async breakBlobLease( + context: Context, + account: string, + container: string, + blob: string, + breakPeriod: number | undefined, + options: Models.BlobBreakLeaseOptionalParams = {} + ): Promise { + const coll = this.db.getCollection(this.BLOBS_COLLECTION); + const doc = await this.getBlobWithLeaseUpdated( + account, + container, + blob, + undefined, + context, + false + ); // This may return an uncommitted blob, or undefined for an unexist blob + + validateWriteConditions(context, options.modifiedAccessConditions, doc); + + // Azure Storage allows lease for a uncommitted blob + if (!doc) { + throw StorageErrorFactory.getBlobNotFound(context.contextId); + } + + if (doc.snapshot !== "") { + throw StorageErrorFactory.getBlobSnapshotsPresent(context.contextId!); + } + + LeaseFactory.createLeaseState(new BlobLeaseAdapter(doc), context) + .break(breakPeriod) + .sync(new BlobLeaseSyncer(doc)); + + const leaseTimeSeconds: number = + doc.properties.leaseState === Models.LeaseStateType.Breaking && + doc.leaseBreakTime + ? Math.round( + (doc.leaseBreakTime.getTime() - context.startTime!.getTime()) / 1000 + ) + : 0; + + coll.update(doc); + + return { properties: doc.properties, leaseTime: leaseTimeSeconds }; + } + + /** + * Check the existence of a blob. 
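The leaseTime returned by breakBlobLease above is simply the remaining break period in whole seconds. A worked example with assumed timestamps:

  // The lease enters Breaking with a 30s break period at 08:00:00; the request lands at 08:00:12.400.
  const leaseBreakTime = new Date("2023-06-06T08:00:30.000Z");
  const requestStart = new Date("2023-06-06T08:00:12.400Z");
  Math.round((leaseBreakTime.getTime() - requestStart.getTime()) / 1000); // => 18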
+ * + * @param {string} account + * @param {string} container + * @param {string} blob + * @param {string} [snapshot=""] + * @param {Context} [context] + * @returns {Promise} + * @memberof LokiBlobMetadataStore + */ + public async checkBlobExist( + context: Context, + account: string, + container: string, + blob: string, + snapshot: string = "" + ): Promise { + await this.checkContainerExist(context, account, container); + + const coll = this.db.getCollection(this.BLOBS_COLLECTION); + const doc = coll.findOne({ + accountName: account, + containerName: container, + name: blob, + snapshot + }); + + if (!doc) { + const requestId = context ? context.contextId : undefined; + throw StorageErrorFactory.getBlobNotFound(requestId); + } + } + + /** + * Get blobType and committed status for SAS authentication. + * + * @param {string} account + * @param {string} container + * @param {string} blob + * @param {string} [snapshot=""] + * @returns {(Promise< + * { blobType: Models.BlobType | undefined; isCommitted: boolean } | undefined + * >)} + * @memberof LokiBlobMetadataStore + */ + public async getBlobType( + account: string, + container: string, + blob: string, + snapshot: string = "" + ): Promise< + { blobType: Models.BlobType | undefined; isCommitted: boolean } | undefined + > { + const coll = this.db.getCollection(this.BLOBS_COLLECTION); + const doc = coll.findOne({ + accountName: account, + containerName: container, + name: blob, + snapshot + }); + if (!doc) { + return undefined; + } + return { blobType: doc.properties.blobType, isCommitted: doc.isCommitted }; + } + + /** + * Start copy from Url. + * + * @param {Context} context + * @param {BlobId} source + * @param {BlobId} destination + * @param {string} copySource + * @param {(Models.BlobMetadata | undefined)} metadata + * @param {(Models.AccessTier | undefined)} tier + * @param {Models.BlobStartCopyFromURLOptionalParams} [leaseAccessConditions] + * @returns {Promise} + * @memberof LokiBlobMetadataStore + */ + public async startCopyFromURL( + context: Context, + source: BlobId, + destination: BlobId, + copySource: string, + metadata: Models.BlobMetadata | undefined, + tier: Models.AccessTier | undefined, + options: Models.BlobStartCopyFromURLOptionalParams = {} + ): Promise { + const coll = this.db.getCollection(this.BLOBS_COLLECTION); + const sourceBlob = await this.getBlobWithLeaseUpdated( + source.account, + source.container, + source.blob, + source.snapshot, + context, + true, + true + ); + + options.sourceModifiedAccessConditions = + options.sourceModifiedAccessConditions || {}; + validateReadConditions( + context, + { + ifModifiedSince: + options.sourceModifiedAccessConditions.sourceIfModifiedSince, + ifUnmodifiedSince: + options.sourceModifiedAccessConditions.sourceIfUnmodifiedSince, + ifMatch: options.sourceModifiedAccessConditions.sourceIfMatch, + ifNoneMatch: options.sourceModifiedAccessConditions.sourceIfNoneMatch + }, + sourceBlob + ); + + const destBlob = await this.getBlobWithLeaseUpdated( + destination.account, + destination.container, + destination.blob, + undefined, + context, + false + ); + + validateWriteConditions( + context, + options.modifiedAccessConditions, + destBlob + ); + + // Copy if not exists + if ( + options.modifiedAccessConditions && + options.modifiedAccessConditions.ifNoneMatch === "*" && + destBlob + ) { + throw StorageErrorFactory.getBlobAlreadyExists(context.contextId); + } + + if (destBlob) { + new BlobWriteLeaseValidator(options.leaseAccessConditions).validate( + new BlobLeaseAdapter(destBlob), + context 
+ ); + } + + // If source is uncommitted or deleted + if ( + sourceBlob === undefined || + sourceBlob.deleted || + !sourceBlob.isCommitted + ) { + throw StorageErrorFactory.getBlobNotFound(context.contextId!); + } + + if (sourceBlob.properties.accessTier === Models.AccessTier.Archive + && (tier === undefined || source.account !== destination.account)) { + throw StorageErrorFactory.getBlobArchived(context.contextId!); + } + + await this.checkContainerExist( + context, + destination.account, + destination.container + ); + + // Deep clone a copied blob + const copiedBlob: BlobModel = { + name: destination.blob, + deleted: false, + snapshot: "", + properties: { + ...sourceBlob.properties, + creationTime: context.startTime!, + lastModified: context.startTime!, + etag: newEtag(), + leaseStatus: + destBlob !== undefined + ? destBlob.properties.leaseStatus + : Models.LeaseStatusType.Unlocked, + leaseState: + destBlob !== undefined + ? destBlob.properties.leaseState + : Models.LeaseStateType.Available, + leaseDuration: + destBlob !== undefined + ? destBlob.properties.leaseDuration + : undefined, + copyId: uuid(), + copyStatus: Models.CopyStatusType.Success, + copySource, + copyProgress: sourceBlob.properties.contentLength + ? `${sourceBlob.properties.contentLength}/${sourceBlob.properties.contentLength}` + : undefined, + copyCompletionTime: context.startTime, + copyStatusDescription: undefined, + incrementalCopy: false, + destinationSnapshot: undefined, + deletedTime: undefined, + remainingRetentionDays: undefined, + archiveStatus: undefined, + accessTierChangeTime: undefined + }, + metadata: + metadata === undefined || Object.keys(metadata).length === 0 + ? { ...sourceBlob.metadata } + : metadata, + accountName: destination.account, + containerName: destination.container, + pageRangesInOrder: sourceBlob.pageRangesInOrder, + isCommitted: sourceBlob.isCommitted, + leaseDurationSeconds: + destBlob !== undefined ? destBlob.leaseDurationSeconds : undefined, + leaseId: destBlob !== undefined ? destBlob.leaseId : undefined, + leaseExpireTime: + destBlob !== undefined ? destBlob.leaseExpireTime : undefined, + leaseBreakTime: + destBlob !== undefined ? destBlob.leaseBreakTime : undefined, + committedBlocksInOrder: sourceBlob.committedBlocksInOrder, + persistency: sourceBlob.persistency, + blobTags: options.blobTagsString === undefined ? undefined : getTagsFromString(options.blobTagsString, context.contextId!) + }; + + if ( + copiedBlob.properties.blobType === Models.BlobType.BlockBlob && + tier !== undefined + ) { + copiedBlob.properties.accessTier = this.parseTier(tier); + if (copiedBlob.properties.accessTier === undefined) { + throw StorageErrorFactory.getInvalidHeaderValue(context.contextId, { + HeaderName: "x-ms-access-tier", + HeaderValue: `${tier}` + }); + } + } + + if ( + copiedBlob.properties.blobType === Models.BlobType.PageBlob && + tier !== undefined + ) { + throw StorageErrorFactory.getInvalidHeaderValue(context.contextId, { + HeaderName: "x-ms-access-tier", + HeaderValue: `${tier}` + }); + } + + if (destBlob) { + coll.remove(destBlob); + } + coll.insert(copiedBlob); + return copiedBlob.properties; + } + + /** + * Copy from Url. 
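Because startCopyFromURL above parses options.blobTagsString with getTagsFromString, tags supplied on the copy request are stored on the destination blob and become visible to filterBlobs. A hedged sketch; the account/container names and the x-ms-tags-style string are assumptions:

  await store.startCopyFromURL(
    ctx,
    { account: "devstoreaccount1", container: "src", blob: "blob1", snapshot: "" },
    { account: "devstoreaccount1", container: "dst", blob: "blob1" },
    "http://127.0.0.1:10000/devstoreaccount1/src/blob1",
    undefined, // metadata: keep the source's
    undefined, // tier
    { blobTagsString: "project=azurite&env=dev" } // parsed into the destination's blobTags
  );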
+ * + * @param {Context} context + * @param {BlobId} source + * @param {BlobId} destination + * @param {string} copySource + * @param {(Models.BlobMetadata | undefined)} metadata + * @param {(Models.AccessTier | undefined)} tier + * @param {Models.BlobCopyFromURLOptionalParams} [leaseAccessConditions] + * @returns {Promise} + * @memberof LokiBlobMetadataStore + */ + public async copyFromURL( + context: Context, + source: BlobId, + destination: BlobId, + copySource: string, + metadata: Models.BlobMetadata | undefined, + tier: Models.AccessTier | undefined, + options: Models.BlobCopyFromURLOptionalParams = {} + ): Promise { + const coll = this.db.getCollection(this.BLOBS_COLLECTION); + const sourceBlob = await this.getBlobWithLeaseUpdated( + source.account, + source.container, + source.blob, + source.snapshot, + context, + true, + true + ); + + options.sourceModifiedAccessConditions = + options.sourceModifiedAccessConditions || {}; + validateReadConditions( + context, + { + ifModifiedSince: + options.sourceModifiedAccessConditions.sourceIfModifiedSince, + ifUnmodifiedSince: + options.sourceModifiedAccessConditions.sourceIfUnmodifiedSince, + ifMatch: options.sourceModifiedAccessConditions.sourceIfMatch, + ifNoneMatch: options.sourceModifiedAccessConditions.sourceIfNoneMatch + }, + sourceBlob + ); + + const destBlob = await this.getBlobWithLeaseUpdated( + destination.account, + destination.container, + destination.blob, + undefined, + context, + false + ); + + validateWriteConditions( + context, + options.modifiedAccessConditions, + destBlob + ); + + // Copy if not exists + if ( + options.modifiedAccessConditions && + options.modifiedAccessConditions.ifNoneMatch === "*" && + destBlob + ) { + throw StorageErrorFactory.getBlobAlreadyExists(context.contextId); + } + + if (destBlob) { + const lease = new BlobLeaseAdapter(destBlob); + new BlobWriteLeaseSyncer(destBlob).sync(lease); + new BlobWriteLeaseValidator(options.leaseAccessConditions).validate( + lease, + context + ); + } + + // If source is uncommitted or deleted + if ( + sourceBlob === undefined || + sourceBlob.deleted || + !sourceBlob.isCommitted + ) { + throw StorageErrorFactory.getBlobNotFound(context.contextId!); + } + + if (sourceBlob.properties.accessTier === Models.AccessTier.Archive) { + throw StorageErrorFactory.getBlobArchived(context.contextId!); + } + + await this.checkContainerExist( + context, + destination.account, + destination.container + ); + + // Deep clone a copied blob + const copiedBlob: BlobModel = { + name: destination.blob, + deleted: false, + snapshot: "", + properties: { + ...sourceBlob.properties, + creationTime: context.startTime!, + lastModified: context.startTime!, + etag: newEtag(), + leaseStatus: + destBlob !== undefined + ? destBlob.properties.leaseStatus + : Models.LeaseStatusType.Unlocked, + leaseState: + destBlob !== undefined + ? destBlob.properties.leaseState + : Models.LeaseStateType.Available, + leaseDuration: + destBlob !== undefined + ? destBlob.properties.leaseDuration + : undefined, + copyId: uuid(), + copyStatus: Models.CopyStatusType.Success, + copySource, + copyProgress: sourceBlob.properties.contentLength + ? 
`${sourceBlob.properties.contentLength}/${sourceBlob.properties.contentLength}` + : undefined, + copyCompletionTime: context.startTime, + copyStatusDescription: undefined, + incrementalCopy: false, + destinationSnapshot: undefined, + deletedTime: undefined, + remainingRetentionDays: undefined, + archiveStatus: undefined, + accessTierChangeTime: undefined + }, + metadata: + metadata === undefined || Object.keys(metadata).length === 0 + ? { ...sourceBlob.metadata } + : metadata, + accountName: destination.account, + containerName: destination.container, + pageRangesInOrder: sourceBlob.pageRangesInOrder, + isCommitted: sourceBlob.isCommitted, + leaseDurationSeconds: + destBlob !== undefined ? destBlob.leaseDurationSeconds : undefined, + leaseId: destBlob !== undefined ? destBlob.leaseId : undefined, + leaseExpireTime: + destBlob !== undefined ? destBlob.leaseExpireTime : undefined, + leaseBreakTime: + destBlob !== undefined ? destBlob.leaseBreakTime : undefined, + committedBlocksInOrder: sourceBlob.committedBlocksInOrder, + persistency: sourceBlob.persistency, + blobTags: options.blobTagsString === undefined ? undefined : getTagsFromString(options.blobTagsString, context.contextId!) + }; + + if ( + copiedBlob.properties.blobType === Models.BlobType.BlockBlob && + tier !== undefined + ) { + copiedBlob.properties.accessTier = this.parseTier(tier); + if (copiedBlob.properties.accessTier === undefined) { + throw StorageErrorFactory.getInvalidHeaderValue(context.contextId, { + HeaderName: "x-ms-access-tier", + HeaderValue: `${tier}` + }); + } + } + + if ( + copiedBlob.properties.blobType === Models.BlobType.PageBlob && + tier !== undefined + ) { + throw StorageErrorFactory.getInvalidHeaderValue(context.contextId, { + HeaderName: "x-ms-access-tier", + HeaderValue: `${tier}` + }); + } + + if (destBlob) { + coll.remove(destBlob); + } + coll.insert(copiedBlob); + return copiedBlob.properties; + } + + /** + * Update Tier for a blob. + * + * @param {Context} context + * @param {string} account + * @param {string} container + * @param {string} blob + * @param {Models.AccessTier} tier + * @param {(Models.LeaseAccessConditions | undefined)} leaseAccessConditions + * @returns {(Promise<200 | 202>)} + * @memberof LokiBlobMetadataStore + */ + public async setTier( + context: Context, + account: string, + container: string, + blob: string, + tier: Models.AccessTier, + leaseAccessConditions: Models.LeaseAccessConditions | undefined + ): Promise<200 | 202> { + const coll = this.db.getCollection(this.BLOBS_COLLECTION); + const doc = await this.getBlobWithLeaseUpdated( + account, + container, + blob, + undefined, + context, + true, + true + ); + let responseCode: 200 | 202 = 200; + + // Check the lease action aligned with current lease state. 
+ // API reference doesn't mention there is x-ms-lease-id header supported by this API, + // however, it fails to set tier for a leased blocked blob with LeaseIdMissing + const lease = new BlobLeaseAdapter(doc); + new BlobWriteLeaseValidator(leaseAccessConditions).validate(lease, context); + + // Check Blob is not snapshot + if (doc.snapshot !== "") { + throw StorageErrorFactory.getBlobSnapshotsPresent(context.contextId!); + } + + // Check BlobTier matches blob type + if ( + (tier === Models.AccessTier.Archive || + tier === Models.AccessTier.Cool || + tier === Models.AccessTier.Hot) && + doc.properties.blobType === Models.BlobType.BlockBlob + ) { + // Block blob + // tslint:disable-next-line:max-line-length + // TODO: check blob is not block blob with snapshot, throw StorageErrorFactory.getBlobSnapshotsPresent_hassnapshot() + + // Archive -> Coo/Hot will return 202 + if ( + doc.properties.accessTier === Models.AccessTier.Archive && + (tier === Models.AccessTier.Cool || tier === Models.AccessTier.Hot) + ) { + responseCode = 202; + } + + doc.properties.accessTier = tier; + doc.properties.accessTierInferred = false; + doc.properties.accessTierChangeTime = context.startTime; + } else { + throw StorageErrorFactory.getAccessTierNotSupportedForBlobType( + context.contextId! + ); + } + + new BlobWriteLeaseSyncer(doc).sync(lease); + coll.update(doc); + + return responseCode; + } + + /** + * Update blob block item in persistency layer. Will create if block doesn't exist. + * Will also create a uncommitted block blob. + * + * @param {BlockModel} block + * @param {Context} context + * @returns {Promise} + * @memberof LokiBlobMetadataStore + */ + public async stageBlock( + context: Context, + block: BlockModel, + leaseAccessConditions?: Models.LeaseAccessConditions + ): Promise { + await this.checkContainerExist( + context, + block.accountName, + block.containerName + ); + + const blobColl = this.db.getCollection(this.BLOBS_COLLECTION); + const blobDoc = blobColl.findOne({ + accountName: block.accountName, + containerName: block.containerName, + name: block.blobName + }); + + let blobExist = false; + + if (!blobDoc) { + const etag = newEtag(); + const newBlob = { + deleted: false, + accountName: block.accountName, + containerName: block.containerName, + name: block.blobName, + properties: { + creationTime: context.startTime, + lastModified: context.startTime, + etag, + contentLength: 0, + blobType: Models.BlobType.BlockBlob + }, + snapshot: "", + isCommitted: false + }; + blobColl.insert(newBlob); + } else { + if (blobDoc.properties.blobType !== Models.BlobType.BlockBlob) { + throw StorageErrorFactory.getBlobInvalidBlobType(context.contextId); + } + + LeaseFactory.createLeaseState(new BlobLeaseAdapter(blobDoc), context) + .validate(new BlobWriteLeaseValidator(leaseAccessConditions)) + .sync(new BlobWriteLeaseSyncer(blobDoc)); + blobExist = true; + } + + const coll = this.db.getCollection(this.BLOCKS_COLLECTION); + + // If the new block ID does not have same length with before uncommited block ID, return failure. 
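  // A hedged illustration of the length rule checked just below: block IDs are compared by their
  // decoded byte length, so, for example, Buffer.from("AAAA", "base64").length === 3 while
  // Buffer.from("AAAAAA==", "base64").length === 4, and staging those two IDs against the same
  // uncommitted blob is rejected with InvalidBlobOrBlock.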
+ if (blobExist) { + const existBlockDoc = coll.findOne({ + accountName: block.accountName, + containerName: block.containerName, + blobName: block.blobName + }); + if (existBlockDoc) { + if ( + Buffer.from(existBlockDoc.name, "base64").length !== + Buffer.from(block.name, "base64").length + ) { + throw StorageErrorFactory.getInvalidBlobOrBlock(context.contextId); + } + } + } + + const blockDoc = coll.findOne({ + accountName: block.accountName, + containerName: block.containerName, + blobName: block.blobName, + name: block.name, + isCommitted: block.isCommitted + }); + + if (blockDoc) { + coll.remove(blockDoc); + } + + delete (block as any).$loki; + coll.insert(block); + } + + public async appendBlock( + context: Context, + block: BlockModel, + leaseAccessConditions: Models.LeaseAccessConditions = {}, + modifiedAccessConditions: Models.ModifiedAccessConditions = {}, + appendPositionAccessConditions: Models.AppendPositionAccessConditions = {} + ): Promise { + const doc = await this.getBlobWithLeaseUpdated( + block.accountName, + block.containerName, + block.blobName, + undefined, + context, + false, + true + ); + + validateWriteConditions(context, modifiedAccessConditions, doc); + + if (!doc) { + throw StorageErrorFactory.getBlobNotFound(context.contextId); + } + + new BlobWriteLeaseValidator(leaseAccessConditions).validate( + new BlobLeaseAdapter(doc), + context + ); + + if (doc.properties.blobType !== Models.BlobType.AppendBlob) { + throw StorageErrorFactory.getBlobInvalidBlobType(context.contextId); + } + + if ( + (doc.committedBlocksInOrder || []).length >= MAX_APPEND_BLOB_BLOCK_COUNT + ) { + throw StorageErrorFactory.getBlockCountExceedsLimit(context.contextId); + } + + if (appendPositionAccessConditions.appendPosition !== undefined) { + if ( + (doc.properties.contentLength || 0) !== + appendPositionAccessConditions.appendPosition + ) { + throw StorageErrorFactory.getAppendPositionConditionNotMet( + context.contextId + ); + } + } + + if (appendPositionAccessConditions.maxSize !== undefined) { + if ( + (doc.properties.contentLength || 0) + block.size > + appendPositionAccessConditions.maxSize + ) { + throw StorageErrorFactory.getMaxBlobSizeConditionNotMet( + context.contextId + ); + } + } + + const coll = this.db.getCollection(this.BLOBS_COLLECTION); + doc.committedBlocksInOrder = doc.committedBlocksInOrder || []; + doc.committedBlocksInOrder.push(block); + doc.properties.etag = newEtag(); + doc.properties.lastModified = context.startTime || new Date(); + doc.properties.contentLength = + (doc.properties.contentLength || 0) + block.size; + coll.update(doc); + + return doc.properties; + } + + /** + * Commit block list for a blob. 
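+ * Each entry in the block list is resolved by its commit type ("committed",
+ * "uncommitted" or "latest") against the staged blocks before the blob is committed.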
+ * + * @param {Context} context + * @param {BlobModel} blob + * @param {{ blockName: string; blockCommitType: string }[]} blockList + * @param {Models.LeaseAccessConditions} [leaseAccessConditions] + * @param {Models.ModifiedAccessConditions} [modifiedAccessConditions] + * @returns {Promise} + * @memberof LokiBlobMetadataStore + */ + public async commitBlockList( + context: Context, + blob: BlobModel, + blockList: { blockName: string; blockCommitType: string }[], + leaseAccessConditions?: Models.LeaseAccessConditions, + modifiedAccessConditions?: Models.ModifiedAccessConditions + ): Promise { + const coll = this.db.getCollection(this.BLOBS_COLLECTION); + const doc = await this.getBlobWithLeaseUpdated( + blob.accountName, + blob.containerName, + blob.name, + blob.snapshot, + context, + // XStore allows commit block list with empty block list to create a block blob without stage block call + // In this case, there will no existing blob doc exists + false + ); + + validateWriteConditions(context, modifiedAccessConditions, doc); + + // Create if not exists + if ( + modifiedAccessConditions && + modifiedAccessConditions.ifNoneMatch === "*" && + doc && + doc.isCommitted + ) { + throw StorageErrorFactory.getBlobAlreadyExists(context.contextId); + } + + let lease: ILease | undefined; + if (doc) { + if (doc.properties.blobType !== Models.BlobType.BlockBlob) { + throw StorageErrorFactory.getBlobInvalidBlobType(context.contextId); + } + + lease = new BlobLeaseAdapter(doc); + new BlobWriteLeaseValidator(leaseAccessConditions).validate( + lease, + context + ); + } + + // Get all blocks in persistency layer + const blockColl = this.db.getCollection(this.BLOCKS_COLLECTION); + const pUncommittedBlocks = blockColl + .chain() + .find({ + accountName: blob.accountName, + containerName: blob.containerName, + blobName: blob.name + }) + .data(); + + const pCommittedBlocksMap: Map = new Map(); // persistencyCommittedBlocksMap + if (doc) { + for (const pBlock of doc.committedBlocksInOrder || []) { + pCommittedBlocksMap.set(pBlock.name, pBlock); + } + } + + const pUncommittedBlocksMap: Map = new Map(); // persistencyUncommittedBlocksMap + for (const pBlock of pUncommittedBlocks) { + if (!pBlock.isCommitted) { + pUncommittedBlocksMap.set(pBlock.name, pBlock); + } + } + + const selectedBlockList: PersistencyBlockModel[] = []; + for (const block_1 of blockList) { + switch (block_1.blockCommitType.toLowerCase()) { + case "uncommitted": + const pUncommittedBlock = pUncommittedBlocksMap.get( + block_1.blockName + ); + if (pUncommittedBlock === undefined) { + throw StorageErrorFactory.getInvalidBlockList(context.contextId!); + } else { + selectedBlockList.push(pUncommittedBlock); + } + break; + case "committed": + const pCommittedBlock = pCommittedBlocksMap.get(block_1.blockName); + if (pCommittedBlock === undefined) { + throw StorageErrorFactory.getInvalidBlockList(context.contextId!); + } else { + selectedBlockList.push(pCommittedBlock); + } + break; + case "latest": + const pLatestBlock = + pUncommittedBlocksMap.get(block_1.blockName) || + pCommittedBlocksMap.get(block_1.blockName); + if (pLatestBlock === undefined) { + throw StorageErrorFactory.getInvalidBlockList(context.contextId!); + } else { + selectedBlockList.push(pLatestBlock); + } + break; + default: + throw StorageErrorFactory.getInvalidBlockList(context.contextId!); + } + } + + if (doc) { + // Commit block list + doc.properties.blobType = blob.properties.blobType; + doc.properties.lastModified = blob.properties.lastModified; + doc.committedBlocksInOrder = 
selectedBlockList; + doc.isCommitted = true; + doc.metadata = blob.metadata; + doc.properties.accessTier = blob.properties.accessTier; + doc.properties.accessTierInferred = blob.properties.accessTierInferred; + doc.properties.etag = blob.properties.etag; + doc.properties.cacheControl = blob.properties.cacheControl; + doc.properties.contentType = blob.properties.contentType; + doc.properties.contentMD5 = blob.properties.contentMD5; + doc.properties.contentEncoding = blob.properties.contentEncoding; + doc.properties.contentLanguage = blob.properties.contentLanguage; + doc.properties.contentDisposition = blob.properties.contentDisposition; + doc.blobTags = blob.blobTags; + doc.properties.contentLength = selectedBlockList + .map((block) => block.size) + .reduce((total, val) => { + return total + val; + }, 0); + + // set lease state to available if it's expired + if (lease) { + new BlobWriteLeaseSyncer(doc).sync(lease); + } + + coll.update(doc); + } else { + blob.committedBlocksInOrder = selectedBlockList; + blob.properties.contentLength = selectedBlockList + .map((block) => block.size) + .reduce((total, val) => { + return total + val; + }, 0); + coll.insert(blob); + } + + blockColl.findAndRemove({ + accountName: blob.accountName, + containerName: blob.containerName, + blobName: blob.name + }); + } + + /** + * Gets blocks list for a blob from persistency layer by account, container and blob names. + * + * @param {string} account + * @param {string} container + * @param {string} blob + * @param {(string | undefined)} snapshot + * @param {(boolean | undefined)} isCommitted + * @param {Context} context + * @returns {Promise<{ + * properties: Models.BlobProperties; + * uncommittedBlocks: Models.Block[]; + * committedBlocks: Models.Block[]; + * }>} + * @memberof LokiBlobMetadataStore + */ + public async getBlockList( + context: Context, + account: string, + container: string, + blob: string, + snapshot: string | undefined, + isCommitted: boolean | undefined, + leaseAccessConditions: Models.LeaseAccessConditions | undefined + ): Promise<{ + properties: Models.BlobPropertiesInternal; + uncommittedBlocks: Models.Block[]; + committedBlocks: Models.Block[]; + }> { + const doc = await this.getBlobWithLeaseUpdated( + account, + container, + blob, + snapshot, + context + ); + + new BlobReadLeaseValidator(leaseAccessConditions).validate( + new BlobLeaseAdapter(doc), + context + ); + + if (doc.properties.blobType !== Models.BlobType.BlockBlob) { + throw StorageErrorFactory.getBlobInvalidBlobType(context.contextId); + } + + const res: { + properties: Models.BlobPropertiesInternal; + uncommittedBlocks: Models.Block[]; + committedBlocks: Models.Block[]; + } = { + properties: doc.properties, + uncommittedBlocks: [], + committedBlocks: [] + }; + + if (isCommitted !== false && doc.committedBlocksInOrder !== undefined) { + res.committedBlocks = doc.committedBlocksInOrder; + } + + if (isCommitted !== true) { + const blockColl = this.db.getCollection(this.BLOCKS_COLLECTION); + const blocks = await blockColl + .chain() + .find({ + accountName: account, + containerName: container, + blobName: blob + }) + .simplesort("$loki") + .data(); + + for (const item of blocks) { + res.uncommittedBlocks.push(item); + } + } + + return res; + } + + /** + * Upload new pages for page blob. 
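+ * The uploaded range is merged into the blob's existing page ranges in order.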
+ * + * @param {Context} context + * @param {BlobModel} blob + * @param {number} start + * @param {number} end + * @param {IExtentChunk} persistency + * @param {Models.LeaseAccessConditions} [leaseAccessConditions] + * @param {Models.ModifiedAccessConditions} [modifiedAccessConditions] + * @param {Models.SequenceNumberAccessConditions} [sequenceNumberAccessConditions] + * @returns {Promise} + * @memberof LokiBlobMetadataStore + */ + public async uploadPages( + context: Context, + blob: BlobModel, + start: number, + end: number, + persistency: IExtentChunk, + leaseAccessConditions?: Models.LeaseAccessConditions, + modifiedAccessConditions?: Models.ModifiedAccessConditions, + sequenceNumberAccessConditions?: Models.SequenceNumberAccessConditions + ): Promise { + const coll = this.db.getCollection(this.BLOBS_COLLECTION); + const doc = await this.getBlobWithLeaseUpdated( + blob.accountName, + blob.containerName, + blob.name, + blob.snapshot, + context!, + false, + true + ); + + validateWriteConditions(context, modifiedAccessConditions, doc); + + validateSequenceNumberWriteConditions( + context, + sequenceNumberAccessConditions, + doc + ); + + if (!doc) { + throw StorageErrorFactory.getBlobNotFound(context.contextId); + } + + if (doc.properties.blobType !== Models.BlobType.PageBlob) { + throw StorageErrorFactory.getBlobInvalidBlobType(context.contextId); + } + + const lease = new BlobLeaseAdapter(doc); + new BlobWriteLeaseValidator(leaseAccessConditions).validate(lease, context); + + this.pageBlobRangesManager.mergeRange(doc.pageRangesInOrder || [], { + start, + end, + persistency + }); + + // set lease state to available if it's expired + new BlobWriteLeaseSyncer(doc).sync(lease); + + doc.properties.etag = newEtag(); + doc.properties.lastModified = context.startTime || new Date(); + + coll.update(doc); + + return doc.properties; + } + + /** + * Clear range for a page blob. 
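+ * The cleared range is removed from the blob's tracked page ranges.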
+ * + * @param {Context} context + * @param {BlobModel} blob + * @param {number} start + * @param {number} end + * @param {Models.LeaseAccessConditions} [leaseAccessConditions] + * @param {Models.ModifiedAccessConditions} [modifiedAccessConditions] + * @param {Models.SequenceNumberAccessConditions} [sequenceNumberAccessConditions] + * @returns {Promise} + * @memberof LokiBlobMetadataStore + */ + public async clearRange( + context: Context, + blob: BlobModel, + start: number, + end: number, + leaseAccessConditions?: Models.LeaseAccessConditions, + modifiedAccessConditions?: Models.ModifiedAccessConditions, + sequenceNumberAccessConditions?: Models.SequenceNumberAccessConditions + ): Promise { + const coll = this.db.getCollection(this.BLOBS_COLLECTION); + const doc = await this.getBlobWithLeaseUpdated( + blob.accountName, + blob.containerName, + blob.name, + blob.snapshot, + context!, + false, + true + ); + + validateWriteConditions(context, modifiedAccessConditions, doc); + + validateSequenceNumberWriteConditions( + context, + sequenceNumberAccessConditions, + doc + ); + + if (!doc) { + throw StorageErrorFactory.getBlobNotFound(context.contextId); + } + + const lease = new BlobLeaseAdapter(doc); + new BlobWriteLeaseValidator(leaseAccessConditions).validate(lease, context); + + this.pageBlobRangesManager.clearRange(doc.pageRangesInOrder || [], { + start, + end + }); + + // TODO: Check other blob update operations need lease reset or not + // set lease state to available if it's expired + new BlobWriteLeaseSyncer(doc).sync(lease); + + doc.properties.etag = newEtag(); + doc.properties.lastModified = context.startTime || new Date(); + + coll.update(doc); + + return doc.properties; + } + + /** + * Returns the list of valid page ranges for a page blob or snapshot of a page blob. + * + * @param {Context} context + * @param {string} account + * @param {string} container + * @param {string} blob + * @param {string} [snapshot] + * @param {Models.LeaseAccessConditions} [leaseAccessConditions] + * @param {Models.ModifiedAccessConditions} [modifiedAccessConditions] + * @returns {Promise} + * @memberof LokiBlobMetadataStore + */ + public async getPageRanges( + context: Context, + account: string, + container: string, + blob: string, + snapshot?: string, + leaseAccessConditions?: Models.LeaseAccessConditions, + modifiedAccessConditions?: Models.ModifiedAccessConditions + ): Promise { + const doc = await this.getBlobWithLeaseUpdated( + account, + container, + blob, + snapshot, + context, + false, + true + ); + + validateReadConditions(context, modifiedAccessConditions, doc); + + if (!doc) { + throw StorageErrorFactory.getBlobNotFound(context.contextId); + } + + if (doc.properties.blobType !== Models.BlobType.PageBlob) { + throw StorageErrorFactory.getBlobInvalidBlobType(context.contextId); + } + + new BlobReadLeaseValidator(leaseAccessConditions).validate( + new BlobLeaseAdapter(doc), + context + ); + + return { + properties: doc.properties, + pageRangesInOrder: doc.pageRangesInOrder + }; + } + + /** + * Resize a page blob. 
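+ * When shrinking the blob, page ranges beyond the new content length are cleared.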
+ * + * @param {Context} context + * @param {string} account + * @param {string} container + * @param {string} blob + * @param {number} blobContentLength + * @param {Models.LeaseAccessConditions} [leaseAccessConditions] + * @param {Models.ModifiedAccessConditions} [modifiedAccessConditions] + * @returns {Promise} + * @memberof LokiBlobMetadataStore + */ + public async resizePageBlob( + context: Context, + account: string, + container: string, + blob: string, + blobContentLength: number, + leaseAccessConditions?: Models.LeaseAccessConditions, + modifiedAccessConditions?: Models.ModifiedAccessConditions + ): Promise { + const coll = this.db.getCollection(this.BLOBS_COLLECTION); + const doc = await this.getBlobWithLeaseUpdated( + account, + container, + blob, + undefined, + context, + false, + true + ); + + validateWriteConditions(context, modifiedAccessConditions, doc); + + if (!doc) { + throw StorageErrorFactory.getBlobNotFound(context.contextId); + } + + if (doc.properties.blobType !== Models.BlobType.PageBlob) { + throw StorageErrorFactory.getInvalidOperation( + context.contextId, + "Resize could only be against a page blob." + ); + } + + const lease = new BlobLeaseAdapter(doc); + new BlobWriteLeaseValidator(leaseAccessConditions).validate(lease, context); + + doc.pageRangesInOrder = doc.pageRangesInOrder || []; + if (doc.properties.contentLength! > blobContentLength) { + const start = blobContentLength; + const end = doc.properties.contentLength! - 1; + this.pageBlobRangesManager.clearRange(doc.pageRangesInOrder || [], { + start, + end + }); + } + + doc.properties.contentLength = blobContentLength; + doc.properties.lastModified = context.startTime || new Date(); + doc.properties.etag = newEtag(); + + new BlobWriteLeaseSyncer(doc).sync(lease); + + coll.update(doc); + return doc.properties; + } + + /** + * Update the sequence number of a page blob. + * + * @param {Context} context + * @param {string} account + * @param {string} container + * @param {string} blob + * @param {Models.SequenceNumberActionType} sequenceNumberAction + * @param {(number | undefined)} blobSequenceNumber + * @param {Models.LeaseAccessConditions} [leaseAccessConditions] + * @param {Models.ModifiedAccessConditions} [modifiedAccessConditions] + * @returns {Promise} + * @memberof LokiBlobMetadataStore + */ + public async updateSequenceNumber( + context: Context, + account: string, + container: string, + blob: string, + sequenceNumberAction: Models.SequenceNumberActionType, + blobSequenceNumber: number | undefined, + leaseAccessConditions?: Models.LeaseAccessConditions, + modifiedAccessConditions?: Models.ModifiedAccessConditions + ): Promise { + const coll = this.db.getCollection(this.BLOBS_COLLECTION); + const doc = await this.getBlobWithLeaseUpdated( + account, + container, + blob, + undefined, + context, + false, + true + ); + + validateWriteConditions(context, modifiedAccessConditions, doc); + + if (!doc) { + throw StorageErrorFactory.getBlobNotFound(context.contextId); + } + + if (doc.properties.blobType !== Models.BlobType.PageBlob) { + throw StorageErrorFactory.getInvalidOperation( + context.contextId!, + "Get Page Ranges could only be against a page blob." 
+ ); + } + + const lease = new BlobLeaseAdapter(doc); + new BlobWriteLeaseValidator(leaseAccessConditions).validate(lease, context); + + if (doc.properties.blobSequenceNumber === undefined) { + doc.properties.blobSequenceNumber = 0; + } + + switch (sequenceNumberAction) { + case Models.SequenceNumberActionType.Max: + if (blobSequenceNumber === undefined) { + throw StorageErrorFactory.getInvalidOperation( + context.contextId!, + "x-ms-blob-sequence-number is required when x-ms-sequence-number-action is set to max." + ); + } + doc.properties.blobSequenceNumber = Math.max( + doc.properties.blobSequenceNumber, + blobSequenceNumber + ); + break; + case Models.SequenceNumberActionType.Increment: + if (blobSequenceNumber !== undefined) { + throw StorageErrorFactory.getInvalidOperation( + context.contextId!, + "x-ms-blob-sequence-number cannot be provided when x-ms-sequence-number-action is set to increment." + ); + } + doc.properties.blobSequenceNumber++; + break; + case Models.SequenceNumberActionType.Update: + if (blobSequenceNumber === undefined) { + throw StorageErrorFactory.getInvalidOperation( + context.contextId!, + "x-ms-blob-sequence-number is required when x-ms-sequence-number-action is set to update." + ); + } + doc.properties.blobSequenceNumber = blobSequenceNumber; + break; + default: + throw StorageErrorFactory.getInvalidOperation( + context.contextId!, + "Unsupported x-ms-sequence-number-action value." + ); + } + + doc.properties.etag = newEtag(); + doc.properties.lastModified = context.startTime!; + new BlobWriteLeaseSyncer(doc).sync(lease); + + coll.update(doc); + return doc.properties; + } + + public async listUncommittedBlockPersistencyChunks( + marker: string = "-1", + maxResults: number = 2000 + ): Promise<[IExtentChunk[], string | undefined]> { + const coll = this.db.getCollection(this.BLOCKS_COLLECTION); + const blockDocs = coll + .chain() + .where((obj) => { + return obj.$loki > parseInt(marker, 10); + }) + .simplesort("$loki") + .limit(maxResults + 1) + .data(); + + if (blockDocs.length <= maxResults) { + return [blockDocs.map((block) => block.persistency), undefined]; + } else { + blockDocs.pop(); + const nextMarker = `${blockDocs[maxResults - 1].$loki}`; + return [blockDocs.map((block) => block.persistency), nextMarker]; + } + } + + /** + * LokiJS will persist Uint8Array into Object. + * This method will restore object to Uint8Array. + * + * @private + * @param {*} obj + * @returns {(Uint8Array | undefined)} + * @memberof LokiBlobMetadataStore + */ + private restoreUint8Array(obj: any): Uint8Array | undefined { + if (typeof obj !== "object") { + return undefined; + } + + if (obj instanceof Uint8Array) { + return obj; + } + + if (obj.type === "Buffer") { + obj = obj.data; + } + + const length = Object.keys(obj).length; + const arr = Buffer.allocUnsafe(length); + + for (let i = 0; i < length; i++) { + if (!obj.hasOwnProperty(i)) { + throw new TypeError( + `Cannot restore loki DB persisted object to Uint8Array. Key ${i} is missing.` + ); + } + + arr[i] = obj[i]; + } + + return arr; + } + + /** + * Escape a string to be used as a regex. + * + * @private + * @param {string} regex + * @returns {string} + * @memberof LokiBlobMetadataStore + */ + private escapeRegex(regex: string): string { + return regex.replace(/[-\/\\^$*+?.()|[\]{}]/g, "\\$&"); + } + + /** + * Get a container document from container collection. + * Updated lease related properties according to current time. 
+ * + * @private + * @param {string} account + * @param {string} container + * @param {Context} context + * @returns {Promise} + * @memberof LokiBlobMetadataStore + */ + + /** + * Get a container document from container collection. + * Updated lease related properties according to current time. + * Will throw ContainerNotFound storage error if container doesn't exist. + * + * @private + * @param {string} account + * @param {string} container + * @param {Context} context + * @returns {Promise} + * @memberof LokiBlobMetadataStore + */ + private async getContainerWithLeaseUpdated( + account: string, + container: string, + context: Context, + forceExist?: true + ): Promise; + + /** + * Get a container document from container collection. + * Updated lease related properties according to current time. + * Will NOT throw ContainerNotFound storage error if container doesn't exist. + * + * @private + * @param {string} account + * @param {string} container + * @param {Context} context + * @param {false} forceExist + * @returns {(Promise)} + * @memberof LokiBlobMetadataStore + */ + private async getContainerWithLeaseUpdated( + account: string, + container: string, + context: Context, + forceExist: false + ): Promise; + + private async getContainerWithLeaseUpdated( + account: string, + container: string, + context: Context, + forceExist?: boolean + ): Promise { + const coll = this.db.getCollection(this.CONTAINERS_COLLECTION); + const doc = coll.findOne({ accountName: account, name: container }); + + if (forceExist === undefined || forceExist === true) { + if (!doc) { + throw StorageErrorFactory.getContainerNotFound(context.contextId); + } + } + + if (!doc) { + return undefined; + } + + LeaseFactory.createLeaseState(new ContainerLeaseAdapter(doc), context).sync( + new ContainerLeaseSyncer(doc) + ); + + return doc; + } + + /** + * Get a container document from Loki collections. + * Will throw ContainerNotFound error when container doesn't exist. + * + * @private + * @param {string} account + * @param {string} container + * @param {Context} context + * @param {true} [forceExist] + * @returns {Promise} + * @memberof LokiBlobMetadataStore + */ + private async getContainer( + account: string, + container: string, + context: Context, + forceExist?: true + ): Promise; + + /** + * Get a container document from Loki collections. + * Will NOT throw ContainerNotFound error when container doesn't exist. + * + * @private + * @param {string} account + * @param {string} container + * @param {Context} context + * @param {false} forceExist + * @returns {Promise} + * @memberof LokiBlobMetadataStore + */ + private async getContainer( + account: string, + container: string, + context: Context, + forceExist: false + ): Promise; + + private async getContainer( + account: string, + container: string, + context: Context, + forceExist?: boolean + ): Promise { + const coll = this.db.getCollection(this.CONTAINERS_COLLECTION); + const doc = coll.findOne({ accountName: account, name: container }); + + if (!doc) { + if (forceExist) { + throw StorageErrorFactory.getContainerNotFound(context.contextId); + } else { + return undefined; + } + } + + return doc; + } + + /** + * Get a blob document model from Loki collection. + * Will throw BlobNotFound storage error if blob doesn't exist. 
+ * + * @private + * @param {string} account + * @param {string} container + * @param {string} blob + * @param {(string | undefined)} snapshot + * @param {Context} context + * @param {undefined} [forceExist] + * @param {boolean} [forceCommitted] If true, will take uncommitted blob as a non-exist blob and throw exception. + * @returns {Promise} + * @memberof LokiBlobMetadataStore + */ + private async getBlobWithLeaseUpdated( + account: string, + container: string, + blob: string, + snapshot: string | undefined, + context: Context, + forceExist?: true, + forceCommitted?: boolean + ): Promise; + + /** + * Get a blob document model from Loki collection. + * Will NOT throw BlobNotFound storage error if blob doesn't exist. + * + * @private + * @param {string} account + * @param {string} container + * @param {string} blob + * @param {(string | undefined)} snapshot + * @param {Context} context + * @param {false} forceExist + * @param {boolean} [forceCommitted] If true, will take uncommitted blob as a non-exist blob and return undefined. + * @returns {(Promise)} + * @memberof LokiBlobMetadataStore + */ + private async getBlobWithLeaseUpdated( + account: string, + container: string, + blob: string, + snapshot: string | undefined, + context: Context, + forceExist: false, + forceCommitted?: boolean + ): Promise; + + private async getBlobWithLeaseUpdated( + account: string, + container: string, + blob: string, + snapshot: string = "", + context: Context, + forceExist?: boolean, + forceCommitted?: boolean + ): Promise { + await this.checkContainerExist(context, account, container); + + const coll = this.db.getCollection(this.BLOBS_COLLECTION); + const doc = coll.findOne({ + accountName: account, + containerName: container, + name: blob, + snapshot + }); + + // Force exist if parameter forceExist is undefined or true + if (forceExist === undefined || forceExist === true) { + if (forceCommitted) { + if (!doc || !(doc as BlobModel).isCommitted) { + throw StorageErrorFactory.getBlobNotFound(context.contextId); + } + } else { + if (!doc) { + throw StorageErrorFactory.getBlobNotFound(context.contextId); + } + } + } else { + if (forceCommitted) { + if (!doc || !(doc as BlobModel).isCommitted) { + return undefined; + } + } else { + if (!doc) { + return undefined; + } + } + } + + if (doc.properties) { + doc.properties.contentMD5 = this.restoreUint8Array( + doc.properties.contentMD5 + ); + } + + // Snapshot doesn't have lease + if (snapshot !== undefined && snapshot !== "") { + new BlobLeaseSyncer(doc).sync({ + leaseId: undefined, + leaseExpireTime: undefined, + leaseDurationSeconds: undefined, + leaseBreakTime: undefined, + leaseDurationType: undefined, + leaseState: Models.LeaseStateType.Available, // TODO: Lease state & status should be undefined for snapshots + leaseStatus: Models.LeaseStatusType.Unlocked // TODO: Lease state & status should be undefined for snapshots + }); + } else { + LeaseFactory.createLeaseState(new BlobLeaseAdapter(doc), context).sync( + new BlobLeaseSyncer(doc) + ); + } + + return doc; + } + + /** + * Set blob tags. 
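+ * Replaces the complete set of tags stored on the blob or the specified snapshot.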
+ * + * @param {Context} context + * @param {string} account + * @param {string} container + * @param {string} blob + * @param {(string | undefined)} snapshot + * @param {(Models.LeaseAccessConditions | undefined)} leaseAccessConditions + * @param {(Models.BlobTags | undefined)} tags + * @param {Models.ModifiedAccessConditions} [modifiedAccessConditions] + * @returns {Promise} + * @memberof LokiBlobMetadataStore + */ + public async setBlobTag( + context: Context, + account: string, + container: string, + blob: string, + snapshot: string | undefined, + leaseAccessConditions: Models.LeaseAccessConditions | undefined, + tags: Models.BlobTags | undefined, + modifiedAccessConditions?: Models.ModifiedAccessConditions + ): Promise { + const coll = this.db.getCollection(this.BLOBS_COLLECTION); + const doc = await this.getBlobWithLeaseUpdated( + account, + container, + blob, + snapshot, + context, + false, + true + ); + + if (!doc) { + throw StorageErrorFactory.getBlobNotFound(context.contextId); + } + + const lease = new BlobLeaseAdapter(doc); + new BlobWriteLeaseValidator(leaseAccessConditions).validate(lease, context); + new BlobWriteLeaseSyncer(doc).sync(lease); + doc.blobTags = tags; + doc.properties.etag = newEtag(); + doc.properties.lastModified = context.startTime || new Date(); + coll.update(doc); + } + + /** + * Get blob tags. + * + * @param {Context} context + * @param {string} account + * @param {string} container + * @param {string} blob + * @param {(string | undefined)} snapshot + * @param {(Models.LeaseAccessConditions | undefined)} leaseAccessConditions + * @param {Models.ModifiedAccessConditions} [modifiedAccessConditions] + * @returns {Promise} + * @memberof LokiBlobMetadataStore + */ + public async getBlobTag( + context: Context, + account: string, + container: string, + blob: string, + snapshot: string = "", + leaseAccessConditions: Models.LeaseAccessConditions | undefined, + modifiedAccessConditions?: Models.ModifiedAccessConditions + ): Promise { + const doc = await this.getBlobWithLeaseUpdated( + account, + container, + blob, + snapshot, + context, + false, + true + ); + + // When block blob don't have commited block, should return 404 + if (!doc) { + throw StorageErrorFactory.getBlobNotFound(context.contextId); + } + + new BlobReadLeaseValidator(leaseAccessConditions).validate( + new BlobLeaseAdapter(doc), + context + ); + + return doc.blobTags; + } + + /** + * Get the tier setting from request headers. 
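+ * Accepts "hot", "cool" or "archive" case-insensitively; any other value returns undefined.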
+ * + * @private + * @param {string} tier + * @returns {(Models.AccessTier | undefined)} + * @memberof BlobHandler + */ + private parseTier(tier: string): Models.AccessTier | undefined { + tier = tier.toLowerCase(); + if (tier === Models.AccessTier.Hot.toLowerCase()) { + return Models.AccessTier.Hot; + } + if (tier === Models.AccessTier.Cool.toLowerCase()) { + return Models.AccessTier.Cool; + } + if (tier === Models.AccessTier.Archive.toLowerCase()) { + return Models.AccessTier.Archive; + } + return undefined; + } +} diff --git a/src/blob/persistence/QueryInterpreter/IQueryContext.ts b/src/blob/persistence/QueryInterpreter/IQueryContext.ts new file mode 100644 index 000000000..3f42ca1a4 --- /dev/null +++ b/src/blob/persistence/QueryInterpreter/IQueryContext.ts @@ -0,0 +1 @@ +export type IQueryContext = any; \ No newline at end of file diff --git a/src/blob/persistence/QueryInterpreter/QueryInterpreter.ts b/src/blob/persistence/QueryInterpreter/QueryInterpreter.ts new file mode 100644 index 000000000..4928243f5 --- /dev/null +++ b/src/blob/persistence/QueryInterpreter/QueryInterpreter.ts @@ -0,0 +1,75 @@ +import { BlobTags } from "../../generated/artifacts/models"; +import { FilterBlobModel } from "../IBlobMetadataStore"; +import BinaryOperatorNode from "./QueryNodes/BinaryOperatorNode"; +import ExpressionNode from "./QueryNodes/ExpressionNode"; +import IQueryNode, { TagContent } from "./QueryNodes/IQueryNode"; +import KeyNode from "./QueryNodes/KeyNode"; +import parseQuery from "./QueryParser"; + +export default function executeQuery(context: FilterBlobModel, queryTree: IQueryNode): TagContent[] { + let tags: any = {}; + const blobTags = context.tags; + if (blobTags) { + let blobTagsValue: BlobTags; + if (typeof (blobTags) === 'string') { + blobTagsValue = JSON.parse(blobTags as any); + } + else { + blobTagsValue = blobTags; + } + blobTagsValue.blobTagSet.forEach((aTag) => { + tags[aTag.key] = aTag.value; + }) + } + tags["@container"] = context.containerName; + return queryTree.evaluate(tags) +} + +/** + * Validates that the provided query tree represents a valid query. + * + * That is, a query containing at least one conditional expression, + * where every conditional expression operates on at least + * one column or built-in identifier (i.e. comparison between two constants is not allowed). 
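+ * For example, `"Status" = 'Active'` references the tag key `Status` and passes this check.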
+ * + * @param {IQueryNode} queryTree + */ +export function validateQueryTree(queryTree: IQueryNode) { + const identifierReferences = countIdentifierReferences(queryTree); + + if (!identifierReferences) { + throw new Error("Invalid Query, no identifier references found.") + } +} + +function countIdentifierReferences(queryTree: IQueryNode): number { + if (queryTree instanceof KeyNode) { + return 1; + } + + if (queryTree instanceof BinaryOperatorNode) { + return countIdentifierReferences(queryTree.left) + countIdentifierReferences(queryTree.right) + } + + if (queryTree instanceof ExpressionNode) { + return countIdentifierReferences(queryTree.child) + } + + return 0 +} + + +export function generateQueryBlobWithTagsWhereFunction( + query: string | undefined, + conditions: boolean = false +): (entity: any) => TagContent[] { + if (query === undefined) { + return () => { + return []; + } + } + + const queryTree = parseQuery(query); + validateQueryTree(queryTree); + return (entity) => executeQuery(entity, queryTree); +} \ No newline at end of file diff --git a/src/blob/persistence/QueryInterpreter/QueryNodes/AndNode.ts b/src/blob/persistence/QueryInterpreter/QueryNodes/AndNode.ts new file mode 100644 index 000000000..48ffab431 --- /dev/null +++ b/src/blob/persistence/QueryInterpreter/QueryNodes/AndNode.ts @@ -0,0 +1,20 @@ +import { IQueryContext } from "../IQueryContext"; +import BinaryOperatorNode from "./BinaryOperatorNode"; +import { TagContent } from "./IQueryNode"; + +export default class AndNode extends BinaryOperatorNode { + get name(): string { + return `and` + } + + evaluate(context: IQueryContext): TagContent[] { + const leftContent = this.left.evaluate(context); + const rightContent = this.right.evaluate(context); + if (leftContent.length !== 0 && rightContent.length !== 0) { + return leftContent.concat(rightContent); + } + else { + return []; + } + } +} \ No newline at end of file diff --git a/src/blob/persistence/QueryInterpreter/QueryNodes/BinaryOperatorNode.ts b/src/blob/persistence/QueryInterpreter/QueryNodes/BinaryOperatorNode.ts new file mode 100644 index 000000000..cf04bf479 --- /dev/null +++ b/src/blob/persistence/QueryInterpreter/QueryNodes/BinaryOperatorNode.ts @@ -0,0 +1,14 @@ +import { IQueryContext } from "../IQueryContext"; +import IQueryNode from "./IQueryNode"; + +export default abstract class BinaryOperatorNode implements IQueryNode { + constructor(public left: IQueryNode, public right: IQueryNode) { } + + abstract evaluate(context: IQueryContext): any + + abstract get name(): string + + toString(): string { + return `(${this.name} ${this.left.toString()} ${this.right.toString()})` + } +} \ No newline at end of file diff --git a/src/blob/persistence/QueryInterpreter/QueryNodes/ConstantNode.ts b/src/blob/persistence/QueryInterpreter/QueryNodes/ConstantNode.ts new file mode 100644 index 000000000..94b7236b2 --- /dev/null +++ b/src/blob/persistence/QueryInterpreter/QueryNodes/ConstantNode.ts @@ -0,0 +1,20 @@ +import { IQueryContext } from "../IQueryContext"; +import IQueryNode, { TagContent } from "./IQueryNode"; + +export default class ConstantNode implements IQueryNode { + constructor(private value: string) { } + + get name(): string { + return "constant" + } + + evaluate(_context: IQueryContext): TagContent[] { + return [{ + value: this.value + }]; + } + + toString(): string { + return JSON.stringify(this.value) + } +} \ No newline at end of file diff --git a/src/blob/persistence/QueryInterpreter/QueryNodes/EqualsNode.ts 
b/src/blob/persistence/QueryInterpreter/QueryNodes/EqualsNode.ts new file mode 100644 index 000000000..ffcf09cf9 --- /dev/null +++ b/src/blob/persistence/QueryInterpreter/QueryNodes/EqualsNode.ts @@ -0,0 +1,26 @@ +import { IQueryContext } from "../IQueryContext"; +import BinaryOperatorNode from "./BinaryOperatorNode"; +import { TagContent } from "./IQueryNode"; + +export default class EqualsNode extends BinaryOperatorNode { + get name(): string { + return `eq` + } + + evaluate(context: IQueryContext): TagContent[] { + const leftContent = this.left.evaluate(context); + const rightContent = this.right.evaluate(context); + + if (leftContent[0].value === rightContent[0].value) { + if (leftContent[0].key !== undefined) { + return leftContent; + } + else { + return rightContent; + } + } + else { + return []; + } + } +} \ No newline at end of file diff --git a/src/blob/persistence/QueryInterpreter/QueryNodes/ExpressionNode.ts b/src/blob/persistence/QueryInterpreter/QueryNodes/ExpressionNode.ts new file mode 100644 index 000000000..8c3180de1 --- /dev/null +++ b/src/blob/persistence/QueryInterpreter/QueryNodes/ExpressionNode.ts @@ -0,0 +1,18 @@ +import { IQueryContext } from "../IQueryContext"; +import IQueryNode, { TagContent } from "./IQueryNode"; + +export default class ExpressionNode implements IQueryNode { + constructor(public child: IQueryNode) { } + + get name(): string { + return "expression" + } + + evaluate(context: IQueryContext): TagContent[] { + return this.child.evaluate(context) + } + + toString(): string { + return `(${this.child.toString()})` + } +} \ No newline at end of file diff --git a/src/blob/persistence/QueryInterpreter/QueryNodes/GreaterThanEqualNode.ts b/src/blob/persistence/QueryInterpreter/QueryNodes/GreaterThanEqualNode.ts new file mode 100644 index 000000000..d2c79c375 --- /dev/null +++ b/src/blob/persistence/QueryInterpreter/QueryNodes/GreaterThanEqualNode.ts @@ -0,0 +1,27 @@ +import { IQueryContext } from "../IQueryContext"; +import BinaryOperatorNode from "./BinaryOperatorNode"; + +export default class GreaterThanEqualNode extends BinaryOperatorNode { + get name(): string { + return `gte` + } + + evaluate(context: IQueryContext): any { + const leftContent = this.left.evaluate(context); + const rightContent = this.right.evaluate(context); + + if (leftContent[0].value !== undefined + && rightContent[0].value !== undefined + && (leftContent[0].value >= rightContent[0].value)) { + if (leftContent[0].key !== undefined) { + return leftContent; + } + else { + return rightContent; + } + } + else { + return []; + } + } +} \ No newline at end of file diff --git a/src/blob/persistence/QueryInterpreter/QueryNodes/GreaterThanNode.ts b/src/blob/persistence/QueryInterpreter/QueryNodes/GreaterThanNode.ts new file mode 100644 index 000000000..3e461c20d --- /dev/null +++ b/src/blob/persistence/QueryInterpreter/QueryNodes/GreaterThanNode.ts @@ -0,0 +1,28 @@ +import { IQueryContext } from "../IQueryContext"; +import BinaryOperatorNode from "./BinaryOperatorNode"; +import { TagContent } from "./IQueryNode"; + +export default class GreaterThanNode extends BinaryOperatorNode { + get name(): string { + return `gt` + } + + evaluate(context: IQueryContext): TagContent[] { + const leftContent = this.left.evaluate(context); + const rightContent = this.right.evaluate(context); + + if (leftContent[0].value !== undefined + && rightContent[0].value !== undefined + && (leftContent[0].value > rightContent[0].value)) { + if (leftContent[0].key !== undefined) { + return leftContent; + } + else { + 
return rightContent; + } + } + else { + return []; + } + } +} \ No newline at end of file diff --git a/src/blob/persistence/QueryInterpreter/QueryNodes/IQueryNode.ts b/src/blob/persistence/QueryInterpreter/QueryNodes/IQueryNode.ts new file mode 100644 index 000000000..9f045961b --- /dev/null +++ b/src/blob/persistence/QueryInterpreter/QueryNodes/IQueryNode.ts @@ -0,0 +1,14 @@ +import { IQueryContext } from "../IQueryContext"; + +export interface TagContent { + key?: string; + value?: string; +} + +export default interface IQueryNode { + get name(): string + + evaluate(context: IQueryContext): TagContent[] + + toString(): string +} \ No newline at end of file diff --git a/src/blob/persistence/QueryInterpreter/QueryNodes/KeyNode.ts b/src/blob/persistence/QueryInterpreter/QueryNodes/KeyNode.ts new file mode 100644 index 000000000..68df1a62a --- /dev/null +++ b/src/blob/persistence/QueryInterpreter/QueryNodes/KeyNode.ts @@ -0,0 +1,21 @@ +import { IQueryContext } from "../IQueryContext"; +import IQueryNode, { TagContent } from "./IQueryNode"; + +export default class KeyNode implements IQueryNode { + constructor(private identifier: string) { } + + get name(): string { + return "id" + } + + evaluate(context: IQueryContext): TagContent[] { + return [{ + key: this.identifier, + value: context[this.identifier] + }]; + } + + toString(): string { + return `(${this.name} ${this.identifier})` + } +} \ No newline at end of file diff --git a/src/blob/persistence/QueryInterpreter/QueryNodes/LessThanEqualNode.ts b/src/blob/persistence/QueryInterpreter/QueryNodes/LessThanEqualNode.ts new file mode 100644 index 000000000..c6c8ef1d8 --- /dev/null +++ b/src/blob/persistence/QueryInterpreter/QueryNodes/LessThanEqualNode.ts @@ -0,0 +1,27 @@ +import { IQueryContext } from "../IQueryContext"; +import BinaryOperatorNode from "./BinaryOperatorNode"; + +export default class LessThanEqualNode extends BinaryOperatorNode { + get name(): string { + return `lte` + } + + evaluate(context: IQueryContext): any { + const leftContent = this.left.evaluate(context); + const rightContent = this.right.evaluate(context); + + if (leftContent[0].value !== undefined + && rightContent[0].value !== undefined + && (leftContent[0].value <= rightContent[0].value)) { + if (leftContent[0].key !== undefined) { + return leftContent; + } + else { + return rightContent; + } + } + else { + return []; + } + } +} \ No newline at end of file diff --git a/src/blob/persistence/QueryInterpreter/QueryNodes/LessThanNode.ts b/src/blob/persistence/QueryInterpreter/QueryNodes/LessThanNode.ts new file mode 100644 index 000000000..d5d788927 --- /dev/null +++ b/src/blob/persistence/QueryInterpreter/QueryNodes/LessThanNode.ts @@ -0,0 +1,27 @@ +import { IQueryContext } from "../IQueryContext"; +import BinaryOperatorNode from "./BinaryOperatorNode"; + +export default class LessThanNode extends BinaryOperatorNode { + get name(): string { + return `lt` + } + + evaluate(context: IQueryContext): any { + const leftContent = this.left.evaluate(context); + const rightContent = this.right.evaluate(context); + + if (leftContent[0].value !== undefined + && rightContent[0].value !== undefined + && (leftContent[0].value < rightContent[0].value)) { + if (leftContent[0].key !== undefined) { + return leftContent; + } + else { + return rightContent; + } + } + else { + return []; + } + } +} \ No newline at end of file diff --git a/src/blob/persistence/QueryInterpreter/QueryNodes/NotEqualsNode.ts b/src/blob/persistence/QueryInterpreter/QueryNodes/NotEqualsNode.ts new file mode 
100644 index 000000000..b757f9cac --- /dev/null +++ b/src/blob/persistence/QueryInterpreter/QueryNodes/NotEqualsNode.ts @@ -0,0 +1,28 @@ +import { IQueryContext } from "../IQueryContext"; +import BinaryOperatorNode from "./BinaryOperatorNode"; +import { TagContent } from "./IQueryNode"; + +export default class NotEqualsNode extends BinaryOperatorNode { + get name(): string { + return `ne` + } + + evaluate(context: IQueryContext): TagContent[] { + const leftContent = this.left.evaluate(context); + const rightContent = this.right.evaluate(context); + + if (leftContent[0].value !== undefined + && rightContent[0].value !== undefined + && (leftContent[0].value !== rightContent[0].value)) { + if (leftContent[0].key !== undefined) { + return leftContent; + } + else { + return rightContent; + } + } + else { + return []; + } + } +} \ No newline at end of file diff --git a/src/blob/persistence/QueryInterpreter/QueryNodes/OrNode.ts b/src/blob/persistence/QueryInterpreter/QueryNodes/OrNode.ts new file mode 100644 index 000000000..0337bab1d --- /dev/null +++ b/src/blob/persistence/QueryInterpreter/QueryNodes/OrNode.ts @@ -0,0 +1,20 @@ +import { IQueryContext } from "../IQueryContext"; +import BinaryOperatorNode from "./BinaryOperatorNode"; +import { TagContent } from "./IQueryNode"; + +export default class OrNode extends BinaryOperatorNode { + get name(): string { + return `or` + } + + evaluate(context: IQueryContext): TagContent[] { + const leftContent = this.left.evaluate(context); + const rightContent = this.right.evaluate(context); + if (leftContent.length !== 0 || rightContent.length !== 0) { + return leftContent.concat(rightContent); + } + else { + return []; + } + } +} \ No newline at end of file diff --git a/src/blob/persistence/QueryInterpreter/QueryParser.ts b/src/blob/persistence/QueryInterpreter/QueryParser.ts new file mode 100644 index 000000000..473926de0 --- /dev/null +++ b/src/blob/persistence/QueryInterpreter/QueryParser.ts @@ -0,0 +1,517 @@ +import AndNode from "./QueryNodes/AndNode"; +import ConstantNode from "./QueryNodes/ConstantNode"; +import EqualsNode from "./QueryNodes/EqualsNode"; +import ExpressionNode from "./QueryNodes/ExpressionNode"; +import GreaterThanEqualNode from "./QueryNodes/GreaterThanEqualNode"; +import GreaterThanNode from "./QueryNodes/GreaterThanNode"; +import IQueryNode from "./QueryNodes/IQueryNode"; +import KeyNode from "./QueryNodes/KeyNode"; +import LessThanEqualNode from "./QueryNodes/LessThanEqualNode"; +import LessThanNode from "./QueryNodes/LessThanNode"; +import NotEqualsNode from "./QueryNodes/NotEqualsNode"; +import OrNode from "./QueryNodes/OrNode"; + +/** + * This file is used to parse query string for Azure Blob filter by tags and x-ms-if-tags conditions. + * https://learn.microsoft.com/en-us/azure/storage/blobs/storage-manage-find-blobs?tabs=azure-portal + * https://learn.microsoft.com/en-us/rest/api/storageservices/specifying-conditional-headers-for-blob-service-operations + */ + +enum ComparisonType { + Equal, + Greater, + Less, + NotEqual +} + +interface ComparisonNode { + key: string; + existedComparison: ComparisonType[]; +} + +export default function parseQuery( + query: string, + conditions: boolean = false): IQueryNode { + return new QueryParser(query).visit() +} + +/** + * A recursive descent parser for Azure Blob filter by tags query syntax. + * + * This parser is implemented using a recursive descent strategy, which composes + * layers of syntax hierarchy, roughly corresponding to the structure of an EBNF + * grammar. 
Each layer of the hierarchy is implemented as a method which consumes + * the syntax for that layer, and then calls the next layer of the hierarchy. + * + * So for example, the syntax tree that we currently use is composed of: + * - QUERY := EXPRESSION + * - EXPRESSION := OR + * - OR := AND ("or" OR)* + * - AND := UNARY ("and" AND)* + * - UNARY := ("not")? EXPRESSION_GROUP + * - EXPRESSION_GROUP := ("(" EXPRESSION ")") | BINARY + * - BINARY := IDENTIFIER_OR_CONSTANT (OPERATOR IDENTIFIER_OR_CONSTANT)? + * - IDENTIFIER_OR_CONSTANT := CONSTANT | IDENTIFIER + * - CONSTANT := STRING + */ +class QueryParser { + constructor(query: string, + conditions: boolean = false) { + this.query = new ParserContext(query); + this.forConditions = conditions; + } + + private query: ParserContext; + private comparisonNodes: Record = {}; + private comparisonCount: number = 0; + private forConditions: boolean; + + validateWithPreviousComparison(key: string, currentComparison: ComparisonType) { + if (this.forConditions) return; + if (currentComparison === ComparisonType.NotEqual) { + return; + } + + if (this.comparisonNodes[key]) { + for (let i = 0; i < this.comparisonNodes[key].existedComparison.length; ++i) { + if (currentComparison === ComparisonType.Equal) { + throw new Error("can't have multiple conditions for a single tag unless they define a range"); + } + + if (currentComparison === ComparisonType.Greater && + (this.comparisonNodes[key].existedComparison[i] === ComparisonType.Less + || this.comparisonNodes[key].existedComparison[i] === ComparisonType.Equal)) { + throw new Error("can't have multiple conditions for a single tag unless they define a range"); + } + + + if (currentComparison === ComparisonType.Less && + (this.comparisonNodes[key].existedComparison[i] === ComparisonType.Greater + || this.comparisonNodes[key].existedComparison[i] === ComparisonType.Equal)) { + throw new Error("can't have multiple conditions for a single tag unless they define a range"); + } + } + } + + return; + } + + appendComparionNode(key: string, currentComparison: ComparisonType) { + ++this.comparisonCount; + if (this.comparisonCount > 10) { + throw new Error("there can be at most 10 unique tags in a query"); + } + + if (this.forConditions) return; + if (this.comparisonNodes[key]) { + this.comparisonNodes[key].existedComparison.push(currentComparison); + } + else { + this.comparisonNodes[key] = { + key: key, + existedComparison: [currentComparison] + } + } + } + + /** + * Visits the root of the query syntax tree, returning the corresponding root node. + * + * @returns {IQueryNode} + */ + visit(): IQueryNode { + return this.visitQuery(); + } + + /** + * Visits the QUERY layer of the query syntax tree, returning the appropriate node. + * + * @returns {IQueryNode} + */ + private visitQuery(): IQueryNode { + const tree = this.visitExpression(); + + this.query.skipWhitespace(); + this.query.assertEndOfQuery(); + + return tree; + } + + /** + * Visits the EXPRESSION layer of the query syntax tree, returning the appropriate node. + * + * EXPRESSION := OR + * + * @returns {IQueryNode} + */ + private visitExpression(): IQueryNode { + return this.visitOr(); + } + + /** + * Visits the OR layer of the query syntax tree, returning the appropriate node. 
+ * + * OR := AND ("or" OR)* + * + * @returns {IQueryNode} + */ + private visitOr(): IQueryNode { + const left = this.visitAnd(); + + this.query.skipWhitespace(); + if (this.query.consume("or", true)) { + if (!this.forConditions) { + throw new Error("Or not allowed"); + } + const right = this.visitOr(); + return new OrNode(left, right); + } else { + return left; + } + } + + /** + * Visits the AND layer of the query syntax tree, returning the appropriate node. + * + * AND := UNARY ("and" AND)* + * + * @returns {IQueryNode} + */ + private visitAnd(): IQueryNode { + const left = this.visitUnary(); + + this.query.skipWhitespace(); + if (this.query.consume("and", true)) { + const right = this.visitAnd(); + + return new AndNode(left, right); + } else { + return left; + } + } + + /** + * Visits the UNARY layer of the query syntax tree, returning the appropriate node. + * + * UNARY := ("not")? EXPRESSION_GROUP + * + * @returns {IQueryNode} + */ + private visitUnary(): IQueryNode { + this.query.skipWhitespace(); + const right = this.visitExpressionGroup() + return right; + } + + /** + * Visits the EXPRESSION_GROUP layer of the query syntax tree, returning the appropriate node. + * + * EXPRESSION_GROUP := ("(" OR ")") | BINARY + * + * @returns {IQueryNode} + */ + private visitExpressionGroup(): IQueryNode { + this.query.skipWhitespace(); + if (this.query.consume("(")) { + const child = this.visitExpression() + + this.query.skipWhitespace(); + this.query.consume(")") || this.query.throw(`Expected a ')' to close the expression group, but found '${this.query.peek()}' instead.`) + + return new ExpressionNode(child) + } else { + return this.visitBinary() + } + } + + /** + * Visits the BINARY layer of the query syntax tree, returning the appropriate node. + * + * BINARY := IDENTIFIER_OR_CONSTANT (OPERATOR IDENTIFIER_OR_CONSTANT)? 
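+ * For example, `"Priority" >= '05'` parses the key `Priority`, the operator `>=` and the
+ * constant `'05'`, producing a GreaterThanEqualNode.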
+ * + * @returns {IQueryNode} + */ + private visitBinary(): IQueryNode { + const left = this.visitKey(); + + this.query.skipWhitespace(); + const operator = this.query.consumeOneOf(true, "=", ">=", "<=", ">", "<", "<>") + if (operator) { + const right = this.visitValue() + + switch (operator) { + case "=": + this.validateWithPreviousComparison(left.toString(), ComparisonType.Equal); + this.appendComparionNode(left.toString(), ComparisonType.Equal); + return new EqualsNode(left, right); + case "<>": + if (!this.forConditions) { + throw new Error("<> not allowed"); + } + this.validateWithPreviousComparison(left.toString(), ComparisonType.NotEqual); + this.appendComparionNode(left.toString(), ComparisonType.NotEqual); + return new NotEqualsNode(left, right); + case ">=": + this.validateWithPreviousComparison(left.toString(), ComparisonType.Greater); + this.appendComparionNode(left.toString(), ComparisonType.Greater); + return new GreaterThanEqualNode(left, right); + case ">": + this.validateWithPreviousComparison(left.toString(), ComparisonType.Greater); + this.appendComparionNode(left.toString(), ComparisonType.Greater); + return new GreaterThanNode(left, right); + case "<": + this.validateWithPreviousComparison(left.toString(), ComparisonType.Less); + this.appendComparionNode(left.toString(), ComparisonType.Less); + return new LessThanNode(left, right); + case "<=": + this.validateWithPreviousComparison(left.toString(), ComparisonType.Less); + this.appendComparionNode(left.toString(), ComparisonType.Less); + return new LessThanEqualNode(left, right); + } + } + + return left; + } + + /** + * Visits the IDENTIFIER_OR_CONSTANT layer of the query syntax tree, returning the appropriate node. + * + * IDENTIFIER_OR_CONSTANT := CONSTANT | IDENTIFIER + * + * @returns {IQueryNode} + */ + private visitValue(): IQueryNode { + this.query.skipWhitespace(); + + if (`'`.includes(this.query.peek())) { + return this.visitString(); + } + throw new Error("Expecting value"); + } + + private validateKey(key: string) { + if (key.startsWith("@")) { + if (this.forConditions) { + throw new Error("x-ms-if-tags not support container"); + } + + if (key !== "@container") { + throw new Error("Only container name allowed"); + } + } + } + + /** + * Visits the STRING layer of the query syntax tree, returning the appropriate node. + * + * Strings are wrapped in either single quotes (') or double quotes (") and may contain + * doubled-up quotes to introduce a literal. + */ + private visitString(isAKey: boolean = false): IQueryNode { + const openCharacter = this.query.take() + + /** + * Strings are terminated by the same character that opened them. + * But we also allow doubled-up characters to represent a literal, which means we need to only terminate a string + * when we receive an odd-number of closing characters followed by a non-closing character. + * + * Conceptually, this is represented by the following state machine: + * + * - start: normal + * - normal+(current: !') -> normal + * - normal+(current: ', next: ') -> escaping + * - normal+(current: ', next: !') -> end + * - escaping+(current: ') -> normal + * + * We can implement this using the state field of the `take` method's predicate. 
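+ *
+ * For example, the single-quoted input `'it''s'` is read as the literal value `it's`.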
+ */ + const content = this.query.take((c, peek, state) => { + if (state === "escaping") { + return "normal"; + } else if (c === openCharacter && peek === openCharacter) { + return "escaping"; + } else if (c !== openCharacter) { + return "normal"; + } else { + return false; + } + }); + + this.query.consume(openCharacter) || this.query.throw(`Expected a \`${openCharacter}\` to close the string, but found ${this.query.peek()} instead.`); + + if (isAKey) { + const keyName = content.replace(new RegExp(`${openCharacter}${openCharacter}`, 'g'), openCharacter); + this.validateKey(keyName); + return new KeyNode(keyName); + } + else { + return new ConstantNode(content.replace(new RegExp(`${openCharacter}${openCharacter}`, 'g'), openCharacter)); + } + } + + /** + * Visits the IDENTIFIER layer of the query syntax tree, returning the appropriate node. + * + * Identifiers are a sequence of characters which are not whitespace. + * + * @returns {IQueryNode} + */ + private visitKey(): IQueryNode { + // A key name can be surrounded by double quotes. + if (`"`.includes(this.query.peek())) { + return this.visitString(true); + } + else { + const identifier = this.query.take( + c => !!c.trim() && c !== '=' && c != '>' && c !== '<' + ) || this.query.throw(`Expected a valid identifier, but found '${this.query.peek()}' instead.`); + this.validateKey(identifier); + return new KeyNode(identifier) + } + } +} + +/** + * Provides the logic and helper functions for consuming tokens from a query string. + * This includes low level constructs like peeking at the next character, consuming a + * specific sequence of characters, and skipping whitespace. + */ +export class ParserContext { + constructor(private query: string) { + } + private tokenPosition: number = 0; + + /** + * Asserts that the query has been fully consumed. + * + * This method should be called after the parser has finished consuming the known parts of the query. + * Any remaining query after this point is indicative of a syntax error. + */ + assertEndOfQuery() { + if (this.tokenPosition < this.query.length) { + this.throw(`Unexpected token '${this.peek()}'.`) + } + } + + /** + * Retrieves the next character in the query without advancing the parser. + * + * @returns {string} A single character, or `undefined` if the end of the query has been reached. + */ + peek(): string { + return this.query[this.tokenPosition] + } + + /** + * Advances the parser past any whitespace characters. + */ + skipWhitespace() { + while (this.query[this.tokenPosition] && !this.query[this.tokenPosition].trim()) { + this.tokenPosition++ + } + } + + /** + * Attempts to consume a given sequence of characters from the query, + * advancing the parser if the sequence is found. + * + * @param {string} sequence The sequence of characters which should be consumed. + * @param {boolean} ignoreCase Whether or not the case of the characters should be ignored. + * @returns {boolean} `true` if the sequence was consumed, `false` otherwise. + */ + consume(sequence: string, ignoreCase: boolean = false): boolean { + const normalize = ignoreCase ? (s: string) => s.toLowerCase() : (s: string) => s; + + if (normalize(this.query.substring(this.tokenPosition, this.tokenPosition + sequence.length)) === normalize(sequence)) { + this.tokenPosition += sequence.length + return true + } + + return false + } + + /** + * Attempts to consume one of a given set of sequences from the query, + * advancing the parser if one of the sequences is found. 
+   *
+   * Sequences are tested in the order they are provided, and the first
+   * sequence which is found is consumed. As such, it is important to
+   * avoid prefixes appearing before their longer counterparts.
+   *
+   * @param {boolean} ignoreCase Whether or not the case of the characters should be ignored.
+   * @param {string[]} options The list of character sequences which should be consumed.
+   * @returns {string | null} The sequence which was consumed, or `null` if none of the sequences were found.
+   */
+  consumeOneOf(ignoreCase: boolean = false, ...options: string[]): string | null {
+    for (const option of options) {
+      if (this.consume(option, ignoreCase)) {
+        return option;
+      }
+    }
+
+    return null;
+  }
+
+  /**
+   * Consumes a sequence of characters from the query based on a character predicate function.
+   *
+   * The predicate function is called for each character in the query, and the sequence is
+   * consumed until the predicate returns `false` or the end of the query is reached.
+   *
+   * @param {Function} predicate The function which determines which characters should be consumed.
+   * @returns {string} The sequence of characters which were consumed.
+   */
+  take<T>(predicate?: (char: string, peek: string, state: T | undefined) => T): string {
+    const start = this.tokenPosition;
+    let until = this.tokenPosition;
+
+    if (predicate) {
+      let state: T | undefined;
+      while (this.query[until]) {
+        state = predicate(this.query[until], this.query[until + 1], state);
+        if (!state) {
+          break;
+        }
+
+        until++;
+      }
+    } else {
+      // If no predicate is provided, then just take one character
+      until++;
+    }
+
+    this.tokenPosition = until;
+    return this.query.substring(start, until);
+  }
+
+  /**
+   * Consumes a sequence of characters from the query based on a character predicate function,
+   * and then consumes a terminating sequence of characters (throwing an exception if these are not found).
+   *
+   * This function is particularly useful for consuming sequences of characters which are surrounded
+   * by a prefix and suffix, such as strings.
+   *
+   * @param {string} prefix The prefix which should be consumed.
+   * @param {Function} predicate The function which determines which characters should be consumed.
+   * @param {string} suffix The suffix which should be consumed.
+   * @returns {string | null} The sequence of characters which were consumed, or `null` if the prefix was not found.
+   */
+  takeWithTerminator<T>(prefix: string, predicate: (char: string, peek: string, state: T | undefined) => T, suffix: string): string | null {
+    if (!this.consume(prefix)) {
+      return null;
+    }
+
+    const value = this.take(predicate);
+    this.consume(suffix) || this.throw(`Expected "${suffix}" to close the "${prefix}...${suffix}", but found '${this.peek()}' instead.`);
+
+    return value;
+  }
+
+  /**
+   * Throws an exception with a message indicating the position of the parser in the query.
+   * @param {string} message The message to include in the exception.
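+   *
+   * The message embeds the current token position and a snippet of the surrounding query,
+   * in the form `[query:<position>]: <message> (at '<preceding text><<current char>><following text>...')`.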
+ */ + throw(message: string): never { + throw new Error(`[query:${this.tokenPosition}]: ${message} (at '${this.query.substring(Math.max(0, this.tokenPosition - 10), this.tokenPosition)}<<${this.peek()}>>${this.query.substring(this.tokenPosition + 1, this.tokenPosition + 10)}...')`) + } +} \ No newline at end of file diff --git a/src/blob/persistence/SqlBlobMetadataStore.ts b/src/blob/persistence/SqlBlobMetadataStore.ts index cb96a6fc4..9b470e75b 100644 --- a/src/blob/persistence/SqlBlobMetadataStore.ts +++ b/src/blob/persistence/SqlBlobMetadataStore.ts @@ -53,6 +53,7 @@ import IBlobMetadataStore, { ChangeContainerLeaseResponse, ContainerModel, CreateSnapshotResponse, + FilterBlobModel, GetBlobPropertiesRes, GetContainerAccessPolicyResponse, GetContainerPropertiesResponse, @@ -67,13 +68,15 @@ import IBlobMetadataStore, { SetContainerAccessPolicyOptions } from "./IBlobMetadataStore"; import PageWithDelimiter from "./PageWithDelimiter"; -import { getBlobTagsCount, getTagsFromString } from "../utils/utils"; +import FilterBlobPage from "./FilterBlobPage"; +import { getBlobTagsCount, getTagsFromString, toBlobTags } from "../utils/utils"; +import { generateQueryBlobWithTagsWhereFunction } from "./QueryInterpreter/QueryInterpreter"; // tslint:disable: max-classes-per-file -class ServicesModel extends Model {} -class ContainersModel extends Model {} -class BlobsModel extends Model {} -class BlocksModel extends Model {} +class ServicesModel extends Model { } +class ContainersModel extends Model { } +class BlobsModel extends Model { } +class BlocksModel extends Model { } // class PagesModel extends Model {} interface IBlobContentProperties { @@ -1037,10 +1040,10 @@ export default class SqlBlobMetadataStore implements IBlobMetadataStore { containerModel.properties.leaseState === Models.LeaseStateType.Breaking && containerModel.leaseBreakTime ? 
Math.round(
-            (containerModel.leaseBreakTime.getTime() -
-              context.startTime!.getTime()) /
-              1000
-          )
+          (containerModel.leaseBreakTime.getTime() -
+            context.startTime!.getTime()) /
+          1000
+        )
         : 0;
 
     return {
@@ -1227,6 +1230,78 @@ export default class SqlBlobMetadataStore implements IBlobMetadataStore {
     });
   }
 
+  public async filterBlobs(
+    context: Context,
+    account: string,
+    container?: string,
+    where?: string,
+    maxResults: number = DEFAULT_LIST_BLOBS_MAX_RESULTS,
+    marker?: string,
+  ): Promise<[FilterBlobModel[], string | undefined]> {
+    return this.sequelize.transaction(async (t) => {
+      if (container) {
+        await this.assertContainerExists(context, account, container, t);
+      }
+
+      let whereQuery: any;
+      if (container) {
+        whereQuery = {
+          accountName: account,
+          containerName: container
+        };
+      } else {
+        whereQuery = {
+          accountName: account
+        };
+      }
+
+      if (marker !== undefined) {
+        if (whereQuery.blobName !== undefined) {
+          whereQuery.blobName[Op.gt] = marker;
+        } else {
+          whereQuery.blobName = {
+            [Op.gt]: marker
+          };
+        }
+      }
+      whereQuery.deleting = 0;
+
+      // fill the page by possibly querying multiple times
+      const page = new FilterBlobPage<BlobsModel>(maxResults);
+
+      const nameItem = (item: BlobsModel): string => {
+        return this.getModelValue<string>(item, "blobName", true);
+      };
+      const filterFunction = generateQueryBlobWithTagsWhereFunction(where!);
+
+      const readPage = async (off: number): Promise<BlobsModel[]> => {
+        return (await BlobsModel.findAll({
+          where: whereQuery as any,
+          order: [["blobName", "ASC"]],
+          transaction: t,
+          limit: maxResults,
+          offset: off
+        }));
+      };
+
+      const [blobItems, nextMarker] = await page.fill(readPage, nameItem);
+
+      const filterBlobModelMapper = (model: BlobsModel) => {
+        return this.convertDbModelToFilterBlobModel(model);
+      };
+
+      return [blobItems.map(filterBlobModelMapper).filter((blobItem) => {
+        const tagsMeetConditions = filterFunction(blobItem);
+        if (tagsMeetConditions.length !== 0) {
+          blobItem.tags = { blobTagSet: toBlobTags(tagsMeetConditions) };
+          return true;
+        }
+        return false;
+      }), nextMarker];
+    });
+  }
+
   public async listBlobs(
     context: Context,
     account: string,
@@ -1729,7 +1804,7 @@ export default class SqlBlobMetadataStore implements IBlobMetadataStore {
 
       // TODO: Return blobCommittedBlockCount for append blob
 
-      let responds = LeaseFactory.createLeaseState( 
+      let responds = LeaseFactory.createLeaseState(
         new BlobLeaseAdapter(blobModel),
         context
       )
@@ -1737,7 +1812,7 @@ export default class SqlBlobMetadataStore implements IBlobMetadataStore {
         .sync(new BlobLeaseSyncer(blobModel));
       return {
         ...responds,
-        properties : {
+        properties: {
           ...responds.properties,
           tagCount: getBlobTagsCount(blobModel.blobTags),
         },
@@ -2418,11 +2493,11 @@ export default class SqlBlobMetadataStore implements IBlobMetadataStore {
     const leaseTimeSeconds: number =
       lease.leaseState === Models.LeaseStateType.Breaking &&
-      lease.leaseBreakTime
+        lease.leaseBreakTime
         ?
Math.round( - (lease.leaseBreakTime.getTime() - context.startTime!.getTime()) / - 1000 - ) + (lease.leaseBreakTime.getTime() - context.startTime!.getTime()) / + 1000 + ) : 0; await BlobsModel.update(this.convertLeaseToDbModel(lease), { @@ -3078,6 +3153,14 @@ export default class SqlBlobMetadataStore implements IBlobMetadataStore { }; } + private convertDbModelToFilterBlobModel(dbModel: BlobsModel): FilterBlobModel { + return { + containerName: this.getModelValue(dbModel, "containerName", true), + name: this.getModelValue(dbModel, "blobName", true), + tags: this.deserializeModelValue(dbModel, "blobTags") + }; + } + private convertDbModelToBlobModel(dbModel: BlobsModel): BlobModel { const contentProperties: IBlobContentProperties = this.convertDbModelToBlobContentProperties( dbModel @@ -3407,13 +3490,19 @@ export default class SqlBlobMetadataStore implements IBlobMetadataStore { if (!blobModel.isCommitted) { throw StorageErrorFactory.getBlobNotFound(context.contextId); - } - + } + LeaseFactory.createLeaseState( new BlobLeaseAdapter(blobModel), context ).validate(new BlobReadLeaseValidator(leaseAccessConditions)); + if (modifiedAccessConditions?.ifTags) { + const validateFunction = generateQueryBlobWithTagsWhereFunction(modifiedAccessConditions?.ifTags, true); + if (!validateFunction(blobModel)) { + throw new Error("412"); + } + } return blobModel.blobTags; }); } diff --git a/src/blob/persistence/SqlBlobMetadataStore.ts.bak b/src/blob/persistence/SqlBlobMetadataStore.ts.bak new file mode 100644 index 000000000..dfd596237 --- /dev/null +++ b/src/blob/persistence/SqlBlobMetadataStore.ts.bak @@ -0,0 +1,3446 @@ +import { + BOOLEAN, + DATE, + INTEGER, + literal, + Model, + Op, + Options as SequelizeOptions, + Sequelize, + TEXT, + Transaction +} from "sequelize"; + +import uuid from "uuid/v4"; + +import { + DEFAULT_SQL_CHARSET, + DEFAULT_SQL_COLLATE +} from "../../common/utils/constants"; +import { convertDateTimeStringMsTo7Digital } from "../../common/utils/utils"; +import { newEtag } from "../../common/utils/utils"; +import { validateReadConditions } from "../conditions/ReadConditionalHeadersValidator"; +import { validateWriteConditions } from "../conditions/WriteConditionalHeadersValidator"; +import StorageErrorFactory from "../errors/StorageErrorFactory"; +import * as Models from "../generated/artifacts/models"; +import Context from "../generated/Context"; +import BlobLeaseAdapter from "../lease/BlobLeaseAdapter"; +import BlobLeaseSyncer from "../lease/BlobLeaseSyncer"; +import BlobReadLeaseValidator from "../lease/BlobReadLeaseValidator"; +import BlobWriteLeaseSyncer from "../lease/BlobWriteLeaseSyncer"; +import BlobWriteLeaseValidator from "../lease/BlobWriteLeaseValidator"; +import ContainerDeleteLeaseValidator from "../lease/ContainerDeleteLeaseValidator"; +import ContainerLeaseAdapter from "../lease/ContainerLeaseAdapter"; +import ContainerLeaseSyncer from "../lease/ContainerLeaseSyncer"; +import ContainerReadLeaseValidator from "../lease/ContainerReadLeaseValidator"; +import { ILease } from "../lease/ILeaseState"; +import LeaseFactory from "../lease/LeaseFactory"; +import { + DEFAULT_LIST_BLOBS_MAX_RESULTS, + DEFAULT_LIST_CONTAINERS_MAX_RESULTS +} from "../utils/constants"; +import BlobReferredExtentsAsyncIterator from "./BlobReferredExtentsAsyncIterator"; +import IBlobMetadataStore, { + AcquireBlobLeaseResponse, + AcquireContainerLeaseResponse, + BlobId, + BlobModel, + BlobPrefixModel, + BlockModel, + BreakBlobLeaseResponse, + BreakContainerLeaseResponse, + ChangeBlobLeaseResponse, + 
ChangeContainerLeaseResponse, + ContainerModel, + CreateSnapshotResponse, + GetBlobPropertiesRes, + GetContainerAccessPolicyResponse, + GetContainerPropertiesResponse, + GetPageRangeResponse, + IContainerMetadata, + IExtentChunk, + PersistencyBlockModel, + ReleaseBlobLeaseResponse, + RenewBlobLeaseResponse, + RenewContainerLeaseResponse, + ServicePropertiesModel, + SetContainerAccessPolicyOptions +} from "./IBlobMetadataStore"; +import PageWithDelimiter from "./PageWithDelimiter"; +import { getBlobTagsCount, getTagsFromString } from "../utils/utils"; + +// tslint:disable: max-classes-per-file +class ServicesModel extends Model {} +class ContainersModel extends Model {} +class BlobsModel extends Model {} +class BlocksModel extends Model {} +// class PagesModel extends Model {} + +interface IBlobContentProperties { + contentLength?: number; + contentType?: string; + contentEncoding?: string; + contentLanguage?: string; + contentMD5?: Uint8Array; + contentDisposition?: string; + cacheControl?: string; +} + +/** + * A SQL based Blob metadata storage implementation based on Sequelize. + * Refer to CONTRIBUTION.md for how to setup SQL database environment. + * + * @export + * @class SqlBlobMetadataStore + * @implements {IBlobMetadataStore} + */ +export default class SqlBlobMetadataStore implements IBlobMetadataStore { + private initialized: boolean = false; + private closed: boolean = false; + private readonly sequelize: Sequelize; + + /** + * Creates an instance of SqlBlobMetadataStore. + * + * @param {string} connectionURI For example, "postgres://user:pass@example.com:5432/dbname" + * @param {SequelizeOptions} [sequelizeOptions] + * @memberof SqlBlobMetadataStore + */ + public constructor( + connectionURI: string, + sequelizeOptions?: SequelizeOptions + ) { + // Enable encrypt connection for SQL Server + if (connectionURI.startsWith("mssql") && sequelizeOptions) { + sequelizeOptions.dialectOptions = sequelizeOptions.dialectOptions || {}; + (sequelizeOptions.dialectOptions as any).options = + (sequelizeOptions.dialectOptions as any).options || {}; + (sequelizeOptions.dialectOptions as any).options.encrypt = true; + } + this.sequelize = new Sequelize(connectionURI, sequelizeOptions); + } + + public async init(): Promise { + await this.sequelize.authenticate(); + + ServicesModel.init( + { + accountName: { + type: "VARCHAR(32)", + primaryKey: true + }, + defaultServiceVersion: { + type: "VARCHAR(10)" + }, + cors: { + type: "VARCHAR(4095)" + }, + logging: { + type: "VARCHAR(255)" + }, + minuteMetrics: { + type: "VARCHAR(255)" + }, + hourMetrics: { + type: "VARCHAR(255)" + }, + staticWebsite: { + type: "VARCHAR(1023)" + }, + deleteRetentionPolicy: { + type: "VARCHAR(255)" + } + }, + { + sequelize: this.sequelize, + modelName: "Services", + tableName: "Services", + timestamps: false + } + ); + + ContainersModel.init( + { + accountName: { + type: "VARCHAR(32)", + unique: "accountname_containername" + }, + // tslint:disable-next-line:max-line-length + // https://docs.microsoft.com/en-us/rest/api/storageservices/naming-and-referencing-containers--blobs--and-metadata + containerName: { + type: "VARCHAR(63)", + unique: "accountname_containername" + }, + containerId: { + type: INTEGER.UNSIGNED, + primaryKey: true, + autoIncrement: true + }, + lastModified: { + allowNull: false, + type: DATE(6) + }, + etag: { + allowNull: false, + type: "VARCHAR(127)" + }, + // TODO: Confirm max length of metadata pairs + metadata: { + type: "VARCHAR(4095)" + }, + containerAcl: { + type: "VARCHAR(1023)" + }, + 
publicAccess: { + type: "VARCHAR(31)" + }, + lease: { + type: "VARCHAR(1023)" + }, + hasImmutabilityPolicy: { + type: BOOLEAN + }, + hasLegalHold: { + type: BOOLEAN + } + }, + { + sequelize: this.sequelize, + modelName: "Containers", + tableName: "Containers", + charset: DEFAULT_SQL_CHARSET, + collate: DEFAULT_SQL_COLLATE, + timestamps: false + } + ); + + BlobsModel.init( + { + accountName: { + type: "VARCHAR(64)", + allowNull: false + }, + containerName: { + type: "VARCHAR(255)", + allowNull: false + }, + blobName: { + type: "VARCHAR(255)", + allowNull: false + }, + snapshot: { + type: "VARCHAR(64)", + allowNull: false, + defaultValue: "" + }, + blobId: { + type: INTEGER.UNSIGNED, + primaryKey: true, + autoIncrement: true + }, + lastModified: { + allowNull: false, + type: DATE(6) + }, + creationTime: { + allowNull: false, + type: DATE(6) + }, + accessTierChangeTime: { + allowNull: true, + type: DATE(6) + }, + accessTierInferred: { + type: BOOLEAN + }, + etag: { + allowNull: false, + type: "VARCHAR(127)" + }, + blobType: { + allowNull: false, + type: "VARCHAR(31)" + }, + blobSequenceNumber: { + type: "VARCHAR(63)" + }, + accessTier: { + type: "VARCHAR(31)" + }, + contentProperties: { + type: "VARCHAR(1023)" + }, + lease: { + type: "VARCHAR(1023)" + }, + deleting: { + type: INTEGER.UNSIGNED, + defaultValue: 0, // 0 means container is not under deleting(gc) + allowNull: false + }, + isCommitted: { + type: BOOLEAN, + allowNull: false + }, + persistency: { + type: "VARCHAR(255)" + }, + committedBlocksInOrder: { + type: TEXT({ length: "medium" }) + }, + metadata: { + type: "VARCHAR(2047)" + }, + blobTags: { + type: "VARCHAR(4096)" + } + }, + { + sequelize: this.sequelize, + modelName: "Blobs", + tableName: "Blobs", + timestamps: false, + charset: DEFAULT_SQL_CHARSET, + collate: DEFAULT_SQL_COLLATE, + indexes: [ + { + // name: 'title_index', + // using: 'BTREE', + unique: true, + fields: [ + "accountName", + "containerName", + "blobName", + "snapshot", + "deleting" + ] + } + ] + } + ); + + BlocksModel.init( + { + accountName: { + type: "VARCHAR(64)", + allowNull: false + }, + containerName: { + type: "VARCHAR(255)", + allowNull: false + }, + blobName: { + type: "VARCHAR(255)", + allowNull: false + }, + // TODO: Check max block name length + blockName: { + type: "VARCHAR(64)", + allowNull: false + }, + deleting: { + type: INTEGER.UNSIGNED, + defaultValue: 0, // 0 means container is not under deleting(gc) + allowNull: false + }, + size: { + type: INTEGER.UNSIGNED, + allowNull: false + }, + persistency: { + type: "VARCHAR(255)" + } + }, + { + sequelize: this.sequelize, + modelName: "Blocks", + tableName: "Blocks", + timestamps: false, + indexes: [ + { + unique: true, + fields: ["accountName", "containerName", "blobName", "blockName"] + } + ] + } + ); + + // TODO: sync() is only for development purpose, use migration for production + await this.sequelize.sync(); + + this.initialized = true; + } + + public isInitialized(): boolean { + return this.initialized; + } + + public async close(): Promise { + await this.sequelize.close(); + this.closed = true; + } + + public isClosed(): boolean { + return this.closed; + } + + public async clean(): Promise { + // TODO: Implement cleanup in database + } + + public async setServiceProperties( + context: Context, + serviceProperties: ServicePropertiesModel + ): Promise { + return this.sequelize.transaction(async (t) => { + const findResult = await ServicesModel.findByPk( + serviceProperties.accountName, + { + transaction: t + } + ); + const updateValues = { + 
defaultServiceVersion: serviceProperties.defaultServiceVersion, + cors: this.serializeModelValue(serviceProperties.cors), + logging: this.serializeModelValue(serviceProperties.logging), + minuteMetrics: this.serializeModelValue( + serviceProperties.minuteMetrics + ), + hourMetrics: this.serializeModelValue(serviceProperties.hourMetrics), + staticWebsite: this.serializeModelValue( + serviceProperties.staticWebsite + ), + deleteRetentionPolicy: this.serializeModelValue( + serviceProperties.deleteRetentionPolicy + ) + }; + if (findResult === null) { + await ServicesModel.create( + { + accountName: serviceProperties.accountName, + ...updateValues + }, + { transaction: t } + ); + } else { + const updateResult = await ServicesModel.update(updateValues, { + transaction: t, + where: { + accountName: serviceProperties.accountName + } + }); + + // Set the exactly equal properties will affect 0 rows. + const updatedRows = updateResult[0]; + if (updatedRows > 1) { + throw Error( + `SqlBlobMetadataStore:updateServiceProperties() failed. Update operation affect ${updatedRows} rows.` + ); + } + } + + return serviceProperties; + }); + } + + public async getServiceProperties( + context: Context, + account: string + ): Promise { + const findResult = await ServicesModel.findByPk(account); + if (findResult === null) { + return undefined; + } + + const logging = this.deserializeModelValue(findResult, "logging"); + const hourMetrics = this.deserializeModelValue(findResult, "hourMetrics"); + const minuteMetrics = this.deserializeModelValue( + findResult, + "minuteMetrics" + ); + const cors = this.deserializeModelValue(findResult, "cors"); + const deleteRetentionPolicy = this.deserializeModelValue( + findResult, + "deleteRetentionPolicy" + ); + const staticWebsite = this.deserializeModelValue( + findResult, + "staticWebsite" + ); + const defaultServiceVersion = this.getModelValue( + findResult, + "defaultServiceVersion" + ); + + const ret: ServicePropertiesModel = { + accountName: account + }; + + if (logging !== undefined) { + ret.logging = logging; + } + if (hourMetrics !== undefined) { + ret.hourMetrics = hourMetrics; + } + if (minuteMetrics !== undefined) { + ret.minuteMetrics = minuteMetrics; + } + if (cors !== undefined) { + ret.cors = cors; + } + if (deleteRetentionPolicy !== undefined) { + ret.deleteRetentionPolicy = deleteRetentionPolicy; + } + if (staticWebsite !== undefined) { + ret.staticWebsite = staticWebsite; + } + if (defaultServiceVersion !== undefined) { + ret.defaultServiceVersion = defaultServiceVersion; + } + + return ret; + } + + public async listContainers( + context: Context, + account: string, + prefix: string = "", + maxResults: number = DEFAULT_LIST_CONTAINERS_MAX_RESULTS, + marker: string + ): Promise<[ContainerModel[], string | undefined]> { + const whereQuery: any = { accountName: account }; + + if (prefix.length > 0) { + whereQuery.containerName = { + [Op.like]: `${prefix}%` + }; + } + + if (marker !== "") { + if (whereQuery.containerName === undefined) { + whereQuery.containerName = { + [Op.gt]: marker + }; + } else { + whereQuery.containerName[Op.gt] = marker; + } + } + + const findResult = await ContainersModel.findAll({ + limit: maxResults + 1, + where: whereQuery as any, + order: [["containerName", "ASC"]] + }); + + const leaseUpdateMapper = (model: ContainersModel) => { + const containerModel = this.convertDbModelToContainerModel(model); + return LeaseFactory.createLeaseState( + new ContainerLeaseAdapter(containerModel), + context + ).sync(new 
ContainerLeaseSyncer(containerModel)); + }; + + if (findResult.length <= maxResults) { + return [findResult.map(leaseUpdateMapper), undefined]; + } else { + const tail = findResult[findResult.length - 2]; + findResult.pop(); + const nextMarker = this.getModelValue( + tail, + "containerName", + true + ); + return [findResult.map(leaseUpdateMapper), nextMarker]; + } + } + + public async createContainer( + context: Context, + container: ContainerModel + ): Promise { + try { + await ContainersModel.create( + this.convertContainerModelToDbModel(container) + ); + return container; + } catch (err) { + if (err.name === "SequelizeUniqueConstraintError") { + throw StorageErrorFactory.getContainerAlreadyExists(context.contextId); + } + throw err; + } + } + + public async getContainerProperties( + context: Context, + account: string, + container: string, + leaseAccessConditions?: Models.LeaseAccessConditions + ): Promise { + const findResult = await this.assertContainerExists( + context, + account, + container, + undefined, + true + ); + const containerModel = this.convertDbModelToContainerModel(findResult); + + return LeaseFactory.createLeaseState( + new ContainerLeaseAdapter(containerModel), + context + ) + .validate(new ContainerReadLeaseValidator(leaseAccessConditions)) + .sync(new ContainerLeaseSyncer(containerModel)); + } + + public async deleteContainer( + context: Context, + account: string, + container: string, + options: Models.ContainerDeleteMethodOptionalParams = {} + ): Promise { + await this.sequelize.transaction(async (t) => { + /* Transaction starts */ + const findResult = await ContainersModel.findOne({ + attributes: [ + "accountName", + "containerName", + "etag", + "lastModified", + "lease" + ], + where: { + accountName: account, + containerName: container + }, + transaction: t + }); + + validateWriteConditions( + context, + options.modifiedAccessConditions, + findResult ? 
this.convertDbModelToContainerModel(findResult) : undefined + ); + + if (findResult === null || findResult === undefined) { + throw StorageErrorFactory.getContainerNotFound(context.contextId); + } + + LeaseFactory.createLeaseState( + this.convertDbModelToLease(findResult), + context + ).validate( + new ContainerDeleteLeaseValidator(options.leaseAccessConditions) + ); + + await ContainersModel.destroy({ + where: { + accountName: account, + containerName: container + }, + transaction: t + }); + + // TODO: GC blobs under deleting status + await BlobsModel.update( + { + deleting: literal("deleting + 1") + }, + { + where: { + accountName: account, + containerName: container + }, + transaction: t + } + ); + + // TODO: GC blocks under deleting status + await BlocksModel.update( + { + deleting: literal("deleting + 1") + }, + { + where: { + accountName: account, + containerName: container + }, + transaction: t + } + ); + /* Transaction ends */ + }); + } + + public async setContainerMetadata( + context: Context, + account: string, + container: string, + lastModified: Date, + etag: string, + metadata?: IContainerMetadata, + leaseAccessConditions?: Models.LeaseAccessConditions, + modifiedAccessConditions?: Models.ModifiedAccessConditions + ): Promise { + return this.sequelize.transaction(async (t) => { + /* Transaction starts */ + const findResult = await ContainersModel.findOne({ + attributes: [ + "accountName", + "containerName", + "etag", + "lastModified", + "lease" + ], + where: { + accountName: account, + containerName: container + }, + transaction: t + }); + + validateWriteConditions( + context, + modifiedAccessConditions, + findResult ? this.convertDbModelToContainerModel(findResult) : undefined + ); + + if (findResult === null || findResult === undefined) { + throw StorageErrorFactory.getContainerNotFound(context.contextId); + } + + LeaseFactory.createLeaseState( + this.convertDbModelToLease(findResult), + context + ).validate(new ContainerReadLeaseValidator(leaseAccessConditions)); + + await ContainersModel.update( + { + lastModified, + etag, + metadata: this.serializeModelValue(metadata) || null + }, + { + where: { + accountName: account, + containerName: container + }, + transaction: t + } + ); + /* Transaction ends */ + }); + } + + public async getContainerACL( + context: Context, + account: string, + container: string, + leaseAccessConditions?: Models.LeaseAccessConditions | undefined + ): Promise { + const findResult = await ContainersModel.findOne({ + where: { + accountName: account, + containerName: container + } + }); + + if (findResult === null || findResult === undefined) { + throw StorageErrorFactory.getContainerNotFound(context.contextId); + } + + const containerModel = this.convertDbModelToContainerModel(findResult); + + LeaseFactory.createLeaseState( + new ContainerLeaseAdapter(containerModel), + context + ) + .validate(new ContainerReadLeaseValidator(leaseAccessConditions)) + .sync(new ContainerLeaseSyncer(containerModel)); + + return { + properties: containerModel.properties, + containerAcl: containerModel.containerAcl + }; + } + + public async setContainerACL( + context: Context, + account: string, + container: string, + setAclModel: SetContainerAccessPolicyOptions + ): Promise { + await this.sequelize.transaction(async (t) => { + const findResult = await ContainersModel.findOne({ + attributes: [ + "accountName", + "containerName", + "etag", + "lastModified", + "lease" + ], + where: { + accountName: account, + containerName: container + }, + transaction: t + }); + + 
validateWriteConditions( + context, + setAclModel.modifiedAccessConditions, + findResult ? this.convertDbModelToContainerModel(findResult) : undefined + ); + + if (findResult === null || findResult === undefined) { + throw StorageErrorFactory.getContainerNotFound(context.contextId); + } + + const lease = this.convertDbModelToLease(findResult); + + LeaseFactory.createLeaseState(lease, context).validate( + new ContainerReadLeaseValidator(setAclModel.leaseAccessConditions) + ); + + const updateResult = await ContainersModel.update( + { + lastModified: setAclModel.lastModified, + etag: setAclModel.etag, + containerAcl: + this.serializeModelValue(setAclModel.containerAcl) || null, + publicAccess: this.serializeModelValue(setAclModel.publicAccess) + }, + { + where: { + accountName: account, + containerName: container + }, + transaction: t + } + ); + + if (updateResult[0] === 0) { + throw StorageErrorFactory.getContainerNotFound(context.contextId); + } + }); + } + + public async acquireContainerLease( + context: Context, + account: string, + container: string, + options: Models.ContainerAcquireLeaseOptionalParams + ): Promise { + return this.sequelize.transaction(async (t) => { + /* Transaction starts */ + const findResult = await ContainersModel.findOne({ + where: { + accountName: account, + containerName: container + }, + transaction: t + }); + + validateWriteConditions( + context, + options.modifiedAccessConditions, + findResult ? this.convertDbModelToContainerModel(findResult) : undefined + ); + + if (findResult === null || findResult === undefined) { + throw StorageErrorFactory.getContainerNotFound(context.contextId); + } + + const containerModel = this.convertDbModelToContainerModel(findResult); + LeaseFactory.createLeaseState( + new ContainerLeaseAdapter(containerModel), + context + ) + .acquire(options.duration!, options.proposedLeaseId) + .sync(new ContainerLeaseSyncer(containerModel)); + + await ContainersModel.update( + this.convertLeaseToDbModel(new ContainerLeaseAdapter(containerModel)), + { + where: { + accountName: account, + containerName: container + }, + transaction: t + } + ); + + return { + properties: containerModel.properties, + leaseId: containerModel.leaseId + }; + /* Transaction ends */ + }); + } + + public async releaseContainerLease( + context: Context, + account: string, + container: string, + leaseId: string, + options: Models.ContainerReleaseLeaseOptionalParams = {} + ): Promise { + return this.sequelize.transaction(async (t) => { + /* Transaction starts */ + const findResult = await ContainersModel.findOne({ + where: { + accountName: account, + containerName: container + }, + transaction: t + }); + + validateWriteConditions( + context, + options.modifiedAccessConditions, + findResult ? 
this.convertDbModelToContainerModel(findResult) : undefined + ); + + if (findResult === null || findResult === undefined) { + throw StorageErrorFactory.getContainerNotFound(context.contextId); + } + + const containerModel = this.convertDbModelToContainerModel(findResult); + + LeaseFactory.createLeaseState( + new ContainerLeaseAdapter(containerModel), + context + ) + .release(leaseId) + .sync(new ContainerLeaseSyncer(containerModel)); + + await ContainersModel.update( + this.convertLeaseToDbModel(new ContainerLeaseAdapter(containerModel)), + { + where: { + accountName: account, + containerName: container + }, + transaction: t + } + ); + + return containerModel.properties; + /* Transaction ends */ + }); + } + + public async renewContainerLease( + context: Context, + account: string, + container: string, + leaseId: string, + options: Models.ContainerRenewLeaseOptionalParams = {} + ): Promise { + return this.sequelize.transaction(async (t) => { + /* Transaction starts */ + // TODO: Filter out unnecessary fields in select query + const findResult = await ContainersModel.findOne({ + where: { + accountName: account, + containerName: container + }, + transaction: t + }); + + validateWriteConditions( + context, + options.modifiedAccessConditions, + findResult ? this.convertDbModelToContainerModel(findResult) : undefined + ); + + if (findResult === null || findResult === undefined) { + throw StorageErrorFactory.getContainerNotFound(context.contextId); + } + + const containerModel = this.convertDbModelToContainerModel(findResult); + + LeaseFactory.createLeaseState( + new ContainerLeaseAdapter(containerModel), + context + ) + .renew(leaseId) + .sync(new ContainerLeaseSyncer(containerModel)); + + await ContainersModel.update( + this.convertLeaseToDbModel(new ContainerLeaseAdapter(containerModel)), + { + where: { + accountName: account, + containerName: container + }, + transaction: t + } + ); + + return { + properties: containerModel.properties, + leaseId: containerModel.leaseId + }; + /* Transaction ends */ + }); + } + + public async breakContainerLease( + context: Context, + account: string, + container: string, + breakPeriod: number | undefined, + options: Models.ContainerBreakLeaseOptionalParams = {} + ): Promise { + return this.sequelize.transaction(async (t) => { + const findResult = await ContainersModel.findOne({ + where: { + accountName: account, + containerName: container + }, + transaction: t + }); + + validateWriteConditions( + context, + options.modifiedAccessConditions, + findResult ? this.convertDbModelToContainerModel(findResult) : undefined + ); + + if (findResult === null || findResult === undefined) { + throw StorageErrorFactory.getContainerNotFound(context.contextId); + } + + const containerModel = this.convertDbModelToContainerModel(findResult); + + LeaseFactory.createLeaseState( + new ContainerLeaseAdapter(containerModel), + context + ) + .break(breakPeriod) + .sync(new ContainerLeaseSyncer(containerModel)); + + await ContainersModel.update( + this.convertLeaseToDbModel(new ContainerLeaseAdapter(containerModel)), + { + where: { + accountName: account, + containerName: container + }, + transaction: t + } + ); + + const leaseTimeSeconds: number = + containerModel.properties.leaseState === + Models.LeaseStateType.Breaking && containerModel.leaseBreakTime + ? 
Math.round( + (containerModel.leaseBreakTime.getTime() - + context.startTime!.getTime()) / + 1000 + ) + : 0; + + return { + properties: containerModel.properties, + leaseTime: leaseTimeSeconds + }; + }); + } + + public async changeContainerLease( + context: Context, + account: string, + container: string, + leaseId: string, + proposedLeaseId: string, + options: Models.ContainerChangeLeaseOptionalParams = {} + ): Promise { + return this.sequelize.transaction(async (t) => { + const findResult = await ContainersModel.findOne({ + where: { + accountName: account, + containerName: container + }, + transaction: t + }); + + validateWriteConditions( + context, + options.modifiedAccessConditions, + findResult ? this.convertDbModelToContainerModel(findResult) : undefined + ); + + if (findResult === null || findResult === undefined) { + throw StorageErrorFactory.getContainerNotFound(context.contextId); + } + + const containerModel = this.convertDbModelToContainerModel(findResult); + + LeaseFactory.createLeaseState( + new ContainerLeaseAdapter(containerModel), + context + ) + .change(leaseId, proposedLeaseId) + .sync(new ContainerLeaseSyncer(containerModel)); + + await ContainersModel.update( + this.convertLeaseToDbModel(new ContainerLeaseAdapter(containerModel)), + { + where: { + accountName: account, + containerName: container + }, + transaction: t + } + ); + + return { + properties: containerModel.properties, + leaseId: containerModel.leaseId + }; + }); + } + + public async checkContainerExist( + context: Context, + account: string, + container: string + ): Promise { + await this.assertContainerExists(context, account, container, undefined); + } + + public async createBlob( + context: Context, + blob: BlobModel, + leaseAccessConditions?: Models.LeaseAccessConditions, + modifiedAccessConditions?: Models.ModifiedAccessConditions + ): Promise { + return this.sequelize.transaction(async (t) => { + await this.assertContainerExists( + context, + blob.accountName, + blob.containerName, + t + ); + + const blobFindResult = await BlobsModel.findOne({ + where: { + accountName: blob.accountName, + containerName: blob.containerName, + blobName: blob.name, + snapshot: blob.snapshot, + deleting: 0, + isCommitted: true + }, + transaction: t + }); + + validateWriteConditions( + context, + modifiedAccessConditions, + blobFindResult + ? 
this.convertDbModelToBlobModel(blobFindResult) + : undefined + ); + + // Create if not exists + if ( + modifiedAccessConditions && + modifiedAccessConditions.ifNoneMatch === "*" && + blobFindResult + ) { + throw StorageErrorFactory.getBlobAlreadyExists(context.contextId); + } + + if (blobFindResult) { + const blobModel: BlobModel = this.convertDbModelToBlobModel( + blobFindResult + ); + + LeaseFactory.createLeaseState(new BlobLeaseAdapter(blobModel), context) + .validate(new BlobWriteLeaseValidator(leaseAccessConditions)) + .sync(new BlobLeaseSyncer(blob)); // Keep original blob lease; + + if ( + blobModel.properties !== undefined && + blobModel.properties.accessTier === Models.AccessTier.Archive + ) { + throw StorageErrorFactory.getBlobArchived(context.contextId); + } + } + + await BlobsModel.upsert(this.convertBlobModelToDbModel(blob), { + transaction: t + }); + }); + } + + public async downloadBlob( + context: Context, + account: string, + container: string, + blob: string, + snapshot: string = "", + leaseAccessConditions?: Models.LeaseAccessConditions, + modifiedAccessConditions?: Models.ModifiedAccessConditions + ): Promise { + return this.sequelize.transaction(async (t) => { + await this.assertContainerExists(context, account, container, t); + + const blobFindResult = await BlobsModel.findOne({ + where: { + accountName: account, + containerName: container, + blobName: blob, + snapshot, + deleting: 0, + isCommitted: true + }, + transaction: t + }); + + validateReadConditions( + context, + modifiedAccessConditions, + blobFindResult + ? this.convertDbModelToBlobModel(blobFindResult) + : undefined + ); + + if (blobFindResult === null || blobFindResult === undefined) { + throw StorageErrorFactory.getBlobNotFound(context.contextId); + } + + const blobModel: BlobModel = this.convertDbModelToBlobModel( + blobFindResult + ); + + return LeaseFactory.createLeaseState( + new BlobLeaseAdapter(blobModel), + context + ) + .validate(new BlobReadLeaseValidator(leaseAccessConditions)) + .sync(new BlobLeaseSyncer(blobModel)); + }); + } + + public async listBlobs( + context: Context, + account: string, + container: string, + delimiter?: string, + blob?: string, + prefix: string = "", + maxResults: number = DEFAULT_LIST_BLOBS_MAX_RESULTS, + marker?: string, + includeSnapshots?: boolean, + includeUncommittedBlobs?: boolean + ): Promise<[BlobModel[], BlobPrefixModel[], any | undefined]> { + return this.sequelize.transaction(async (t) => { + await this.assertContainerExists(context, account, container, t); + + const whereQuery: any = { + accountName: account, + containerName: container + }; + + if (blob !== undefined) { + whereQuery.blobName = blob; + } else { + if (prefix.length > 0) { + whereQuery.blobName = { + [Op.like]: `${prefix}%` + }; + } + + if (marker !== undefined) { + if (whereQuery.blobName !== undefined) { + whereQuery.blobName[Op.gt] = marker; + } else { + whereQuery.blobName = { + [Op.gt]: marker + }; + } + } + } + if (!includeSnapshots) { + whereQuery.snapshot = ""; + } + if (!includeUncommittedBlobs) { + whereQuery.isCommitted = true; + } + whereQuery.deleting = 0; + + const leaseUpdateMapper = (model: BlobsModel) => { + const blobModel = this.convertDbModelToBlobModel(model); + return LeaseFactory.createLeaseState( + new BlobLeaseAdapter(blobModel), + context + ).sync(new BlobLeaseSyncer(blobModel)); + }; + + // fill the page by possibly querying multiple times + const page = new PageWithDelimiter(maxResults, delimiter, prefix); + + const nameItem = (item: BlobsModel): string => { + 
return this.getModelValue(item, "blobName", true); + }; + + const readPage = async (off: number): Promise => { + return await BlobsModel.findAll({ + where: whereQuery as any, + order: [["blobName", "ASC"]], + transaction: t, + limit: maxResults, + offset: off + }); + }; + + const [blobItems, blobPrefixes, nextMarker] = await page.fill(readPage, nameItem); + + return [blobItems.map(leaseUpdateMapper), blobPrefixes, nextMarker]; + }); + } + + public async listAllBlobs( + maxResults: number = DEFAULT_LIST_BLOBS_MAX_RESULTS, + marker?: string, + includeSnapshots?: boolean, + includeUncommittedBlobs?: boolean + ): Promise<[BlobModel[], any | undefined]> { + const whereQuery: any = {}; + if (marker !== undefined) { + whereQuery.blobName = { + [Op.gt]: marker + }; + } + if (!includeSnapshots) { + whereQuery.snapshot = ""; + } + if (!includeUncommittedBlobs) { + whereQuery.isCommitted = true; + } + whereQuery.deleting = 0; + + const blobFindResult = await BlobsModel.findAll({ + limit: maxResults + 1, + where: whereQuery as any, + order: [["blobName", "ASC"]] + }); + + if (blobFindResult.length <= maxResults) { + return [ + blobFindResult.map(this.convertDbModelToBlobModel.bind(this)), + undefined + ]; + } else { + blobFindResult.pop(); + const tail = blobFindResult[blobFindResult.length - 1]; + const nextMarker = this.getModelValue(tail, "blobName", true); + return [ + blobFindResult.map(this.convertDbModelToBlobModel.bind(this)), + nextMarker + ]; + } + } + + public async stageBlock( + context: Context, + block: BlockModel, + leaseAccessConditions?: Models.LeaseAccessConditions + ): Promise { + await this.sequelize.transaction(async (t) => { + await this.assertContainerExists( + context, + block.accountName, + block.containerName, + t + ); + + const blobFindResult = await BlobsModel.findOne({ + where: { + accountName: block.accountName, + containerName: block.containerName, + blobName: block.blobName, + snapshot: "", + deleting: 0 + }, + transaction: t + }); + + if (blobFindResult !== null && blobFindResult !== undefined) { + const blobModel: BlobModel = this.convertDbModelToBlobModel( + blobFindResult + ); + + if (blobModel.isCommitted === true) { + LeaseFactory.createLeaseState( + new BlobLeaseAdapter(blobModel), + context + ).validate(new BlobWriteLeaseValidator(leaseAccessConditions)); + } + + // If the new block ID does not have same length with before uncommited block ID, return failure. 
+ const existBlock = await BlocksModel.findOne({ + attributes: ["blockName"], + where: { + accountName: block.accountName, + containerName: block.containerName, + blobName: block.blobName, + deleting: 0 + }, + order: [["id", "ASC"]], + transaction: t + }); + if ( + existBlock && + Buffer.from( + this.getModelValue(existBlock, "blockName", true), + "base64" + ).length !== Buffer.from(block.name, "base64").length + ) { + throw StorageErrorFactory.getInvalidBlobOrBlock(context.contextId); + } + } else { + const newBlob = { + deleted: false, + accountName: block.accountName, + containerName: block.containerName, + name: block.blobName, + properties: { + creationTime: context.startTime!, + lastModified: context.startTime!, + etag: newEtag(), + contentLength: 0, + blobType: Models.BlobType.BlockBlob + }, + snapshot: "", + isCommitted: false + }; + await BlobsModel.upsert(this.convertBlobModelToDbModel(newBlob), { + transaction: t + }); + } + + await BlocksModel.upsert( + { + accountName: block.accountName, + containerName: block.containerName, + blobName: block.blobName, + blockName: block.name, + size: block.size, + persistency: this.serializeModelValue(block.persistency) + }, + { transaction: t } + ); + }); + } + + public getBlockList( + context: Context, + account: string, + container: string, + blob: string, + snapshot: string = "", + isCommitted?: boolean, + leaseAccessConditions?: Models.LeaseAccessConditions + ): Promise { + return this.sequelize.transaction(async (t) => { + await this.assertContainerExists(context, account, container, t); + + const blobFindResult = await BlobsModel.findOne({ + where: { + accountName: account, + containerName: container, + blobName: blob, + snapshot, + deleting: 0 + }, + transaction: t + }); + + if (blobFindResult === null || blobFindResult === undefined) { + throw StorageErrorFactory.getBlobNotFound(context.contextId); + } + + const blobModel = this.convertDbModelToBlobModel(blobFindResult); + LeaseFactory.createLeaseState( + new BlobLeaseAdapter(blobModel), + context + ).validate(new BlobReadLeaseValidator(leaseAccessConditions)); + + const res: { + uncommittedBlocks: Models.Block[]; + committedBlocks: Models.Block[]; + } = { + uncommittedBlocks: [], + committedBlocks: [] + }; + + if (isCommitted !== false) { + res.committedBlocks = blobModel.committedBlocksInOrder || []; + } + + if (isCommitted !== true) { + const blocks = await BlocksModel.findAll({ + attributes: ["blockName", "size"], + where: { + accountName: account, + containerName: container, + blobName: blob, + deleting: 0 + }, + order: [["id", "ASC"]], + transaction: t + }); + for (const item of blocks) { + const block = { + name: this.getModelValue(item, "blockName", true), + size: this.getModelValue(item, "size", true) + }; + res.uncommittedBlocks.push(block); + } + } + return res; + }); + } + + public async commitBlockList( + context: Context, + blob: BlobModel, + blockList: { blockName: string; blockCommitType: string }[], + leaseAccessConditions?: Models.LeaseAccessConditions, + modifiedAccessConditions?: Models.ModifiedAccessConditions + ): Promise { + await this.sequelize.transaction(async (t) => { + await this.assertContainerExists( + context, + blob.accountName, + blob.containerName, + t + ); + + const pCommittedBlocksMap: Map = new Map(); // persistencyCommittedBlocksMap + const pUncommittedBlocksMap: Map< + string, + PersistencyBlockModel + > = new Map(); // persistencyUncommittedBlocksMap + + const badRequestError = StorageErrorFactory.getInvalidBlockList( + context.contextId + ); + 
+ const blobFindResult = await BlobsModel.findOne({ + where: { + accountName: blob.accountName, + containerName: blob.containerName, + blobName: blob.name, + snapshot: blob.snapshot, + deleting: 0, + isCommitted: true + }, + transaction: t + }); + + validateWriteConditions( + context, + modifiedAccessConditions, + blobFindResult + ? this.convertDbModelToBlobModel(blobFindResult) // TODO: Reduce duplicated convert + : undefined + ); + + let creationTime = blob.properties.creationTime || context.startTime; + + if (blobFindResult !== null && blobFindResult !== undefined) { + const blobModel: BlobModel = this.convertDbModelToBlobModel( + blobFindResult + ); + + // Create if not exists + if ( + modifiedAccessConditions && + modifiedAccessConditions.ifNoneMatch === "*" && + blobModel && + blobModel.isCommitted + ) { + throw StorageErrorFactory.getBlobAlreadyExists(context.contextId); + } + + creationTime = blobModel.properties.creationTime || creationTime; + + LeaseFactory.createLeaseState( + new BlobLeaseAdapter(blobModel), + context + ).validate(new BlobWriteLeaseValidator(leaseAccessConditions)); + + const committedBlocksInOrder = blobModel.committedBlocksInOrder; + for (const pBlock of committedBlocksInOrder || []) { + pCommittedBlocksMap.set(pBlock.name, pBlock); + } + } + + const blockFindResult = await BlocksModel.findAll({ + where: { + accountName: blob.accountName, + containerName: blob.containerName, + blobName: blob.name, + deleting: 0 + }, + transaction: t + }); + for (const item of blockFindResult) { + const block = { + name: this.getModelValue(item, "blockName", true), + size: this.getModelValue(item, "size", true), + persistency: this.deserializeModelValue(item, "persistency") + }; + pUncommittedBlocksMap.set(block.name, block); + } + const selectedBlockList: PersistencyBlockModel[] = []; + for (const block of blockList) { + switch (block.blockCommitType.toLowerCase()) { + case "uncommitted": + const pUncommittedBlock = pUncommittedBlocksMap.get( + block.blockName + ); + if (pUncommittedBlock === undefined) { + throw badRequestError; + } else { + selectedBlockList.push(pUncommittedBlock); + } + break; + case "committed": + const pCommittedBlock = pCommittedBlocksMap.get(block.blockName); + if (pCommittedBlock === undefined) { + throw badRequestError; + } else { + selectedBlockList.push(pCommittedBlock); + } + break; + case "latest": + const pLatestBlock = + pUncommittedBlocksMap.get(block.blockName) || + pCommittedBlocksMap.get(block.blockName); + if (pLatestBlock === undefined) { + throw badRequestError; + } else { + selectedBlockList.push(pLatestBlock); + } + break; + default: + throw badRequestError; + } + } + + const commitBlockBlob: BlobModel = { + ...blob, + deleted: false, + committedBlocksInOrder: selectedBlockList, + properties: { + ...blob.properties, + creationTime, + lastModified: blob.properties.lastModified || context.startTime, + contentLength: selectedBlockList + .map((block) => block.size) + .reduce((total, val) => { + return total + val; + }, 0), + blobType: Models.BlobType.BlockBlob + } + }; + + new BlobLeaseSyncer(commitBlockBlob).sync({ + leaseId: undefined, + leaseExpireTime: undefined, + leaseDurationSeconds: undefined, + leaseBreakTime: undefined, + leaseDurationType: undefined, + leaseState: undefined, + leaseStatus: undefined + }); + + await BlobsModel.upsert(this.convertBlobModelToDbModel(commitBlockBlob), { + transaction: t + }); + + await BlocksModel.update( + { + deleting: literal("deleting + 1") + }, + { + where: { + accountName: blob.accountName, + 
containerName: blob.containerName, + blobName: blob.name + }, + transaction: t + } + ); + }); + } + + public async getBlobProperties( + context: Context, + account: string, + container: string, + blob: string, + snapshot: string = "", + leaseAccessConditions?: Models.LeaseAccessConditions, + modifiedAccessConditions?: Models.ModifiedAccessConditions + ): Promise { + return this.sequelize.transaction(async (t) => { + await this.assertContainerExists(context, account, container, t); + + const blobFindResult = await BlobsModel.findOne({ + where: { + accountName: account, + containerName: container, + blobName: blob, + snapshot, + deleting: 0, + isCommitted: true + }, + transaction: t + }); + + validateReadConditions( + context, + modifiedAccessConditions, + blobFindResult + ? this.convertDbModelToBlobModel(blobFindResult) + : undefined + ); + + if (blobFindResult === null || blobFindResult === undefined) { + throw StorageErrorFactory.getBlobNotFound(context.contextId); + } + + const blobModel: BlobModel = this.convertDbModelToBlobModel( + blobFindResult + ); + + if (!blobModel.isCommitted) { + throw StorageErrorFactory.getBlobNotFound(context.contextId); + } + + // TODO: Return blobCommittedBlockCount for append blob + + let responds = LeaseFactory.createLeaseState( + new BlobLeaseAdapter(blobModel), + context + ) + .validate(new BlobReadLeaseValidator(leaseAccessConditions)) + .sync(new BlobLeaseSyncer(blobModel)); + return { + ...responds, + properties : { + ...responds.properties, + tagCount: getBlobTagsCount(blobModel.blobTags), + }, + } + }); + } + + public undeleteBlob(): Promise { + throw new Error("Method not implemented."); + } + + public async createSnapshot( + context: Context, + account: string, + container: string, + blob: string, + leaseAccessConditions?: Models.LeaseAccessConditions, + metadata?: Models.BlobMetadata, + modifiedAccessConditions?: Models.ModifiedAccessConditions + ): Promise { + return this.sequelize.transaction(async (t) => { + await this.assertContainerExists(context, account, container, t); + + const blobFindResult = await BlobsModel.findOne({ + where: { + accountName: account, + containerName: container, + blobName: blob, + snapshot: "", + deleting: 0, + isCommitted: true + }, + transaction: t + }); + + validateReadConditions( + context, + modifiedAccessConditions, + blobFindResult + ? 
this.convertDbModelToBlobModel(blobFindResult) // TODO: Reduce double convert + : undefined + ); + + if (blobFindResult === null || blobFindResult === undefined) { + throw StorageErrorFactory.getBlobNotFound(context.contextId); + } + + const snapshotBlob: BlobModel = this.convertDbModelToBlobModel( + blobFindResult + ); + + LeaseFactory.createLeaseState( + new BlobLeaseAdapter(snapshotBlob), + context + ).validate(new BlobReadLeaseValidator(leaseAccessConditions)); + + const snapshotTime = convertDateTimeStringMsTo7Digital( + context.startTime!.toISOString() + ); + + snapshotBlob.snapshot = snapshotTime; + snapshotBlob.metadata = metadata || snapshotBlob.metadata; + snapshotBlob.blobTags = snapshotBlob.blobTags; + + new BlobLeaseSyncer(snapshotBlob).sync({ + leaseId: undefined, + leaseExpireTime: undefined, + leaseDurationSeconds: undefined, + leaseBreakTime: undefined, + leaseDurationType: undefined, + leaseState: undefined, + leaseStatus: undefined + }); + + await BlobsModel.upsert(this.convertBlobModelToDbModel(snapshotBlob), { + transaction: t + }); + + return { + properties: snapshotBlob.properties, + snapshot: snapshotTime + }; + }); + } + + public async deleteBlob( + context: Context, + account: string, + container: string, + blob: string, + options: Models.BlobDeleteMethodOptionalParams + ): Promise { + await this.sequelize.transaction(async (t) => { + await this.assertContainerExists(context, account, container, t); + + const blobFindResult = await BlobsModel.findOne({ + where: { + accountName: account, + containerName: container, + blobName: blob, + snapshot: options.snapshot === undefined ? "" : options.snapshot, + deleting: 0, + isCommitted: true // TODO: Support deleting uncommitted block blob + }, + transaction: t + }); + + validateWriteConditions( + context, + options.modifiedAccessConditions, + blobFindResult + ? this.convertDbModelToBlobModel(blobFindResult) // TODO: Reduce double convert + : undefined + ); + + if (blobFindResult === null || blobFindResult === undefined) { + throw StorageErrorFactory.getBlobNotFound(context.contextId); + } + + const blobModel = this.convertDbModelToBlobModel(blobFindResult); + + const againstBaseBlob = blobModel.snapshot === ""; + + if (againstBaseBlob) { + LeaseFactory.createLeaseState( + new BlobLeaseAdapter(blobModel), + context + ).validate(new BlobWriteLeaseValidator(options.leaseAccessConditions)); + } + + // Check bad requests + if (!againstBaseBlob && options.deleteSnapshots !== undefined) { + throw StorageErrorFactory.getInvalidOperation( + context.contextId, + "Invalid operation against a blob snapshot." 
+ ); + } + + // Scenario: Delete base blob only + if (againstBaseBlob && options.deleteSnapshots === undefined) { + const count = await BlobsModel.count({ + where: { + accountName: account, + containerName: container, + blobName: blob, + deleting: 0 + }, + transaction: t + }); + + if (count > 1) { + throw StorageErrorFactory.getSnapshotsPresent(context.contextId!); + } else { + await BlobsModel.update( + { + deleting: literal("deleting + 1") + }, + { + where: { + accountName: account, + containerName: container, + blobName: blob + }, + transaction: t + } + ); + + await BlocksModel.update( + { + deleting: literal("deleting + 1") + }, + { + where: { + accountName: account, + containerName: container, + blobName: blob + }, + transaction: t + } + ); + } + } + + // Scenario: Delete one snapshot only + if (!againstBaseBlob) { + await BlobsModel.update( + { + deleting: literal("deleting + 1") + }, + { + where: { + accountName: account, + containerName: container, + blobName: blob, + snapshot: blobModel.snapshot + }, + transaction: t + } + ); + } + + // Scenario: Delete base blob and snapshots + if ( + againstBaseBlob && + options.deleteSnapshots === Models.DeleteSnapshotsOptionType.Include + ) { + await BlobsModel.update( + { + deleting: literal("deleting + 1") + }, + { + where: { + accountName: account, + containerName: container, + blobName: blob + }, + transaction: t + } + ); + + await BlocksModel.update( + { + deleting: literal("deleting + 1") + }, + { + where: { + accountName: account, + containerName: container, + blobName: blob + }, + transaction: t + } + ); + } + + // Scenario: Delete all snapshots only + if ( + againstBaseBlob && + options.deleteSnapshots === Models.DeleteSnapshotsOptionType.Only + ) { + await BlobsModel.update( + { + deleting: literal("deleting + 1") + }, + { + where: { + accountName: account, + containerName: container, + blobName: blob, + snapshot: { [Op.gt]: "" } + }, + transaction: t + } + ); + } + }); + } + + public async setBlobHTTPHeaders( + context: Context, + account: string, + container: string, + blob: string, + leaseAccessConditions: Models.LeaseAccessConditions | undefined, + blobHTTPHeaders: Models.BlobHTTPHeaders | undefined, + modifiedAccessConditions?: Models.ModifiedAccessConditions + ): Promise { + return this.sequelize.transaction(async (t) => { + await this.assertContainerExists(context, account, container, t); + + const blobFindResult = await BlobsModel.findOne({ + where: { + accountName: account, + containerName: container, + blobName: blob, + snapshot: "", + deleting: 0, + isCommitted: true + }, + transaction: t + }); + + validateWriteConditions( + context, + modifiedAccessConditions, + blobFindResult + ? 
this.convertDbModelToBlobModel(blobFindResult) // TODO: Reduce double convert + : undefined + ); + + if (blobFindResult === null || blobFindResult === undefined) { + throw StorageErrorFactory.getBlobNotFound(context.contextId); + } + + const blobModel: BlobModel = this.convertDbModelToBlobModel( + blobFindResult + ); + + LeaseFactory.createLeaseState(new BlobLeaseAdapter(blobModel), context) + .validate(new BlobWriteLeaseValidator(leaseAccessConditions)) + .sync(new BlobWriteLeaseSyncer(blobModel)); + + if (blobHTTPHeaders !== undefined) { + blobModel.properties.cacheControl = blobHTTPHeaders.blobCacheControl; + blobModel.properties.contentType = blobHTTPHeaders.blobContentType; + blobModel.properties.contentMD5 = blobHTTPHeaders.blobContentMD5; + blobModel.properties.contentEncoding = + blobHTTPHeaders.blobContentEncoding; + blobModel.properties.contentLanguage = + blobHTTPHeaders.blobContentLanguage; + blobModel.properties.contentDisposition = + blobHTTPHeaders.blobContentDisposition; + } + + blobModel.properties.etag = newEtag(); + blobModel.properties.lastModified = context.startTime + ? context.startTime + : new Date(); + + await BlobsModel.update(this.convertBlobModelToDbModel(blobModel), { + where: { + accountName: account, + containerName: container, + blobName: blob, + snapshot: "", + deleting: 0 + }, + transaction: t + }); + + return blobModel.properties; + }); + } + + public setBlobMetadata( + context: Context, + account: string, + container: string, + blob: string, + leaseAccessConditions: Models.LeaseAccessConditions | undefined, + metadata: Models.BlobMetadata | undefined, + modifiedAccessConditions?: Models.ModifiedAccessConditions + ): Promise { + return this.sequelize.transaction(async (t) => { + await this.assertContainerExists(context, account, container, t); + + const blobFindResult = await BlobsModel.findOne({ + where: { + accountName: account, + containerName: container, + blobName: blob, + snapshot: "", + deleting: 0, + isCommitted: true + }, + transaction: t + }); + + validateWriteConditions( + context, + modifiedAccessConditions, + blobFindResult + ? this.convertDbModelToBlobModel(blobFindResult) // TODO: Reduce double convert + : undefined + ); + + if (blobFindResult === null || blobFindResult === undefined) { + throw StorageErrorFactory.getBlobNotFound(context.contextId); + } + + const blobModel = this.convertDbModelToBlobModel(blobFindResult); + + LeaseFactory.createLeaseState(new BlobLeaseAdapter(blobModel), context) + .validate(new BlobWriteLeaseValidator(leaseAccessConditions)) + .sync(new BlobWriteLeaseSyncer(blobModel)); + + const lastModified = context.startTime! 
|| new Date(); + const etag = newEtag(); + + await BlobsModel.update( + { + metadata: this.serializeModelValue(metadata) || null, + lastModified, + etag, + ...this.convertLeaseToDbModel(new BlobLeaseAdapter(blobModel)) + }, + { + where: { + accountName: account, + containerName: container, + blobName: blob, + snapshot: "", + deleting: 0 + }, + transaction: t + } + ); + + const ret: Models.BlobPropertiesInternal = { + lastModified, + etag, + leaseStatus: blobModel.properties.leaseStatus, + leaseDuration: blobModel.properties.leaseDuration, + leaseState: blobModel.properties.leaseState + }; + + return ret; + }); + } + + public async acquireBlobLease( + context: Context, + account: string, + container: string, + blob: string, + duration: number, + proposedLeaseId?: string, + options: Models.BlobAcquireLeaseOptionalParams = {} + ): Promise { + return this.sequelize.transaction(async (t) => { + await this.assertContainerExists(context, account, container, t); + + const blobFindResult = await BlobsModel.findOne({ + where: { + accountName: account, + containerName: container, + blobName: blob, + snapshot: "", + deleting: 0, + isCommitted: true + }, + transaction: t + }); + + validateWriteConditions( + context, + options.modifiedAccessConditions, + blobFindResult + ? this.convertDbModelToBlobModel(blobFindResult) // TODO: Reduce double convert + : undefined + ); + + if (blobFindResult === null || blobFindResult === undefined) { + throw StorageErrorFactory.getBlobNotFound(context.contextId); + } + + const blobModel = this.convertDbModelToBlobModel(blobFindResult); + + const lease = LeaseFactory.createLeaseState( + new BlobLeaseAdapter(blobModel), + context + ).acquire(duration, proposedLeaseId).lease; + + await BlobsModel.update(this.convertLeaseToDbModel(lease), { + where: { + accountName: account, + containerName: container, + blobName: blob, + snapshot: "", + deleting: 0 + }, + transaction: t + }); + return { properties: blobModel.properties, leaseId: lease.leaseId }; + }); + } + + public async releaseBlobLease( + context: Context, + account: string, + container: string, + blob: string, + leaseId: string, + options: Models.BlobReleaseLeaseOptionalParams = {} + ): Promise { + return this.sequelize.transaction(async (t) => { + await this.assertContainerExists(context, account, container, t); + + const blobFindResult = await BlobsModel.findOne({ + where: { + accountName: account, + containerName: container, + blobName: blob, + snapshot: "", + deleting: 0, + isCommitted: true + }, + transaction: t + }); + + validateWriteConditions( + context, + options.modifiedAccessConditions, + blobFindResult + ? 
this.convertDbModelToBlobModel(blobFindResult) // TODO: Reduce double convert + : undefined + ); + + if (blobFindResult === null || blobFindResult === undefined) { + throw StorageErrorFactory.getBlobNotFound(context.contextId); + } + + const blobModel = this.convertDbModelToBlobModel(blobFindResult); + + const lease = LeaseFactory.createLeaseState( + new BlobLeaseAdapter(blobModel), + context + ).release(leaseId).lease; + + await BlobsModel.update(this.convertLeaseToDbModel(lease), { + where: { + accountName: account, + containerName: container, + blobName: blob, + snapshot: "", + deleting: 0 + }, + transaction: t + }); + return blobModel.properties; + }); + } + + public async renewBlobLease( + context: Context, + account: string, + container: string, + blob: string, + leaseId: string, + options: Models.BlobRenewLeaseOptionalParams = {} + ): Promise { + return this.sequelize.transaction(async (t) => { + await this.assertContainerExists(context, account, container, t); + + const blobFindResult = await BlobsModel.findOne({ + where: { + accountName: account, + containerName: container, + blobName: blob, + snapshot: "", + deleting: 0, + isCommitted: true + }, + transaction: t + }); + + validateWriteConditions( + context, + options.modifiedAccessConditions, + blobFindResult + ? this.convertDbModelToBlobModel(blobFindResult) // TODO: Reduce double convert + : undefined + ); + + if (blobFindResult === null || blobFindResult === undefined) { + throw StorageErrorFactory.getBlobNotFound(context.contextId); + } + + const blobModel = this.convertDbModelToBlobModel(blobFindResult); + + const lease = LeaseFactory.createLeaseState( + new BlobLeaseAdapter(blobModel), + context + ).renew(leaseId).lease; + + await BlobsModel.update(this.convertLeaseToDbModel(lease), { + where: { + accountName: account, + containerName: container, + blobName: blob, + snapshot: "", + deleting: 0 + }, + transaction: t + }); + return { properties: blobModel.properties, leaseId: lease.leaseId }; + }); + } + + public async changeBlobLease( + context: Context, + account: string, + container: string, + blob: string, + leaseId: string, + proposedLeaseId: string, + options: Models.BlobChangeLeaseOptionalParams = {} + ): Promise { + return this.sequelize.transaction(async (t) => { + await this.assertContainerExists(context, account, container, t); + + const blobFindResult = await BlobsModel.findOne({ + where: { + accountName: account, + containerName: container, + blobName: blob, + snapshot: "", + deleting: 0, + isCommitted: true + }, + transaction: t + }); + + validateWriteConditions( + context, + options.modifiedAccessConditions, + blobFindResult + ? 
this.convertDbModelToBlobModel(blobFindResult) // TODO: Reduce double convert + : undefined + ); + + if (blobFindResult === null || blobFindResult === undefined) { + throw StorageErrorFactory.getBlobNotFound(context.contextId); + } + + const blobModel = this.convertDbModelToBlobModel(blobFindResult); + + const lease = LeaseFactory.createLeaseState( + new BlobLeaseAdapter(blobModel), + context + ).change(leaseId, proposedLeaseId).lease; + + await BlobsModel.update(this.convertLeaseToDbModel(lease), { + where: { + accountName: account, + containerName: container, + blobName: blob, + snapshot: "", + deleting: 0 + }, + transaction: t + }); + return { properties: blobModel.properties, leaseId: lease.leaseId }; + }); + } + + public async breakBlobLease( + context: Context, + account: string, + container: string, + blob: string, + breakPeriod: number | undefined, + options: Models.BlobBreakLeaseOptionalParams = {} + ): Promise { + return this.sequelize.transaction(async (t) => { + await this.assertContainerExists(context, account, container, t); + + const blobFindResult = await BlobsModel.findOne({ + where: { + accountName: account, + containerName: container, + blobName: blob, + snapshot: "", + deleting: 0, + isCommitted: true + }, + transaction: t + }); + + validateWriteConditions( + context, + options.modifiedAccessConditions, + blobFindResult + ? this.convertDbModelToBlobModel(blobFindResult) // TODO: Reduce double convert + : undefined + ); + + if (blobFindResult === null || blobFindResult === undefined) { + throw StorageErrorFactory.getBlobNotFound(context.contextId); + } + + const blobModel = this.convertDbModelToBlobModel(blobFindResult); + + const lease = LeaseFactory.createLeaseState( + new BlobLeaseAdapter(blobModel), + context + ).break(breakPeriod).lease; + + const leaseTimeSeconds: number = + lease.leaseState === Models.LeaseStateType.Breaking && + lease.leaseBreakTime + ? Math.round( + (lease.leaseBreakTime.getTime() - context.startTime!.getTime()) / + 1000 + ) + : 0; + + await BlobsModel.update(this.convertLeaseToDbModel(lease), { + where: { + accountName: account, + containerName: container, + blobName: blob, + snapshot: "", + deleting: 0 + }, + transaction: t + }); + return { properties: blobModel.properties, leaseTime: leaseTimeSeconds }; + }); + } + + public async checkBlobExist( + context: Context, + account: string, + container: string, + blob: string, + snapshot?: string | undefined + ): Promise { + await this.sequelize.transaction(async (t) => { + await this.assertContainerExists(context, account, container, t); + + const res = await BlobsModel.findOne({ + where: { + accountName: account, + containerName: container, + blobName: blob, + snapshot: snapshot ? snapshot : "", + deleting: 0 + }, + transaction: t + }); + + if (res === null || res === undefined) { + throw StorageErrorFactory.getBlobNotFound(context.contextId); + } + }); + } + + public async getBlobType( + account: string, + container: string, + blob: string, + snapshot?: string | undefined + ): Promise< + { blobType: Models.BlobType | undefined; isCommitted: boolean } | undefined + > { + const res = await BlobsModel.findOne({ + where: { + accountName: account, + containerName: container, + blobName: blob, + snapshot: snapshot ? 
snapshot : "", + deleting: 0 + } + }); + + if (res === null || res === undefined) { + return undefined; + } + + const blobType = this.getModelValue(res, "blobType", true); + const isCommitted = this.getModelValue(res, "isCommitted", true); + + return { blobType, isCommitted }; + } + + public startCopyFromURL( + context: Context, + source: BlobId, + destination: BlobId, + copySource: string, + metadata: Models.BlobMetadata | undefined, + tier: Models.AccessTier | undefined, + options: Models.BlobStartCopyFromURLOptionalParams = {} + ): Promise { + return this.sequelize.transaction(async (t) => { + const sourceBlob = await this.getBlobWithLeaseUpdated( + source.account, + source.container, + source.blob, + source.snapshot, + context, + true, + true, + t + ); + + options.sourceModifiedAccessConditions = + options.sourceModifiedAccessConditions || {}; + validateReadConditions( + context, + { + ifModifiedSince: + options.sourceModifiedAccessConditions.sourceIfModifiedSince, + ifUnmodifiedSince: + options.sourceModifiedAccessConditions.sourceIfUnmodifiedSince, + ifMatch: options.sourceModifiedAccessConditions.sourceIfMatch, + ifNoneMatch: options.sourceModifiedAccessConditions.sourceIfNoneMatch + }, + sourceBlob + ); + + const destBlob = await this.getBlobWithLeaseUpdated( + destination.account, + destination.container, + destination.blob, + undefined, + context, + false, + undefined, + t + ); + + validateWriteConditions( + context, + options.modifiedAccessConditions, + destBlob + ); + + if (destBlob) { + new BlobWriteLeaseValidator(options.leaseAccessConditions).validate( + new BlobLeaseAdapter(destBlob), + context + ); + } + + // If source is uncommitted or deleted + if ( + sourceBlob === undefined || + sourceBlob.deleted || + !sourceBlob.isCommitted + ) { + throw StorageErrorFactory.getBlobNotFound(context.contextId!); + } + + if (sourceBlob.properties.accessTier === Models.AccessTier.Archive + && (tier === undefined || source.account !== destination.account)) { + throw StorageErrorFactory.getBlobArchived(context.contextId!); + } + + await this.assertContainerExists( + context, + destination.account, + destination.container, + t + ); + + // Deep clone a copied blob + const copiedBlob: BlobModel = { + name: destination.blob, + deleted: false, + snapshot: "", + properties: { + ...sourceBlob.properties, + creationTime: context.startTime!, + lastModified: context.startTime!, + etag: newEtag(), + leaseStatus: + destBlob !== undefined + ? destBlob.properties.leaseStatus + : Models.LeaseStatusType.Unlocked, + leaseState: + destBlob !== undefined + ? destBlob.properties.leaseState + : Models.LeaseStateType.Available, + leaseDuration: + destBlob !== undefined + ? destBlob.properties.leaseDuration + : undefined, + copyId: uuid(), + copyStatus: Models.CopyStatusType.Success, + copySource, + copyProgress: sourceBlob.properties.contentLength + ? `${sourceBlob.properties.contentLength}/${sourceBlob.properties.contentLength}` + : undefined, + copyCompletionTime: context.startTime, + copyStatusDescription: undefined, + incrementalCopy: false, + destinationSnapshot: undefined, + deletedTime: undefined, + remainingRetentionDays: undefined, + archiveStatus: undefined, + accessTierChangeTime: undefined + }, + metadata: + metadata === undefined || Object.keys(metadata).length === 0 + ? 
{ ...sourceBlob.metadata } + : metadata, + accountName: destination.account, + containerName: destination.container, + pageRangesInOrder: sourceBlob.pageRangesInOrder, + isCommitted: sourceBlob.isCommitted, + leaseDurationSeconds: + destBlob !== undefined ? destBlob.leaseDurationSeconds : undefined, + leaseId: destBlob !== undefined ? destBlob.leaseId : undefined, + leaseExpireTime: + destBlob !== undefined ? destBlob.leaseExpireTime : undefined, + leaseBreakTime: + destBlob !== undefined ? destBlob.leaseBreakTime : undefined, + committedBlocksInOrder: sourceBlob.committedBlocksInOrder, + persistency: sourceBlob.persistency, + blobTags: options.blobTagsString === undefined ? undefined : getTagsFromString(options.blobTagsString, context.contextId!) + }; + + if ( + copiedBlob.properties.blobType === Models.BlobType.BlockBlob && + tier !== undefined + ) { + copiedBlob.properties.accessTier = this.parseTier(tier); + if (copiedBlob.properties.accessTier === undefined) { + throw StorageErrorFactory.getInvalidHeaderValue(context.contextId, { + HeaderName: "x-ms-access-tier", + HeaderValue: `${tier}` + }); + } + } + + if ( + copiedBlob.properties.blobType === Models.BlobType.PageBlob && + tier !== undefined + ) { + throw StorageErrorFactory.getInvalidHeaderValue(context.contextId, { + HeaderName: "x-ms-access-tier", + HeaderValue: `${tier}` + }); + } + + await BlobsModel.upsert(this.convertBlobModelToDbModel(copiedBlob), { + transaction: t + }); + return copiedBlob.properties; + }); + } + + public copyFromURL( + context: Context, + source: BlobId, + destination: BlobId, + copySource: string, + metadata: Models.BlobMetadata | undefined + ): Promise { + throw new Error("Method not implemented."); + } + + public setTier( + context: Context, + account: string, + container: string, + blob: string, + tier: Models.AccessTier, + leaseAccessConditions?: Models.LeaseAccessConditions + ): Promise<200 | 202> { + return this.sequelize.transaction(async (t) => { + await this.assertContainerExists(context, account, container, t); + + const blobFindResult = await BlobsModel.findOne({ + where: { + accountName: account, + containerName: container, + blobName: blob, + snapshot: "", + deleting: 0, + isCommitted: true + }, + transaction: t + }); + + if (blobFindResult === null || blobFindResult === undefined) { + throw StorageErrorFactory.getBlobNotFound(context.contextId); + } + + let responseCode: 200 | 202 = 200; + + // check the lease action aligned with current lease state. 
+      // the API has no lease ID input, but running it on a lease-blocked blob will fail with LeaseIdMissing,
+      // this is aligned with server behavior
+
+      const blobModel: BlobModel = this.convertDbModelToBlobModel(
+        blobFindResult
+      );
+
+      LeaseFactory.createLeaseState(
+        new BlobLeaseAdapter(blobModel),
+        context
+      ).validate(new BlobWriteLeaseValidator(leaseAccessConditions));
+
+      // Check Blob is not snapshot
+      const snapshot = blobModel.snapshot;
+      if (snapshot !== "") {
+        throw StorageErrorFactory.getBlobSnapshotsPresent(context.contextId);
+      }
+
+      // Check BlobTier matches blob type
+      let accessTier = blobModel.properties.accessTier;
+      const blobType = blobModel.properties.blobType;
+      if (
+        (tier === Models.AccessTier.Archive ||
+          tier === Models.AccessTier.Cool ||
+          tier === Models.AccessTier.Hot) &&
+        blobType === Models.BlobType.BlockBlob
+      ) {
+        // Block blob
+        // tslint:disable-next-line:max-line-length
+        // TODO: check blob is not block blob with snapshot, throw StorageErrorFactory.getBlobSnapshotsPresent_hassnapshot()
+
+        // Archive -> Cool/Hot will return 202
+        if (
+          accessTier === Models.AccessTier.Archive &&
+          (tier === Models.AccessTier.Cool || tier === Models.AccessTier.Hot)
+        ) {
+          responseCode = 202;
+        }
+
+        accessTier = tier;
+      } else {
+        throw StorageErrorFactory.getAccessTierNotSupportedForBlobType(
+          context.contextId!
+        );
+      }
+      await BlobsModel.update(
+        {
+          accessTier,
+          accessTierInferred: false,
+          accessTierChangeTime: context.startTime
+        },
+        {
+          where: {
+            accountName: account,
+            containerName: container,
+            blobName: blob,
+            snapshot: "",
+            deleting: 0
+          },
+          transaction: t
+        }
+      );
+
+      return responseCode;
+    });
+  }
+
+  public uploadPages(
+    context: Context,
+    blob: BlobModel,
+    start: number,
+    end: number,
+    persistency: IExtentChunk,
+    leaseAccessConditions?: Models.LeaseAccessConditions,
+    modifiedAccessConditions?: Models.ModifiedAccessConditions
+  ): Promise {
+    throw new Error("Method not implemented.");
+  }
+
+  public clearRange(
+    context: Context,
+    blob: BlobModel,
+    start: number,
+    end: number,
+    leaseAccessConditions?: Models.LeaseAccessConditions,
+    modifiedAccessConditions?: Models.ModifiedAccessConditions
+  ): Promise {
+    throw new Error("Method not implemented.");
+  }
+
+  public getPageRanges(
+    context: Context,
+    account: string,
+    container: string,
+    blob: string,
+    snapshot?: string | undefined,
+    leaseAccessConditions?: Models.LeaseAccessConditions,
+    modifiedAccessConditions?: Models.ModifiedAccessConditions
+  ): Promise {
+    throw new Error("Method not implemented.");
+  }
+
+  public resizePageBlob(
+    context: Context,
+    account: string,
+    container: string,
+    blob: string,
+    blobContentLength: number,
+    leaseAccessConditions?: Models.LeaseAccessConditions,
+    modifiedAccessConditions?: Models.ModifiedAccessConditions
+  ): Promise {
+    throw new Error("Method not implemented.");
+  }
+
+  public updateSequenceNumber(
+    context: Context,
+    account: string,
+    container: string,
+    blob: string,
+    sequenceNumberAction: Models.SequenceNumberActionType,
+    blobSequenceNumber: number | undefined
+  ): Promise {
+    throw new Error("Method not implemented.");
+  }
+
+  public appendBlock(
+    context: Context,
+    block: BlockModel,
+    leaseAccessConditions?: Models.LeaseAccessConditions | undefined,
+    modifiedAccessConditions?: Models.ModifiedAccessConditions | undefined,
+    appendPositionAccessConditions?:
+      | Models.AppendPositionAccessConditions
+      | undefined
+  ): Promise {
+    throw new Error("Method not implemented.");
+  }
+
+  public async 
listUncommittedBlockPersistencyChunks( + marker: string = "-1", + maxResults: number = 2000 + ): Promise<[IExtentChunk[], string | undefined]> { + return BlocksModel.findAll({ + attributes: ["id", "persistency"], + where: { + id: { + [Op.gt]: parseInt(marker, 10) + }, + deleting: 0 + }, + limit: maxResults + 1, + order: [["id", "ASC"]] + }).then((res) => { + if (res.length < maxResults) { + return [ + res.map((obj) => { + return this.deserializeModelValue(obj, "persistency", true); + }), + undefined + ]; + } else { + res.pop(); + const nextMarker = this.getModelValue( + res[res.length - 1], + "id", + true + ); + return [ + res.map((obj) => + this.deserializeModelValue(obj, "persistency", true) + ), + nextMarker + ]; + } + }); + } + + public iteratorExtents(): AsyncIterator { + return new BlobReferredExtentsAsyncIterator(this); + } + + private async assertContainerExists( + context: Context, + account: string, + container: string, + transaction?: Transaction, + fullResult: boolean = false + ): Promise { + const findResult = await ContainersModel.findOne({ + attributes: fullResult ? undefined : ["accountName"], + where: { + accountName: account, + containerName: container + }, + transaction + }); + if (findResult === undefined || findResult === null) { + throw StorageErrorFactory.getContainerNotFound(context.contextId); + } + return findResult; + } + + private getModelValue(model: Model, key: string): T | undefined; + private getModelValue(model: Model, key: string, isRequired: true): T; + private getModelValue( + model: Model, + key: string, + isRequired?: boolean + ): T | undefined { + let value = model.get(key) as T | undefined; + if (value === null) { + value = undefined; + } + if (value === undefined && isRequired === true) { + // tslint:disable-next-line:max-line-length + throw new Error( + `SqlBlobMetadataStore:getModelValue() error. ${key} is required but value from database model is undefined.` + ); + } + return value; + } + + private deserializeModelValue( + model: Model, + key: string, + isRequired: boolean = false + ): any { + const rawValue = this.getModelValue(model, key); + if (typeof rawValue === "string") { + // TODO: Decouple deserializer + return JSON.parse(rawValue); + } + + if (isRequired) { + throw new Error( + // tslint:disable-next-line:max-line-length + `SqlBlobMetadataStore:deserializeModelValue() error. ${key} is required but value from database model is undefined.` + ); + } + + return undefined; + } + + private serializeModelValue(value: any): string | undefined { + if (value === undefined || value === null) { + return undefined; + } + + return JSON.stringify(value); + } + + /** + * This method will restore object to Uint8Array. + * + * @private + * @param {*} obj + * @returns {(Uint8Array | undefined)} + * @memberof LokiBlobMetadataStore + */ + private restoreUint8Array(obj: any): Uint8Array | undefined { + if (typeof obj !== "object") { + return undefined; + } + + if (obj instanceof Uint8Array) { + return obj; + } + + if (obj.type === "Buffer") { + obj = obj.data; + } + + const length = Object.keys(obj).length; + const arr = Buffer.allocUnsafe(length); + + for (let i = 0; i < length; i++) { + if (!obj.hasOwnProperty(i)) { + throw new TypeError( + `Cannot restore sql DB persisted object to Uint8Array. 
Key ${i} is missing.` + ); + } + + arr[i] = obj[i]; + } + + return arr; + } + + private convertDbModelToContainerModel( + dbModel: ContainersModel + ): ContainerModel { + const accountName = this.getModelValue( + dbModel, + "accountName", + true + ); + const name = this.getModelValue(dbModel, "containerName", true); + const containerAcl = this.deserializeModelValue(dbModel, "containerAcl"); + const metadata = this.deserializeModelValue(dbModel, "metadata"); + + const lastModified = this.getModelValue( + dbModel, + "lastModified", + true + ); + const etag = this.getModelValue(dbModel, "etag", true); + const publicAccess = this.deserializeModelValue(dbModel, "publicAccess"); + const lease = this.convertDbModelToLease(dbModel); + const leaseBreakTime = lease.leaseBreakTime; + const leaseExpireTime = lease.leaseExpireTime; + const leaseId = lease.leaseId; + const leaseDurationSeconds = lease.leaseDurationSeconds; + const leaseStatus = lease.leaseStatus; + const leaseState = lease.leaseState; + const leaseDuration = lease.leaseDurationType; + const hasImmutabilityPolicy = this.getModelValue( + dbModel, + "hasImmutabilityPolicy" + ); + const hasLegalHold = this.getModelValue(dbModel, "hasLegalHold"); + + const ret: ContainerModel = { + accountName, + name, + properties: { + lastModified, + etag, + leaseStatus, + leaseDuration, + leaseState + }, + leaseId, + leaseBreakTime, + leaseExpireTime, + leaseDurationSeconds + }; + + if (metadata !== undefined) { + ret.metadata = metadata; + } + + if (containerAcl !== undefined) { + ret.containerAcl = containerAcl; + } + + if (publicAccess !== undefined) { + ret.properties.publicAccess = publicAccess; + } + + if (hasImmutabilityPolicy !== undefined) { + ret.properties.hasImmutabilityPolicy = hasImmutabilityPolicy; + } + + if (hasLegalHold !== undefined) { + ret.properties.hasLegalHold = hasLegalHold; + } + + return ret; + } + + private convertContainerModelToDbModel(container: ContainerModel): any { + const lease = new ContainerLeaseAdapter(container).toString(); + return { + accountName: container.accountName, + containerName: container.name, + lastModified: container.properties.lastModified, + etag: container.properties.etag, + metadata: this.serializeModelValue(container.metadata), + containerAcl: this.serializeModelValue(container.containerAcl), + publicAccess: this.serializeModelValue(container.properties.publicAccess), + lease, + hasImmutabilityPolicy: container.properties.hasImmutabilityPolicy, + hasLegalHold: container.properties.hasLegalHold + }; + } + + private convertDbModelToBlobModel(dbModel: BlobsModel): BlobModel { + const contentProperties: IBlobContentProperties = this.convertDbModelToBlobContentProperties( + dbModel + ); + + const lease = this.convertDbModelToLease(dbModel); + + return { + accountName: this.getModelValue(dbModel, "accountName", true), + containerName: this.getModelValue(dbModel, "containerName", true), + name: this.getModelValue(dbModel, "blobName", true), + snapshot: this.getModelValue(dbModel, "snapshot", true), + isCommitted: this.getModelValue(dbModel, "isCommitted", true), + properties: { + lastModified: this.getModelValue(dbModel, "lastModified", true), + etag: this.getModelValue(dbModel, "etag", true), + leaseDuration: lease.leaseDurationType, + creationTime: this.getModelValue(dbModel, "creationTime"), + leaseState: lease.leaseState, + leaseStatus: lease.leaseStatus, + accessTier: this.getModelValue( + dbModel, + "accessTier" + ), + accessTierInferred: this.getModelValue( + dbModel, + "accessTierInferred" + ), + 
accessTierChangeTime: this.getModelValue( + dbModel, + "accessTierChangeTime" + ), + blobSequenceNumber: this.getModelValue( + dbModel, + "blobSequenceNumber" + ), + blobType: this.getModelValue(dbModel, "blobType"), + contentMD5: contentProperties + ? this.restoreUint8Array(contentProperties.contentMD5) + : undefined, + contentDisposition: contentProperties + ? contentProperties.contentDisposition + : undefined, + contentEncoding: contentProperties + ? contentProperties.contentEncoding + : undefined, + contentLanguage: contentProperties + ? contentProperties.contentLanguage + : undefined, + contentLength: contentProperties + ? contentProperties.contentLength + : undefined, + contentType: contentProperties + ? contentProperties.contentType + : undefined, + cacheControl: contentProperties + ? contentProperties.cacheControl + : undefined + }, + leaseDurationSeconds: lease.leaseDurationSeconds, + leaseBreakTime: lease.leaseBreakTime, + leaseExpireTime: lease.leaseExpireTime, + leaseId: lease.leaseId, + persistency: this.deserializeModelValue(dbModel, "persistency"), + committedBlocksInOrder: this.deserializeModelValue( + dbModel, + "committedBlocksInOrder" + ), + metadata: this.deserializeModelValue(dbModel, "metadata"), + blobTags: this.deserializeModelValue(dbModel, "blobTags") + }; + } + + private convertBlobModelToDbModel(blob: BlobModel): any { + const contentProperties = this.convertBlobContentPropertiesToDbModel( + blob.properties + ); + + const lease = this.convertLeaseToDbModel(new BlobLeaseAdapter(blob)); + return { + accountName: blob.accountName, + containerName: blob.containerName, + blobName: blob.name, + snapshot: blob.snapshot, + blobType: blob.properties.blobType, + blobSequenceNumber: blob.properties.blobSequenceNumber || null, + isCommitted: blob.isCommitted, + lastModified: blob.properties.lastModified, + creationTime: blob.properties.creationTime || null, + etag: blob.properties.etag, + accessTier: blob.properties.accessTier || null, + accessTierChangeTime: blob.properties.accessTierChangeTime || null, + accessTierInferred: blob.properties.accessTierInferred || null, + leaseBreakExpireTime: blob.leaseBreakTime || null, + leaseExpireTime: blob.leaseExpireTime || null, + leaseId: blob.leaseId || null, + leasedurationNumber: blob.leaseDurationSeconds || null, + leaseDuration: blob.properties.leaseDuration || null, + leaseStatus: blob.properties.leaseStatus || null, + leaseState: blob.properties.leaseState || null, + ...lease, + persistency: this.serializeModelValue(blob.persistency) || null, + committedBlocksInOrder: + this.serializeModelValue(blob.committedBlocksInOrder) || null, + metadata: this.serializeModelValue(blob.metadata) || null, + blobTags: this.serializeModelValue(blob.blobTags) || null, + ...contentProperties + }; + } + + private convertDbModelToBlobContentProperties( + dbModel: BlobsModel + ): IBlobContentProperties { + return this.deserializeModelValue(dbModel, "contentProperties"); + } + + private convertBlobContentPropertiesToDbModel( + contentProperties: IBlobContentProperties + ): object { + return { + contentProperties: + this.serializeModelValue({ + contentLength: contentProperties.contentLength, + contentType: contentProperties.contentType, + contentEncoding: contentProperties.contentEncoding, + contentLanguage: contentProperties.contentLanguage, + contentMD5: contentProperties.contentMD5, + contentDisposition: contentProperties.contentDisposition, + cacheControl: contentProperties.cacheControl + }) || null + }; + } + + private 
convertDbModelToLease(dbModel: ContainersModel | BlobsModel): ILease { + const lease = + (this.deserializeModelValue(dbModel, "lease") as ILease) || {}; + + if (lease.leaseBreakTime && typeof lease.leaseBreakTime === "string") { + lease.leaseBreakTime = new Date(lease.leaseBreakTime); + } + + if (lease.leaseExpireTime && typeof lease.leaseExpireTime === "string") { + lease.leaseExpireTime = new Date(lease.leaseExpireTime); + } + + return lease; + } + + private convertLeaseToDbModel(lease: ILease): object { + let leaseString = ""; + if ( + lease instanceof ContainerLeaseAdapter || + lease instanceof BlobLeaseAdapter + ) { + leaseString = lease.toString(); + } else { + leaseString = JSON.stringify(lease); + } + return { lease: leaseString }; + } + + private async getBlobWithLeaseUpdated( + account: string, + container: string, + blob: string, + snapshot: string = "", + context: Context, + forceExist?: boolean, + forceCommitted?: boolean, + transaction?: Transaction + ): Promise { + await this.checkContainerExist(context, account, container); + + const blobFindResult = await BlobsModel.findOne({ + where: { + accountName: account, + containerName: container, + blobName: blob, + snapshot + }, + transaction + }); + + if (blobFindResult === null || blobFindResult === undefined) { + if (forceExist === false) { + return undefined; + } else { + throw StorageErrorFactory.getBlobNotFound(context.contextId); + } + } + + // Force exist if parameter forceExist is undefined or true + const doc = this.convertDbModelToBlobModel(blobFindResult); + if (forceExist === undefined || forceExist === true) { + if (forceCommitted) { + if (!doc || !(doc as BlobModel).isCommitted) { + throw StorageErrorFactory.getBlobNotFound(context.contextId); + } + } else { + if (!doc) { + throw StorageErrorFactory.getBlobNotFound(context.contextId); + } + } + } else { + if (forceCommitted) { + if (!doc || !(doc as BlobModel).isCommitted) { + return undefined; + } + } else { + if (!doc) { + return undefined; + } + } + } + + if (doc.properties) { + doc.properties.contentMD5 = this.restoreUint8Array( + doc.properties.contentMD5 + ); + } + + // Snapshot doesn't have lease + if (snapshot !== undefined && snapshot !== "") { + new BlobLeaseSyncer(doc).sync({ + leaseId: undefined, + leaseExpireTime: undefined, + leaseDurationSeconds: undefined, + leaseBreakTime: undefined, + leaseDurationType: undefined, + leaseState: Models.LeaseStateType.Available, // TODO: Lease state & status should be undefined for snapshots + leaseStatus: Models.LeaseStatusType.Unlocked // TODO: Lease state & status should be undefined for snapshots + }); + } else { + LeaseFactory.createLeaseState(new BlobLeaseAdapter(doc), context).sync( + new BlobLeaseSyncer(doc) + ); + } + + return doc; + } + + public setBlobTag( + context: Context, + account: string, + container: string, + blob: string, + snapshot: string | undefined, + leaseAccessConditions: Models.LeaseAccessConditions | undefined, + tags: Models.BlobTags | undefined, + modifiedAccessConditions?: Models.ModifiedAccessConditions + ): Promise { + return this.sequelize.transaction(async (t) => { + await this.assertContainerExists(context, account, container, t); + + const blobFindResult = await BlobsModel.findOne({ + where: { + accountName: account, + containerName: container, + blobName: blob, + snapshot: snapshot === undefined ? 
"" : snapshot, + deleting: 0, + isCommitted: true + }, + transaction: t + }); + + if (blobFindResult === null || blobFindResult === undefined) { + throw StorageErrorFactory.getBlobNotFound(context.contextId); + } + + const blobModel = this.convertDbModelToBlobModel(blobFindResult); + + LeaseFactory.createLeaseState(new BlobLeaseAdapter(blobModel), context) + .validate(new BlobWriteLeaseValidator(leaseAccessConditions)) + .sync(new BlobWriteLeaseSyncer(blobModel)); + + const lastModified = context.startTime! || new Date(); + const etag = newEtag(); + + await BlobsModel.update( + { + blobTags: this.serializeModelValue(tags) || null, + lastModified, + etag, + ...this.convertLeaseToDbModel(new BlobLeaseAdapter(blobModel)) + }, + { + where: { + accountName: account, + containerName: container, + blobName: blob, + snapshot: snapshot === undefined ? "" : snapshot, + deleting: 0 + }, + transaction: t + } + ); + }); + } + + public async getBlobTag( + context: Context, + account: string, + container: string, + blob: string, + snapshot: string = "", + leaseAccessConditions?: Models.LeaseAccessConditions, + modifiedAccessConditions?: Models.ModifiedAccessConditions + ): Promise { + return this.sequelize.transaction(async (t) => { + await this.assertContainerExists(context, account, container, t); + + const blobFindResult = await BlobsModel.findOne({ + where: { + accountName: account, + containerName: container, + blobName: blob, + snapshot, + deleting: 0, + isCommitted: true + }, + transaction: t + }); + + if (blobFindResult === null || blobFindResult === undefined) { + throw StorageErrorFactory.getBlobNotFound(context.contextId); + } + + const blobModel: BlobModel = this.convertDbModelToBlobModel( + blobFindResult + ); + + if (!blobModel.isCommitted) { + throw StorageErrorFactory.getBlobNotFound(context.contextId); + } + + LeaseFactory.createLeaseState( + new BlobLeaseAdapter(blobModel), + context + ).validate(new BlobReadLeaseValidator(leaseAccessConditions)); + + return blobModel.blobTags; + }); + } + + /** + * Get the tier setting from request headers. 
+ * + * @private + * @param {string} tier + * @returns {(Models.AccessTier | undefined)} + * @memberof BlobHandler + */ + private parseTier(tier: string): Models.AccessTier | undefined { + tier = tier.toLowerCase(); + if (tier === Models.AccessTier.Hot.toLowerCase()) { + return Models.AccessTier.Hot; + } + if (tier === Models.AccessTier.Cool.toLowerCase()) { + return Models.AccessTier.Cool; + } + if (tier === Models.AccessTier.Archive.toLowerCase()) { + return Models.AccessTier.Archive; + } + return undefined; + } +} diff --git a/src/blob/utils/utils.ts b/src/blob/utils/utils.ts index 38df653c6..0f7fe895c 100644 --- a/src/blob/utils/utils.ts +++ b/src/blob/utils/utils.ts @@ -3,6 +3,7 @@ import { createWriteStream, PathLike } from "fs"; import StorageErrorFactory from "../errors/StorageErrorFactory"; import { USERDELEGATIONKEY_BASIC_KEY } from "./constants"; import { BlobTag, BlobTags } from "@azure/storage-blob"; +import { TagContent } from "../persistence/QueryInterpreter/QueryNodes/IQueryNode"; export function checkApiVersion( inputApiVersion: string, @@ -153,7 +154,7 @@ export function getUserDelegationKeyValue( signedStartsOn: string, signedExpiresOn: string, signedVersion: string, -) : string { +): string { const stringToSign = [ signedObjectid, signedTenantid, @@ -167,19 +168,18 @@ export function getUserDelegationKeyValue( } export function getBlobTagsCount( - blobTags: BlobTags | undefined -) : number | undefined { + blobTags: BlobTags | undefined +): number | undefined { return (blobTags === undefined || blobTags?.blobTagSet.length === 0) ? undefined : blobTags?.blobTagSet.length } export function getTagsFromString(blobTagsString: string, contextID: string): BlobTags | undefined { - if (blobTagsString === '' || blobTagsString === undefined) - { + if (blobTagsString === '' || blobTagsString === undefined) { return undefined; } - let blobTags:BlobTag[] = []; + let blobTags: BlobTag[] = []; const rawTags = blobTagsString.split("&"); - rawTags.forEach((rawTag)=>{ + rawTags.forEach((rawTag) => { const tagpair = rawTag.split("="); blobTags.push({ // When the Blob tag is input with header, it's encoded, sometimes space will be encoded to "+" ("+" will be encoded to "%2B") @@ -190,28 +190,28 @@ export function getTagsFromString(blobTagsString: string, contextID: string): Bl }) validateBlobTag( { - blobTagSet:blobTags, + blobTagSet: blobTags, }, contextID ); return { - blobTagSet:blobTags, + blobTagSet: blobTags, }; } // validate as the limitation from https://learn.microsoft.com/en-us/rest/api/storageservices/set-blob-tags?tabs=azure-ad#request-body export function validateBlobTag(tags: BlobTags, contextID: string): void { - if (tags.blobTagSet.length > 10){ + if (tags.blobTagSet.length > 10) { throw StorageErrorFactory.getTagsTooLarge(contextID); } - tags.blobTagSet.forEach((tag)=>{ - if (tag.key.length == 0){ + tags.blobTagSet.forEach((tag) => { + if (tag.key.length == 0) { throw StorageErrorFactory.getEmptyTagName(contextID); } - if (tag.key.length > 128){ + if (tag.key.length > 128) { throw StorageErrorFactory.getTagsTooLarge(contextID); } - if (tag.value.length > 256){ + if (tag.value.length > 256) { throw StorageErrorFactory.getTagsTooLarge(contextID); } if (ContainsInvalidTagCharacter(tag.key)) { @@ -223,23 +223,37 @@ export function validateBlobTag(tags: BlobTags, contextID: string): void { }); } -function ContainsInvalidTagCharacter(s: string): boolean{ - for (let c of s) - { +function ContainsInvalidTagCharacter(s: string): boolean { + for (let c of s) { if (!(c >= 'a' && c <= 
'z' || - c >= 'A' && c <= 'Z' || - c >= '0' && c <= '9' || - c == ' ' || - c == '+' || - c == '-' || - c == '.' || - c == '/' || - c == ':' || - c == '=' || - c == '_')) - { - return true; + c >= 'A' && c <= 'Z' || + c >= '0' && c <= '9' || + c == ' ' || + c == '+' || + c == '-' || + c == '.' || + c == '/' || + c == ':' || + c == '=' || + c == '_')) { + return true; } } - return false; + return false; +} + +export function toBlobTags(input: TagContent[]): BlobTag[] { + const tags: Record = {}; + input.forEach(element => { + if (element.key !== '@container') { + tags[element.key!] = element.value!; + } + }); + + return Object.entries(tags).map(([key, value]) => { + return { + key: key, + value: value + } + }); } \ No newline at end of file diff --git a/tests/blob/apis/container.test.ts b/tests/blob/apis/container.test.ts index 64cf39880..544aca00d 100644 --- a/tests/blob/apis/container.test.ts +++ b/tests/blob/apis/container.test.ts @@ -6,7 +6,8 @@ import { BlobServiceClient, generateAccountSASQueryParameters, newPipeline, - StorageSharedKeyCredential + StorageSharedKeyCredential, + Tags } from "@azure/storage-blob"; import assert = require("assert"); import StorageErrorFactory from "../../../src/blob/errors/StorageErrorFactory"; @@ -674,7 +675,7 @@ describe("ContainerAPIs", () => { const inputmarker = undefined; let result = ( await containerClient - .listBlobsByHierarchy("/",{ + .listBlobsByHierarchy("/", { prefix: "" }) .byPage({ @@ -1151,6 +1152,65 @@ describe("ContainerAPIs", () => { assert.equal(result.segment.blobItems.length, 4); }); + it("filter blob by tags should work on container @loki @sql", async () => { + + const key1 = getUniqueName("key"); + const key2 = getUniqueName("key2"); + + const blobName1 = getUniqueName("blobname1"); + const appendBlobClient1 = containerClient.getAppendBlobClient(blobName1); + const tags1: Tags = {}; + tags1[key1] = getUniqueName("val1"); + tags1[key2] = "default"; + await appendBlobClient1.create({ tags: tags1 }); + + const blobName2 = getUniqueName("blobname2"); + const appendBlobClient2 = containerClient.getAppendBlobClient(blobName2); + const tags2: Tags = {}; + tags2[key1] = getUniqueName("val2"); + tags2[key2] = "default"; + await appendBlobClient2.create({ tags: tags2 }); + + const blobName3 = getUniqueName("blobname3"); + const appendBlobClient3 = containerClient.getAppendBlobClient(blobName3); + const tags3: Tags = {}; + tags3[key1] = getUniqueName("val3"); + tags3[key2] = "default"; + await appendBlobClient3.create({ tags: tags3 }); + + const expectedTags1: Tags = {}; + expectedTags1[key1] = tags1[key1]; + for await (const blob of containerClient.findBlobsByTags(`${key1}='${tags1[key1]}'`)) { + assert.deepStrictEqual(blob.containerName, containerName); + assert.deepStrictEqual(blob.name, blobName1); + assert.deepStrictEqual(blob.tags, expectedTags1); + assert.deepStrictEqual(blob.tagValue, tags1[key1]); + } + + const expectedTags2: Tags = {}; + expectedTags2[key1] = tags2[key1]; + const blobs = []; + for await (const blob of containerClient.findBlobsByTags(`${key1}='${tags2[key1]}'`)) { + blobs.push(blob); + } + assert.deepStrictEqual(blobs.length, 1); + assert.deepStrictEqual(blobs[0].containerName, containerName); + assert.deepStrictEqual(blobs[0].name, blobName2); + assert.deepStrictEqual(blobs[0].tags, expectedTags2); + assert.deepStrictEqual(blobs[0].tagValue, tags2[key1]); + + const blobsWithTag2 = []; + for await (const segment of containerClient.findBlobsByTags(`${key2}='default'`).byPage({ + maxPageSize: 1, + })) { + 
assert.ok(segment.blobs.length <= 1); + for (const blob of segment.blobs) { + blobsWithTag2.push(blob); + } + } + assert.deepStrictEqual(blobsWithTag2.length, 3); + }); + // Skip the case currently since js sdk caculate the stringToSign with "+" in prefix instead of decode to space it.skip("List blob should success with '+' in query @loki @sql", async () => { const blobClients = []; @@ -1165,7 +1225,7 @@ describe("ContainerAPIs", () => { await blockBlobClient.upload("", 0); blobClients.push(blobClient); } - + // list with prefix has "+" instead of "%20" for space // create service client let pipeline = newPipeline( @@ -1205,7 +1265,7 @@ describe("ContainerAPIs", () => { gotNames.push(item.name); } assert.deepStrictEqual(gotNames, blobNames); - + // clean up for (const blob of blobClients) { await blob.delete(); diff --git a/tests/blob/apis/service.test.ts b/tests/blob/apis/service.test.ts index 69402f633..eaf6b2bdb 100644 --- a/tests/blob/apis/service.test.ts +++ b/tests/blob/apis/service.test.ts @@ -6,7 +6,8 @@ import { generateAccountSASQueryParameters, newPipeline, SASProtocol, - StorageSharedKeyCredential + StorageSharedKeyCredential, + Tags } from "@azure/storage-blob"; import * as assert from "assert"; @@ -55,12 +56,12 @@ describe("ServiceAPIs", () => { await server.clean(); }); - it(`getUserDelegationKey with Key credential should fail @loki @sql`, async () => { + it(`getUserDelegationKey with Key credential should fail @loki @sql`, async () => { const startTime = new Date(); startTime.setHours(startTime.getHours() - 1); const expiryTime = new Date(); expiryTime.setDate(expiryTime.getDate() + 1); - + try { await serviceClient.getUserDelegationKey(startTime, expiryTime); assert.fail("Should fail to invoke getUserDelegationKey with account key credentials") @@ -72,7 +73,7 @@ describe("ServiceAPIs", () => { it(`getUserDelegationKey with SAS token credential should fail @loki @sql`, async () => { const sasTokenStart = new Date(); sasTokenStart.setHours(sasTokenStart.getHours() - 1); - + const sasTokenExpiry = new Date(); sasTokenExpiry.setDate(sasTokenExpiry.getDate() + 1); @@ -98,11 +99,11 @@ describe("ServiceAPIs", () => { const skStart = new Date(); skStart.setHours(skStart.getHours() - 1); - + const skExpiry = new Date(); skExpiry.setDate(skExpiry.getDate() + 1); - - try { + + try { await serviceClientWithSAS.getUserDelegationKey(skStart, skExpiry); assert.fail("Should fail to invoke getUserDelegationKey with SAS token credentials") } catch (error) { @@ -411,7 +412,7 @@ describe("ServiceAPIs", () => { await containerClient1.delete(); await containerClient2.delete(); }); - + // fix issue 2382 it("ListContainers without include metadata should not return contaienr metadata. 
@loki @sql", async () => { const containerNamePrefix = getUniqueName("container"); @@ -430,7 +431,7 @@ describe("ServiceAPIs", () => { .byPage() .next() ).value; - + assert.equal(result1.containerItems!.length, 2); assert.ok(result1.containerItems![0].name.startsWith(containerNamePrefix)); assert.ok(result1.containerItems![1].name.startsWith(containerNamePrefix)); @@ -493,6 +494,79 @@ describe("ServiceAPIs", () => { assert.ok(err); });; }); + + it("Find blob by tags should work @loki @sql", async function () { + const containerName = getUniqueName("container1"); + const containerClient = serviceClient.getContainerClient(containerName); + await containerClient.create(); + + const key1 = getUniqueName("key"); + const key2 = getUniqueName("key2"); + + const blobName1 = getUniqueName("blobname1"); + const appendBlobClient1 = containerClient.getAppendBlobClient(blobName1); + const tags1: Tags = {}; + tags1[key1] = getUniqueName("val1"); + tags1[key2] = "default"; + await appendBlobClient1.create({ tags: tags1 }); + + const blobName2 = getUniqueName("blobname2"); + const appendBlobClient2 = containerClient.getAppendBlobClient(blobName2); + const tags2: Tags = {}; + tags2[key1] = getUniqueName("val2"); + tags2[key2] = "default"; + await appendBlobClient2.create({ tags: tags2 }); + + const blobName3 = getUniqueName("blobname3"); + const appendBlobClient3 = containerClient.getAppendBlobClient(blobName3); + const tags3: Tags = {}; + tags3[key1] = getUniqueName("val3"); + tags3[key2] = "default"; + await appendBlobClient3.create({ tags: tags3 }); + + const expectedTags1: Tags = {}; + expectedTags1[key1] = tags1[key1]; + for await (const blob of serviceClient.findBlobsByTags(`${key1}='${tags1[key1]}'`)) { + assert.deepStrictEqual(blob.containerName, containerName); + assert.deepStrictEqual(blob.name, blobName1); + assert.deepStrictEqual(blob.tags, expectedTags1); + assert.deepStrictEqual(blob.tagValue, tags1[key1]); + } + + const expectedTags2: Tags = {}; + expectedTags2[key1] = tags2[key1]; + const blobs = []; + for await (const blob of serviceClient.findBlobsByTags(`${key1}='${tags2[key1]}'`)) { + blobs.push(blob); + } + assert.deepStrictEqual(blobs.length, 1); + assert.deepStrictEqual(blobs[0].containerName, containerName); + assert.deepStrictEqual(blobs[0].name, blobName2); + assert.deepStrictEqual(blobs[0].tags, expectedTags2); + assert.deepStrictEqual(blobs[0].tagValue, tags2[key1]); + + const blobsWithTag2 = []; + for await (const segment of serviceClient.findBlobsByTags(`${key2}='default'`).byPage({ + maxPageSize: 1, + })) { + assert.ok(segment.blobs.length <= 1); + for (const blob of segment.blobs) { + blobsWithTag2.push(blob); + } + } + assert.deepStrictEqual(blobsWithTag2.length, 3); + + for await (const blob of serviceClient.findBlobsByTags( + `@container='${containerName}' AND ${key1}='${tags1[key1]}' AND ${key2}='default'`, + )) { + assert.deepStrictEqual(blob.containerName, containerName); + assert.deepStrictEqual(blob.name, blobName1); + assert.deepStrictEqual(blob.tags, tags1); + assert.deepStrictEqual(blob.tagValue, ""); + } + + await containerClient.delete(); + }); }); describe("ServiceAPIs - secondary location endpoint", () => {