From 1e7d69b1528fad28c361ac50baafe97de199bcb3 Mon Sep 17 00:00:00 2001 From: EmmaZhu-MSFT Date: Tue, 22 Oct 2024 20:11:45 -0700 Subject: [PATCH] Filter by tags (#2311) * Filter blob by tags * Added cases for filter/Tag permission check * Resove merge conflict --- README.md | 4 +- .../authentication/ContainerSASPermissions.ts | 1 + .../OperationBlobSASPermission.ts | 4 + .../conditions/ConditionResourceAdapter.ts | 10 +- .../conditions/ConditionalHeadersAdapter.ts | 3 + src/blob/conditions/IConditionResource.ts | 3 + src/blob/conditions/IConditionalHeaders.ts | 5 + .../IConditionalHeadersValidator.ts | 3 +- .../ReadConditionalHeadersValidator.ts | 23 +- .../WriteConditionalHeadersValidator.ts | 16 +- src/blob/errors/StorageErrorFactory.ts | 3 +- src/blob/handlers/BlockBlobHandler.ts | 19 +- src/blob/handlers/ContainerHandler.ts | 43 +- src/blob/handlers/ServiceHandler.ts | 42 +- src/blob/persistence/FilterBlobPage.ts | 128 ++++ src/blob/persistence/IBlobMetadataStore.ts | 25 +- src/blob/persistence/LokiBlobMetadataStore.ts | 95 ++- .../QueryInterpreter/IQueryContext.ts | 1 + .../QueryInterpreter/QueryInterpreter.ts | 83 +++ .../QueryInterpreter/QueryNodes/AndNode.ts | 20 + .../QueryNodes/BinaryOperatorNode.ts | 14 + .../QueryNodes/ConstantNode.ts | 20 + .../QueryInterpreter/QueryNodes/EqualsNode.ts | 26 + .../QueryNodes/ExpressionNode.ts | 18 + .../QueryNodes/GreaterThanEqualNode.ts | 27 + .../QueryNodes/GreaterThanNode.ts | 28 + .../QueryInterpreter/QueryNodes/IQueryNode.ts | 14 + .../QueryInterpreter/QueryNodes/KeyNode.ts | 21 + .../QueryNodes/LessThanEqualNode.ts | 27 + .../QueryNodes/LessThanNode.ts | 27 + .../QueryNodes/NotEqualsNode.ts | 28 + .../QueryInterpreter/QueryNodes/OrNode.ts | 20 + .../QueryInterpreter/QueryParser.ts | 606 +++++++++++++++ src/blob/persistence/SqlBlobMetadataStore.ts | 111 ++- src/blob/utils/utils.ts | 74 +- tests/blob/apis/appendblob.test.ts | 73 +- tests/blob/apis/blob.test.ts | 699 +++++++++++++++--- 
tests/blob/apis/blockblob.test.ts | 93 ++- tests/blob/apis/container.test.ts | 388 +++++++++- tests/blob/apis/pageblob.test.ts | 253 ++++--- tests/blob/apis/service.test.ts | 335 ++++++++- tests/blob/sas.test.ts | 201 ++++- 42 files changed, 3353 insertions(+), 281 deletions(-) create mode 100644 src/blob/persistence/FilterBlobPage.ts create mode 100644 src/blob/persistence/QueryInterpreter/IQueryContext.ts create mode 100644 src/blob/persistence/QueryInterpreter/QueryInterpreter.ts create mode 100644 src/blob/persistence/QueryInterpreter/QueryNodes/AndNode.ts create mode 100644 src/blob/persistence/QueryInterpreter/QueryNodes/BinaryOperatorNode.ts create mode 100644 src/blob/persistence/QueryInterpreter/QueryNodes/ConstantNode.ts create mode 100644 src/blob/persistence/QueryInterpreter/QueryNodes/EqualsNode.ts create mode 100644 src/blob/persistence/QueryInterpreter/QueryNodes/ExpressionNode.ts create mode 100644 src/blob/persistence/QueryInterpreter/QueryNodes/GreaterThanEqualNode.ts create mode 100644 src/blob/persistence/QueryInterpreter/QueryNodes/GreaterThanNode.ts create mode 100644 src/blob/persistence/QueryInterpreter/QueryNodes/IQueryNode.ts create mode 100644 src/blob/persistence/QueryInterpreter/QueryNodes/KeyNode.ts create mode 100644 src/blob/persistence/QueryInterpreter/QueryNodes/LessThanEqualNode.ts create mode 100644 src/blob/persistence/QueryInterpreter/QueryNodes/LessThanNode.ts create mode 100644 src/blob/persistence/QueryInterpreter/QueryNodes/NotEqualsNode.ts create mode 100644 src/blob/persistence/QueryInterpreter/QueryNodes/OrNode.ts create mode 100644 src/blob/persistence/QueryInterpreter/QueryParser.ts diff --git a/README.md b/README.md index a4150bcc7..9e8ef7b13 100644 --- a/README.md +++ b/README.md @@ -981,7 +981,8 @@ Detailed support matrix: - OAuth authentication - Shared Access Signature Account Level - Shared Access Signature Service Level (Not support response header override in service SAS) - - Container Public Access + - 
Container Public Access + - Blob Tags (preview) - Supported REST APIs - List Containers - Set Service Properties @@ -1017,7 +1018,6 @@ Detailed support matrix: - Soft delete & Undelete Container - Soft delete & Undelete Blob - Incremental Copy Blob - - Blob Tags - Blob Query - Blob Versions - Blob Last Access Time diff --git a/src/blob/authentication/ContainerSASPermissions.ts b/src/blob/authentication/ContainerSASPermissions.ts index 148c1421f..ff3daecab 100644 --- a/src/blob/authentication/ContainerSASPermissions.ts +++ b/src/blob/authentication/ContainerSASPermissions.ts @@ -5,5 +5,6 @@ export enum ContainerSASPermission { Write = "w", Delete = "d", List = "l", + Filter = "f", Any = "AnyPermission" // This is only for blob batch operation. } diff --git a/src/blob/authentication/OperationBlobSASPermission.ts b/src/blob/authentication/OperationBlobSASPermission.ts index 4b7a29564..ddf21c746 100644 --- a/src/blob/authentication/OperationBlobSASPermission.ts +++ b/src/blob/authentication/OperationBlobSASPermission.ts @@ -369,6 +369,10 @@ OPERATION_BLOB_SAS_CONTAINER_PERMISSIONS.set( Operation.Container_ListBlobFlatSegment, new OperationBlobSASPermission(ContainerSASPermission.List) ); +OPERATION_BLOB_SAS_CONTAINER_PERMISSIONS.set( + Operation.Container_FilterBlobs, + new OperationBlobSASPermission(ContainerSASPermission.Filter) +); OPERATION_BLOB_SAS_CONTAINER_PERMISSIONS.set( Operation.Container_ListBlobHierarchySegment, new OperationBlobSASPermission(ContainerSASPermission.List) diff --git a/src/blob/conditions/ConditionResourceAdapter.ts b/src/blob/conditions/ConditionResourceAdapter.ts index f6a54c0ed..007dd644a 100644 --- a/src/blob/conditions/ConditionResourceAdapter.ts +++ b/src/blob/conditions/ConditionResourceAdapter.ts @@ -1,10 +1,11 @@ -import { BlobModel, ContainerModel } from "../persistence/IBlobMetadataStore"; +import { BlobModel, ContainerModel, FilterBlobModel } from "../persistence/IBlobMetadataStore"; import IConditionResource from 
"./IConditionResource"; export default class ConditionResourceAdapter implements IConditionResource { public exist: boolean; public etag: string; public lastModified: Date; + public blobItemWithTags?: FilterBlobModel; public constructor(resource: BlobModel | ContainerModel | undefined | null) { if ( @@ -33,5 +34,12 @@ export default class ConditionResourceAdapter implements IConditionResource { this.lastModified = new Date(resource.properties.lastModified); this.lastModified.setMilliseconds(0); // Precision to seconds + + const blobItem = resource as BlobModel; + this.blobItemWithTags = { + name: blobItem.name, + containerName: blobItem.containerName, + tags: blobItem.blobTags + }; } } diff --git a/src/blob/conditions/ConditionalHeadersAdapter.ts b/src/blob/conditions/ConditionalHeadersAdapter.ts index 40ef469ec..6846ac034 100644 --- a/src/blob/conditions/ConditionalHeadersAdapter.ts +++ b/src/blob/conditions/ConditionalHeadersAdapter.ts @@ -7,6 +7,7 @@ export default class ConditionalHeadersAdapter implements IConditionalHeaders { public ifUnmodifiedSince?: Date; public ifMatch?: string[]; public ifNoneMatch?: string[]; + public ifTags?: string; public constructor( context: Context, @@ -43,5 +44,7 @@ export default class ConditionalHeadersAdapter implements IConditionalHeaders { if (this.ifUnmodifiedSince) { this.ifUnmodifiedSince.setMilliseconds(0); // Precision to seconds } + + this.ifTags = modifiedAccessConditions.ifTags; } } diff --git a/src/blob/conditions/IConditionResource.ts b/src/blob/conditions/IConditionResource.ts index dda91509c..2e49b4210 100644 --- a/src/blob/conditions/IConditionResource.ts +++ b/src/blob/conditions/IConditionResource.ts @@ -1,3 +1,5 @@ +import { FilterBlobModel } from "../persistence/IBlobMetadataStore"; + export default interface IConditionResource { /** * Whether resource exists or not. @@ -13,4 +15,5 @@ export default interface IConditionResource { * last modified time for container or blob. 
*/ lastModified: Date; + blobItemWithTags?: FilterBlobModel; } diff --git a/src/blob/conditions/IConditionalHeaders.ts b/src/blob/conditions/IConditionalHeaders.ts index 1749fca54..b7fa14251 100644 --- a/src/blob/conditions/IConditionalHeaders.ts +++ b/src/blob/conditions/IConditionalHeaders.ts @@ -11,4 +11,9 @@ export interface IConditionalHeaders { * If-None-Match etag list without quotes. */ ifNoneMatch?: string[]; + + /** + * Specify a SQL where clause on blob tags to operate only on blobs with a matching value. + */ + ifTags?: string; } diff --git a/src/blob/conditions/IConditionalHeadersValidator.ts b/src/blob/conditions/IConditionalHeadersValidator.ts index 7a8c04431..9da4aa6af 100644 --- a/src/blob/conditions/IConditionalHeadersValidator.ts +++ b/src/blob/conditions/IConditionalHeadersValidator.ts @@ -6,6 +6,7 @@ export interface IConditionalHeadersValidator { validate( context: Context, conditionalHeaders: IConditionalHeaders, - resource: IConditionResource + resource: IConditionResource, + isSourceBlob?: boolean ): void; } diff --git a/src/blob/conditions/ReadConditionalHeadersValidator.ts b/src/blob/conditions/ReadConditionalHeadersValidator.ts index acf6036e7..d4b5a43fb 100644 --- a/src/blob/conditions/ReadConditionalHeadersValidator.ts +++ b/src/blob/conditions/ReadConditionalHeadersValidator.ts @@ -2,6 +2,7 @@ import StorageErrorFactory from "../errors/StorageErrorFactory"; import { ModifiedAccessConditions } from "../generated/artifacts/models"; import Context from "../generated/Context"; import { BlobModel, ContainerModel } from "../persistence/IBlobMetadataStore"; +import { generateQueryBlobWithTagsWhereFunction } from "../persistence/QueryInterpreter/QueryInterpreter"; import ConditionalHeadersAdapter from "./ConditionalHeadersAdapter"; import ConditionResourceAdapter from "./ConditionResourceAdapter"; import { IConditionalHeaders } from "./IConditionalHeaders"; @@ -11,12 +12,14 @@ import IConditionResource from "./IConditionResource"; export 
function validateReadConditions( context: Context, conditionalHeaders?: ModifiedAccessConditions, - model?: BlobModel | ContainerModel | null + model?: BlobModel | ContainerModel | null, + isSourceBlob?: boolean ) { new ReadConditionalHeadersValidator().validate( context, new ConditionalHeadersAdapter(context, conditionalHeaders), - new ConditionResourceAdapter(model) + new ConditionResourceAdapter(model), + isSourceBlob ); } @@ -30,11 +33,13 @@ export default class ReadConditionalHeadersValidator * @param context * @param conditionalHeaders * @param resource + * @param isSourceBlob */ public validate( context: Context, conditionalHeaders: IConditionalHeaders, - resource: IConditionResource + resource: IConditionResource, + isSourceBlob?: boolean ): void { // If-Match && If-Unmodified-Since && (If-None-Match || If-Modified-Since) @@ -66,7 +71,7 @@ export default class ReadConditionalHeadersValidator // If-Match const ifMatchPass = conditionalHeaders.ifMatch ? conditionalHeaders.ifMatch.includes(resource.etag) || - conditionalHeaders.ifMatch[0] === "*" + conditionalHeaders.ifMatch[0] === "*" : undefined; // If-Unmodified-Since @@ -107,6 +112,16 @@ export default class ReadConditionalHeadersValidator if (isModifiedSincePass === false && ifNoneMatchPass !== true) { throw StorageErrorFactory.getNotModified(context.contextId!); } + + if (conditionalHeaders.ifTags) { + const againstSourceBlob = isSourceBlob === undefined ? false : isSourceBlob; + const validateFunction = generateQueryBlobWithTagsWhereFunction(context, conditionalHeaders.ifTags, againstSourceBlob ? 
'x-ms-source-if-tags' : 'x-ms-if-tags'); + + if (conditionalHeaders?.ifTags !== undefined + && validateFunction(resource.blobItemWithTags).length === 0) { + throw StorageErrorFactory.getConditionNotMet(context.contextId!); + } + } } } } diff --git a/src/blob/conditions/WriteConditionalHeadersValidator.ts b/src/blob/conditions/WriteConditionalHeadersValidator.ts index 9572f47b1..88ca42dd8 100644 --- a/src/blob/conditions/WriteConditionalHeadersValidator.ts +++ b/src/blob/conditions/WriteConditionalHeadersValidator.ts @@ -5,6 +5,7 @@ import { } from "../generated/artifacts/models"; import Context from "../generated/Context"; import { BlobModel, ContainerModel } from "../persistence/IBlobMetadataStore"; +import { generateQueryBlobWithTagsWhereFunction } from "../persistence/QueryInterpreter/QueryInterpreter"; import ConditionalHeadersAdapter from "./ConditionalHeadersAdapter"; import ConditionResourceAdapter from "./ConditionResourceAdapter"; import { IConditionalHeaders } from "./IConditionalHeaders"; @@ -29,7 +30,7 @@ export function validateSequenceNumberWriteConditions( if ( conditionalHeaders.ifSequenceNumberLessThanOrEqualTo !== undefined && conditionalHeaders.ifSequenceNumberLessThanOrEqualTo < - model.properties.blobSequenceNumber + model.properties.blobSequenceNumber ) { throw StorageErrorFactory.getSequenceNumberConditionNotMet( context.contextId! @@ -39,7 +40,7 @@ export function validateSequenceNumberWriteConditions( if ( conditionalHeaders.ifSequenceNumberLessThan !== undefined && conditionalHeaders.ifSequenceNumberLessThan <= - model.properties.blobSequenceNumber + model.properties.blobSequenceNumber ) { throw StorageErrorFactory.getSequenceNumberConditionNotMet( context.contextId! 
@@ -49,7 +50,7 @@ export function validateSequenceNumberWriteConditions( if ( conditionalHeaders.ifSequenceNumberEqualTo !== undefined && conditionalHeaders.ifSequenceNumberEqualTo !== - model.properties.blobSequenceNumber + model.properties.blobSequenceNumber ) { throw StorageErrorFactory.getSequenceNumberConditionNotMet( context.contextId! @@ -167,6 +168,15 @@ export default class WriteConditionalHeadersValidator } return; } + + if (conditionalHeaders.ifTags) { + const validateFunction = generateQueryBlobWithTagsWhereFunction(context, conditionalHeaders.ifTags, 'x-ms-if-tags'); + + if (conditionalHeaders?.ifTags !== undefined + && validateFunction(resource.blobItemWithTags).length === 0) { + throw StorageErrorFactory.getConditionNotMet(context.contextId!); + } + } } } diff --git a/src/blob/errors/StorageErrorFactory.ts b/src/blob/errors/StorageErrorFactory.ts index 5290b7854..5fd4bca05 100644 --- a/src/blob/errors/StorageErrorFactory.ts +++ b/src/blob/errors/StorageErrorFactory.ts @@ -780,7 +780,6 @@ export default class StorageErrorFactory { { ReceivedCopyStatus: copyStatus } ); } - public static getInvalidMetadata(contextID: string): StorageError { return new StorageError( @@ -825,7 +824,7 @@ export default class StorageErrorFactory { "The tags specified are invalid. It contains characters that are not permitted.", contextID ); - } + } public static getInvalidXmlDocument( contextID: string = "" diff --git a/src/blob/handlers/BlockBlobHandler.ts b/src/blob/handlers/BlockBlobHandler.ts index c3252161d..84703faff 100644 --- a/src/blob/handlers/BlockBlobHandler.ts +++ b/src/blob/handlers/BlockBlobHandler.ts @@ -47,7 +47,7 @@ export default class BlockBlobHandler "application/octet-stream"; const contentMD5 = context.request!.getHeader("content-md5") ? 
options.blobHTTPHeaders.blobContentMD5 || - context.request!.getHeader("content-md5") + context.request!.getHeader("content-md5") : undefined; await this.metadataStore.checkContainerExist( @@ -161,9 +161,9 @@ export default class BlockBlobHandler } public async putBlobFromUrl(contentLength: number, copySource: string, options: Models.BlockBlobPutBlobFromUrlOptionalParams, context: Context - ): Promise { + ): Promise { throw new NotImplementedError(context.contextId); - } + } public async stageBlock( blockId: string, @@ -183,7 +183,7 @@ export default class BlockBlobHandler // options.blobHTTPHeaders = options.blobHTTPHeaders || {}; const contentMD5 = context.request!.getHeader("content-md5") ? options.transactionalContentMD5 || - context.request!.getHeader("content-md5") + context.request!.getHeader("content-md5") : undefined; this.validateBlockId(blockId, blobCtx); @@ -332,7 +332,7 @@ export default class BlockBlobHandler accountName, containerName, name: blobName, - snapshot: "", + snapshot: "", blobTags: options.blobTagsString === undefined ? 
undefined : getTagsFromString(options.blobTagsString, context.contextId!), properties: { lastModified: context.startTime!, @@ -412,7 +412,8 @@ export default class BlockBlobHandler blobName, options.snapshot, undefined, - options.leaseAccessConditions + options.leaseAccessConditions, + options.modifiedAccessConditions ); // TODO: Create uncommitted blockblob when stage block @@ -437,16 +438,16 @@ export default class BlockBlobHandler (options.listType.toLowerCase() === Models.BlockListType.All.toLowerCase() || options.listType.toLowerCase() === - Models.BlockListType.Uncommitted.toLowerCase()) + Models.BlockListType.Uncommitted.toLowerCase()) ) { response.uncommittedBlocks = res.uncommittedBlocks; } if ( options.listType === undefined || options.listType.toLowerCase() === - Models.BlockListType.All.toLowerCase() || + Models.BlockListType.All.toLowerCase() || options.listType.toLowerCase() === - Models.BlockListType.Committed.toLowerCase() + Models.BlockListType.Committed.toLowerCase() ) { response.committedBlocks = res.committedBlocks; } diff --git a/src/blob/handlers/ContainerHandler.ts b/src/blob/handlers/ContainerHandler.ts index e7703d46d..66c40af6d 100644 --- a/src/blob/handlers/ContainerHandler.ts +++ b/src/blob/handlers/ContainerHandler.ts @@ -368,7 +368,48 @@ export default class ContainerHandler extends BaseHandler public async filterBlobs(options: Models.ContainerFilterBlobsOptionalParams, context: Context ): Promise { - throw new NotImplementedError(context.contextId!); + const blobCtx = new BlobStorageContext(context); + const accountName = blobCtx.account!; + const containerName = blobCtx.container!; + await this.metadataStore.checkContainerExist( + context, + accountName, + containerName + ); + + const request = context.request!; + const marker = options.marker; + options.marker = options.marker || ""; + if ( + options.maxresults === undefined || + options.maxresults > DEFAULT_LIST_BLOBS_MAX_RESULTS + ) { + options.maxresults = 
DEFAULT_LIST_BLOBS_MAX_RESULTS; + } + + const [blobs, nextMarker] = await this.metadataStore.filterBlobs( + context, + accountName, + containerName, + options.where, + options.maxresults, + marker, + ); + + const serviceEndpoint = `${request.getEndpoint()}/${accountName}`; + const response: Models.ContainerFilterBlobsResponse = { + statusCode: 200, + requestId: context.contextId, + version: BLOB_API_VERSION, + date: context.startTime, + serviceEndpoint, + where: options.where!, + blobs: blobs, + clientRequestId: options.requestId, + nextMarker: `${nextMarker || ""}` + }; + + return response; } /** diff --git a/src/blob/handlers/ServiceHandler.ts b/src/blob/handlers/ServiceHandler.ts index 27a2d3f96..c7cb5010e 100644 --- a/src/blob/handlers/ServiceHandler.ts +++ b/src/blob/handlers/ServiceHandler.ts @@ -6,6 +6,7 @@ import IServiceHandler from "../generated/handlers/IServiceHandler"; import { parseXML } from "../generated/utils/xml"; import { BLOB_API_VERSION, + DEFAULT_LIST_BLOBS_MAX_RESULTS, DEFAULT_LIST_CONTAINERS_MAX_RESULTS, EMULATOR_ACCOUNT_ISHIERARCHICALNAMESPACEENABLED, EMULATOR_ACCOUNT_KIND, @@ -22,8 +23,7 @@ import { Readable } from "stream"; import { OAuthLevel } from "../../common/models"; import { BEARER_TOKEN_PREFIX } from "../../common/utils/constants"; import { decode } from "jsonwebtoken"; -import { getUserDelegationKeyValue } from "../utils/utils"; -import NotImplementedError from "../errors/NotImplementedError"; +import { getUserDelegationKeyValue } from "../utils/utils" /** * ServiceHandler handles Azure Storage Blob service related requests. 
@@ -373,10 +373,44 @@ export default class ServiceHandler extends BaseHandler return this.getAccountInfo(context); } - public filterBlobs( + public async filterBlobs( options: Models.ServiceFilterBlobsOptionalParams, context: Context ): Promise { - throw new NotImplementedError(context.contextId); + const blobCtx = new BlobStorageContext(context); + const accountName = blobCtx.account!; + + const request = context.request!; + const marker = options.marker; + options.marker = options.marker || ""; + if ( + options.maxresults === undefined || + options.maxresults > DEFAULT_LIST_BLOBS_MAX_RESULTS + ) { + options.maxresults = DEFAULT_LIST_BLOBS_MAX_RESULTS; + } + + const [blobs, nextMarker] = await this.metadataStore.filterBlobs( + context, + accountName, + undefined, + options.where, + options.maxresults, + marker, + ); + + const serviceEndpoint = `${request.getEndpoint()}/${accountName}`; + const response: Models.ServiceFilterBlobsResponse = { + statusCode: 200, + requestId: context.contextId, + version: BLOB_API_VERSION, + date: context.startTime, + serviceEndpoint, + where: options.where!, + blobs: blobs, + clientRequestId: options.requestId, + nextMarker: `${nextMarker || ""}` + }; + return response; } } diff --git a/src/blob/persistence/FilterBlobPage.ts b/src/blob/persistence/FilterBlobPage.ts new file mode 100644 index 000000000..5fda2a210 --- /dev/null +++ b/src/blob/persistence/FilterBlobPage.ts @@ -0,0 +1,128 @@ + +/** + * This implements a page of blob results. + * When maxResults is smaller than the number of prefixed items in the metadata source, multiple reads from + * the source may be necessary. 
+ * + * @export + * @class FilterBlobPage + */ +export default class FilterBlobPage { + readonly maxResults: number; + + filterBlobItems: FilterBlobType[] = []; + latestMarker: string = ""; + + // isFull indicates we could only (maybe) add a prefix + private isFull: boolean = false; + + // isExhausted indicates nothing more should be added + private isExhausted: boolean = false; + + constructor(maxResults: number) { + this.maxResults = maxResults; + } + + /** + * Empty the page (useful in unit tests) + * + */ + public reset() { + this.filterBlobItems.splice(0); + this.isFull = false; + this.isExhausted = false; + this.latestMarker = ""; + } + + private updateFull() { + this.isFull = (this.filterBlobItems.length === this.maxResults); + } + + /** + * addItem will add to the blob list if possible and update the full/exhausted state of the page + */ + private addItem(item: FilterBlobType): boolean { + if (this.isExhausted) { + return false; + } + let added: boolean = false; + if (! this.isFull) { + this.filterBlobItems.push(item); + added = true; + } + this.updateFull(); + + // if a blob causes fullness the next item read cannot be squashed only duplicate prefixes can + this.isExhausted = this.isFull; + return added; + } + + /** + * Add a BlobType item to the appropriate collection, update the marker + * + * When the page becomes full, items may still be added iff the item is existing prefix + * + * Return the number of items added + */ + private add(name: string, item: FilterBlobType): boolean { + if (this.isExhausted) { + return false; + } + if (name < this.latestMarker) { + throw new Error("add received unsorted item. add must be called on sorted data"); + } + const marker = (name > this.latestMarker) ? 
name : this.latestMarker; + let added: boolean = false; + added = this.addItem(item); + if (added) { + this.latestMarker = marker; + } + return added; + } + + /** + * Iterate over an array blobs read from a source and add them until the page cannot accept new items + */ + private processList(docs: FilterBlobType[], nameFn: (item: FilterBlobType) => string): number { + let added: number = 0; + for (const item of docs) { + if (this.add(nameFn(item), item)) { + added++; + } + if (this.isExhausted) break; + } + return added; + } + + /** + * Fill the page if possible by using the provided reader function. + * + * For any BlobType, the name is used with delimiter to treat the item as a blob or + * a BlobPrefix for the list blobs result. + * + * This function will use the reader for BlobType to keep reading from a metadata + * data source until the source has no more items or the page cannot add any more items. + * + * Return the contents of the page, blobs, prefixes, and a continuation token if applicable + */ + public async fill( + reader: (offset: number) => Promise, + namer: (item: FilterBlobType) => string, + ): Promise<[FilterBlobType[], string]> { + let offset: number = 0; + let docs = await reader(offset); + let added: number = 0; + while (docs.length) { + added = this.processList(docs, namer); + offset += added; + if (added < this.maxResults) { + break; + } + docs = await reader(offset); + } + return [ + this.filterBlobItems, + added < docs.length ? 
this.latestMarker : "" + ]; + } +} diff --git a/src/blob/persistence/IBlobMetadataStore.ts b/src/blob/persistence/IBlobMetadataStore.ts index 12d91ca7d..39942b17d 100644 --- a/src/blob/persistence/IBlobMetadataStore.ts +++ b/src/blob/persistence/IBlobMetadataStore.ts @@ -4,6 +4,7 @@ import IDataStore from "../../common/IDataStore"; import IGCExtentProvider from "../../common/IGCExtentProvider"; import * as Models from "../generated/artifacts/models"; import Context from "../generated/Context"; +import { FilterBlobItem } from "../generated/artifacts/models"; /** * This model describes a chunk inside a persistency extent for a given extent ID. @@ -153,6 +154,8 @@ interface IGetBlobPropertiesRes { } export type GetBlobPropertiesRes = IGetBlobPropertiesRes; +export type FilterBlobModel = FilterBlobItem; + // The response model for each lease-related request. interface IBlobLeaseResponse { properties: Models.BlobPropertiesInternal; @@ -212,8 +215,8 @@ export type BlockModel = IBlockAdditionalProperties & PersistencyBlockModel; */ export interface IBlobMetadataStore extends IGCExtentProvider, - IDataStore, - ICleaner { + IDataStore, + ICleaner { /** * Update blob service properties. Create service properties if not exists in persistency layer. * @@ -502,6 +505,15 @@ export interface IBlobMetadataStore includeUncommittedBlobs?: boolean ): Promise<[BlobModel[], string | undefined]>; + filterBlobs( + context: Context, + account: string, + container?: string, + where?: string, + maxResults?: number, + marker?: string, + ): Promise<[FilterBlobModel[], string | undefined]>; + /** * Create blob item in persistency layer. Will replace if blob exists. 
* @@ -946,7 +958,8 @@ export interface IBlobMetadataStore blob: string, snapshot: string | undefined, isCommitted: boolean | undefined, - leaseAccessConditions: Models.LeaseAccessConditions | undefined + leaseAccessConditions: Models.LeaseAccessConditions | undefined, + modifiedAccessConditions: Models.ModifiedAccessConditions | undefined ): Promise<{ properties: Models.BlobPropertiesInternal; uncommittedBlocks: Models.Block[]; @@ -1078,7 +1091,7 @@ export interface IBlobMetadataStore listUncommittedBlockPersistencyChunks( marker?: string, maxResults?: number - ): Promise<[IExtentChunk[], string | undefined]>; + ): Promise<[IExtentChunk[], string | undefined]>; /** * Set blob tags. @@ -1103,7 +1116,7 @@ export interface IBlobMetadataStore leaseAccessConditions: Models.LeaseAccessConditions | undefined, tags: Models.BlobTags | undefined, modifiedAccessConditions?: Models.ModifiedAccessConditions - ): Promise; + ): Promise; /** * Get blob tags. @@ -1125,7 +1138,7 @@ export interface IBlobMetadataStore blob: string, snapshot: string | undefined, leaseAccessConditions: Models.LeaseAccessConditions | undefined, - modifiedAccessConditions?: Models.ModifiedAccessConditions + modifiedAccessConditions?: Models.ModifiedAccessConditions, ): Promise; } diff --git a/src/blob/persistence/LokiBlobMetadataStore.ts b/src/blob/persistence/LokiBlobMetadataStore.ts index dbf231c9b..c40eb72a6 100644 --- a/src/blob/persistence/LokiBlobMetadataStore.ts +++ b/src/blob/persistence/LokiBlobMetadataStore.ts @@ -47,6 +47,7 @@ import IBlobMetadataStore, { ChangeContainerLeaseResponse, ContainerModel, CreateSnapshotResponse, + FilterBlobModel, GetBlobPropertiesRes, GetContainerAccessPolicyResponse, GetContainerPropertiesResponse, @@ -62,7 +63,9 @@ import IBlobMetadataStore, { SetContainerAccessPolicyOptions } from "./IBlobMetadataStore"; import PageWithDelimiter from "./PageWithDelimiter"; -import { getBlobTagsCount, getTagsFromString } from "../utils/utils"; +import FilterBlobPage from 
"./FilterBlobPage"; +import { generateQueryBlobWithTagsWhereFunction } from "./QueryInterpreter/QueryInterpreter"; +import { getBlobTagsCount, getTagsFromString, toBlobTags } from "../utils/utils"; /** * This is a metadata source implementation for blob based on loki DB. @@ -821,6 +824,80 @@ export default class LokiBlobMetadataStore } } + public async filterBlobs( + context: Context, + account: string, + container?: string, + where?: string, + maxResults: number = DEFAULT_LIST_BLOBS_MAX_RESULTS, + marker: string = "", + ): Promise<[FilterBlobModel[], string | undefined]> { + const query: any = {}; + if (account !== undefined) { + query.accountName = account; + } + if (container !== undefined) { + query.containerName = container; + await this.checkContainerExist( + context, + account, + container + ); + } + + const filterFunction = generateQueryBlobWithTagsWhereFunction(context, where!); + + const coll = this.db.getCollection(this.BLOBS_COLLECTION); + const page = new FilterBlobPage(maxResults); + const readPage = async (offset: number): Promise => { + const doc = await coll + .chain() + .find(query) + .where((obj) => { + return obj.name > marker!; + }) + .where((obj) => { + return obj.snapshot === undefined || obj.snapshot === ''; + }) + .sort((obj1, obj2) => { + if (obj1.name === obj2.name) return 0; + if (obj1.name > obj2.name) return 1; + return -1; + }) + .offset(offset) + .limit(maxResults) + .data(); + + return doc.map((item) => { + let blobItem: FilterBlobModel; + blobItem = { + name: item.name, + containerName: item.containerName, + tags: item.blobTags + }; + return blobItem; + }).filter((blobItem) => { + const tagsMeetConditions = filterFunction(blobItem); + if (tagsMeetConditions.length !== 0) { + blobItem.tags = { blobTagSet: toBlobTags(tagsMeetConditions) }; + return true; + } + return false; + }); + }; + + const nameItem = (item: FilterBlobModel) => { + return item.name; + }; + + const [blobItems, nextMarker] = await page.fill(readPage, nameItem); + + 
return [ + blobItems, + nextMarker + ]; + } + public async listBlobs( context: Context, account: string, @@ -1810,9 +1887,11 @@ export default class LokiBlobMetadataStore ifUnmodifiedSince: options.sourceModifiedAccessConditions.sourceIfUnmodifiedSince, ifMatch: options.sourceModifiedAccessConditions.sourceIfMatch, - ifNoneMatch: options.sourceModifiedAccessConditions.sourceIfNoneMatch + ifNoneMatch: options.sourceModifiedAccessConditions.sourceIfNoneMatch, + ifTags: options.sourceModifiedAccessConditions.sourceIfTags }, - sourceBlob + sourceBlob, + true ); const destBlob = await this.getBlobWithLeaseUpdated( @@ -1996,7 +2075,8 @@ export default class LokiBlobMetadataStore ifUnmodifiedSince: options.sourceModifiedAccessConditions.sourceIfUnmodifiedSince, ifMatch: options.sourceModifiedAccessConditions.sourceIfMatch, - ifNoneMatch: options.sourceModifiedAccessConditions.sourceIfNoneMatch + ifNoneMatch: options.sourceModifiedAccessConditions.sourceIfNoneMatch, + // Storage service will ignore x-ms-source-if-tags header for copyFromUrl }, sourceBlob ); @@ -2572,7 +2652,8 @@ export default class LokiBlobMetadataStore blob: string, snapshot: string | undefined, isCommitted: boolean | undefined, - leaseAccessConditions: Models.LeaseAccessConditions | undefined + leaseAccessConditions: Models.LeaseAccessConditions | undefined, + modifiedAccessConditions: Models.ModifiedAccessConditions | undefined ): Promise<{ properties: Models.BlobPropertiesInternal; uncommittedBlocks: Models.Block[]; @@ -2586,6 +2667,8 @@ export default class LokiBlobMetadataStore context ); + validateReadConditions(context, modifiedAccessConditions, doc); + new BlobReadLeaseValidator(leaseAccessConditions).validate( new BlobLeaseAdapter(doc), context @@ -3392,6 +3475,8 @@ export default class LokiBlobMetadataStore true ); + validateReadConditions(context, modifiedAccessConditions, doc); + // When block blob don't have committed block, should return 404 if (!doc) { throw 
StorageErrorFactory.getBlobNotFound(context.contextId); diff --git a/src/blob/persistence/QueryInterpreter/IQueryContext.ts b/src/blob/persistence/QueryInterpreter/IQueryContext.ts new file mode 100644 index 000000000..3f42ca1a4 --- /dev/null +++ b/src/blob/persistence/QueryInterpreter/IQueryContext.ts @@ -0,0 +1 @@ +export type IQueryContext = any; \ No newline at end of file diff --git a/src/blob/persistence/QueryInterpreter/QueryInterpreter.ts b/src/blob/persistence/QueryInterpreter/QueryInterpreter.ts new file mode 100644 index 000000000..b52e62d7d --- /dev/null +++ b/src/blob/persistence/QueryInterpreter/QueryInterpreter.ts @@ -0,0 +1,83 @@ +import StorageError from "../../errors/StorageError"; +import StorageErrorFactory from "../../errors/StorageErrorFactory"; +import { BlobTags } from "../../generated/artifacts/models"; +import Context from "../../generated/Context"; +import { FilterBlobModel } from "../IBlobMetadataStore"; +import BinaryOperatorNode from "./QueryNodes/BinaryOperatorNode"; +import ExpressionNode from "./QueryNodes/ExpressionNode"; +import IQueryNode, { TagContent } from "./QueryNodes/IQueryNode"; +import parseQuery from "./QueryParser"; + +export default function executeQuery(context: FilterBlobModel, queryTree: IQueryNode): TagContent[] { + let tags: any = {}; + const blobTags = context.tags; + if (blobTags) { + let blobTagsValue: BlobTags; + if (typeof (blobTags) === 'string') { + blobTagsValue = JSON.parse(blobTags as any); + } + else { + blobTagsValue = blobTags; + } + blobTagsValue.blobTagSet.forEach((aTag) => { + tags[aTag.key] = aTag.value; + }) + } + tags["@container"] = context.containerName; + return queryTree.evaluate(tags) +} + +function countIdentifierReferences(queryTree: IQueryNode): number { + if (queryTree instanceof BinaryOperatorNode) { + return 1; + } + + if (queryTree instanceof ExpressionNode) { + return countIdentifierReferences(queryTree.child) + } + + return 0 +} + + +export function 
generateQueryBlobWithTagsWhereFunction( + requestContext: Context, + query: string | undefined, + conditionHeader?: string, +): (entity: any) => TagContent[] { + if (query === undefined) { + return () => { + return []; + } + } + + const queryTree = parseQuery(requestContext, query, conditionHeader); + + // Validates that the provided query tree represents a valid query. + // That is, a query containing at least one conditional expression, + // where every conditional expression operates on at least + // one column or built -in identifier(i.e.comparison between two constants is not allowed). + const identifierReferencesCount = countIdentifierReferences(queryTree); + if (identifierReferencesCount == 0) { + if (conditionHeader === undefined) { + throw new StorageError( + 400, + `InvalidQueryParameterValue`, + `Error parsing query at or near character position 1: expected an operator`, + requestContext.contextId!, + { + QueryParameterName: `where`, + QueryParameterValue: query + }); + } + else { + throw StorageErrorFactory.getInvalidHeaderValue( + requestContext.contextId!, { + HeaderName: conditionHeader, + HeaderValue: query + }); + } + } + + return (entity) => executeQuery(entity, queryTree); +} \ No newline at end of file diff --git a/src/blob/persistence/QueryInterpreter/QueryNodes/AndNode.ts b/src/blob/persistence/QueryInterpreter/QueryNodes/AndNode.ts new file mode 100644 index 000000000..48ffab431 --- /dev/null +++ b/src/blob/persistence/QueryInterpreter/QueryNodes/AndNode.ts @@ -0,0 +1,20 @@ +import { IQueryContext } from "../IQueryContext"; +import BinaryOperatorNode from "./BinaryOperatorNode"; +import { TagContent } from "./IQueryNode"; + +export default class AndNode extends BinaryOperatorNode { + get name(): string { + return `and` + } + + evaluate(context: IQueryContext): TagContent[] { + const leftContent = this.left.evaluate(context); + const rightContent = this.right.evaluate(context); + if (leftContent.length !== 0 && rightContent.length !== 0) { + 
return leftContent.concat(rightContent); + } + else { + return []; + } + } +} \ No newline at end of file diff --git a/src/blob/persistence/QueryInterpreter/QueryNodes/BinaryOperatorNode.ts b/src/blob/persistence/QueryInterpreter/QueryNodes/BinaryOperatorNode.ts new file mode 100644 index 000000000..101ffc47e --- /dev/null +++ b/src/blob/persistence/QueryInterpreter/QueryNodes/BinaryOperatorNode.ts @@ -0,0 +1,14 @@ +import { IQueryContext } from "../IQueryContext"; +import IQueryNode from "./IQueryNode"; + +export default abstract class BinaryOperatorNode implements IQueryNode { + constructor(public left: IQueryNode, public right: IQueryNode) { } + + abstract evaluate(context: IQueryContext): any + + abstract get name(): string + + toString(): string { + return `(${this.left.toString()} ${this.name} ${this.right.toString()})` + } +} \ No newline at end of file diff --git a/src/blob/persistence/QueryInterpreter/QueryNodes/ConstantNode.ts b/src/blob/persistence/QueryInterpreter/QueryNodes/ConstantNode.ts new file mode 100644 index 000000000..94b7236b2 --- /dev/null +++ b/src/blob/persistence/QueryInterpreter/QueryNodes/ConstantNode.ts @@ -0,0 +1,20 @@ +import { IQueryContext } from "../IQueryContext"; +import IQueryNode, { TagContent } from "./IQueryNode"; + +export default class ConstantNode implements IQueryNode { + constructor(private value: string) { } + + get name(): string { + return "constant" + } + + evaluate(_context: IQueryContext): TagContent[] { + return [{ + value: this.value + }]; + } + + toString(): string { + return JSON.stringify(this.value) + } +} \ No newline at end of file diff --git a/src/blob/persistence/QueryInterpreter/QueryNodes/EqualsNode.ts b/src/blob/persistence/QueryInterpreter/QueryNodes/EqualsNode.ts new file mode 100644 index 000000000..ffcf09cf9 --- /dev/null +++ b/src/blob/persistence/QueryInterpreter/QueryNodes/EqualsNode.ts @@ -0,0 +1,26 @@ +import { IQueryContext } from "../IQueryContext"; +import BinaryOperatorNode from 
"./BinaryOperatorNode"; +import { TagContent } from "./IQueryNode"; + +export default class EqualsNode extends BinaryOperatorNode { + get name(): string { + return `eq` + } + + evaluate(context: IQueryContext): TagContent[] { + const leftContent = this.left.evaluate(context); + const rightContent = this.right.evaluate(context); + + if (leftContent[0].value === rightContent[0].value) { + if (leftContent[0].key !== undefined) { + return leftContent; + } + else { + return rightContent; + } + } + else { + return []; + } + } +} \ No newline at end of file diff --git a/src/blob/persistence/QueryInterpreter/QueryNodes/ExpressionNode.ts b/src/blob/persistence/QueryInterpreter/QueryNodes/ExpressionNode.ts new file mode 100644 index 000000000..8c3180de1 --- /dev/null +++ b/src/blob/persistence/QueryInterpreter/QueryNodes/ExpressionNode.ts @@ -0,0 +1,18 @@ +import { IQueryContext } from "../IQueryContext"; +import IQueryNode, { TagContent } from "./IQueryNode"; + +export default class ExpressionNode implements IQueryNode { + constructor(public child: IQueryNode) { } + + get name(): string { + return "expression" + } + + evaluate(context: IQueryContext): TagContent[] { + return this.child.evaluate(context) + } + + toString(): string { + return `(${this.child.toString()})` + } +} \ No newline at end of file diff --git a/src/blob/persistence/QueryInterpreter/QueryNodes/GreaterThanEqualNode.ts b/src/blob/persistence/QueryInterpreter/QueryNodes/GreaterThanEqualNode.ts new file mode 100644 index 000000000..d2c79c375 --- /dev/null +++ b/src/blob/persistence/QueryInterpreter/QueryNodes/GreaterThanEqualNode.ts @@ -0,0 +1,27 @@ +import { IQueryContext } from "../IQueryContext"; +import BinaryOperatorNode from "./BinaryOperatorNode"; + +export default class GreaterThanEqualNode extends BinaryOperatorNode { + get name(): string { + return `gte` + } + + evaluate(context: IQueryContext): any { + const leftContent = this.left.evaluate(context); + const rightContent = 
this.right.evaluate(context); + + if (leftContent[0].value !== undefined + && rightContent[0].value !== undefined + && (leftContent[0].value >= rightContent[0].value)) { + if (leftContent[0].key !== undefined) { + return leftContent; + } + else { + return rightContent; + } + } + else { + return []; + } + } +} \ No newline at end of file diff --git a/src/blob/persistence/QueryInterpreter/QueryNodes/GreaterThanNode.ts b/src/blob/persistence/QueryInterpreter/QueryNodes/GreaterThanNode.ts new file mode 100644 index 000000000..3e461c20d --- /dev/null +++ b/src/blob/persistence/QueryInterpreter/QueryNodes/GreaterThanNode.ts @@ -0,0 +1,28 @@ +import { IQueryContext } from "../IQueryContext"; +import BinaryOperatorNode from "./BinaryOperatorNode"; +import { TagContent } from "./IQueryNode"; + +export default class GreaterThanNode extends BinaryOperatorNode { + get name(): string { + return `gt` + } + + evaluate(context: IQueryContext): TagContent[] { + const leftContent = this.left.evaluate(context); + const rightContent = this.right.evaluate(context); + + if (leftContent[0].value !== undefined + && rightContent[0].value !== undefined + && (leftContent[0].value > rightContent[0].value)) { + if (leftContent[0].key !== undefined) { + return leftContent; + } + else { + return rightContent; + } + } + else { + return []; + } + } +} \ No newline at end of file diff --git a/src/blob/persistence/QueryInterpreter/QueryNodes/IQueryNode.ts b/src/blob/persistence/QueryInterpreter/QueryNodes/IQueryNode.ts new file mode 100644 index 000000000..9f045961b --- /dev/null +++ b/src/blob/persistence/QueryInterpreter/QueryNodes/IQueryNode.ts @@ -0,0 +1,14 @@ +import { IQueryContext } from "../IQueryContext"; + +export interface TagContent { + key?: string; + value?: string; +} + +export default interface IQueryNode { + get name(): string + + evaluate(context: IQueryContext): TagContent[] + + toString(): string +} \ No newline at end of file diff --git 
a/src/blob/persistence/QueryInterpreter/QueryNodes/KeyNode.ts b/src/blob/persistence/QueryInterpreter/QueryNodes/KeyNode.ts new file mode 100644 index 000000000..dc97adf92 --- /dev/null +++ b/src/blob/persistence/QueryInterpreter/QueryNodes/KeyNode.ts @@ -0,0 +1,21 @@ +import { IQueryContext } from "../IQueryContext"; +import IQueryNode, { TagContent } from "./IQueryNode"; + +export default class KeyNode implements IQueryNode { + constructor(private identifier: string) { } + + get name(): string { + return "id" + } + + evaluate(context: IQueryContext): TagContent[] { + return [{ + key: this.identifier, + value: context[this.identifier] + }]; + } + + toString(): string { + return this.identifier; + } +} \ No newline at end of file diff --git a/src/blob/persistence/QueryInterpreter/QueryNodes/LessThanEqualNode.ts b/src/blob/persistence/QueryInterpreter/QueryNodes/LessThanEqualNode.ts new file mode 100644 index 000000000..c6c8ef1d8 --- /dev/null +++ b/src/blob/persistence/QueryInterpreter/QueryNodes/LessThanEqualNode.ts @@ -0,0 +1,27 @@ +import { IQueryContext } from "../IQueryContext"; +import BinaryOperatorNode from "./BinaryOperatorNode"; + +export default class LessThanEqualNode extends BinaryOperatorNode { + get name(): string { + return `lte` + } + + evaluate(context: IQueryContext): any { + const leftContent = this.left.evaluate(context); + const rightContent = this.right.evaluate(context); + + if (leftContent[0].value !== undefined + && rightContent[0].value !== undefined + && (leftContent[0].value <= rightContent[0].value)) { + if (leftContent[0].key !== undefined) { + return leftContent; + } + else { + return rightContent; + } + } + else { + return []; + } + } +} \ No newline at end of file diff --git a/src/blob/persistence/QueryInterpreter/QueryNodes/LessThanNode.ts b/src/blob/persistence/QueryInterpreter/QueryNodes/LessThanNode.ts new file mode 100644 index 000000000..d5d788927 --- /dev/null +++ 
b/src/blob/persistence/QueryInterpreter/QueryNodes/LessThanNode.ts @@ -0,0 +1,27 @@ +import { IQueryContext } from "../IQueryContext"; +import BinaryOperatorNode from "./BinaryOperatorNode"; + +export default class LessThanNode extends BinaryOperatorNode { + get name(): string { + return `lt` + } + + evaluate(context: IQueryContext): any { + const leftContent = this.left.evaluate(context); + const rightContent = this.right.evaluate(context); + + if (leftContent[0].value !== undefined + && rightContent[0].value !== undefined + && (leftContent[0].value < rightContent[0].value)) { + if (leftContent[0].key !== undefined) { + return leftContent; + } + else { + return rightContent; + } + } + else { + return []; + } + } +} \ No newline at end of file diff --git a/src/blob/persistence/QueryInterpreter/QueryNodes/NotEqualsNode.ts b/src/blob/persistence/QueryInterpreter/QueryNodes/NotEqualsNode.ts new file mode 100644 index 000000000..b757f9cac --- /dev/null +++ b/src/blob/persistence/QueryInterpreter/QueryNodes/NotEqualsNode.ts @@ -0,0 +1,28 @@ +import { IQueryContext } from "../IQueryContext"; +import BinaryOperatorNode from "./BinaryOperatorNode"; +import { TagContent } from "./IQueryNode"; + +export default class NotEqualsNode extends BinaryOperatorNode { + get name(): string { + return `ne` + } + + evaluate(context: IQueryContext): TagContent[] { + const leftContent = this.left.evaluate(context); + const rightContent = this.right.evaluate(context); + + if (leftContent[0].value !== undefined + && rightContent[0].value !== undefined + && (leftContent[0].value !== rightContent[0].value)) { + if (leftContent[0].key !== undefined) { + return leftContent; + } + else { + return rightContent; + } + } + else { + return []; + } + } +} \ No newline at end of file diff --git a/src/blob/persistence/QueryInterpreter/QueryNodes/OrNode.ts b/src/blob/persistence/QueryInterpreter/QueryNodes/OrNode.ts new file mode 100644 index 000000000..0337bab1d --- /dev/null +++ 
b/src/blob/persistence/QueryInterpreter/QueryNodes/OrNode.ts @@ -0,0 +1,20 @@ +import { IQueryContext } from "../IQueryContext"; +import BinaryOperatorNode from "./BinaryOperatorNode"; +import { TagContent } from "./IQueryNode"; + +export default class OrNode extends BinaryOperatorNode { + get name(): string { + return `or` + } + + evaluate(context: IQueryContext): TagContent[] { + const leftContent = this.left.evaluate(context); + const rightContent = this.right.evaluate(context); + if (leftContent.length !== 0 || rightContent.length !== 0) { + return leftContent.concat(rightContent); + } + else { + return []; + } + } +} \ No newline at end of file diff --git a/src/blob/persistence/QueryInterpreter/QueryParser.ts b/src/blob/persistence/QueryInterpreter/QueryParser.ts new file mode 100644 index 000000000..38cd9ed9b --- /dev/null +++ b/src/blob/persistence/QueryInterpreter/QueryParser.ts @@ -0,0 +1,606 @@ +import StorageError from "../../errors/StorageError"; +import StorageErrorFactory from "../../errors/StorageErrorFactory"; +import Context from "../../generated/Context"; +import AndNode from "./QueryNodes/AndNode"; +import ConstantNode from "./QueryNodes/ConstantNode"; +import EqualsNode from "./QueryNodes/EqualsNode"; +import ExpressionNode from "./QueryNodes/ExpressionNode"; +import GreaterThanEqualNode from "./QueryNodes/GreaterThanEqualNode"; +import GreaterThanNode from "./QueryNodes/GreaterThanNode"; +import IQueryNode from "./QueryNodes/IQueryNode"; +import KeyNode from "./QueryNodes/KeyNode"; +import LessThanEqualNode from "./QueryNodes/LessThanEqualNode"; +import LessThanNode from "./QueryNodes/LessThanNode"; +import NotEqualsNode from "./QueryNodes/NotEqualsNode"; +import OrNode from "./QueryNodes/OrNode"; + +/** + * This file is used to parse query string for Azure Blob filter by tags and x-ms-if-tags conditions. 
+ * https://learn.microsoft.com/en-us/azure/storage/blobs/storage-manage-find-blobs?tabs=azure-portal + * https://learn.microsoft.com/en-us/rest/api/storageservices/specifying-conditional-headers-for-blob-service-operations + */ + +enum ComparisonType { + Equal, + Greater, + Less, + NotEqual +} + +interface ComparisonNode { + key: string; + existedComparison: ComparisonType[]; +} + +export default function parseQuery( + requestContext: Context, + query: string, + conditionsHeader?: string): IQueryNode { + return new QueryParser(requestContext, query, conditionsHeader).visit() +} + +/** + * A recursive descent parser for Azure Blob filter by tags query syntax. + * + * This parser is implemented using a recursive descent strategy, which composes + * layers of syntax hierarchy, roughly corresponding to the structure of an EBNF + * grammar. Each layer of the hierarchy is implemented as a method which consumes + * the syntax for that layer, and then calls the next layer of the hierarchy. + * + * So for example, the syntax tree that we currently use is composed of: + * - QUERY := EXPRESSION + * - EXPRESSION := OR + * - OR := AND ("or" OR)* + * - AND := UNARY ("and" AND)* + * - UNARY := ("not")? EXPRESSION_GROUP + * - EXPRESSION_GROUP := ("(" EXPRESSION ")") | BINARY + * - BINARY := IDENTIFIER_OR_CONSTANT (OPERATOR IDENTIFIER_OR_CONSTANT)? 
+ * - IDENTIFIER_OR_CONSTANT := CONSTANT | IDENTIFIER + * - CONSTANT := STRING + */ +class QueryParser { + constructor( + requestContext: Context, + query: string, + conditionHeader?: string) { + this.queryString = query; + this.requestContext = requestContext; + this.query = new ParserContext(this.requestContext, query, conditionHeader); + this.conditionHeader = conditionHeader; + } + + private requestContext: Context; + private query: ParserContext; + private comparisonNodes: Record = {}; + private comparisonCount: number = 0; + private conditionHeader?: string; + private queryString: string; + + validateWithPreviousComparison(key: string, currentComparison: ComparisonType) { + if (this.conditionHeader) return; + if (currentComparison === ComparisonType.NotEqual) { + return; + } + + if (this.comparisonNodes[key]) { + for (let i = 0; i < this.comparisonNodes[key].existedComparison.length; ++i) { + if (currentComparison === ComparisonType.Equal) { + throw new Error("can't have multiple conditions for a single tag unless they define a range"); + } + + if (currentComparison === ComparisonType.Greater && + (this.comparisonNodes[key].existedComparison[i] === ComparisonType.Less + || this.comparisonNodes[key].existedComparison[i] === ComparisonType.Equal)) { + throw new Error("can't have multiple conditions for a single tag unless they define a range"); + } + + if (currentComparison === ComparisonType.Less && + (this.comparisonNodes[key].existedComparison[i] === ComparisonType.Greater + || this.comparisonNodes[key].existedComparison[i] === ComparisonType.Equal)) { + throw new Error("can't have multiple conditions for a single tag unless they define a range"); + } + } + } + + return; + } + + appendComparionNode(key: string, currentComparison: ComparisonType) { + if (this.conditionHeader) { + return; + } + + if (key !== '@container') { + if (!this.comparisonNodes.hasOwnProperty(key)) { + ++this.comparisonCount; + } + } + + if (this.comparisonCount > 10) { + throw new 
StorageError( + 400, + `InvalidQueryParameterValue`, + `Error parsing query: there can be at most 10 unique tags in a query`, + this.requestContext.contextId!, + { + QueryParameterName: `where`, + QueryParameterValue: this.queryString + }); + } + + if (this.comparisonNodes[key]) { + this.comparisonNodes[key].existedComparison.push(currentComparison); + } + else { + this.comparisonNodes[key] = { + key: key, + existedComparison: [currentComparison] + } + } + } + + /** + * Visits the root of the query syntax tree, returning the corresponding root node. + * + * @returns {IQueryNode} + */ + visit(): IQueryNode { + return this.visitQuery(); + } + + /** + * Visits the QUERY layer of the query syntax tree, returning the appropriate node. + * + * @returns {IQueryNode} + */ + private visitQuery(): IQueryNode { + const tree = this.visitExpression(); + + this.query.skipWhitespace(); + this.query.assertEndOfQuery(); + + return tree; + } + + /** + * Visits the EXPRESSION layer of the query syntax tree, returning the appropriate node. + * + * EXPRESSION := OR + * + * @returns {IQueryNode} + */ + private visitExpression(): IQueryNode { + return this.visitOr(); + } + + /** + * Visits the OR layer of the query syntax tree, returning the appropriate node. + * + * OR := AND ("or" OR)* + * + * @returns {IQueryNode} + */ + private visitOr(): IQueryNode { + const left = this.visitAnd(); + + this.query.skipWhitespace(); + if (this.query.consume("or", true)) { + if (!this.conditionHeader) { + this.query.throw(`unexpected or`); + } + const right = this.visitOr(); + return new OrNode(left, right); + } else { + return left; + } + } + + /** + * Visits the AND layer of the query syntax tree, returning the appropriate node. 
+ * + * AND := UNARY ("and" AND)* + * + * @returns {IQueryNode} + */ + private visitAnd(): IQueryNode { + const left = this.visitUnary(); + + this.query.skipWhitespace(); + if (this.query.consume("and", true)) { + const right = this.visitAnd(); + + return new AndNode(left, right); + } else { + return left; + } + } + + /** + * Visits the UNARY layer of the query syntax tree, returning the appropriate node. + * + * UNARY := ("not")? EXPRESSION_GROUP + * + * @returns {IQueryNode} + */ + private visitUnary(): IQueryNode { + this.query.skipWhitespace(); + const right = this.visitExpressionGroup() + return right; + } + + /** + * Visits the EXPRESSION_GROUP layer of the query syntax tree, returning the appropriate node. + * + * EXPRESSION_GROUP := ("(" OR ")") | BINARY + * + * @returns {IQueryNode} + */ + private visitExpressionGroup(): IQueryNode { + this.query.skipWhitespace(); + if (this.query.consume("(")) { + const child = this.visitExpression() + + this.query.skipWhitespace(); + this.query.consume(")") || this.query.throw(`Expected a ')' to close the expression group, but found '${this.query.peek()}' instead.`) + + return new ExpressionNode(child) + } else { + return this.visitBinary() + } + } + + /** + * Visits the BINARY layer of the query syntax tree, returning the appropriate node. + * + * BINARY := IDENTIFIER_OR_CONSTANT (OPERATOR IDENTIFIER_OR_CONSTANT)? 
+ * + * @returns {IQueryNode} + */ + private visitBinary(): IQueryNode { + const left = this.visitKey(); + + this.query.skipWhitespace(); + const operator = this.query.consumeOneOf(true, "=", ">=", "<=", "<>", ">", "<") + if (operator) { + const right = this.visitValue(); + + switch (operator) { + case "=": + this.validateWithPreviousComparison(left.toString(), ComparisonType.Equal); + this.appendComparionNode(left.toString(), ComparisonType.Equal); + return new EqualsNode(left, right); + case "<>": + if (!this.conditionHeader) { + this.query.throw(`unexpected <>`); + } + this.validateWithPreviousComparison(left.toString(), ComparisonType.NotEqual); + this.appendComparionNode(left.toString(), ComparisonType.NotEqual); + return new NotEqualsNode(left, right); + case ">=": + this.validateWithPreviousComparison(left.toString(), ComparisonType.Greater); + this.appendComparionNode(left.toString(), ComparisonType.Greater); + return new GreaterThanEqualNode(left, right); + case ">": + this.validateWithPreviousComparison(left.toString(), ComparisonType.Greater); + this.appendComparionNode(left.toString(), ComparisonType.Greater); + return new GreaterThanNode(left, right); + case "<": + this.validateWithPreviousComparison(left.toString(), ComparisonType.Less); + this.appendComparionNode(left.toString(), ComparisonType.Less); + return new LessThanNode(left, right); + case "<=": + this.validateWithPreviousComparison(left.toString(), ComparisonType.Less); + this.appendComparionNode(left.toString(), ComparisonType.Less); + return new LessThanEqualNode(left, right); + } + } + + return left; + } + + /** + * Visits the IDENTIFIER_OR_CONSTANT layer of the query syntax tree, returning the appropriate node. 
+ * + * IDENTIFIER_OR_CONSTANT := CONSTANT | IDENTIFIER + * + * @returns {IQueryNode} + */ + private visitValue(): IQueryNode { + this.query.skipWhitespace(); + + if (`'`.includes(this.query.peek())) { + return this.visitString(); + } + this.query.throw('expecting tag value'); + } + + private ContainsInvalidTagKeyCharacter(key: string): boolean { + for (let c of key) { + if (!(c >= 'a' && c <= 'z' || + c >= 'A' && c <= 'Z' || + c >= '0' && c <= '9' || + c == '_')) { + return true; + } + } + return false; + } + + private validateKey(key: string) { + if (key.startsWith("@")) { + if (this.conditionHeader) { + this.query.throw(""); + } + + if (key !== "@container") { + this.query.throw(`unsupported parameter '${key}'`); + } + // Key is @container, no need for further check. + return; + } + + if (!this.conditionHeader && ((key.length == 0) || (key.length > 128))) { + this.query.throw('tag must be between 1 and 128 characters in length'); + } + if (this.ContainsInvalidTagKeyCharacter(key)) { + this.query.throw(`unexpected '${key}'`); + } + } + + private validateValue(value: string) { + if (!this.conditionHeader && (value.length > 256)) { + this.query.throw(`tag value must be between 0 and 256 characters in length`); + } + for (let c of value) { + if (!(c >= 'a' && c <= 'z' || + c >= 'A' && c <= 'Z' || + c >= '0' && c <= '9' || + c == ' ' || + c == '+' || + c == '-' || + c == '.' || + c == '/' || + c == ':' || + c == '=' || + c == '_')) { + this.query.throw(`'${c}' not permitted in tag name or value`); + } + } + } + + /** + * Visits the STRING layer of the query syntax tree, returning the appropriate node. + * + * Strings are wrapped in either single quotes (') or double quotes (") and may contain + * doubled-up quotes to introduce a literal. + */ + private visitString(isAKey: boolean = false): IQueryNode { + const openCharacter = this.query.take() + + /** + * Strings are terminated by the same character that opened them. 
+ * But we also allow doubled-up characters to represent a literal, which means we need to only terminate a string + * when we receive an odd-number of closing characters followed by a non-closing character. + * + * Conceptually, this is represented by the following state machine: + * + * - start: normal + * - normal+(current: !') -> normal + * - normal+(current: ', next: ') -> escaping + * - normal+(current: ', next: !') -> end + * - escaping+(current: ') -> normal + * + * We can implement this using the state field of the `take` method's predicate. + */ + const content = this.query.take((c, peek, state) => { + if (state === "escaping") { + return "normal"; + } else if (c === openCharacter && peek === openCharacter) { + return "escaping"; + } else if (c !== openCharacter) { + return "normal"; + } else { + return false; + } + }); + + this.query.consume(openCharacter) || this.query.throw(`Expected a \`${openCharacter}\` to close the string, but found ${this.query.peek()} instead.`); + + if (isAKey) { + const keyName = content.replace(new RegExp(`${openCharacter}${openCharacter}`, 'g'), openCharacter); + this.validateKey(keyName); + return new KeyNode(keyName); + } + else { + const value = content.replace(new RegExp(`${openCharacter}${openCharacter}`, 'g'), openCharacter); + this.validateValue(value); + return new ConstantNode(value); + } + } + + /** + * Visits the IDENTIFIER layer of the query syntax tree, returning the appropriate node. + * + * Identifiers are a sequence of characters which are not whitespace. + * + * @returns {IQueryNode} + */ + private visitKey(): IQueryNode { + // A key name can be surrounded by double quotes. 
+ if (`"`.includes(this.query.peek())) { + return this.visitString(true); + } + else { + const identifier = this.query.take( + c => !!c.trim() && c !== '=' && c != '>' && c !== '<' + ) || this.query.throw(`Expected a valid identifier, but found '${this.query.peek()}' instead.`); + this.validateKey(identifier); + return new KeyNode(identifier) + } + } +} + +/** + * Provides the logic and helper functions for consuming tokens from a query string. + * This includes low level constructs like peeking at the next character, consuming a + * specific sequence of characters, and skipping whitespace. + */ +export class ParserContext { + constructor( + private requestContext: Context, + private query: string, + private conditionHeader?: string) { + } + private tokenPosition: number = 0; + + /** + * Asserts that the query has been fully consumed. + * + * This method should be called after the parser has finished consuming the known parts of the query. + * Any remaining query after this point is indicative of a syntax error. + */ + assertEndOfQuery() { + if (this.tokenPosition < this.query.length) { + this.throw(`Unexpected token '${this.peek()}'.`) + } + } + + /** + * Retrieves the next character in the query without advancing the parser. + * + * @returns {string} A single character, or `undefined` if the end of the query has been reached. + */ + peek(): string { + return this.query[this.tokenPosition] + } + + /** + * Advances the parser past any whitespace characters. + */ + skipWhitespace() { + while (this.query[this.tokenPosition] && !this.query[this.tokenPosition].trim()) { + this.tokenPosition++ + } + } + + /** + * Attempts to consume a given sequence of characters from the query, + * advancing the parser if the sequence is found. + * + * @param {string} sequence The sequence of characters which should be consumed. + * @param {boolean} ignoreCase Whether or not the case of the characters should be ignored. 
+ * @returns {boolean} `true` if the sequence was consumed, `false` otherwise. + */ + consume(sequence: string, ignoreCase: boolean = false): boolean { + const normalize = ignoreCase ? (s: string) => s.toLowerCase() : (s: string) => s; + + if (normalize(this.query.substring(this.tokenPosition, this.tokenPosition + sequence.length)) === normalize(sequence)) { + this.tokenPosition += sequence.length + return true + } + + return false + } + + /** + * Attempts to consume one of a given set of sequences from the query, + * advancing the parser if one of the sequences is found. + * + * Sequences are tested in the order they are provided, and the first + * sequence which is found is consumed. As such, it is important to + * avoid prefixes appearing before their longer counterparts. + * + * @param {boolean} ignoreCase Whether or not the case of the characters should be ignored. + * @param {string[]} options The list of character sequences which should be consumed. + * @returns {string | null} The sequence which was consumed, or `null` if none of the sequences were found. + */ + consumeOneOf(ignoreCase: boolean = false, ...options: string[]): string | null { + for (const option of options) { + if (this.consume(option, ignoreCase)) { + return option + } + } + + return null + } + + /** + * Consumes a sequence of characters from the query based on a character predicate function. + * + * The predicate function is called for each character in the query, and the sequence is + * consumed until the predicate returns `false` or the end of the query is reached. + * + * @param {Function} predicate The function which determines which characters should be consumed. + * @returns {string} The sequence of characters which were consumed. 
+ */ + take(predicate?: (char: string, peek: string, state: T | undefined) => T): string { + const start = this.tokenPosition + let until = this.tokenPosition + + if (predicate) { + let state: T | undefined; + while (this.query[until]) { + state = predicate(this.query[until], this.query[until + 1], state) + if (!state) { + break + } + + until++; + } + } else { + // If no predicate is provided, then just take one character + until++ + } + + this.tokenPosition = until + return this.query.substring(start, until) + } + + /** + * Consumes a sequence of characters from the query based on a character predicate function, + * and then consumes a terminating sequence of characters (throwing an exception if these are not found). + * + * This function is particularly useful for consuming sequences of characters which are surrounded + * by a prefix and suffix, such as strings. + * + * @param {string} prefix The prefix which should be consumed. + * @param {Function} predicate The function which determines which characters should be consumed. + * @param {string} suffix The suffix which should be consumed. + * @returns {string | null} The sequence of characters which were consumed, or `null` if the prefix was not found. + */ + takeWithTerminator(prefix: string, predicate: (char: string, peek: string, state: T | undefined) => T, suffix: string): string | null { + if (!this.consume(prefix)) { + return null; + } + + const value = this.take(predicate); + this.consume(suffix) || this.throw(`Expected "${suffix}" to close the "${prefix}...${suffix}", but found '${this.peek()}' instead.`); + + return value; + } + + /** + * Throws an exception with a message indicating the position of the parser in the query. + * @param {string} message The message to include in the exception. 
+ */ + throw(message: string): never { + if (this.conditionHeader) { + throw StorageErrorFactory.getInvalidHeaderValue( + this.requestContext.contextId!, { + HeaderName: this.conditionHeader, + HeaderValue: this.query + }); + } + else { + throw new StorageError( + 400, + `InvalidQueryParameterValue`, + `Error parsing query at or near character position ${this.tokenPosition}: ${message}`, + this.requestContext.contextId!, + { + QueryParameterName: `where`, + QueryParameterValue: this.query + }); + } + } +} \ No newline at end of file diff --git a/src/blob/persistence/SqlBlobMetadataStore.ts b/src/blob/persistence/SqlBlobMetadataStore.ts index 45666056f..9faa5144c 100644 --- a/src/blob/persistence/SqlBlobMetadataStore.ts +++ b/src/blob/persistence/SqlBlobMetadataStore.ts @@ -53,6 +53,7 @@ import IBlobMetadataStore, { ChangeContainerLeaseResponse, ContainerModel, CreateSnapshotResponse, + FilterBlobModel, GetBlobPropertiesRes, GetContainerAccessPolicyResponse, GetContainerPropertiesResponse, @@ -67,7 +68,9 @@ import IBlobMetadataStore, { SetContainerAccessPolicyOptions } from "./IBlobMetadataStore"; import PageWithDelimiter from "./PageWithDelimiter"; -import { getBlobTagsCount, getTagsFromString } from "../utils/utils"; +import FilterBlobPage from "./FilterBlobPage"; +import { getBlobTagsCount, getTagsFromString, toBlobTags } from "../utils/utils"; +import { generateQueryBlobWithTagsWhereFunction } from "./QueryInterpreter/QueryInterpreter"; // tslint:disable: max-classes-per-file class ServicesModel extends Model { } @@ -1227,6 +1230,79 @@ export default class SqlBlobMetadataStore implements IBlobMetadataStore { }); } + public async filterBlobs( + context: Context, + account: string, + container?: string, + where?: string, + maxResults: number = DEFAULT_LIST_BLOBS_MAX_RESULTS, + marker?: string, + ): Promise<[FilterBlobModel[], string | undefined]> { + return this.sequelize.transaction(async (t) => { + if (container) { + await this.assertContainerExists(context, 
account, container, t); + } + + let whereQuery: any; + if (container) { + whereQuery = { + accountName: account, + containerName: container + } + } + else { + whereQuery = { + accountName: account + }; + }; + + if (marker !== undefined) { + if (whereQuery.blobName !== undefined) { + whereQuery.blobName[Op.gt] = marker; + } else { + whereQuery.blobName = { + [Op.gt]: marker + }; + } + } + whereQuery.snapshot = ""; + whereQuery.deleting = 0; + + // fill the page by possibly querying multiple times + const page = new FilterBlobPage(maxResults); + + const nameItem = (item: BlobsModel): string => { + return this.getModelValue(item, "blobName", true); + }; + const filterFunction = generateQueryBlobWithTagsWhereFunction(context, where!); + + const readPage = async (off: number): Promise => { + return (await BlobsModel.findAll({ + where: whereQuery as any, + order: [["blobName", "ASC"]], + transaction: t, + limit: maxResults, + offset: off + })); + }; + + const [blobItems, nextMarker] = await page.fill(readPage, nameItem); + + const filterBlobModelMapper = (model: BlobsModel) => { + return this.convertDbModelToFilterBlobModel(model); + }; + + return [blobItems.map(filterBlobModelMapper).filter((blobItem) => { + const tagsMeetConditions = filterFunction(blobItem); + if (tagsMeetConditions.length !== 0) { + blobItem.tags = { blobTagSet: toBlobTags(tagsMeetConditions) }; + return true; + } + return false; + }), nextMarker]; + }); + } + public async listBlobs( context: Context, account: string, @@ -1446,7 +1522,8 @@ export default class SqlBlobMetadataStore implements IBlobMetadataStore { blob: string, snapshot: string = "", isCommitted?: boolean, - leaseAccessConditions?: Models.LeaseAccessConditions + leaseAccessConditions?: Models.LeaseAccessConditions, + modifiedAccessConditions?: Models.ModifiedAccessConditions ): Promise { return this.sequelize.transaction(async (t) => { await this.assertContainerExists(context, account, container, t); @@ -1462,6 +1539,14 @@ export 
default class SqlBlobMetadataStore implements IBlobMetadataStore { transaction: t }); + validateReadConditions( + context, + modifiedAccessConditions, + blobFindResult + ? this.convertDbModelToBlobModel(blobFindResult) + : undefined + ); + if (blobFindResult === null || blobFindResult === undefined) { throw StorageErrorFactory.getBlobNotFound(context.contextId); } @@ -2525,9 +2610,11 @@ export default class SqlBlobMetadataStore implements IBlobMetadataStore { ifUnmodifiedSince: options.sourceModifiedAccessConditions.sourceIfUnmodifiedSince, ifMatch: options.sourceModifiedAccessConditions.sourceIfMatch, - ifNoneMatch: options.sourceModifiedAccessConditions.sourceIfNoneMatch + ifNoneMatch: options.sourceModifiedAccessConditions.sourceIfNoneMatch, + ifTags: options.sourceModifiedAccessConditions.sourceIfTags, }, - sourceBlob + sourceBlob, + true ); const destBlob = await this.getBlobWithLeaseUpdated( @@ -3078,6 +3165,14 @@ export default class SqlBlobMetadataStore implements IBlobMetadataStore { }; } + private convertDbModelToFilterBlobModel(dbModel: BlobsModel): FilterBlobModel { + return { + containerName: this.getModelValue(dbModel, "containerName", true), + name: this.getModelValue(dbModel, "blobName", true), + tags: this.deserializeModelValue(dbModel, "blobTags") + }; + } + private convertDbModelToBlobModel(dbModel: BlobsModel): BlobModel { const contentProperties: IBlobContentProperties = this.convertDbModelToBlobContentProperties( dbModel @@ -3405,6 +3500,8 @@ export default class SqlBlobMetadataStore implements IBlobMetadataStore { blobFindResult ); + validateReadConditions(context, modifiedAccessConditions, blobModel); + if (!blobModel.isCommitted) { throw StorageErrorFactory.getBlobNotFound(context.contextId); } @@ -3414,6 +3511,12 @@ export default class SqlBlobMetadataStore implements IBlobMetadataStore { context ).validate(new BlobReadLeaseValidator(leaseAccessConditions)); + if (modifiedAccessConditions?.ifTags) { + const validateFunction = 
generateQueryBlobWithTagsWhereFunction(context, modifiedAccessConditions?.ifTags, 'x-ms-if-tags'); + if (!validateFunction(blobModel)) { + throw new Error("412"); + } + } return blobModel.blobTags; }); } diff --git a/src/blob/utils/utils.ts b/src/blob/utils/utils.ts index 38df653c6..0f7fe895c 100644 --- a/src/blob/utils/utils.ts +++ b/src/blob/utils/utils.ts @@ -3,6 +3,7 @@ import { createWriteStream, PathLike } from "fs"; import StorageErrorFactory from "../errors/StorageErrorFactory"; import { USERDELEGATIONKEY_BASIC_KEY } from "./constants"; import { BlobTag, BlobTags } from "@azure/storage-blob"; +import { TagContent } from "../persistence/QueryInterpreter/QueryNodes/IQueryNode"; export function checkApiVersion( inputApiVersion: string, @@ -153,7 +154,7 @@ export function getUserDelegationKeyValue( signedStartsOn: string, signedExpiresOn: string, signedVersion: string, -) : string { +): string { const stringToSign = [ signedObjectid, signedTenantid, @@ -167,19 +168,18 @@ export function getUserDelegationKeyValue( } export function getBlobTagsCount( - blobTags: BlobTags | undefined -) : number | undefined { + blobTags: BlobTags | undefined +): number | undefined { return (blobTags === undefined || blobTags?.blobTagSet.length === 0) ? 
undefined : blobTags?.blobTagSet.length } export function getTagsFromString(blobTagsString: string, contextID: string): BlobTags | undefined { - if (blobTagsString === '' || blobTagsString === undefined) - { + if (blobTagsString === '' || blobTagsString === undefined) { return undefined; } - let blobTags:BlobTag[] = []; + let blobTags: BlobTag[] = []; const rawTags = blobTagsString.split("&"); - rawTags.forEach((rawTag)=>{ + rawTags.forEach((rawTag) => { const tagpair = rawTag.split("="); blobTags.push({ // When the Blob tag is input with header, it's encoded, sometimes space will be encoded to "+" ("+" will be encoded to "%2B") @@ -190,28 +190,28 @@ export function getTagsFromString(blobTagsString: string, contextID: string): Bl }) validateBlobTag( { - blobTagSet:blobTags, + blobTagSet: blobTags, }, contextID ); return { - blobTagSet:blobTags, + blobTagSet: blobTags, }; } // validate as the limitation from https://learn.microsoft.com/en-us/rest/api/storageservices/set-blob-tags?tabs=azure-ad#request-body export function validateBlobTag(tags: BlobTags, contextID: string): void { - if (tags.blobTagSet.length > 10){ + if (tags.blobTagSet.length > 10) { throw StorageErrorFactory.getTagsTooLarge(contextID); } - tags.blobTagSet.forEach((tag)=>{ - if (tag.key.length == 0){ + tags.blobTagSet.forEach((tag) => { + if (tag.key.length == 0) { throw StorageErrorFactory.getEmptyTagName(contextID); } - if (tag.key.length > 128){ + if (tag.key.length > 128) { throw StorageErrorFactory.getTagsTooLarge(contextID); } - if (tag.value.length > 256){ + if (tag.value.length > 256) { throw StorageErrorFactory.getTagsTooLarge(contextID); } if (ContainsInvalidTagCharacter(tag.key)) { @@ -223,23 +223,37 @@ export function validateBlobTag(tags: BlobTags, contextID: string): void { }); } -function ContainsInvalidTagCharacter(s: string): boolean{ - for (let c of s) - { +function ContainsInvalidTagCharacter(s: string): boolean { + for (let c of s) { if (!(c >= 'a' && c <= 'z' || - c >= 'A' && c 
<= 'Z' || - c >= '0' && c <= '9' || - c == ' ' || - c == '+' || - c == '-' || - c == '.' || - c == '/' || - c == ':' || - c == '=' || - c == '_')) - { - return true; + c >= 'A' && c <= 'Z' || + c >= '0' && c <= '9' || + c == ' ' || + c == '+' || + c == '-' || + c == '.' || + c == '/' || + c == ':' || + c == '=' || + c == '_')) { + return true; } } - return false; + return false; +} + +export function toBlobTags(input: TagContent[]): BlobTag[] { + const tags: Record = {}; + input.forEach(element => { + if (element.key !== '@container') { + tags[element.key!] = element.value!; + } + }); + + return Object.entries(tags).map(([key, value]) => { + return { + key: key, + value: value + } + }); } \ No newline at end of file diff --git a/tests/blob/apis/appendblob.test.ts b/tests/blob/apis/appendblob.test.ts index c0ec6b97f..19ca562b8 100644 --- a/tests/blob/apis/appendblob.test.ts +++ b/tests/blob/apis/appendblob.test.ts @@ -1,7 +1,8 @@ import { StorageSharedKeyCredential, BlobServiceClient, - newPipeline + newPipeline, + Tags } from "@azure/storage-blob"; import assert = require("assert"); @@ -85,6 +86,32 @@ describe("AppendBlobAPIs", () => { assert.deepStrictEqual(properties.blobCommittedBlockCount, 0); }); + it("Create append blob with ifTags should work @loki", async () => { + await appendBlobClient.create(); + + const tags: Tags = { + tag1: 'val1', + tag2: 'val2' + } + + await appendBlobClient.setTags(tags); + + try { + await appendBlobClient.create({ + conditions: { + tagConditions: `tag1<>'val1'` + } + }); + assert.fail(); + } + catch (err) { + assert.deepStrictEqual((err as any).statusCode, 412); + assert.deepStrictEqual((err as any).code, 'ConditionNotMet'); + assert.deepStrictEqual((err as any).details.errorCode, 'ConditionNotMet'); + assert.ok((err as any).details.message.startsWith('The condition specified using HTTP conditional header(s) is not met.')); + } + }); + it("Create append blob override existing pageblob @loki", async () => { const pageBlobClient = 
blobClient.getPageBlobClient(); await pageBlobClient.create(512); @@ -383,6 +410,46 @@ describe("AppendBlobAPIs", () => { assert.deepStrictEqual(string, "abcdef123456T@"); }); + it("AppendBlock with ifTags should work @loki", async () => { + await appendBlobClient.create(); + + const tags: Tags = { + tag1: 'val1', + tag2: 'val2' + } + + await appendBlobClient.setTags(tags); + + try { + await appendBlobClient.appendBlock("123456", 6, { + conditions: { + tagConditions: `tag1<>'val1'` + } + }); + assert.fail("Should not reach here"); + } + catch (err) { + assert.deepStrictEqual((err as any).statusCode, 412); + assert.deepStrictEqual((err as any).code, 'ConditionNotMet'); + assert.deepStrictEqual((err as any).details.errorCode, 'ConditionNotMet'); + assert.ok((err as any).details.message.startsWith('The condition specified using HTTP conditional header(s) is not met.')); + } + await appendBlobClient.appendBlock("123456", 6, { + conditions: { + tagConditions: `tag1='val1'` + } + }); + + const response = await appendBlobClient.download(0, undefined, { + conditions: { + tagConditions: `tag1='val1'` + } + }); + const string = await bodyToString(response, response.contentLength); + + assert.deepStrictEqual(string, "123456"); + }); + it("Download append blob should work @loki", async () => { await appendBlobClient.create(); await appendBlobClient.appendBlock("abcdef", 6); @@ -413,7 +480,7 @@ describe("AppendBlobAPIs", () => { await appendBlobClient.appendBlock("T", 1); await appendBlobClient.appendBlock("@", 2); - const response = await snapshotAppendBlobURL.download(3, undefined, {rangeGetContentMD5: true}); + const response = await snapshotAppendBlobURL.download(3, undefined, { rangeGetContentMD5: true }); const string = await bodyToString(response); assert.deepStrictEqual(string, "def"); assert.deepEqual(response.contentMD5, await getMD5FromString("def")); @@ -430,7 +497,7 @@ describe("AppendBlobAPIs", () => { await appendBlobClient.delete(); - const response = await 
copiedAppendBlobClient.download(3, undefined, {rangeGetContentMD5: true}); + const response = await copiedAppendBlobClient.download(3, undefined, { rangeGetContentMD5: true }); const string = await bodyToString(response); assert.deepStrictEqual(string, "def"); assert.deepEqual(response.contentMD5, await getMD5FromString("def")); diff --git a/tests/blob/apis/blob.test.ts b/tests/blob/apis/blob.test.ts index 5574a4f81..6b33cedf4 100644 --- a/tests/blob/apis/blob.test.ts +++ b/tests/blob/apis/blob.test.ts @@ -3,7 +3,8 @@ import { StorageSharedKeyCredential, newPipeline, BlobServiceClient, - BlobItem + BlobItem, + Tags } from "@azure/storage-blob"; import assert = require("assert"); @@ -101,6 +102,82 @@ describe("BlobAPIs", () => { ); }); + it("download with ifTags condition @loki @sql", async () => { + const tags = { + tag1: "val1", + tag2: "val2", + }; + + await blobClient.setTags(tags); + try { + (await blobClient.download(undefined, undefined, { conditions: { tagConditions: `tag1='val11'` } })); + assert.fail("Should not reach here"); + } + catch (err) { + assert.deepStrictEqual((err as any).statusCode, 412); + assert.deepStrictEqual((err as any).code, 'ConditionNotMet'); + assert.deepStrictEqual((err as any).details.errorCode, 'ConditionNotMet'); + assert.ok((err as any).details.message.startsWith('The condition specified using HTTP conditional header(s) is not met.')); + } + }); + + it("getProperties with ifTags condition @loki @sql", async () => { + const tags = { + tag1: "val1", + tag2: "val2", + }; + + await blobClient.setTags(tags); + try { + (await blobClient.getProperties({ conditions: { tagConditions: `tag1='val11'` } })); + assert.fail("Should not reach here"); + } + catch (err) { + assert.deepStrictEqual((err as any).statusCode, 412); + assert.deepStrictEqual((err as any).details.errorCode, 'ConditionNotMet'); + } + }); + + it("setProperties with ifTags condition @loki @sql", async () => { + const tags = { + tag1: "val1", + tag2: "val2", + }; + + await 
blobClient.setTags(tags); + try { + (await blobClient.setHTTPHeaders({ blobContentType: 'contenttype/subtype' }, + { conditions: { tagConditions: `tag1='val11'` } })); + assert.fail("Should not reach here"); + } + catch (err) { + assert.deepStrictEqual((err as any).statusCode, 412); + assert.deepStrictEqual((err as any).code, 'ConditionNotMet'); + assert.deepStrictEqual((err as any).details.errorCode, 'ConditionNotMet'); + assert.ok((err as any).details.message.startsWith('The condition specified using HTTP conditional header(s) is not met.')); + } + }); + + it("setMetadata with ifTags condition @loki @sql", async () => { + const tags = { + tag1: "val1", + tag2: "val2", + }; + + await blobClient.setTags(tags); + try { + (await blobClient.setMetadata({ key1: 'val1' }, + { conditions: { tagConditions: `tag1='val11'` } })); + assert.fail("Should not reach here"); + } + catch (err) { + assert.deepStrictEqual((err as any).statusCode, 412); + assert.deepStrictEqual((err as any).code, 'ConditionNotMet'); + assert.deepStrictEqual((err as any).details.errorCode, 'ConditionNotMet'); + assert.ok((err as any).details.message.startsWith('The condition specified using HTTP conditional header(s) is not met.')); + } + }); + it("download should work with ifMatch value * @loki @sql", async () => { const result = await blobClient.download(0, undefined, { conditions: { @@ -372,6 +449,30 @@ describe("BlobAPIs", () => { assert.fail(); }); + it("Delete with ifTags should work @loki @sql", async () => { + const tags = { + tag1: "val1", + tag2: "val2", + }; + await blobClient.setTags(tags); + + try { + await blobClient.delete( + { + conditions: + { + tagConditions: `tag1 <> 'val1'` + } + } + ); + } catch (err) { + assert.deepStrictEqual((err as any).statusCode, 412); + assert.deepStrictEqual((err as any).code, 'ConditionNotMet'); + assert.deepStrictEqual((err as any).details.errorCode, 'ConditionNotMet'); + assert.ok((err as any).details.message.startsWith('The condition specified using 
HTTP conditional header(s) is not met.')); + } + }); + it("should create a snapshot from a blob @loki @sql", async () => { const result = await blobClient.createSnapshot(); assert.ok(result.snapshot); @@ -381,6 +482,28 @@ describe("BlobAPIs", () => { ); }); + it("Create a snapshot from a blob with ifTags @loki @sql", async () => { + const tags = { + tag1: "val1", + tag2: "val2", + }; + await blobClient.setTags(tags); + + try { + await blobClient.createSnapshot({ + conditions: { + tagConditions: `tag1 <> 'val1'` + } + }); + assert.fail("Should not reach here"); + } catch (err) { + assert.deepStrictEqual((err as any).statusCode, 412); + assert.deepStrictEqual((err as any).code, 'ConditionNotMet'); + assert.deepStrictEqual((err as any).details.errorCode, 'ConditionNotMet'); + assert.ok((err as any).details.message.startsWith('The condition specified using HTTP conditional header(s) is not met.')); + } + }); + it("should create a snapshot with metadata from a blob @loki @sql", async () => { const metadata = { meta1: "val1", @@ -479,9 +602,9 @@ describe("BlobAPIs", () => { const metadata = { "Content-SHA256": "a" }; - + // set metadata should fail - let hasError = false; + let hasError = false; try { await blobClient.setMetadata(metadata); } catch (error) { @@ -489,8 +612,7 @@ describe("BlobAPIs", () => { assert.strictEqual(error.code, 'InvalidMetadata'); hasError = true; } - if (!hasError) - { + if (!hasError) { assert.fail(); } @@ -560,6 +682,98 @@ describe("BlobAPIs", () => { await blobLeaseClient.releaseLease(); }); + it("lease blob with ifTags @loki @sql", async () => { + const tags = { + tag1: "val1", + tag2: "val2", + }; + await blobClient.setTags(tags); + + const guid = "ca761232ed4211cebacd00aa0057b223"; + const duration = 30; + blobLeaseClient = await blobClient.getBlobLeaseClient(guid); + try { + await blobLeaseClient.acquireLease(duration, + { + conditions: { + tagConditions: `tag1 <> 'val1'` + } + } + ); + assert.fail("Should not reach here"); + } catch 
(err) { + assert.deepStrictEqual((err as any).statusCode, 412); + assert.deepStrictEqual((err as any).code, 'ConditionNotMet'); + assert.deepStrictEqual((err as any).details.errorCode, 'ConditionNotMet'); + assert.ok((err as any).details.message.startsWith('The condition specified using HTTP conditional header(s) is not met.')); + } + + await blobLeaseClient.acquireLease(duration); + try { + await blobLeaseClient.renewLease( + { + conditions: { + tagConditions: `tag1 <> 'val1'` + } + }); + assert.fail("Should not reach here"); + } catch (err) { + assert.deepStrictEqual((err as any).statusCode, 412); + assert.deepStrictEqual((err as any).code, 'ConditionNotMet'); + assert.deepStrictEqual((err as any).details.errorCode, 'ConditionNotMet'); + assert.ok((err as any).details.message.startsWith('The condition specified using HTTP conditional header(s) is not met.')); + } + + try { + const newGuid = "3c7e72ebb4304526bc53d8ecef03798f"; + await blobLeaseClient.changeLease(newGuid, + { + conditions: { + tagConditions: `tag1 <> 'val1'` + } + }); + assert.fail("Should not reach here"); + } catch (err) { + assert.deepStrictEqual((err as any).statusCode, 412); + assert.deepStrictEqual((err as any).code, 'ConditionNotMet'); + assert.deepStrictEqual((err as any).details.errorCode, 'ConditionNotMet'); + assert.ok((err as any).details.message.startsWith('The condition specified using HTTP conditional header(s) is not met.')); + } + + try { + await blobLeaseClient.breakLease(3, + { + conditions: { + tagConditions: `tag1 <> 'val1'` + } + }); + assert.fail("Should not reach here"); + } catch (err) { + assert.deepStrictEqual((err as any).statusCode, 412); + assert.deepStrictEqual((err as any).code, 'ConditionNotMet'); + assert.deepStrictEqual((err as any).details.errorCode, 'ConditionNotMet'); + assert.ok((err as any).details.message.startsWith('The condition specified using HTTP conditional header(s) is not met.')); + } + + try { + await blobLeaseClient.releaseLease( + { + conditions: 
{ + tagConditions: `tag1 <> 'val1'` + } + } + ); + assert.fail("Should not reach here"); + } catch (err) { + assert.deepStrictEqual((err as any).statusCode, 412); + assert.deepStrictEqual((err as any).code, 'ConditionNotMet'); + assert.deepStrictEqual((err as any).details.errorCode, 'ConditionNotMet'); + assert.ok((err as any).details.message.startsWith('The condition specified using HTTP conditional header(s) is not met.')); + } + + await blobLeaseClient.releaseLease(); + }); + it("releaseLease @loki @sql", async () => { const guid = "ca761232ed4211cebacd00aa0057b223"; const duration = -1; @@ -724,6 +938,30 @@ describe("BlobAPIs", () => { assert.deepStrictEqual(result.contentLanguage, contentLanguage); }); + it("Settier with ifTags should work @loki @sql", async () => { + const tags = { + tag1: "val1", + tag2: "val2", + }; + await blobClient.setTags(tags); + + try { + await blobClient.setAccessTier("Cool", + { + conditions: + { + tagConditions: `tag1 <> 'val1'` + } + } + ); + } catch (err) { + assert.deepStrictEqual((err as any).statusCode, 412); + assert.deepStrictEqual((err as any).code, 'ConditionNotMet'); + assert.deepStrictEqual((err as any).details.errorCode, 'ConditionNotMet'); + assert.ok((err as any).details.message.startsWith('The condition specified using HTTP conditional header(s) is not met.')); + } + }); + it("setTier set default to cool @loki @sql", async () => { // Created Blob should have accessTierInferred as true in Get/list let properties = await blockBlobClient.getProperties(); @@ -864,7 +1102,7 @@ describe("BlobAPIs", () => { ); } }); - + it("Upload blob with accesstier should get accessTierInferred as false @loki", async () => { const blobName = getUniqueName("blob"); @@ -874,7 +1112,7 @@ describe("BlobAPIs", () => { const properties = await blobClient.getProperties(); assert.equal(false, properties.accessTierInferred); - + blobClient.delete(); }); @@ -980,6 +1218,41 @@ describe("BlobAPIs", () => { ); }); + it("Copy blob with ifTags should 
work @loki", async () => { + const sourceBlob = getUniqueName("blob"); + const destBlob = getUniqueName("blob"); + + const sourceBlobClient = containerClient.getBlockBlobClient(sourceBlob); + const destBlobClient = containerClient.getBlockBlobClient(destBlob); + + await sourceBlobClient.upload("hello", 5); + await destBlobClient.upload("start", 5); + + const tags = { + tag1: "val1", + tag2: "val2", + }; + await sourceBlobClient.setTags(tags); + await destBlobClient.setTags(tags); + + try { + await destBlobClient.beginCopyFromURL( + sourceBlobClient.url, + { + conditions: + { + tagConditions: `tag1 <> 'val1'` + } + } + ); + } catch (err) { + assert.deepStrictEqual((err as any).statusCode, 412); + assert.deepStrictEqual((err as any).code, 'ConditionNotMet'); + assert.deepStrictEqual((err as any).details.errorCode, 'ConditionNotMet'); + assert.ok((err as any).details.message.startsWith('The condition specified using HTTP conditional header(s) is not met.')); + } + }); + it("Copy blob should work to override metadata @loki", async () => { const sourceBlob = getUniqueName("blob"); const destBlob = getUniqueName("blob"); @@ -1015,15 +1288,14 @@ describe("BlobAPIs", () => { await sourceBlobClient.setAccessTier("Archive"); // Copy from Archive blob without accesstier will fail - let hasError = false; + let hasError = false; try { await destBlobClient.beginCopyFromURL(sourceBlobClient.url); } catch (error) { assert.deepStrictEqual(error.statusCode, 409); hasError = true; } - if (!hasError) - { + if (!hasError) { assert.fail(); } @@ -1178,11 +1450,10 @@ describe("BlobAPIs", () => { try { await destBlobClient.beginCopyFromURL('/devstoreaccount1/container78/blob125') - } - catch (error) - { + } + catch (error) { assert.deepStrictEqual(error.statusCode, 400); - assert.deepStrictEqual(error.code, 'InvalidHeaderValue'); + assert.deepStrictEqual(error.code, 'InvalidHeaderValue'); return; } assert.fail(); @@ -1227,14 +1498,14 @@ describe("BlobAPIs", () => { // async copy try { 
await destBlobClient.beginCopyFromURL( - sourceBlobClient.url, - { - conditions: + sourceBlobClient.url, { - ifNoneMatch: "*" - } - }); - } + conditions: + { + ifNoneMatch: "*" + } + }); + } catch (error) { assert.deepStrictEqual(error.statusCode, 409); return; @@ -1244,14 +1515,14 @@ describe("BlobAPIs", () => { // Sync copy try { await destBlobClient.syncCopyFromURL( - sourceBlobClient.url, - { - conditions: + sourceBlobClient.url, { - ifNoneMatch: "*" - } - }); - } + conditions: + { + ifNoneMatch: "*" + } + }); + } catch (error) { assert.deepStrictEqual(error.statusCode, 409); return; @@ -1374,8 +1645,8 @@ describe("BlobAPIs", () => { assert.equal(getResult.leaseStatus, "locked"); await destLeaseClient.releaseLease(); - }); - + }); + it("Synchronized copy blob should work to override tag @loki", async () => { const tags = { tag1: "val1" @@ -1483,7 +1754,7 @@ describe("BlobAPIs", () => { result.contentDisposition, blobHTTPHeaders.blobContentDisposition ); - }); + }); it("set/get blob tag should work, with base blob or snapshot @loki @sql", async () => { const tags = { @@ -1510,13 +1781,13 @@ describe("BlobAPIs", () => { const blobClientSnapshot = blobClient.withSnapshot(snapshotResponse.snapshot!); let outputTags2 = (await blobClientSnapshot.getTags()).tags; assert.deepStrictEqual(outputTags2, tags); - + // Set/get tags on snapshot, base blob tags should not be impacted, etag, lastModified should not change var properties1 = await blobClientSnapshot.getProperties(); await blobClientSnapshot.setTags(tags2); outputTags2 = (await blobClientSnapshot.getTags()).tags; assert.deepStrictEqual(outputTags2, tags2); - var properties2 = await blobClientSnapshot.getProperties(); + var properties2 = await blobClientSnapshot.getProperties(); assert.deepStrictEqual(properties1.etag, properties2.etag); assert.deepStrictEqual(properties1.lastModified, properties2.lastModified); @@ -1539,12 +1810,12 @@ describe("BlobAPIs", () => { const blockBlobName1 = "block1"; const 
blockBlobName2 = "block2"; - + let blockBlobClient1 = containerClient.getBlockBlobClient(blockBlobName1); let blockBlobClient2 = containerClient.getBlockBlobClient(blockBlobName2); - + // Upload block blob with tags - await blockBlobClient1.upload(content, content.length, + await blockBlobClient1.upload(content, content.length, { tags: tags }); @@ -1554,7 +1825,7 @@ describe("BlobAPIs", () => { assert.deepStrictEqual(outputTags, tags); // Get blob properties, can get tag count - let blobProperties = await blockBlobClient1.getProperties(); + let blobProperties = await blockBlobClient1.getProperties(); assert.deepStrictEqual(blobProperties._response.parsedHeaders.tagCount, 2); // download blob, can get tag count @@ -1577,46 +1848,40 @@ describe("BlobAPIs", () => { ).value; let blobs = (await listResult).segment.blobItems; let blobNotChecked = blobs!.length; - blobs.forEach((blobItem: BlobItem) => - { - if (blobItem.name === blockBlobName1) - { + blobs.forEach((blobItem: BlobItem) => { + if (blobItem.name === blockBlobName1) { assert.deepStrictEqual(blobItem.properties.tagCount, 2); blobNotChecked--; } - if (blobItem.name === blockBlobName2) - { + if (blobItem.name === blockBlobName2) { assert.deepStrictEqual(blobItem.properties.tagCount, 3); blobNotChecked--; } }); - assert.deepStrictEqual(blobs!.length-2, blobNotChecked); + assert.deepStrictEqual(blobs!.length - 2, blobNotChecked); // listBlobsFlat with include tags can get tag listResult = ( await containerClient - .listBlobsFlat({includeTags: true}) + .listBlobsFlat({ includeTags: true }) .byPage() .next() ).value; blobs = (await listResult).segment.blobItems; blobNotChecked = blobs!.length; - blobs.forEach((blobItem: BlobItem) => - { - if (blobItem.name === blockBlobName1) - { + blobs.forEach((blobItem: BlobItem) => { + if (blobItem.name === blockBlobName1) { assert.deepStrictEqual(blobItem.properties.tagCount, 2); assert.deepStrictEqual(blobItem.tags, tags); blobNotChecked--; } - if (blobItem.name === 
blockBlobName2) - { + if (blobItem.name === blockBlobName2) { assert.deepStrictEqual(blobItem.properties.tagCount, 3); assert.deepStrictEqual(blobItem.tags, tags2); blobNotChecked--; } - }); - assert.deepStrictEqual(blobs!.length-2, blobNotChecked); + }); + assert.deepStrictEqual(blobs!.length - 2, blobNotChecked); // listBlobsByHierarchy can get tag count const delimiter = "/"; @@ -1628,46 +1893,40 @@ describe("BlobAPIs", () => { ).value; blobs = (await listResult).segment.blobItems; blobNotChecked = blobs!.length; - blobs.forEach((blobItem: BlobItem) => - { - if (blobItem.name === blockBlobName1) - { + blobs.forEach((blobItem: BlobItem) => { + if (blobItem.name === blockBlobName1) { assert.deepStrictEqual(blobItem.properties.tagCount, 2); blobNotChecked--; } - if (blobItem.name === blockBlobName2) - { + if (blobItem.name === blockBlobName2) { assert.deepStrictEqual(blobItem.properties.tagCount, 3); blobNotChecked--; } }); - assert.deepStrictEqual(blobs!.length-2, blobNotChecked); + assert.deepStrictEqual(blobs!.length - 2, blobNotChecked); // listBlobsByHierarchy include tags can get tag listResult = ( await containerClient - .listBlobsByHierarchy(delimiter, {includeTags: true}) + .listBlobsByHierarchy(delimiter, { includeTags: true }) .byPage() .next() ).value; blobs = (await listResult).segment.blobItems; blobNotChecked = blobs!.length; - blobs.forEach((blobItem: BlobItem) => - { - if (blobItem.name === blockBlobName1) - { + blobs.forEach((blobItem: BlobItem) => { + if (blobItem.name === blockBlobName1) { assert.deepStrictEqual(blobItem.properties.tagCount, 2); assert.deepStrictEqual(blobItem.tags, tags); blobNotChecked--; } - if (blobItem.name === blockBlobName2) - { + if (blobItem.name === blockBlobName2) { assert.deepStrictEqual(blobItem.properties.tagCount, 3); assert.deepStrictEqual(blobItem.tags, tags2); blobNotChecked--; } }); - assert.deepStrictEqual(blobs!.length-2, blobNotChecked); + assert.deepStrictEqual(blobs!.length - 2, blobNotChecked); // clean 
up blockBlobClient1.delete(); @@ -1691,24 +1950,24 @@ describe("BlobAPIs", () => { const pageBlobName2 = "page2"; const appendBlobName1 = "append1"; const appendBlobName2 = "append2"; - + let blockBlobClient1 = containerClient.getBlockBlobClient(blockBlobName1); let blockBlobClient2 = containerClient.getBlockBlobClient(blockBlobName2); let pageBlobClient1 = containerClient.getBlockBlobClient(pageBlobName1); let pageBlobClient2 = containerClient.getBlockBlobClient(pageBlobName2); let appendBlobClient1 = containerClient.getBlockBlobClient(appendBlobName1); let appendBlobClient2 = containerClient.getBlockBlobClient(appendBlobName2); - + // Upload blob with tags - await blockBlobClient1.upload(content, content.length, + await blockBlobClient1.upload(content, content.length, { tags: tags }); - await pageBlobClient1.upload(content, content.length, + await pageBlobClient1.upload(content, content.length, { tags: tags }); - await appendBlobClient1.upload(content, content.length, + await appendBlobClient1.upload(content, content.length, { tags: tags }); @@ -1749,28 +2008,25 @@ describe("BlobAPIs", () => { // listBlobsFlat with include tags can get tag let listResult = ( await containerClient - .listBlobsFlat({includeTags: true}) + .listBlobsFlat({ includeTags: true }) .byPage() .next() ).value; let blobs = (await listResult).segment.blobItems; let blobNotChecked = blobs!.length; - blobs.forEach((blobItem: BlobItem) => - { - if (blobItem.name === blockBlobName1 || blobItem.name === pageBlobName1 || blobItem.name === appendBlobName1 ) - { + blobs.forEach((blobItem: BlobItem) => { + if (blobItem.name === blockBlobName1 || blobItem.name === pageBlobName1 || blobItem.name === appendBlobName1) { assert.deepStrictEqual(blobItem.properties.tagCount, 2); assert.deepStrictEqual(blobItem.tags, tags); blobNotChecked--; } - if (blobItem.name === blockBlobName2 || blobItem.name === pageBlobName2 || blobItem.name === appendBlobName2 ) - { + if (blobItem.name === blockBlobName2 || 
blobItem.name === pageBlobName2 || blobItem.name === appendBlobName2) { assert.deepStrictEqual(blobItem.properties.tagCount, 3); assert.deepStrictEqual(blobItem.tags, tags2); blobNotChecked--; } - }); - assert.deepStrictEqual(blobs!.length-6, blobNotChecked); + }); + assert.deepStrictEqual(blobs!.length - 6, blobNotChecked); // clean up blockBlobClient1.delete(); @@ -1781,12 +2037,12 @@ describe("BlobAPIs", () => { appendBlobClient2.delete(); }); - it("set blob tag fail with invalid tag. @loki @sql", async () => { + it("set blob tag fail with invalid tag. @loki @sql", async () => { - const blockBlobName1 = "block1"; + const blockBlobName1 = "block1"; let blockBlobClient1 = containerClient.getBlockBlobClient(blockBlobName1); await blockBlobClient1.upload(content, content.length); - + // tag count should <= 10 const tooManyTags = { tag1: "val1", @@ -1800,7 +2056,7 @@ describe("BlobAPIs", () => { tag9: "val2", tag10: "val2", tag11: "val2", - }; + }; let statusCode = 0; try { await await blockBlobClient1.setTags(tooManyTags);; @@ -1819,7 +2075,7 @@ describe("BlobAPIs", () => { tag8: "val2", tag9: "val2", tag10: "val2", - }; + }; await blockBlobClient1.setTags(tags1); let outputTags = (await blockBlobClient1.getTags()).tags; assert.deepStrictEqual(outputTags, tags1); @@ -1827,7 +2083,7 @@ describe("BlobAPIs", () => { // key length should >0 and <= 128 const emptyKeyTags = { "": "123123123", - }; + }; statusCode = 0; try { await await blockBlobClient1.setTags(emptyKeyTags);; @@ -1837,7 +2093,7 @@ describe("BlobAPIs", () => { assert.deepStrictEqual(statusCode, 400); const tooLongKeyTags = { "key123401234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890012345678901234567890": "val1", - }; + }; statusCode = 0; try { await await blockBlobClient1.setTags(tooLongKeyTags);; @@ -1847,7 +2103,7 @@ describe("BlobAPIs", () => { assert.deepStrictEqual(statusCode, 400); let tags2 = { 
"key12301234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890012345678901234567890": "val1", - }; + }; await blockBlobClient1.setTags(tags2); outputTags = (await blockBlobClient1.getTags()).tags; assert.deepStrictEqual(outputTags, tags2); @@ -1855,10 +2111,10 @@ describe("BlobAPIs", () => { // value length should <= 256 const tooLongvalueTags = { tag1: "val12345678900123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789001234567890123456789001234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890012345678901234567890", - }; + }; statusCode = 0; try { - await blockBlobClient1.upload(content, content.length, + await blockBlobClient1.upload(content, content.length, { tags: tooLongvalueTags }); @@ -1868,8 +2124,8 @@ describe("BlobAPIs", () => { assert.deepStrictEqual(statusCode, 400); let tags3 = { tag1: "va12345678900123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789001234567890123456789001234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890012345678901234567890", - }; - await blockBlobClient1.upload(content, content.length, + }; + await blockBlobClient1.upload(content, content.length, { tags: tags3 }); @@ -1879,10 +2135,10 @@ describe("BlobAPIs", () => { // invalid char in key let invalidTags = { tag1: "abc%abc", - }; + }; statusCode = 0; try { - await blockBlobClient1.upload(content, content.length, + await blockBlobClient1.upload(content, content.length, { tags: invalidTags }); @@ -1893,10 +2149,10 @@ describe("BlobAPIs", () => { let invalidTags1 = { "abc#ew": "abc", - }; + }; statusCode = 0; try { - await blockBlobClient1.upload(content, content.length, + await blockBlobClient1.upload(content, content.length, { tags: invalidTags1 }); @@ -1907,8 +2163,8 @@ describe("BlobAPIs", () => { let tags4 = { "azAz09 +-./:=_": "azAz09 +-./:=_", - 
}; - await blockBlobClient1.upload(content, content.length, + }; + await blockBlobClient1.upload(content, content.length, { tags: tags4 }); @@ -1918,8 +2174,8 @@ describe("BlobAPIs", () => { // clean up blockBlobClient1.delete(); }); - - it("Set and get blob tags should work with lease condition @loki @sql", async () => { + + it("Set and get blob tags should work with lease condition @loki @sql", async () => { const guid = "ca761232ed4211cebacd00aa0057b223"; const leaseClient = blockBlobClient.getBlobLeaseClient(guid); await leaseClient.acquireLease(-1); @@ -1959,6 +2215,245 @@ describe("BlobAPIs", () => { await leaseClient.releaseLease(); }); + it("get blob tag with ifTags condition @loki @sql", async () => { + const tags = { + tag1: "val1", + tag2: "val2", + }; + await blobClient.setTags(tags); + + // Equal conditions + let outputTags1 = (await blobClient.getTags({ conditions: { tagConditions: `tag1='val1'` } })).tags; + assert.deepStrictEqual(outputTags1, tags); + + try { + (await blobClient.getTags({ conditions: { tagConditions: `tag1='val11'` } })).tags; + assert.fail("Should not reach here"); + } + catch (err) { + assert.deepStrictEqual((err as any).statusCode, 412); + assert.deepStrictEqual((err as any).code, 'ConditionNotMet'); + assert.deepStrictEqual((err as any).details.errorCode, 'ConditionNotMet'); + assert.ok((err as any).details.message.startsWith('The condition specified using HTTP conditional header(s) is not met.')); + } + + // Greater conditions + outputTags1 = (await blobClient.getTags({ conditions: { tagConditions: `tag1>'val'` } })).tags; + assert.deepStrictEqual(outputTags1, tags); + + try { + (await blobClient.getTags({ conditions: { tagConditions: `tag1>'val11'` } })).tags; + assert.fail("Should not reach here"); + } + catch (err) { + assert.deepStrictEqual((err as any).statusCode, 412); + assert.deepStrictEqual((err as any).code, 'ConditionNotMet'); + assert.deepStrictEqual((err as any).details.errorCode, 'ConditionNotMet'); + 
assert.ok((err as any).details.message.startsWith('The condition specified using HTTP conditional header(s) is not met.')); + } + + // Greater or equal conditions + outputTags1 = (await blobClient.getTags({ conditions: { tagConditions: `tag1>'val'` } })).tags; + assert.deepStrictEqual(outputTags1, tags); + + outputTags1 = (await blobClient.getTags({ conditions: { tagConditions: `tag1>='val1'` } })).tags; + assert.deepStrictEqual(outputTags1, tags); + + try { + (await blobClient.getTags({ conditions: { tagConditions: `tag1>='vam'` } })).tags; + assert.fail("Should not reach here"); + } + catch (err) { + assert.deepStrictEqual((err as any).statusCode, 412); + assert.deepStrictEqual((err as any).code, 'ConditionNotMet'); + assert.deepStrictEqual((err as any).details.errorCode, 'ConditionNotMet'); + assert.ok((err as any).details.message.startsWith('The condition specified using HTTP conditional header(s) is not met.')); + } + + // Less conditions + outputTags1 = (await blobClient.getTags({ conditions: { tagConditions: `tag1 <'val11'` } })).tags; + assert.deepStrictEqual(outputTags1, tags); + + outputTags1 = (await blobClient.getTags({ conditions: { tagConditions: `tag1< 'vam'` } })).tags; + assert.deepStrictEqual(outputTags1, tags); + + try { + (await blobClient.getTags({ conditions: { tagConditions: `tag1 < 'val1'` } })).tags; + assert.fail("Should not reach here"); + } + catch (err) { + assert.deepStrictEqual((err as any).statusCode, 412); + assert.deepStrictEqual((err as any).code, 'ConditionNotMet'); + assert.deepStrictEqual((err as any).details.errorCode, 'ConditionNotMet'); + assert.ok((err as any).details.message.startsWith('The condition specified using HTTP conditional header(s) is not met.')); + } + + // Less or equal conditions + outputTags1 = (await blobClient.getTags({ conditions: { tagConditions: `tag1 <'val11'` } })).tags; + assert.deepStrictEqual(outputTags1, tags); + + outputTags1 = (await blobClient.getTags({ conditions: { tagConditions: `tag1< 
'vam'` } })).tags; + assert.deepStrictEqual(outputTags1, tags); + + try { + (await blobClient.getTags({ conditions: { tagConditions: `tag1 < 'val1'` } })).tags; + assert.fail("Should not reach here"); + } + catch (err) { + assert.deepStrictEqual((err as any).statusCode, 412); + assert.deepStrictEqual((err as any).code, 'ConditionNotMet'); + assert.deepStrictEqual((err as any).details.errorCode, 'ConditionNotMet'); + assert.ok((err as any).details.message.startsWith('The condition specified using HTTP conditional header(s) is not met.')); + } + + try { + (await blobClient.getTags({ conditions: { tagConditions: `adfec` } })).tags; + assert.fail("Should not reach here"); + } + catch (err) { + assert.deepStrictEqual((err as any).statusCode, 400); + assert.deepStrictEqual((err as any).code, 'InvalidHeaderValue'); + assert.deepStrictEqual((err as any).details.errorCode, 'InvalidHeaderValue'); + assert.ok((err as any).details.message.startsWith('The value for one of the HTTP headers is not in the correct format.')); + } + + try { + (await blobClient.getTags({ conditions: { tagConditions: `@container='ab'` } })).tags; + assert.fail("Should not reach here"); + } + catch (err) { + assert.deepStrictEqual((err as any).statusCode, 400); + assert.deepStrictEqual((err as any).code, 'InvalidHeaderValue'); + assert.deepStrictEqual((err as any).details.errorCode, 'InvalidHeaderValue'); + assert.ok((err as any).details.message.startsWith('The value for one of the HTTP headers is not in the correct format.')); + } + }); + + it("get blob tag with ifTags condition - special char comparing @loki @sql", async () => { + const tags: Tags = { + key1: '1a', + key2: 'a1' + }; + await blobClient.setTags(tags); + + let queryString = `key1>'1 a'`; + let outputTags1 = (await blobClient.getTags({ conditions: { tagConditions: queryString } })).tags; + assert.deepStrictEqual(outputTags1, tags); + + queryString = `key2>'a 1'`; + outputTags1 = (await blobClient.getTags({ conditions: { tagConditions: 
queryString } })).tags; + assert.deepStrictEqual(outputTags1, tags); + + queryString = `key1>'1+a'`; + outputTags1 = (await blobClient.getTags({ conditions: { tagConditions: queryString } })).tags; + assert.deepStrictEqual(outputTags1, tags); + + queryString = `key2>'a+1'`; + outputTags1 = (await blobClient.getTags({ conditions: { tagConditions: queryString } })).tags; + assert.deepStrictEqual(outputTags1, tags); + + queryString = `key1>'1.a'`; + outputTags1 = (await blobClient.getTags({ conditions: { tagConditions: queryString } })).tags; + assert.deepStrictEqual(outputTags1, tags); + + queryString = `key2>'a.1'`; + outputTags1 = (await blobClient.getTags({ conditions: { tagConditions: queryString } })).tags; + assert.deepStrictEqual(outputTags1, tags); + }); + + it("get blob tag with long ifTags condition @loki @sql", async () => { + const tags = { + tag1: "val1", + tag2: "val2", + }; + + let queryString = `tag1 <> 'v0' `; + // Storage service may support more than 1000 compare expressions at most + // Azurite can support only 700 comparing expressions. 
+ for (let index = 1; index < 700; ++index) { + queryString += `and tag1 <> 'v${index}'`; + } + + await blobClient.setTags(tags); + const result = (await blobClient.getTags({ conditions: { tagConditions: queryString } })).tags; + assert.deepStrictEqual(tags, result); + }); + + it("get blob tag with invalid ifTags condition string @loki @sql", async () => { + const tags: Tags = { + key1: 'value1' + }; + await blobClient.setTags(tags); + + let queryString = `key111==value1`; + try { + (await blobClient.getTags({ conditions: { tagConditions: queryString } })).tags; + assert.fail("Should not reach here"); + } + catch (err) { + assert.deepStrictEqual((err as any).statusCode, 400); + assert.deepStrictEqual((err as any).code, 'InvalidHeaderValue'); + assert.deepStrictEqual((err as any).details.errorCode, 'InvalidHeaderValue'); + assert.ok((err as any).details.message.startsWith('The value for one of the HTTP headers is not in the correct format.')); + } + + // ifTags header doesn't support @container + queryString = `@container='value1'`; + try { + (await blobClient.getTags({ conditions: { tagConditions: queryString } })).tags; + assert.fail("Should not reach here"); + } + catch (err) { + assert.deepStrictEqual((err as any).statusCode, 400); + assert.deepStrictEqual((err as any).code, 'InvalidHeaderValue'); + assert.deepStrictEqual((err as any).details.errorCode, 'InvalidHeaderValue'); + assert.ok((err as any).details.message.startsWith('The value for one of the HTTP headers is not in the correct format.')); + } + + queryString = `key--1='value1'`; + try { + (await blobClient.getTags({ conditions: { tagConditions: queryString } })).tags; + assert.fail("Should not reach here"); + } + catch (err) { + assert.deepStrictEqual((err as any).statusCode, 400); + assert.deepStrictEqual((err as any).code, 'InvalidHeaderValue'); + assert.deepStrictEqual((err as any).details.errorCode, 'InvalidHeaderValue'); + assert.ok((err as any).details.message.startsWith('The value for one of the 
HTTP headers is not in the correct format.')); + } + + queryString = `key1='value$$##'`; + try { + (await blobClient.getTags({ conditions: { tagConditions: queryString } })).tags; + assert.fail("Should not reach here"); + } + catch (err) { + assert.deepStrictEqual((err as any).statusCode, 400); + assert.deepStrictEqual((err as any).code, 'InvalidHeaderValue'); + assert.deepStrictEqual((err as any).details.errorCode, 'InvalidHeaderValue'); + assert.ok((err as any).details.message.startsWith('The value for one of the HTTP headers is not in the correct format.')); + } + + // key length longer than 128 + queryString = `key12345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890<>'value1'`; + try { + (await blobClient.getTags({ conditions: { tagConditions: queryString } })).tags; + assert.fail("Should not reach here."); + } + catch (err) { + assert.deepStrictEqual((err as any).statusCode, 412); + assert.deepStrictEqual((err as any).code, 'ConditionNotMet'); + assert.deepStrictEqual((err as any).details.errorCode, 'ConditionNotMet'); + assert.ok((err as any).details.message.startsWith('The condition specified using HTTP conditional header(s) is not met.')); + } + + // Value length longer than 256 + queryString = `key1<>'value12345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890'`; + + const result = (await blobClient.getTags({ conditions: { tagConditions: queryString } })).tags; + assert.deepStrictEqual(result, tags); + }); + it("Acquire Lease on Breaking Lease status, if LeaseId not match, throw LeaseIdMismatchWithLease error @loki @sql", async () => { // TODO: implement the case later }); diff --git a/tests/blob/apis/blockblob.test.ts b/tests/blob/apis/blockblob.test.ts index 
a23a09b13..d282a3163 100644 --- a/tests/blob/apis/blockblob.test.ts +++ b/tests/blob/apis/blockblob.test.ts @@ -2,7 +2,8 @@ import { StorageSharedKeyCredential, BlobServiceClient, newPipeline, - BlobSASPermissions + BlobSASPermissions, + Tags } from "@azure/storage-blob"; import assert = require("assert"); import crypto = require("crypto"); @@ -92,6 +93,32 @@ describe("BlockBlobAPIs", () => { } }); + it("Block blob upload with ifTags should work @loki @sql", async () => { + await blockBlobClient.upload('a', 1); + + const tags: Tags = { + tag1: 'val1', + tag2: 'val2' + } + + await blockBlobClient.setTags(tags); + + try { + await blockBlobClient.upload('b', 1, { + conditions: { + tagConditions: `tag1<>'val1'` + } + }); + assert.fail(); + } + catch (err) { + assert.deepStrictEqual((err as any).statusCode, 412); + assert.deepStrictEqual((err as any).code, 'ConditionNotMet'); + assert.deepStrictEqual((err as any).details.errorCode, 'ConditionNotMet'); + assert.ok((err as any).details.message.startsWith('The condition specified using HTTP conditional header(s) is not met.')); + } + }); + it("upload with string body and default parameters @loki @sql", async () => { const body: string = getUniqueName("randomstring"); const result_upload = await blockBlobClient.upload(body, body.length); @@ -313,6 +340,34 @@ describe("BlockBlobAPIs", () => { ); }); + it("commitBlockList with ifTags @loki @sql", async () => { + const body = "HelloWorld"; + await blockBlobClient.upload(body, 10); + const tags: Tags = { + key1: 'value1' + }; + await blockBlobClient.setTags(tags); + await blockBlobClient.stageBlock(base64encode("1"), body, body.length); + await blockBlobClient.stageBlock(base64encode("2"), body, body.length); + try { + await blockBlobClient.commitBlockList([ + base64encode("1"), + base64encode("2") + ], { + conditions: { + tagConditions: `key1<>'value1'` + } + }); + assert.fail("Should not reach here."); + } + catch (err) { + assert.deepStrictEqual((err as any).statusCode, 
412); + assert.deepStrictEqual((err as any).code, 'ConditionNotMet'); + assert.deepStrictEqual((err as any).details.errorCode, 'ConditionNotMet'); + assert.ok((err as any).details.message.startsWith('The condition specified using HTTP conditional header(s) is not met.')); + } + }); + it("commitBlockList with previous committed blocks @loki @sql", async () => { const body = "HelloWorld"; await blockBlobClient.stageBlock(base64encode("1"), body, body.length); @@ -379,12 +434,12 @@ describe("BlockBlobAPIs", () => { it("Download a blob range should only return ContentMD5 when has request header x-ms-range-get-content-md5 @loki @sql", async () => { blockBlobClient.deleteIfExists(); - + await blockBlobClient.upload("abc", 0); const properties1 = await blockBlobClient.getProperties(); assert.deepEqual(properties1.contentMD5, await getMD5FromString("abc")); - + let result = await blockBlobClient.download(0, 6); assert.deepStrictEqual(await bodyToString(result, 3), "abc"); assert.deepStrictEqual(result.contentLength, 3); @@ -397,7 +452,7 @@ describe("BlockBlobAPIs", () => { assert.deepEqual(result.contentMD5, await getMD5FromString("abc")); assert.deepEqual(result.blobContentMD5, await getMD5FromString("abc")); - result = await blockBlobClient.download(0, 1, {rangeGetContentMD5: true}); + result = await blockBlobClient.download(0, 1, { rangeGetContentMD5: true }); assert.deepStrictEqual(await bodyToString(result, 1), "a"); assert.deepStrictEqual(result.contentLength, 1); assert.deepEqual(result.contentMD5, await getMD5FromString("a")); @@ -501,6 +556,36 @@ describe("BlockBlobAPIs", () => { assert.equal(listResponse.committedBlocks![0].size, body.length); }); + it("getBlockList with ifTags @loki @sql", async () => { + const body = "HelloWorld"; + await blockBlobClient.upload(body, 10); + const tags: Tags = { + key1: 'value1' + }; + await blockBlobClient.setTags(tags); + await blockBlobClient.stageBlock(base64encode("1"), body, body.length); + await 
blockBlobClient.stageBlock(base64encode("2"), body, body.length); + await blockBlobClient.commitBlockList([ + base64encode("1"), + base64encode("2") + ]); + + try { + await blockBlobClient.getBlockList("all", { + conditions: { + tagConditions: `key1<>'value1'` + } + }); + assert.fail("Should not reach here."); + } + catch (err) { + assert.deepStrictEqual((err as any).statusCode, 412); + assert.deepStrictEqual((err as any).code, 'ConditionNotMet'); + assert.deepStrictEqual((err as any).details.errorCode, 'ConditionNotMet'); + assert.ok((err as any).details.message.startsWith('The condition specified using HTTP conditional header(s) is not met.')); + } + }); + it("getBlockList_BlockListingFilter @loki @sql", async () => { const body = "HelloWorld"; await blockBlobClient.stageBlock(base64encode("1"), body, body.length); diff --git a/tests/blob/apis/container.test.ts b/tests/blob/apis/container.test.ts index 75db520c3..10c57abb6 100644 --- a/tests/blob/apis/container.test.ts +++ b/tests/blob/apis/container.test.ts @@ -6,7 +6,8 @@ import { BlobServiceClient, generateAccountSASQueryParameters, newPipeline, - StorageSharedKeyCredential + StorageSharedKeyCredential, + Tags } from "@azure/storage-blob"; import assert = require("assert"); import StorageErrorFactory from "../../../src/blob/errors/StorageErrorFactory"; @@ -722,7 +723,7 @@ describe("ContainerAPIs", () => { const inputmarker = undefined; let result = ( await containerClient - .listBlobsByHierarchy("/",{ + .listBlobsByHierarchy("/", { prefix: "" }) .byPage({ @@ -1199,7 +1200,384 @@ describe("ContainerAPIs", () => { assert.equal(result.segment.blobItems.length, 4); }); - // Skip the case currently since js sdk calculate the stringToSign with "+" in prefix instead of decode to space + it("filter blob by tags should work on container @loki @sql", async () => { + const blobName1 = getUniqueName("blobname1"); + const appendBlobClient1 = containerClient.getAppendBlobClient(blobName1); + const tags1 = { + key1: 
"value1", + key2: "default" + } + await appendBlobClient1.create({ tags: tags1 }); + + const blobName2 = getUniqueName("blobname2"); + const appendBlobClient2 = containerClient.getAppendBlobClient(blobName2); + const tags2: Tags = { + key1: "value2", + key2: "default" + } + await appendBlobClient2.create({ tags: tags2 }); + + const blobName3 = getUniqueName("blobname3"); + const appendBlobClient3 = containerClient.getAppendBlobClient(blobName3); + const tags3 = { + key1: "value3", + key3: "default" + } + await appendBlobClient3.create({ tags: tags3 }); + + const expectedTags1: Tags = {}; + expectedTags1['key1'] = tags1['key1']; + for await (const blob of containerClient.findBlobsByTags(`key1='${tags1["key1"]}'`)) { + assert.deepStrictEqual(blob.containerName, containerName); + assert.deepStrictEqual(blob.name, blobName1); + assert.deepStrictEqual(blob.tags, expectedTags1); + assert.deepStrictEqual(blob.tagValue, tags1["key1"]); + } + + const blobsWithTag2 = []; + for await (const segment of containerClient.findBlobsByTags(`key2='default'`).byPage({ + maxPageSize: 1, + })) { + assert.ok(segment.blobs.length <= 1); + for (const blob of segment.blobs) { + blobsWithTag2.push(blob); + } + } + assert.deepStrictEqual(blobsWithTag2.length, 2); + }); + + it("filter blob by tags with greater or less should work on container @loki @sql", async () => { + const blobName1 = getUniqueName("blobname1"); + const appendBlobClient1 = containerClient.getAppendBlobClient(blobName1); + const tags1 = { + key1: "a1", + key2: "1a" + } + await appendBlobClient1.create({ tags: tags1 }); + + const expectedTags1 = { + key1: "a1" + } + + const expectedTags2 = { + key2: "1a" + } + + let blobCountCount = 0; + for await (const blob of containerClient.findBlobsByTags(`key1>'a 1'`)) { + ++blobCountCount; + assert.deepStrictEqual(blob.containerName, containerName); + assert.deepStrictEqual(blob.name, blobName1); + assert.deepStrictEqual(blob.tags, expectedTags1); + 
assert.deepStrictEqual(blob.tagValue, tags1["key1"]); + } + assert.deepStrictEqual(blobCountCount, 1, "Blob should be returned."); + + blobCountCount = 0; + for await (const blob of containerClient.findBlobsByTags(`key2>'1 a'`)) { + ++blobCountCount; + assert.deepStrictEqual(blob.containerName, containerName); + assert.deepStrictEqual(blob.name, blobName1); + assert.deepStrictEqual(blob.tags, expectedTags2); + assert.deepStrictEqual(blob.tagValue, tags1["key2"]); + } + assert.deepStrictEqual(blobCountCount, 1, "Blob should be returned."); + + blobCountCount = 0; + for await (const blob of containerClient.findBlobsByTags(`key1<='a11'`)) { + ++blobCountCount; + assert.deepStrictEqual(blob.containerName, containerName); + assert.deepStrictEqual(blob.name, blobName1); + assert.deepStrictEqual(blob.tags, expectedTags1); + assert.deepStrictEqual(blob.tagValue, tags1["key1"]); + } + assert.deepStrictEqual(blobCountCount, 1, "Blob should be returned."); + + blobCountCount = 0; + for await (const blob of containerClient.findBlobsByTags(`key2<='1aa'`)) { + ++blobCountCount; + assert.deepStrictEqual(blob.containerName, containerName); + assert.deepStrictEqual(blob.name, blobName1); + assert.deepStrictEqual(blob.tags, expectedTags2); + assert.deepStrictEqual(blob.tagValue, tags1["key2"]); + } + assert.deepStrictEqual(blobCountCount, 1, "Blob should be returned."); + }); + + it("filter blob by tags with more than limited conditions on container @loki @sql", async () => { + const tags: Tags = {}; + const tagsLength = 10; + + let queryString = ''; + for (let i = 0; i < tagsLength; ++i) { + const key = getUniqueName("key" + i); + const value = getUniqueName("val" + i); + tags[key] = value; + queryString += `${key}='${value}' and `; + } + + queryString += `anotherkey='anotherValue'`; + + const blobName1 = getUniqueName("blobname1"); + const appendBlobClient1 = containerClient.getAppendBlobClient(blobName1); + await appendBlobClient1.create({ tags: tags }); + + await 
appendBlobClient1.createSnapshot(); + + try { + (await containerClient.findBlobsByTags(queryString).byPage().next()).value; + } + catch (err) { + assert.deepStrictEqual((err as any).statusCode, 400); + assert.deepStrictEqual((err as any).code, 'InvalidQueryParameterValue'); + assert.deepStrictEqual((err as any).details.errorCode, 'InvalidQueryParameterValue'); + assert.ok((err as any).details.message.startsWith('Error parsing query: there can be at most 10 unique tags in a query')); + } + }); + + it("filter blob by tags with conditions number equal to limitation on container @loki @sql", async function () { + const containerName = getUniqueName("container1"); + const containerClient = serviceClient.getContainerClient(containerName); + await containerClient.create(); + + const tags: Tags = {}; + const tagsLength = 10; + + let queryString = ''; + for (let i = 0; i < tagsLength; ++i) { + const key = getUniqueName("key" + i); + const value = getUniqueName("val" + i); + tags[key] = value; + queryString += `${key}='${value}'`; + queryString += ` and `; + } + + queryString = queryString.substring(0, queryString.length - 5); + + const blobName1 = getUniqueName("blobname1"); + const appendBlobClient1 = containerClient.getAppendBlobClient(blobName1); + await appendBlobClient1.create({ tags: tags }); + + await appendBlobClient1.createSnapshot(); + + let blobCountCount = 0; + for await (const blob of containerClient.findBlobsByTags(queryString)) { + ++blobCountCount; + assert.deepStrictEqual(blob.containerName, containerName); + assert.deepStrictEqual(blob.name, blobName1); + assert.deepStrictEqual(blob.tags, tags); + } + assert.deepStrictEqual(blobCountCount, 1, "Blob with snapshot should not be returned."); + + await containerClient.delete(); + }); + + it("filter blob by tags with invalid key chars on container @loki @sql", async function () { + const containerName = getUniqueName("container1"); + const containerClient = serviceClient.getContainerClient(containerName); + 
await containerClient.create(); + let queryString = `'key 1'='valffffff'`; + + try { + (await containerClient.findBlobsByTags(queryString).byPage().next()).value; + assert.fail('Should not reach here'); + } + catch (err) { + assert.deepStrictEqual((err as any).statusCode, 400); + assert.deepStrictEqual((err as any).code, 'InvalidQueryParameterValue'); + assert.deepStrictEqual((err as any).details.errorCode, 'InvalidQueryParameterValue'); + assert.ok((err as any).details.message.startsWith('Error parsing query at or near character position')); + } + + queryString = `'key-1'='valffffff'`; + + try { + (await containerClient.findBlobsByTags(queryString).byPage().next()).value; + assert.fail('Should not reach here'); + } + catch (err) { + assert.deepStrictEqual((err as any).statusCode, 400); + assert.deepStrictEqual((err as any).code, 'InvalidQueryParameterValue'); + assert.deepStrictEqual((err as any).details.errorCode, 'InvalidQueryParameterValue'); + assert.ok((err as any).details.message.startsWith('Error parsing query at or near character position')); + } + + containerClient.delete(); + }); + + it("filter blob by tags with valid special key chars on container @loki @sql", async function () { + const containerName = getUniqueName("container1"); + const containerClient = serviceClient.getContainerClient(containerName); + await containerClient.create(); + + const tags: Tags = { + key_1: 'value_1' + }; + const queryString = `key_1='value_1'`; + + const blobName1 = getUniqueName("blobname1"); + const appendBlobClient1 = containerClient.getAppendBlobClient(blobName1); + await appendBlobClient1.create({ tags: tags }); + + await appendBlobClient1.createSnapshot(); + + let blobCountCount = 0; + for await (const blob of containerClient.findBlobsByTags(queryString)) { + ++blobCountCount; + assert.deepStrictEqual(blob.containerName, containerName); + assert.deepStrictEqual(blob.name, blobName1); + assert.deepStrictEqual(blob.tags, tags); + } + 
assert.deepStrictEqual(blobCountCount, 1, "Blob with snapshot should not be returned."); + + await containerClient.delete(); + }); + + it("filter blob by tags with long key on container @loki @sql", async function () { + const queryString = `key12345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890='value'`; + try { + (await containerClient.findBlobsByTags(queryString).byPage().next()).value; + assert.fail('Should not reach here'); + } + catch (err) { + assert.deepStrictEqual((err as any).statusCode, 400); + assert.deepStrictEqual((err as any).code, 'InvalidQueryParameterValue'); + assert.deepStrictEqual((err as any).details.errorCode, 'InvalidQueryParameterValue'); + assert.ok((err as any).details.message.startsWith('Error parsing query at or near character position') + && (err as any).details.message.includes('tag must be between 1 and 128 characters in length')); + } + }); + + it("filter blob by tags with invalid value chars on container @loki @sql", async function () { + const queryString = `key1='valffffff @'`; + + try { + (await containerClient.findBlobsByTags(queryString).byPage().next()).value; + assert.fail('Should not reach here'); + } + catch (err) { + assert.deepStrictEqual((err as any).statusCode, 400); + assert.deepStrictEqual((err as any).code, 'InvalidQueryParameterValue'); + assert.deepStrictEqual((err as any).details.errorCode, 'InvalidQueryParameterValue'); + assert.ok((err as any).details.message.startsWith('Error parsing query at or near character position') + && (err as any).details.message.includes('not permitted in tag name or value')); + } + }); + + it("filter blob by tags with valid special value chars on container @loki @sql", async function () { + const containerName = getUniqueName("container1"); + const containerClient = serviceClient.getContainerClient(containerName); + await containerClient.create(); + + const tags: Tags = { + key_1: 'value +-.:=_/' 
+ }; + const queryString = `key_1='value +-.:=_/' and @container='${containerName}'`; + + const blobName1 = getUniqueName("blobname1"); + const appendBlobClient1 = containerClient.getAppendBlobClient(blobName1); + await appendBlobClient1.create({ tags: tags }); + + await appendBlobClient1.createSnapshot(); + + let blobCountCount = 0; + for await (const blob of containerClient.findBlobsByTags(queryString)) { + ++blobCountCount; + assert.deepStrictEqual(blob.containerName, containerName); + assert.deepStrictEqual(blob.name, blobName1); + assert.deepStrictEqual(blob.tags, tags); + } + assert.deepStrictEqual(blobCountCount, 1, "Blob with snapshot should not be returned."); + + await containerClient.delete(); + }); + + it("filter blob by tags with long value on container @loki @sql", async function () { + const containerName = getUniqueName("container1"); + const containerClient = serviceClient.getContainerClient(containerName); + await containerClient.create(); + + const queryString = `key_1='value12345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890'`; + try { + (await containerClient.findBlobsByTags(queryString).byPage().next()).value; + assert.fail('Should not reach here'); + } + catch (err) { + assert.deepStrictEqual((err as any).statusCode, 400); + assert.deepStrictEqual((err as any).code, 'InvalidQueryParameterValue'); + assert.deepStrictEqual((err as any).details.errorCode, 'InvalidQueryParameterValue'); + assert.ok((err as any).details.message.startsWith('Error parsing query at or near character position') + && (err as any).details.message.includes('tag value must be between 0 and 256 characters in length')); + } + containerClient.delete(); + }); + + it("filter blob by tags with invalid query string @loki @sql", async function () { + const containerName = 
getUniqueName("container1"); + const containerClient = serviceClient.getContainerClient(containerName); + await containerClient.create(); + + let queryString = `astring`; + try { + (await containerClient.findBlobsByTags(queryString).byPage().next()).value; + assert.fail('Should not reach here'); + } + catch (err) { + assert.deepStrictEqual((err as any).statusCode, 400); + assert.deepStrictEqual((err as any).code, 'InvalidQueryParameterValue'); + assert.deepStrictEqual((err as any).details.errorCode, 'InvalidQueryParameterValue'); + assert.ok((err as any).details.message.startsWith('Error parsing query at or near character position')); + } + + queryString = `key1<>'ab'`; + try { + (await containerClient.findBlobsByTags(queryString).byPage().next()).value; + assert.fail('Should not reach here'); + } + catch (err) { + assert.deepStrictEqual((err as any).statusCode, 400); + assert.deepStrictEqual((err as any).code, 'InvalidQueryParameterValue'); + assert.deepStrictEqual((err as any).details.errorCode, 'InvalidQueryParameterValue'); + assert.ok((err as any).details.message.startsWith('Error parsing query at or near character position')); + } + + queryString = `key1='ab' or key2='cd'`; + try { + (await containerClient.findBlobsByTags(queryString).byPage().next()).value; + assert.fail('Should not reach here'); + } + catch (err) { + assert.deepStrictEqual((err as any).statusCode, 400); + assert.deepStrictEqual((err as any).code, 'InvalidQueryParameterValue'); + assert.deepStrictEqual((err as any).details.errorCode, 'InvalidQueryParameterValue'); + assert.ok((err as any).details.message.startsWith('Error parsing query at or near character position')); + } + containerClient.delete(); + }); + + it("filter blob by tags with continuationToken on container @loki @sql", async function () { + const containerName = getUniqueName("container1"); + const containerClient = serviceClient.getContainerClient(containerName); + await containerClient.create(); + + const tags: Tags = { + 
key_1: 'value_1' + }; + const queryString = `key_1='value_1'`; + + for (let index = 0; index < 5002; ++index) { + const blobName1 = getUniqueName("blobname" + index); + const appendBlobClient1 = containerClient.getAppendBlobClient(blobName1); + await appendBlobClient1.create({ tags: tags }); + } + + let result = (await containerClient.findBlobsByTags(queryString).byPage().next()).value; + assert.ok(result.continuationToken !== undefined); + + await containerClient.delete(); + }); + + // Skip the case currently since js sdk caculate the stringToSign with "+" in prefix instead of decode to space it.skip("List blob should success with '+' in query @loki @sql", async () => { const blobClients = []; let blobNames: Array = [ @@ -1213,7 +1591,7 @@ describe("ContainerAPIs", () => { await blockBlobClient.upload("", 0); blobClients.push(blobClient); } - + // list with prefix has "+" instead of "%20" for space // create service client let pipeline = newPipeline( @@ -1253,7 +1631,7 @@ describe("ContainerAPIs", () => { gotNames.push(item.name); } assert.deepStrictEqual(gotNames, blobNames); - + // clean up for (const blob of blobClients) { await blob.delete(); diff --git a/tests/blob/apis/pageblob.test.ts b/tests/blob/apis/pageblob.test.ts index e66ec5756..711d4bc7a 100644 --- a/tests/blob/apis/pageblob.test.ts +++ b/tests/blob/apis/pageblob.test.ts @@ -1,7 +1,8 @@ import { newPipeline, BlobServiceClient, - StorageSharedKeyCredential + StorageSharedKeyCredential, + Tags } from "@azure/storage-blob"; import assert = require("assert"); @@ -171,6 +172,32 @@ describe("PageBlobAPIs", () => { } }); + it("Create page blob with ifTags should work @loki @sql", async () => { + await pageBlobClient.create(512); + + const tags: Tags = { + tag1: 'val1', + tag2: 'val2' + } + + await pageBlobClient.setTags(tags); + + try { + await pageBlobClient.create(512, { + conditions: { + tagConditions: `tag1<>'val1'` + } + }); + assert.fail(); + } + catch (err) { + assert.deepStrictEqual((err as 
any).statusCode, 412); + assert.deepStrictEqual((err as any).code, 'ConditionNotMet'); + assert.deepStrictEqual((err as any).details.errorCode, 'ConditionNotMet'); + assert.ok((err as any).details.message.startsWith('The condition specified using HTTP conditional header(s) is not met.')); + } + }); + it("download page blob with partial ranges @loki", async () => { const length = 512 * 10; await pageBlobClient.create(length); @@ -295,8 +322,8 @@ describe("PageBlobAPIs", () => { ); }); - it("download a 0 size page blob with range > 0 will get error @loki", async () => { - pageBlobClient.deleteIfExists(); + it("download a 0 size page blob with range > 0 will get error @loki", async () => { + pageBlobClient.deleteIfExists(); await pageBlobClient.create(0); try { @@ -309,14 +336,14 @@ describe("PageBlobAPIs", () => { }); it("Download a blob range should only return ContentMD5 when has request header x-ms-range-get-content-md5 @loki", async () => { - pageBlobClient.deleteIfExists(); - - await pageBlobClient.create(512, {blobHTTPHeaders: {blobContentMD5: await getMD5FromString("a".repeat(512))}}); + pageBlobClient.deleteIfExists(); + + await pageBlobClient.create(512, { blobHTTPHeaders: { blobContentMD5: await getMD5FromString("a".repeat(512)) } }); await pageBlobClient.uploadPages("a".repeat(512), 0, 512); const properties1 = await pageBlobClient.getProperties(); assert.deepEqual(properties1.contentMD5, await getMD5FromString("a".repeat(512))); - + let result = await pageBlobClient.download(0, 1024); assert.deepStrictEqual(await bodyToString(result, 512), "a".repeat(512)); assert.deepStrictEqual(result.contentLength, 512); @@ -329,7 +356,7 @@ describe("PageBlobAPIs", () => { assert.deepEqual(properties1.contentMD5, await getMD5FromString("a".repeat(512))); assert.deepEqual(result.blobContentMD5, await getMD5FromString("a".repeat(512))); - result = await pageBlobClient.download(0, 3, {rangeGetContentMD5: true}); + result = await pageBlobClient.download(0, 3, { 
rangeGetContentMD5: true }); assert.deepStrictEqual(await bodyToString(result, 3), "aaa"); assert.deepStrictEqual(result.contentLength, 3); assert.deepEqual(result.contentMD5, await getMD5FromString("aaa")); @@ -395,6 +422,32 @@ describe("PageBlobAPIs", () => { assert.equal(await bodyToString(page2, 512), "b".repeat(512)); }); + it("uploadPages with ifTags should work @loki", async () => { + await pageBlobClient.create(1024); + + const tags: Tags = { + tag1: 'val1', + tag2: 'val2' + } + + await pageBlobClient.setTags(tags); + + try { + await pageBlobClient.uploadPages("a".repeat(512), 0, 512, { + conditions: { + tagConditions: `tag1<>'val1'` + } + }); + assert.fail("Should not reach here"); + } + catch (err) { + assert.deepStrictEqual((err as any).statusCode, 412); + assert.deepStrictEqual((err as any).code, 'ConditionNotMet'); + assert.deepStrictEqual((err as any).details.errorCode, 'ConditionNotMet'); + assert.ok((err as any).details.message.startsWith('The condition specified using HTTP conditional header(s) is not met.')); + } + }); + it("uploadPages should not work if ifSequenceNumberEqualTo doesn't match @loki", async () => { await pageBlobClient.create(1024); @@ -558,10 +611,10 @@ describe("PageBlobAPIs", () => { assert.equal( await bodyToString(full, length), "\u0000".repeat(512) + - "a".repeat(512) + - "\u0000".repeat(512) + - "c".repeat(512) + - "\u0000".repeat(512) + "a".repeat(512) + + "\u0000".repeat(512) + + "c".repeat(512) + + "\u0000".repeat(512) ); const page1 = await pageBlobClient.download(0, 512); @@ -721,10 +774,10 @@ describe("PageBlobAPIs", () => { assert.equal( await bodyToString(full, length), "\u0000".repeat(512) + - "a".repeat(512) + - "b".repeat(512) + - "c".repeat(512) + - "\u0000".repeat(512) + "a".repeat(512) + + "b".repeat(512) + + "c".repeat(512) + + "\u0000".repeat(512) ); let ranges = await pageBlobClient.getPageRanges(0, length); @@ -777,10 +830,10 @@ describe("PageBlobAPIs", () => { assert.equal( await bodyToString(full, 
length), "d".repeat(512) + - "d".repeat(512) + - "b".repeat(512) + - "c".repeat(512) + - "\u0000".repeat(512) + "d".repeat(512) + + "b".repeat(512) + + "c".repeat(512) + + "\u0000".repeat(512) ); const ranges = await pageBlobClient.getPageRanges(0, length); @@ -821,10 +874,10 @@ describe("PageBlobAPIs", () => { assert.equal( await bodyToString(full, length), "\u0000".repeat(512) + - "a".repeat(512) + - "b".repeat(512) + - "d".repeat(512) + - "d".repeat(512) + "a".repeat(512) + + "b".repeat(512) + + "d".repeat(512) + + "d".repeat(512) ); const ranges = await pageBlobClient.getPageRanges(0, length); @@ -840,6 +893,38 @@ describe("PageBlobAPIs", () => { }); }); + it("getPageRanges with ifTags should work @loki", async () => { + const length = 512 * 5; + await pageBlobClient.create(length); + await pageBlobClient.uploadPages( + "a".repeat(512) + "b".repeat(512) + "c".repeat(512), + 512, + 512 * 3 + ); + + const tags: Tags = { + tag1: 'val1', + tag2: 'val2' + } + + await pageBlobClient.setTags(tags); + + try { + await pageBlobClient.getPageRanges(0, length, { + conditions: { + tagConditions: `tag1<>'val1'` + } + }); + assert.fail("Should not reach here"); + } + catch (err) { + assert.deepStrictEqual((err as any).statusCode, 412); + assert.deepStrictEqual((err as any).code, 'ConditionNotMet'); + assert.deepStrictEqual((err as any).details.errorCode, 'ConditionNotMet'); + assert.ok((err as any).details.message.startsWith('The condition specified using HTTP conditional header(s) is not met.')); + } + }); + it("resize override a sequential range @loki", async () => { let length = 512 * 3; await pageBlobClient.create(length); @@ -912,10 +997,10 @@ describe("PageBlobAPIs", () => { assert.equal( await bodyToString(full, length), "a".repeat(512) + - "d".repeat(512) + - "d".repeat(512) + - "d".repeat(512) + - "b".repeat(512) + "d".repeat(512) + + "d".repeat(512) + + "d".repeat(512) + + "b".repeat(512) ); const ranges = await pageBlobClient.getPageRanges(0, length); @@ -964,10 
+1049,10 @@ describe("PageBlobAPIs", () => { assert.equal( await bodyToString(full, length), "a".repeat(512) + - "d".repeat(512) + - "d".repeat(512) + - "d".repeat(512) + - "b".repeat(512) + "d".repeat(512) + + "d".repeat(512) + + "d".repeat(512) + + "b".repeat(512) ); const ranges = await pageBlobClient.getPageRanges(0, length); @@ -1016,10 +1101,10 @@ describe("PageBlobAPIs", () => { assert.equal( await bodyToString(full, length), "\u0000".repeat(512) + - "d".repeat(512) + - "d".repeat(512) + - "d".repeat(512) + - "\u0000".repeat(512) + "d".repeat(512) + + "d".repeat(512) + + "d".repeat(512) + + "\u0000".repeat(512) ); const ranges = await pageBlobClient.getPageRanges(0, length); @@ -1060,10 +1145,10 @@ describe("PageBlobAPIs", () => { assert.equal( await bodyToString(full, length), "\u0000".repeat(512) + - "d".repeat(512) + - "d".repeat(512) + - "b".repeat(512) + - "\u0000".repeat(512) + "d".repeat(512) + + "d".repeat(512) + + "b".repeat(512) + + "\u0000".repeat(512) ); const ranges = await pageBlobClient.getPageRanges(0, length); @@ -1108,10 +1193,10 @@ describe("PageBlobAPIs", () => { assert.equal( await bodyToString(full, length), "\u0000".repeat(512) + - "a".repeat(512) + - "d".repeat(512) + - "b".repeat(512) + - "\u0000".repeat(512) + "a".repeat(512) + + "d".repeat(512) + + "b".repeat(512) + + "\u0000".repeat(512) ); const ranges = await pageBlobClient.getPageRanges(0, length); @@ -1160,10 +1245,10 @@ describe("PageBlobAPIs", () => { assert.equal( await bodyToString(full, length), "\u0000".repeat(512) + - "a".repeat(512) + - "d".repeat(512) + - "d".repeat(512) + - "\u0000".repeat(512) + "a".repeat(512) + + "d".repeat(512) + + "d".repeat(512) + + "\u0000".repeat(512) ); const ranges = await pageBlobClient.getPageRanges(0, length); @@ -1310,10 +1395,10 @@ describe("PageBlobAPIs", () => { assert.equal( await bodyToString(full, length), "\u0000".repeat(512) + - "a".repeat(512) + - "\u0000".repeat(512) + - "c".repeat(512) + - "\u0000".repeat(512) + 
"a".repeat(512) + + "\u0000".repeat(512) + + "c".repeat(512) + + "\u0000".repeat(512) ); const ranges = await pageBlobClient.getPageRanges(0, length); @@ -1360,10 +1445,10 @@ describe("PageBlobAPIs", () => { assert.equal( await bodyToString(full, length), "\u0000".repeat(512) + - "\u0000".repeat(512) + - "\u0000".repeat(512) + - "\u0000".repeat(512) + - "\u0000".repeat(512) + "\u0000".repeat(512) + + "\u0000".repeat(512) + + "\u0000".repeat(512) + + "\u0000".repeat(512) ); const ranges = await pageBlobClient.getPageRanges(0, length); @@ -1402,10 +1487,10 @@ describe("PageBlobAPIs", () => { assert.equal( await bodyToString(full, length), "\u0000".repeat(512) + - "a".repeat(512) + - "\u0000".repeat(512) + - "\u0000".repeat(512) + - "\u0000".repeat(512) + "a".repeat(512) + + "\u0000".repeat(512) + + "\u0000".repeat(512) + + "\u0000".repeat(512) ); const ranges = await pageBlobClient.getPageRanges(0, length); @@ -1448,10 +1533,10 @@ describe("PageBlobAPIs", () => { assert.equal( await bodyToString(full, length), "\u0000".repeat(512) + - "\u0000".repeat(512) + - "\u0000".repeat(512) + - "c".repeat(512) + - "\u0000".repeat(512) + "\u0000".repeat(512) + + "\u0000".repeat(512) + + "c".repeat(512) + + "\u0000".repeat(512) ); const ranges = await pageBlobClient.getPageRanges(0, length); @@ -1492,10 +1577,10 @@ describe("PageBlobAPIs", () => { assert.equal( await bodyToString(full, length), "a".repeat(512) + - "\u0000".repeat(512) + - "\u0000".repeat(512) + - "\u0000".repeat(512) + - "c".repeat(512) + "\u0000".repeat(512) + + "\u0000".repeat(512) + + "\u0000".repeat(512) + + "c".repeat(512) ); const ranges = await pageBlobClient.getPageRanges(0, length); @@ -1540,10 +1625,10 @@ describe("PageBlobAPIs", () => { assert.equal( await bodyToString(full, length), "a".repeat(512) + - "\u0000".repeat(512) + - "b".repeat(512) + - "\u0000".repeat(512) + - "c".repeat(512) + "\u0000".repeat(512) + + "b".repeat(512) + + "\u0000".repeat(512) + + "c".repeat(512) ); const ranges = await 
pageBlobClient.getPageRanges(0, length); @@ -1651,10 +1736,10 @@ describe("PageBlobAPIs", () => { assert.equal( await bodyToString(full, length), "\u0000".repeat(512) + - "\u0000".repeat(512) + - "\u0000".repeat(512) + - "\u0000".repeat(512) + - "\u0000".repeat(512) + "\u0000".repeat(512) + + "\u0000".repeat(512) + + "\u0000".repeat(512) + + "\u0000".repeat(512) ); const ranges = await pageBlobClient.getPageRanges(0, length); @@ -1690,10 +1775,10 @@ describe("PageBlobAPIs", () => { assert.equal( await bodyToString(full, length), "a".repeat(512) + - "\u0000".repeat(512) + - "b".repeat(512) + - "\u0000".repeat(512) + - "\u0000".repeat(512) + "\u0000".repeat(512) + + "b".repeat(512) + + "\u0000".repeat(512) + + "\u0000".repeat(512) ); const ranges = await pageBlobClient.getPageRanges(0, length); @@ -1737,10 +1822,10 @@ describe("PageBlobAPIs", () => { assert.equal( await bodyToString(full, length), "\u0000".repeat(512) + - "\u0000".repeat(512) + - "\u0000".repeat(512) + - "\u0000".repeat(512) + - "b".repeat(512) + "\u0000".repeat(512) + + "\u0000".repeat(512) + + "\u0000".repeat(512) + + "b".repeat(512) ); const ranges = await pageBlobClient.getPageRanges(0, length); diff --git a/tests/blob/apis/service.test.ts b/tests/blob/apis/service.test.ts index 1f52e86d9..cf9e94850 100644 --- a/tests/blob/apis/service.test.ts +++ b/tests/blob/apis/service.test.ts @@ -6,7 +6,8 @@ import { generateAccountSASQueryParameters, newPipeline, SASProtocol, - StorageSharedKeyCredential + StorageSharedKeyCredential, + Tags } from "@azure/storage-blob"; import * as assert from "assert"; @@ -55,12 +56,12 @@ describe("ServiceAPIs", () => { await server.clean(); }); - it(`getUserDelegationKey with Key credential should fail @loki @sql`, async () => { + it(`getUserDelegationKey with Key credential should fail @loki @sql`, async () => { const startTime = new Date(); startTime.setHours(startTime.getHours() - 1); const expiryTime = new Date(); expiryTime.setDate(expiryTime.getDate() + 1); - + try 
{ await serviceClient.getUserDelegationKey(startTime, expiryTime); assert.fail("Should fail to invoke getUserDelegationKey with account key credentials") @@ -72,7 +73,7 @@ describe("ServiceAPIs", () => { it(`getUserDelegationKey with SAS token credential should fail @loki @sql`, async () => { const sasTokenStart = new Date(); sasTokenStart.setHours(sasTokenStart.getHours() - 1); - + const sasTokenExpiry = new Date(); sasTokenExpiry.setDate(sasTokenExpiry.getDate() + 1); @@ -98,11 +99,11 @@ describe("ServiceAPIs", () => { const skStart = new Date(); skStart.setHours(skStart.getHours() - 1); - + const skExpiry = new Date(); skExpiry.setDate(skExpiry.getDate() + 1); - - try { + + try { await serviceClientWithSAS.getUserDelegationKey(skStart, skExpiry); assert.fail("Should fail to invoke getUserDelegationKey with SAS token credentials") } catch (error) { @@ -411,7 +412,7 @@ describe("ServiceAPIs", () => { await containerClient1.delete(); await containerClient2.delete(); }); - + // fix issue 2382, 2416 it("ListContainers without include metadata should not return container metadata. 
@loki @sql", async () => { const containerNamePrefix = getUniqueName("container"); @@ -431,7 +432,7 @@ describe("ServiceAPIs", () => { .byPage() .next() ).value; - + assert.equal(result1.containerItems!.length, 2); assert.ok(result1.containerItems![0].name.startsWith(containerNamePrefix)); assert.ok(result1.containerItems![1].name.startsWith(containerNamePrefix)); @@ -448,7 +449,7 @@ describe("ServiceAPIs", () => { .byPage() .next() ).value; - + assert.equal(result2.containerItems!.length, 2); assert.ok(result2.containerItems![0].name.startsWith(containerNamePrefix)); assert.ok(result2.containerItems![1].name.startsWith(containerNamePrefix)); @@ -511,6 +512,320 @@ describe("ServiceAPIs", () => { assert.ok(err); });; }); + + it("Find blob by tags should work @loki @sql", async function () { + const containerName = getUniqueName("container1"); + const containerClient = serviceClient.getContainerClient(containerName); + await containerClient.create(); + + const key1 = getUniqueName("key"); + const key2 = getUniqueName("key2"); + + const blobName1 = getUniqueName("blobname1"); + const appendBlobClient1 = containerClient.getAppendBlobClient(blobName1); + const tags1: Tags = {}; + tags1[key1] = getUniqueName("val1"); + tags1[key2] = "default"; + await appendBlobClient1.create({ tags: tags1 }); + + const blobName2 = getUniqueName("blobname2"); + const appendBlobClient2 = containerClient.getAppendBlobClient(blobName2); + const tags2: Tags = {}; + tags2[key1] = getUniqueName("val2"); + tags2[key2] = "default"; + await appendBlobClient2.create({ tags: tags2 }); + + const blobName3 = getUniqueName("blobname3"); + const appendBlobClient3 = containerClient.getAppendBlobClient(blobName3); + const tags3: Tags = {}; + tags3[key1] = getUniqueName("val3"); + tags3[key2] = "default"; + await appendBlobClient3.create({ tags: tags3 }); + + const expectedTags1: Tags = {}; + expectedTags1[key1] = tags1[key1]; + for await (const blob of 
serviceClient.findBlobsByTags(`${key1}='${tags1[key1]}'`)) { + assert.deepStrictEqual(blob.containerName, containerName); + assert.deepStrictEqual(blob.name, blobName1); + assert.deepStrictEqual(blob.tags, expectedTags1); + assert.deepStrictEqual(blob.tagValue, tags1[key1]); + } + + const expectedTags2: Tags = {}; + expectedTags2[key1] = tags2[key1]; + const blobs = []; + for await (const blob of serviceClient.findBlobsByTags(`${key1}='${tags2[key1]}'`)) { + blobs.push(blob); + } + assert.deepStrictEqual(blobs.length, 1); + assert.deepStrictEqual(blobs[0].containerName, containerName); + assert.deepStrictEqual(blobs[0].name, blobName2); + assert.deepStrictEqual(blobs[0].tags, expectedTags2); + assert.deepStrictEqual(blobs[0].tagValue, tags2[key1]); + + const blobsWithTag2 = []; + for await (const segment of serviceClient.findBlobsByTags(`${key2}='default'`).byPage({ + maxPageSize: 1, + })) { + assert.ok(segment.blobs.length <= 1); + for (const blob of segment.blobs) { + blobsWithTag2.push(blob); + } + } + assert.deepStrictEqual(blobsWithTag2.length, 3); + + for await (const blob of serviceClient.findBlobsByTags( + `@container='${containerName}' AND ${key1}='${tags1[key1]}' AND ${key2}='default'`, + )) { + assert.deepStrictEqual(blob.containerName, containerName); + assert.deepStrictEqual(blob.name, blobName1); + assert.deepStrictEqual(blob.tags, tags1); + assert.deepStrictEqual(blob.tagValue, ""); + } + + await containerClient.delete(); + }); + + it("filter blob by tags with more than limited conditions on service @loki @sql", async function () { + const containerName = getUniqueName("container1"); + const containerClient = serviceClient.getContainerClient(containerName); + await containerClient.create(); + + const tags: Tags = {}; + const tagsLength = 10; + + let queryString = ''; + for (let i = 0; i < tagsLength; ++i) { + const key = getUniqueName("key" + i); + const value = getUniqueName("val" + i); + tags[key] = value; + queryString += `${key}='${value}' and `; 
+ } + + queryString += `anotherkey='anotherValue'`; + + const blobName1 = getUniqueName("blobname1"); + const appendBlobClient1 = containerClient.getAppendBlobClient(blobName1); + await appendBlobClient1.create({ tags: tags }); + + await appendBlobClient1.createSnapshot(); + + try { + (await serviceClient.findBlobsByTags(queryString).byPage().next()).value; + assert.fail("Should not reach here"); + } + catch (err) { + assert.deepStrictEqual((err as any).statusCode, 400); + assert.deepStrictEqual((err as any).code, 'InvalidQueryParameterValue'); + assert.deepStrictEqual((err as any).details.errorCode, 'InvalidQueryParameterValue'); + assert.ok((err as any).details.message.startsWith('Error parsing query: there can be at most 10 unique tags in a query')); + } + + await containerClient.delete(); + }); + + it("filter blob by tags with conditions number equal to limitation on service @loki @sql", async function () { + const containerName = getUniqueName("container1"); + const containerClient = serviceClient.getContainerClient(containerName); + await containerClient.create(); + + const tags: Tags = {}; + const tagsLength = 10; + + let queryString = ''; + for (let i = 0; i < tagsLength; ++i) { + const key = getUniqueName("key" + i); + const value = getUniqueName("val" + i); + tags[key] = value; + queryString += `${key}='${value}' and `; + } + + // key @container isn't count in limitation + queryString += `@container='${containerName}'`; + + const blobName1 = getUniqueName("blobname1"); + const appendBlobClient1 = containerClient.getAppendBlobClient(blobName1); + await appendBlobClient1.create({ tags: tags }); + + await appendBlobClient1.createSnapshot(); + + let blobCountCount = 0; + for await (const blob of serviceClient.findBlobsByTags(queryString)) { + ++blobCountCount; + assert.deepStrictEqual(blob.containerName, containerName); + assert.deepStrictEqual(blob.name, blobName1); + assert.deepStrictEqual(blob.tags, tags); + } + assert.deepStrictEqual(blobCountCount, 1, 
"Blob with snapshot should not be returned."); + + await containerClient.delete(); + }); + + it("filter blob by tags with invalid key chars on service @loki @sql", async function () { + let queryString = `key1='valffffff' and @container11='1111'`; + + try { + (await serviceClient.findBlobsByTags(queryString).byPage().next()).value; + assert.fail('Should not reach here'); + } + catch (err) { + assert.deepStrictEqual((err as any).statusCode, 400); + assert.deepStrictEqual((err as any).code, 'InvalidQueryParameterValue'); + assert.deepStrictEqual((err as any).details.errorCode, 'InvalidQueryParameterValue'); + assert.ok((err as any).details.message.startsWith('Error parsing query at or near character position') + && (err as any).details.message.includes('unsupported parameter')); + } + + queryString = `'key 1'='valffffff'`; + + try { + (await serviceClient.findBlobsByTags(queryString).byPage().next()).value; + assert.fail('Should not reach here'); + } + catch (err) { + assert.deepStrictEqual((err as any).statusCode, 400); + assert.deepStrictEqual((err as any).code, 'InvalidQueryParameterValue'); + assert.deepStrictEqual((err as any).details.errorCode, 'InvalidQueryParameterValue'); + assert.ok((err as any).details.message.startsWith('Error parsing query at or near character position')); + } + + queryString = `'key-1'='valffffff'`; + + try { + (await serviceClient.findBlobsByTags(queryString).byPage().next()).value; + assert.fail('Should not reach here'); + } + catch (err) { + assert.deepStrictEqual((err as any).statusCode, 400); + assert.deepStrictEqual((err as any).code, 'InvalidQueryParameterValue'); + assert.deepStrictEqual((err as any).details.errorCode, 'InvalidQueryParameterValue'); + assert.ok((err as any).details.message.startsWith('Error parsing query at or near character position')); + } + }); + + it("filter blob by tags with valid special key chars on service @loki @sql", async function () { + const containerName = getUniqueName("container1"); + const 
containerClient = serviceClient.getContainerClient(containerName); + await containerClient.create(); + + const tags: Tags = { + key_1: 'value_1' + }; + const queryString = `key_1='value_1'`; + + const blobName1 = getUniqueName("blobname1"); + const appendBlobClient1 = containerClient.getAppendBlobClient(blobName1); + await appendBlobClient1.create({ tags: tags }); + + await appendBlobClient1.createSnapshot(); + + let blobCountCount = 0; + for await (const blob of serviceClient.findBlobsByTags(queryString)) { + ++blobCountCount; + assert.deepStrictEqual(blob.containerName, containerName); + assert.deepStrictEqual(blob.name, blobName1); + assert.deepStrictEqual(blob.tags, tags); + } + assert.deepStrictEqual(blobCountCount, 1, "Blob with snapshot should not be returned."); + + await containerClient.delete(); + }); + + it("filter blob by tags with long key @loki @sql", async function () { + const queryString = `key12345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890='value'`; + try { + (await serviceClient.findBlobsByTags(queryString).byPage().next()).value; + assert.fail('Should not reach here'); + } + catch (err) { + assert.deepStrictEqual((err as any).statusCode, 400); + assert.deepStrictEqual((err as any).code, 'InvalidQueryParameterValue'); + assert.deepStrictEqual((err as any).details.errorCode, 'InvalidQueryParameterValue'); + assert.ok((err as any).details.message.startsWith('Error parsing query at or near character position') + && (err as any).details.message.includes('tag must be between 1 and 128 characters in length')); + } + }); + + it("filter blob by tags with invalid value chars on service @loki @sql", async function () { + const queryString = `key1='valffffff @'`; + + try { + (await serviceClient.findBlobsByTags(queryString).byPage().next()).value; + assert.fail('Should not reach here'); + } + catch (err) { + assert.deepStrictEqual((err as any).statusCode, 400); + 
assert.deepStrictEqual((err as any).code, 'InvalidQueryParameterValue'); + assert.deepStrictEqual((err as any).details.errorCode, 'InvalidQueryParameterValue'); + assert.ok((err as any).details.message.startsWith('Error parsing query at or near character position') + && (err as any).details.message.includes('not permitted in tag name or value')); + } + }); + + it("filter blob by tags with valid special value chars on service @loki @sql", async function () { + const containerName = getUniqueName("container1"); + const containerClient = serviceClient.getContainerClient(containerName); + await containerClient.create(); + + const tags: Tags = { + key_1: 'value +-.:=_/' + }; + const queryString = `key_1='value +-.:=_/' and @container='${containerName}'`; + + const blobName1 = getUniqueName("blobname1"); + const appendBlobClient1 = containerClient.getAppendBlobClient(blobName1); + await appendBlobClient1.create({ tags: tags }); + + await appendBlobClient1.createSnapshot(); + + let blobCountCount = 0; + for await (const blob of serviceClient.findBlobsByTags(queryString)) { + ++blobCountCount; + assert.deepStrictEqual(blob.containerName, containerName); + assert.deepStrictEqual(blob.name, blobName1); + assert.deepStrictEqual(blob.tags, tags); + } + assert.deepStrictEqual(blobCountCount, 1, "Blob with snapshot should not be returned."); + + await containerClient.delete(); + }); + + it("filter blob by tags with long value @loki @sql", async function () { + const queryString = `key_1='value12345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890'`; + try { + (await serviceClient.findBlobsByTags(queryString).byPage().next()).value; + assert.fail('Should not reach here'); + } + catch (err) { + assert.deepStrictEqual((err as any).statusCode, 400); + assert.deepStrictEqual((err 
as any).code, 'InvalidQueryParameterValue'); + assert.deepStrictEqual((err as any).details.errorCode, 'InvalidQueryParameterValue'); + assert.ok((err as any).details.message.startsWith('Error parsing query at or near character position') + && (err as any).details.message.includes('tag value must be between 0 and 256 characters in length')); + } + }); + + it("filter blob by tags with continuationToken on service @loki @sql", async function () { + const containerName = getUniqueName("container1"); + const containerClient = serviceClient.getContainerClient(containerName); + await containerClient.create(); + + const tags: Tags = { + key_1: 'value_1' + }; + const queryString = `key_1='value_1'`; + + for (let index = 0; index < 5002; ++index) { + const blobName1 = getUniqueName("blobname" + index); + const appendBlobClient1 = containerClient.getAppendBlobClient(blobName1); + await appendBlobClient1.create({ tags: tags }); + } + + let result = (await serviceClient.findBlobsByTags(queryString).byPage().next()).value; + assert.ok(result.continuationToken !== undefined); + + await containerClient.delete(); + }); }); describe("ServiceAPIs - secondary location endpoint", () => { diff --git a/tests/blob/sas.test.ts b/tests/blob/sas.test.ts index d3164f58e..956f10f2d 100644 --- a/tests/blob/sas.test.ts +++ b/tests/blob/sas.test.ts @@ -14,7 +14,9 @@ import { ContainerClient, PageBlobClient, AppendBlobClient, - BlobBatch + BlobBatch, + Tags, + BlobClient } from "@azure/storage-blob"; import * as assert from "assert"; @@ -2196,4 +2198,201 @@ describe("Shared Access Signature (SAS) authentication", () => { assert.equal(properties4.metadata!["bar"], undefined); assert.equal(properties4.metadata!["baz"], "3"); }); + + it("ContainerClient.generateSasUrl should work with filtertag permission", async () => { + const tmr = new Date(); + tmr.setDate(tmr.getDate() + 1); + + const containerName = getUniqueName("container"); + const containerClient = 
serviceClient.getContainerClient(containerName); + await containerClient.create(); + + const tags = { + tag1: "val1", + tag2: "val2", + }; + + const blockBlockName = getUniqueName("blockblob"); + const blockBlobClient = containerClient.getBlockBlobClient(blockBlockName); + await blockBlobClient.upload("Hello, world", 12, { tags: tags }); + + const sasURL = await containerClient.generateSasUrl({ + expiresOn: tmr, + permissions: ContainerSASPermissions.parse("f"), + protocol: SASProtocol.HttpsAndHttp, + }); + + const containerClientWithSAS = new ContainerClient(sasURL); + + const expectedTags1: Tags = { + tag1: "val1", + }; + + for await (const blob of containerClientWithSAS.findBlobsByTags(`tag1='val1'`)) { + assert.deepStrictEqual(blob.name, blockBlockName); + assert.deepStrictEqual(blob.tags, expectedTags1); + assert.deepStrictEqual(blob.tagValue, "val1"); + } + + await containerClient.delete(); + }); + + it("generateAccountSASQueryParameters should work with filtertag permission against service", async function () { + const tmr = new Date(); + tmr.setDate(tmr.getDate() + 1); + + const containerName = getUniqueName("container"); + const containerClient = serviceClient.getContainerClient(containerName); + await containerClient.create(); + + const tags = { + tag1: "val1", + tag2: "val2", + }; + + const blockBlobName = getUniqueName("blockblob"); + const blockBlobClient = containerClient.getBlockBlobClient(blockBlobName); + await blockBlobClient.upload("Hello, world", 12, { tags: tags }); + + // By default, credential is always the last element of pipeline factories + const factories = (serviceClient as any).pipeline.factories; + const sourceStorageSharedKeyCredential = factories[factories.length - 1]; + + const sasURL = generateAccountSASQueryParameters({ + expiresOn: tmr, + services: 'b', + resourceTypes: 'so', + permissions: AccountSASPermissions.parse("f"), + protocol: SASProtocol.HttpsAndHttp, + }, + sourceStorageSharedKeyCredential).toString(); + + const 
serviceClientWithSAS = new BlobServiceClient(`${serviceClient.url}?${sasURL}`); + + const expectedTags1: Tags = { + tag1: "val1", + }; + + for await (const blob of serviceClientWithSAS.findBlobsByTags(`tag1='val1'`)) { + assert.deepStrictEqual(blob.tags, expectedTags1); + assert.deepStrictEqual(blob.tagValue, "val1"); + } + + await containerClient.delete(); + }); + + it("generateAccountSASQueryParameters should work with filtertag permission against container", async function () { + const tmr = new Date(); + tmr.setDate(tmr.getDate() + 1); + + const containerName = getUniqueName("container"); + const containerClient = serviceClient.getContainerClient(containerName); + await containerClient.create(); + + const tags = { + tag1: "val1", + tag2: "val2", + }; + + const blockBlobName = getUniqueName("blockblob"); + const blockBlobClient = containerClient.getBlockBlobClient(blockBlobName); + await blockBlobClient.upload("Hello, world", 12, { tags: tags }); + + // By default, credential is always the last element of pipeline factories + const factories = (serviceClient as any).pipeline.factories; + const sourceStorageSharedKeyCredential = factories[factories.length - 1]; + + const sasURL = generateAccountSASQueryParameters({ + expiresOn: tmr, + services: 'b', + resourceTypes: 'c', + permissions: AccountSASPermissions.parse("f"), + protocol: SASProtocol.HttpsAndHttp, + }, + sourceStorageSharedKeyCredential).toString(); + + const containerClientWithSas = new ContainerClient(`${containerClient.url}?${sasURL}`); + + const expectedTags1: Tags = { + tag1: "val1", + }; + + for await (const blob of containerClientWithSas.findBlobsByTags(`tag1='val1'`)) { + assert.deepStrictEqual(blob.tags, expectedTags1); + assert.deepStrictEqual(blob.tagValue, "val1"); + } + + await containerClient.delete(); + }); + + it("BlobClient.generateSasUrl should work with get/set tags permission", async () => { + const tmr = new Date(); + tmr.setDate(tmr.getDate() + 1); + + const containerName = 
getUniqueName("container"); + const containerClient = serviceClient.getContainerClient(containerName); + await containerClient.create(); + + const tags = { + tag1: "val1", + tag2: "val2", + }; + + const blockBlobName = getUniqueName("blockblob"); + const blockBlobClient = containerClient.getBlockBlobClient(blockBlobName); + await blockBlobClient.upload("Hello, world", 12); + + const sasURL = await blockBlobClient.generateSasUrl({ + expiresOn: tmr, + permissions: BlobSASPermissions.parse("t"), + protocol: SASProtocol.HttpsAndHttp, + }); + + const blobClientWithSAS = new BlobClient(sasURL); + + await blobClientWithSAS.setTags(tags); + const getTagsResult = await blobClientWithSAS.getTags(); + assert.deepStrictEqual(getTagsResult.tags, tags); + + await containerClient.delete(); + }); + + it("generateAccountSASQueryParameters should work with get/set tags permission", async function () { + const tmr = new Date(); + tmr.setDate(tmr.getDate() + 1); + + const containerName = getUniqueName("container"); + const containerClient = serviceClient.getContainerClient(containerName); + await containerClient.create(); + + const tags = { + tag1: "val1", + tag2: "val2", + }; + + const blockBlobName = getUniqueName("blockblob"); + const blockBlobClient = containerClient.getBlockBlobClient(blockBlobName); + await blockBlobClient.upload("Hello, world", 12); + + // By default, credential is always the last element of pipeline factories + const factories = (serviceClient as any).pipeline.factories; + const sourceStorageSharedKeyCredential = factories[factories.length - 1]; + + const sasURL = generateAccountSASQueryParameters({ + expiresOn: tmr, + services: 'b', + resourceTypes: 'o', + permissions: AccountSASPermissions.parse("t"), + protocol: SASProtocol.HttpsAndHttp, + }, + sourceStorageSharedKeyCredential).toString(); + + const blobClientWithSAS = new BlobClient(`${blockBlobClient.url}?${sasURL}`); + + await blobClientWithSAS.setTags(tags); + const getTagsResult = 
await blobClientWithSAS.getTags(); + assert.deepStrictEqual(getTagsResult.tags, tags); + + await containerClient.delete(); + }); });