Skip to content

Commit

Permalink
Fixes for new metadata structure (#379)
Browse files Browse the repository at this point in the history
  • Loading branch information
Murderlon authored Jan 18, 2023
1 parent d46663f commit ea855df
Show file tree
Hide file tree
Showing 12 changed files with 94 additions and 51 deletions.
2 changes: 1 addition & 1 deletion packages/file-store/test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -71,7 +71,7 @@ describe('FileStore', function () {
// @ts-expect-error todo
size: this.testFileSize,
offset: 0,
metadata: 'filename d29ybGRfZG9taW5hdGlvbl9wbGFuLnBkZg==,is_confidential',
metadata: {filename: 'world_domination_plan.pdf', is_confidential: null},
})

it("created file's size should match 'upload_length'", async function () {
Expand Down
5 changes: 3 additions & 2 deletions packages/gcs-store/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -43,14 +43,15 @@ export class GCSStore extends DataStore {
}

const gcs_file = this.bucket.file(file.id)

const options = {
metadata: {
metadata: {
tus_version: TUS_RESUMABLE,
size: file.size,
sizeIsDeferred: `${file.sizeIsDeferred}`,
offset: file.offset,
metadata: file.metadata,
metadata: JSON.stringify(file.metadata),
},
},
}
Expand Down Expand Up @@ -160,7 +161,7 @@ export class GCSStore extends DataStore {
id,
size: size ? Number.parseInt(size, 10) : size,
offset: Number.parseInt(metadata.size, 10), // `size` is set by GCS
metadata: meta,
metadata: meta ? JSON.parse(meta) : undefined,
})
)
})
Expand Down
18 changes: 11 additions & 7 deletions packages/s3-store/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -84,22 +84,21 @@ export class S3Store extends DataStore {
* on the S3 object's `Metadata` field, so that only a `headObject`
* is necessary to retrieve the data.
*/
private async saveMetadata(file: Upload, upload_id: string) {
log(`[${file.id}] saving metadata`)
private async saveMetadata(upload: Upload, upload_id: string) {
log(`[${upload.id}] saving metadata`)
await this.client
.putObject({
Bucket: this.bucket,
Key: `${file.id}.info`,
Key: `${upload.id}.info`,
Body: '',
Metadata: {
file: JSON.stringify(file),
file: JSON.stringify(upload),
upload_id,
tus_version: TUS_RESUMABLE,
},
})
.promise()
log(`[${file.id}] metadata file saved`)
return {file, upload_id}
log(`[${upload.id}] metadata file saved`)
}

/**
Expand Down Expand Up @@ -399,7 +398,11 @@ export class S3Store extends DataStore {
Key: upload.id,
Metadata: {tus_version: TUS_RESUMABLE},
}
const file: Record<string, string | number> = {id: upload.id, offset: upload.offset}
const file: Record<string, string | number | Upload['metadata']> = {
id: upload.id,
offset: upload.offset,
metadata: upload.metadata,
}

if (upload.size) {
file.size = upload.size.toString()
Expand Down Expand Up @@ -486,6 +489,7 @@ export class S3Store extends DataStore {
...this.cache.get(id)?.file,
offset: metadata.file.size as number,
size: metadata.file.size,
metadata: metadata.file.metadata,
})
}

Expand Down
4 changes: 4 additions & 0 deletions packages/server/src/constants.ts
Original file line number Diff line number Diff line change
Expand Up @@ -53,6 +53,10 @@ export const ERRORS = {
status_code: 400,
body: 'Upload-Length or Upload-Defer-Length header required\n',
},
INVALID_METADATA: {
status_code: 400,
body: 'Upload-Metadata is invalid. It MUST consist of one or more comma-separated key-value pairs. The key and value MUST be separated by a space. The key MUST NOT contain spaces and commas and MUST NOT be empty. The key SHOULD be ASCII encoded and the value MUST be Base64 encoded. All keys MUST be unique',
},
UNKNOWN_ERROR: {
status_code: 500,
body: 'Something went wrong with that request\n',
Expand Down
2 changes: 1 addition & 1 deletion packages/server/src/handlers/HeadHandler.ts
Original file line number Diff line number Diff line change
Expand Up @@ -49,7 +49,7 @@ export class HeadHandler extends BaseHandler {
if (file.metadata !== undefined) {
// If the upload contains additional metadata, the Server MUST
// include the Upload-Metadata header in the response.
res.setHeader('Upload-Metadata', Metadata.stringify(file.metadata))
res.setHeader('Upload-Metadata', Metadata.stringify(file.metadata) as string)
}

return res.end()
Expand Down
10 changes: 8 additions & 2 deletions packages/server/src/handlers/PostHandler.ts
Original file line number Diff line number Diff line change
Expand Up @@ -52,19 +52,25 @@ export class PostHandler extends BaseHandler {
}

let id

try {
id = this.options.namingFunction(req)
} catch (error) {
log('create: check your `namingFunction`. Error', error)
throw ERRORS.FILE_WRITE_ERROR
}

let metadata
try {
metadata = Metadata.parse(upload_metadata)
} catch (error) {
throw ERRORS.INVALID_METADATA
}

const upload = new Upload({
id,
size: upload_length ? Number.parseInt(upload_length, 10) : undefined,
offset: 0,
metadata: Metadata.parse(upload_metadata),
metadata,
})

if (this.options.onUploadCreate) {
Expand Down
12 changes: 5 additions & 7 deletions packages/server/src/models/Metadata.ts
Original file line number Diff line number Diff line change
Expand Up @@ -32,10 +32,10 @@ export function validateValue(value: string) {
}

export function parse(str?: string) {
const meta: Record<string, string | undefined> = {}
const meta: Record<string, string | null> = {}

if (!str) {
return {}
return undefined
}

for (const pair of str.split(',')) {
Expand All @@ -46,9 +46,7 @@ export function parse(str?: string) {
(tokens.length === 2 && validateKey(key) && validateValue(value))) &&
!(key in meta)
) {
const decodedValue = value
? Buffer.from(value, 'base64').toString('utf8')
: undefined
const decodedValue = value ? Buffer.from(value, 'base64').toString('utf8') : null
meta[key] = decodedValue
} else {
throw new Error('Metadata string is not valid')
Expand All @@ -58,10 +56,10 @@ export function parse(str?: string) {
return meta
}

export function stringify(metadata: NonNullable<Upload['metadata']>) {
export function stringify(metadata: NonNullable<Upload['metadata']>): string {
return Object.entries(metadata)
.map(([key, value]) => {
if (value === undefined) {
if (value === null) {
return key
}

Expand Down
2 changes: 1 addition & 1 deletion packages/server/src/models/Upload.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ type TUpload = {
id: string
size?: number
offset: number
metadata?: Record<string, string | undefined | never>
metadata?: Record<string, string | null>
creation_date?: string
}

Expand Down
2 changes: 1 addition & 1 deletion packages/server/test/HeadHandler.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -65,7 +65,7 @@ describe('HeadHandler', () => {
const file = new Upload({
id: '1234',
offset: 0,
metadata: {is_confidential: undefined, foo: 'bar'},
metadata: {is_confidential: null, foo: 'bar'},
})
fake_store.getUpload.resolves(file)
await handler.send(req, res)
Expand Down
24 changes: 11 additions & 13 deletions packages/server/test/Metadata.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -9,8 +9,8 @@ describe('Metadata', () => {
'file/name': 'test.mp4',
size: '960244',
'type!': 'video/mp4',
video: undefined,
withWhitespace: undefined,
video: null,
withWhitespace: null,
}
const decoded = parse(str)
assert.deepStrictEqual(decoded, obj)
Expand All @@ -21,8 +21,8 @@ describe('Metadata', () => {
filename: 'test.mp4',
size: '960244',
type: 'video/mp4',
video: undefined,
withWhitespace: undefined,
video: null,
withWhitespace: null,
}
const encoded = stringify(obj)

Expand All @@ -34,11 +34,9 @@ describe('Metadata', () => {
assert.strictEqual(stringify({size: '960244'}), 'size OTYwMjQ0')
assert.strictEqual(stringify({type: 'video/mp4'}), 'type dmlkZW8vbXA0')
// Multiple valid options
assert.notStrictEqual(['video', 'video '].indexOf(stringify({video: undefined})), -1)
assert.notStrictEqual(['video', 'video '].indexOf(stringify({video: null})), -1)
assert.notStrictEqual(
['withWhitespace', 'withWhitespace '].indexOf(
stringify({withWhitespace: undefined})
),
['withWhitespace', 'withWhitespace '].indexOf(stringify({withWhitespace: null})),
-1
)
})
Expand All @@ -51,20 +49,20 @@ describe('Metadata', () => {
assert.deepStrictEqual(parse('type dmlkZW8vbXA0'), {
type: 'video/mp4',
})
assert.deepStrictEqual(parse('video'), {video: undefined})
assert.deepStrictEqual(parse('video '), {video: undefined})
assert.deepStrictEqual(parse('video'), {video: null})
assert.deepStrictEqual(parse('video '), {video: null})
assert.deepStrictEqual(parse('withWhitespace'), {
withWhitespace: undefined,
withWhitespace: null,
})
assert.deepStrictEqual(parse('withWhitespace '), {
withWhitespace: undefined,
withWhitespace: null,
})
})

it('cyclic test', () => {
const obj = {
filename: 'world_domination_plan.pdf',
is_confidential: undefined,
is_confidential: null,
}
// Object -> string -> object
assert.deepStrictEqual(parse(stringify(obj)), obj)
Expand Down
52 changes: 42 additions & 10 deletions test/e2e.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ import rimraf from 'rimraf'
import request from 'supertest'
import {Storage} from '@google-cloud/storage'

import {Server, TUS_RESUMABLE, MemoryConfigstore} from '@tus/server'
import {Server, TUS_RESUMABLE, MemoryConfigstore, ERRORS} from '@tus/server'
import {FileStore} from '@tus/file-store'
import {GCSStore} from '@tus/gcs-store'

Expand All @@ -20,7 +20,7 @@ const BUCKET = 'tus-node-server-ci'
const FILES_DIRECTORY = path.resolve(STORE_PATH)
const TEST_FILE_SIZE = '960244'
const TEST_FILE_PATH = path.resolve('fixtures', 'test.mp4')
const TEST_METADATA = 'some data, for you'
const TEST_METADATA = 'filename d29ybGRfZG9taW5hdGlvbl9wbGFuLnBkZg==,is_confidential'
const gcs = new Storage({
projectId: PROJECT_ID,
keyFilename: KEYFILE,
Expand Down Expand Up @@ -115,6 +115,19 @@ describe('EndToEnd', () => {
})
})

it('should throw informative error on invalid metadata', (done) => {
agent
.post(STORE_PATH)
.set('Tus-Resumable', TUS_RESUMABLE)
.set('Upload-Length', TEST_FILE_SIZE)
.set('Upload-Metadata', 'no sir')
.expect(ERRORS.INVALID_METADATA.status_code)
.end((_, res) => {
assert.equal(res.text, ERRORS.INVALID_METADATA.body)
done()
})
})

it('should create a file with a deferred length', (done) => {
agent
.post(STORE_PATH)
Expand Down Expand Up @@ -649,15 +662,34 @@ describe('EndToEnd', () => {

describe('HEAD', () => {
it('should return the ending offset of the uploaded file', (done) => {
agent
.head(`${STORE_PATH}/${file_id}`)
const read_stream = fs.createReadStream(TEST_FILE_PATH)
const write_stream = agent
.post(STORE_PATH)
.set('Tus-Resumable', TUS_RESUMABLE)
.expect(200)
.expect('Upload-Metadata', TEST_METADATA)
.expect('Upload-Offset', `${TEST_FILE_SIZE}`)
.expect('Upload-Length', `${TEST_FILE_SIZE}`)
.expect('Tus-Resumable', TUS_RESUMABLE)
.end(done)
.set('Upload-Length', TEST_FILE_SIZE)
.set('Content-Type', 'application/offset+octet-stream')
write_stream.on('response', (res) => {
assert.equal(res.statusCode, 201)
assert.equal(res.header['tus-resumable'], TUS_RESUMABLE)
assert.equal(res.header['upload-offset'], `${TEST_FILE_SIZE}`)
const id = res.headers.location.split('/').pop()
agent
.head(`${STORE_PATH}/${id}`)
.set('Tus-Resumable', TUS_RESUMABLE)
.expect(200)
.expect('Upload-Offset', `${TEST_FILE_SIZE}`)
.expect('Upload-Length', `${TEST_FILE_SIZE}`)
.expect('Tus-Resumable', TUS_RESUMABLE)
.end(done)
})
// Using .pipe() broke when upgrading to Superagent 3.0+,
// so now we use data events to read the file to the agent.
read_stream.on('data', (chunk) => {
write_stream.write(chunk)
})
read_stream.on('end', () => {
write_stream.end(() => {})
})
})
})
})
Expand Down
12 changes: 6 additions & 6 deletions test/stores.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ export const shouldCreateUploads = function () {
id: 'create-test',
size: 1000,
offset: 0,
metadata: 'filename d29ybGRfZG9taW5hdGlvbl9wbGFuLnBkZg==,is_confidential',
metadata: {filename: 'world_domination_plan.pdf', is_confidential: null},
})
const file_defered = new Upload({
id: 'create-test-deferred',
Expand Down Expand Up @@ -62,7 +62,7 @@ export const shouldCreateUploads = function () {
it('should store `upload_metadata` when creating new resource', async function () {
await this.datastore.create(file)
const upload = await this.datastore.getUpload(file.id)
assert.strictEqual(upload.metadata, file.metadata)
assert.deepStrictEqual(upload.metadata, file.metadata)
})
})
}
Expand Down Expand Up @@ -103,7 +103,7 @@ export const shouldWriteUploads = function () {
id: 'write-test',
size: this.testFileSize,
offset: 0,
metadata: 'filename d29ybGRfZG9taW5hdGlvbl9wbGFuLnBkZg==,is_confidential',
metadata: {filename: 'world_domination_plan.pdf', is_confidential: null},
})
await this.datastore.create(file)
const readable = fs.createReadStream(this.testFilePath)
Expand All @@ -116,7 +116,7 @@ export const shouldWriteUploads = function () {
id: 'write-test-reject',
size: this.testFileSize,
offset: 0,
metadata: 'filename d29ybGRfZG9taW5hdGlvbl9wbGFuLnBkZg==,is_confidential',
metadata: {filename: 'world_domination_plan.pdf', is_confidential: null},
})
await this.datastore.create(file)
const readable = new stream.Readable({
Expand All @@ -142,7 +142,7 @@ export const shouldHandleOffset = function () {
id: 'offset-test',
size: this.testFileSize,
offset: 0,
metadata: 'filename d29ybGRfZG9taW5hdGlvbl9wbGFuLnBkZg==,is_confidential',
metadata: {filename: 'world_domination_plan.pdf', is_confidential: null},
})

await this.datastore.create(file)
Expand All @@ -167,7 +167,7 @@ export const shouldDeclareUploadLength = function () {
const file = new Upload({
id: 'declare-length-test',
offset: 0,
metadata: 'filename d29ybGRfZG9taW5hdGlvbl9wbGFuLnBkZg==,is_confidential',
metadata: {filename: 'world_domination_plan.pdf', is_confidential: null},
})

await this.datastore.create(file)
Expand Down

0 comments on commit ea855df

Please sign in to comment.