diff --git a/CHANGELOG.md b/CHANGELOG.md index 8e1b6c8..388d35c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [Unreleased] +- introduce method for creating blobs to `Session` [#28](https://github.com/p2panda/shirokuma/pull/28) + ## [0.1.2] > Released on 2023-11-16 :package: diff --git a/src/session.ts b/src/session.ts index f4e5987..a046acc 100644 --- a/src/session.ts +++ b/src/session.ts @@ -1,7 +1,7 @@ // SPDX-License-Identifier: AGPL-3.0-or-later import { GraphQLClient } from 'graphql-request'; -import { generateHash, KeyPair } from 'p2panda-js'; +import { generateHash, KeyPair, OperationFields } from 'p2panda-js'; import { createOperation, @@ -20,6 +20,8 @@ import type { SchemaId, } from './types.js'; +const MAX_BLOB_PIECE_LENGTH = 256 * 1000; + /** * Options we can pass in into methods which will override the globally set * options for that session for that method call. @@ -409,4 +411,129 @@ export class Session { const localViewId = await this.publish(entry, operation); return localViewId; } + + /** + * Publish a blob. + * + * The blob byte array is split into 256kb long pieces which are each published + * individually, following this the blob document itself is published. Included + * metadata is `mime_type` [str] and `length` [int] representing the complete + * byte length of the blob file. 
+ * + * @param blob - blob data to be published + * @param options - overrides globally set options for this method call + * @param options.keyPair - will be used to sign the new entry + * @returns Document id of the blob we've created + * @example + * ``` + * const endpoint = 'http://localhost:2020/graphql'; + * const keyPair = new KeyPair(); + * + * const session = await new Session(endpoint) + * .setKeyPair(keyPair) + * .create(fields, { schemaId }); + * + * const input = document.querySelector('input'); + * const blob = input.files[0]; + * await session.createBlob(blob); + * ``` + + */ + async createBlob( + blob: Blob, + options?: Partial<Options>, + ): Promise<DocumentViewId> { + if (!blob) { + throw new Error('blob must be provided'); + } + + const mimetype = blob.type; + const bytes = await intoBytes(blob); + const keyPair = options?.keyPair || this.keyPair; + + // Retrieve next entry arguments + const publicKey = keyPair.publicKey(); + + const pieces = []; + + // Get the length and calculate the total number of pieces, based on the + // maximum allowed piece size. + const length = bytes.byteLength; + const expected_pieces = Math.ceil(length / MAX_BLOB_PIECE_LENGTH); + + for (let index = 0; index < expected_pieces; index++) { + const start = index * MAX_BLOB_PIECE_LENGTH; + const end = start + MAX_BLOB_PIECE_LENGTH; + + // Take a slice from the blob bytes, these are the bytes we will publish + // as a blob piece. + const pieceBytes = bytes.slice(start, end); + + // Compose a blob piece operation. + const fields = new OperationFields(); + fields.insert('data', 'bytes', pieceBytes); + + const nextArgs = await this.nextArgs(publicKey); + + // Sign and encode entry with CREATE operation + const { entry, operation } = createOperation( + { + schemaId: 'blob_piece_v1', + fields, + }, + { + keyPair, + nextArgs, + }, + ); + + // Publish the blob piece and push its id to the pieces array.
+ const viewId = await this.publish(entry, operation); + pieces.push([viewId.toString()]); + } + + // Compose a blob operation, using the array of blob pieces we published + // in the previous step. + const fields = new OperationFields(); + fields.insert('mime_type', 'str', mimetype); + fields.insert('length', 'int', length); + fields.insert('pieces', 'pinned_relation_list', pieces); + + const nextArgs = await this.nextArgs(publicKey); + + // Sign and encode entry with CREATE operation + const { entry, operation } = createOperation( + { + schemaId: 'blob_v1', + fields, + }, + { + keyPair, + nextArgs, + }, + ); + + // Publish the blob and return its document view id. + const viewId = await this.publish(entry, operation); + + return viewId; + } } + +// Helper method for converting a blob into a Uint8Array. +const intoBytes = async (blob: Blob): Promise<Uint8Array> => { + const reader = new FileReader(); + + return new Promise((resolve, reject) => { + reader.onload = (e) => { + const arrayBuffer = e.target?.result; + const array = new Uint8Array(arrayBuffer as ArrayBuffer); + resolve(array); + }; + reader.onerror = (e) => { + reject(e); + }; + + reader.readAsArrayBuffer(blob); + }); +}; diff --git a/src/types.ts b/src/types.ts index f282cba..765f4b2 100644 --- a/src/types.ts +++ b/src/types.ts @@ -24,6 +24,8 @@ export type DocumentViewId = string | string[]; export type SchemaId = | 'schema_definition_v1' | 'schema_field_definition_v1' + | 'blob_v1' + | 'blob_piece_v1' | string; /**