Skip to content

Commit

Permalink
Merge pull request #28 from p2panda/create-blob-method
Browse files Browse the repository at this point in the history
Introduce method for publishing blobs to `Session`
  • Loading branch information
sandreae authored Jan 17, 2024
2 parents cbb906a + dba1e07 commit a28641c
Show file tree
Hide file tree
Showing 3 changed files with 132 additions and 1 deletion.
2 changes: 2 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0

## [Unreleased]

- Introduce `createBlob` method on `Session` for publishing blobs [#28](https://github.com/p2panda/shirokuma/pull/28)

## [0.1.2]

> Released on 2023-11-16 :package:
Expand Down
129 changes: 128 additions & 1 deletion src/session.ts
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
// SPDX-License-Identifier: AGPL-3.0-or-later

import { GraphQLClient } from 'graphql-request';
import { generateHash, KeyPair } from 'p2panda-js';
import { generateHash, KeyPair, OperationFields } from 'p2panda-js';

import {
createOperation,
Expand All @@ -20,6 +20,8 @@ import type {
SchemaId,
} from './types.js';

const MAX_BLOB_PIECE_LENGTH = 256 * 1000;

/**
* Options we can pass in into methods which will override the globally set
* options for that session for that method call.
Expand Down Expand Up @@ -409,4 +411,129 @@ export class Session {
const localViewId = await this.publish(entry, operation);
return localViewId;
}

/**
 * Publish a blob.
 *
 * The blob's bytes are chunked into pieces of at most 256kb, each published
 * as its own `blob_piece_v1` document. A final `blob_v1` document is then
 * published which pins all pieces and carries the blob's `mime_type` [str]
 * and total byte `length` [int].
 *
 * @param blob - blob data to be published
 * @param options - overrides globally set options for this method call
 * @param options.keyPair - will be used to sign the new entry
 * @returns Document id of the blob we've created
 * @example
 * ```
 * const endpoint = 'http://localhost:2020/graphql';
 * const keyPair = new KeyPair();
 *
 * const session = await new Session(endpoint)
 *   .setKeyPair(keyPair)
 *   .create(fields, { schemaId });
 *
 * const input = document.querySelector('input');
 * const blob = input.files[0];
 * await session.createBlob(blob);
 * ```
 */
async createBlob(
  blob: Blob,
  options?: Partial<Options>,
): Promise<DocumentViewId> {
  if (!blob) {
    throw new Error('blob must be provided');
  }

  const mimeType = blob.type;
  const bytes = await intoBytes(blob);

  // A key pair passed via options takes precedence over the session's own.
  const keyPair = options?.keyPair || this.keyPair;
  const publicKey = keyPair.publicKey();

  const totalLength = bytes.byteLength;
  const pieceViewIds = [];

  // Walk over the blob bytes in fixed-size steps, publishing each slice as
  // its own blob piece document. An empty blob publishes no pieces.
  for (let start = 0; start < totalLength; start += MAX_BLOB_PIECE_LENGTH) {
    const pieceBytes = bytes.slice(start, start + MAX_BLOB_PIECE_LENGTH);

    // Compose a blob piece operation holding this slice.
    const pieceFields = new OperationFields();
    pieceFields.insert('data', 'bytes', pieceBytes);

    // Retrieve next entry arguments for this public key.
    const pieceNextArgs = await this.nextArgs(publicKey);

    // Sign and encode entry with CREATE operation.
    const { entry, operation } = createOperation(
      {
        schemaId: 'blob_piece_v1',
        fields: pieceFields,
      },
      {
        keyPair,
        nextArgs: pieceNextArgs,
      },
    );

    // Publish the blob piece and remember its id for the blob document.
    const viewId = await this.publish(entry, operation);
    pieceViewIds.push([viewId.toString()]);
  }

  // Compose the blob operation itself, pinning every piece we published in
  // the previous step.
  const fields = new OperationFields();
  fields.insert('mime_type', 'str', mimeType);
  fields.insert('length', 'int', totalLength);
  fields.insert('pieces', 'pinned_relation_list', pieceViewIds);

  const nextArgs = await this.nextArgs(publicKey);

  // Sign and encode entry with CREATE operation.
  const { entry, operation } = createOperation(
    {
      schemaId: 'blob_v1',
      fields,
    },
    {
      keyPair,
      nextArgs,
    },
  );

  // Publish the blob and return its document view id.
  return await this.publish(entry, operation);
}
}

// Helper method for converting a blob into a Uint8Array.
//
// Uses the standard promise-returning `Blob.arrayBuffer()` instead of the
// callback-based `FileReader` API: it needs no hand-rolled Promise wrapper,
// rejects with a proper `Error` on failure (FileReader rejects with a
// ProgressEvent), and also works outside the browser (Node >= 15).
const intoBytes = async (blob: Blob): Promise<Uint8Array> => {
  return new Uint8Array(await blob.arrayBuffer());
};
2 changes: 2 additions & 0 deletions src/types.ts
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,8 @@ export type DocumentViewId = string | string[];
export type SchemaId =
| 'schema_definition_v1'
| 'schema_field_definition_v1'
| 'blob_v1'
| 'blob_piece_v1'
| string;

/**
Expand Down

0 comments on commit a28641c

Please sign in to comment.