Fixed failing test on Windows caused by #2433 (#2446)

Merged 2 commits on Aug 7, 2024
16 changes: 10 additions & 6 deletions src/common/persistence/FSExtentStore.ts
@@ -3,7 +3,7 @@ import {
   createReadStream,
   createWriteStream,
   fdatasync,
-  ftruncate,
+  truncate,
   mkdir,
   open,
   stat,
@@ -35,7 +35,7 @@ import OperationQueue from "./OperationQueue";
 const statAsync = promisify(stat);
 const mkdirAsync = promisify(mkdir);
 const unlinkAsync = promisify(unlink);
-const ftruncateAsync = promisify(ftruncate);
+const truncateAsync = promisify(truncate);
 
 // The max size of an extent.
 const MAX_EXTENT_SIZE = DEFAULT_MAX_EXTENT_SIZE;
@@ -295,18 +295,22 @@ export default class FSExtentStore implements IExtentStore {
               count
             };
           } catch (err) {
-            // Reset cursor position to the current offset.
+            // Reset cursor position to the current offset. On Windows, truncating a file open in append mode doesn't
+            // work, so we need to close the file descriptor first.
             try {
-              await ftruncateAsync(fd, appendExtent.offset);
+              appendExtent.fd = undefined;
+              await closeAsync(fd);
+              await truncateAsync(path, appendExtent.offset);
+              // Indicate that the extent is ready for the next append operation.
+              appendExtent.appendStatus = AppendStatusCode.Idle;
             } catch (truncate_err) {
               this.logger.error(
-                `FSExtentStore:appendExtent() Truncate fd:${fd} len: ${appendExtent.offset} error:${JSON.stringify(
+                `FSExtentStore:appendExtent() Truncate path:${path} len: ${appendExtent.offset} error:${JSON.stringify(
                   truncate_err
                 )}.`,
                 contextId
               );
             }
-            appendExtent.appendStatus = AppendStatusCode.Idle;
            throw err;
          }
        })()
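The essence of the fix can be illustrated outside of Azurite. The sketch below is hypothetical (`appendWithRollback` and its bare `fs/promises` calls are illustrative, not code from this PR); it only shows the recovery pattern the diff adopts: when an append fails partway, close the file handle first and only then truncate the file by path back to the last known-good offset, because on Windows truncating a file that is still open in append mode fails.

```typescript
import { open, truncate } from "fs/promises";

// Hypothetical helper, not Azurite code: append `data` to `path`, and on any
// failure roll the file back to `offset`.
async function appendWithRollback(path: string, offset: number, data: Buffer): Promise<number> {
  const handle = await open(path, "a");
  try {
    const { bytesWritten } = await handle.write(data);
    await handle.close();
    return bytesWritten;
  } catch (err) {
    // Close first: on Windows, truncating a file that is still open in append
    // mode fails, so release the descriptor, then truncate by path back to the
    // last known-good offset.
    await handle.close().catch(() => undefined); // swallow close errors so the rollback truncate still runs
    await truncate(path, offset);
    throw err;
  }
}
```

In the actual store the descriptor is long-lived and cached on `appendExtent.fd`, which is why the diff also clears that field and marks the extent `Idle` before the next append reuses it.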
42 changes: 27 additions & 15 deletions tests/blob/fsStore.test.ts
@@ -12,27 +12,39 @@ describe("FSExtentStore", () => {
   const metadataStore: IExtentMetadataStore = mock<IExtentMetadataStore>();
   metadataStore.getExtentLocationId = () => Promise.resolve("Default");
 
+  async function readIntoString(readable: NodeJS.ReadableStream): Promise<string> {
+    const chunks: Buffer[] = [];
+    for await (const chunk of readable) {
+      chunks.push(chunk as Buffer);
+    }
+    const buffer = Buffer.concat(chunks);
+    return buffer.toString();
+  }
+
   it("should handle input stream error gracefully during appendExtent @loki", async () => {
     const store = new FSExtentStore(metadataStore, DEFAULT_BLOB_PERSISTENCE_ARRAY, logger);
     await store.init();
 
+    // Write a valid stream to the store.
+    const stream1 = Readable.from("First", { objectMode: false });
+    const extent1 = await store.appendExtent(stream1);
+    assert.strictEqual(extent1.offset, 0);
+    assert.strictEqual(extent1.count, 5);
+
     // A null value within the Readable.from array causes the stream to emit an error.
-    const stream1 = Readable.from(["deadbeef", null], { objectMode: false });
-    await assert.rejects(store.appendExtent(stream1));
+    const stream2 = Readable.from(["deadbeef", null], { objectMode: false });
+    await assert.rejects(store.appendExtent(stream2));
 
-    // Write a valid stream to the store.
-    const stream2 = Readable.from("Test", { objectMode: false });
-    const extent = await store.appendExtent(stream2);
-    assert.strictEqual(extent.offset, 0);
-    assert.strictEqual(extent.count, 4);
+    // Write another valid stream to the store.
+    const stream3 = Readable.from("Test", { objectMode: false });
+    const extent3 = await store.appendExtent(stream3);
+    assert.strictEqual(extent3.offset, 5);
+    assert.strictEqual(extent3.count, 4);
 
-    // Check that the extent is readable.
-    let readable = await store.readExtent(extent);
-    const chunks: Buffer[] = [];
-    for await (const chunk of readable) {
-      chunks.push(chunk as Buffer);
-    }
-    const data = Buffer.concat(chunks);
-    assert.strictEqual(data.toString(), "Test");
+    // Check that the extents are readable.
+    let readable1 = await store.readExtent(extent1);
+    assert.strictEqual(await readIntoString(readable1), "First");
+    let readable3 = await store.readExtent(extent3);
+    assert.strictEqual(await readIntoString(readable3), "Test");
   });
 });
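The reworked test leans on two Node stream behaviors, shown in the standalone sketch below (`drain` and `demo` are illustrative names, not part of the PR): `Readable.from` pushes a plain string as a single chunk, and a `null` element in the source iterable makes the resulting stream emit an error, which is how the test forces `appendExtent` to reject. `drain` mirrors the `readIntoString` helper the test adds.

```typescript
import * as assert from "assert";
import { Readable } from "stream";

// Collect all chunks and decode them as a single string; rejects if the
// stream emits an error.
async function drain(readable: NodeJS.ReadableStream): Promise<string> {
  const chunks: Buffer[] = [];
  for await (const chunk of readable) {
    chunks.push(chunk as Buffer);
  }
  return Buffer.concat(chunks).toString();
}

async function demo(): Promise<void> {
  // A string source is pushed as one chunk, so "First" drains to 5 bytes.
  assert.strictEqual(await drain(Readable.from("First", { objectMode: false })), "First");

  // A null element in the source iterable destroys the stream with an error,
  // which is the failure mode the test injects into appendExtent.
  const broken = Readable.from(["deadbeef", null], { objectMode: false });
  await assert.rejects(drain(broken));
}

demo().catch((err) => {
  console.error(err);
  process.exit(1);
});
```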