From 81577838c668d1698422453c9b9bf5c40b2d145e Mon Sep 17 00:00:00 2001 From: David Worms Date: Mon, 26 Aug 2024 11:59:02 +0200 Subject: [PATCH] build: prettify and lint staged files --- .husky/pre-commit | 1 + package.json | 10 ++++- packages/csv-generate/README.md | 44 +++++++++--------- packages/csv-generate/package.json | 9 +++- packages/csv-parse/README.md | 62 ++++++++++++-------------- packages/csv-parse/package.json | 9 +++- packages/csv-stringify/README.md | 37 ++++++++------- packages/csv-stringify/package.json | 9 +++- packages/csv/README.md | 61 +++++++++++++------------ packages/csv/package.json | 9 +++- packages/stream-transform/README.md | 62 +++++++++++++------------- packages/stream-transform/package.json | 4 ++ 12 files changed, 175 insertions(+), 142 deletions(-) create mode 100644 .husky/pre-commit diff --git a/.husky/pre-commit b/.husky/pre-commit new file mode 100644 index 00000000..1fb913e3 --- /dev/null +++ b/.husky/pre-commit @@ -0,0 +1 @@ +npm run lint:staged diff --git a/package.json b/package.json index 835aa871..35e56292 100644 --- a/package.json +++ b/package.json @@ -6,7 +6,12 @@ "cz-conventional-changelog": "^3.3.0", "glob": "^11.0.0", "husky": "^9.1.5", - "lerna": "^8.1.8" + "lerna": "^8.1.8", + "lint-staged": "^15.2.9" + }, + "lint-staged": { + "*.js": "npm run lint:fix", + "*.md": "prettier -w" }, "repository": { "type": "git", @@ -18,7 +23,8 @@ "publish": "lerna publish from-git --yes", "lint:check": "lerna run lint:check", "lint:fix": "lerna run lint:fix", - "pretest": "npm run lint", + "lint:staged": "npx lint-staged", + "pretest": "npm run build", "test": "lerna run test", "test:legacy": "lerna run test:legacy", "version": "lerna version" diff --git a/packages/csv-generate/README.md b/packages/csv-generate/README.md index 4fd74b87..c6fdcbc3 100644 --- a/packages/csv-generate/README.md +++ b/packages/csv-generate/README.md @@ -1,4 +1,3 @@ - # CSV generator for Node.js and the web [![Build Status](https://img.shields.io/github/actions/workflow/status/adaltas/node-csv/nodejs.yml?branch=master)](https://github.com/adaltas/node-csv/actions) @@ -9,19 +8,19 @@ The [`csv-generate` package](https://csv.js.org/generate/) provides a flexible g ## Documentation -* [Project homepage](https://csv.js.org/generate/) -* [API](https://csv.js.org/generate/api/) -* [Options](https://csv.js.org/generate/options/) -* [Examples](https://csv.js.org/generate/examples/) +- [Project homepage](https://csv.js.org/generate/) +- [API](https://csv.js.org/generate/api/) +- [Options](https://csv.js.org/generate/options/) +- [Examples](https://csv.js.org/generate/examples/) ## Main features -* Scalable `stream.Readable` implementation -* random or pseudo-random seed based generation -* Idempotence with the "seed" option -* User-defined value generation -* Multiple types of values (integer, boolean, dates, ...) -* MIT License +- Scalable `stream.Readable` implementation +- random or pseudo-random seed based generation +- Idempotence with the "seed" option +- User-defined value generation +- Multiple types of values (integer, boolean, dates, ...) +- MIT License ## Usage @@ -34,8 +33,8 @@ Use the callback and sync APIs for simplicity or the stream based API for scalab The [API](https://csv.js.org/generate/api/) is available in multiple flavors. This example illustrates the stream API. 
```js -import { generate } from 'csv-generate'; -import assert from 'assert'; +import { generate } from "csv-generate"; +import assert from "assert"; const records = []; // Initialize the generator @@ -43,30 +42,31 @@ generate({ seed: 1, objectMode: true, columns: 2, - length: 2 + length: 2, }) // Use the readable stream api to consume generated records - .on('readable', function(){ - let record; while((record = this.read()) !== null){ + .on("readable", function () { + let record; + while ((record = this.read()) !== null) { records.push(record); } }) // Catch any error - .on('error', function(err){ + .on("error", function (err) { console.error(err); }) // Test that the generated records matched the expected records - .on('end', function(){ + .on("end", function () { assert.deepEqual(records, [ - [ 'OMH', 'ONKCHhJmjadoA' ], - [ 'D', 'GeACHiN' ] + ["OMH", "ONKCHhJmjadoA"], + ["D", "GeACHiN"], ]); }); ``` ## Development -Tests are executed with [Mocha](https://mochajs.org/). To install it, simple run `npm install` followed by `npm test`. It will install mocha and its dependencies in your project "node_modules" directory and run the test suite. The tests run against the CoffeeScript source files. +Tests are executed with [Mocha](https://mochajs.org/). To install it, simple run `npm install` followed by `npm test`. It will install mocha and its dependencies in your project "node_modules" directory and run the test suite. The tests run against the CoffeeScript source files. To generate the JavaScript files, run `npm run coffee`. @@ -76,4 +76,4 @@ The test suite is run online with [Travis](https://travis-ci.org/#!/adaltas/node The project is sponsored by [Adaltas](https://www.adaltas.com), an Big Data consulting firm based in Paris, France. -* David Worms: +- David Worms: diff --git a/packages/csv-generate/package.json b/packages/csv-generate/package.json index d4c83e74..dcf8cc18 100644 --- a/packages/csv-generate/package.json +++ b/packages/csv-generate/package.json @@ -90,6 +90,10 @@ "throw-deprecation": false, "timeout": 40000 }, + "lint-staged": { + "*.js": "npm run lint:fix", + "*.md": "prettier -w" + }, "repository": { "type": "git", "url": "https://github.com/adaltas/node-csv.git", @@ -100,8 +104,9 @@ "build:rollup": "npx rollup -c", "build:ts": "cp lib/index.d.ts dist/cjs/index.d.cts && cp lib/stream.d.ts dist/cjs/stream.d.cts && cp lib/sync.d.ts dist/cjs/sync.d.cts && cp lib/*.ts dist/esm", "postbuild:ts": "find dist/cjs -name '*.d.cts' -exec sh -c \"sed -i \"s/\\.js'/\\.cjs'/g\" {} || sed -i '' \"s/\\.js'/\\.cjs'/g\" {}\" \\;", - "lint:check": "eslint && tsc --noEmit true", - "lint:fix": "eslint --fix && tsc --noEmit true", + "lint:check": "eslint", + "lint:fix": "eslint --fix", + "lint:ts": "tsc --noEmit true", "preversion": "npm run build && git add dist", "pretest": "npm run build", "test": "mocha 'test/**/*.{coffee,ts}'", diff --git a/packages/csv-parse/README.md b/packages/csv-parse/README.md index e6ad762e..180e320d 100644 --- a/packages/csv-parse/README.md +++ b/packages/csv-parse/README.md @@ -1,36 +1,35 @@ - # CSV parser for Node.js and the web [![Build Status](https://img.shields.io/github/actions/workflow/status/adaltas/node-csv/nodejs.yml?branch=master)](https://github.com/adaltas/node-csv/actions) [![NPM](https://img.shields.io/npm/dm/csv-parse)](https://www.npmjs.com/package/csv-parse) [![NPM](https://img.shields.io/npm/v/csv-parse)](https://www.npmjs.com/package/csv-parse) - + The [`csv-parse` package](https://csv.js.org/parse/) is a parser converting CSV text input into 
arrays or objects. It is part of the [CSV project](https://csv.js.org/). It implements the Node.js [`stream.Transform` API](http://nodejs.org/api/stream.html#stream_class_stream_transform). It also provides a simple callback-based API for convenience. It is both extremely easy to use and powerful. It was first released in 2010 and is used against big data sets by a large community. ## Documentation -* [Project homepage](https://csv.js.org/parse/) -* [API](https://csv.js.org/parse/api/) -* [Options](https://csv.js.org/parse/options/) -* [Info properties](https://csv.js.org/parse/info/) -* [Common errors](https://csv.js.org/parse/errors/) -* [Examples](https://csv.js.org/project/examples/) +- [Project homepage](https://csv.js.org/parse/) +- [API](https://csv.js.org/parse/api/) +- [Options](https://csv.js.org/parse/options/) +- [Info properties](https://csv.js.org/parse/info/) +- [Common errors](https://csv.js.org/parse/errors/) +- [Examples](https://csv.js.org/project/examples/) ## Main features -* Flexible with lot of [options](https://csv.js.org/parse/options/) -* Multiple [distributions](https://csv.js.org/parse/distributions/): Node.js, Web, ECMAScript modules and CommonJS -* Follow the Node.js streaming API -* Simplicity with the optional callback API -* Support delimiters, quotes, escape characters and comments -* Line breaks discovery -* Support big datasets -* Complete test coverage and lot of samples for inspiration -* No external dependencies -* Work nicely with the [csv-generate](https://csv.js.org/generate/), [stream-transform](https://csv.js.org/transform/) and [csv-stringify](https://csv.js.org/stringify/) packages -* MIT License +- Flexible with lot of [options](https://csv.js.org/parse/options/) +- Multiple [distributions](https://csv.js.org/parse/distributions/): Node.js, Web, ECMAScript modules and CommonJS +- Follow the Node.js streaming API +- Simplicity with the optional callback API +- Support delimiters, quotes, escape characters and comments +- Line breaks discovery +- Support big datasets +- Complete test coverage and lot of samples for inspiration +- No external dependencies +- Work nicely with the [csv-generate](https://csv.js.org/generate/), [stream-transform](https://csv.js.org/transform/) and [csv-stringify](https://csv.js.org/stringify/) packages +- MIT License ## Usage @@ -43,34 +42,31 @@ Use the callback and sync APIs for simplicity or the stream based API for scalab The [API](https://csv.js.org/parse/api/) is available in multiple flavors. This example illustrates the stream API. 
```js -import assert from 'assert'; -import { parse } from 'csv-parse'; +import assert from "assert"; +import { parse } from "csv-parse"; const records = []; // Initialize the parser const parser = parse({ - delimiter: ':' + delimiter: ":", }); // Use the readable stream api to consume records -parser.on('readable', function(){ +parser.on("readable", function () { let record; while ((record = parser.read()) !== null) { records.push(record); } }); // Catch any error -parser.on('error', function(err){ +parser.on("error", function (err) { console.error(err.message); }); // Test that the parsed records matched the expected records -parser.on('end', function(){ - assert.deepStrictEqual( - records, - [ - [ 'root','x','0','0','root','/root','/bin/bash' ], - [ 'someone','x','1022','1022','','/home/someone','/bin/bash' ] - ] - ); +parser.on("end", function () { + assert.deepStrictEqual(records, [ + ["root", "x", "0", "0", "root", "/root", "/bin/bash"], + ["someone", "x", "1022", "1022", "", "/home/someone", "/bin/bash"], + ]); }); // Write data to the stream parser.write("root:x:0:0:root:/root:/bin/bash\n"); @@ -83,4 +79,4 @@ parser.end(); The project is sponsored by [Adaltas](https://www.adaltas.com), an Big Data consulting firm based in Paris, France. -* David Worms: +- David Worms: diff --git a/packages/csv-parse/package.json b/packages/csv-parse/package.json index 10707fa3..68b83988 100644 --- a/packages/csv-parse/package.json +++ b/packages/csv-parse/package.json @@ -103,6 +103,10 @@ "throw-deprecation": false, "timeout": 40000 }, + "lint-staged": { + "*.js": "npm run lint:fix", + "*.md": "prettier -w" + }, "repository": { "type": "git", "url": "https://github.com/adaltas/node-csv.git", @@ -113,8 +117,9 @@ "build:rollup": "npx rollup -c", "build:ts": "cp lib/index.d.ts dist/cjs/index.d.cts && cp lib/sync.d.ts dist/cjs/sync.d.cts && cp lib/*.ts dist/esm", "postbuild:ts": "find dist/cjs -name '*.d.cts' -exec sh -c \"sed -i \"s/\\.js'/\\.cjs'/g\" {} || sed -i '' \"s/\\.js'/\\.cjs'/g\" {}\" \\;", - "lint:check": "eslint && tsc --noEmit true", - "lint:fix": "eslint --fix && tsc --noEmit true", + "lint:check": "eslint", + "lint:fix": "eslint --fix", + "lint:ts": "tsc --noEmit true", "preversion": "npm run build && git add dist", "pretest": "npm run build", "test": "mocha 'test/**/*.{coffee,ts}'", diff --git a/packages/csv-stringify/README.md b/packages/csv-stringify/README.md index 5c195dd9..cd1afebb 100644 --- a/packages/csv-stringify/README.md +++ b/packages/csv-stringify/README.md @@ -1,4 +1,3 @@ - # CSV stringifier for Node.js and the web [![Build Status](https://img.shields.io/github/actions/workflow/status/adaltas/node-csv/nodejs.yml?branch=master)](https://github.com/adaltas/node-csv/actions) @@ -9,21 +8,21 @@ The [`csv-stringify` package](https://csv.js.org/stringify/) is a stringifier co ## Documentation -* [Project homepage](https://csv.js.org/stringify/) -* [API](https://csv.js.org/stringify/api/) -* [Options](https://csv.js.org/stringify/options/) -* [Examples](https://csv.js.org/stringify/examples/) +- [Project homepage](https://csv.js.org/stringify/) +- [API](https://csv.js.org/stringify/api/) +- [Options](https://csv.js.org/stringify/options/) +- [Examples](https://csv.js.org/stringify/examples/) ## Main features -* Follow the Node.js streaming API -* Simplicity with the optional callback API -* Support for custom formatters, delimiters, quotes, escape characters and header -* Support big datasets -* Complete test coverage and samples for inspiration -* Only 1 external dependency -* 
to be used conjointly with `csv-generate`, `csv-parse` and `stream-transform` -* MIT License +- Follow the Node.js streaming API +- Simplicity with the optional callback API +- Support for custom formatters, delimiters, quotes, escape characters and header +- Support big datasets +- Complete test coverage and samples for inspiration +- Only 1 external dependency +- to be used conjointly with `csv-generate`, `csv-parse` and `stream-transform` +- MIT License ## Usage @@ -36,15 +35,15 @@ The module is built on the Node.js Stream API. Use the callback and sync APIs fo The [API](https://csv.js.org/stringify/api/) is available in multiple flavors. This example illustrates the sync API. ```js -import { stringify } from 'csv-stringify/sync'; -import assert from 'assert'; +import { stringify } from "csv-stringify/sync"; +import assert from "assert"; const output = stringify([ - [ '1', '2', '3', '4' ], - [ 'a', 'b', 'c', 'd' ] + ["1", "2", "3", "4"], + ["a", "b", "c", "d"], ]); -assert.equal(output, '1,2,3,4\na,b,c,d\n'); +assert.equal(output, "1,2,3,4\na,b,c,d\n"); ``` ## Development @@ -59,4 +58,4 @@ The test suite is run online with [Travis](https://travis-ci.org/#!/adaltas/node The project is sponsored by [Adaltas](https://www.adaltas.com), an Big Data consulting firm based in Paris, France. -* David Worms: +- David Worms: diff --git a/packages/csv-stringify/package.json b/packages/csv-stringify/package.json index 83d09507..461aa012 100644 --- a/packages/csv-stringify/package.json +++ b/packages/csv-stringify/package.json @@ -80,6 +80,10 @@ "throw-deprecation": false, "timeout": 40000 }, + "lint-staged": { + "*.js": "npm run lint:fix", + "*.md": "prettier -w" + }, "repository": { "type": "git", "url": "https://github.com/adaltas/node-csv.git", @@ -90,8 +94,9 @@ "build:rollup": "npx rollup -c", "build:ts": "cp lib/index.d.ts dist/cjs/index.d.cts && cp lib/sync.d.ts dist/cjs/sync.d.cts && cp lib/*.ts dist/esm", "postbuild:ts": "find dist/cjs -name '*.d.cts' -exec sh -c \"sed -i \"s/\\.js'/\\.cjs'/g\" {} || sed -i '' \"s/\\.js'/\\.cjs'/g\" {}\" \\;", - "lint:check": "eslint && tsc --noEmit true", - "lint:fix": "eslint --fix && tsc --noEmit true", + "lint:check": "eslint", + "lint:fix": "eslint --fix", + "lint:ts": "tsc --noEmit true", "preversion": "npm run build && git add dist", "pretest": "npm run build", "test": "mocha 'test/**/*.{coffee,ts}'", diff --git a/packages/csv/README.md b/packages/csv/README.md index 9b677b39..59d156c7 100644 --- a/packages/csv/README.md +++ b/packages/csv/README.md @@ -1,4 +1,3 @@ - # CSV for Node.js and the web [![Build Status](https://img.shields.io/github/actions/workflow/status/adaltas/node-csv/nodejs.yml?branch=master)](https://github.com/adaltas/node-csv/actions) @@ -10,16 +9,16 @@ It has been tested and used by a large community over the years and should be co This package exposes 4 packages: -* [`csv-generate`](https://csv.js.org/generate/) +- [`csv-generate`](https://csv.js.org/generate/) ([GitHub](https://github.com/adaltas/node-csv/tree/master/packages/csv-generate)), a flexible generator of CSV string and Javascript objects. -* [`csv-parse`](https://csv.js.org/parse/) +- [`csv-parse`](https://csv.js.org/parse/) ([GitHub](https://github.com/adaltas/node-csv/tree/master/packages/csv-parse)), a parser converting CSV text into arrays or objects. 
-* [`csv-stringify`](https://csv.js.org/stringify/) +- [`csv-stringify`](https://csv.js.org/stringify/) ([GitHub](https://github.com/adaltas/node-csv/tree/master/packages/csv-stringify)), a stringifier converting records into a CSV text. -* [`stream-transform`](https://csv.js.org/transform/) +- [`stream-transform`](https://csv.js.org/transform/) ([GitHub](https://github.com/adaltas/node-csv/tree/master/packages/stream-transform)), a transformation framework. @@ -39,30 +38,36 @@ This example uses the Stream API to create a processing pipeline. ```js // Import the package -import * as csv from '../lib/index.js'; +import * as csv from "../lib/index.js"; // Run the pipeline csv -// Generate 20 records + // Generate 20 records .generate({ - delimiter: '|', - length: 20 + delimiter: "|", + length: 20, }) -// Transform CSV data into records - .pipe(csv.parse({ - delimiter: '|' - })) -// Transform each value into uppercase - .pipe(csv.transform((record) => { - return record.map((value) => { - return value.toUpperCase(); - }); - })) -// Convert objects into a stream - .pipe(csv.stringify({ - quoted: true - })) -// Print the CSV stream to stdout + // Transform CSV data into records + .pipe( + csv.parse({ + delimiter: "|", + }), + ) + // Transform each value into uppercase + .pipe( + csv.transform((record) => { + return record.map((value) => { + return value.toUpperCase(); + }); + }), + ) + // Convert objects into a stream + .pipe( + csv.stringify({ + quoted: true, + }), + ) + // Print the CSV stream to stdout .pipe(process.stdout); ``` @@ -76,10 +81,10 @@ Read the documentation of the child projects for additional information. The project is sponsored by [Adaltas](https://www.adaltas.com), an Big Data consulting firm based in Paris, France. -* David Worms: +- David Worms: ## Related projects -* Pavel Kolesnikov "ya-csv": -* Chris Williams "node-csv": -* Mat Holt "PapaParse": +- Pavel Kolesnikov "ya-csv": +- Chris Williams "node-csv": +- Mat Holt "PapaParse": diff --git a/packages/csv/package.json b/packages/csv/package.json index 771a6998..a3136951 100644 --- a/packages/csv/package.json +++ b/packages/csv/package.json @@ -99,6 +99,10 @@ "throw-deprecation": false, "timeout": 40000 }, + "lint-staged": { + "*.js": "npm run lint:fix", + "*.md": "prettier -w" + }, "repository": { "type": "git", "url": "https://github.com/adaltas/node-csv.git", @@ -109,8 +113,9 @@ "build:rollup": "npx rollup -c", "build:ts": "cp lib/index.d.ts dist/cjs/index.d.cts && cp lib/sync.d.ts dist/cjs/sync.d.cts && cp lib/*.ts dist/esm", "postbuild:ts": "find dist/cjs -name '*.d.cts' -exec sh -c \"sed -i \"s/\\.js'/\\.cjs'/g\" {} || sed -i '' \"s/\\.js'/\\.cjs'/g\" {}\" \\;", - "lint:check": "eslint && tsc --noEmit true", - "lint:fix": "eslint --fix && tsc --noEmit true", + "lint:check": "eslint", + "lint:fix": "eslint --fix", + "lint:ts": "tsc --noEmit true", "preversion": "npm run build && git add dist", "pretest": "npm run build", "test": "mocha 'test/**/*.{coffee,ts}'", diff --git a/packages/stream-transform/README.md b/packages/stream-transform/README.md index e0b650f3..8ea95160 100644 --- a/packages/stream-transform/README.md +++ b/packages/stream-transform/README.md @@ -1,4 +1,3 @@ - # Stream transformation for Node.js and the web [![Build Status](https://img.shields.io/github/actions/workflow/status/adaltas/node-csv/nodejs.yml?branch=master)](https://github.com/adaltas/node-csv/actions) @@ -11,25 +10,25 @@ The Node.js [`stream.Transform` API](http://nodejs.org/api/stream.html#stream_cl ## Documentation -* [Project 
homepage](https://csv.js.org/transform/) -* [API](https://csv.js.org/transform/api/) -* [Options](https://csv.js.org/transform/options/) -* [Handler](https://csv.js.org/transform/handler/) -* [State properties](https://csv.js.org/transform/state/) -* [Examples](https://csv.js.org/transform/examples/) +- [Project homepage](https://csv.js.org/transform/) +- [API](https://csv.js.org/transform/api/) +- [Options](https://csv.js.org/transform/options/) +- [Handler](https://csv.js.org/transform/handler/) +- [State properties](https://csv.js.org/transform/state/) +- [Examples](https://csv.js.org/transform/examples/) ## Main features -* Extends the native Node.js [transform stream API](http://nodejs.org/api/stream.html#stream_class_stream_transform) -* Simplicity with the optional callback and sync API -* Pipe transformations between readable and writable streams -* Synchronous versus asynchronous user functions -* Sequential and parallel execution -* Accept object, array or JSON as input and output -* Sequential or user-defined concurrent execution -* Skip and multiply records -* Alter or clone input records -* MIT License +- Extends the native Node.js [transform stream API](http://nodejs.org/api/stream.html#stream_class_stream_transform) +- Simplicity with the optional callback and sync API +- Pipe transformations between readable and writable streams +- Synchronous versus asynchronous user functions +- Sequential and parallel execution +- Accept object, array or JSON as input and output +- Sequential or user-defined concurrent execution +- Skip and multiply records +- Alter or clone input records +- MIT License ## Usage @@ -42,20 +41,23 @@ The module is built on the Node.js Stream API. Use the callback and sync APIs fo The [API](https://csv.js.org/transform/api/) is available in multiple flavors. This example illustrates the sync API. ```js -import { transform } from 'stream-transform/sync'; -import assert from 'assert'; - -const records = transform([ - [ 'a', 'b', 'c', 'd' ], - [ '1', '2', '3', '4' ] -], function(record){ - record.push(record.shift()); - return record; -}); +import { transform } from "stream-transform/sync"; +import assert from "assert"; + +const records = transform( + [ + ["a", "b", "c", "d"], + ["1", "2", "3", "4"], + ], + function (record) { + record.push(record.shift()); + return record; + }, +); assert.deepEqual(records, [ - [ 'b', 'c', 'd', 'a' ], - [ '2', '3', '4', '1' ] + ["b", "c", "d", "a"], + ["2", "3", "4", "1"], ]); ``` @@ -71,4 +73,4 @@ The test suite is run online with [Travis](http://travis-ci.org/wdavidw/node-str The project is sponsored by [Adaltas](https://www.adaltas.com), an Big Data consulting firm based in Paris, France. -* David Worms: +- David Worms: diff --git a/packages/stream-transform/package.json b/packages/stream-transform/package.json index 06dc90e0..2de39204 100644 --- a/packages/stream-transform/package.json +++ b/packages/stream-transform/package.json @@ -81,6 +81,10 @@ "throw-deprecation": false, "timeout": 40000 }, + "lint-staged": { + "*.js": "npm run lint:fix", + "*.md": "prettier -w" + }, "repository": { "type": "git", "url": "https://github.com/adaltas/node-csv.git",
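
The pre-commit flow introduced by this patch chains Husky and lint-staged. Below is a minimal sketch of how to exercise it locally; it assumes Husky's hook installation is already active in the clone (for example via an existing install/`prepare` step, which this patch does not touch), and the staged file name is only an example.

```sh
# Install devDependencies, including husky ^9 and the newly added lint-staged ^15.
npm install

# The committed hook only delegates to the root npm script:
cat .husky/pre-commit
# -> npm run lint:staged   (which runs "npx lint-staged")

# lint-staged reads the "lint-staged" map from package.json and applies each
# command to the matching *staged* files only:
#   "*.js": "npm run lint:fix"   # eslint --fix via the packages' lint:fix scripts
#   "*.md": "prettier -w"        # Prettier rewrites the staged Markdown in place

# Example: stage a Markdown file and run the same check the hook would trigger.
git add packages/csv-parse/README.md
npx lint-staged
```

With nothing staged, `npx lint-staged` should simply report that no staged files match a configured task, so commits that touch no `*.js` or `*.md` files are not slowed down by the hook.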