diff --git a/.travis.yml b/.travis.yml
deleted file mode 100644
index 8be0d76..0000000
--- a/.travis.yml
+++ /dev/null
@@ -1,7 +0,0 @@
-sudo: false
-language: node_js
-script: npm run ci
-
-cache:
-  directories:
-  - node_modules
diff --git a/CHANGELOG.md b/CHANGELOG.md
index a916c96..2021bb1 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,9 @@
 # Changelog
 
+## v4.1.0 (27/09/2021)
+
+* Now checks for field name format.
+
 ## v4.0.1 (12/02/2021)
 
 * Support more formats for table ID utility.
diff --git a/README.md b/README.md
index 1e68d4d..226e6ce 100644
--- a/README.md
+++ b/README.md
@@ -1,7 +1,7 @@
 # jsonschema-bigquery
 
 [![npm](https://img.shields.io/npm/v/jsonschema-bigquery.svg)](https://www.npmjs.com/package/jsonschema-bigquery)
-[![Build Status](https://travis-ci.com/thedumbterminal/jsonschema-bigquery.svg?branch=master)](https://travis-ci.com/github/thedumbterminal/jsonschema-bigquery)
+[![Node.js CI](https://github.com/thedumbterminal/jsonschema-bigquery/actions/workflows/main.yml/badge.svg)](https://github.com/thedumbterminal/jsonschema-bigquery/actions/workflows/main.yml)
 
 Convert JSON schema to Google BigQuery schema
 
diff --git a/package.json b/package.json
index 9becea4..f78f3ff 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
 {
   "name": "jsonschema-bigquery",
-  "version": "4.0.1",
+  "version": "4.1.0",
   "description": "Convert JSON schema to Google BigQuery schema",
   "main": "src/converter.js",
   "scripts": {
diff --git a/src/converter.js b/src/converter.js
index 0034a01..90ae5c2 100644
--- a/src/converter.js
+++ b/src/converter.js
@@ -14,6 +14,8 @@ const JSON_SCHEMA_TO_BIGQUERY_TYPE_DICT = {
 
 const OFS = ['allOf', 'anyOf', 'oneOf']
 
+const BIGQUERY_FIELD_NAME_REGEXP = /^[a-z_][a-z0-9_]*$/i
+
 converter._merge_property = (merge_type, property_name, destination_value, source_value) => {
   // Merges two properties.
   let destination_list
@@ -112,10 +114,14 @@ converter._merge_dicts = (merge_type, dest_dict, source_dict) => {
 }
 
 converter._scalar = (name, type, mode, description) => {
+  if(!name.match(BIGQUERY_FIELD_NAME_REGEXP)){
+    throw new SchemaError(`Invalid field name: ${name}`)
+  }
+
   const result = {
-    name: name,
-    type: type,
-    mode: mode
+    name,
+    type,
+    mode
   }
 
   if (description) {
@@ -160,16 +166,8 @@ converter._object = (name, node, mode) => {
       return field != null
     })
 
-    result = {
-      name: name,
-      type: 'RECORD',
-      mode: mode,
-      fields: fields
-    }
-
-    if(node.description){
-      result.description = node.description
-    }
+    result = converter._scalar(name, 'RECORD', mode, node.description)
+    result.fields = fields
 
   } catch (e) {
     if(!converter._options.continueOnError){
diff --git a/test/unit/converter.js b/test/unit/converter.js
index 3f901c7..5001a7f 100644
--- a/test/unit/converter.js
+++ b/test/unit/converter.js
@@ -208,4 +208,26 @@
       })
     })
   })
+
+  describe('_scalar()', () => {
+    context('with an invalid field', () => {
+      it('throws an error', () => {
+        assert.throws(() => {
+          converter._scalar('123test', 'STRING', 'NULLABLE')
+        }, /Invalid field name: 123test/)
+      })
+    })
+  })
+
+  describe('_scalar()', () => {
+    context('with a valid field', () => {
+      it('returns a bigquery field object', () => {
+        assert.deepStrictEqual(converter._scalar('test123', 'STRING', 'NULLABLE'), {
+          mode: 'NULLABLE',
+          name: 'test123',
+          type: 'STRING'
+        })
+      })
+    })
+  })
 })