diff --git a/CHANGELOG.md b/CHANGELOG.md index 7b099384..0d0df2d6 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,14 @@ All notable changes to the z/OS FTP Plug-in for Zowe CLI will be documented in this file. +## Recent Changes + +- Enhancement: Added APIs and corresponding CLI commands to support the Copy Dataset feature in the Zowe Explorer ZFTP extension. +- Enhancement: Added an API to allow dataset allocation while cloning attributes from another dataset (`allocateLikeDataSet`). +- Enhancement: Added a CLI command option to expose the Allocate-Like functionality (`zowe zftp alloc ds "new" --like "old"`). +- Enhancement: Added an API to allow copying dataset and dataset member contents (`copyDataSet`). +- Enhancement: Added a CLI command to expose the copying-dataset/member functionality (`zowe zftp copy ds "FROM(optional)" "TO(optional)"`). + ## `2.1.2` - Updated the `zos-node-accessor` package to 1.0.14 for technical currency. diff --git a/__tests__/__integration__/cli/allocate/data-set/__snapshots__/cli.allocate.data-set.test.ts.snap b/__tests__/__integration__/cli/allocate/data-set/__snapshots__/cli.allocate.data-set.test.ts.snap index 889e3037..80cb9178 100644 --- a/__tests__/__integration__/cli/allocate/data-set/__snapshots__/cli.allocate.data-set.test.ts.snap +++ b/__tests__/__integration__/cli/allocate/data-set/__snapshots__/cli.allocate.data-set.test.ts.snap @@ -34,6 +34,13 @@ exports[`allocate data set command should display allocate data set help 1`] = ` LRECL=326 BLKSIZE=23472\\". For the list of possible DCB parameters, visit https://github.com/IBM/zos-node-accessor/tree/1.0.x#allocate. + REQUIRED OPTIONS + ---------------- + + --like (string) + + Dataset name to copy the attributes from. 
+ FTP CONNECTION OPTIONS ---------------------- @@ -145,13 +152,18 @@ exports[`allocate data set command should display allocate data set help 1`] = ` $ zowe zos-ftp allocate data-set \\"IBMUSER.DATASET\\" --dcb \\"PDSTYPE=PDS\\" + - Allocate a dataset \\"IBMUSER.NEW.DATASET\\" with the same + attributes as \\"IBMUSER.ORIGINAL.DATASET\\": + + $ zowe zos-ftp allocate data-set \\"IBMUSER.NEW.DATASET\\" --like \\"IBMUSER.ORIGINAL.DATASET\\" + ================z/OS FTP DELETE DATA-SET HELP WITH RFJ=========== { \\"success\\": true, \\"exitCode\\": 0, \\"message\\": \\"The help was constructed for command: data-set.\\", - \\"stdout\\": \\"\\\\n COMMAND NAME\\\\n ------------\\\\n\\\\n data-set | ds\\\\n\\\\n DESCRIPTION\\\\n -----------\\\\n\\\\n Allocate a sequential dataset or partitioned dataset with '--dcb \\\\\\"PDSTYPE=PDS\\\\\\"'\\\\n\\\\n USAGE\\\\n -----\\\\n\\\\n zowe zos-ftp allocate data-set [options]\\\\n\\\\n POSITIONAL ARGUMENTS\\\\n --------------------\\\\n\\\\n datasetName\\\\t\\\\t (string)\\\\n\\\\n The dataset name you'd like to allocate.\\\\n\\\\n OPTIONS\\\\n -------\\\\n\\\\n --dcb (string)\\\\n\\\\n DCB parameters for dataset allocation. It's space separated like \\\\\\"RECFM=FB\\\\n LRECL=326 BLKSIZE=23472\\\\\\". 
For the list of possible DCB parameters, visit\\\\n https://github.com/IBM/zos-node-accessor/tree/1.0.x#allocate.\\\\n\\\\n FTP CONNECTION OPTIONS\\\\n ----------------------\\\\n\\\\n --host | -H (string)\\\\n\\\\n The hostname or IP address of the z/OS server to connect to.\\\\n\\\\n --port | -P (number)\\\\n\\\\n The port of the z/OS FTP server.\\\\n\\\\n Default value: 21\\\\n\\\\n --user | -u (string)\\\\n\\\\n Username for authentication on z/OS\\\\n\\\\n --password | -p | --pass | --pw (string)\\\\n\\\\n Password to authenticate to FTP.\\\\n\\\\n --secure-ftp (boolean)\\\\n\\\\n Set to true for both control and data connection encryption, 'control' for\\\\n control connection encryption only, or 'implicit' for implicitly encrypted\\\\n control connection (this mode is deprecated in modern times, but usually uses\\\\n port 990). Note: Unfortunately, this plugin's functionality only works with FTP\\\\n and FTPS, not 'SFTP' which is FTP over SSH.\\\\n\\\\n Default value: true\\\\n\\\\n --connection-timeout | --ct (number)\\\\n\\\\n How long (in milliseconds) to wait for the control connection to be established.\\\\n\\\\n Default value: 10000\\\\n\\\\n TLS / SECURE CONNECTION OPTIONS\\\\n -------------------------------\\\\n\\\\n --reject-unauthorized | --ru (boolean)\\\\n\\\\n Reject self-signed certificates. Only specify this if you are connecting to a\\\\n secure FTP instance.\\\\n\\\\n --server-name | --sn (string)\\\\n\\\\n Server name for the SNI (Server Name Indication) TLS extension. 
Only specify if\\\\n you are connecting securely\\\\n\\\\n PROFILE OPTIONS\\\\n ---------------\\\\n\\\\n --zftp-profile | --zftp-p (string)\\\\n\\\\n The name of a (zftp) profile to load for this command execution.\\\\n\\\\n --base-profile | --base-p (string)\\\\n\\\\n The name of a (base) profile to load for this command execution.\\\\n\\\\n BASE CONNECTION OPTIONS\\\\n -----------------------\\\\n\\\\n --token-type | --tt (string)\\\\n\\\\n The type of token to get and use for the API. Omit this option to use the\\\\n default token type, which is provided by 'zowe auth login'.\\\\n\\\\n --token-value | --tv (string)\\\\n\\\\n The value of the token to pass to the API.\\\\n\\\\n --cert-file (local file path)\\\\n\\\\n The file path to a certificate file to use for authentication\\\\n\\\\n --cert-key-file (local file path)\\\\n\\\\n The file path to a certificate key file to use for authentication\\\\n\\\\n GLOBAL OPTIONS\\\\n --------------\\\\n\\\\n --show-inputs-only (boolean)\\\\n\\\\n Show command inputs and do not run the command\\\\n\\\\n --response-format-json | --rfj (boolean)\\\\n\\\\n Produce JSON formatted data from a command\\\\n\\\\n --help | -h (boolean)\\\\n\\\\n Display help text\\\\n\\\\n --help-web | --hw (boolean)\\\\n\\\\n Display HTML help in browser\\\\n\\\\n EXAMPLES\\\\n --------\\\\n\\\\n - Allocate a sequential dataset \\\\\\"IBMUSER.DATASET\\\\\\":\\\\n\\\\n $ zowe zos-ftp allocate data-set \\\\\\"IBMUSER.DATASET\\\\\\"\\\\n\\\\n - Allocate a partitioned dataset \\\\\\"IBMUSER.DATASET\\\\\\":\\\\n\\\\n $ zowe zos-ftp allocate data-set \\\\\\"IBMUSER.DATASET\\\\\\" --dcb \\\\\\"PDSTYPE=PDS\\\\\\"\\\\n\\\\n\\", + \\"stdout\\": \\"\\\\n COMMAND NAME\\\\n ------------\\\\n\\\\n data-set | ds\\\\n\\\\n DESCRIPTION\\\\n -----------\\\\n\\\\n Allocate a sequential dataset or partitioned dataset with '--dcb \\\\\\"PDSTYPE=PDS\\\\\\"'\\\\n\\\\n USAGE\\\\n -----\\\\n\\\\n zowe zos-ftp allocate data-set [options]\\\\n\\\\n POSITIONAL 
ARGUMENTS\\\\n --------------------\\\\n\\\\n datasetName\\\\t\\\\t (string)\\\\n\\\\n The dataset name you'd like to allocate.\\\\n\\\\n OPTIONS\\\\n -------\\\\n\\\\n --dcb (string)\\\\n\\\\n DCB parameters for dataset allocation. It's space separated like \\\\\\"RECFM=FB\\\\n LRECL=326 BLKSIZE=23472\\\\\\". For the list of possible DCB parameters, visit\\\\n https://github.com/IBM/zos-node-accessor/tree/1.0.x#allocate.\\\\n\\\\n REQUIRED OPTIONS\\\\n ----------------\\\\n\\\\n --like (string)\\\\n\\\\n Dataset name to copy the attributes from.\\\\n\\\\n FTP CONNECTION OPTIONS\\\\n ----------------------\\\\n\\\\n --host | -H (string)\\\\n\\\\n The hostname or IP address of the z/OS server to connect to.\\\\n\\\\n --port | -P (number)\\\\n\\\\n The port of the z/OS FTP server.\\\\n\\\\n Default value: 21\\\\n\\\\n --user | -u (string)\\\\n\\\\n Username for authentication on z/OS\\\\n\\\\n --password | -p | --pass | --pw (string)\\\\n\\\\n Password to authenticate to FTP.\\\\n\\\\n --secure-ftp (boolean)\\\\n\\\\n Set to true for both control and data connection encryption, 'control' for\\\\n control connection encryption only, or 'implicit' for implicitly encrypted\\\\n control connection (this mode is deprecated in modern times, but usually uses\\\\n port 990). Note: Unfortunately, this plugin's functionality only works with FTP\\\\n and FTPS, not 'SFTP' which is FTP over SSH.\\\\n\\\\n Default value: true\\\\n\\\\n --connection-timeout | --ct (number)\\\\n\\\\n How long (in milliseconds) to wait for the control connection to be established.\\\\n\\\\n Default value: 10000\\\\n\\\\n TLS / SECURE CONNECTION OPTIONS\\\\n -------------------------------\\\\n\\\\n --reject-unauthorized | --ru (boolean)\\\\n\\\\n Reject self-signed certificates. Only specify this if you are connecting to a\\\\n secure FTP instance.\\\\n\\\\n --server-name | --sn (string)\\\\n\\\\n Server name for the SNI (Server Name Indication) TLS extension. 
Only specify if\\\\n you are connecting securely\\\\n\\\\n PROFILE OPTIONS\\\\n ---------------\\\\n\\\\n --zftp-profile | --zftp-p (string)\\\\n\\\\n The name of a (zftp) profile to load for this command execution.\\\\n\\\\n --base-profile | --base-p (string)\\\\n\\\\n The name of a (base) profile to load for this command execution.\\\\n\\\\n BASE CONNECTION OPTIONS\\\\n -----------------------\\\\n\\\\n --token-type | --tt (string)\\\\n\\\\n The type of token to get and use for the API. Omit this option to use the\\\\n default token type, which is provided by 'zowe auth login'.\\\\n\\\\n --token-value | --tv (string)\\\\n\\\\n The value of the token to pass to the API.\\\\n\\\\n --cert-file (local file path)\\\\n\\\\n The file path to a certificate file to use for authentication\\\\n\\\\n --cert-key-file (local file path)\\\\n\\\\n The file path to a certificate key file to use for authentication\\\\n\\\\n GLOBAL OPTIONS\\\\n --------------\\\\n\\\\n --show-inputs-only (boolean)\\\\n\\\\n Show command inputs and do not run the command\\\\n\\\\n --response-format-json | --rfj (boolean)\\\\n\\\\n Produce JSON formatted data from a command\\\\n\\\\n --help | -h (boolean)\\\\n\\\\n Display help text\\\\n\\\\n --help-web | --hw (boolean)\\\\n\\\\n Display HTML help in browser\\\\n\\\\n EXAMPLES\\\\n --------\\\\n\\\\n - Allocate a sequential dataset \\\\\\"IBMUSER.DATASET\\\\\\":\\\\n\\\\n $ zowe zos-ftp allocate data-set \\\\\\"IBMUSER.DATASET\\\\\\"\\\\n\\\\n - Allocate a partitioned dataset \\\\\\"IBMUSER.DATASET\\\\\\":\\\\n\\\\n $ zowe zos-ftp allocate data-set \\\\\\"IBMUSER.DATASET\\\\\\" --dcb \\\\\\"PDSTYPE=PDS\\\\\\"\\\\n\\\\n - Allocate a dataset \\\\\\"IBMUSER.NEW.DATASET\\\\\\" with the same\\\\n attributes as \\\\\\"IBMUSER.ORIGINAL.DATASET\\\\\\":\\\\n\\\\n $ zowe zos-ftp allocate data-set \\\\\\"IBMUSER.NEW.DATASET\\\\\\" --like \\\\\\"IBMUSER.ORIGINAL.DATASET\\\\\\"\\\\n\\\\n\\", \\"stderr\\": \\"\\", - \\"data\\": \\"\\\\n COMMAND NAME\\\\n 
------------\\\\n\\\\n data-set | ds\\\\n\\\\n DESCRIPTION\\\\n -----------\\\\n\\\\n Allocate a sequential dataset or partitioned dataset with '--dcb \\\\\\"PDSTYPE=PDS\\\\\\"'\\\\n\\\\n USAGE\\\\n -----\\\\n\\\\n zowe zos-ftp allocate data-set [options]\\\\n\\\\n POSITIONAL ARGUMENTS\\\\n --------------------\\\\n\\\\n datasetName\\\\t\\\\t (string)\\\\n\\\\n The dataset name you'd like to allocate.\\\\n\\\\n OPTIONS\\\\n -------\\\\n\\\\n --dcb (string)\\\\n\\\\n DCB parameters for dataset allocation. It's space separated like \\\\\\"RECFM=FB\\\\n LRECL=326 BLKSIZE=23472\\\\\\". For the list of possible DCB parameters, visit\\\\n https://github.com/IBM/zos-node-accessor/tree/1.0.x#allocate.\\\\n\\\\n FTP CONNECTION OPTIONS\\\\n ----------------------\\\\n\\\\n --host | -H (string)\\\\n\\\\n The hostname or IP address of the z/OS server to connect to.\\\\n\\\\n --port | -P (number)\\\\n\\\\n The port of the z/OS FTP server.\\\\n\\\\n Default value: 21\\\\n\\\\n --user | -u (string)\\\\n\\\\n Username for authentication on z/OS\\\\n\\\\n --password | -p | --pass | --pw (string)\\\\n\\\\n Password to authenticate to FTP.\\\\n\\\\n --secure-ftp (boolean)\\\\n\\\\n Set to true for both control and data connection encryption, 'control' for\\\\n control connection encryption only, or 'implicit' for implicitly encrypted\\\\n control connection (this mode is deprecated in modern times, but usually uses\\\\n port 990). Note: Unfortunately, this plugin's functionality only works with FTP\\\\n and FTPS, not 'SFTP' which is FTP over SSH.\\\\n\\\\n Default value: true\\\\n\\\\n --connection-timeout | --ct (number)\\\\n\\\\n How long (in milliseconds) to wait for the control connection to be established.\\\\n\\\\n Default value: 10000\\\\n\\\\n TLS / SECURE CONNECTION OPTIONS\\\\n -------------------------------\\\\n\\\\n --reject-unauthorized | --ru (boolean)\\\\n\\\\n Reject self-signed certificates. 
Only specify this if you are connecting to a\\\\n secure FTP instance.\\\\n\\\\n --server-name | --sn (string)\\\\n\\\\n Server name for the SNI (Server Name Indication) TLS extension. Only specify if\\\\n you are connecting securely\\\\n\\\\n PROFILE OPTIONS\\\\n ---------------\\\\n\\\\n --zftp-profile | --zftp-p (string)\\\\n\\\\n The name of a (zftp) profile to load for this command execution.\\\\n\\\\n --base-profile | --base-p (string)\\\\n\\\\n The name of a (base) profile to load for this command execution.\\\\n\\\\n BASE CONNECTION OPTIONS\\\\n -----------------------\\\\n\\\\n --token-type | --tt (string)\\\\n\\\\n The type of token to get and use for the API. Omit this option to use the\\\\n default token type, which is provided by 'zowe auth login'.\\\\n\\\\n --token-value | --tv (string)\\\\n\\\\n The value of the token to pass to the API.\\\\n\\\\n --cert-file (local file path)\\\\n\\\\n The file path to a certificate file to use for authentication\\\\n\\\\n --cert-key-file (local file path)\\\\n\\\\n The file path to a certificate key file to use for authentication\\\\n\\\\n GLOBAL OPTIONS\\\\n --------------\\\\n\\\\n --show-inputs-only (boolean)\\\\n\\\\n Show command inputs and do not run the command\\\\n\\\\n --response-format-json | --rfj (boolean)\\\\n\\\\n Produce JSON formatted data from a command\\\\n\\\\n --help | -h (boolean)\\\\n\\\\n Display help text\\\\n\\\\n --help-web | --hw (boolean)\\\\n\\\\n Display HTML help in browser\\\\n\\\\n EXAMPLES\\\\n --------\\\\n\\\\n - Allocate a sequential dataset \\\\\\"IBMUSER.DATASET\\\\\\":\\\\n\\\\n $ zowe zos-ftp allocate data-set \\\\\\"IBMUSER.DATASET\\\\\\"\\\\n\\\\n - Allocate a partitioned dataset \\\\\\"IBMUSER.DATASET\\\\\\":\\\\n\\\\n $ zowe zos-ftp allocate data-set \\\\\\"IBMUSER.DATASET\\\\\\" --dcb \\\\\\"PDSTYPE=PDS\\\\\\"\\\\n\\\\n\\" + \\"data\\": \\"\\\\n COMMAND NAME\\\\n ------------\\\\n\\\\n data-set | ds\\\\n\\\\n DESCRIPTION\\\\n -----------\\\\n\\\\n Allocate a sequential 
dataset or partitioned dataset with '--dcb \\\\\\"PDSTYPE=PDS\\\\\\"'\\\\n\\\\n USAGE\\\\n -----\\\\n\\\\n zowe zos-ftp allocate data-set [options]\\\\n\\\\n POSITIONAL ARGUMENTS\\\\n --------------------\\\\n\\\\n datasetName\\\\t\\\\t (string)\\\\n\\\\n The dataset name you'd like to allocate.\\\\n\\\\n OPTIONS\\\\n -------\\\\n\\\\n --dcb (string)\\\\n\\\\n DCB parameters for dataset allocation. It's space separated like \\\\\\"RECFM=FB\\\\n LRECL=326 BLKSIZE=23472\\\\\\". For the list of possible DCB parameters, visit\\\\n https://github.com/IBM/zos-node-accessor/tree/1.0.x#allocate.\\\\n\\\\n REQUIRED OPTIONS\\\\n ----------------\\\\n\\\\n --like (string)\\\\n\\\\n Dataset name to copy the attributes from.\\\\n\\\\n FTP CONNECTION OPTIONS\\\\n ----------------------\\\\n\\\\n --host | -H (string)\\\\n\\\\n The hostname or IP address of the z/OS server to connect to.\\\\n\\\\n --port | -P (number)\\\\n\\\\n The port of the z/OS FTP server.\\\\n\\\\n Default value: 21\\\\n\\\\n --user | -u (string)\\\\n\\\\n Username for authentication on z/OS\\\\n\\\\n --password | -p | --pass | --pw (string)\\\\n\\\\n Password to authenticate to FTP.\\\\n\\\\n --secure-ftp (boolean)\\\\n\\\\n Set to true for both control and data connection encryption, 'control' for\\\\n control connection encryption only, or 'implicit' for implicitly encrypted\\\\n control connection (this mode is deprecated in modern times, but usually uses\\\\n port 990). Note: Unfortunately, this plugin's functionality only works with FTP\\\\n and FTPS, not 'SFTP' which is FTP over SSH.\\\\n\\\\n Default value: true\\\\n\\\\n --connection-timeout | --ct (number)\\\\n\\\\n How long (in milliseconds) to wait for the control connection to be established.\\\\n\\\\n Default value: 10000\\\\n\\\\n TLS / SECURE CONNECTION OPTIONS\\\\n -------------------------------\\\\n\\\\n --reject-unauthorized | --ru (boolean)\\\\n\\\\n Reject self-signed certificates. 
Only specify this if you are connecting to a\\\\n secure FTP instance.\\\\n\\\\n --server-name | --sn (string)\\\\n\\\\n Server name for the SNI (Server Name Indication) TLS extension. Only specify if\\\\n you are connecting securely\\\\n\\\\n PROFILE OPTIONS\\\\n ---------------\\\\n\\\\n --zftp-profile | --zftp-p (string)\\\\n\\\\n The name of a (zftp) profile to load for this command execution.\\\\n\\\\n --base-profile | --base-p (string)\\\\n\\\\n The name of a (base) profile to load for this command execution.\\\\n\\\\n BASE CONNECTION OPTIONS\\\\n -----------------------\\\\n\\\\n --token-type | --tt (string)\\\\n\\\\n The type of token to get and use for the API. Omit this option to use the\\\\n default token type, which is provided by 'zowe auth login'.\\\\n\\\\n --token-value | --tv (string)\\\\n\\\\n The value of the token to pass to the API.\\\\n\\\\n --cert-file (local file path)\\\\n\\\\n The file path to a certificate file to use for authentication\\\\n\\\\n --cert-key-file (local file path)\\\\n\\\\n The file path to a certificate key file to use for authentication\\\\n\\\\n GLOBAL OPTIONS\\\\n --------------\\\\n\\\\n --show-inputs-only (boolean)\\\\n\\\\n Show command inputs and do not run the command\\\\n\\\\n --response-format-json | --rfj (boolean)\\\\n\\\\n Produce JSON formatted data from a command\\\\n\\\\n --help | -h (boolean)\\\\n\\\\n Display help text\\\\n\\\\n --help-web | --hw (boolean)\\\\n\\\\n Display HTML help in browser\\\\n\\\\n EXAMPLES\\\\n --------\\\\n\\\\n - Allocate a sequential dataset \\\\\\"IBMUSER.DATASET\\\\\\":\\\\n\\\\n $ zowe zos-ftp allocate data-set \\\\\\"IBMUSER.DATASET\\\\\\"\\\\n\\\\n - Allocate a partitioned dataset \\\\\\"IBMUSER.DATASET\\\\\\":\\\\n\\\\n $ zowe zos-ftp allocate data-set \\\\\\"IBMUSER.DATASET\\\\\\" --dcb \\\\\\"PDSTYPE=PDS\\\\\\"\\\\n\\\\n - Allocate a dataset \\\\\\"IBMUSER.NEW.DATASET\\\\\\" with the same\\\\n attributes as \\\\\\"IBMUSER.ORIGINAL.DATASET\\\\\\":\\\\n\\\\n $ zowe zos-ftp 
allocate data-set \\\\\\"IBMUSER.NEW.DATASET\\\\\\" --like \\\\\\"IBMUSER.ORIGINAL.DATASET\\\\\\"\\\\n\\\\n\\" }" `; diff --git a/__tests__/__unit__/FTPBase.Handler.test.ts b/__tests__/__unit__/FTPBase.Handler.test.ts new file mode 100644 index 00000000..4d844b27 --- /dev/null +++ b/__tests__/__unit__/FTPBase.Handler.test.ts @@ -0,0 +1,69 @@ +/* + * This program and the accompanying materials are made available under the terms of the + * Eclipse Public License v2.0 which accompanies this distribution, and is available at + * https://www.eclipse.org/legal/epl-v20.html + * + * SPDX-License-Identifier: EPL-2.0 + * + * Copyright Contributors to the Zowe Project. + * + */ + +import { ImperativeError } from "@zowe/imperative"; +import { FTPConfig } from "../../src/api/FTPConfig"; +import { FTPBaseHandler } from "../../src/FTPBase.Handler"; +import { IFTPHandlerParams } from "../../src/IFTPHandlerParams"; + +describe("FTP Base Handler", () => { + const spyProcessFTP = jest.fn(); + const handler = new class extends FTPBaseHandler { + public async processFTP(additionalParameters: IFTPHandlerParams): Promise { + spyProcessFTP(additionalParameters); + } + }; + + afterEach(() => { + jest.resetAllMocks(); + }); + + it("should call the processFTP abstract method", async () => { + const connection = {close: jest.fn()}; + const parms: any = {}; + jest.spyOn(FTPConfig, "connectFromArguments").mockResolvedValue(connection); + + await handler.process(parms); + + expect(parms.connection).toEqual(connection); + expect(spyProcessFTP).toHaveBeenCalledWith(parms); + expect(connection.close).toHaveBeenCalled(); + }); + + const processErrors = async (errorMessage: string, expectedError: string): Promise => { + jest.spyOn(FTPConfig, "connectFromArguments").mockRejectedValue({message: errorMessage}); + + let caughtError; + try { + await handler.process({} as any); + } catch (err) { + caughtError = err; + } + + expect(caughtError).toBeInstanceOf(ImperativeError); + 
expect(caughtError.message).toContain(expectedError); + return true; + }; + it("should handle errors when an unknown error happens when establishing a connection", async () => { + const ret = await processErrors("test error", "test error"); + expect(ret).toBe(true); + }); + + it("should handle errors when credentials prevent us from creating a connection", async () => { + const ret = await processErrors("PASS command failed", "Username or password are not valid or expired."); + expect(ret).toBe(true); + }); + + it("should handle errors when using the MKD command", async () => { + const ret = await processErrors("requests a nonexistent partitioned data set. Use MKD command to create it", "Use allocate command"); + expect(ret).toBe(true); + }); +}); diff --git a/__tests__/__unit__/api/AbstractTemplatedJCL.test.ts b/__tests__/__unit__/api/AbstractTemplatedJCL.test.ts new file mode 100644 index 00000000..b8a7eca6 --- /dev/null +++ b/__tests__/__unit__/api/AbstractTemplatedJCL.test.ts @@ -0,0 +1,61 @@ +/* + * This program and the accompanying materials are made available under the terms of the + * Eclipse Public License v2.0 which accompanies this distribution, and is available at + * https://www.eclipse.org/legal/epl-v20.html + * + * SPDX-License-Identifier: EPL-2.0 + * + * Copyright Contributors to the Zowe Project. 
+ * + */ + +import { IO, Logger } from "@zowe/imperative"; +import { AbstractTemplatedJCL } from "../../../src/api/AbstractTemplatedJCL"; + +describe("FTP Base Handler", () => { + const testClass = new class extends AbstractTemplatedJCL { + public DEFAULT_TEMPLATE = "TEST"; + public test() { + this.log; + } + }; + + afterEach(() => { + jest.resetAllMocks(); + }); + + it("should define a logger used by the CoreUtils.addCarriageReturns method", () => { + const loggerSpy = jest.spyOn(Logger, "getAppLogger").mockImplementation(); + testClass.test(); + expect(loggerSpy).toHaveBeenCalled(); + }); + + it("should call the processFTP abstract method", () => { + const spyIO = jest.spyOn(IO, "readFileSync").mockReturnValue(Buffer.from("JCL")); + const spyLoggerDebug = jest.fn(); + const loggerSpy = jest.spyOn(Logger, "getAppLogger").mockReturnValue({debug: spyLoggerDebug} as any); + + const response = testClass.getJcl("/test/path", "sub"); + + expect(loggerSpy).toHaveBeenCalled(); + expect(spyIO).toHaveBeenCalledWith("/test/path"); + expect(response).toEqual("JCL\r\nTEST"); + expect(spyLoggerDebug).toHaveBeenCalled(); + }); + + it("should call the processFTP abstract method with a custome template", () => { + const spyIO = jest.spyOn(IO, "readFileSync"); + const spyLoggerDebug = jest.fn(); + const loggerSpy = jest.spyOn(Logger, "getAppLogger").mockReturnValue({debug: spyLoggerDebug} as any); + spyIO.mockReturnValueOnce(Buffer.from("JCL")); + spyIO.mockReturnValueOnce(Buffer.from("TEMPLATE")); + + const response = testClass.getJcl("/test/path", "sub", "/test/template"); + + expect(loggerSpy).toHaveBeenCalled(); + expect(spyIO).toHaveBeenCalledWith("/test/path"); + expect(spyIO).toHaveBeenCalledWith("/test/template"); + expect(response).toEqual("JCL\r\nTEMPLATE"); + expect(spyLoggerDebug).toHaveBeenCalled(); + }); +}); diff --git a/__tests__/__unit__/api/CoreUtils.test.ts b/__tests__/__unit__/api/CoreUtils.test.ts index 81ab5ad8..858d5c86 100644 --- 
a/__tests__/__unit__/api/CoreUtils.test.ts +++ b/__tests__/__unit__/api/CoreUtils.test.ts @@ -9,19 +9,76 @@ * */ +import { ImperativeError } from "@zowe/imperative"; import { CoreUtils } from "../../../src/api/CoreUtils"; +import { PassThrough } from "stream"; describe("CoreUtils", () => { + afterEach(() => { + jest.restoreAllMocks(); + }); + + describe("readStdin", () => { + it("should read from given stream", async () => { + const stream = new PassThrough(); + + process.nextTick(() => stream.emit("data", Buffer.from("test"))); + process.nextTick(() => stream.end()); + process.nextTick(() => stream.destroy()); + const response = await CoreUtils.readStdin(stream); + + expect(response.toString()).toEqual("test"); + }); - it("should return the correct non-secure config", () => { - let s = CoreUtils.addCarriageReturns("hello\r\nworld"); - expect(s).toBe("hello\r\nworld"); + it("should handle error from stdin", async () => { + const stream = new PassThrough(); + process.nextTick(() => stream.emit("error", Buffer.from("TEST_ERROR"))); + process.nextTick(() => stream.end()); + process.nextTick(() => stream.destroy()); - s = CoreUtils.addCarriageReturns("hello\nworld"); - expect(s).toBe("hello\r\nworld"); + let caughtError: ImperativeError; + try { + await CoreUtils.readStdin(stream); + throw "FAILURE"; + } catch(err) { + caughtError = err; + } - s = CoreUtils.addCarriageReturns("hello world"); - expect(s).toBe("hello world"); + expect((caughtError as any).msg).toEqual("Error encountered while reading from stdin"); + expect(caughtError.causeErrors.toString()).toEqual("TEST_ERROR"); + }); }); + describe("sleep", () => { + it("should sleep for 20 seconds", () => { + jest.useFakeTimers(); + jest.spyOn(global, 'setTimeout'); + + CoreUtils.sleep(20000); + + expect(setTimeout).toHaveBeenCalledTimes(1); + expect(setTimeout).toHaveBeenLastCalledWith(expect.any(Function), 20000); + }); + }); + + describe("getProfileMeta", () => { + it("should return profile metadata", async () => 
{ + const profiles = require("../../../src/imperative").profiles; + const response = await CoreUtils.getProfileMeta(); + expect(response).toEqual(profiles); + }); + }); + + describe("addCarriageReturns", () => { + it("should return the correct non-secure config", () => { + let s = CoreUtils.addCarriageReturns("hello\r\nworld"); + expect(s).toBe("hello\r\nworld"); + + s = CoreUtils.addCarriageReturns("hello\nworld"); + expect(s).toBe("hello\r\nworld"); + + s = CoreUtils.addCarriageReturns("hello world"); + expect(s).toBe("hello world"); + }); + }); }); diff --git a/__tests__/__unit__/api/CreateDataset.test.ts b/__tests__/__unit__/api/CreateDataset.test.ts new file mode 100644 index 00000000..186ddbd4 --- /dev/null +++ b/__tests__/__unit__/api/CreateDataset.test.ts @@ -0,0 +1,98 @@ +/* + * This program and the accompanying materials are made available under the terms of the + * Eclipse Public License v2.0 which accompanies this distribution, and is available at + * https://www.eclipse.org/legal/epl-v20.html + * + * SPDX-License-Identifier: EPL-2.0 + * + * Copyright Contributors to the Zowe Project. 
+ * + */ + +import { ImperativeError, IO } from "@zowe/imperative"; +import { CreateDataSetTypeEnum } from "@zowe/zos-files-for-zowe-sdk"; +import { CreateDataset } from "../../../src/api/CreateDataset"; +import { JobUtils } from "../../../src/api/JobUtils"; + +describe("CreateDataset", () => { + afterEach(() => { + jest.restoreAllMocks(); + }); + + it("should fail if requested to create a dataset of an unsuported type", async () => { + const connection = {}; + let caughtError: ImperativeError; + try { + await CreateDataset.create(connection, -1, "dsname", "jobCard", {}); + throw "FAILURE"; + } catch(err) { + caughtError = err; + } + expect(caughtError).toBeInstanceOf(ImperativeError); + expect(caughtError.message).toEqual("Unsupported data set type."); + }); + + it("should return succes:false if we failed to create the dataset", async () => { + const connection = { submitJCL: jest.fn() }; + jest.spyOn(IO, "readFileSync").mockReturnValue(Buffer.from("//JOBCARD")); + jest.spyOn(JobUtils, "findJobByID").mockResolvedValue("SOME ERROR" as any); + + // Dummy options to touch all `else` branches + const options: any = { printAttributes: false, size: "1", volser: "dummy", unit: "dummy" }; + const response = await CreateDataset.create(connection, 0, "dsname", "jobCard", options); + + expect(connection.submitJCL.mock.calls[0][0]).toMatchSnapshot(); + expect(response.commandResponse).toContain("Failed to create data set"); + }); + + it("should return succes:false if we failed to create the dataset with additional options", async () => { + const connection = { submitJCL: jest.fn() }; + jest.spyOn(IO, "readFileSync").mockReturnValue(Buffer.from("//JOBCARD")); + jest.spyOn(JobUtils, "findJobByID").mockResolvedValue("SOME ERROR" as any); + + // Dummy options to touch all `other` branches + const options: any = { printAttributes: true, size: "1U", secondary: 1, volser: "dummy", unit: "dummy" }; + const response = await CreateDataset.create(connection, 0, "dsname", "jobCard", 
options); + + expect(connection.submitJCL.mock.calls[0][0]).toMatchSnapshot(); + expect(response.commandResponse).toContain("Failed to create data set"); + }); + + describe("Should create datasets successfully", () => { + const testCreate = async (type: CreateDataSetTypeEnum): Promise => { + const connection = { submitJCL: jest.fn() }; + jest.spyOn(IO, "readFileSync").mockReturnValue(Buffer.from("//JOBCARD")); + jest.spyOn(JobUtils, "findJobByID").mockResolvedValue("RC=0000" as any); + + const response = await CreateDataset.create(connection, type, "dsname", "jobCard"); + + expect(connection.submitJCL.mock.calls[0][0]).toMatchSnapshot(); + expect(response.commandResponse).toContain("Data set created successfully."); + return response.success; + }; + it("partitioned", async () => { + const ret = await testCreate(CreateDataSetTypeEnum.DATA_SET_PARTITIONED); + expect(ret).toBe(true); + }); + + it("sequential", async () => { + const ret = await testCreate(CreateDataSetTypeEnum.DATA_SET_SEQUENTIAL); + expect(ret).toBe(true); + }); + + it("binary", async () => { + const ret = await testCreate(CreateDataSetTypeEnum.DATA_SET_BINARY); + expect(ret).toBe(true); + }); + + it("C", async () => { + const ret = await testCreate(CreateDataSetTypeEnum.DATA_SET_C); + expect(ret).toBe(true); + }); + + it("classic", async () => { + const ret = await testCreate(CreateDataSetTypeEnum.DATA_SET_CLASSIC); + expect(ret).toBe(true); + }); + }); +}); diff --git a/__tests__/__unit__/api/DataSetUtils.test.ts b/__tests__/__unit__/api/DataSetUtils.test.ts new file mode 100644 index 00000000..d00b8338 --- /dev/null +++ b/__tests__/__unit__/api/DataSetUtils.test.ts @@ -0,0 +1,148 @@ +/* + * This program and the accompanying materials are made available under the terms of the + * Eclipse Public License v2.0 which accompanies this distribution, and is available at + * https://www.eclipse.org/legal/epl-v20.html + * + * SPDX-License-Identifier: EPL-2.0 + * + * Copyright Contributors to the Zowe 
Project. + * + */ + +import { ImperativeError } from "@zowe/imperative"; +import { DataSetUtils } from "../../../src/api"; + +describe("DataSetUtils", () => { + afterEach(() => { + jest.restoreAllMocks(); + }); + + describe("listDataSets", () => { + it("should return a list of datasets", async () => { + const expected = [{name: "0"}, {name: "1"}]; + const connection = { + listDataset: jest.fn().mockResolvedValue(expected), + }; + + const response = await DataSetUtils.listDataSets(connection, "dsname"); + + expect(connection.listDataset).toHaveBeenCalledWith("dsname"); + expect(response).toEqual(expected); + }); + }); + + describe("listMembers", () => { + it("should return a list of members", async () => { + const expected = [{name: "0"}, {name: "1"}]; + const connection = { + listDataset: jest.fn().mockResolvedValue(expected), + }; + + const response = await DataSetUtils.listMembers(connection, "dsname"); + + expect(connection.listDataset).toHaveBeenCalledWith("dsname(*)"); + expect(response).toEqual(expected); + }); + }); + + describe("deleteDataSet", () => { + it("should delete a dataset", async () => { + const connection = { deleteDataset: jest.fn() }; + + await DataSetUtils.deleteDataSet(connection, "dsname"); + + expect(connection.deleteDataset).toHaveBeenCalledWith("'dsname'"); + }); + }); + + describe("renameDataSet", () => { + it("should rename a dataset", async () => { + const connection = { rename: jest.fn() }; + + await DataSetUtils.renameDataSet(connection, "old", "new"); + + expect(connection.rename).toHaveBeenCalledWith("old", "new"); + }); + }); + + describe("allocateDataSet", () => { + it("should allocate a dataset", async () => { + const connection = { allocateDataset: jest.fn() }; + + await DataSetUtils.allocateDataSet(connection, "dsname", {dcb: "dcb"}); + + expect(connection.allocateDataset).toHaveBeenCalledWith("dsname", "dcb"); + }); + }); + + describe("allocateLikeDataSet", () => { + it("should allocate a dataset using attributes from another 
dataset", async () => { + const options = { dsorg: "po", recfm: "FB", lrecl: "80" }; + const connection = { + listDataset: jest.fn().mockResolvedValueOnce(undefined).mockResolvedValue([{...options, dsname: "likeDS"}, {dsname: "other"}]), + allocateDataset: jest.fn(), + }; + + const response = await DataSetUtils.allocateLikeDataSet(connection, "newDS", "likeDS", {}); + expect(connection.listDataset).toHaveBeenCalledWith("newDS"); + expect(connection.listDataset).toHaveBeenCalledWith("likeDS"); + expect(connection.allocateDataset).toHaveBeenCalledWith("newDS", options); + expect(response).toEqual(options); + }); + + it("should return the dataset attributes of the new dataset if it already exists", async () => { + const options = { dsorg: "po", recfm: "FB", lrecl: "80" }; + const connection = { + listDataset: jest.fn().mockResolvedValue([{...options, dsname: "likeDS"}, {dsname: "newDs"}]), + allocateDataset: jest.fn(), + }; + + const response = await DataSetUtils.allocateLikeDataSet(connection, "newDS", "likeDS"); + expect(connection.listDataset).toHaveBeenCalledWith("newDS"); + expect(response).toEqual({}); + }); + + it("should throw an error if the LIKE dataset does not exist", async () => { + const connection = { + listDataset: jest.fn().mockResolvedValueOnce(undefined).mockResolvedValue([{dsname: "another.dataset"}]), + }; + + let caughtError: ImperativeError; + try { + await DataSetUtils.allocateLikeDataSet(connection, "newDS", "likeDS"); + throw "FAILURE"; + } catch(err) { + caughtError = err; + } + + expect(caughtError).toBeInstanceOf(ImperativeError); + expect(caughtError.message).toContain("No datasets found"); + }); + }); + + describe("copyDataSet", () => { + it("should copy a dataset while updating the progress bar", async () => { + const connection = { + listDataset: jest.fn().mockResolvedValueOnce(undefined).mockResolvedValue([{dsname: "from"}]), + getDataset: jest.fn().mockResolvedValue("source.content"), + uploadDataset: jest.fn(), + }; + 
jest.spyOn(DataSetUtils, "allocateLikeDataSet").mockResolvedValue({dsorg: "po"}); + + const progress = { start: jest.fn(), worked: jest.fn(), end: jest.fn() }; + await DataSetUtils.copyDataSet(connection, { fromDsn: "from", toDsn: "to", progress }); + + expect(connection.listDataset).toHaveBeenCalledWith("to"); + expect(connection.listDataset).toHaveBeenCalledWith("from"); + expect(connection.getDataset).toHaveBeenCalledWith("'from'", "binary", false); + expect(connection.uploadDataset).toHaveBeenCalledWith("source.content", "'to'", "binary", {dsorg: "po"}); + + //Progress bar info + expect(progress.start).toHaveBeenCalledWith(8, expect.any(String)); + expect(progress.worked).toHaveBeenCalledTimes(8); + expect(progress.worked).toHaveBeenCalledWith(1, expect.any(String)); + expect(progress.worked).toHaveBeenCalledWith(8, expect.any(String)); + expect(progress.end).toHaveBeenCalled(); + }); + }); +}); diff --git a/__tests__/__unit__/api/__snapshots__/CreateDataset.test.ts.snap b/__tests__/__unit__/api/__snapshots__/CreateDataset.test.ts.snap new file mode 100644 index 00000000..d6678acd --- /dev/null +++ b/__tests__/__unit__/api/__snapshots__/CreateDataset.test.ts.snap @@ -0,0 +1,52 @@ +// Jest Snapshot v1, https://goo.gl/fbAQLP + +exports[`CreateDataset Should create datasets successfully C 1`] = ` +"//JOBCARD +//CREATE EXEC PGM=IEFBR14 +//NEWDATA DD DSN=DSNAME,DISP=(NEW,CATLG),SPACE=(CYL,(1)),LRECL=260, +// RECFM=VB,BLKSIZE=32760,DSORG=PO" +`; + +exports[`CreateDataset Should create datasets successfully binary 1`] = ` +"//JOBCARD +//CREATE EXEC PGM=IEFBR14 +//NEWDATA DD DSN=DSNAME,DISP=(NEW,CATLG),SPACE=(CYL,(10)), +// LRECL=27998,RECFM=U,BLKSIZE=27998,DSORG=PO" +`; + +exports[`CreateDataset Should create datasets successfully classic 1`] = ` +"//JOBCARD +//CREATE EXEC PGM=IEFBR14 +//NEWDATA DD DSN=DSNAME,DISP=(NEW,CATLG),SPACE=(CYL,(1)),LRECL=80, +// RECFM=FB,BLKSIZE=6160,DSORG=PO" +`; + +exports[`CreateDataset Should create datasets successfully partitioned 1`] 
= ` +"//JOBCARD +//CREATE EXEC PGM=IEFBR14 +//NEWDATA DD DSN=DSNAME,DISP=(NEW,CATLG),SPACE=(CYL,(1)),LRECL=80, +// RECFM=FB,BLKSIZE=6160,DSORG=PO" +`; + +exports[`CreateDataset Should create datasets successfully sequential 1`] = ` +"//JOBCARD +//CREATE EXEC PGM=IEFBR14 +//NEWDATA DD DSN=DSNAME,DISP=(NEW,CATLG),SPACE=(CYL,(1)),LRECL=80, +// RECFM=FB,BLKSIZE=6160,DSORG=PS" +`; + +exports[`CreateDataset should return succes:false if we failed to create the dataset 1`] = ` +"//JOBCARD +//CREATE EXEC PGM=IEFBR14 +//NEWDATA DD DSN=DSNAME,DISP=(NEW,CATLG),SPACE=(CYL,(1,0)), +// VOLSER=DUMMY,LRECL=27998,RECFM=U,BLKSIZE=27998,UNIT=DUMMY, +// DSORG=PO" +`; + +exports[`CreateDataset should return succes:false if we failed to create the dataset with additional options 1`] = ` +"//JOBCARD +//CREATE EXEC PGM=IEFBR14 +//NEWDATA DD DSN=DSNAME,DISP=(NEW,CATLG),SPACE=(U,(1,1)), +// VOLSER=DUMMY,LRECL=27998,RECFM=U,BLKSIZE=27998,UNIT=DUMMY, +// DSORG=PO" +`; diff --git a/__tests__/__unit__/healthCheck.handler.test.ts b/__tests__/__unit__/healthCheck.handler.test.ts new file mode 100644 index 00000000..76cb072a --- /dev/null +++ b/__tests__/__unit__/healthCheck.handler.test.ts @@ -0,0 +1,17 @@ +/* + * This program and the accompanying materials are made available under the terms of the + * Eclipse Public License v2.0 which accompanies this distribution, and is available at + * https://www.eclipse.org/legal/epl-v20.html + * + * SPDX-License-Identifier: EPL-2.0 + * + * Copyright Contributors to the Zowe Project. 
+ * + */ + +describe("Health Check Handler", () => { + it("should return true", () => { + const healthCheck = require("../../src/healthCheck.handler"); + expect(healthCheck()).toBe(true); + }); +}); diff --git a/package.json b/package.json index 3efe77fe..f060f285 100644 --- a/package.json +++ b/package.json @@ -12,7 +12,7 @@ "description": "Data set, USS, and Jobs functionality via FTP for Zowe CLI", "main": "lib/index.js", "scripts": { - "build": "npm run clean && npm run license && tsc --pretty && npm run lint && npm run checkTestsCompile && npm run circularDependencyCheck", + "build": "npm run clean && npm run license && tsc --pretty && npm run checkTestsCompile && npm run circularDependencyCheck", "clean": "rimraf lib", "license": "node ./scripts/updateLicense.js", "watch": "tsc --pretty --watch", diff --git a/src/FTPProgressHandler.ts b/src/FTPProgressHandler.ts index 11ff36f0..caa0add0 100644 --- a/src/FTPProgressHandler.ts +++ b/src/FTPProgressHandler.ts @@ -29,7 +29,7 @@ export class FTPProgressHandler implements IFTPProgressHandler { this.estimated = estimated; } - public start(total: number): void { + public start(total: number, message?: string): void { this.total = total; this.processed = 0; if (this.estimated) { @@ -38,17 +38,18 @@ export class FTPProgressHandler implements IFTPProgressHandler { this.statusMessage = "Downloaded %d of %d bytes"; } this.task = { - statusMessage: "Starting transfer...", + statusMessage: message ?? "Starting transfer...", percentComplete: 0, stageName: TaskStage.IN_PROGRESS }; this.progress.startBar({ task: this.task }); } - public worked(work: number): void { + public worked(work: number, message?: string): void { this.processed += work; + this.processed = this.processed > this.total ? this.total : this.processed; this.task.percentComplete = PERCETAGE * this.processed / this.total; - this.task.statusMessage = TextUtils.formatMessage(this.statusMessage, this.processed, this.total); + this.task.statusMessage = message ?? 
TextUtils.formatMessage(this.statusMessage, this.processed, this.total); } public end(): void { diff --git a/src/api/CoreUtils.ts b/src/api/CoreUtils.ts index 41ad059b..4468e77f 100644 --- a/src/api/CoreUtils.ts +++ b/src/api/CoreUtils.ts @@ -11,7 +11,6 @@ import { ICommandProfileTypeConfiguration, IImperativeError, Logger } from "@zowe/imperative"; import * as stream from "stream"; -import { isNullOrUndefined } from "util"; /** * The data is transferred in text mode, in which encoding conversion like ASCII/EBCDIC will happen. @@ -82,7 +81,7 @@ export class CoreUtils { const stdinReadError: IImperativeError = { msg: "Error encountered while reading from stdin", causeErrors: error, - additionalDetails: (isNullOrUndefined(error)) ? undefined : error.message + additionalDetails: (error == null) ? undefined : error.message }; reject(stdinReadError); }); diff --git a/src/api/CreateDataset.ts b/src/api/CreateDataset.ts index 85f14946..38861ce3 100644 --- a/src/api/CreateDataset.ts +++ b/src/api/CreateDataset.ts @@ -9,7 +9,6 @@ * */ -import { isNullOrUndefined } from "util"; import { CreateDataSetTypeEnum, CreateDefaults, ICreateDataSetOptions, IZosFilesResponse, ZosFilesMessages } from "@zowe/cli"; import { ImperativeError, ImperativeExpect, TextUtils } from "@zowe/imperative"; import { JobUtils } from "./JobUtils"; @@ -33,8 +32,8 @@ export class CreateDataset extends AbstractTemplatedJCL { let validCmdType = true; // Removes undefined properties - let tempOptions = !isNullOrUndefined(options) ? JSON.parse(JSON.stringify(options)) : {}; - const secondarySpecified = !isNullOrUndefined(tempOptions.secondary); + let tempOptions = options != null ? 
JSON.parse(JSON.stringify(options)) : {}; + const secondarySpecified = tempOptions.secondary != null; // Required ImperativeExpect.toNotBeNullOrUndefined(cmdType, ZosFilesMessages.missingDatasetType.message); @@ -67,14 +66,14 @@ export class CreateDataset extends AbstractTemplatedJCL { throw new ImperativeError({msg: ZosFilesMessages.unsupportedDatasetType.message}); } else { // Handle the size option - if (!isNullOrUndefined(tempOptions.size)) { + if (tempOptions.size != null) { const tAlcunit = tempOptions.size.toString().match(/[a-zA-Z]+/g); - if (!isNullOrUndefined(tAlcunit)) { + if (tAlcunit != null) { tempOptions.alcunit = tAlcunit.join("").toUpperCase(); } const tPrimary = tempOptions.size.toString().match(/[0-9]+/g); - if (!isNullOrUndefined(tPrimary)) { + if (tPrimary != null) { tempOptions.primary = +(tPrimary.join("")); if (!secondarySpecified) { @@ -88,25 +87,25 @@ export class CreateDataset extends AbstractTemplatedJCL { let response = ""; // Handle the print attributes option - if (!isNullOrUndefined(tempOptions.printAttributes)) { + if (tempOptions.printAttributes != null) { if (tempOptions.printAttributes) { - delete tempOptions.printAttributes; response = TextUtils.prettyJson(tempOptions); - } else { - delete tempOptions.printAttributes; } + delete tempOptions.printAttributes; } response = await new CreateDataset().createViaFTP(connection, dataSetName, tempOptions, jobCardFile); if (response.indexOf("RC=0000") >= 0) { - response += "\n" + ZosFilesMessages.dataSetCreatedSuccessfully.message; + return { + success: true, + commandResponse: response + "\n" + ZosFilesMessages.dataSetCreatedSuccessfully.message, + }; } else { - response += "\nFailed to create data set"; + return { + success: false, + commandResponse: response + "\nFailed to create data set", + }; } - return { - success: true, - commandResponse: response - }; } } diff --git a/src/api/DataSetInterface.ts b/src/api/DataSetInterface.ts index 22fb71e0..14c722dc 100644 --- 
a/src/api/DataSetInterface.ts +++ b/src/api/DataSetInterface.ts @@ -73,5 +73,71 @@ export interface IAllocateDataSetOption { dcb?: string; } +/** + * Copy Dataset Options + */ +export interface ICopyDataSetOptions { + /** + * Required: Name of the SOURCE dataset + */ + fromDsn: string; + + /** + * Required: Name of the TARGET dataset + */ + toDsn: string; + + /** + * Optional: Progress indicator object/task + */ + progress?: IFTPProgressHandler; + + /** + * Optional: Indicator to force a replacement + */ + replace?: boolean; +} + +/** + * Detailed allocation options for a dataset + */ +export interface IDataSetDetailedAllocationOptions { + /** + * Dataset volume serial + */ + volume?: string; // Not supported by connection.allocateDataset + + /** + * Dataset record format + */ + recfm?: string; + + /** + * Dataset block size + */ + BLOCKSIze?: string; // Strange mapping + + /** + * Dataset record length + */ + lrecl?: string; + + /** + * Dataset organization (PS vs PO) + */ + dsorg?: string; + + /** + * The following values are not supported by the allocateDataset method of zos-node-accessor@v1 + * However, they are returned as allocation details for a dataset + */ + // BLocks: ds.??? // Strange mapping + Not returned by connection.listDataset + // CYlinders: ds.??? // Strange mapping + Not returned by connection.listDataset + // TRacks: ds.??? // Strange mapping + Not returned by connection.listDataset + // Directory: ds.??? // Strange mapping + Not returned by connection.listDataset + // PRImary: ds.??? // Strange mapping + Not returned by connection.listDataset + // SECondary: ds.??? // Strange mapping + Not returned by connection.listDataset +} + // When DataSetUtilsV2 for zos-node-accessor v2 is ready, alias DataSetUtilsV2 to DataSetUtils. 
// export { DataSetUtils as DataSetUtils }; diff --git a/src/api/DataSetUtils.ts b/src/api/DataSetUtils.ts index 78bd45e2..8cd6cbcb 100644 --- a/src/api/DataSetUtils.ts +++ b/src/api/DataSetUtils.ts @@ -11,9 +11,16 @@ import * as fs from "fs"; -import { IO, Logger } from "@zowe/imperative"; -import { CoreUtils, TRANSFER_TYPE_ASCII, TRANSFER_TYPE_BINARY } from "./CoreUtils"; -import { IAllocateDataSetOption, IDownloadDataSetOption, IUploadDataSetOption, TRACK } from "./DataSetInterface"; +import { ImperativeError, IO, Logger } from "@zowe/imperative"; +import { CoreUtils, TRANSFER_TYPE_ASCII, TRANSFER_TYPE_ASCII_RDW, TRANSFER_TYPE_BINARY, TRANSFER_TYPE_BINARY_RDW } from "./CoreUtils"; +import { + IAllocateDataSetOption, + ICopyDataSetOptions, + IDataSetDetailedAllocationOptions, + IDownloadDataSetOption, + IUploadDataSetOption, + TRACK +} from "./DataSetInterface"; import { StreamUtils } from "./StreamUtils"; export class DataSetUtils { @@ -86,9 +93,9 @@ export class DataSetUtils { * @returns dataset contents in Buffer, if localFile is not provided in option. Otherwise, undefined will be returned. */ public static async downloadDataSet(connection: any, dsn: string, option: IDownloadDataSetOption): Promise { - const files = await connection.listDataset(dsn); - if (files === undefined || files.length === 0) { - throw new Error(`The dataset "${dsn}" doesn't exist.`); + const files = await connection.listDataset(dsn) ?? 
[]; + if (files.length === 0) { + throw new ImperativeError({msg: `The dataset "${dsn}" doesn't exist.`}); + } + const estimatedSize = parseInt(files[0].Used, 10) * TRACK; @@ -147,6 +154,22 @@ await connection.uploadDataset(content, "'" + dsn + "'", transferType, siteparm); } + /** + * Map the allocation details from a dataset returned from the zos-node-accessor package to IDs that it accepts for allocation purposes + * @param ds Descriptor for dataset allocation options + * @returns Object with mapping required for allocation purposes (BLOCKSIze vs blksz) + */ + public static mapAllocationOptions(ds: any): IDataSetDetailedAllocationOptions { + // supported options: https://github.com/IBM/zos-node-accessor/blob/1.0.x/lib/zosAccessor.js#LL122C68-L122C68 + return JSON.parse(JSON.stringify({ + volume: ds.volume, // Not supported by connection.allocateDataset + recfm: ds.recfm, + BLOCKSIze: ds.blksz, // Strange mapping + lrecl: ds.lrecl, + dsorg: ds.dsorg, + })); + } + /** + * Allocate dataset. + * @@ -159,6 +182,136 @@ await connection.allocateDataset(dsn, option.dcb); } + /** + * Allocate a dataset using the properties from another dataset + * + * @param connection zos-node-accessor connection + * @param dsn fully-qualified dataset name without quotes to be allocated + * @param like fully-qualified dataset name without quotes to be modeled after + * @param options dataset allocation options to override the `like` dataset allocation details + * @returns The allocation options used for the new dataset (or the existing dataset's attributes if `dsn` already exists) + */ + public static async allocateLikeDataSet( + connection: any, dsn: string, like: string, options?: IDataSetDetailedAllocationOptions + ): Promise { + + const _getDs = async (dsname: string, mustExist: boolean): Promise => { + const files = await connection.listDataset(dsname); + + this.log.debug("Found %d matching data sets", files?.length ?? 0); + const filteredFiles: any[] = files?.map((file: any) => CoreUtils.addLowerCaseKeysToObject(file)) ?? 
[]; + const ds = filteredFiles.find((file: any) => file.dsname.toUpperCase() === dsname.toUpperCase()); + if (ds == null && mustExist) { + throw new ImperativeError({msg: "No datasets found: " + dsname}); + } + + return ds; + }; + + const newDs = await _getDs(dsn, false); + if (newDs != null) { + this.log.debug("Dataset %s already exists", dsn); + return DataSetUtils.mapAllocationOptions(newDs); + } + + this.log.debug("Allocate data set '%s' with similar attributes to '%s'", dsn, like); + const ds = await _getDs(like, true); + const option = { ...DataSetUtils.mapAllocationOptions(ds), ...(options ?? {})}; + + this.log.debug("Allocation options to be used: %s", JSON.stringify(option)); + await connection.allocateDataset(dsn, option); + return option; + } + + /** + * Helper method to split the dataset name from the member name + * + * @param name fully-qualified dataset name without quotes + * @returns Object separating the dataset name and the member name + */ + public static getDataSet(name: string): {dsn: string, member?: string} { + const parts = name.replace(')', '').split('('); + if (parts.length > 1) { + return { + dsn: parts[0], + member: parts[1] + }; + } else { + return { + dsn: name + }; + } + } + + /** + * Copy Dataset + * + * @param connection zos-node-accessor connection + * @param options All required options for copying a dataset + * @param options.fromDsn fully-qualified SOURCE dataset name without quotes + * @param options.toDsn fully-qualified TARGET dataset name without quotes + * @param options.progress Optional: Task used to indicate the progress of this operation (not used yet) + * @param options.replace Optional: Boolean used to force a dataset or member replacement + */ + public static async copyDataSet(connection: any, options: ICopyDataSetOptions): Promise { + const numberOfServerCalls = 8; + const t = options.progress; + t?.start(numberOfServerCalls, `Copying dataset...`); + + const fromDsn = DataSetUtils.getDataSet(options.fromDsn); + 
this.log.debug("Source: %s", JSON.stringify(fromDsn)); + const toDsn = DataSetUtils.getDataSet(options.toDsn); + this.log.debug("Target: %s", JSON.stringify(toDsn)); + + t?.worked(1, `Checking if target location already exists...`); + if (!options.replace) { + this.log.debug("Verify that the dataset (or member) does not already exist since we aren't allowed to replace it"); + const newDs = await connection.listDataset(toDsn.dsn) ?? []; + t?.worked(1, `Checking if target location already exists...`); + if (newDs.length !== 0 && (await connection.listDataset(options.toDsn) ?? []).length !== 0) { + throw new ImperativeError({msg: `Dataset ${options.toDsn} already exists`}); + } + } + + t?.worked(1, `Checking if the source location exists...`); + const dataset = (await connection.listDataset(fromDsn.member ? fromDsn.dsn : options.fromDsn)) + ?.map((f: any) => CoreUtils.addLowerCaseKeysToObject(f)) ?? []; + if (dataset.length === 0) { + throw new ImperativeError({msg: `The dataset "${fromDsn.dsn}" doesn't exist.`}); + } + + t?.worked(1, `Selecting the transferType...`); + const content = dataset.find((file: any) => file.dsname.toUpperCase() === fromDsn.dsn.toUpperCase()); + this.log.debug("Select the transfer type based on the source dataset RECFM"); + let transferType = fromDsn.member ? TRANSFER_TYPE_ASCII : null; + switch (content.recfm?.[0]) { + case "V": + transferType = TRANSFER_TYPE_BINARY_RDW; + break; + case "D": + transferType = TRANSFER_TYPE_ASCII_RDW; + break; + default: + transferType = transferType ?? 
TRANSFER_TYPE_BINARY; + break; + } + + t?.worked(1, `Downloading contents`); + this.log.debug("Download the contents of the source dataset: %s", JSON.stringify(options.fromDsn)); + const stream = await connection.getDataset("'"+options.fromDsn+"'", transferType, false); + + t?.worked(1, `Creating target location (if required)...`); + this.log.debug("Make sure the new dataset is allocated: %s", JSON.stringify(toDsn.dsn)); + const option = await DataSetUtils.allocateLikeDataSet(connection, toDsn.dsn, fromDsn.dsn, toDsn.member ? {dsorg: "PO"} : {dsorg: "PS"}); + + t?.worked(1, `Uploading contents...`); + this.log.debug("Upload the contents to the new dataset: %s", JSON.stringify(options.toDsn)); + await connection.uploadDataset(stream, "'"+options.toDsn+"'", transferType, option); + + t?.worked(numberOfServerCalls, `Done.`); + t?.end(); + } + private static get log(): Logger { return Logger.getAppLogger(); } diff --git a/src/api/IFTPProgressHandler.ts b/src/api/IFTPProgressHandler.ts index 9f7b6eb8..7033b627 100644 --- a/src/api/IFTPProgressHandler.ts +++ b/src/api/IFTPProgressHandler.ts @@ -16,14 +16,14 @@ export interface IFTPProgressHandler { * * @param total - total work size */ - start(total: number): void; + start(total: number, message?: string): void; /** * Updates how much work is performed, so that the class implementing this interface can update progress bar for example. * * @param work - how much work is performed */ - worked(work: number): void; + worked(work: number, message?: string): void; /** * The FTP task ends, either all work is completed or any error happens. 
diff --git a/src/cli/allocate/Allocate.definition.ts b/src/cli/allocate/Allocate.definition.ts index 300475b8..ddb213f4 100644 --- a/src/cli/allocate/Allocate.definition.ts +++ b/src/cli/allocate/Allocate.definition.ts @@ -18,7 +18,9 @@ const AllocateDefinition: ICommandDefinition = { summary: "Allocate a sequential dataset or partitioned dataset with '--dcb \"PDSTYPE=PDS\"'", description: "Allocate a sequential or partitioned dataset", type: "group", - children: [AllocateDataSetDefinition], + children: [ + AllocateDataSetDefinition, + ], passOn: [ { property: "options", diff --git a/src/cli/allocate/data-set/DataSet.Handler.ts b/src/cli/allocate/data-set/DataSet.Handler.ts index 955ec068..e1f52ac0 100644 --- a/src/cli/allocate/data-set/DataSet.Handler.ts +++ b/src/cli/allocate/data-set/DataSet.Handler.ts @@ -16,13 +16,20 @@ import { DataSetUtils } from "../../../api"; export default class AllocateDataSetHandler extends FTPBaseHandler { public async processFTP(params: IFTPHandlerParams): Promise { + const pResp = params.response; + const pArgs = params.arguments; const options = { - dcb: params.arguments.dcb + dcb: pArgs.dcb }; - await DataSetUtils.allocateDataSet(params.connection, params.arguments.datasetName, options); - - const successMsg = params.response.console.log("Allocated dataset %s successfully!", params.arguments.datasetName); - params.response.data.setMessage(successMsg); + let successMsg: string = ""; + if (pArgs.like) { + await DataSetUtils.allocateLikeDataSet(params.connection, pArgs.datasetName, pArgs.like); + successMsg = pResp.console.log("Allocated dataset %s like %s successfully!", pArgs.datasetName, pArgs.like); + } else { + await DataSetUtils.allocateDataSet(params.connection, pArgs.datasetName, options); + successMsg = pResp.console.log("Allocated dataset %s successfully!", pArgs.datasetName); + } + pResp.data.setMessage(successMsg); this.log.info(successMsg); } } diff --git a/src/cli/allocate/data-set/DataSet.definition.ts 
b/src/cli/allocate/data-set/DataSet.definition.ts index ad56dc26..95f216ea 100644 --- a/src/cli/allocate/data-set/DataSet.definition.ts +++ b/src/cli/allocate/data-set/DataSet.definition.ts @@ -22,11 +22,16 @@ export const AllocateDataSetDefinition: ICommandDefinition = { { description: "Allocate a sequential dataset \"IBMUSER.DATASET\"", options: "\"IBMUSER.DATASET\"" - }, { + }, + { description: "Allocate a partitioned dataset \"IBMUSER.DATASET\"", options: "\"IBMUSER.DATASET\" --dcb \"PDSTYPE=PDS\"" + }, + { + description: "Allocate a dataset \"IBMUSER.NEW.DATASET\" " + + "with the same attributes as \"IBMUSER.ORIGINAL.DATASET\"", + options: "\"IBMUSER.NEW.DATASET\" --like \"IBMUSER.ORIGINAL.DATASET\"" } - ], positionals: [{ name: "datasetName", @@ -42,6 +47,12 @@ export const AllocateDataSetDefinition: ICommandDefinition = { "For the list of possible DCB parameters, " + "visit https://github.com/IBM/zos-node-accessor/tree/1.0.x#allocate.", type: "string" + }, + { + name: "like", aliases: [], + description: "Dataset name to copy the attributes from.", + required: true, + type: "string" } ], profile: diff --git a/src/cli/copy/Copy.definition.ts b/src/cli/copy/Copy.definition.ts new file mode 100644 index 00000000..b7091e51 --- /dev/null +++ b/src/cli/copy/Copy.definition.ts @@ -0,0 +1,36 @@ +/* + * This program and the accompanying materials are made available under the terms of the + * Eclipse Public License v2.0 which accompanies this distribution, and is available at + * https://www.eclipse.org/legal/epl-v20.html + * + * SPDX-License-Identifier: EPL-2.0 + * + * Copyright Contributors to the Zowe Project. 
+ * + */ + +import { ICommandDefinition } from "@zowe/imperative"; +import { CopyDataSetDefinition } from "./data-set/DataSet.definition"; +import { FTPConfig } from "../../api/FTPConfig"; + +const CopyDefinition: ICommandDefinition = { + name: "copy", aliases: ["cp"], + summary: "Copy datasets and dataset member content", + description: "Copy datasets and dataset member content", + type: "group", + children: [ + CopyDataSetDefinition, + ], + passOn: [ + { + property: "options", + value: FTPConfig.FTP_CONNECTION_OPTIONS, + merge: true, + ignoreNodes: [ + {type: "group"} + ] + } + ] +}; + +export = CopyDefinition; diff --git a/src/cli/copy/data-set/DataSet.Handler.ts b/src/cli/copy/data-set/DataSet.Handler.ts new file mode 100644 index 00000000..3da6b0fa --- /dev/null +++ b/src/cli/copy/data-set/DataSet.Handler.ts @@ -0,0 +1,33 @@ +/* + * This program and the accompanying materials are made available under the terms of the + * Eclipse Public License v2.0 which accompanies this distribution, and is available at + * https://www.eclipse.org/legal/epl-v20.html + * + * SPDX-License-Identifier: EPL-2.0 + * + * Copyright Contributors to the Zowe Project. + * + */ + +import { FTPBaseHandler } from "../../../FTPBase.Handler"; +import { IFTPHandlerParams } from "../../../IFTPHandlerParams"; +import { FTPProgressHandler } from "../../../FTPProgressHandler"; +import { DataSetUtils } from "../../../api"; + +export default class DownloadDataSetHandler extends FTPBaseHandler { + public async processFTP(params: IFTPHandlerParams): Promise { + const pResp = params.response; + const pArgs = params.arguments; + const progress = new FTPProgressHandler(params.response.progress, true); + await DataSetUtils.copyDataSet(params.connection, { + fromDsn: pArgs.fromDataSetName, + toDsn: pArgs.toDataSetName, + progress, + replace: pArgs.replace ?? 
false, + }); + + const successMsg = pResp.console.log("Copied dataset %s to %s successfully!", pArgs.fromDataSetName, pArgs.toDataSetName); + this.log.info(successMsg); + params.response.data.setMessage(successMsg); + } +} diff --git a/src/cli/copy/data-set/DataSet.definition.ts b/src/cli/copy/data-set/DataSet.definition.ts new file mode 100644 index 00000000..bc5996ee --- /dev/null +++ b/src/cli/copy/data-set/DataSet.definition.ts @@ -0,0 +1,49 @@ +/* + * This program and the accompanying materials are made available under the terms of the + * Eclipse Public License v2.0 which accompanies this distribution, and is available at + * https://www.eclipse.org/legal/epl-v20.html + * + * SPDX-License-Identifier: EPL-2.0 + * + * Copyright Contributors to the Zowe Project. + * + */ + +import { ICommandDefinition } from "@zowe/imperative"; + +export const CopyDataSetDefinition: ICommandDefinition = { + handler: __dirname + "/DataSet.Handler", + description: "Copy the contents of a z/OS dataset to another z/OS dataset", + type: "command", + name: "data-set", aliases: ["ds"], + summary: "Copy dataset or dataset member content", + examples: [ + { + description: "Copy the sequential data set \"ibmuser.seq.dataset\" to \"ibmuser.another.seq.dataset\"", + options: "\"ibmuser.seq.dataset\" \"ibmuser.another.seq.dataset\"" + }, + ], + positionals: [ + { + name: "fromDataSetName", + description: "The data set (PDS member or physical sequential data set) which you would like to copy the contents from.", + type: "string", + required: true + }, + { + name: "toDataSetName", + description: "The data set (PDS member or physical sequential data set) which you would like to copy the contents to.", + type: "string", + required: true + }, + ], + options: [ + { + name: "replace", + aliases: ["rep"], + description: "Specify this option as true if you wish to replace like-named members in the target dataset", + type: "boolean" + } + ], + profile: { optional: ["zftp"] }, +};