From 510da936c274d852018c9acc28f59c17f90c534f Mon Sep 17 00:00:00 2001
From: Maxime Franco
Date: Wed, 15 Jan 2020 16:38:20 +0100
Subject: [PATCH 1/3] Issue 3: add an indication of whether the tool is
 running in authenticated mode or in basic mode

---
 swaggercheck/_basictests.py | 8 ++++++++
 1 file changed, 8 insertions(+)

diff --git a/swaggercheck/_basictests.py b/swaggercheck/_basictests.py
index 105710a..abeda55 100644
--- a/swaggercheck/_basictests.py
+++ b/swaggercheck/_basictests.py
@@ -48,6 +48,14 @@ def api_conformance_test(
         Fore.BLUE + "Swagger client... " + Fore.GREEN + " ok" + Style.RESET_ALL
     )
 
+    method = " authenticated"
+    if username is None and password is None:
+        method = " basic"
+
+    print(
+        Fore.BLUE + "Authentication method : " + Fore.GREEN + method + Style.RESET_ALL
+    )
+
     fd, watchdog_filename = tempfile.mkstemp()
     os.close(fd)
     os.remove(watchdog_filename)

From d884604b338afcbda3679fd8baca87721fbf1197 Mon Sep 17 00:00:00 2001
From: Maxime Franco
Date: Sat, 18 Jan 2020 20:12:17 +0100
Subject: [PATCH 2/3] Pin hypothesis and fix the authentication-mode check

---
 requirements-dev.txt        | 2 ++
 swaggercheck/_basictests.py | 6 +++---
 2 files changed, 5 insertions(+), 3 deletions(-)

diff --git a/requirements-dev.txt b/requirements-dev.txt
index 4910009..dd0343f 100644
--- a/requirements-dev.txt
+++ b/requirements-dev.txt
@@ -1,5 +1,7 @@
+hypothesis==4.31.0
 black==19.3b0
 coveralls
 pytest==4.4.1
 pytest-cov==2.7.1
 responses==0.5.1
+
diff --git a/swaggercheck/_basictests.py b/swaggercheck/_basictests.py
index abeda55..5749f2d 100644
--- a/swaggercheck/_basictests.py
+++ b/swaggercheck/_basictests.py
@@ -48,9 +48,9 @@ def api_conformance_test(
         Fore.BLUE + "Swagger client... " + Fore.GREEN + " ok" + Style.RESET_ALL
     )
 
-    method = " authenticated"
-    if username is None and password is None:
-        method = " basic"
+    method = " basic"
+    if username is not None and password is not None:
+        method = " authenticated"
 
     print(
         Fore.BLUE + "Authentication method : " + Fore.GREEN + method + Style.RESET_ALL
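
Taken together, PATCH 1/3 and PATCH 2/3 make the startup banner print "authenticated" only when both a username and a password are supplied, and "basic" otherwise (as in the patch, a token-only run is still labelled basic). Below is a minimal standalone sketch of that decision; the helper name and the asserts are illustrative and not part of the patches:

def auth_method(username=None, password=None):
    # Mirrors the banner logic after PATCH 2/3: both credentials must be
    # present for the run to count as authenticated; anything else is basic.
    if username is not None and password is not None:
        return "authenticated"
    return "basic"


assert auth_method() == "basic"
assert auth_method("alice") == "basic"  # password missing
assert auth_method("alice", "s3cret") == "authenticated"
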
From 06d3679363b01862ae04f67f24541e6ddf32bbe7 Mon Sep 17 00:00:00 2001
From: Maxime Franco
Date: Sun, 19 Jan 2020 12:47:20 +0100
Subject: [PATCH 3/3] ISSUE 4: add a post-run report that counts each HTTP
 response code received and, when a check fails, records details about the
 error

---
 swaggercheck/__main__.py    | 11 +++++
 swaggercheck/_basictests.py | 83 +++++++++++++++++++++++++++++++++++--
 2 files changed, 90 insertions(+), 4 deletions(-)

diff --git a/swaggercheck/__main__.py b/swaggercheck/__main__.py
index edd1e69..b4386fd 100644
--- a/swaggercheck/__main__.py
+++ b/swaggercheck/__main__.py
@@ -31,6 +31,14 @@ def main():
         help="continue on error",
     )
 
+    parser.add_argument(
+        "-v",
+        "--verbose",
+        dest="get_report",
+        action="store_true",
+        help="print a report after the run",
+    )
+
     parser.add_argument(
         "-u", "--username", help="username (implies 'basic' auth)"
     )
@@ -57,6 +65,9 @@ def main():
         "cont_on_err": (
             parsed_args.cont_on_err or environ.get("SC_CONTINUE_ON_ERROR")
         ),
+        "get_report": (
+            parsed_args.get_report or environ.get("SC_GET_REPORT")
+        ),
         "username": parsed_args.username or environ.get("SC_BASIC_USERNAME"),
         "password": parsed_args.password or environ.get("SC_BASIC_PASSWORD"),
         "token": parsed_args.token or environ.get("SC_API_TOKEN"),
diff --git a/swaggercheck/_basictests.py b/swaggercheck/_basictests.py
index 5749f2d..a6852cb 100644
--- a/swaggercheck/_basictests.py
+++ b/swaggercheck/_basictests.py
@@ -14,6 +14,7 @@ def api_conformance_test(
     schema_path,
     num_tests_per_op=20,
     cont_on_err=True,
+    get_report=True,
     username=None,
     password=None,
     token=None,
@@ -22,6 +23,7 @@ def api_conformance_test(
 ):
     init()
 
+    log_filename = "log.txt"
 
     print(Fore.BLUE + "Connecting to {}".format(schema_path) + Style.RESET_ALL)
 
@@ -59,6 +61,8 @@ def api_conformance_test(
     fd, watchdog_filename = tempfile.mkstemp()
     os.close(fd)
     os.remove(watchdog_filename)
+    if os.path.isfile(log_filename):
+        os.remove(log_filename)
 
     for operation in client.api.operations():
         try:
@@ -68,6 +72,8 @@ def api_conformance_test(
                 num_tests_per_op,
                 cont_on_err,
                 watchdog_filename,
+                get_report,
+                log_filename,
             )
         except ValueError as exc:
             print(
@@ -78,9 +84,68 @@ def api_conformance_test(
             )
             sys.exit(1)
 
+    if get_report:
+        with open(log_filename, "r") as myfile:
+            dic = {}
+            nb_error = 0
+            first = True
+            for line in myfile:
+                line = line.split(" ", 2)
+                line[-1] = line[-1].split("\n")[0]
+                if line[0] == "test":
+                    if not first:
+                        print_report(dic, nb_error)
+                        dic, nb_error = {}, 0
+
+                    print(
+                        Fore.BLUE
+                        + "\n["
+                        + Fore.YELLOW
+                        + line[1]
+                        + Fore.BLUE
+                        + "] "
+                        + Fore.CYAN
+                        + line[2]
+                        + Style.RESET_ALL
+                    )
+                    first = False
+
+                elif line[0] == "ok":
+                    if line[1] not in dic:
+                        dic[line[1]] = 1
+                    else:
+                        dic[line[1]] += 1
+
+                elif line[0] == "fail":
+                    dic[nb_error] = line[1] + "\t" + line[2]
+                    nb_error += 1
+
+            print_report(dic, nb_error)
+
+
+def print_report(dic, nb_error):
+    for k, v in dic.items():
+        if isinstance(k, str):
+            print(
+                "[ SUCCESS "
+                + Fore.MAGENTA
+                + "Code: {0} \ttests : {1}".format(k, v)
+                + Style.RESET_ALL
+                + " ] "
+            )
+    for i in range(0, nb_error):
+        tmp = dic[i].split("\t")
+        print(
+            "[ FAIL "
+            + Fore.RED
+            + "\n\tResponse code {} not in documented codes: {}".format(tmp[0], tmp[1])
+            + Style.RESET_ALL
+            + " ] "
+        )
+
 
 def operation_conformance_test(
-    client, operation, num_tests, cont_on_err, watchdog_filename
+    client, operation, num_tests, cont_on_err, watchdog_filename, get_report, log_filename
 ):
     success = "\t[" + Fore.GREEN + " ok " + Style.RESET_ALL + "] "
     failed = "\t[" + Fore.RED + " fail " + Style.RESET_ALL + "] "
@@ -98,6 +163,10 @@ def operation_conformance_test(
         + Style.RESET_ALL
     )
 
+    if get_report:
+        with open(log_filename, "a+") as myfile:
+            myfile.write("test [" + str(operation.method) + "] " + operation.path + "\n")
+
     for name, op in operation._parameters.items():
         if not op.type:
             url = "https://github.com/adimian/swagger-check/labels/types%20support"
@@ -117,9 +186,8 @@ def operation_conformance_test(
     )
     @hypothesis.given(strategy)
     def single_operation_test(
-        client, operation, cont_on_err, watchdog_filename, params
+        client, operation, cont_on_err, get_report, log_filename, watchdog_filename, params
     ):
-
         root = "Testing with params: {}".format(params) + Style.RESET_ALL
         result = client.request(operation, params)
 
@@ -133,6 +201,9 @@ def operation_conformance_test(
 
         if result.status in operation.response_codes:
            print(success + status_code + root)
+            if get_report:
+                with open(log_filename, "a+") as myfile:
+                    myfile.write("ok " + str(result.status) + "\n")
         else:
             outcome = (
                 Fore.RED
@@ -142,6 +213,10 @@ def operation_conformance_test(
                 + Style.RESET_ALL
             )
             print(failed + status_code + root + outcome)
+            if get_report:
+                with open(log_filename, "a+") as myfile:
+                    myfile.write("fail " + str(result.status) + " " + str(operation.response_codes) + "\n")
+
             if not cont_on_err:
                 # we use a file as a signal between inside and outside of
                 # hypothesis since otherwise we'd see hypothesis extended help
@@ -149,7 +224,7 @@ def operation_conformance_test(
                 with open(watchdog_filename, "w"):
                     pass
 
-    single_operation_test(client, operation, cont_on_err, watchdog_filename)
+    single_operation_test(client, operation, cont_on_err, get_report, log_filename, watchdog_filename)
 
     if os.path.isfile(watchdog_filename):
         print(Fore.RED + "Stopping after first failure" + Style.RESET_ALL)