Commit
Merge pull request #10 from SatelliteQE/configurable_settings
Configurable settings
jyejare authored Dec 18, 2023
2 parents 5ee78de + 3d4075d commit 8cbba49
Showing 21 changed files with 347 additions and 238 deletions.
2 changes: 2 additions & 0 deletions .flake8
@@ -0,0 +1,2 @@
[flake8]
max-line-length = 100
6 changes: 6 additions & 0 deletions .gitignore
@@ -0,0 +1,6 @@
settings.yaml
**/__pycache__/
*.json
*.csv
.idea
trust
20 changes: 20 additions & 0 deletions .pre-commit-config.yaml
@@ -0,0 +1,20 @@
repos:
- repo: https://github.com/asottile/reorder_python_imports
rev: v3.12.0
hooks:
- id: reorder-python-imports
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.5.0
hooks:
- id: end-of-file-fixer
- id: trailing-whitespace
- id: check-yaml
- id: debug-statements
- repo: https://github.com/psf/black
rev: 23.11.0
hooks:
- id: black
- repo: https://github.com/pycqa/flake8
rev: 6.1.0
hooks:
- id: flake8
17 changes: 10 additions & 7 deletions apix/diff.py
@@ -1,10 +1,14 @@
# -*- encoding: utf-8 -*-
"""Determine the changes between two API versions."""
from pathlib import Path

import attr
import yaml
from pathlib import Path
from logzero import logger
from apix.helpers import get_latest, get_previous, load_api

from apix.helpers import get_latest
from apix.helpers import get_previous
from apix.helpers import load_api


@attr.s()
@@ -23,9 +27,7 @@ def __attrs_post_init__(self):
self.api_name = get_latest(data_dir=self.data_dir, mock=self.mock)
if not self.ver1:
# get the latest saved version
self.ver1 = get_latest(
api_name=self.api_name, data_dir=self.data_dir, mock=self.mock
)
self.ver1 = get_latest(api_name=self.api_name, data_dir=self.data_dir, mock=self.mock)
if not self.ver2:
# get the version before ver1
self.ver2 = get_previous(self.api_name, self.ver1, self.data_dir, self.mock)
@@ -150,12 +152,13 @@ def save_diff(self, return_path=False):

if self.mock:
fpath = Path(
f"{self.data_dir}tests/APIs/{self.api_name}/{self.ver2}-to-{self.ver1}-diff.yaml"
f"{self.data_dir}tests/APIs/{self.api_name}"
f"/{self.ver2}-to-{self.ver1}-diff.yaml"
)
else:
ftype = "comp-diff" if self.compact else "diff"
fpath = Path(
f"{self.data_dir}APIs/{self.api_name}/{self.ver2}-to-{self.ver1}-{ftype}.yaml"
f"{self.data_dir}APIs/{self.api_name}/" f"{self.ver2}-to-{self.ver1}-{ftype}.yaml"
)
if fpath.exists():
fpath.unlink()
28 changes: 16 additions & 12 deletions apix/explore.py
@@ -1,13 +1,16 @@
"""Explore and API and save the results."""
import aiohttp
import asyncio
import time
from pathlib import Path

import aiohttp
import attr
import requests
import time
import yaml
from logzero import logger
from pathlib import Path
from apix.parsers import apipie, test

from apix.parsers import apipie
from apix.parsers import test


@attr.s()
@@ -35,7 +38,9 @@ def __attrs_post_init__(self):
logger.warning("No known parser specified! Please review documentation.")

async def _async_get(self, session, link):
"""visit a page and download the content, returning the link and content"""
"""
visit a page and download the content returning the link and content
"""
async with session.get(self.host_url + link[1], ssl=False) as response:
content = await response.read()
logger.debug(link[1])
@@ -53,15 +58,15 @@ async def _async_loop(self, links):
self._queue.append(result)

def _visit_links(self, links, retries=3):
"""main controller for asynchronous page visiting, will attempt 3 retries"""
"""
Main controller for asynchronous page visiting, will attempt 3 retries
"""
try:
loop = asyncio.get_event_loop()
loop.run_until_complete(self._async_loop(links))
except aiohttp.client_exceptions.ServerDisconnectedError as err:
except aiohttp.client_exceptions.ServerDisconnectedError:
logger.warning(
"Lost connection to host.{}".join(
"Retrying in 10 seconds" if retries else ""
)
"Lost connection to host.".join("Retrying in 10 seconds" if retries else "")
)
if retries:
time.sleep(10)
@@ -113,8 +118,7 @@ def explore(self):
result = requests.get(self.host_url + self.base_path, verify=False)
if not result:
logger.warning(
f"I couldn't find anything useful at "
f"{self.host_url}{self.base_path}."
f"I couldn't find anything useful at " f"{self.host_url}{self.base_path}."
)
return
self.base_path = self.base_path.replace(".html", "") # for next step
17 changes: 6 additions & 11 deletions apix/helpers.py
@@ -1,8 +1,9 @@
# -*- encoding: utf-8 -*-
"""A collection of miscellaneous helpers that don't quite fit in."""
import yaml
from copy import deepcopy
from pathlib import Path

import yaml
from logzero import logger


@@ -13,9 +14,7 @@ def get_api_list(data_dir=None, mock=False):
if not api_dir.exists():
return None
# get all versions in directory, that aren't diffs
apis = [
(api.name, api.stat().st_mtime) for api in api_dir.iterdir() if api.is_dir()
] or []
apis = [(api.name, api.stat().st_mtime) for api in api_dir.iterdir() if api.is_dir()] or []
apis = [api for api, _ in sorted(apis, key=lambda x: x[1], reverse=True)]
return apis

@@ -33,9 +32,7 @@ def get_ver_list(api_name, data_dir=None, mock=False):
versions = [
v_file.name.replace(".yaml", "")
for v_file in save_path.iterdir()
if "-diff." not in v_file.name
and "-comp." not in v_file.name
and ".yaml" in v_file.name
if "-diff." not in v_file.name and "-comp." not in v_file.name and ".yaml" in v_file.name
] or []
return sorted(versions, reverse=True)

@@ -86,12 +83,10 @@ def save_api(api_name, version, api_dict, data_dir=None, compact=False, mock=Fal
"""Save the dict to yaml, if the file doesn't exist"""
if mock:
a_path = Path(
f"{data_dir}tests/APIs/{api_name}/{version}{'-comp' if compact else ''}.yaml"
f"{data_dir}tests/APIs/{api_name}/{version}" f"{'-comp' if compact else ''}.yaml"
)
else:
a_path = Path(
f"{data_dir}APIs/{api_name}/{version}{'-comp' if compact else ''}.yaml"
)
a_path = Path(f"{data_dir}APIs/{api_name}/{version}" f"{'-comp' if compact else ''}.yaml")
a_path.parent.mkdir(parents=True, exist_ok=True)
logger.info(f"Saving {api_name} v{version} to {a_path}")
with a_path.open("w") as f:
19 changes: 8 additions & 11 deletions apix/parsers/apipie.py
@@ -22,13 +22,14 @@ class APIPie:
def _compile_method(method_dict):
"""form the parameters and paths lists"""
params = [
f'{param["name"]} ~ {"required" if param["required"] else "optional"} ~ {param["expected_type"]}'
(
f'{param["name"]} ~ '
f'{"required" if param["required"] else "optional"} ~ '
f'{param["expected_type"]}'
)
for param in method_dict["params"]
]
paths = [
f'{path["http_method"].upper()} {path["api_url"]}'
for path in method_dict["apis"]
]
paths = [f'{path["http_method"].upper()} {path["api_url"]}' for path in method_dict["apis"]]
return {"paths": paths, "params": params}

def scrape_content(self, result):
@@ -38,12 +39,8 @@ def scrape_content(self, result):
logger.debug(f"Compiling {name} with {len(data['methods'])} methods")
self._data[name] = {"methods": []}
for method in data["methods"]:
self._data[name]["methods"].append(
{method["name"]: self._compile_method(method)}
)
self.params.update(
{param["name"]: param for param in method["params"]}
)
self._data[name]["methods"].append({method["name"]: self._compile_method(method)})
self.params.update({param["name"]: param for param in method["params"]})

def yaml_format(self, ingore=None):
"""Return the compiled data in a yaml-friendly format"""
8 changes: 1 addition & 7 deletions apix/parsers/test.py
@@ -10,7 +10,6 @@
https://www.google.com/search?q=apix
"""
import attr
from logzero import logger
from lxml import html


@@ -47,12 +46,7 @@ def pull_links(result, base_path):
links, last = [], None
for link in g_links:
url = link[2].replace("../", "")
if (
"JacobCallahan" in url
and "sparkline" not in url
and link[0].text
and url != last
):
if "JacobCallahan" in url and "sparkline" not in url and link[0].text and url != last:
links.append((link[0].text, url))
last = url
return links
60 changes: 60 additions & 0 deletions candore/__init__.py
@@ -0,0 +1,60 @@
import asyncio # noqa: F401
import json
from pathlib import Path

import click

from candore.errors import ModeError
from candore.modules.api_lister import APILister
from candore.modules.comparator import Comparator
from candore.modules.extractor import Extractor
from candore.modules.report import Reporting


class Candore:
def __init__(self, settings):
self.settings = settings
self.api_lister = APILister(settings=self.settings)

def list_endpoints(self):
return self.api_lister.lister_endpoints()

async def save_all_entities(self, mode, output_file, full):
"""Save all the entities to a json file
:param mode: Pre or Post
:param output_file: Output file name
:param full: If True, save entities from all pages of the components,
else just saves first page
:return: None
"""
if mode not in ["pre", "post"]:
raise ModeError("Extracting mode must be 'pre' or 'post'")

async with Extractor(settings=self.settings, apilister=self.api_lister) as extractor:
if full:
extractor.full = True
data = await extractor.extract_all_entities()

if not data:
click.echo("Entities data is not data found!")

file_path = Path(output_file) if output_file else Path(f"{mode}_entities.json")
with file_path.open(mode="w") as entfile:
json.dump(data, entfile)
click.echo(f"Entities data saved to {file_path}")

def compare_entities(
self,
pre_file=None,
post_file=None,
output=None,
report_type=None,
record_evs=None,
):
comp = Comparator(settings=self.settings)
if record_evs:
comp.record_evs = True
results = comp.compare_json(pre_file=pre_file, post_file=post_file)
reporter = Reporting(results=results)
reporter.generate_report(output_file=output, output_type=report_type)
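
For orientation, here is a minimal sketch of how the new Candore class added above could be driven end to end. Only the Candore methods themselves come from this diff; the dynaconf-style settings loader, the report type value, and the file names are illustrative assumptions.

import asyncio

from candore import Candore
from dynaconf import Dynaconf  # assumption: settings.yaml is loaded via a dynaconf-style object

# Build a settings object; this PR only requires that one is passed into Candore.
settings = Dynaconf(settings_files=["settings.yaml"])
candore = Candore(settings=settings)

# List every API endpoint the lister discovers.
print(candore.list_endpoints())

# Snapshot all entities before a change; with output_file=None this writes pre_entities.json.
asyncio.run(candore.save_all_entities(mode="pre", output_file=None, full=True))

# ... apply the change under test, then snapshot again with mode="post" ...

# Compare the two snapshots and emit a report (the "json" report type is an assumption).
candore.compare_entities(
    pre_file="pre_entities.json",
    post_file="post_entities.json",
    output="comparison_report.json",
    report_type="json",
    record_evs=False,
)
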
50 changes: 0 additions & 50 deletions candore/candore.py

This file was deleted.
