diff --git a/examples/example-run.py b/examples/example-run.py
index 6e4e6565d..f7c1912ef 100755
--- a/examples/example-run.py
+++ b/examples/example-run.py
@@ -1,6 +1,5 @@
 #!/usr/bin/env python3
 from frontik.server import main
 
-
 if __name__ == '__main__':
     main('./frontik.cfg')
diff --git a/examples/example_app/pages/__init__.py b/examples/example_app/pages/__init__.py
index f06ad699f..a1d9fd236 100644
--- a/examples/example_app/pages/__init__.py
+++ b/examples/example_app/pages/__init__.py
@@ -3,6 +3,8 @@
 
 class Page(frontik.handler.PageHandler):
     def get_page(self):
-        self.json.put({
-            'text': 'Hello, world!'
-        })
+        self.json.put(
+            {
+                'text': 'Hello, world!',
+            },
+        )
diff --git a/examples/example_app/pages/tpl.py b/examples/example_app/pages/tpl.py
index c4c58dd1d..329f61291 100644
--- a/examples/example_app/pages/tpl.py
+++ b/examples/example_app/pages/tpl.py
@@ -5,5 +5,5 @@ class Page(frontik.handler.PageHandler):
     def get_page(self):
         self.set_template('main.html')  # This template is located in the `templates` folder
         self.json.put(
-            self.get_url(self.request.host, '/example')
+            self.get_url(self.request.host, '/example'),
         )
diff --git a/frontik/app.py b/frontik/app.py
index 289f17bc2..9da7937ab 100644
--- a/frontik/app.py
+++ b/frontik/app.py
@@ -1,68 +1,60 @@
+from __future__ import annotations
+
 import asyncio
 import importlib
+import logging
 import multiprocessing
 import sys
 import time
 import traceback
 from functools import partial
 from typing import TYPE_CHECKING
-import logging
 
-import aiohttp
-import tornado
+from http_client import AIOHttpClientWrapper, HttpClientFactory
+from http_client import options as http_client_options
+from http_client.balancing import RequestBalancerBuilder, Upstream, UpstreamManager
 from lxml import etree
-from tornado import httputil
-from tornado.web import Application, RequestHandler, HTTPError
-from http_client import HttpClientFactory, options as http_client_options, AIOHttpClientWrapper
-from http_client.balancing import RequestBalancerBuilder, UpstreamManager
+from tornado.web import Application, HTTPError, RequestHandler
 
 import frontik.producers.json_producer
 import frontik.producers.xml_producer
 from frontik import integrations, media_types, request_context
-from frontik.integrations.statsd import create_statsd_client
-from frontik.debug import DebugTransform
+from frontik.debug import DebugTransform, get_frontik_and_apps_versions
 from frontik.handler import ErrorHandler
+from frontik.integrations.statsd import create_statsd_client
 from frontik.loggers import CUSTOM_JSON_EXTRA, JSON_REQUESTS_LOGGER
 from frontik.options import options
 from frontik.routing import FileMappingRouter, FrontikRouter
-from frontik.service_discovery import get_sync_service_discovery, get_async_service_discovery, UpstreamCaches
-from frontik.util import generate_uniq_timestamp_request_id, check_request_id
-from frontik.version import version as frontik_version
+from frontik.service_discovery import UpstreamCaches, get_async_service_discovery, get_sync_service_discovery
+from frontik.util import check_request_id, generate_uniq_timestamp_request_id
 
 app_logger = logging.getLogger('http_client')
 
 if TYPE_CHECKING:
-    from typing import Optional
+    from collections.abc import Callable
+    from multiprocessing.sharedctypes import Synchronized
+    from typing import Any
 
     from aiokafka import AIOKafkaProducer
+    from tornado import httputil
+    from tornado.httputil import HTTPServerRequest
 
-
-def get_frontik_and_apps_versions(application):
-    versions = etree.Element('versions')
-
-    etree.SubElement(versions, 'frontik').text = frontik_version
-    etree.SubElement(versions, 'tornado').text = tornado.version
-    etree.SubElement(versions, 'lxml.etree.LXML').text = '.'.join(str(x) for x in etree.LXML_VERSION)
-    etree.SubElement(versions, 'lxml.etree.LIBXML').text = '.'.join(str(x) for x in etree.LIBXML_VERSION)
-    etree.SubElement(versions, 'lxml.etree.LIBXSLT').text = '.'.join(str(x) for x in etree.LIBXSLT_VERSION)
-    etree.SubElement(versions, 'aiohttp').text = aiohttp.__version__
-    etree.SubElement(versions, 'python').text = sys.version.replace('\n', '')
-    etree.SubElement(versions, 'event_loop').text = str(type(asyncio.get_event_loop())).split("'")[1]
-    etree.SubElement(versions, 'application', name=options.app).extend(application.application_version_xml())
-
-    return versions
+    from frontik.integrations.statsd import StatsDClient, StatsDClientStub
+    from frontik.service_discovery import UpstreamUpdateListener
 
 
 class VersionHandler(RequestHandler):
     def get(self):
+        self.application: FrontikApplication
         self.set_header('Content-Type', 'text/xml')
         self.write(
-            etree.tostring(get_frontik_and_apps_versions(self.application), encoding='utf-8', xml_declaration=True)
+            etree.tostring(get_frontik_and_apps_versions(self.application), encoding='utf-8', xml_declaration=True),
         )
 
 
 class StatusHandler(RequestHandler):
     def get(self):
+        self.application: FrontikApplication
         self.set_header('Content-Type', media_types.APPLICATION_JSON)
         self.finish(self.application.get_current_status())
@@ -82,19 +74,20 @@ def get(self):
         except BaseException:
             self.error_page()
 
-    def settrace(self, debugger_ip, debugger_port):
+    def settrace(self, debugger_ip: str | None, debugger_port: int) -> None:
         import pydevd
+
         pydevd.settrace(debugger_ip, port=debugger_port, stdoutToServer=True, stderrToServer=True, suspend=False)
 
-    def trace_page(self, ip, port):
+    def trace_page(self, ip: str | None, port: str) -> None:
         self.set_header('Content-Type', media_types.TEXT_PLAIN)
         self.finish(f'Connected to debug server at {ip}:{port}')
 
-    def already_tracing_page(self):
+    def already_tracing_page(self) -> None:
         self.set_header('Content-Type', media_types.TEXT_PLAIN)
         self.finish('App is already in tracing mode, try to restart service')
 
-    def error_page(self):
+    def error_page(self) -> None:
         self.set_header('Content-Type', media_types.TEXT_PLAIN)
         self.finish(traceback.format_exc())
@@ -105,7 +98,7 @@ class FrontikApplication(Application):
     class DefaultConfig:
         pass
 
-    def __init__(self, **settings):
+    def __init__(self, **settings: Any) -> None:
         self.start_time = time.time()
 
         tornado_settings = settings.get('tornado_settings')
@@ -115,22 +108,22 @@ def __init__(self, **settings):
         self.config = self.application_config()
         self.app = settings.get('app')
         self.app_module = settings.get('app_module')
-        self.app_root = settings.get('app_root')
+        self.app_root: str = settings.get('app_root')  # type: ignore
 
         self.xml = frontik.producers.xml_producer.XMLProducerFactory(self)
         self.json = frontik.producers.json_producer.JsonProducerFactory(self)
 
-        self.available_integrations = None
-        self.tornado_http_client: Optional[AIOHttpClientWrapper] = None
-        self.http_client_factory: Optional[HttpClientFactory] = None
-        self.upstream_manager = None
-        self.upstreams = {}
-        self.children_pipes = {}
-        self.upstream_update_listener = None
+        self.available_integrations: list[integrations.Integration] = []
+        self.tornado_http_client: AIOHttpClientWrapper | None = None
+        self.http_client_factory: HttpClientFactory = None  # type: ignore
+        self.upstream_manager: UpstreamManager = None
+        self.upstreams: dict[str, Upstream] = {}
+        self.children_pipes: dict[int, Any] = {}
+        self.upstream_update_listener: UpstreamUpdateListener = None  # type: ignore
         self.router = FrontikRouter(self)
-        self.init_workers_count_down = multiprocessing.Value('i', options.workers)
+        self.init_workers_count_down: Synchronized = multiprocessing.Value('i', options.workers)  # type: ignore
 
-        core_handlers = [
+        core_handlers: list[Any] = [
             (r'/version/?', VersionHandler),
             (r'/status/?', StatusHandler),
             (r'.*', self.router),
@@ -139,17 +132,21 @@ def __init__(self, **settings):
         if options.debug:
             core_handlers.insert(0, (r'/pydevd/?', PydevdHandler))
 
-        statsd_client = create_statsd_client(options, self)
-        sync_service_discovery = get_sync_service_discovery(options, statsd_client)
-        self.service_discovery_client = get_async_service_discovery(options, statsd_client) \
-            if options.workers == 1 else sync_service_discovery
-        self.upstream_caches = UpstreamCaches(self.children_pipes, self.upstreams, sync_service_discovery) \
-            if options.consul_enabled else UpstreamCaches(self.children_pipes, self.upstreams)
+        self.statsd_client: StatsDClient | StatsDClientStub = create_statsd_client(options, self)
+        sync_service_discovery = get_sync_service_discovery(options, self.statsd_client)
+        self.service_discovery_client = (
+            get_async_service_discovery(options, self.statsd_client) if options.workers == 1 else sync_service_discovery
+        )
+        self.upstream_caches = (
+            UpstreamCaches(self.children_pipes, self.upstreams, sync_service_discovery)
+            if options.consul_enabled
+            else UpstreamCaches(self.children_pipes, self.upstreams)
+        )
 
         super().__init__(core_handlers, **tornado_settings)
 
-    async def init(self):
-        self.transforms.insert(0, partial(DebugTransform, self))
+    async def init(self) -> None:
+        self.transforms.insert(0, partial(DebugTransform, self))  # type: ignore
 
         self.available_integrations, integration_futures = integrations.load_integrations(self)
         await asyncio.gather(*[future for future in integration_futures if future])
@@ -162,17 +159,22 @@ async def init(self):
             if kafka_cluster and kafka_cluster not in options.kafka_clusters:
                 app_logger.warning(
                     'kafka cluster for http client metrics "%s" is not present in "kafka_clusters" option, '
-                    'metrics will be disabled', kafka_cluster
+                    'metrics will be disabled',
+                    kafka_cluster,
                 )
             else:
                 app_logger.info('kafka metrics are %s', 'enabled' if send_metrics_to_kafka else 'disabled')
 
-        kafka_producer = self.get_kafka_producer(kafka_cluster) if send_metrics_to_kafka else None
+        kafka_producer = (
+            self.get_kafka_producer(kafka_cluster) if send_metrics_to_kafka and kafka_cluster is not None else None
+        )
 
         self.upstream_manager = UpstreamManager(self.upstreams)
-        request_balancer_builder = RequestBalancerBuilder(self.upstream_manager,
-                                                          statsd_client=self.statsd_client,
-                                                          kafka_producer=kafka_producer)
+        request_balancer_builder = RequestBalancerBuilder(
+            self.upstream_manager,
+            statsd_client=self.statsd_client,
+            kafka_producer=kafka_producer,
+        )
         self.http_client_factory = HttpClientFactory(self.app, self.tornado_http_client, request_balancer_builder)
 
     def find_handler(self, request, **kwargs):
@@ -182,7 +184,7 @@ def find_handler(self, request, **kwargs):
         if options.validate_request_id:
             check_request_id(request_id)
 
-        def wrapped_in_context(func):
+        def wrapped_in_context(func: Callable) -> Callable:
             def wrapper(*args, **kwargs):
                 token = request_context.initialize(request, request_id)
@@ -194,52 +196,52 @@ def wrapper(*args, **kwargs):
 
             return wrapper
 
         delegate: httputil.HTTPMessageDelegate = wrapped_in_context(super().find_handler)(request, **kwargs)
-        delegate.headers_received = wrapped_in_context(delegate.headers_received)
-        delegate.data_received = wrapped_in_context(delegate.data_received)
-        delegate.finish = wrapped_in_context(delegate.finish)
-        delegate.on_connection_close = wrapped_in_context(delegate.on_connection_close)
+        delegate.headers_received = wrapped_in_context(delegate.headers_received)  # type: ignore
+        delegate.data_received = wrapped_in_context(delegate.data_received)  # type: ignore
+        delegate.finish = wrapped_in_context(delegate.finish)  # type: ignore
+        delegate.on_connection_close = wrapped_in_context(delegate.on_connection_close)  # type: ignore
 
         return delegate
 
-    def reverse_url(self, name, *args, **kwargs):
+    def reverse_url(self, name: str, *args: Any, **kwargs: Any) -> str:
        return self.router.reverse_url(name, *args, **kwargs)
 
-    def application_urls(self):
-        return [
-            ('', FileMappingRouter(importlib.import_module(f'{self.app_module}.pages')))
-        ]
+    def application_urls(self) -> list[tuple]:
+        return [('', FileMappingRouter(importlib.import_module(f'{self.app_module}.pages')))]
 
-    def application_404_handler(self, request):
+    def application_404_handler(self, request: HTTPServerRequest) -> tuple:
         return ErrorHandler, {'status_code': 404}
 
-    def application_config(self):
+    def application_config(self) -> DefaultConfig:
         return FrontikApplication.DefaultConfig()
 
-    def application_version_xml(self):
+    def application_version_xml(self) -> list[etree.Element]:
         version = etree.Element('version')
         version.text = 'unknown'
         return [version]
 
-    def application_version(self):
+    def application_version(self) -> str | None:
         return None
 
     @staticmethod
-    def next_request_id():
+    def next_request_id() -> str:
         FrontikApplication.request_id = generate_uniq_timestamp_request_id()
         return FrontikApplication.request_id
 
-    def get_current_status(self):
+    def get_current_status(self) -> dict[str, str]:
         if self.init_workers_count_down.value > 0:
-            raise HTTPError(500, f'some workers are not started '
-                            f'init_workers_count_down={self.init_workers_count_down.value}')
+            raise HTTPError(
+                500,
+                f'some workers are not started init_workers_count_down={self.init_workers_count_down.value}',
+            )
 
         cur_uptime = time.time() - self.start_time
         if cur_uptime < 60:
-            uptime_value = '{:.2f} seconds'.format(cur_uptime)
+            uptime_value = f'{cur_uptime:.2f} seconds'
         elif cur_uptime < 3600:
-            uptime_value = '{:.2f} minutes'.format(cur_uptime / 60)
+            uptime_value = f'{cur_uptime / 60:.2f} minutes'
         else:
-            uptime_value = '{:.2f} hours and {:.2f} minutes'.format(cur_uptime / 3600, (cur_uptime % 3600) / 60)
+            uptime_value = f'{cur_uptime / 3600:.2f} hours and {(cur_uptime % 3600) / 60:.2f} minutes'
 
         return {
             'uptime': uptime_value,
@@ -267,5 +269,5 @@ def log_request(self, handler):
 
             JSON_REQUESTS_LOGGER.info('', extra={CUSTOM_JSON_EXTRA: extra})
 
-    def get_kafka_producer(self, producer_name: str) -> 'Optional[AIOKafkaProducer]':  # pragma: no cover
+    def get_kafka_producer(self, producer_name: str) -> AIOKafkaProducer | None:  # pragma: no cover
         pass
diff --git a/frontik/auth.py b/frontik/auth.py
index 7cb4c4647..68aac6a0f 100644
--- a/frontik/auth.py
+++ b/frontik/auth.py
@@ -1,9 +1,15 @@
+from __future__ import annotations
+
 import base64
 import http.client
+from typing import TYPE_CHECKING
 
 from tornado.escape import to_unicode
 from tornado.web import Finish
 
+if TYPE_CHECKING:
+    from frontik.handler import PageHandler
+
 DEBUG_AUTH_HEADER_NAME = 'Frontik-Debug-Auth'
@@ -11,7 +17,7 @@ class DebugUnauthorizedError(Finish):
     pass
 
 
-def passed_basic_auth(handler, login, passwd):
+def passed_basic_auth(handler: PageHandler, login: str | None, passwd: str | None) -> bool:
     auth_header = handler.request.headers.get('Authorization')
     if auth_header and auth_header.startswith('Basic '):
         method, auth_b64 = auth_header.split(' ')
@@ -24,7 +30,7 @@ def passed_basic_auth(handler, login, passwd):
     return False
 
 
-def check_debug_auth(handler, login, password):
+def check_debug_auth(handler: PageHandler, login: str | None, password: str | None) -> None:
     """
     :type handler: tornado.web.RequestHandler
     :return: None or tuple(http_code, headers)
@@ -32,14 +38,14 @@ def check_debug_auth(handler, login, password):
     header_name = DEBUG_AUTH_HEADER_NAME
     debug_auth_header = handler.request.headers.get(header_name)
     if debug_auth_header is not None:
-        debug_access = (debug_auth_header == f'{login}:{password}')
+        debug_access = debug_auth_header == f'{login}:{password}'
         if not debug_access:
             handler.set_header('WWW-Authenticate', f'{header_name}-Header realm="Secure Area"')
             handler.set_status(http.client.UNAUTHORIZED)
-            raise DebugUnauthorizedError()
+            raise DebugUnauthorizedError
     else:
         debug_access = passed_basic_auth(handler, login, password)
         if not debug_access:
             handler.set_header('WWW-Authenticate', 'Basic realm="Secure Area"')
             handler.set_status(http.client.UNAUTHORIZED)
-            raise DebugUnauthorizedError()
+            raise DebugUnauthorizedError
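A minimal sketch of the Basic header that passed_basic_auth checks against, assuming an arbitrary login/password pair (illustrative only, not part of the patch):

    import base64

    login, passwd = 'user', 'secret'  # hypothetical credentials
    token = base64.b64encode(f'{login}:{passwd}'.encode()).decode()
    headers = {'Authorization': f'Basic {token}'}
    # passed_basic_auth() splits the header on the space, base64-decodes
    # the token and compares it against f'{login}:{passwd}'.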
- """ +def parse_configs(config_files: str | None) -> None: + """Reads command line options / config file and bootstraps logging.""" allowed_options = {**asdict(options), **asdict(http_client_options)}.keys() parse_command_line(options, allowed_options) if options.config: configs_to_read = options.config else: + if config_files is None: + msg = 'Configs can not be None' + raise Exception(msg) configs_to_read = config_files - configs_to_read = filter( - None, [configs_to_read] if not isinstance(configs_to_read, (list, tuple)) else configs_to_read + configs_to_read_filter = filter( + None, + [configs_to_read] if not isinstance(configs_to_read, list | tuple) else configs_to_read, ) - for config in configs_to_read: + for config in configs_to_read_filter: http_client_parse_config_file(config) parse_config_file(config) @@ -35,13 +47,13 @@ def parse_configs(config_files): parse_command_line(http_client_options, allowed_options) MDC.init('master') bootstrap_core_logging(options.log_level, options.log_json, options.suppressed_loggers) - for config in configs_to_read: + for config in configs_to_read_filter: log.debug('using config: %s', config) if options.autoreload: tornado.autoreload.watch(config) -def parse_command_line(options, allowed_options): +def parse_command_line(options: Options, allowed_options: Iterable) -> None: args = sys.argv for i in range(1, len(args)): @@ -52,7 +64,7 @@ def parse_command_line(options, allowed_options): arg = args[i].lstrip("-") name, equals, value = arg.partition("=") if name not in allowed_options: - log.error(f'Unrecognized command line option: {name}, skipped') + log.error('Unrecognized command line option: %s, skipped', name) continue option = next(filter(lambda x: x.name == name, fields(options)), None) @@ -65,13 +77,14 @@ def parse_command_line(options, allowed_options): else: raise Exception('Option %r requires a value' % name) - if option.type == bool: + if option.type == bool or get_args(option.type) == (bool, type(None)): setattr(options, name, value.lower() not in ("false", "0", "f")) - elif option.type == int: + elif option.type == int or get_args(option.type) == (int, type(None)): setattr(options, name, int(value)) - elif option.type == float: + elif option.type == float or get_args(option.type) == (float, type(None)): setattr(options, name, float(value)) - elif option.type == str: + elif option.type == str or get_args(option.type) == (str, type(None)): setattr(options, name, value) else: - raise Exception('Complex types are not implemented %r: %r' % (name, value)) + msg = f'Complex types are not implemented {name!r}: {value!r} ({option.type})' + raise Exception(msg) diff --git a/frontik/consul_client.py b/frontik/consul_client.py index 30b84652f..7e3ce8ee1 100644 --- a/frontik/consul_client.py +++ b/frontik/consul_client.py @@ -1,13 +1,20 @@ -import sys +from __future__ import annotations + import asyncio +import sys import warnings -import requests +from typing import TYPE_CHECKING import aiohttp +import requests # type: ignore from aiohttp import ClientTimeout - from consul import base +if TYPE_CHECKING: + from asyncio import AbstractEventLoop + from collections.abc import Callable + from typing import Any + PY_341 = sys.version_info >= (3, 4, 1) HTTP_METHOD_GET = "GET" @@ -16,26 +23,57 @@ HTTP_METHOD_DELETE = "DELETE" +class ClientEventCallback: + def on_http_request_success(self, method: str, path: str, response_code: int) -> None: + pass + + def on_http_request_failure(self, method: str, path: str, ex: BaseException) -> None: + pass + + def 
diff --git a/frontik/consul_client.py b/frontik/consul_client.py
index 30b84652f..7e3ce8ee1 100644
--- a/frontik/consul_client.py
+++ b/frontik/consul_client.py
@@ -1,13 +1,20 @@
-import sys
+from __future__ import annotations
+
 import asyncio
+import sys
 import warnings
-import requests
+from typing import TYPE_CHECKING
 
 import aiohttp
+import requests  # type: ignore
 from aiohttp import ClientTimeout
-
 from consul import base
 
+if TYPE_CHECKING:
+    from asyncio import AbstractEventLoop
+    from collections.abc import Callable
+    from typing import Any
+
 PY_341 = sys.version_info >= (3, 4, 1)
 
 HTTP_METHOD_GET = "GET"
@@ -16,26 +23,57 @@
 HTTP_METHOD_DELETE = "DELETE"
 
 
+class ClientEventCallback:
+    def on_http_request_success(self, method: str, path: str, response_code: int) -> None:
+        pass
+
+    def on_http_request_failure(self, method: str, path: str, ex: BaseException) -> None:
+        pass
+
+    def on_http_request_invalid(self, method: str, path: str, response_code: int) -> None:
+        pass
+
+
 class ConsulClient(base.Consul):
-    def __init__(self, *args, client_event_callback=None, **kwargs):
+    def __init__(self, *args: Any, client_event_callback: ClientEventCallback | None = None, **kwargs: Any) -> None:
         self._client_event_callback = ClientEventCallback() if client_event_callback is None else client_event_callback
         super().__init__(*args, **kwargs)
 
 
 class AsyncConsulClient(ConsulClient):
-    def __init__(self, *args, loop=None, client_event_callback=None, **kwargs):
-        self._loop = loop or asyncio.get_event_loop()
+    def __init__(
+        self,
+        *args: Any,
+        loop: AbstractEventLoop | None = None,
+        client_event_callback: ClientEventCallback | None = None,
+        **kwargs: Any,
+    ) -> None:
+        self._loop: AbstractEventLoop = loop or asyncio.get_event_loop()
         super().__init__(*args, client_event_callback=client_event_callback, **kwargs)
 
     def http_connect(self, host, port, scheme, verify=True, cert=None):
-        return _AsyncConsulHttpClient(host, port, scheme, loop=self._loop, verify=verify, cert=None,
-                                      client_event_callback=self._client_event_callback)
+        return _AsyncConsulHttpClient(
+            host,
+            port,
+            scheme,
+            loop=self._loop,
+            verify=verify,
+            cert=None,
+            client_event_callback=self._client_event_callback,
+        )
 
 
 class SyncConsulClient(ConsulClient):
     def http_connect(self, host, port, scheme, verify=True, cert=None, timeout=None):
-        return _SyncConsulHttpClient(host, port, scheme, verify, cert, timeout,
-                                     client_event_callback=self._client_event_callback)
+        return _SyncConsulHttpClient(
+            host,
+            port,
+            scheme,
+            verify,
+            cert,
+            timeout,
+            client_event_callback=self._client_event_callback,
+        )
 
 
 # this implementation was copied from https://github.com/hhru/python-consul2/blob/master/consul/aio.py#L16
@@ -43,22 +81,34 @@ def http_connect(self, host, port, scheme, verify=True, cert=None, timeout=None)
 class _AsyncConsulHttpClient(base.HTTPClient):
     """Asyncio adapter for python consul using aiohttp library"""
 
-    def __init__(self, *args, loop=None, client_event_callback, **kwargs):
-        super(_AsyncConsulHttpClient, self).__init__(*args, **kwargs)
-        self._session = None
+    def __init__(
+        self,
+        *args: Any,
+        loop: AbstractEventLoop | None = None,
+        client_event_callback: ClientEventCallback,
+        **kwargs: Any,
+    ):
+        super().__init__(*args, **kwargs)
+        self._session: aiohttp.ClientSession | None = None
         self._loop = loop or asyncio.get_event_loop()
-        self._client_event_callback = client_event_callback
-
-    async def _request(self, callback, method, path, params=None, data=None, headers=None, total_timeout=None):
+        self._client_event_callback: ClientEventCallback = client_event_callback
+
+    async def _request(
+        self,
+        callback: Callable,
+        method: str,
+        path: str,
+        params: dict | None = None,
+        data: Any = None,
+        headers: Any = None,
+        total_timeout: float | None = None,
+    ) -> Any:
         uri = self.uri(path, params)
         connector = aiohttp.TCPConnector(loop=self._loop, verify_ssl=self.verify)
         async with aiohttp.ClientSession(connector=connector, timeout=ClientTimeout(total=total_timeout)) as session:
             self._session = session
             try:
-                resp = await session.request(method=method,
-                                             url=uri,
-                                             data=data,
-                                             headers=headers)
+                resp = await session.request(method=method, url=uri, data=data, headers=headers)
                 body = await resp.text(encoding='utf-8')
                 content = await resp.read()
                 r = base.Response(resp.status, resp.headers, body, content)
@@ -79,77 +129,73 @@ async def _request(self, callback, method, path, params=None, data=None, headers
 
     # python prior 3.4.1 does not play nice with __del__ method
     if PY_341:  # pragma: no branch
+
         def __del__(self):
-            warnings.warn("Unclosed connector in aio.Consul.HTTPClient",
-                          ResourceWarning)
+            warnings.warn("Unclosed connector in aio.Consul.HTTPClient", ResourceWarning)
             if self._session and not self._session.closed:
-                warnings.warn("Unclosed connector in aio.Consul.HTTPClient",
-                              ResourceWarning)
+                warnings.warn("Unclosed connector in aio.Consul.HTTPClient", ResourceWarning)
                 asyncio.ensure_future(self.close())
 
     async def get(self, callback, path, params=None, headers=None, total_timeout=None):
-        return await self._request(callback,
-                                   HTTP_METHOD_GET,
-                                   path, params,
-                                   headers=headers,
-                                   total_timeout=total_timeout)
+        return await self._request(
+            callback,
+            HTTP_METHOD_GET,
+            path,
+            params,
+            headers=headers,
+            total_timeout=total_timeout,
+        )
 
     async def put(self, callback, path, params=None, data='', headers=None):
-        return await self._request(callback,
-                                   HTTP_METHOD_PUT,
-                                   path,
-                                   params,
-                                   data,
-                                   headers)
+        return await self._request(callback, HTTP_METHOD_PUT, path, params, data, headers)
 
     async def delete(self, callback, path, params=None, data='', headers=None):
-        return await self._request(callback,
-                                   HTTP_METHOD_DELETE,
-                                   path,
-                                   params,
-                                   data,
-                                   headers)
+        return await self._request(callback, HTTP_METHOD_DELETE, path, params, data, headers)
 
     async def post(self, callback, path, params=None, data='', headers=None):
-        return await self._request(callback,
-                                   HTTP_METHOD_POST,
-                                   path,
-                                   params,
-                                   data,
-                                   headers)
+        return await self._request(callback, HTTP_METHOD_POST, path, params, data, headers)
 
-    async def close(self):
-        await self._session.close()
+    async def close(self) -> None:
+        if self._session is not None:
+            await self._session.close()
 
 
 # this implementation was copied from https://github.com/hhru/python-consul2/blob/master/consul/std.py#L8
 # and then _client_event_callback was added
 class _SyncConsulHttpClient(base.HTTPClient):
-    def __init__(self, *args, client_event_callback, **kwargs):
-        super(_SyncConsulHttpClient, self).__init__(*args, **kwargs)
+    def __init__(self, *args: Any, client_event_callback: ClientEventCallback, **kwargs: Any):
+        super().__init__(*args, **kwargs)
         self._session = requests.session()
-        self._client_event_callback = client_event_callback
+        self._client_event_callback: ClientEventCallback = client_event_callback
 
     @staticmethod
-    def response(response):
+    def response(response: requests.Response) -> base.Response:
         response.encoding = 'utf-8'
-        return base.Response(
-            response.status_code,
-            response.headers,
-            response.text,
-            response.content)
-
-    def _request(self, callback, method, path, params=None, data=None, headers=None, total_timeout=None):
+        return base.Response(response.status_code, response.headers, response.text, response.content)
+
+    def _request(
+        self,
+        callback: Callable,
+        method: str,
+        path: str,
+        params: dict | None = None,
+        data: Any = None,
+        headers: Any = None,
+        total_timeout: float | None = None,
+    ) -> Any:
         uri = self.uri(path, params)
         try:
             resp = self.response(
-                self._session.request(method=method,
-                                      url=uri,
-                                      data=data,
-                                      headers=headers,
-                                      verify=self.verify,
-                                      cert=self.cert,
-                                      timeout=self.timeout))
+                self._session.request(
+                    method=method,
+                    url=uri,
+                    data=data,
+                    headers=headers,
+                    verify=self.verify,
+                    cert=self.cert,
+                    timeout=self.timeout,
+                ),
+            )
 
             try:
                 result = callback(resp)
@@ -163,44 +209,13 @@ def _request(self, callback, method, path, params=None, data=None, headers=None,
             raise ex
 
     def get(self, callback, path, params=None, headers=None, total_timeout=None):
-        return self._request(callback,
-                             HTTP_METHOD_GET,
-                             path,
-                             params,
-                             headers=headers,
-                             total_timeout=total_timeout)
+        return self._request(callback, HTTP_METHOD_GET, path, params, headers=headers, total_timeout=total_timeout)
 
     def put(self, callback, path, params=None, data='', headers=None):
-        return self._request(callback,
-                             HTTP_METHOD_PUT,
-                             path,
-                             params,
-                             data,
-                             headers)
+        return self._request(callback, HTTP_METHOD_PUT, path, params, data, headers)
 
     def delete(self, callback, path, params=None, data='', headers=None):
-        return self._request(callback,
-                             HTTP_METHOD_DELETE,
-                             path,
-                             params,
-                             data,
-                             headers)
+        return self._request(callback, HTTP_METHOD_DELETE, path, params, data, headers)
 
     def post(self, callback, path, params=None, headers=None, data=''):
-        return self._request(callback,
-                             HTTP_METHOD_POST,
-                             path,
-                             params,
-                             data,
-                             headers)
-
-
-class ClientEventCallback:
-    def on_http_request_success(self, method, path, response_code):
-        pass
-
-    def on_http_request_failure(self, method, path, ex):
-        pass
-
-    def on_http_request_invalid(self, method, path, response_code):
-        pass
+        return self._request(callback, HTTP_METHOD_POST, path, params, data, headers)
diff --git a/frontik/debug.py b/frontik/debug.py
index 1f0b465ff..5b51f50ec 100644
--- a/frontik/debug.py
+++ b/frontik/debug.py
@@ -1,4 +1,8 @@
+from __future__ import annotations
+
+import asyncio
 import base64
+import contextlib
 import copy
 import inspect
 import json
@@ -6,37 +10,48 @@
 import os
 import pprint
 import re
+import sys
 import time
 import traceback
 from binascii import crc32
 from datetime import datetime
 from http.cookies import SimpleCookie
+from typing import TYPE_CHECKING
 from urllib.parse import parse_qs, urlparse
 
+import aiohttp
+import tornado
 from lxml import etree
 from lxml.builder import E
 from tornado.escape import to_unicode, utf8
-from tornado.httputil import HTTPHeaders
+from tornado.httputil import HTTPHeaders, HTTPServerRequest
 from tornado.web import OutputTransform
 
-from http_client.request_response import RequestResult, RequestBuilder
 import frontik.util
 import frontik.xml_util
 from frontik import media_types, request_context
 from frontik.loggers import BufferedHandler
+from frontik.options import options
+from frontik.version import version as frontik_version
+from frontik.xml_util import dict_to_xml
+
+if TYPE_CHECKING:
+    from typing import Any
+
+    from http_client.request_response import RequestBuilder, RequestResult
+
+    from frontik.app import FrontikApplication
+    from frontik.handler import PageHandler
 
 debug_log = logging.getLogger('frontik.debug')
 
 
-def response_to_xml(result: RequestResult):
+def response_to_xml(result: RequestResult) -> etree.Element:
     time_info = etree.Element('time_info')
     content_type = result.headers.get('Content-Type', '')
     mode = ''
 
-    if 'charset' in content_type:
-        charset = content_type.partition('=')[-1]
-    else:
-        charset = 'utf-8'
+    charset = content_type.partition('=')[-1] if 'charset' in content_type else 'utf-8'
 
     try_charsets = (charset, 'cp1251')
@@ -82,7 +97,7 @@ def response_to_xml(result: RequestResult):
     return response
 
 
-def request_to_xml(request: RequestBuilder):
+def request_to_xml(request: RequestBuilder) -> etree.Element:
     content_type = request.headers.get('Content-Type', '')
     body = etree.Element('body', content_type=content_type)
@@ -110,9 +125,7 @@ def request_to_xml(request: RequestBuilder):
                 _params_to_xml(request.url),
                 _headers_to_xml(request.headers),
                 _cookies_to_xml(request.headers),
-                E.curl(
-                    request_to_curl_string(request)
-                )
+                E.curl(request_to_curl_string(request)),
             )
         except Exception:
             debug_log.exception('cannot parse request body')
@@ -122,7 +135,7 @@ def request_to_xml(request: RequestBuilder):
     return request
 
 
-def balanced_request_to_xml(balanced_request, retry, datacenter):
+def balanced_request_to_xml(balanced_request: RequestBuilder, retry: int, datacenter: str) -> etree.Element:
     info = etree.Element('meta-info')
 
     if balanced_request.upstream_name != balanced_request.host:
@@ -136,8 +149,8 @@ def balanced_request_to_xml(balanced_request, retry, datacenter):
     return info
 
 
-def request_to_curl_string(request: RequestBuilder):
-    def _escape_apos(string):
+def request_to_curl_string(request: RequestBuilder) -> str:
+    def _escape_apos(string: str) -> str:
         return string.replace("'", "'\"'\"'")
 
     try:
@@ -149,7 +162,7 @@ def _escape_apos(string):
         curl_headers = HTTPHeaders(request.headers)
 
     if request.body and 'Content-Length' not in curl_headers:
-        curl_headers['Content-Length'] = len(request.body)
+        curl_headers['Content-Length'] = str(len(request.body))
 
     if is_binary_body:
         curl_echo_data = f'echo -e {request_body} |'
@@ -158,7 +171,7 @@ def _escape_apos(string):
         curl_echo_data = ''
         curl_data_string = f"--data '{request_body}'" if request_body else ''
 
-    def _format_header(key):
+    def _format_header(key: str) -> str:
         header_value = frontik.util.any_to_unicode(curl_headers[key])
         return f"-H '{key}: {_escape_apos(header_value)}'"
@@ -167,16 +180,16 @@ def _format_header(key):
         method=request.method,
         url=to_unicode(request.url),
         headers=' '.join(_format_header(k) for k in sorted(curl_headers.keys())),
-        data=curl_data_string
+        data=curl_data_string,
     ).strip()
 
 
-def _get_query_parameters(url):
+def _get_query_parameters(url: str) -> dict:
     url = 'http://' + url if not re.match(r'[a-z]+://.+\??.*', url, re.IGNORECASE) else url
     return parse_qs(urlparse(url).query, True)
 
 
-def _params_to_xml(url):
+def _params_to_xml(url: str) -> etree.Element:
     params = etree.Element('params')
     query = _get_query_parameters(url)
     for name, values in query.items():
@@ -189,7 +202,7 @@ def _params_to_xml(url):
     return params
 
 
-def _headers_to_xml(request_or_response_headers):
+def _headers_to_xml(request_or_response_headers: dict | HTTPHeaders) -> etree.Element:
     headers = etree.Element('headers')
     for name, value in request_or_response_headers.items():
         if name != 'Cookie':
@@ -198,16 +211,16 @@ def _headers_to_xml(request_or_response_headers):
     return headers
 
 
-def _cookies_to_xml(request_or_response_headers):
+def _cookies_to_xml(request_or_response_headers: dict) -> etree.Element:
     cookies = etree.Element('cookies')
     if 'Cookie' in request_or_response_headers:
-        _cookies = SimpleCookie(request_or_response_headers['Cookie'])
+        _cookies: SimpleCookie = SimpleCookie(request_or_response_headers['Cookie'])
         for cookie in _cookies:
             cookies.append(E.cookie(_cookies[cookie].value, name=cookie))
 
     return cookies
 
 
-def _exception_to_xml(exc_info, log=debug_log):
+def _exception_to_xml(exc_info: tuple, log: logging.Logger = debug_log) -> etree.Element:
     exc_node = etree.Element('exception')
 
     try:
@@ -220,12 +233,12 @@ def _exception_to_xml(exc_info, log=debug_log):
 
         try:
             lines, starting_line = inspect.getsourcelines(frame)
-        except IOError:
-            lines, starting_line = [], None
+        except OSError:
+            lines, starting_line = [], 0
 
-        for i, l in enumerate(lines):
+        for i, line in enumerate(lines):
             line_node = etree.Element('line')
-            line_node.append(E.text(to_unicode(l)))
+            line_node.append(E.text(to_unicode(line)))
             line_node.append(E.number(str(starting_line + i)))
             if starting_line + i == frame.f_lineno:
                 line_node.set('selected', 'true')
@@ -247,27 +260,41 @@ def _exception_to_xml(exc_info, log=debug_log):
 _format_number = '{:.4f}'.format
 
 
-def _pretty_print_xml(node):
+def _pretty_print_xml(node: etree.Element) -> str:
     return etree.tostring(node, pretty_print=True, encoding='unicode')
 
 
-def _pretty_print_json(node):
+def _pretty_print_json(node: Any) -> str:
     return json.dumps(node, sort_keys=True, indent=2, ensure_ascii=False)
 
 
-def _string_to_color(value):
-    value_hash = crc32(utf8(value)) % 0xffffffff
+def _string_to_color(value: None | str | bytes) -> tuple[str, str]:
+    value_hash = crc32(utf8(value)) % 0xFFFFFFFF  # type: ignore
     r = (value_hash & 0xFF0000) >> 16
     g = (value_hash & 0x00FF00) >> 8
     b = value_hash & 0x0000FF
-    bgcolor = '#%02x%02x%02x' % (r, g, b)
-    fgcolor = 'black' if 0.2126 * r + 0.7152 * g + 0.0722 * b > 0xff / 2 else 'white'
+    bgcolor = f'#{r:02x}{g:02x}{b:02x}'
+    fgcolor = 'black' if 0.2126 * r + 0.7152 * g + 0.0722 * b > 0xFF / 2 else 'white'
     return bgcolor, fgcolor
 
 
 class DebugBufferedHandler(BufferedHandler):
-    FIELDS = ('created', 'filename', 'funcName', 'levelname', 'levelno', 'lineno', 'module', 'msecs',
-              'name', 'pathname', 'process', 'processName', 'relativeCreated', 'threadName')
+    FIELDS = (
+        'created',
+        'filename',
+        'funcName',
+        'levelname',
+        'levelno',
+        'lineno',
+        'module',
+        'msecs',
+        'name',
+        'pathname',
+        'process',
+        'processName',
+        'relativeCreated',
+        'threadName',
+    )
 
     def produce_all(self):
         log_data = etree.Element('log')
@@ -277,7 +304,7 @@ def produce_all(self):
 
         return copy.deepcopy(log_data)
 
-    def _produce_one(self, record):
+    def _produce_one(self, record: logging.LogRecord) -> etree.Element:
         entry_attrs = {}
         for field in self.FIELDS:
             val = getattr(record, field)
@@ -297,35 +324,38 @@ def _produce_one(self, record):
         if record.exc_info is not None:
             entry.append(_exception_to_xml(record.exc_info))
 
-        if getattr(record, '_response', None) is not None:
+        if hasattr(record, '_response') and getattr(record, '_response', None) is not None:
             entry.append(response_to_xml(record._response))
 
-        if getattr(record, '_request', None) is not None:
+        if hasattr(record, '_request') and getattr(record, '_request', None) is not None:
             entry.append(request_to_xml(record._request))
-            entry.append(balanced_request_to_xml(record._request, record._request_retry,
-                                                 record._datacenter))
+            entry.append(
+                balanced_request_to_xml(record._request, record._request_retry, record._datacenter),  # type: ignore
+            )
 
-        if getattr(record, '_debug_response', None) is not None:
+        if hasattr(record, '_debug_response') and getattr(record, '_debug_response', None) is not None:
             entry.append(E.debug(record._debug_response))
 
-        if getattr(record, '_xslt_profile', None) is not None:
+        if hasattr(record, '_xslt_profile') and getattr(record, '_xslt_profile', None) is not None:
             entry.append(record._xslt_profile)
 
-        if getattr(record, '_xml', None) is not None:
+        if hasattr(record, '_xml') and getattr(record, '_xml', None) is not None:
             entry.append(E.text(etree.tostring(record._xml, encoding='unicode')))
 
-        if getattr(record, '_protobuf', None) is not None:
+        if hasattr(record, '_protobuf') and getattr(record, '_protobuf', None) is not None:
             entry.append(E.text(str(record._protobuf)))
 
-        if getattr(record, '_text', None) is not None:
+        if hasattr(record, '_text') and getattr(record, '_text', None) is not None:
             entry.append(E.text(to_unicode(record._text)))
 
-        if getattr(record, '_stage', None) is not None:
-            entry.append(E.stage(
-                E.name(record._stage.name),
-                E.delta(_format_number(record._stage.delta)),
-                E.start_delta(_format_number(record._stage.start_delta))
-            ))
+        if hasattr(record, '_stage') and getattr(record, '_stage', None) is not None:
+            entry.append(
+                E.stage(
+                    E.name(record._stage.name),
+                    E.delta(_format_number(record._stage.delta)),
+                    E.start_delta(_format_number(record._stage.start_delta)),
+                ),
+            )
 
         return entry
@@ -335,14 +365,14 @@ def _produce_one(self, record):
 
 
 class DebugTransform(OutputTransform):
-    def __init__(self, application, request):
+    def __init__(self, application: FrontikApplication, request: HTTPServerRequest) -> None:
         self.application = application
         self.request = request
 
-    def is_enabled(self):
+    def is_enabled(self) -> bool:
         return getattr(self.request, '_debug_enabled', False)
 
-    def is_inherited(self):
+    def is_inherited(self) -> bool:
         return getattr(self.request, '_debug_inherited', False)
 
     def transform_first_chunk(self, status_code, headers, chunk, finishing):
@@ -356,14 +386,11 @@ def transform_first_chunk(self, status_code, headers, chunk, finishing):
         if not self.is_inherited():
             headers = HTTPHeaders({'Content-Type': media_types.TEXT_HTML})
         else:
-            headers = HTTPHeaders({
-                'Content-Type': media_types.APPLICATION_XML,
-                DEBUG_HEADER_NAME: 'true'
-            })
+            headers = HTTPHeaders({'Content-Type': media_types.APPLICATION_XML, DEBUG_HEADER_NAME: 'true'})
 
         return 200, headers, self.produce_debug_body(finishing)
 
-    def transform_chunk(self, chunk, finishing):
+    def transform_chunk(self, chunk: bytes, finishing: bool) -> bytes:
         if not self.is_enabled():
             return chunk
@@ -371,57 +398,50 @@ def transform_chunk(self, chunk, finishing):
 
         return self.produce_debug_body(finishing)
 
-    def produce_debug_body(self, finishing):
+    def produce_debug_body(self, finishing: bool) -> bytes:
         if not finishing:
             return b''
 
         start_time = time.time()
 
-        debug_log_data = request_context.get_log_handler().produce_all()
+        debug_log_data = request_context.get_log_handler().produce_all()  # type: ignore
         debug_log_data.set('code', str(int(self.status_code)))
         debug_log_data.set('handler-name', request_context.get_handler_name())
         debug_log_data.set('started', _format_number(self.request._start_time))
-        debug_log_data.set('request-id', str(self.request.request_id))
+        debug_log_data.set('request-id', str(self.request.request_id))  # type: ignore
         debug_log_data.set('stages-total', _format_number((time.time() - self.request._start_time) * 1000))
 
         try:
-            debug_log_data.append(E.versions(
-                _pretty_print_xml(
-                    frontik.app.get_frontik_and_apps_versions(self.application)
-                )
-            ))
+            debug_log_data.append(E.versions(_pretty_print_xml(get_frontik_and_apps_versions(self.application))))
         except Exception:
             debug_log.exception('cannot add version information')
             debug_log_data.append(E.versions('failed to get version information'))
 
         try:
-            debug_log_data.append(E.status(
-                _pretty_print_json(self.application.get_current_status())
-            ))
+            debug_log_data.append(E.status(_pretty_print_json(self.application.get_current_status())))
         except Exception:
             debug_log.exception('cannot add status information')
             debug_log_data.append(E.status('failed to get status information'))
 
-        debug_log_data.append(E.request(
-            E.method(self.request.method),
-            _params_to_xml(self.request.uri),
-            _headers_to_xml(self.request.headers),
-            _cookies_to_xml(self.request.headers)
-        ))
+        debug_log_data.append(
+            E.request(
+                E.method(self.request.method),
+                _params_to_xml(self.request.uri),  # type: ignore
+                _headers_to_xml(self.request.headers),
+                _cookies_to_xml(self.request.headers),  # type: ignore
+            ),
+        )
 
-        debug_log_data.append(E.response(
-            _headers_to_xml(self.headers),
-            _cookies_to_xml(self.headers)
-        ))
+        debug_log_data.append(E.response(_headers_to_xml(self.headers), _cookies_to_xml(self.headers)))
 
         response_buffer = b''.join(self.chunks)
         original_response = {
             'buffer': base64.b64encode(response_buffer),
             'headers': dict(self.headers),
-            'code': int(self.status_code)
+            'code': int(self.status_code),
         }
 
-        debug_log_data.append(frontik.xml_util.dict_to_xml(original_response, 'original-response'))
+        debug_log_data.append(dict_to_xml(original_response, 'original-response'))
 
         debug_log_data.set('response-size', str(len(response_buffer)))
         debug_log_data.set('generate-time', _format_number((time.time() - start_time) * 1000))
@@ -437,12 +457,11 @@ def produce_debug_body(self, finishing):
             except Exception:
                 debug_log.exception('XSLT debug file error')
 
-            try:
-                debug_log.error('XSL error log entries:\n' + '\n'.join(
-                    '{0.filename}:{0.line}:{0.column}\n\t{0.message}'.format(m) for m in transform.error_log
-                ))
-            except Exception:
-                pass
+            with contextlib.suppress(Exception):
+                debug_log.error(
+                    'XSL error log entries:\n'
+                    + '\n'.join(f'{m.filename}:{m.line}:{m.column}\n\t{m.message}' for m in transform.error_log),
+                )
 
             log_document = etree.tostring(debug_log_data, encoding='UTF-8', xml_declaration=True)
         else:
@@ -452,7 +471,7 @@ def produce_debug_body(self, finishing):
 
 
 class DebugMode:
-    def __init__(self, handler):
+    def __init__(self, handler: PageHandler) -> None:
         debug_value = frontik.util.get_cookie_or_url_param_value(handler, 'debug')
 
         self.mode_values = debug_value.split(',') if debug_value is not None else ''
@@ -460,12 +479,12 @@ def __init__(self, handler):
 
         if self.inherited:
             debug_log.debug('debug mode is inherited due to %s request header', DEBUG_HEADER_NAME)
-            handler.request._debug_inherited = True
+            handler.request._debug_inherited = True  # type: ignore
 
         if debug_value is not None or self.inherited:
             handler.require_debug_access()
 
-            self.enabled = handler.request._debug_enabled = True
+            self.enabled = handler.request._debug_enabled = True  # type: ignore
             self.pass_debug = 'nopass' not in self.mode_values or self.inherited
             self.profile_xslt = 'xslt' in self.mode_values
@@ -477,3 +496,19 @@ def __init__(self, handler):
             self.enabled = False
             self.pass_debug = False
             self.profile_xslt = False
+
+
+def get_frontik_and_apps_versions(application: FrontikApplication) -> etree.Element:
+    versions = etree.Element('versions')
+
+    etree.SubElement(versions, 'frontik').text = frontik_version
+    etree.SubElement(versions, 'tornado').text = tornado.version
+    etree.SubElement(versions, 'lxml.etree.LXML').text = '.'.join(str(x) for x in etree.LXML_VERSION)
+    etree.SubElement(versions, 'lxml.etree.LIBXML').text = '.'.join(str(x) for x in etree.LIBXML_VERSION)
+    etree.SubElement(versions, 'lxml.etree.LIBXSLT').text = '.'.join(str(x) for x in etree.LIBXSLT_VERSION)
+    etree.SubElement(versions, 'aiohttp').text = aiohttp.__version__
+    etree.SubElement(versions, 'python').text = sys.version.replace('\n', '')
+    etree.SubElement(versions, 'event_loop').text = str(type(asyncio.get_event_loop())).split("'")[1]
+    etree.SubElement(versions, 'application', name=options.app).extend(application.application_version_xml())
+
+    return versions
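_string_to_color above derives a deterministic badge colour from a string; a standalone sketch of the same computation (illustrative, not part of the patch):

    from binascii import crc32

    value_hash = crc32(b'frontik') % 0xFFFFFFFF
    r = (value_hash & 0xFF0000) >> 16
    g = (value_hash & 0x00FF00) >> 8
    b = value_hash & 0x0000FF
    bgcolor = f'#{r:02x}{g:02x}{b:02x}'  # stable background per input string
    # Rec. 709 luma picks a readable foreground
    fgcolor = 'black' if 0.2126 * r + 0.7152 * g + 0.0722 * b > 0xFF / 2 else 'white'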
diff --git a/frontik/doc.py b/frontik/doc.py
index 758f3a4df..ac20cac90 100644
--- a/frontik/doc.py
+++ b/frontik/doc.py
@@ -1,9 +1,16 @@
-import lxml.etree as etree
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
 
+import lxml.etree as etree
 from tornado.concurrent import Future
 
+if TYPE_CHECKING:
+    from collections.abc import Generator
+    from typing import Any
 
-def _is_valid_element(node):
+
+def _is_valid_element(node: Any) -> bool:
     if not isinstance(node, etree._Element):
         return False
@@ -16,17 +23,18 @@ def _is_valid_element(node):
 class Doc:
     __slots__ = ('root_node', 'data')
 
-    def __init__(self, root_node='doc'):
+    def __init__(self, root_node: Any = 'doc') -> None:
         if isinstance(root_node, str):
             root_node = etree.Element(root_node)
 
         if not (_is_valid_element(root_node) or isinstance(root_node, Doc)):
-            raise TypeError(f'Cannot set {root_node} as root node')
+            msg = f'Cannot set {root_node} as root node'
+            raise TypeError(msg)
 
         self.root_node = root_node
-        self.data = []
+        self.data: list = []
 
-    def put(self, chunk):
+    def put(self, chunk: Any) -> Doc:
         if isinstance(chunk, list):
             self.data.extend(chunk)
         else:
@@ -34,16 +42,16 @@ def put(self, chunk):
 
         return self
 
-    def is_empty(self):
+    def is_empty(self) -> bool:
         return len(self.data) == 0
 
     def clear(self):
         self.data = []
 
-    def to_etree_element(self):
+    def to_etree_element(self) -> etree.Element:
         res = self.root_node.to_etree_element() if isinstance(self.root_node, Doc) else self.root_node
 
-        def chunk_to_element(chunk):
+        def chunk_to_element(chunk: Any) -> Generator:
             if isinstance(chunk, list):
                 for chunk_i in chunk:
                     for i in chunk_to_element(chunk_i):
@@ -63,12 +71,13 @@ def chunk_to_element(chunk):
                 yield chunk
 
             elif chunk is not None:
-                raise ValueError(f'Unexpected value of type {type(chunk)} in doc')
+                msg = f'Unexpected value of type {type(chunk)} in doc'
+                raise ValueError(msg)
 
         for chunk_element in chunk_to_element(self.data):
             res.append(chunk_element)
 
         return res
 
-    def to_string(self):
+    def to_string(self) -> bytes:
         return etree.tostring(self.to_etree_element(), encoding='utf-8', xml_declaration=True)
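A minimal usage sketch for the reworked Doc, assuming only the behaviour visible above (put flattens lists, to_string serialises the tree with an XML declaration; not part of the patch):

    import lxml.etree as etree
    from frontik.doc import Doc

    doc = Doc()  # root element defaults to <doc>
    doc.put(etree.Element('item'))                     # a single element
    doc.put([etree.Element('a'), etree.Element('b')])  # a list is flattened into the data
    print(doc.to_string())  # bytes: <?xml ... ?><doc><item/><a/><b/></doc>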
diff --git a/frontik/file_cache.py b/frontik/file_cache.py
index b4f7ca0de..1232118af 100644
--- a/frontik/file_cache.py
+++ b/frontik/file_cache.py
@@ -1,14 +1,17 @@
 import copy
+import logging
 import os
+from collections.abc import Callable
+from typing import Any
 
 from frontik.options import options
 
 
 # This implementation is broken in so many ways
 class LimitedDict(dict):
-    def __init__(self, max_len=None, step=None, deepcopy=False):
+    def __init__(self, max_len: int | None = None, step: int | None = None, deepcopy: bool = False) -> None:
         dict.__init__(self)
-        self._order = []
+        self._order: list = []
         self.max_len = max_len
         self.step = step
         self.deepcopy = deepcopy
@@ -40,7 +43,16 @@ class FileCache:
     """
     load_fn :: filename -> (status, result)
     """
+
-    def __init__(self, cache_name, root_dir, load_fn, max_len=None, step=None, deepcopy=False):
+    def __init__(
+        self,
+        cache_name: str,
+        root_dir: str,
+        load_fn: Callable,
+        max_len: int | None = None,
+        step: int | None = None,
+        deepcopy: bool = False,
+    ) -> None:
         self.cache_name = cache_name
         self.root_dir = root_dir
         self.load_fn = load_fn
@@ -48,7 +60,7 @@ def __init__(self, cache_name, root_dir, load_fn, max_len=None, step=None, deepc
         self.max_len = max_len
         self.cache = LimitedDict(max_len, step, deepcopy)
 
-    def populate(self, filenames, log, freeze=False):
+    def populate(self, filenames: list, log: logging.Logger, freeze: bool = False) -> None:
         if self.max_len == 0:
             return
@@ -57,17 +69,18 @@ def populate(self, filenames, log, freeze=False):
 
         self.frozen = freeze and self.max_len is None
 
-    def load(self, filename, log):
+    def load(self, filename: str, log: logging.Logger) -> Any:
         if filename in self.cache:
             log.debug('got %s file from cache (%s cache size: %s)', filename, self.cache_name, len(self.cache))
             return self.cache[filename]
 
         if self.frozen:
-            raise Exception(f'encounter file {filename} not in cache while cache is frozen')
+            msg = f'encounter file {filename} not in cache while cache is frozen'
+            raise Exception(msg)
 
         return self._load(filename, log)
 
-    def _load(self, filename, log):
+    def _load(self, filename: str, log: logging.Logger) -> Any:
         real_filename = os.path.normpath(os.path.join(self.root_dir, filename))
         log.info('reading file "%s"', real_filename)
         result = self.load_fn(real_filename, log)
@@ -77,14 +90,23 @@ def _load(self, filename, log):
 
 
 class InvalidOptionCache:
-    def __init__(self, option):
+    def __init__(self, option: str) -> None:
         self.option = option
 
     def load(self, filename, *args, **kwargs):
-        raise Exception(f'{self.option} option is undefined')
-
-
-def make_file_cache(cache_name, option_name, root_dir, fun, max_len=None, step=None, deepcopy=False):
+        msg = f'{self.option} option is undefined'
+        raise Exception(msg)
+
+
+def make_file_cache(
+    cache_name: str,
+    option_name: str,
+    root_dir: str | None,
+    fun: Callable,
+    max_len: int | None = None,
+    step: int | None = None,
+    deepcopy: bool = False,
+) -> FileCache | InvalidOptionCache:
     if root_dir:
         # disable cache in development environment
         max_len = 0 if options.debug else max_len
diff --git a/frontik/futures.py b/frontik/futures.py
index c07f5e9b0..f95e8be7e 100644
--- a/frontik/futures.py
+++ b/frontik/futures.py
@@ -1,10 +1,17 @@
+from __future__ import annotations
+
 import asyncio
-import time
 import logging
-from functools import wraps, partial
+import time
+from functools import partial, wraps
+from typing import TYPE_CHECKING
 
-from tornado.ioloop import IOLoop
 from tornado.concurrent import Future
+from tornado.ioloop import IOLoop
+
+if TYPE_CHECKING:
+    from collections.abc import Callable
+    from typing import Any
 
 async_logger = logging.getLogger('frontik.futures')
@@ -24,28 +31,28 @@ class AsyncGroup:
     would not be automatically called.
     """
 
-    def __init__(self, finish_cb, name=None):
+    def __init__(self, finish_cb: Callable, name: str | None = None) -> None:
         self._counter = 0
         self._finish_cb = finish_cb
         self._finished = False
         self._name = name
-        self._future = Future()
+        self._future: Future = Future()
         self._start_time = time.time()
-        self._futures = []
+        self._futures: list[Future] = []
 
-    def is_finished(self):
+    def is_finished(self) -> bool:
         return self._finished
 
-    def abort(self):
+    def abort(self) -> None:
         async_logger.info('aborting %s', self)
         self._finished = True
         if not self._future.done():
             self._future.set_exception(AbortAsyncGroup())
 
-    def finish(self):
+    def finish(self) -> None:
         if self._finished:
             async_logger.warning('trying to finish already finished %s', self)
-            return
+            return None
 
         self._finished = True
         self._future.set_result(None)
@@ -54,9 +61,11 @@ def finish(self):
             self._finish_cb()
         finally:
             # prevent possible cycle references
-            self._finish_cb = None
+            self._finish_cb = None  # type: ignore
+
+        return None
 
-    def try_finish(self):
+    def try_finish(self) -> None:
         if self._counter == 0:
             self.finish()
@@ -65,17 +74,17 @@ def try_finish_async(self):
         if self._counter == 0:
             IOLoop.current().add_callback(self.finish)
 
-    def _inc(self):
+    def _inc(self) -> None:
         if self._finished:
             async_logger.info('ignoring adding callback in %s', self)
-            raise AbortAsyncGroup()
+            raise AbortAsyncGroup
 
         self._counter += 1
 
-    def _dec(self):
+    def _dec(self) -> None:
         self._counter -= 1
 
-    def add(self, intermediate_cb, exception_handler=None):
+    def add(self, intermediate_cb: Callable, exception_handler: Callable | None = None) -> Callable:
         self._inc()
 
         @wraps(intermediate_cb)
@@ -98,7 +107,7 @@ def new_cb(*args, **kwargs):
 
         return new_cb
 
-    def add_notification(self):
+    def add_notification(self) -> Callable:
         self._inc()
 
         def new_cb(*args, **kwargs):
@@ -112,22 +121,26 @@ def _handle_future(callback, future):
         future.result()
         callback()
 
-    def add_future(self, future):
+    def add_future(self, future: Future) -> Future:
         IOLoop.current().add_future(future, partial(self._handle_future, self.add_notification()))
         self._futures.append(future)
         return future
 
-    def get_finish_future(self):
+    def get_finish_future(self) -> Future:
         return self._future
 
-    def get_gathering_future(self):
+    def get_gathering_future(self) -> Future:
         return asyncio.gather(*self._futures)
 
     def __str__(self):
         return f'AsyncGroup(name={self._name}, finished={self._finished})'
 
 
-def future_fold(future, result_mapper=None, exception_mapper=None):
+def future_fold(
+    future: Future,
+    result_mapper: Callable | None = None,
+    exception_mapper: Callable | None = None,
+) -> Future:
     """
     Creates a new future with result or exception processed by result_mapper and exception_mapper.
@@ -135,9 +148,9 @@ def future_fold(future, result_mapper=None, exception_mapper=None):
     Any of the mappers can be None — then the result or exception is left as is.
     """
 
-    res_future = Future()
+    res_future: Future = Future()
 
-    def _process(func, value):
+    def _process(func: Callable | None, value: Any) -> None:
         try:
             processed = func(value) if func is not None else value
         except Exception as e:
@@ -149,8 +162,10 @@ def _on_ready(wrapped_future):
         exception = wrapped_future.exception()
         if exception is not None:
             if not callable(exception_mapper):
+
                 def default_exception_func(error):
                     raise error
+
                 _process(default_exception_func, exception)
             else:
                 _process(exception_mapper, exception)
handler_logger.warning( 'attempted to make NOT waited http request to %s %s with fail fast policy, turn off fail_fast', - host, path + host, + path, ) return False @@ -87,49 +94,52 @@ def _fail_fast_policy(fail_fast, waited, host, path): class PageHandler(RequestHandler): + preprocessors: Reversible = () + _priority_preprocessor_names: list = [] - preprocessors = () - _priority_preprocessor_names = [] - - def __init__(self, application, request, **kwargs): + def __init__(self, application: FrontikApplication, request: HTTPServerRequest, **kwargs: Any) -> None: self.name = self.__class__.__name__ - self.request_id = request.request_id = request_context.get_request_id() + self.request_id = request.request_id = request_context.get_request_id() # type: ignore self.config = application.config self.log = handler_logger - self.text = None + self.text: Any = None + self.application: FrontikApplication = None # type: ignore super().__init__(application, request, **kwargs) - self._launched_preprocessors = [] - self._preprocessor_futures = [] - self._exception_hooks = [] + self._launched_preprocessors: list = [] + self._preprocessor_futures: list | None = [] + self._exception_hooks: list = [] + self.statsd_client: StatsDClient | StatsDClientStub = None # type: ignore for integration in application.available_integrations: integration.initialize_handler(self) self.stages_logger = StagesLogger(request, self.statsd_client) - self._debug_access = None - self._render_postprocessors = [] - self._postprocessors = [] + self._debug_access: bool | None = None + self._render_postprocessors: list = [] + self._postprocessors: list = [] - self._mandatory_cookies = {} + self._mandatory_cookies: dict = {} self._mandatory_headers = tornado.httputil.HTTPHeaders() - self._validation_model = BaseValidationModel + self._validation_model: type[BaseValidationModel | BaseModel] = BaseValidationModel self.timeout_checker = None self.use_adaptive_strategy = False outer_timeout = request.headers.get(OUTER_TIMEOUT_MS_HEADER) if outer_timeout: - self.timeout_checker = get_timeout_checker(request.headers.get(USER_AGENT_HEADER), - float(outer_timeout), - request.request_time) + self.timeout_checker = get_timeout_checker( + request.headers.get(USER_AGENT_HEADER), + float(outer_timeout), + request.request_time, + ) def __repr__(self): - return '.'.join([self.__module__, self.__class__.__name__]) + return f'{self.__module__}.{self.__class__.__name__}' - def prepare(self): + def prepare(self) -> None: self.active_limit = frontik.handler_active_limit.ActiveHandlersLimit(self.statsd_client) self.debug_mode = DebugMode(self) self.finish_group = AsyncGroup(lambda: None, name='finish') @@ -140,15 +150,17 @@ def prepare(self): self.xml_producer = self.application.xml.get_producer(self) self.doc = self.xml_producer.doc - self._http_client = self.application.http_client_factory.get_http_client( - self.modify_http_client_request, self.debug_mode.enabled, self.use_adaptive_strategy - ) # type: HttpClient + self._http_client: HttpClient = self.application.http_client_factory.get_http_client( + self.modify_http_client_request, + self.debug_mode.enabled, + self.use_adaptive_strategy, + ) self._handler_finished_notification = self.finish_group.add_notification() super().prepare() - def require_debug_access(self, login=None, passwd=None): + def require_debug_access(self, login: str | None = None, passwd: str | None = None) -> None: if self._debug_access is None: if options.debug: debug_access = True @@ -161,12 +173,14 @@ def 
require_debug_access(self, login=None, passwd=None): self._debug_access = debug_access def set_default_headers(self): - self._headers = tornado.httputil.HTTPHeaders({ - 'Server': f'Frontik/{frontik_version}', - 'X-Request-Id': self.request_id, - }) + self._headers = tornado.httputil.HTTPHeaders( + { + 'Server': f'Frontik/{frontik_version}', # type: ignore + 'X-Request-Id': self.request_id, # type: ignore + }, + ) - def decode_argument(self, value, name=None): + def decode_argument(self, value: bytes, name: str | None = None) -> str: try: return super().decode_argument(value, name) except (UnicodeError, tornado.web.HTTPError): @@ -178,7 +192,7 @@ def decode_argument(self, value, name=None): self.log.exception('cannot decode argument, ignoring invalid chars') return value.decode('utf-8', 'ignore') - def get_body_argument(self, name, default=_ARG_DEFAULT, strip=True): + def get_body_argument(self, name: str, default: Any = _ARG_DEFAULT, strip: bool = True) -> str | None: if self._get_request_mime_type(self.request) == media_types.APPLICATION_JSON: if name not in self.json_body and default == _ARG_DEFAULT: raise tornado.web.MissingArgumentError(name) @@ -194,15 +208,21 @@ def get_body_argument(self, name, default=_ARG_DEFAULT, strip=True): return super().get_body_argument(name, strip=strip) return super().get_body_argument(name, default, strip) - def set_validation_model(self, model: Type[Union[BaseValidationModel, BaseModel]]): + def set_validation_model(self, model: type[BaseValidationModel | BaseModel]) -> None: if issubclass(model, BaseModel): self._validation_model = model else: - raise TypeError('model is not subclass of BaseClass') + msg = 'model is not subclass of BaseClass' + raise TypeError(msg) def get_validated_argument( - self, name: str, validation: Validators, default: any = _ARG_DEFAULT, - from_body: bool = False, array: bool = False, strip: bool = True + self, + name: str, + validation: Validators, + default: Any = _ARG_DEFAULT, + from_body: bool = False, + array: bool = False, + strip: bool = True, ) -> Any: validator = validation.value if default is not _ARG_DEFAULT and default is not None: @@ -214,6 +234,7 @@ def get_validated_argument( else: validated_default = default + value: Any if array and from_body: value = self.get_body_arguments(name, strip) elif from_body: @@ -233,75 +254,46 @@ def get_validated_argument( return validated_value - @overload - def get_str_argument(self, name: str, default: None = _ARG_DEFAULT, path_safe: bool = True, - **kwargs) -> Optional[Union[str, List[str]]]: - ... - - @overload - def get_str_argument(self, name: str, default: Union[str, List[str]] = _ARG_DEFAULT, path_safe: bool = True, - **kwargs) -> Union[str, List[str]]: - ... - def get_str_argument( - self, name: str, default: Optional[Union[str, List[str]]] = _ARG_DEFAULT, path_safe: bool = True, **kwargs - ) -> Optional[Union[str, List[str]]]: + self, + name: str, + default: str | list[str] | None = _ARG_DEFAULT, + path_safe: bool = True, + **kwargs: Any, + ) -> str | list[str] | None: if path_safe: return self.get_validated_argument(name, Validators.PATH_SAFE_STRING, default=default, **kwargs) return self.get_validated_argument(name, Validators.STRING, default=default, **kwargs) - @overload - def get_int_argument(self, name: str, default: None = _ARG_DEFAULT, - **kwargs) -> Optional[Union[int, List[int]]]: - ... - - @overload - def get_int_argument(self, name: str, default: Union[int, List[int]] = _ARG_DEFAULT, - **kwargs) -> Union[int, List[int]]: - ... 
- def get_int_argument( - self, name: str, default: Optional[Union[int, List[int]]] = _ARG_DEFAULT, **kwargs - ) -> Optional[Union[int, List[int]]]: + self, + name: str, + default: int | list[int] | None = _ARG_DEFAULT, + **kwargs: Any, + ) -> int | list[int] | None: return self.get_validated_argument(name, Validators.INTEGER, default=default, **kwargs) - @overload - def get_bool_argument(self, name: str, default: None = _ARG_DEFAULT, - **kwargs) -> Optional[Union[bool, List[bool]]]: - ... - - @overload - def get_bool_argument(self, name: str, - default: Union[bool, List[bool]] = _ARG_DEFAULT, **kwargs) -> Union[bool, List[bool]]: - ... - def get_bool_argument( - self, name: str, default: Optional[Union[bool, List[bool]]] = _ARG_DEFAULT, **kwargs - ) -> Optional[Union[bool, List[bool]]]: + self, + name: str, + default: bool | list[bool] | None = _ARG_DEFAULT, + **kwargs: Any, + ) -> bool | list[bool] | None: return self.get_validated_argument(name, Validators.BOOLEAN, default=default, **kwargs) - @overload - def get_float_argument( - self, name: str, default: None = _ARG_DEFAULT, **kwargs - ) -> Optional[Union[float, List[float]]]: - ... - - @overload def get_float_argument( - self, name: str, default: Union[float, List[float]] = _ARG_DEFAULT, **kwargs - ) -> Union[float, List[float]]: - ... - - def get_float_argument( - self, name: str, default: Optional[Union[float, List[float]]] = _ARG_DEFAULT, **kwargs - ) -> Optional[Union[float, List[float]]]: + self, + name: str, + default: float | list[float] | None = _ARG_DEFAULT, + **kwargs: Any, + ) -> float | list[float] | None: return self.get_validated_argument(name, Validators.FLOAT, default=default, **kwargs) - def _get_request_mime_type(self, request): + def _get_request_mime_type(self, request: HTTPServerRequest) -> str: content_type = request.headers.get('Content-Type', '') return re.split(MEDIA_TYPE_PARAMETERS_SEPARATOR_RE, content_type)[0] - def set_status(self, status_code, reason=None): + def set_status(self, status_code: int, reason: str | None = None) -> None: status_code = _fallback_status_code(status_code) super().set_status(status_code, reason=reason) @@ -312,13 +304,11 @@ def redirect(self, url, *args, allow_protocol_relative=False, **kwargs): # of a part of the path, making this effectively an open redirect. # Reject paths starting with two slashes to prevent this. # This is only reachable under certain configurations. 
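        # (Illustrative, not part of the patch: with allow_protocol_relative=False,
        # a call such as self.redirect('//evil.example/phish') hits this check
        # and is rejected with 403 instead of being treated as protocol-relative.)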
- raise tornado.web.HTTPError( - 403, 'cannot redirect path with two initial slashes' - ) + raise tornado.web.HTTPError(403, 'cannot redirect path with two initial slashes') self.log.info('redirecting to: %s', url) return super().redirect(url, *args, **kwargs) - def reverse_url(self, name, *args, **kwargs): + def reverse_url(self, name: str, *args: Any, **kwargs: Any) -> str: return self.application.reverse_url(name, *args, **kwargs) @property @@ -327,18 +317,18 @@ def json_body(self): self._json_body = self._get_json_body() return self._json_body - def _get_json_body(self): + def _get_json_body(self) -> Any: try: return json.loads(self.request.body) except json.JSONDecodeError as _: - raise JSONBodyParseError() + raise JSONBodyParseError @classmethod - def add_callback(cls, callback, *args, **kwargs): + def add_callback(cls, callback: Callable, *args: Any, **kwargs: Any) -> None: IOLoop.current().add_callback(callback, *args, **kwargs) @classmethod - def add_timeout(cls, deadline, callback, *args, **kwargs): + def add_timeout(cls, deadline: float, callback: Callable, *args: Any, **kwargs: Any) -> Any: return IOLoop.current().add_timeout(deadline, callback, *args, **kwargs) @staticmethod @@ -346,7 +336,7 @@ def remove_timeout(timeout): IOLoop.current().remove_timeout(timeout) @classmethod - def add_future(cls, future, callback): + def add_future(cls, future: Future, callback: Callable) -> None: IOLoop.current().add_future(future, callback) # Requests handling @@ -377,9 +367,9 @@ async def head(self, *args, **kwargs): def options(self, *args, **kwargs): self.__return_405() - async def _execute_page(self, page_handler_method): + async def _execute_page(self, page_handler_method: Callable[[], Coroutine[Any, Any, None]]) -> None: self.stages_logger.commit_stage('prepare') - preprocessors = _get_preprocessors(page_handler_method.__func__) + preprocessors = _get_preprocessors(page_handler_method.__func__) # type: ignore def _prioritise_preprocessor_by_list(preprocessor): name = _get_preprocessor_name(preprocessor) @@ -407,50 +397,48 @@ def _prioritise_preprocessor_by_list(preprocessor): self.write(render_result) async def get_page(self): - """ This method can be implemented in the subclass """ + """This method can be implemented in the subclass""" self.__return_405() async def post_page(self): - """ This method can be implemented in the subclass """ + """This method can be implemented in the subclass""" self.__return_405() async def put_page(self): - """ This method can be implemented in the subclass """ + """This method can be implemented in the subclass""" self.__return_405() async def delete_page(self): - """ This method can be implemented in the subclass """ + """This method can be implemented in the subclass""" self.__return_405() - def __return_405(self): - allowed_methods = [ - name for name in ('get', 'post', 'put', 'delete') if f'{name}_page' in vars(self.__class__) - ] + def __return_405(self) -> None: + allowed_methods = [name for name in ('get', 'post', 'put', 'delete') if f'{name}_page' in vars(self.__class__)] self.set_header('Allow', ', '.join(allowed_methods)) self.set_status(405) self.finish() - def get_page_fail_fast(self, request_result: RequestResult): + def get_page_fail_fast(self, request_result: RequestResult) -> None: self.__return_error(request_result.status_code, error_info={'is_fail_fast': True}) - def post_page_fail_fast(self, request_result: RequestResult): + def post_page_fail_fast(self, request_result: RequestResult) -> None: 
self.__return_error(request_result.status_code, error_info={'is_fail_fast': True}) - def put_page_fail_fast(self, request_result: RequestResult): + def put_page_fail_fast(self, request_result: RequestResult) -> None: self.__return_error(request_result.status_code, error_info={'is_fail_fast': True}) - def delete_page_fail_fast(self, request_result: RequestResult): + def delete_page_fail_fast(self, request_result: RequestResult) -> None: self.__return_error(request_result.status_code, error_info={'is_fail_fast': True}) - def __return_error(self, response_code, **kwargs): + def __return_error(self, response_code: int, **kwargs: Any) -> None: self.send_error(response_code if 300 <= response_code < 500 else 502, **kwargs) # Finish page - def is_finished(self): + def is_finished(self) -> bool: return self._finished - def check_finished(self, callback): + def check_finished(self, callback: Callable) -> Callable: @wraps(callback) def wrapper(*args, **kwargs): if self.is_finished(): @@ -460,7 +448,7 @@ def wrapper(*args, **kwargs): return wrapper - def finish_with_postprocessors(self): + def finish_with_postprocessors(self) -> None: if not self.finish_group.get_finish_future().done(): self.finish_group.abort() @@ -470,12 +458,12 @@ def _cb(future): asyncio.create_task(self._postprocess()).add_done_callback(_cb) - def run_task(self: 'PageHandler', coro: Coroutine): + def run_task(self: PageHandler, coro: Coroutine) -> Task: task = asyncio.create_task(coro) self.finish_group.add_future(task) return task - async def _postprocess(self): + async def _postprocess(self) -> Any: if self._finished: self.log.info('page was already finished, skipping postprocessors') return @@ -487,6 +475,7 @@ async def _postprocess(self): self.log.info('page was already finished, skipping page producer') return + renderer: Any if self.text is not None: renderer = self._generic_producer elif not self.json.is_empty(): @@ -500,7 +489,7 @@ async def _postprocess(self): postprocessed_result = await self._run_template_postprocessors( self._render_postprocessors, rendered_result, - meta_info + meta_info, ) return postprocessed_result @@ -529,7 +518,7 @@ def log_exception(self, typ, value, tb): super().log_exception(typ, value, tb) - def _handle_request_exception(self, e): + def _handle_request_exception(self, e: BaseException) -> None: if isinstance(e, AbortAsyncGroup): self.log.info('page was aborted, skipping postprocessing') return @@ -559,11 +548,14 @@ def _handle_request_exception(self, e): if request.name: request_name = f'{request_name} ({request.name})' - self.log.warning('FailFastError: request %s failed with %s code', request_name, - e.failed_result.status_code) + self.log.warning( + 'FailFastError: request %s failed with %s code', + request_name, + e.failed_result.status_code, + ) try: - error_method_name = f'{self.request.method.lower()}_page_fail_fast' + error_method_name = f'{self.request.method.lower()}_page_fail_fast' # type: ignore method = getattr(self, error_method_name, None) if callable(method): method(e.failed_result) @@ -576,7 +568,7 @@ def _handle_request_exception(self, e): else: super()._handle_request_exception(e) - def send_error(self, status_code=500, **kwargs): + def send_error(self, status_code: int = 500, **kwargs: Any) -> None: """`send_error` is adapted to support `write_error` that can call `finish` asynchronously. 
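        An illustrative sketch of the asynchronous path (assuming
        HTTPErrorWithPostprocessors accepts a status code like its
        tornado.web.HTTPError base; see write_error below):

            raise HTTPErrorWithPostprocessors(503)
            # write_error() routes this to finish_with_postprocessors(),
            # so the response is finished only after postprocessors complete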
""" @@ -607,28 +599,25 @@ def send_error(self, status_code=500, **kwargs): if not self._finished: self.finish() - def write_error(self, status_code=500, **kwargs): + def write_error(self, status_code: int = 500, **kwargs: Any) -> None: """ `write_error` can call `finish` asynchronously if HTTPErrorWithPostprocessors is raised. """ - if 'exc_info' in kwargs: - exception = kwargs['exc_info'][1] - else: - exception = None + exception = kwargs['exc_info'][1] if 'exc_info' in kwargs else None if isinstance(exception, HTTPErrorWithPostprocessors): self.finish_with_postprocessors() return self.set_header('Content-Type', media_types.TEXT_HTML) - return super().write_error(status_code, **kwargs) + super().write_error(status_code, **kwargs) - def cleanup(self): + def cleanup(self) -> None: if hasattr(self, 'active_limit'): self.active_limit.release() - def finish(self, chunk=None): + def finish(self, chunk: str | bytes | dict | None = None) -> Future[None]: self.stages_logger.commit_stage('postprocess') for name, value in self._mandatory_headers.items(): self.set_header(name, value) @@ -643,47 +632,56 @@ def finish(self, chunk=None): self._write_buffer = [] chunk = None - super().finish(chunk) + finish_future = super().finish(chunk) self.cleanup() + return finish_future # Preprocessors and postprocessors def add_preprocessor_future(self, future): if self._preprocessor_futures is None: + msg = 'preprocessors chain is already finished, calling add_preprocessor_future at this time is incorrect' raise Exception( - 'preprocessors chain is already finished, calling add_preprocessor_future at this time is incorrect' + msg, ) self._preprocessor_futures.append(future) return future - def set_mandatory_header(self, name, value): + def set_mandatory_header(self, name: str, value: str) -> None: self._mandatory_headers[name] = value - def set_mandatory_cookie(self, name, value, domain=None, expires=None, path="/", expires_days=None, **kwargs): + def set_mandatory_cookie( + self, + name: str, + value: str, + domain: str | None = None, + expires: str | None = None, + path: str = "/", + expires_days: int | None = None, + **kwargs: Any, + ) -> None: self._mandatory_cookies[name] = ((name, value, domain, expires, path, expires_days), kwargs) - def clear_header(self, name): + def clear_header(self, name: str) -> None: if name in self._mandatory_headers: del self._mandatory_headers[name] super().clear_header(name) - def clear_cookie(self, name, path="/", domain=None): + def clear_cookie(self, name: str, path: str = "/", domain: str | None = None) -> None: # type: ignore if name in self._mandatory_cookies: del self._mandatory_cookies[name] super().clear_cookie(name, path=path, domain=domain) - def was_preprocessor_called(self, preprocessor): + def was_preprocessor_called(self, preprocessor: Any) -> bool: return preprocessor.preprocessor_name in self._launched_preprocessors - async def _run_preprocessor_function(self, preprocessor_function): + async def _run_preprocessor_function(self, preprocessor_function: Callable) -> None: if asyncio.iscoroutinefunction(preprocessor_function): await preprocessor_function(self) else: preprocessor_function(self) - self._launched_preprocessors.append( - _get_preprocessor_name(preprocessor_function) - ) + self._launched_preprocessors.append(_get_preprocessor_name(preprocessor_function)) async def run_preprocessor(self, preprocessor): if self._finished: @@ -691,13 +689,13 @@ async def run_preprocessor(self, preprocessor): return False await 
self._run_preprocessor_function(preprocessor.function) - async def _run_preprocessors(self, preprocessor_functions): + async def _run_preprocessors(self, preprocessor_functions: list) -> bool: for p in preprocessor_functions: await self._run_preprocessor_function(p) if self._finished: self.log.info('page was already finished, breaking preprocessors chain') return False - await asyncio.gather(*self._preprocessor_futures) + await asyncio.gather(*self._preprocessor_futures) # type: ignore self._preprocessor_futures = None @@ -707,7 +705,7 @@ async def _run_preprocessors(self, preprocessor_functions): return True - async def _run_postprocessors(self, postprocessors): + async def _run_postprocessors(self, postprocessors: list) -> bool: for p in postprocessors: if asyncio.iscoroutinefunction(p): await p(self) @@ -720,7 +718,7 @@ async def _run_postprocessors(self, postprocessors): return True - async def _run_template_postprocessors(self, postprocessors, rendered_template, meta_info): + async def _run_template_postprocessors(self, postprocessors: list, rendered_template: Any, meta_info: Any) -> Any: for p in postprocessors: if asyncio.iscoroutinefunction(p): rendered_template = await p(self, rendered_template, meta_info) @@ -733,10 +731,10 @@ async def _run_template_postprocessors(self, postprocessors, rendered_template, return rendered_template - def add_render_postprocessor(self, postprocessor): + def add_render_postprocessor(self, postprocessor: Any) -> None: self._render_postprocessors.append(postprocessor) - def add_postprocessor(self, postprocessor): + def add_postprocessor(self, postprocessor: Any) -> None: self._postprocessors.append(postprocessor) # Producers @@ -749,18 +747,18 @@ async def _generic_producer(self): return self.text, None - def xml_from_file(self, filename): + def xml_from_file(self, filename: str) -> Any: return self.xml_producer.xml_from_file(filename) - def set_xsl(self, filename): - return self.xml_producer.set_xsl(filename) + def set_xsl(self, filename: str) -> None: + self.xml_producer.set_xsl(filename) - def set_template(self, filename): - return self.json_producer.set_template(filename) + def set_template(self, filename: str) -> None: + self.json_producer.set_template(filename) # HTTP client methods - def modify_http_client_request(self, balanced_request: RequestBuilder): + def modify_http_client_request(self, balanced_request: RequestBuilder) -> None: balanced_request.headers['x-request-id'] = request_context.get_request_id() balanced_request.headers[OUTER_TIMEOUT_MS_HEADER] = f'{balanced_request.request_timeout * 1000:.0f}' @@ -779,96 +777,235 @@ def modify_http_client_request(self, balanced_request: RequestBuilder): if authorization is not None: balanced_request.headers[header_name] = authorization - def group(self, futures): + def group(self, futures: dict) -> Task: return self.run_task(gather_dict(coro_dict=futures)) - def get_url(self, host, path, *, name=None, data=None, headers=None, follow_redirects=True, profile=None, - connect_timeout=None, request_timeout=None, max_timeout_tries=None, - speculative_timeout_pct=None, waited=True, parse_response=True, parse_on_error=True, - fail_fast=False) -> Future[RequestResult]: - + def get_url( + self, + host: str, + path: str, + *, + name: str | None = None, + data: Any = None, + headers: Any = None, + follow_redirects: bool = True, + profile: str | None = None, + connect_timeout: float | None = None, + request_timeout: float | None = None, + max_timeout_tries: int | None = None, + speculative_timeout_pct: float | 
None = None, + waited: bool = True, + parse_response: bool = True, + parse_on_error: bool = True, + fail_fast: bool = False, + ) -> Future[RequestResult]: fail_fast = _fail_fast_policy(fail_fast, waited, host, path) - client_method = lambda: self._http_client.get_url( - host, path, name=name, data=data, headers=headers, follow_redirects=follow_redirects, profile=profile, - connect_timeout=connect_timeout, request_timeout=request_timeout, max_timeout_tries=max_timeout_tries, - speculative_timeout_pct=speculative_timeout_pct, parse_response=parse_response, - parse_on_error=parse_on_error, fail_fast=fail_fast - ) + def client_method(): + return self._http_client.get_url( + host, + path, + name=name, + data=data, + headers=headers, + follow_redirects=follow_redirects, + profile=profile, + connect_timeout=connect_timeout, + request_timeout=request_timeout, + max_timeout_tries=max_timeout_tries, + speculative_timeout_pct=speculative_timeout_pct, + parse_response=parse_response, + parse_on_error=parse_on_error, + fail_fast=fail_fast, + ) return self._execute_http_client_method(host, path, client_method, waited) - def head_url(self, host, path, *, name=None, data=None, headers=None, follow_redirects=True, profile=None, - connect_timeout=None, request_timeout=None, max_timeout_tries=None, - speculative_timeout_pct=None, waited=True, fail_fast=False) -> Future[RequestResult]: - + def head_url( + self, + host: str, + path: str, + *, + name: str | None = None, + data: Any = None, + headers: Any = None, + follow_redirects: bool = True, + profile: str | None = None, + connect_timeout: float | None = None, + request_timeout: float | None = None, + max_timeout_tries: int | None = None, + speculative_timeout_pct: float | None = None, + waited: bool = True, + fail_fast: bool = False, + ) -> Future[RequestResult]: fail_fast = _fail_fast_policy(fail_fast, waited, host, path) - client_method = lambda: self._http_client.head_url( - host, path, data=data, name=name, headers=headers, follow_redirects=follow_redirects, profile=profile, - connect_timeout=connect_timeout, request_timeout=request_timeout, max_timeout_tries=max_timeout_tries, - speculative_timeout_pct=speculative_timeout_pct, fail_fast=fail_fast - ) + def client_method(): + return self._http_client.head_url( + host, + path, + data=data, + name=name, + headers=headers, + follow_redirects=follow_redirects, + profile=profile, + connect_timeout=connect_timeout, + request_timeout=request_timeout, + max_timeout_tries=max_timeout_tries, + speculative_timeout_pct=speculative_timeout_pct, + fail_fast=fail_fast, + ) return self._execute_http_client_method(host, path, client_method, waited) - def post_url(self, host, path, *, - name=None, data='', headers=None, files=None, content_type=None, follow_redirects=True, profile=None, - connect_timeout=None, request_timeout=None, max_timeout_tries=None, idempotent=False, - speculative_timeout_pct=None, waited=True, parse_response=True, parse_on_error=True, - fail_fast=False) -> Future[RequestResult]: - + def post_url( + self, + host: str, + path: str, + *, + name: str | None = None, + data: Any = '', + headers: Any = None, + files: Any = None, + content_type: str | None = None, + follow_redirects: bool | None = True, + profile: str | None = None, + connect_timeout: float | None = None, + request_timeout: float | None = None, + max_timeout_tries: int | None = None, + idempotent: bool = False, + speculative_timeout_pct: float | None = None, + waited: bool = True, + parse_response: bool = True, + parse_on_error: bool = 
True, + fail_fast: bool = False, + ) -> Future[RequestResult]: fail_fast = _fail_fast_policy(fail_fast, waited, host, path) - client_method = lambda: self._http_client.post_url( - host, path, data=data, name=name, headers=headers, files=files, content_type=content_type, - follow_redirects=follow_redirects, profile=profile, connect_timeout=connect_timeout, - request_timeout=request_timeout, max_timeout_tries=max_timeout_tries, idempotent=idempotent, - speculative_timeout_pct=speculative_timeout_pct, parse_response=parse_response, - parse_on_error=parse_on_error, fail_fast=fail_fast - ) + def client_method(): + return self._http_client.post_url( + host, + path, + data=data, + name=name, + headers=headers, + files=files, + content_type=content_type, + follow_redirects=follow_redirects, + profile=profile, + connect_timeout=connect_timeout, + request_timeout=request_timeout, + max_timeout_tries=max_timeout_tries, + idempotent=idempotent, + speculative_timeout_pct=speculative_timeout_pct, + parse_response=parse_response, + parse_on_error=parse_on_error, + fail_fast=fail_fast, + ) return self._execute_http_client_method(host, path, client_method, waited) - def put_url(self, host, path, *, name=None, data='', headers=None, content_type=None, follow_redirects=True, - profile=None, connect_timeout=None, request_timeout=None, max_timeout_tries=None, idempotent=True, - speculative_timeout_pct=None, waited=True, parse_response=True, parse_on_error=True, - fail_fast=False) -> Future[RequestResult]: - + def put_url( + self, + host: str, + path: str, + *, + name: str | None = None, + data: Any = '', + headers: Any = None, + content_type: str | None = None, + follow_redirects: bool = True, + profile: str | None = None, + connect_timeout: float | None = None, + request_timeout: float | None = None, + max_timeout_tries: int | None = None, + idempotent: bool = True, + speculative_timeout_pct: float | None = None, + waited: bool = True, + parse_response: bool = True, + parse_on_error: bool = True, + fail_fast: bool = False, + ) -> Future[RequestResult]: fail_fast = _fail_fast_policy(fail_fast, waited, host, path) - client_method = lambda: self._http_client.put_url( - host, path, name=name, data=data, headers=headers, content_type=content_type, - follow_redirects=follow_redirects, profile=profile, connect_timeout=connect_timeout, - request_timeout=request_timeout, max_timeout_tries=max_timeout_tries, idempotent=idempotent, - speculative_timeout_pct=speculative_timeout_pct, parse_response=parse_response, - parse_on_error=parse_on_error, fail_fast=fail_fast - ) + def client_method(): + return self._http_client.put_url( + host, + path, + name=name, + data=data, + headers=headers, + content_type=content_type, + follow_redirects=follow_redirects, + profile=profile, + connect_timeout=connect_timeout, + request_timeout=request_timeout, + max_timeout_tries=max_timeout_tries, + idempotent=idempotent, + speculative_timeout_pct=speculative_timeout_pct, + parse_response=parse_response, + parse_on_error=parse_on_error, + fail_fast=fail_fast, + ) return self._execute_http_client_method(host, path, client_method, waited) - def delete_url(self, host, path, *, name=None, data=None, headers=None, content_type=None, profile=None, - connect_timeout=None, request_timeout=None, max_timeout_tries=None, speculative_timeout_pct=None, - waited=True, parse_response=True, parse_on_error=True, fail_fast=False) -> Future[RequestResult]: - + def delete_url( + self, + host: str, + path: str, + *, + name: str | None = None, + data: Any = None, 
+ headers: Any = None, + content_type: str | None = None, + profile: str | None = None, + connect_timeout: float | None = None, + request_timeout: float | None = None, + max_timeout_tries: int | None = None, + speculative_timeout_pct: float | None = None, + waited: bool = True, + parse_response: bool = True, + parse_on_error: bool = True, + fail_fast: bool = False, + ) -> Future[RequestResult]: fail_fast = _fail_fast_policy(fail_fast, waited, host, path) - client_method = lambda: self._http_client.delete_url( - host, path, name=name, data=data, headers=headers, content_type=content_type, profile=profile, - connect_timeout=connect_timeout, request_timeout=request_timeout, max_timeout_tries=max_timeout_tries, - parse_response=parse_response, parse_on_error=parse_on_error, - speculative_timeout_pct=speculative_timeout_pct, fail_fast=fail_fast - ) + def client_method(): + return self._http_client.delete_url( + host, + path, + name=name, + data=data, + headers=headers, + content_type=content_type, + profile=profile, + connect_timeout=connect_timeout, + request_timeout=request_timeout, + max_timeout_tries=max_timeout_tries, + parse_response=parse_response, + parse_on_error=parse_on_error, + speculative_timeout_pct=speculative_timeout_pct, + fail_fast=fail_fast, + ) return self._execute_http_client_method(host, path, client_method, waited) - def _execute_http_client_method(self, host, path, client_method, waited) -> Future[RequestResult]: + def _execute_http_client_method( + self, + host: str, + path: str, + client_method: Callable, + waited: bool, + ) -> Future[RequestResult]: if waited and (self.is_finished() or self.finish_group.is_finished()): handler_logger.info( - 'attempted to make waited http request to %s %s in finished handler, ignoring', host, path + 'attempted to make waited http request to %s %s in finished handler, ignoring', + host, + path, ) - future = Future() + future: Future = Future() future.set_exception(AbortAsyncGroup()) return future diff --git a/frontik/handler_active_limit.py b/frontik/handler_active_limit.py index 7d18a674f..c86c2d12b 100644 --- a/frontik/handler_active_limit.py +++ b/frontik/handler_active_limit.py @@ -1,9 +1,15 @@ +from __future__ import annotations + import logging +from typing import TYPE_CHECKING from tornado.web import HTTPError from frontik.options import options +if TYPE_CHECKING: + from frontik.integrations.statsd import StatsDClient, StatsDClientStub + handlers_count_logger = logging.getLogger('handlers_count') @@ -11,33 +17,33 @@ class ActiveHandlersLimit: count = 0 high_watermark_ratio = 0.75 - def __init__(self, statsd_client): + def __init__(self, statsd_client: StatsDClient | StatsDClientStub) -> None: self._acquired = False self._statsd_client = statsd_client self._high_watermark = int(options.max_active_handlers * self.high_watermark_ratio) if ActiveHandlersLimit.count > options.max_active_handlers: - handlers_count_logger.warning( - 'dropping request: too many active handlers (%s)', ActiveHandlersLimit.count - ) + handlers_count_logger.warning('dropping request: too many active handlers (%s)', ActiveHandlersLimit.count) raise HTTPError(503) elif ActiveHandlersLimit.count > self._high_watermark: handlers_count_logger.warning( 'active handlers count reached %.2f * %s watermark (%s)', - self.high_watermark_ratio, options.max_active_handlers, ActiveHandlersLimit.count + self.high_watermark_ratio, + options.max_active_handlers, + ActiveHandlersLimit.count, ) self.acquire() - def acquire(self): + def acquire(self) -> None: if not 
self._acquired: ActiveHandlersLimit.count += 1 self._acquired = True self._statsd_client.gauge('handler.active_count', ActiveHandlersLimit.count) - def release(self): + def release(self) -> None: if self._acquired: ActiveHandlersLimit.count -= 1 self._acquired = False diff --git a/frontik/http_status.py b/frontik/http_status.py index 724f4b51e..bf6b5a943 100644 --- a/frontik/http_status.py +++ b/frontik/http_status.py @@ -1,4 +1,4 @@ import http.client NON_CRITICAL_BAD_GATEWAY = 569 -ALLOWED_STATUSES = list(http.client.responses.keys()) + [NON_CRITICAL_BAD_GATEWAY] +ALLOWED_STATUSES = [*list(http.client.responses.keys()), NON_CRITICAL_BAD_GATEWAY] diff --git a/frontik/integrations/__init__.py b/frontik/integrations/__init__.py index ae406e13e..0e8b4fac0 100644 --- a/frontik/integrations/__init__.py +++ b/frontik/integrations/__init__.py @@ -1,14 +1,32 @@ +from __future__ import annotations + import importlib import logging import pkgutil -from asyncio import Future -from typing import List, Optional, Tuple +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from asyncio import Future + + from frontik.app import FrontikApplication + from frontik.handler import PageHandler integrations_logger = logging.getLogger('integrations') -def load_integrations(app) -> Tuple[List['Integration'], List[Future]]: - for importer, module_name, is_package in pkgutil.iter_modules(__path__): +class Integration: + def initialize_app(self, app: FrontikApplication) -> Future | None: + raise NotImplementedError # pragma: no cover + + def deinitialize_app(self, app: FrontikApplication) -> Future | None: + pass # pragma: no cover + + def initialize_handler(self, handler: PageHandler) -> None: + raise NotImplementedError # pragma: no cover + + +def load_integrations(app: FrontikApplication) -> tuple[list[Integration], list[Future]]: + for _importer, module_name, _is_package in pkgutil.iter_modules(__path__): try: importlib.import_module(f'frontik.integrations.{module_name}') except Exception as e: @@ -26,14 +44,3 @@ def load_integrations(app) -> Tuple[List['Integration'], List[Future]]: available_integrations.append(integration) return available_integrations, init_futures - - -class Integration: - def initialize_app(self, app) -> Optional[Future]: - raise NotImplementedError() # pragma: no cover - - def deinitialize_app(self, app) -> Optional[Future]: - pass # pragma: no cover - - def initialize_handler(self, handler): - raise NotImplementedError() # pragma: no cover diff --git a/frontik/integrations/gc_metrics_collector.py b/frontik/integrations/gc_metrics_collector.py index 785552631..48bd9dad0 100644 --- a/frontik/integrations/gc_metrics_collector.py +++ b/frontik/integrations/gc_metrics_collector.py @@ -1,27 +1,34 @@ +from __future__ import annotations + import gc import time -from asyncio import Future from functools import partial -from typing import Optional +from typing import TYPE_CHECKING from tornado.ioloop import PeriodicCallback from frontik.integrations import Integration, integrations_logger from frontik.options import options +if TYPE_CHECKING: + from asyncio import Future + + from frontik.app import FrontikApplication + class GCMetricsCollectorIntegration(Integration): - def initialize_app(self, app) -> Optional[Future]: + def initialize_app(self, app: FrontikApplication) -> Future | None: if options.gc_metrics_send_interval_ms is None or options.gc_metrics_send_interval_ms <= 0: integrations_logger.info( - 'GC metrics collector integration is disabled: gc_metrics_send_interval_ms option is not 
configured' + 'GC metrics collector integration is disabled: gc_metrics_send_interval_ms option is not configured', ) - return + return None gc.callbacks.append(gc_metrics_collector) periodic_callback = PeriodicCallback(partial(send_metrics, app), options.gc_metrics_send_interval_ms) periodic_callback.start() + return None def initialize_handler(self, handler): pass @@ -30,22 +37,22 @@ def initialize_handler(self, handler): class GCStats: __slots__ = ('start', 'duration', 'count', 'max_stw') - def __init__(self): - self.start = None - self.duration = 0 + def __init__(self) -> None: + self.start: float = None # type: ignore + self.duration: float = 0 self.count = 0 - self.max_stw = 0 + self.max_stw: float = 0 - def on_gc_start(self): + def on_gc_start(self) -> None: self.start = time.perf_counter() - def on_gc_stop(self): + def on_gc_stop(self) -> None: gc_duration = time.perf_counter() - self.start self.duration += gc_duration self.count += 1 self.max_stw = max(self.max_stw, gc_duration) - def clear(self): + def clear(self) -> None: self.duration = 0 self.count = 0 self.max_stw = 0 diff --git a/frontik/integrations/kafka.py b/frontik/integrations/kafka.py index 51c826c79..e1ff70634 100644 --- a/frontik/integrations/kafka.py +++ b/frontik/integrations/kafka.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import asyncio from typing import TYPE_CHECKING @@ -9,18 +11,19 @@ if TYPE_CHECKING: from asyncio import Future - from typing import Optional + + from frontik.app import FrontikApplication class KafkaIntegration(Integration): def __init__(self): self.kafka_producers = {} - def initialize_app(self, app) -> 'Optional[Future]': - def get_kafka_producer(producer_name: str) -> 'Optional[AIOKafkaProducer]': + def initialize_app(self, app: FrontikApplication) -> Future | None: + def get_kafka_producer(producer_name: str) -> AIOKafkaProducer | None: return self.kafka_producers.get(producer_name) - app.get_kafka_producer = get_kafka_producer + app.get_kafka_producer = get_kafka_producer # type: ignore if options.kafka_clusters: init_futures = [] diff --git a/frontik/integrations/sentry.py b/frontik/integrations/sentry.py index c5599fcc0..618299f39 100644 --- a/frontik/integrations/sentry.py +++ b/frontik/integrations/sentry.py @@ -1,5 +1,6 @@ -from asyncio import Future -from typing import Optional +from __future__ import annotations + +from typing import TYPE_CHECKING import sentry_sdk from http_client.request_response import FailFastError @@ -9,12 +10,17 @@ from frontik.integrations import Integration, integrations_logger from frontik.options import options +if TYPE_CHECKING: + from asyncio import Future + + from frontik.app import FrontikApplication + class SentryIntegration(Integration): - def initialize_app(self, app) -> Optional[Future]: + def initialize_app(self, app: FrontikApplication) -> Future | None: if not options.sentry_dsn: integrations_logger.info('sentry integration is disabled: sentry_dsn option is not configured') - return + return None sentry_sdk.init( dsn=options.sentry_dsn, @@ -25,7 +31,7 @@ def initialize_app(self, app) -> Optional[Future]: integrations=[ TornadoIntegration(), ], - ignore_errors=[HTTPError, FailFastError] + ignore_errors=[HTTPError, FailFastError], ) return None diff --git a/frontik/integrations/slow_callback_tracker.py b/frontik/integrations/slow_callback_tracker.py index d2c2a7693..c2ac40567 100644 --- a/frontik/integrations/slow_callback_tracker.py +++ b/frontik/integrations/slow_callback_tracker.py @@ -4,7 +4,6 @@ import time from asyncio import 
Future from functools import partial -from typing import Optional import sentry_sdk @@ -18,12 +17,12 @@ class SlowCallbackTrackerIntegration(Integration): - def initialize_app(self, app) -> Optional[Future]: + def initialize_app(self, app: FrontikApplication) -> Future | None: if options.asyncio_task_threshold_sec is None: integrations_logger.info( - 'slow callback tracker integration is disabled: asyncio_task_threshold_sec option is None' + 'slow callback tracker integration is disabled: asyncio_task_threshold_sec option is None', ) - return + return None slow_tasks_logger = bootstrap_logger('slow_tasks', logging.WARNING) import reprlib @@ -35,21 +34,26 @@ def initialize_app(self, app) -> Optional[Future]: global long_gc_log long_gc_log = bootstrap_logger('gc_stat', logging.WARNING) + return None + def initialize_handler(self, handler): pass -def wrap_handle_with_time_logging(app: FrontikApplication, slow_tasks_logger): +def wrap_handle_with_time_logging(app: FrontikApplication, slow_tasks_logger: logging.Logger) -> None: old_run = asyncio.Handle._run - def _log_slow_tasks(handle: asyncio.Handle, delta: float): + def _log_slow_tasks(handle: asyncio.Handle, delta: float) -> None: delta_ms = delta * 1000 app.statsd_client.time('long_task.time', int(delta_ms)) slow_tasks_logger.warning('%s took %.2fms', handle, delta_ms) - if options.asyncio_task_critical_threshold_sec and delta >= options.asyncio_task_critical_threshold_sec: - if options.sentry_dsn: - sentry_sdk.capture_message(f'{handle} took {delta_ms:.2f} ms', level='error') + if ( + options.asyncio_task_critical_threshold_sec + and delta >= options.asyncio_task_critical_threshold_sec + and options.sentry_dsn + ): + sentry_sdk.capture_message(f'{handle} took {delta_ms:.2f} ms', level='error') def run(self): global current_callback_start @@ -67,16 +71,16 @@ def run(self): if delta >= options.asyncio_task_threshold_sec: self._context.run(partial(_log_slow_tasks, self, delta)) - asyncio.Handle._run = run + asyncio.Handle._run = run # type: ignore class GCStats: __slots__ = ('callback_start', 'gc_start', 'sum_duration') - def __init__(self): - self.callback_start = None - self.gc_start = None - self.sum_duration = 0 + def __init__(self) -> None: + self.callback_start: float | None = None + self.gc_start: float | None = None + self.sum_duration: float = 0 GC_STATS = GCStats() diff --git a/frontik/integrations/statsd.py b/frontik/integrations/statsd.py index bdd601d8d..0f1b71727 100644 --- a/frontik/integrations/statsd.py +++ b/frontik/integrations/statsd.py @@ -1,60 +1,67 @@ -import socket +from __future__ import annotations + import collections +import socket import threading import time from functools import partial -from asyncio import Future -from typing import Optional +from typing import TYPE_CHECKING from frontik.integrations import Integration, integrations_logger -from frontik.options import options + +if TYPE_CHECKING: + from asyncio import Future + from collections.abc import Callable, ItemsView + from typing import Any + + from frontik.app import FrontikApplication + from frontik.options import Options class StatsdIntegration(Integration): def __init__(self): self.statsd_client = None - def initialize_app(self, app) -> Optional[Future]: - self.statsd_client = create_statsd_client(options, app) - app.statsd_client = self.statsd_client + def initialize_app(self, app: FrontikApplication) -> Future | None: + self.statsd_client = app.statsd_client return None def initialize_handler(self, handler): handler.statsd_client = 
self.statsd_client -def _convert_tag(name, value): +def _convert_tag(name: str, value: Any) -> str: return '{}_is_{}'.format(name.replace('.', '-'), str(value).replace('.', '-')) -def _convert_tags(tags): +def _convert_tags(tags: dict[str, Any]) -> str: if not tags: return '' return '.' + '.'.join(_convert_tag(name, value) for name, value in tags.items() if value is not None) -def _encode_str(some): - return some if isinstance(some, (bytes, bytearray)) else some.encode('utf-8') +def _encode_str(some: str | bytes) -> bytes: + return some if isinstance(some, bytes | bytearray) else some.encode('utf-8') class Counters: - def __init__(self): - self._tags_to_counter = {} + def __init__(self) -> None: + self._tags_to_counter: dict[tuple, int] = {} - def add(self, value, **kwargs): + def add(self, value: int, **kwargs: Any) -> None: tags = tuple(sorted(kwargs.items())) self._tags_to_counter.setdefault(tags, 0) self._tags_to_counter[tags] += value - def get_snapshot_and_reset(self): + def get_snapshot_and_reset(self) -> ItemsView: snapshot = self._tags_to_counter self._tags_to_counter = {} return snapshot.items() class StatsDClientStub: - def __init__(self): + def __init__(self) -> None: pass def stack(self): @@ -80,20 +87,27 @@ def send_periodically(self, callback, send_interval_sec=None): class StatsDClient: - def __init__(self, host, port, default_periodic_send_interval_sec, app=None, max_udp_size=508, reconnect_timeout=2): + def __init__( + self, + host: str, + port: int, + default_periodic_send_interval_sec: int, + app: str | None = None, + max_udp_size: int = 508, + reconnect_timeout: int = 2, + ) -> None: self.host = host self.port = port self.default_periodic_send_interval_sec = default_periodic_send_interval_sec self.app = app self.max_udp_size = max_udp_size self.reconnect_timeout = reconnect_timeout - self.buffer = collections.deque() + self.buffer: collections.deque = collections.deque() self.stacking = False - self.socket = None self._connect() - def _connect(self): + def _connect(self) -> None: integrations_logger.info('statsd: connecting to %s:%d', self.host, self.port) self.socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) @@ -101,17 +115,16 @@ def _connect(self): try: self.socket.connect((self.host, self.port)) - except socket.error as e: + except OSError as e: integrations_logger.warning('statsd: connect error: %s', e) self._close() - return - def _close(self): + def _close(self) -> None: self.socket.close() - self.socket = None + self.socket = None # type: ignore threading.Timer(self.reconnect_timeout, self._connect).start() - def _send(self, message): + def _send(self, message: str) -> None: if len(message) > self.max_udp_size: integrations_logger.debug('statsd: message %s is too long, dropping', message) @@ -121,22 +134,22 @@ def _send(self, message): self._write(message) - def _write(self, data): + def _write(self, data: bytes | str) -> None: if self.socket is None: integrations_logger.debug('statsd: trying to write to closed socket, dropping') return try: self.socket.send(_encode_str(data)) - except (socket.error, IOError, OSError) as e: + except OSError as e: integrations_logger.warning('statsd: writing error: %s', e) self._close() - def stack(self): + def stack(self) -> None: self.buffer.clear() self.stacking = True - def flush(self): + def flush(self) -> None: self.stacking = False if not self.buffer: @@ -156,26 +169,26 @@ def flush(self): self._write(data) - def count(self, aspect, delta, **kwargs): - self._send('{}{}:{}|c'.format(aspect, 
_convert_tags(dict(kwargs, app=self.app)), delta)) + def count(self, aspect: str, delta: int, **kwargs: Any) -> None: + self._send(f'{aspect}{_convert_tags(dict(kwargs, app=self.app))}:{delta}|c') - def counters(self, aspect, counters): + def counters(self, aspect: str, counters: Counters) -> None: for tags, count in counters.get_snapshot_and_reset(): self.count(aspect, count, **dict(tags)) - def time(self, aspect, value, **kwargs): - self._send('{}{}:{}|ms'.format(aspect, _convert_tags(dict(kwargs, app=self.app)), value)) + def time(self, aspect: str, value: float, **kwargs: Any) -> None: + self._send(f'{aspect}{_convert_tags(dict(kwargs, app=self.app))}:{value}|ms') - def gauge(self, aspect, value, **kwargs): - self._send('{}{}:{}|g'.format(aspect, _convert_tags(dict(kwargs, app=self.app)), value)) + def gauge(self, aspect: str, value: float, **kwargs: Any) -> None: + self._send(f'{aspect}{_convert_tags(dict(kwargs, app=self.app))}:{value}|g') - def send_periodically(self, callback, send_interval_sec=None): + def send_periodically(self, callback: Callable, send_interval_sec: float | None = None) -> None: if send_interval_sec is None: send_interval_sec = self.default_periodic_send_interval_sec threading.Thread(target=partial(self._send_periodically, callback, send_interval_sec), daemon=True).start() @staticmethod - def _send_periodically(callback, send_interval_sec): + def _send_periodically(callback: Callable, send_interval_sec: float) -> None: while True: try: time.sleep(send_interval_sec) @@ -184,13 +197,16 @@ def _send_periodically(callback, send_interval_sec): integrations_logger.warning('statsd: writing error: %s', e) -def create_statsd_client(options, app): +def create_statsd_client(options: Options, app: FrontikApplication) -> StatsDClient | StatsDClientStub: + statsd_client: StatsDClient | StatsDClientStub if options.statsd_host is None or options.statsd_port is None: statsd_client = StatsDClientStub() - integrations_logger.info( - 'statsd integration is disabled: statsd_host / statsd_port options are not configured' - ) + integrations_logger.info('statsd integration is disabled: statsd_host / statsd_port options are not configured') else: - statsd_client = StatsDClient(options.statsd_host, options.statsd_port, - options.statsd_default_periodic_send_interval_sec, app=app.app) + statsd_client = StatsDClient( + options.statsd_host, + options.statsd_port, + options.statsd_default_periodic_send_interval_sec, + app=app.app, + ) return statsd_client diff --git a/frontik/integrations/telemetry.py b/frontik/integrations/telemetry.py index f76c833c8..051a78f02 100644 --- a/frontik/integrations/telemetry.py +++ b/frontik/integrations/telemetry.py @@ -1,13 +1,12 @@ +from __future__ import annotations + import logging import random -from asyncio import Future -from typing import Optional +from typing import TYPE_CHECKING from urllib.parse import urlparse -import aiohttp from http_client import client_request_context from http_client.options import options as http_client_options -from http_client.request_response import RequestBuilder from opentelemetry import trace from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import OTLPSpanExporter from opentelemetry.instrumentation import aiohttp_client, tornado @@ -18,7 +17,6 @@ from opentelemetry.sdk.trace.sampling import ParentBased, TraceIdRatioBased from opentelemetry.semconv.resource import ResourceAttributes from opentelemetry.semconv.trace import SpanAttributes -from opentelemetry.trace import Span from 
opentelemetry.trace.propagation.tracecontext import TraceContextTextMapPropagator from opentelemetry.util.http import ExcludeList @@ -26,57 +24,71 @@ from frontik.integrations import Integration, integrations_logger from frontik.options import options +if TYPE_CHECKING: + from asyncio import Future + + import aiohttp + from http_client.request_response import RequestBuilder + from opentelemetry.trace import Span + + from frontik.app import FrontikApplication + log = logging.getLogger('telemetry') # change log-level, because mainly detach context produce exception on Tornado 5. Will be deleted, when up Tornado to 6 logging.getLogger('opentelemetry.context').setLevel(logging.CRITICAL) set_global_textmap(TraceContextTextMapPropagator()) -tornado._excluded_urls = ExcludeList(list(tornado._excluded_urls._excluded_urls) + ['/status']) +tornado._excluded_urls = ExcludeList([*list(tornado._excluded_urls._excluded_urls), '/status']) class TelemetryIntegration(Integration): def __init__(self): - self.aiohttp_instrumentor = None - self.tornado_instrumentor = None + self.aiohttp_instrumentor = aiohttp_client.AioHttpClientInstrumentor() + self.tornado_instrumentor = tornado.TornadoInstrumentor() - def initialize_app(self, app) -> Optional[Future]: + def initialize_app(self, app: FrontikApplication) -> Future | None: if not options.opentelemetry_enabled: - return + return None integrations_logger.info('start telemetry') - resource = Resource(attributes={ - ResourceAttributes.SERVICE_NAME: options.app, - ResourceAttributes.SERVICE_VERSION: app.application_version(), - ResourceAttributes.HOST_NAME: options.node_name, - ResourceAttributes.CLOUD_REGION: http_client_options.datacenter, - }) + resource = Resource( + attributes={ + ResourceAttributes.SERVICE_NAME: options.app, # type: ignore + ResourceAttributes.SERVICE_VERSION: app.application_version(), # type: ignore + ResourceAttributes.HOST_NAME: options.node_name, + ResourceAttributes.CLOUD_REGION: http_client_options.datacenter, + }, + ) otlp_exporter = OTLPSpanExporter(endpoint=options.opentelemetry_collector_url, insecure=True) - provider = TracerProvider(resource=resource, - id_generator=FrontikIdGenerator(), - sampler=ParentBased(TraceIdRatioBased(options.opentelemetry_sampler_ratio))) + provider = TracerProvider( + resource=resource, + id_generator=FrontikIdGenerator(), + sampler=ParentBased(TraceIdRatioBased(options.opentelemetry_sampler_ratio)), + ) provider.add_span_processor(BatchSpanProcessor(otlp_exporter)) trace.set_tracer_provider(provider) - self.aiohttp_instrumentor = aiohttp_client.AioHttpClientInstrumentor() self.aiohttp_instrumentor.instrument( request_hook=_client_request_hook, ) - self.tornado_instrumentor = tornado.TornadoInstrumentor() self.tornado_instrumentor.instrument( server_request_hook=_server_request_hook, ) - def deinitialize_app(self, app) -> Optional[Future]: + return None + + def deinitialize_app(self, app: FrontikApplication) -> Future | None: if not options.opentelemetry_enabled: - return + return None integrations_logger.info('stop telemetry') self.aiohttp_instrumentor.uninstrument() self.tornado_instrumentor.uninstrument() + return None def initialize_handler(self, handler): pass @@ -87,7 +99,7 @@ def _server_request_hook(span, handler): span.set_attribute(SpanAttributes.HTTP_TARGET, handler.request.uri) -def _client_request_hook(span: Span, params: aiohttp.TraceRequestStartParams): +def _client_request_hook(span: Span, params: aiohttp.TraceRequestStartParams) -> None: if not span or not span.is_recording(): return 
@@ -100,13 +112,15 @@ def _client_request_hook(span: Span, params: aiohttp.TraceRequestStartParams): if upstream_name is None: upstream_name = get_netloc(request.url) - span.update_name(' '.join((el for el in [request.method, upstream_name] if el))) + span.update_name(' '.join(el for el in [request.method, upstream_name] if el)) span.set_attribute('http.request.timeout', request.request_timeout * 1000) if upstream_datacenter is not None: span.set_attribute('http.request.cloud.region', upstream_datacenter) + return -def get_netloc(url): + +def get_netloc(url: str) -> str: parts = urlparse(url) if parts.scheme not in ('http', 'https', ''): parts = urlparse('//' + url) @@ -115,19 +129,21 @@ def get_netloc(url): class FrontikIdGenerator(IdGenerator): - def generate_span_id(self) -> int: return random.getrandbits(64) def generate_trace_id(self) -> int: request_id = request_context.get_request_id() try: + if request_id is None: + msg = 'bad request_id' + raise Exception(msg) + if len(request_id) < 32: - log.debug(f'request_id = {request_id} is less than 32 characters. Generating random trace_id ') + log.debug('request_id = %s is less than 32 characters. Generating random trace_id', request_id) return random.getrandbits(128) - request_id = int(request_id[:32], 16) - return request_id + return int(request_id[:32], 16) except Exception: - log.debug(f'request_id = {request_id} is not valid hex-format. Generating random trace_id') + log.debug('request_id = %s is not valid hex-format. Generating random trace_id', request_id) return random.getrandbits(128) diff --git a/frontik/json_builder.py b/frontik/json_builder.py index 4d6026b36..ea673a1ed 100644 --- a/frontik/json_builder.py +++ b/frontik/json_builder.py @@ -1,19 +1,26 @@ +from __future__ import annotations + import json +from typing import TYPE_CHECKING from tornado.concurrent import Future +if TYPE_CHECKING: + from collections.abc import Iterable + from typing import Any + -def _encode_value(value): - def _encode_iterable(values): +def _encode_value(value: Any) -> Any: + def _encode_iterable(values: Iterable) -> list: return [_encode_value(v) for v in values] - def _encode_dict(d): + def _encode_dict(d: dict) -> dict: return {k: _encode_value(v) for k, v in d.items()} if isinstance(value, dict): return _encode_dict(value) - elif isinstance(value, (set, frozenset, list, tuple)): + elif isinstance(value, set | frozenset | list | tuple): return _encode_iterable(value) elif isinstance(value, Future): @@ -37,6 +44,7 @@ class FrontikJsonEncoder(json.JSONEncoder): * objects with `to_json_value()` method * `Future` objects (only if the future is resolved) """ + def default(self, obj): return _encode_value(obj) @@ -44,35 +52,36 @@ def default(self, obj): class JsonBuilder: __slots__ = ('_data', '_encoder', 'root_node') - def __init__(self, root_node=None, json_encoder=None): + def __init__(self, root_node: str | None = None, json_encoder: Any = None) -> None: if root_node is not None and not isinstance(root_node, str): - raise TypeError(f'Cannot set {root_node} as root node') + msg = f'Cannot set {root_node} as root node' + raise TypeError(msg) - self._data = [] + self._data: list = [] self._encoder = json_encoder self.root_node = root_node - def put(self, *args, **kwargs): + def put(self, *args: Any, **kwargs: Any) -> None: """Append a chunk of data to JsonBuilder.""" self._data.extend(args) if kwargs: self._data.append(kwargs) - def is_empty(self): + def is_empty(self) -> bool: return len(self._data) == 0 - def clear(self): + def clear(self) -> None: 
self._data = [] - def replace(self, *args, **kwargs): + def replace(self, *args: Any, **kwargs: Any) -> None: self.clear() self.put(*args, **kwargs) - def to_dict(self): - """ Return plain dict from all data appended to JsonBuilder """ + def to_dict(self) -> dict: + """Return plain dict from all data appended to JsonBuilder""" return _encode_value(self._concat_chunks()) - def _concat_chunks(self): + def _concat_chunks(self) -> dict: result = {} for chunk in self._data: if isinstance(chunk, Future) or hasattr(chunk, 'to_dict'): @@ -86,7 +95,7 @@ def _concat_chunks(self): return result - def to_string(self): + def to_string(self) -> str: if self._encoder is None: return json.dumps(self._concat_chunks(), cls=FrontikJsonEncoder, ensure_ascii=False) diff --git a/frontik/loggers/__init__.py b/frontik/loggers/__init__.py index bb63dc30a..1fb88f385 100644 --- a/frontik/loggers/__init__.py +++ b/frontik/loggers/__init__.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import json import logging import os @@ -13,7 +15,7 @@ from frontik.options import options if TYPE_CHECKING: - from typing import List, Optional + from logging import LogRecord ROOT_LOGGER = logging.root JSON_REQUESTS_LOGGER = logging.getLogger('requests') @@ -22,12 +24,11 @@ class Mdc: + def __init__(self) -> None: + self.pid: int = None # type: ignore + self.role: str = None # type: ignore - def __init__(self): - self.pid = None - self.role = None - - def init(self, role): + def init(self, role: str) -> None: self.pid = os.getpid() self.role = role @@ -47,21 +48,22 @@ def filter(self, record): class BufferedHandler(Handler): - def __init__(self, level=logging.NOTSET): + def __init__(self, level: int = logging.NOTSET) -> None: super().__init__(level) - self.records = [] + self.records: list[LogRecord] = [] - def handle(self, record): + def handle(self, record: logging.LogRecord) -> None: # type: ignore self.records.append(record) def produce_all(self): - raise NotImplementedError() # pragma: no cover + raise NotImplementedError # pragma: no cover class GlobalLogHandler(Handler): def handle(self, record): - if request_context.get_log_handler(): - request_context.get_log_handler().handle(record) + handler = request_context.get_log_handler() + if handler is not None: + handler.handle(record) class JSONFormatter(Formatter): @@ -73,20 +75,20 @@ def format(self, record): stack_trace = self.format_stack_trace(record) mdc = JSONFormatter.get_mdc() - json_message = { - 'ts': timestamp - } + json_message = {'ts': timestamp} custom_json = getattr(record, CUSTOM_JSON_EXTRA, None) if custom_json: json_message.update(custom_json) else: - json_message.update({ - 'lvl': record.levelname, - 'logger': record.name, - 'mdc': mdc, - 'msg': message, - }) + json_message.update( + { + 'lvl': record.levelname, + 'logger': record.name, + 'mdc': mdc, + 'msg': message, + }, + ) if stack_trace: json_message['exception'] = stack_trace @@ -94,11 +96,8 @@ def format(self, record): return json.dumps(json_message) @staticmethod - def get_mdc(): - mdc = { - 'thread': MDC.pid, - 'role': MDC.role - } + def get_mdc() -> dict: + mdc = {'thread': MDC.pid, 'role': MDC.role} handler_name = request_context.get_handler_name() if handler_name: @@ -110,7 +109,7 @@ def get_mdc(): return mdc - def format_stack_trace(self, record): + def format_stack_trace(self, record: logging.LogRecord) -> str: # Copypaste from super.format stack_trace = '' if record.exc_info and not record.exc_text: @@ -146,7 +145,7 @@ def format(self, record): _TEXT_FORMATTER = None -def 
get_stderr_formatter(): +def get_stderr_formatter() -> StderrFormatter: global _STDERR_FORMATTER if _STDERR_FORMATTER is None: @@ -155,7 +154,7 @@ def get_stderr_formatter(): return _STDERR_FORMATTER -def get_text_formatter(): +def get_text_formatter() -> Formatter: global _TEXT_FORMATTER if _TEXT_FORMATTER is None: @@ -164,7 +163,13 @@ def get_text_formatter(): return _TEXT_FORMATTER -def bootstrap_logger(logger_info, logger_level, use_json_formatter=True, *, formatter=None): +def bootstrap_logger( + logger_info: str | tuple, + logger_level: int, + use_json_formatter: bool = True, + *, + formatter: Formatter | None = None, +) -> logging.Logger: if isinstance(logger_info, tuple): logger, logger_name = logger_info else: @@ -191,10 +196,15 @@ def bootstrap_logger(logger_info, logger_level, use_json_formatter=True, *, form return logger -def _configure_file(logger_name: str, - use_json_formatter: bool = True, formatter: 'Optional[Formatter]' = None) -> 'List[Handler]': +def _configure_file( + logger_name: str, + use_json_formatter: bool = True, + formatter: Formatter | None = None, +) -> list[Handler]: log_extension = '.slog' if use_json_formatter else '.log' - file_handler = logging.handlers.WatchedFileHandler(os.path.join(options.log_dir, f'{logger_name}{log_extension}')) + file_handler = logging.handlers.WatchedFileHandler( + os.path.join(options.log_dir, f'{logger_name}{log_extension}'), # type: ignore + ) if formatter is not None: file_handler.setFormatter(formatter) @@ -207,7 +217,7 @@ def _configure_file(logger_name: str, return [file_handler] -def _configure_stderr(formatter: 'Optional[Formatter]' = None): +def _configure_stderr(formatter: Formatter | None = None) -> list[logging.StreamHandler]: stderr_handler = logging.StreamHandler() if formatter is not None: stderr_handler.setFormatter(formatter) @@ -218,13 +228,16 @@ def _configure_stderr(formatter: 'Optional[Formatter]' = None): return [stderr_handler] -def _configure_syslog(logger_name: str, - use_json_formatter: bool = True, formatter: 'Optional[Formatter]' = None) -> 'List[Handler]': +def _configure_syslog( + logger_name: str, + use_json_formatter: bool = True, + formatter: Formatter | None = None, +) -> list[Handler]: try: syslog_handler = SysLogHandler( address=(options.syslog_host, options.syslog_port), facility=SysLogHandler.facility_names[options.syslog_facility], - socktype=socket.SOCK_DGRAM + socktype=socket.SOCK_DGRAM, ) log_extension = '.slog' if use_json_formatter else '.log' syslog_handler.ident = f'{options.syslog_tag}/{logger_name}{log_extension}/: ' @@ -238,12 +251,12 @@ def _configure_syslog(logger_name: str, return [syslog_handler] - except socket.error: + except OSError: logging.getLogger('frontik.logging').exception('cannot initialize syslog') return [] -def bootstrap_core_logging(log_level, use_json, suppressed_loggers): +def bootstrap_core_logging(log_level: str, use_json: bool, suppressed_loggers: list[str]) -> None: """This is a replacement for standard Tornado logging configuration.""" level = getattr(logging, log_level.upper()) @@ -255,6 +268,6 @@ def bootstrap_core_logging(log_level, use_json, suppressed_loggers): bootstrap_logger((JSON_REQUESTS_LOGGER, 'requests'), level, use_json_formatter=True) for logger_name in suppressed_loggers: - logging.getLogger(logger_name).setLevel(logging.WARN) + logging.getLogger(logger_name).setLevel(logging.WARNING) logging.captureWarnings(True) diff --git a/frontik/loggers/logleveloverride/http_log_level_override_extension.py 
b/frontik/loggers/logleveloverride/http_log_level_override_extension.py index 74aebd87d..113f18a0d 100644 --- a/frontik/loggers/logleveloverride/http_log_level_override_extension.py +++ b/frontik/loggers/logleveloverride/http_log_level_override_extension.py @@ -1,42 +1,41 @@ import logging -from typing import List from http_client import HttpClientFactory from tornado.httpclient import HTTPError -from frontik.loggers.logleveloverride.log_level_override_extension import LogLevelOverrideExtension, LogLevelOverride -from frontik.loggers.logleveloverride.logging_configurator_client import LOG_LEVEL_MAPPING from frontik import request_context +from frontik.loggers.logleveloverride.log_level_override_extension import LogLevelOverride, LogLevelOverrideExtension +from frontik.loggers.logleveloverride.logging_configurator_client import LOG_LEVEL_MAPPING logger = logging.getLogger('http_log_level_override_extension') -def parse_result_to_log_level_overrides_dto(data) -> List[LogLevelOverride]: - result = [] +def parse_result_to_log_level_overrides_dto(data: dict | None) -> list[LogLevelOverride]: + result: list[LogLevelOverride] = [] if data is None: return result for override in data['overrides']: - log_override = LogLevelOverride(override['loggerName'], - LOG_LEVEL_MAPPING.get(override['logLevel'], logging.INFO)) + log_override = LogLevelOverride( + override['loggerName'], + LOG_LEVEL_MAPPING.get(override['logLevel'], logging.INFO), + ) result.append(log_override) return result class HttpLogLevelOverrideExtension(LogLevelOverrideExtension): - def __init__(self, host, uri, http_client_factory: HttpClientFactory): + def __init__(self, host: str, uri: str, http_client_factory: HttpClientFactory) -> None: self.host = host self.uri = uri self.http_client_factory = http_client_factory - async def load_log_level_overrides(self) -> List[LogLevelOverride]: - headers = { - 'X-Request-Id': request_context.get_request_id() - } + async def load_log_level_overrides(self) -> list[LogLevelOverride]: + headers = {'X-Request-Id': request_context.get_request_id()} result = await self.http_client_factory.get_http_client().get_url(self.host, self.uri, headers=headers) if result.failed: - logger.error(f'some problem with fetching log level overrides: {result.failed}') + logger.error('failed to fetch log level overrides: %s', result.failed) raise HTTPError(result.status_code) log_level_overrides = parse_result_to_log_level_overrides_dto(result.data) diff --git a/frontik/loggers/logleveloverride/log_level_override_extension.py b/frontik/loggers/logleveloverride/log_level_override_extension.py index ebea922a8..ad649d39a 100644 --- a/frontik/loggers/logleveloverride/log_level_override_extension.py +++ b/frontik/loggers/logleveloverride/log_level_override_extension.py @@ -1,12 +1,10 @@ import abc from collections import namedtuple -from typing import List LogLevelOverride = namedtuple('LogLevelOverride', ['logger_name', 'log_level']) class LogLevelOverrideExtension(metaclass=abc.ABCMeta): - @abc.abstractmethod - async def load_log_level_overrides(self) -> List[LogLevelOverride]: + async def load_log_level_overrides(self) -> list[LogLevelOverride]: pass diff --git a/frontik/loggers/logleveloverride/logging_configurator_client.py b/frontik/loggers/logleveloverride/logging_configurator_client.py index c63f7034b..df79fb47b 100644 --- a/frontik/loggers/logleveloverride/logging_configurator_client.py +++ b/frontik/loggers/logleveloverride/logging_configurator_client.py @@ -1,33 +1,39 @@ +from __future__ import
annotations + import copy import logging +from typing import TYPE_CHECKING from tornado.ioloop import PeriodicCallback + from frontik.options import options -from frontik.loggers.logleveloverride.log_level_override_extension import LogLevelOverrideExtension +if TYPE_CHECKING: + from collections.abc import Iterable + + from frontik.loggers.logleveloverride.log_level_override_extension import LogLevelOverrideExtension LOG_LEVEL_MAPPING = { 'TRACE': logging.DEBUG, 'DEBUG': logging.DEBUG, 'INFO': logging.INFO, - 'WARN': logging.WARN, + 'WARN': logging.WARNING, 'ERROR': logging.ERROR, } class LoggingConfiguratorClient: - def __init__(self, log_level_override_extension: LogLevelOverrideExtension): self.log_level_override_extension = log_level_override_extension - self._loggers_store = {} + self._loggers_store: dict[str, int] = {} self._update_task_handle = PeriodicCallback( callback=self._update_log_level, - callback_time=options.update_log_level_interval_in_seconds * 1000 + callback_time=options.update_log_level_interval_in_seconds * 1000, ) self._update_task_handle.start() - async def _update_log_level(self): + async def _update_log_level(self) -> None: log_level_overrides = await self.log_level_override_extension.load_log_level_overrides() self._rollback_overrides(log_level_overrides) for logger_name, log_level in log_level_overrides: @@ -48,16 +54,16 @@ async def _update_log_level(self): self._loggers_store[logger.name] = logger.level - def _rollback_overrides(self, overrides=()): - for logger_name in self._loggers_store.keys() - set(map(lambda x: x.logger_name, overrides)): + def _rollback_overrides(self, overrides: Iterable = ()) -> None: + for logger_name in self._loggers_store.keys() - {x.logger_name for x in overrides}: del self._loggers_store[logger_name] self._reset_log_level(logger_name) - def _reset_log_level(self, logger_name): + def _reset_log_level(self, logger_name: str) -> None: logger = logging.getLogger(logger_name) logger.setLevel(options.log_level.upper()) for handler in logger.handlers: handler.setLevel(options.log_level.upper()) - def stop_logging_configurator(self): + def stop_logging_configurator(self) -> None: self._update_task_handle.stop() diff --git a/frontik/loggers/stages.py b/frontik/loggers/stages.py index 26b8c0c54..17629831a 100644 --- a/frontik/loggers/stages.py +++ b/frontik/loggers/stages.py @@ -1,21 +1,29 @@ +from __future__ import annotations + import logging import time from collections import namedtuple +from typing import TYPE_CHECKING from frontik import request_context +if TYPE_CHECKING: + from tornado.httputil import HTTPServerRequest + + from frontik.integrations.statsd import StatsDClient, StatsDClientStub + stages_logger = logging.getLogger('stages') class StagesLogger: Stage = namedtuple('Stage', ('name', 'delta', 'start_delta')) - def __init__(self, request, statsd_client): + def __init__(self, request: HTTPServerRequest, statsd_client: StatsDClient | StatsDClientStub) -> None: self._last_stage_time = self._start_time = request._start_time - self._stages = [] + self._stages: list[StagesLogger.Stage] = [] self._statsd_client = statsd_client - def commit_stage(self, stage_name): + def commit_stage(self, stage_name: str) -> None: stage_end_time = time.time() stage_start_time = self._last_stage_time self._last_stage_time = stage_end_time @@ -27,14 +35,14 @@ def commit_stage(self, stage_name): self._stages.append(stage) stages_logger.debug('stage "%s" completed in %.2fms', stage.name, stage.delta, extra={'_stage': stage}) - def flush_stages(self, 
status_code): + def flush_stages(self, status_code: int) -> None: """Writes available stages, total value and status code""" handler_name = request_context.get_handler_name() self._statsd_client.stack() for s in self._stages: - self._statsd_client.time(f'handler.stage.time', int(s.delta), stage=s.name) + self._statsd_client.time('handler.stage.time', int(s.delta), stage=s.name) self._statsd_client.flush() @@ -43,8 +51,5 @@ def flush_stages(self, status_code): stages_logger.info( 'timings for %(page)s : %(stages)s', - { - 'page': handler_name, - 'stages': '{0} total={1:.2f} code={2}'.format(stages_str, total, status_code) - }, + {'page': handler_name, 'stages': f'{stages_str} total={total:.2f} code={status_code}'}, ) diff --git a/frontik/options.py b/frontik/options.py index 5acb25f55..73cf4a156 100644 --- a/frontik/options.py +++ b/frontik/options.py @@ -1,5 +1,5 @@ -from dataclasses import dataclass, field, fields import logging.handlers +from dataclasses import dataclass, field, fields LOG_DIR_OPTION_NAME = 'log_dir' STDERR_LOG_OPTION_NAME = 'stderr_log' @@ -7,17 +7,17 @@ @dataclass class Options: - app: str = None - app_class: str = None + app: str | None = None + app_class: str | None = None workers: int = 1 init_workers_timeout_sec: int = 60 - tornado_settings: dict = None + tornado_settings: dict | None = None max_active_handlers: int = 100 reuse_port: bool = True xheaders: bool = False validate_request_id: bool = False - config: str = None + config: str | None = None host: str = '0.0.0.0' port: int = 8080 node_name: str = '' @@ -25,18 +25,19 @@ class Options: autoreload: bool = False stop_timeout: int = 3 - asyncio_task_threshold_sec: float = None - asyncio_task_critical_threshold_sec: float = None + asyncio_task_threshold_sec: float | None = None + asyncio_task_critical_threshold_sec: float | None = None - log_dir: str = None + log_dir: str | None = None log_level: str = 'info' update_log_level_interval_in_seconds: int = 300 log_json: bool = True log_text_format: str = '[%(process)s] %(asctime)s %(levelname)s %(name)s: %(message)s' stderr_log: bool = False - stderr_format: str = '%(color)s[%(levelname)1.1s %(asctime)s %(name)s ' \ - '%(module)s:%(lineno)d]%(end_color)s %(message)s' + stderr_format: str = ( + '%(color)s[%(levelname)1.1s %(asctime)s %(name)s %(module)s:%(lineno)d]%(end_color)s %(message)s' + ) stderr_dateformat: str = '%H:%M:%S' syslog: bool = False @@ -48,32 +49,32 @@ class Options: suppressed_loggers: list = field(default_factory=lambda: []) debug: bool = False - debug_login: str = None - debug_password: str = None + debug_login: str | None = None + debug_password: str | None = None - http_client_metrics_kafka_cluster: str = None + http_client_metrics_kafka_cluster: str | None = None kafka_clusters: dict = field(default_factory=lambda: {}) - statsd_host: str = None - statsd_port: int = None + statsd_host: str | None = None + statsd_port: int | None = None statsd_default_periodic_send_interval_sec: int = 60 - gc_metrics_send_interval_ms: int = None + gc_metrics_send_interval_ms: int | None = None long_gc_log_enabled: bool = True long_gc_log_threshold_sec: float = 0.01 - xml_root: str = None - xml_cache_limit: int = None - xml_cache_step: int = None - xsl_root: str = None - xsl_cache_limit: int = None - xsl_cache_step: int = None + xml_root: str | None = None + xml_cache_limit: int | None = None + xml_cache_step: int | None = None + xsl_root: str | None = None + xsl_cache_limit: int | None = None + xsl_cache_step: int | None = None xsl_executor_pool_size: int = 1 - 
jinja_template_root: str = None + jinja_template_root: str | None = None jinja_template_cache_limit: int = 50 jinja_streaming_render_timeout_ms: int = 50 - sentry_dsn: str = None + sentry_dsn: str | None = None sentry_max_breadcrumbs: int = 0 send_timeout_stats_interval_ms: int = 60000 @@ -81,9 +82,9 @@ class Options: # consul options consul_enabled: bool = True consul_host: str = '127.0.0.1' - consul_port: int = None - consul_service_address: str = None - consul_check_host: str = None + consul_port: int | None = None + consul_service_address: str | None = None + consul_check_host: str | None = None consul_http_check_interval_sec: int = 10 consul_http_check_timeout_sec: float = 1 consul_tags: list = field(default_factory=lambda: []) @@ -108,8 +109,8 @@ class Options: options = Options() -def parse_config_file(path): - config = {} +def parse_config_file(path: str) -> None: + config: dict = {} with open(path, 'rb') as config_file: exec(config_file.read(), config, config) diff --git a/frontik/preprocessors.py b/frontik/preprocessors.py index 180139948..796003a80 100644 --- a/frontik/preprocessors.py +++ b/frontik/preprocessors.py @@ -1,12 +1,19 @@ +from __future__ import annotations + import asyncio from functools import wraps +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from collections.abc import Callable, Reversible + from typing import Any -def _get_preprocessor_name(preprocessor_function): +def _get_preprocessor_name(preprocessor_function: Any) -> str: return f'{preprocessor_function.__module__}.{preprocessor_function.__name__}' -def preprocessor(function_or_list): +def preprocessor(function_or_list: Callable | Reversible[Callable]) -> Callable: """Creates a preprocessor decorator for `PageHandler.get_page`, `PageHandler.post_page` etc. Preprocessor is a function that accepts handler instance as its only parameter. @@ -39,7 +46,7 @@ def get_page(self): Finally, after ``get_b`` is executed, ``get_page`` will be called. 
""" - def preprocessor_decorator(func): + def preprocessor_decorator(func: Callable) -> Callable: if callable(function_or_list): _register_preprocessors(func, [function_or_list]) else: @@ -50,29 +57,29 @@ def preprocessor_decorator(func): if callable(function_or_list): dep_name = function_or_list.__name__ - preprocessor_decorator.preprocessor_name = _get_preprocessor_name(function_or_list) - preprocessor_decorator.function = function_or_list + preprocessor_decorator.preprocessor_name = _get_preprocessor_name(function_or_list) # type: ignore + preprocessor_decorator.function = function_or_list # type: ignore else: - dep_name = [f.__name__ for f in function_or_list] - preprocessor_decorator.func_name = f'preprocessor_decorator({dep_name})' + dep_name = str([f.__name__ for f in function_or_list]) + preprocessor_decorator.func_name = f'preprocessor_decorator({dep_name})' # type: ignore return preprocessor_decorator -def _get_preprocessors(func): +def _get_preprocessors(func: Callable) -> list: return getattr(func, '_preprocessors', []) -def _unwrap_preprocessors(preprocessors): +def _unwrap_preprocessors(preprocessors: Reversible) -> list: return _get_preprocessors(preprocessor(preprocessors)(lambda: None)) -def _register_preprocessors(func, preprocessors): - setattr(func, '_preprocessors', preprocessors + _get_preprocessors(func)) +def _register_preprocessors(func: Callable, preprocessors: list[Callable]) -> None: + func._preprocessors = preprocessors + _get_preprocessors(func) # type: ignore -def make_preprocessors_names_list(preprocessors_list): - return list(map(lambda p: p.preprocessor_name, preprocessors_list)) +def make_preprocessors_names_list(preprocessors_list: list) -> list[str]: + return [p.preprocessor_name for p in preprocessors_list] def _wrap_async_func_to_tornado_coroutine(func): @@ -80,7 +87,7 @@ def _wrap_async_func_to_tornado_coroutine(func): def wrapper(*args, **kwargs): return asyncio.create_task(func(*args, **kwargs)) - wrapper.__wrapped__ = func - wrapper.__tornado_coroutine__ = True + wrapper.__wrapped__ = func # type: ignore + wrapper.__tornado_coroutine__ = True # type: ignore return wrapper diff --git a/frontik/process.py b/frontik/process.py index 8d1be977a..217452348 100644 --- a/frontik/process.py +++ b/frontik/process.py @@ -1,17 +1,26 @@ +from __future__ import annotations + import errno +import fcntl import gc import logging import os import signal import sys import time -import fcntl from dataclasses import dataclass +from typing import TYPE_CHECKING from tornado.util import errno_from_exception from frontik.options import options +if TYPE_CHECKING: + from collections.abc import Callable + from multiprocessing.sharedctypes import Synchronized + + from frontik.app import FrontikApplication + log = logging.getLogger('fork') @@ -28,8 +37,16 @@ class State: terminating: bool -def fork_workers(worker_function, *, app, init_workers_count_down, num_workers, after_workers_up_action, - before_workers_shutdown_action, children_pipes): +def fork_workers( + worker_function: Callable, + *, + app: FrontikApplication, + init_workers_count_down: Synchronized, + num_workers: int, + after_workers_up_action: Callable, + before_workers_shutdown_action: Callable, + children_pipes: dict, +) -> None: log.info("starting %d processes", num_workers) state = State(server=True, children={}, read_pipe=0, write_pipes=children_pipes, terminating=False) @@ -56,16 +73,19 @@ def sigterm_handler(signum, frame): timeout = time.time() + options.init_workers_timeout_sec while 
init_workers_count_down.value > 0: if time.time() > timeout: + msg = ( + f'workers did not start after {options.init_workers_timeout_sec} seconds, ' + f'{init_workers_count_down.value} workers have not started' + ) raise Exception( - f'workers did not started after {options.init_workers_timeout_sec} seconds,' - f' do not started {init_workers_count_down.value} workers' + msg, ) time.sleep(0.1) after_workers_up_action() _supervise_workers(state, worker_function) -def _supervise_workers(state, worker_function): +def _supervise_workers(state: State, worker_function: Callable) -> None: while state.children: try: pid, status = os.wait() @@ -105,8 +125,8 @@ def _supervise_workers(state, worker_function): # returns True inside child process, otherwise False -def _start_child(i, state): - read_fd, write_fd = os.pipe2(os.O_NONBLOCK) +def _start_child(i: int, state: State) -> bool: + read_fd, write_fd = os.pipe2(os.O_NONBLOCK) # type: ignore pid = os.fork() if pid == 0: os.close(write_fd) @@ -125,7 +145,7 @@ def _start_child(i, state): return False -def _set_pipe_size(fd, i): +def _set_pipe_size(fd: int, i: int) -> None: try: fcntl.fcntl(fd, F_SETPIPE_SZ, PIPE_BUFFER_SIZE) except OSError: diff --git a/frontik/producers/__init__.py b/frontik/producers/__init__.py index 2c2ce58f2..71c94c20f 100644 --- a/frontik/producers/__init__.py +++ b/frontik/producers/__init__.py @@ -1,3 +1,3 @@ class ProducerFactory: def get_producer(self, handler): - raise NotImplementedError() # pragma: no cover + raise NotImplementedError # pragma: no cover diff --git a/frontik/producers/json_producer.py b/frontik/producers/json_producer.py index d243ebafe..7c9291ad8 100644 --- a/frontik/producers/json_producer.py +++ b/frontik/producers/json_producer.py @@ -1,46 +1,39 @@ +from __future__ import annotations + import asyncio import time import weakref +from typing import TYPE_CHECKING import jinja2 from jinja2.utils import concat from tornado.escape import to_unicode from frontik import json_builder, media_types -from frontik.util import get_abs_path, get_cookie_or_url_param_value from frontik.options import options from frontik.producers import ProducerFactory +from frontik.util import get_abs_path, get_cookie_or_url_param_value +if TYPE_CHECKING: + from typing import Any -class JsonProducerFactory(ProducerFactory): - def __init__(self, application): - if hasattr(application, 'get_jinja_environment'): - self.environment = application.get_jinja_environment() - elif options.jinja_template_root is not None: - self.environment = jinja2.Environment( - auto_reload=options.debug, - cache_size=options.jinja_template_cache_limit, - loader=jinja2.FileSystemLoader(get_abs_path(application.app_root, options.jinja_template_root)), - ) - else: - self.environment = None - - def get_producer(self, handler): - return JsonProducer( - handler, - environment=self.environment, - json_encoder=getattr(handler, 'json_encoder', None), - jinja_context_provider=getattr(handler, 'jinja_context_provider', None), - ) + from frontik.app import FrontikApplication + from frontik.handler import PageHandler class JsonProducer: - def __init__(self, handler, environment=None, json_encoder=None, jinja_context_provider=None): + def __init__( + self, + handler: PageHandler, + environment: Any = None, + json_encoder: Any = None, + jinja_context_provider: Any = None, + ) -> None: self.handler = weakref.proxy(handler) self.log = weakref.proxy(self.handler.log) self.json = json_builder.JsonBuilder(json_encoder=json_encoder) - self.template_filename = None +
self.template_filename: str = None # type: ignore self.environment = environment self.jinja_context_provider = jinja_context_provider @@ -55,16 +48,16 @@ def __call__(self): return self._finish_with_json() - def set_template(self, filename): + def set_template(self, filename: str) -> None: self.template_filename = filename - def get_jinja_context(self): + def get_jinja_context(self) -> Any: if callable(self.jinja_context_provider): return self.jinja_context_provider(self.handler) else: return self.json.to_dict() - async def _render_template_stream_on_ioloop(self, batch_render_timeout_ms): + async def _render_template_stream_on_ioloop(self, batch_render_timeout_ms: int) -> tuple[float, Any]: template_render_start_time = time.time() template = self.environment.get_template(self.template_filename) @@ -97,7 +90,10 @@ async def _render_template_stream_on_ioloop(self, batch_render_timeout_ms): taken_time_ms = (time.time() - part_render_start_time) * 1000 self.log.info( - 'render template part %s with %s statements in %.2fms', part_index, statements_processed, taken_time_ms + 'render template part %s with %s statements in %.2fms', + part_index, + statements_processed, + taken_time_ms, ) part_index += 1 @@ -107,9 +103,10 @@ async def _render_template_stream_on_ioloop(self, batch_render_timeout_ms): await asyncio.sleep(0) - async def _finish_with_template(self): + async def _finish_with_template(self) -> tuple[Any, None]: if not self.environment: - raise Exception('Cannot apply template, no Jinja2 environment configured') + msg = 'Cannot apply template, no Jinja2 environment configured' + raise Exception(msg) if self.handler._headers.get('Content-Type') is None: self.handler.set_header('Content-Type', media_types.TEXT_HTML) @@ -132,14 +129,17 @@ async def _finish_with_template(self): if isinstance(e, jinja2.TemplateSyntaxError): self.log.error( '%s in file "%s", line %d\n\t%s', - e.__class__.__name__, to_unicode(e.filename), e.lineno, to_unicode(e.message) + e.__class__.__name__, + to_unicode(e.filename), + e.lineno, + to_unicode(e.message), ) elif isinstance(e, jinja2.TemplateError): self.log.error('%s error\n\t%s', e.__class__.__name__, to_unicode(e.message)) raise e - async def _finish_with_json(self): + async def _finish_with_json(self) -> tuple[str, None]: self.log.debug('finishing without templating') if self.handler._headers.get('Content-Type') is None: self.handler.set_header('Content-Type', media_types.APPLICATION_JSON) @@ -147,4 +147,28 @@ async def _finish_with_json(self): return self.json.to_string(), None def __repr__(self): - return '{}.{}'.format(__package__, self.__class__.__name__) + return f'{__package__}.{self.__class__.__name__}' + + +class JsonProducerFactory(ProducerFactory): + def __init__(self, application: FrontikApplication) -> None: + if hasattr(application, 'get_jinja_environment'): + self.environment = application.get_jinja_environment() + elif options.jinja_template_root is not None: + self.environment = jinja2.Environment( + auto_reload=options.debug, + cache_size=options.jinja_template_cache_limit, + loader=jinja2.FileSystemLoader( + get_abs_path(application.app_root, options.jinja_template_root), # type: ignore + ), + ) + else: + self.environment = None + + def get_producer(self, handler: PageHandler) -> JsonProducer: + return JsonProducer( + handler, + environment=self.environment, + json_encoder=getattr(handler, 'json_encoder', None), + jinja_context_provider=getattr(handler, 'jinja_context_provider', None), + ) diff --git a/frontik/producers/xml_producer.py 
b/frontik/producers/xml_producer.py index 4ecf63b9c..0bf35f7bf 100644 --- a/frontik/producers/xml_producer.py +++ b/frontik/producers/xml_producer.py @@ -1,9 +1,12 @@ +from __future__ import annotations + import contextvars import copy +import re import time import weakref -import re from concurrent.futures import ThreadPoolExecutor +from typing import TYPE_CHECKING from lxml import etree from tornado.ioloop import IOLoop @@ -16,36 +19,23 @@ from frontik.util import get_abs_path from frontik.xml_util import xml_from_file, xsl_from_file +if TYPE_CHECKING: + from typing import Any -class XMLProducerFactory(ProducerFactory): - def __init__(self, application): - self.xml_cache = file_cache.make_file_cache( - 'XML', 'xml_root', - get_abs_path(application.app_root, options.xml_root), - xml_from_file, - options.xml_cache_limit, - options.xml_cache_step, - deepcopy=True - ) - - self.xsl_cache = file_cache.make_file_cache( - 'XSL', 'xsl_root', - get_abs_path(application.app_root, options.xsl_root), - xsl_from_file, - options.xsl_cache_limit, - options.xsl_cache_step - ) - - self.executor = ThreadPoolExecutor(options.xsl_executor_pool_size) - - def get_producer(self, handler): - return XmlProducer(handler, xml_cache=self.xml_cache, xsl_cache=self.xsl_cache, executor=self.executor) + from frontik.app import FrontikApplication + from frontik.handler import PageHandler class XmlProducer: METAINFO_PREFIX = 'hhmeta_' - def __init__(self, handler, xml_cache=None, xsl_cache=None, executor=None): + def __init__( + self, + handler: PageHandler, + xml_cache: Any = None, + xsl_cache: Any = None, + executor: Any = None, + ) -> None: self.handler = weakref.proxy(handler) self.log = weakref.proxy(self.handler.log) self.executor = executor @@ -54,8 +44,8 @@ def __init__(self, handler, xml_cache=None, xsl_cache=None, executor=None): self.xsl_cache = xsl_cache self.doc = frontik.doc.Doc() - self.transform = None - self.transform_filename = None + self.transform: Any = None # type: ignore + self.transform_filename: str | None = None def __call__(self): if any(frontik.util.get_cookie_or_url_param_value(self.handler, p) is not None for p in ('noxsl', 'notpl')): @@ -80,10 +70,10 @@ def __call__(self): return self._finish_with_xslt() - def set_xsl(self, filename): + def set_xsl(self, filename: str) -> None: self.transform_filename = filename - async def _finish_with_xslt(self): + async def _finish_with_xslt(self) -> tuple[str | None, list[Any] | None]: self.log.debug('finishing with XSLT') if self.handler._headers.get('Content-Type') is None: @@ -91,14 +81,18 @@ async def _finish_with_xslt(self): def job(): start_time = time.time() - result = self.transform(copy.deepcopy(self.doc.to_etree_element()), - profile_run=self.handler.debug_mode.profile_xslt) - meta_info = [entry.message.replace(self.METAINFO_PREFIX, '') - for entry in self.transform.error_log - if entry.message.startswith(self.METAINFO_PREFIX)] + result = self.transform( + copy.deepcopy(self.doc.to_etree_element()), + profile_run=self.handler.debug_mode.profile_xslt, + ) + meta_info = [ + entry.message.replace(self.METAINFO_PREFIX, '') + for entry in self.transform.error_log + if entry.message.startswith(self.METAINFO_PREFIX) + ] return start_time, (str(result), meta_info), result.xslt_profile - def get_xsl_log(): + def get_xsl_log() -> str: return '\n'.join( f'XSLT {e.level_name} in file "{e.filename}", line {e.line}, column {e.column}\n\t{e.message}' for e in self.transform.error_log @@ -132,7 +126,7 @@ def get_xsl_log(): self.log.error(get_xsl_log()) raise e 
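# Illustrative aside, not part of the patch: how the METAINFO_PREFIX ('hhmeta_')
# channel used by XmlProducer above works. lxml exposes xsl:message output through
# the transform's error_log, so a stylesheet can hand metadata back to Python by
# emitting messages with the agreed prefix. A minimal, self-contained sketch; the
# 'cache-hit' flag is a hypothetical example value:
from lxml import etree

stylesheet = etree.XSLT(etree.XML(
    '<xsl:stylesheet version="1.0" xmlns:xsl="http://www.w3.org/1999/XSL/Transform">'
    '  <xsl:template match="/">'
    '    <xsl:message>hhmeta_cache-hit</xsl:message>'
    '    <result/>'
    '  </xsl:template>'
    '</xsl:stylesheet>',
))
result = stylesheet(etree.XML('<doc/>'))
# Same extraction as XmlProducer's job(): keep only prefixed messages, strip the prefix.
meta_info = [
    entry.message.replace('hhmeta_', '')
    for entry in stylesheet.error_log
    if entry.message.startswith('hhmeta_')
]
# meta_info should be ['cache-hit']; str(result) is the transformed document.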
- async def _finish_with_xml(self, escape_xmlns=False): + async def _finish_with_xml(self, escape_xmlns: bool = False) -> tuple[bytes, None]: self.log.debug('finishing without XSLT') if self.handler._headers.get('Content-Type') is None: self.handler.set_header('Content-Type', media_types.APPLICATION_XML) @@ -145,14 +139,41 @@ async def _finish_with_xml(self, escape_xmlns=False): doc_string_without_xmlns = re.sub( 'xmlns=".+?"', 'xmlns-hidden="xmlns is hidden due to chrome xml viewer issues"', - self.doc.to_string().decode('utf-8') + self.doc.to_string().decode('utf-8'), ) return doc_string_without_xmlns.encode('utf-8'), None return self.doc.to_string(), None - def xml_from_file(self, filename): + def xml_from_file(self, filename: str) -> Any: return self.xml_cache.load(filename, self.log) def __repr__(self): - return '{}.{}'.format(__package__, self.__class__.__name__) + return f'{__package__}.{self.__class__.__name__}' + + +class XMLProducerFactory(ProducerFactory): + def __init__(self, application: FrontikApplication) -> None: + self.xml_cache = file_cache.make_file_cache( + 'XML', + 'xml_root', + get_abs_path(application.app_root, options.xml_root), + xml_from_file, + options.xml_cache_limit, + options.xml_cache_step, + deepcopy=True, + ) + + self.xsl_cache = file_cache.make_file_cache( + 'XSL', + 'xsl_root', + get_abs_path(application.app_root, options.xsl_root), + xsl_from_file, + options.xsl_cache_limit, + options.xsl_cache_step, + ) + + self.executor = ThreadPoolExecutor(options.xsl_executor_pool_size) + + def get_producer(self, handler: PageHandler) -> XmlProducer: + return XmlProducer(handler, xml_cache=self.xml_cache, xsl_cache=self.xsl_cache, executor=self.executor) diff --git a/frontik/request_context.py b/frontik/request_context.py index 3b6df09ab..a650dd2b4 100644 --- a/frontik/request_context.py +++ b/frontik/request_context.py @@ -1,25 +1,32 @@ +from __future__ import annotations + import contextvars -import threading +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from tornado.httputil import HTTPServerRequest + + from frontik.debug import DebugBufferedHandler class _Context: __slots__ = ('request', 'request_id', 'handler_name', 'log_handler') - def __init__(self, request, request_id): + def __init__(self, request: HTTPServerRequest | None, request_id: str | None) -> None: self.request = request self.request_id = request_id - self.handler_name = None - self.log_handler = None + self.handler_name: str | None = None + self.log_handler: DebugBufferedHandler | None = None _context = contextvars.ContextVar('context', default=_Context(None, None)) -def initialize(request, request_id): +def initialize(request: HTTPServerRequest, request_id: str) -> contextvars.Token: return _context.set(_Context(request, request_id)) -def reset(token): +def reset(token: contextvars.Token) -> None: _context.reset(token) @@ -27,21 +34,21 @@ def get_request(): return _context.get().request -def get_request_id(): +def get_request_id() -> str | None: return _context.get().request_id -def get_handler_name(): +def get_handler_name() -> str | None: return _context.get().handler_name -def set_handler_name(handler_name): +def set_handler_name(handler_name: str) -> None: _context.get().handler_name = handler_name -def get_log_handler(): +def get_log_handler() -> DebugBufferedHandler | None: return _context.get().log_handler -def set_log_handler(log_handler): +def set_log_handler(log_handler: DebugBufferedHandler) -> None: _context.get().log_handler = log_handler diff --git a/frontik/routing.py 
b/frontik/routing.py index f550bd07e..8783d159a 100644 --- a/frontik/routing.py +++ b/frontik/routing.py @@ -1,8 +1,11 @@ +from __future__ import annotations + import importlib import logging import os import re from inspect import isclass +from typing import TYPE_CHECKING from tornado.routing import ReversibleRouter, Router from tornado.web import RequestHandler @@ -10,16 +13,23 @@ from frontik.handler import ErrorHandler from frontik.util import reverse_regex_named_groups +if TYPE_CHECKING: + from typing import Any + + from tornado.httputil import HTTPMessageDelegate, HTTPServerRequest + + from frontik.app import FrontikApplication + routing_logger = logging.getLogger('frontik.routing') MAX_MODULE_NAME_LENGTH = os.pathconf('/', 'PC_PATH_MAX') - 1 class FileMappingRouter(Router): - def __init__(self, module): + def __init__(self, module: Any) -> None: self.name = module.__name__ - def find_handler(self, request, **kwargs): + def find_handler(self, request: HTTPServerRequest, **kwargs: Any) -> HTTPMessageDelegate | None: url_parts = request.path.strip('/').split('/') application = kwargs['application'] @@ -35,7 +45,7 @@ def find_handler(self, request, **kwargs): routing_logger.info('page module name exceeds PATH_MAX (%s), using 404 page', MAX_MODULE_NAME_LENGTH) return _get_application_404_handler_delegate(application, request) - def _handle_general_module_import_exception(): + def _handle_general_module_import_exception() -> HTTPMessageDelegate: routing_logger.exception('error while importing %s module', page_module_name) return _get_application_500_handler_delegate(application, request) @@ -43,8 +53,10 @@ def _handle_general_module_import_exception(): page_module = importlib.import_module(page_module_name) routing_logger.debug('using %s from %s', page_module_name, page_module.__file__) except ModuleNotFoundError as module_not_found_error: - if not (page_module_name == module_not_found_error.name or - page_module_name.startswith(module_not_found_error.name + '.')): + if not ( + page_module_name == module_not_found_error.name + or page_module_name.startswith(module_not_found_error.name + '.') # type: ignore + ): return _handle_general_module_import_exception() routing_logger.warning('%s module not found', (self.name, page_module_name)) return _get_application_404_handler_delegate(application, request) @@ -59,10 +71,10 @@ def _handle_general_module_import_exception(): class FrontikRouter(ReversibleRouter): - def __init__(self, application): + def __init__(self, application: FrontikApplication) -> None: self.application = application self.handlers = [] - self.handler_names = {} + self.handler_names: dict[str, Any] = {} for handler_spec in application.application_urls(): if len(handler_spec) > 2: @@ -76,7 +88,7 @@ def __init__(self, application): if handler_name is not None: self.handler_names[handler_name] = pattern - def find_handler(self, request, **kwargs): + def find_handler(self, request: HTTPServerRequest, **kwargs: Any) -> HTTPMessageDelegate: routing_logger.info('requested url: %s', request.uri) for pattern, handler in self.handlers: @@ -100,23 +112,29 @@ def find_handler(self, request, **kwargs): routing_logger.error('match for request url "%s" not found', request.uri) return _get_application_404_handler_delegate(self.application, request) - def reverse_url(self, name, *args, **kwargs): + def reverse_url(self, name: str, *args: Any, **kwargs: Any) -> str: if name not in self.handler_names: raise KeyError('%s not found in named urls' % name) return 
reverse_regex_named_groups(self.handler_names[name], *args, **kwargs) -def _get_application_404_handler_delegate(application, request): +def _get_application_404_handler_delegate( + application: FrontikApplication, + request: HTTPServerRequest, +) -> HTTPMessageDelegate: handler_class, handler_kwargs = application.application_404_handler(request) return application.get_handler_delegate(request, handler_class, handler_kwargs) -def _get_application_500_handler_delegate(application, request): +def _get_application_500_handler_delegate( + application: FrontikApplication, + request: HTTPServerRequest, +) -> HTTPMessageDelegate: return application.get_handler_delegate(request, ErrorHandler, {'status_code': 500}) -def _add_request_arguments_from_path(request, match): +def _add_request_arguments_from_path(request: HTTPServerRequest, match: re.Match) -> None: arguments = match.groupdict() for name, value in arguments.items(): if value: diff --git a/frontik/server.py b/frontik/server.py index 0917d15d1..46810f336 100644 --- a/frontik/server.py +++ b/frontik/server.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import asyncio import gc import importlib @@ -8,14 +10,14 @@ import signal import sys from concurrent.futures import ThreadPoolExecutor -from functools import partial from dataclasses import asdict +from functools import partial +from typing import TYPE_CHECKING -from http_client.options import options as http_client_options import tornado.autoreload import tornado.httpserver import tornado.ioloop -from tornado.platform.asyncio import BaseAsyncIOLoop +from http_client.options import options as http_client_options from frontik.app import FrontikApplication from frontik.config_parser import parse_configs @@ -24,10 +26,17 @@ from frontik.process import fork_workers from frontik.service_discovery import UpstreamUpdateListener +if TYPE_CHECKING: + from asyncio import Future + from collections.abc import Coroutine + from multiprocessing.synchronize import Lock as LockBase + + from tornado.platform.asyncio import BaseAsyncIOLoop + log = logging.getLogger('server') -def main(config_file=None): +def main(config_file: str | None = None) -> None: parse_configs(config_files=config_file) if options.app is None: @@ -36,6 +45,7 @@ def main(config_file=None): log.info('starting application %s', options.app) + app_class_name: str | None try: if options.app_class is not None and re.match(r'^\w+\.', options.app_class): app_module_name, app_class_name = options.app_class.rsplit('.', 1) @@ -56,23 +66,29 @@ def main(config_file=None): application = getattr(module, app_class_name) if app_class_name is not None else FrontikApplication try: - app = application(app_root=os.path.dirname(module.__file__), app_module=app_module_name, - **{**asdict(options), **asdict(http_client_options)}) + app = application( + app_root=os.path.dirname(str(module.__file__)), + app_module=app_module_name, + **{**asdict(options), **asdict(http_client_options)}, + ) count_down_lock = multiprocessing.Lock() gc.disable() gc.collect() gc.freeze() if options.workers != 1: - fork_workers(partial(_run_worker, app, count_down_lock, False), - app=app, - init_workers_count_down=app.init_workers_count_down, - num_workers=options.workers, - after_workers_up_action=lambda: { - app.upstream_caches.send_updates(), - app.service_discovery_client.register_service()}, - before_workers_shutdown_action=app.service_discovery_client.deregister_service_and_close, - children_pipes=app.children_pipes) + fork_workers( + partial(_run_worker, app, 
count_down_lock, False), + app=app, + init_workers_count_down=app.init_workers_count_down, + num_workers=options.workers, + after_workers_up_action=lambda: { + app.upstream_caches.send_updates(), + app.service_discovery_client.register_service(), + }, + before_workers_shutdown_action=app.service_discovery_client.deregister_service_and_close, + children_pipes=app.children_pipes, + ) else: # run in single process mode _run_worker(app, count_down_lock, True, None) @@ -81,7 +97,7 @@ def main(config_file=None): sys.exit(1) -def _run_worker(app, count_down_lock, need_to_init, pipe): +def _run_worker(app: FrontikApplication, count_down_lock: LockBase, need_to_init: bool, pipe: int | None) -> None: gc.enable() MDC.init('worker') @@ -92,11 +108,11 @@ def _run_worker(app, count_down_lock, need_to_init, pipe): else: uvloop.install() - ioloop = tornado.ioloop.IOLoop.current() + ioloop: BaseAsyncIOLoop = tornado.ioloop.IOLoop.current() # type: ignore executor = ThreadPoolExecutor(options.common_executor_pool_size) ioloop.asyncio_loop.set_default_executor(executor) initialize_application_task = ioloop.asyncio_loop.create_task( - _init_app(app, ioloop, count_down_lock, need_to_init, pipe) + _init_app(app, ioloop, count_down_lock, need_to_init, pipe), ) def initialize_application_task_result_handler(future): @@ -109,7 +125,11 @@ def initialize_application_task_result_handler(future): initialize_application_task.result() -async def run_server(app: FrontikApplication, ioloop: BaseAsyncIOLoop, need_to_register_in_service_discovery): +async def run_server( + app: FrontikApplication, + ioloop: BaseAsyncIOLoop, + need_to_register_in_service_discovery: bool, +) -> None: """Starts Frontik server for an application""" log.info('starting server on %s:%s', options.host, options.port) @@ -124,7 +144,7 @@ def sigterm_handler(signum, frame): log.info('requested shutdown, shutting down server on %s:%d', options.host, options.port) ioloop.add_callback_from_signal(server_stop) - def ioloop_is_running(): + def ioloop_is_running() -> bool: return ioloop.asyncio_loop.is_running() def server_stop(): @@ -146,16 +166,21 @@ def ioloop_stop(): signal.signal(signal.SIGINT, sigterm_handler) -async def _init_app(app: FrontikApplication, ioloop: BaseAsyncIOLoop, count_down_lock, - need_to_register_in_service_discovery, pipe): +async def _init_app( + app: FrontikApplication, + ioloop: BaseAsyncIOLoop, + count_down_lock: LockBase, + need_to_register_in_service_discovery: bool, + pipe: int | None, +) -> None: await app.init() - if not need_to_register_in_service_discovery: + if not need_to_register_in_service_discovery and pipe is not None: app.upstream_update_listener = UpstreamUpdateListener(app.upstream_manager, pipe) await run_server(app, ioloop, need_to_register_in_service_discovery) - log.info(f'Successfully inited application {app.app}') + log.info('Successfully initialized application %s', app.app) with count_down_lock: app.init_workers_count_down.value -= 1 - log.info(f'worker is up, remaining workers = %s', app.init_workers_count_down.value) if need_to_register_in_service_discovery: register_task = ioloop.asyncio_loop.create_task(app.service_discovery_client.register_service()) @@ -167,15 +192,18 @@ def register_task_result_handler(future): register_task.add_done_callback(register_task_result_handler) -async def _deinit_app(app: FrontikApplication, need_to_register_in_service_discovery): +async def _deinit_app(app: FrontikApplication,
need_to_register_in_service_discovery: bool) -> None: + deinit_futures: list[Future | Coroutine | None] = [] + if need_to_register_in_service_discovery: deregistration = app.service_discovery_client.deregister_service_and_close() deinit_futures = [asyncio.wait_for(deregistration, timeout=options.stop_timeout)] - else: - deinit_futures = [] + deinit_futures.extend([integration.deinitialize_app(app) for integration in app.available_integrations]) + if app.tornado_http_client is not None: deinit_futures.append(app.tornado_http_client.client_session.close()) + if deinit_futures: try: await asyncio.gather(*[future for future in deinit_futures if future]) diff --git a/frontik/service_discovery.py b/frontik/service_discovery.py index b029322f5..5eb47a1d2 100644 --- a/frontik/service_discovery.py +++ b/frontik/service_discovery.py @@ -1,26 +1,38 @@ +from __future__ import annotations + +import asyncio +import contextlib import logging +import pickle import socket import struct -import pickle -from random import shuffle - -import time - import sys +import time +from queue import Full, Queue +from random import shuffle from threading import Lock, Thread -from queue import Queue, Full +from typing import TYPE_CHECKING -import asyncio -from consul.base import Check, Weight, KVCache, ConsistencyMode, HealthCache -from http_client import consul_parser, options as http_client_options +from consul.base import Check, ConsistencyMode, HealthCache, KVCache, Weight +from http_client import consul_parser +from http_client import options as http_client_options from http_client.balancing import Upstream from tornado.iostream import PipeIOStream, StreamClosedError -from frontik.consul_client import AsyncConsulClient, SyncConsulClient, ClientEventCallback +from frontik.consul_client import AsyncConsulClient, ClientEventCallback, SyncConsulClient from frontik.integrations.statsd import Counters from frontik.options import options from frontik.version import version +if TYPE_CHECKING: + from asyncio import BaseEventLoop + from typing import Any + + from http_client.balancing import Server, UpstreamManager + + from frontik.integrations.statsd import StatsDClient, StatsDClientStub + from frontik.options import Options + DEFAULT_WEIGHT = 100 AUTO_RESOLVE_ADDRESS_VALUE = 'resolve' MESSAGE_HEADER_MAGIC = b'T1uf31f' @@ -33,35 +45,21 @@ log = logging.getLogger('service_discovery') -def _get_service_address(options): +def _get_service_address(options: Options) -> str | None: if options.consul_service_address: - if AUTO_RESOLVE_ADDRESS_VALUE == options.consul_service_address.lower(): + if options.consul_service_address.lower() == AUTO_RESOLVE_ADDRESS_VALUE: hostname = socket.gethostname() return socket.gethostbyname(hostname) return options.consul_service_address - -def get_async_service_discovery(opts, statsd_client, *, event_loop=None): - if not opts.consul_enabled: - log.info('Consul disabled, skipping') - return _AsyncStub() - else: - return _AsyncServiceDiscovery(opts, statsd_client, event_loop) - - -def get_sync_service_discovery(opts, statsd_client): - if not opts.consul_enabled: - log.info('Consul disabled, skipping') - return _SyncStub() - else: - return _SyncServiceDiscovery(opts, statsd_client) + return None -def _make_service_id(options, *, service_name, hostname): +def _make_service_id(options: Options, *, service_name: str | None, hostname: str) -> str: return f'{service_name}-{hostname}-{options.port}' -def _create_http_check(options, address): +def _create_http_check(options: Options, address: str | None) -> 
dict: check_host = options.consul_check_host if not check_host: check_host = address if address else '127.0.0.1' @@ -69,7 +67,7 @@ def _create_http_check(options, address): f'http://{check_host}:{options.port}/status', f'{options.consul_http_check_interval_sec}s', deregister=f'{options.consul_deregister_critical_timeout}', - timeout=f'{options.consul_http_check_timeout_sec}s' + timeout=f'{options.consul_http_check_timeout_sec}s', ) return http_check @@ -79,23 +77,31 @@ def _create_meta(): return {'serviceVersion': version} -def _get_weight_or_default(value): +def _get_weight_or_default(value: dict | None) -> int: return int(value['Value']) if value is not None else DEFAULT_WEIGHT -def _get_hostname_or_raise(node_name: str): +def _get_hostname_or_raise(node_name: str) -> str: if not node_name: - raise RuntimeError('options node_name must be defined') + msg = 'options node_name must be defined' + raise RuntimeError(msg) return node_name class _AsyncServiceDiscovery: - def __init__(self, options, statsd_client, event_loop=None): + def __init__( + self, + options: Options, + statsd_client: StatsDClient | StatsDClientStub, + event_loop: BaseEventLoop | None = None, + ) -> None: self.options = options - self.consul = AsyncConsulClient(host=options.consul_host, - port=options.consul_port, - loop=event_loop, - client_event_callback=ConsulMetricsTracker(statsd_client)) + self.consul = AsyncConsulClient( + host=options.consul_host, + port=options.consul_port, + loop=event_loop, + client_event_callback=ConsulMetricsTracker(statsd_client), + ) self.service_name = options.app self.hostname = _get_hostname_or_raise(options.node_name) self.service_id = _make_service_id(options, service_name=self.service_name, hostname=self.hostname) @@ -104,7 +110,7 @@ def __init__(self, options, statsd_client, event_loop=None): self.consul_weight_consistency_mode = ConsistencyMode(options.consul_weight_consistency_mode.lower()) self.consul_cache_initial_warmup_timeout_sec = options.consul_cache_initial_warmup_timeout_sec - async def register_service(self): + async def register_service(self) -> None: address = _get_service_address(self.options) http_check = _create_http_check(self.options, address) index = None @@ -127,14 +133,15 @@ async def register_service(self): 'check': http_check, 'tags': self.options.consul_tags, 'weights': Weight.weights(weight, 0), - 'caller': self.service_name + 'caller': self.service_name, } if await self.consul.agent.service.register(self.service_name, **register_params): log.info('Successfully registered service %s', register_params) else: - raise Exception(f'Failed to register {self.service_id}') + msg = f'Failed to register {self.service_id}' + raise Exception(msg) - async def deregister_service_and_close(self): + async def deregister_service_and_close(self) -> None: if await self.consul.agent.service.deregister(self.service_id, self.service_name): log.info('Successfully deregistered service %s', self.service_id) else: @@ -142,11 +149,13 @@ async def deregister_service_and_close(self): class _SyncServiceDiscovery: - def __init__(self, options, statsd_client): + def __init__(self, options: Options, statsd_client: StatsDClient | StatsDClientStub) -> None: self.options = options - self.consul = SyncConsulClient(host=options.consul_host, - port=options.consul_port, - client_event_callback=ConsulMetricsTracker(statsd_client)) + self.consul = SyncConsulClient( + host=options.consul_host, + port=options.consul_port, + client_event_callback=ConsulMetricsTracker(statsd_client), + ) 
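# Illustrative aside, not part of the patch: a standalone restatement of what the
# helpers above produce. The behavior mirrors _get_service_address and
# _make_service_id; the option values ('my-app', 'web-1', 8080) are example
# assumptions, not taken from this diff.
import socket

AUTO_RESOLVE_ADDRESS_VALUE = 'resolve'

def resolve_address(consul_service_address: str | None) -> str | None:
    # 'resolve' means "advertise this host's own IP"; any other value passes through
    if not consul_service_address:
        return None
    if consul_service_address.lower() == AUTO_RESOLVE_ADDRESS_VALUE:
        return socket.gethostbyname(socket.gethostname())
    return consul_service_address

def make_service_id(service_name: str, hostname: str, port: int) -> str:
    # mirrors _make_service_id: '<app>-<node_name>-<port>'
    return f'{service_name}-{hostname}-{port}'

# make_service_id('my-app', 'web-1', 8080) == 'my-app-web-1-8080'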
self.service_name = options.app self.hostname = _get_hostname_or_raise(options.node_name) self.service_id = _make_service_id(options, service_name=self.service_name, hostname=self.hostname) @@ -166,7 +175,7 @@ def __init__(self, options, statsd_client): cache_initial_warmup_timeout=self.consul_cache_initial_warmup_timeout_sec, consistency_mode=self.consul_weight_consistency_mode, recurse=False, - caller=self.service_name + caller=self.service_name, ) self.kvCache.add_listener(self._update_register, False) @@ -179,7 +188,7 @@ def register_service(self): self._sync_register(weight) self.kvCache.start() - def _sync_register(self, weight): + def _sync_register(self, weight: int) -> None: register_params = { 'service_id': self.service_id, 'address': self.address, @@ -187,12 +196,13 @@ def _sync_register(self, weight): 'check': self.http_check, 'tags': self.options.consul_tags, 'weights': Weight.weights(weight, 0), - 'caller': self.service_name + 'caller': self.service_name, } if self.consul.agent.service.register(self.service_name, **register_params): log.info('Successfully registered service %s', register_params) else: - raise Exception(f'Failed to register {register_params}') + msg = f'Failed to register {register_params}' + raise Exception(msg) def deregister_service_and_close(self): self.kvCache.stop() @@ -219,19 +229,18 @@ def deregister_service_and_close(self): class ConsulMetricsTracker(ClientEventCallback): - - def __init__(self, statsd_client): + def __init__(self, statsd_client: StatsDClient | StatsDClientStub) -> None: self._statsd_client = statsd_client self._request_counters = Counters() self._statsd_client.send_periodically(self._send_metrics) - def on_http_request_success(self, method, path, response_code): + def on_http_request_success(self, method: str, path: str, response_code: int) -> None: self._request_counters.add(1, result=CONSUL_REQUEST_SUCCESSFUL_RESULT, type=response_code) - def on_http_request_failure(self, method, path, ex): + def on_http_request_failure(self, method: str, path: str, ex: BaseException) -> None: self._request_counters.add(1, result=CONSUL_REQUEST_FAILED_RESULT, type=type(ex).__name__) - def on_http_request_invalid(self, method, path, response_code): + def on_http_request_invalid(self, method: str, path: str, response_code: int) -> None: self._request_counters.add(1, result=CONSUL_REQUEST_FAILED_RESULT, type=response_code) def _send_metrics(self): @@ -239,18 +248,18 @@ def _send_metrics(self): class UpstreamUpdateListener: - def __init__(self, upstream_manager, pipe): + def __init__(self, upstream_manager: UpstreamManager, pipe: int) -> None: self.upstream_manager = upstream_manager self.stream = PipeIOStream(pipe) self.task = asyncio.create_task(self._process()) - async def _process(self): + async def _process(self) -> None: while True: try: await self.stream.read_until(MESSAGE_HEADER_MAGIC) size_header = await self.stream.read_bytes(8) - size, = struct.unpack(MESSAGE_SIZE_STRUCT, size_header) + (size,) = struct.unpack(MESSAGE_SIZE_STRUCT, size_header) data = await self.stream.read_bytes(size) log.debug('received upstreams length: %d', size) upstreams = pickle.loads(data) @@ -263,9 +272,14 @@ async def _process(self): class UpstreamCaches: - def __init__(self, children_pipes, upstreams, service_discovery=None): - self._upstreams_config = {} - self._upstreams_servers = {} + def __init__( + self, + children_pipes: dict[int, Any], + upstreams: dict[str, Upstream], + service_discovery: None | _SyncServiceDiscovery | _SyncStub = None, + ) -> None: + 
self._upstreams_config: dict[str, dict] = {} + self._upstreams_servers: dict[str, list[Server]] = {} self._upstream_list = options.upstreams self._datacenter_list = http_client_options.datacenters self._current_dc = http_client_options.datacenter @@ -274,11 +288,11 @@ def __init__(self, children_pipes, upstreams, service_discovery=None): self._upstreams = upstreams self._children_pipes = children_pipes self._lock = Lock() - self._resend_dict = {} - self._resend_notification = Queue(maxsize=1) + self._resend_dict: dict[int, bool] = {} + self._resend_notification: Queue = Queue(maxsize=1) self._resend_thread = Thread(target=self._resend, daemon=True) - if service_discovery is not None: + if isinstance(service_discovery, _SyncServiceDiscovery): self._resend_thread.start() upstream_cache = KVCache( @@ -290,7 +304,7 @@ def __init__(self, children_pipes, upstreams, service_discovery=None): cache_initial_warmup_timeout=service_discovery.consul_cache_initial_warmup_timeout_sec, consistency_mode=service_discovery.consul_weight_consistency_mode, recurse=True, - caller=self._service_name + caller=self._service_name, ) upstream_cache.add_listener(self._update_upstreams_config, True) upstream_cache.start() @@ -305,7 +319,7 @@ def __init__(self, children_pipes, upstreams, service_discovery=None): watch_seconds=service_discovery.consul_weight_watch_seconds, backoff_delay_seconds=service_discovery.consul_cache_backoff_delay_seconds, dc=dc, - caller=self._service_name + caller=self._service_name, ) health_cache.add_listener(self._update_upstreams_service, True) health_cache.start() @@ -313,17 +327,19 @@ def __init__(self, children_pipes, upstreams, service_discovery=None): if options.fail_start_on_empty_upstream: self._check_empty_upstreams_on_startup() - def _check_empty_upstreams_on_startup(self): + def _check_empty_upstreams_on_startup(self) -> None: empty_upstreams = [k for k, v in self._upstreams.items() if not v.servers] if empty_upstreams: + msg = f'failed startup application, because for next upstreams got empty servers: {empty_upstreams}' raise RuntimeError( - f'failed startup application, because for next upstreams got empty servers: {empty_upstreams}' + msg, ) - def _update_upstreams_service(self, key, values): + def _update_upstreams_service(self, key: str, values: list) -> None: if values is not None: dc, servers = consul_parser.parse_consul_health_servers_data(values) - log.info(f'update servers for upstream {key}, datacenter {dc}: [{",".join(str(s) for s in servers)}]') + servers_str = ','.join(str(s) for s in servers) + log.info('update servers for upstream %s, datacenter %s: [%s]', key, dc, servers_str) self._upstreams_servers[f'{key}-{dc}'] = servers self._update_upstreams(key) @@ -334,14 +350,15 @@ def _update_upstreams_config(self, key, values): key = value['Key'].split('/')[1] if key in self._upstream_list: config = consul_parser.parse_consul_upstream_config(value) - log.info(f'parsed upstream config for {key}:{config}') + log.info('parsed upstream config for %s:%s', key, config) self._upstreams_config[key] = config self._update_upstreams(key) - def _update_upstreams(self, key): + def _update_upstreams(self, key: str) -> None: with self._lock: upstream = self._create_upstream(key) - log.info(f'current servers for upstream {key}: [{",".join(str(s) for s in upstream.servers)}]') + servers = ','.join(str(s) for s in upstream.servers) + log.info('current servers for upstream %s: [%s]', key, servers) current_upstream = self._upstreams.get(key) @@ -353,14 +370,14 @@ def 
_update_upstreams(self, key): if self._children_pipes: self.send_updates(upstream=upstream) - def send_updates(self, upstream=None): + def send_updates(self, upstream: Upstream | None = None) -> None: upstreams = list(self._upstreams.values()) if upstream is None else [upstream] data = pickle.dumps(upstreams) log.debug('sending upstreams to all length: %d', len(data)) for client_id, pipe in self._children_pipes.items(): self._send_update(client_id, pipe, data) - def _send_update(self, client_id, pipe, data): + def _send_update(self, client_id: int, pipe: Any, data: bytes) -> None: header_written = False try: pipe.write(MESSAGE_HEADER_MAGIC + struct.pack(MESSAGE_SIZE_STRUCT, len(data))) @@ -368,17 +385,15 @@ def _send_update(self, client_id, pipe, data): pipe.write(data) pipe.flush() except BlockingIOError: - log.warning(f'client {client_id} pipe blocked') + log.warning('client %s pipe blocked', client_id) if header_written: self._resend_dict[client_id] = True - try: + with contextlib.suppress(Full): self._resend_notification.put_nowait(True) - except Full: - pass except Exception: - log.exception(f'client {client_id} pipe write failed') + log.exception('client %s pipe write failed', client_id) - def _combine_servers(self, key): + def _combine_servers(self, key: str) -> list[Server]: servers_from_all_dc = [] for dc in self._datacenter_list: servers = self._upstreams_servers.get(f'{key}-{dc}') @@ -392,9 +407,11 @@ def _resend(self): time.sleep(1.0) with self._lock: - data = pickle.dumps([self._create_upstream(key) for key in self._upstreams.keys()]) + data = pickle.dumps([self._create_upstream(key) for key in self._upstreams]) clients = list(self._resend_dict.keys()) - log.debug('sending upstreams to %s length: %d', ','.join(clients), len(data)) + if log.isEnabledFor(logging.DEBUG): + client_ids = ','.join(map(str, clients)) + log.debug('sending upstreams to %s length: %d', client_ids, len(data)) self._resend_dict.clear() for client_id in clients: @@ -407,7 +424,31 @@ def _resend(self): self._send_update(client_id, pipe, data) self._send_update(client_id, pipe, data) - def _create_upstream(self, key): + def _create_upstream(self, key: str) -> Upstream: servers = self._combine_servers(key) shuffle(servers) return Upstream(key, self._upstreams_config.get(key, {}), servers) + + +def get_sync_service_discovery( + opts: Options, + statsd_client: StatsDClient | StatsDClientStub, +) -> _SyncServiceDiscovery | _SyncStub: + if not opts.consul_enabled: + log.info('Consul disabled, skipping') + return _SyncStub() + else: + return _SyncServiceDiscovery(opts, statsd_client) + + +def get_async_service_discovery( + opts: Options, + statsd_client: StatsDClient | StatsDClientStub, + *, + event_loop: BaseEventLoop | None = None, +) -> _AsyncServiceDiscovery | _AsyncStub: + if not opts.consul_enabled: + log.info('Consul disabled, skipping') + return _AsyncStub() + else: + return _AsyncServiceDiscovery(opts, statsd_client, event_loop) diff --git a/frontik/testing.py b/frontik/testing.py index 4220b9f3c..2952f0f62 100644 --- a/frontik/testing.py +++ b/frontik/testing.py @@ -2,12 +2,14 @@ import json import logging import re +from collections.abc import Callable +from typing import Any +import pytest from aioresponses import aioresponses from http_client import AIOHttpClientWrapper from http_client.request_response import RequestBuilder, RequestResult from lxml import etree -import pytest from tornado.escape import utf8 from tornado.httpserver import HTTPServer from tornado.log import app_log @@ -16,9 +18,10 @@ 
from yarl import URL from frontik.app import FrontikApplication + # noinspection PyUnresolvedReferences from frontik.loggers import bootstrap_logger -from frontik.media_types import APPLICATION_JSON, APPLICATION_XML, APPLICATION_PROTOBUF, TEXT_PLAIN +from frontik.media_types import APPLICATION_JSON, APPLICATION_PROTOBUF, APPLICATION_XML, TEXT_PLAIN from frontik.options import options from frontik.util import make_url, safe_template @@ -26,6 +29,10 @@ class FrontikTestCase(AsyncHTTPTestCase): """Deprecated, use FrontikTestBase instead""" + def __init__(self, *args, **kwargs): + self._app: FrontikApplication = None # type: ignore + super().__init__(*args, **kwargs) + def get_http_client(self): """Overrides `AsyncHTTPTestCase.get_http_client` to separate unit test HTTPClient from application HTTPClient. @@ -35,7 +42,7 @@ def get_http_client(self): self.forced_client = AIOHttpClientWrapper() return self.forced_client - def fetch(self, path, query=None, **kwargs) -> RequestResult: + def fetch(self, path: str, query: dict | None = None, **kwargs: Any) -> RequestResult: # type: ignore """Extends `AsyncHTTPTestCase.fetch` method with `query` kwarg. This argument accepts a `dict` of request query parameters that will be encoded and added to request path. @@ -44,47 +51,55 @@ def fetch(self, path, query=None, **kwargs) -> RequestResult: query = {} if query is None else query return super().fetch(make_url(path, **query), **kwargs) - def fetch_xml(self, path, query=None, **kwargs): + def fetch_xml(self, path: str, query: dict | None = None, **kwargs: Any) -> etree.Element: """Fetch the request and parse xml document from response body.""" return etree.fromstring(utf8(self.fetch(path, query, **kwargs).raw_body)) - def fetch_json(self, path, query=None, **kwargs): + def fetch_json(self, path: str, query: dict | None = None, **kwargs: Any) -> Any: """Fetch the request and parse JSON tree from response body.""" return json.loads(self.fetch(path, query, **kwargs).raw_body) - def patch_app_http_client(self, app): + def patch_app_http_client(self, app: FrontikApplication) -> None: """Patches application HTTPClient to enable requests stubbing.""" - patch_http_client(app.tornado_http_client) - - def set_stub(self, url, request_method='GET', - response_function=None, response_file=None, response_body='', - response_code=200, response_headers=None, - response_body_processor=safe_template, **kwargs): - + patch_http_client(app.tornado_http_client) # type: ignore + + def set_stub( + self, + url: str, + request_method: str = 'GET', + response_function: Callable | None = None, + response_file: str | None = None, + response_body: Any = '', + response_code: int = 200, + response_headers: Any = None, + response_body_processor: Callable = safe_template, + **kwargs: Any, + ) -> None: set_stub( - self._app.tornado_http_client, url, request_method, - response_function, response_file, response_body, response_code, response_headers, - response_body_processor, **kwargs + self._app.tornado_http_client, + url, + request_method, + response_function, + response_file, + response_body, + response_code, + response_headers, + response_body_processor, + **kwargs, ) def tearDown(self) -> None: if self._app.tornado_http_client is not None: - self.io_loop.run_sync( - self._app.tornado_http_client.client_session.close - ) + self.io_loop.run_sync(self._app.tornado_http_client.client_session.close) if self.forced_client is not None: - self.io_loop.run_sync( - self.forced_client.client_session.close - ) + 
self.io_loop.run_sync(self.forced_client.client_session.close) super().tearDown() - def configure_app(self, **kwargs): + def configure_app(self, **kwargs: Any) -> None: """Updates or adds options to application config.""" for name, val in kwargs.items(): setattr(self._app.config, name, val) - return self - class FrontikTestBase: @pytest.fixture(scope='session') @@ -95,13 +110,13 @@ def event_loop(self): loop.close() @pytest.fixture(scope='class', autouse=True) - def enable_consul(self): + def _enable_consul(self): options.consul_enabled = False @pytest.fixture(scope='class', autouse=True) - async def inited_test_app(self, test_app, enable_consul): + async def inited_test_app(self, test_app, _enable_consul): await test_app.init() - yield test_app + return test_app @pytest.fixture(scope='class', autouse=True) async def test_server_port(self, test_app): @@ -123,40 +138,72 @@ async def app_client(self): yield http_client await asyncio.wait_for(http_client.client_session.close(), timeout=5) - @pytest.fixture(scope='function', autouse=True) - def setup_client_server(self, inited_test_app: FrontikApplication, test_server_port: int, - app_client: AIOHttpClientWrapper): + @pytest.fixture(autouse=True) + def _setup_client_server( + self, + inited_test_app: FrontikApplication, + test_server_port: int, + app_client: AIOHttpClientWrapper, + ) -> None: self.app = inited_test_app self.port = test_server_port self.http_client = app_client - @pytest.fixture(scope='function', autouse=True) + @pytest.fixture(autouse=True) def setup_mock_client(self): with aioresponses(passthrough=['http://127.0.0.1']) as mock_client: self.mock_client = mock_client yield self.mock_client - async def fetch(self, path: str, query=None, method='GET', request_timeout=2, **kwargs) -> RequestResult: + async def fetch( + self, + path: str, + query: dict | None = None, + method: str = 'GET', + request_timeout: float = 2, + **kwargs: Any, + ) -> RequestResult: query = {} if query is None else query path = make_url(path, **query) host = f'http://127.0.0.1:{self.port}' - request = RequestBuilder(host, 'test', path, 'test_request', method=method, request_timeout=request_timeout, - **kwargs) + request = RequestBuilder( + host, + 'test', + path, + 'test_request', + method=method, + request_timeout=request_timeout, + **kwargs, + ) return await self.http_client.fetch(request) - async def fetch_xml(self, path, query=None, method='GET', **kwargs): + async def fetch_xml( + self, + path: str, + query: dict | None = None, + method: str = 'GET', + **kwargs: Any, + ) -> etree.Element: resp = await self.fetch(path, query, method, **kwargs) return etree.fromstring(utf8(resp.raw_body)) - async def fetch_json(self, path, query=None, method='GET', **kwargs): + async def fetch_json(self, path: str, query: dict | None = None, method: str = 'GET', **kwargs: Any) -> Any: resp = await self.fetch(path, query, method, **kwargs) return json.loads(resp.raw_body) - def set_stub(self, url: URL | str | re.Pattern, request_method='GET', - response_file=None, response_body='', - response_code=200, response_headers=None, - response_body_processor=safe_template, repeat=True, **kwargs): + def set_stub( + self, + url: URL | str | re.Pattern, + request_method: str = 'GET', + response_file: str | None = None, + response_body: Any = '', + response_code: int = 200, + response_headers: dict | None = None, + response_body_processor: Callable | None = safe_template, + repeat: bool = True, + **kwargs: Any, + ) -> None: """ url and request_method are related to mocked resource 
other params are related to mocked response @@ -178,15 +225,21 @@ def set_stub(self, url: URL | str | re.Pattern, request_method='GET', if response_headers is not None: headers.update(response_headers) - self.mock_client.add(url, method=request_method, status=response_code, headers=headers, body=content, - repeat=repeat) + self.mock_client.add( + url, + method=request_method, + status=response_code, + headers=headers, + body=content, + repeat=repeat, + ) - def configure_app(self, **kwargs): + def configure_app(self, **kwargs: Any) -> None: for name, val in kwargs.items(): setattr(self.app.config, name, val) @staticmethod - def guess_content_type_headers(file_name): + def guess_content_type_headers(file_name: str) -> dict[str, str]: if file_name.endswith('.json'): return {'Content-Type': APPLICATION_JSON} if file_name.endswith('.xml'): diff --git a/frontik/timeout_tracking.py b/frontik/timeout_tracking.py index ff33f78aa..899d7133f 100644 --- a/frontik/timeout_tracking.py +++ b/frontik/timeout_tracking.py @@ -1,21 +1,30 @@ +from __future__ import annotations + import logging from collections import namedtuple from functools import partial +from typing import TYPE_CHECKING from tornado.ioloop import PeriodicCallback from frontik.options import options from frontik.request_context import get_handler_name +if TYPE_CHECKING: + from collections.abc import Callable + + from http_client.request_response import RequestBuilder + timeout_tracking_logger = logging.getLogger('timeout_tracking') -LoggingData = namedtuple('LoggingData', - ('outer_caller', 'outer_timeout_ms', 'upstream', 'handler_name', 'request_timeout_ms')) +LoggingData = namedtuple( + 'LoggingData', + ('outer_caller', 'outer_timeout_ms', 'upstream', 'handler_name', 'request_timeout_ms'), +) class TimeoutCounter(dict): - - def increment(self, k, already_spent_ms): + def increment(self, k: LoggingData, already_spent_ms: float) -> None: count, max_already_spent_ms = super().__getitem__(k) super().__setitem__(k, (count + 1, max(already_spent_ms, max_already_spent_ms))) @@ -26,8 +35,9 @@ def __missing__(self, key): class Sender: def __init__(self) -> None: self._timeout_counters = TimeoutCounter() + self._send_stats_callback: PeriodicCallback | None = None - def send_data(self, data, already_spent_ms): + def send_data(self, data: LoggingData, already_spent_ms: float) -> None: self._timeout_counters.increment(data, already_spent_ms) @property @@ -36,12 +46,13 @@ def send_stats_callback(self): if options.send_timeout_stats_interval_ms: self._send_stats_callback = PeriodicCallback( partial(self.__send_aggregated_stats, options.send_timeout_stats_interval_ms), - options.send_timeout_stats_interval_ms) + options.send_timeout_stats_interval_ms, + ) else: self._send_stats_callback = None return self._send_stats_callback - def start_sending_if_needed(self): + def start_sending_if_needed(self) -> None: if self.send_stats_callback and not self.send_stats_callback.is_running(): self.send_stats_callback.start() @@ -49,47 +60,67 @@ def __send_aggregated_stats(self, interval_ms): timeout_tracking_logger.debug('timeout stats size: %d', len(self._timeout_counters)) for data, counters in self._timeout_counters.items(): count, max_already_spent_ms = counters - timeout_tracking_logger.error('For last %d ms, got %d requests from <%s> expecting timeout=%d ms, ' - 'but calling upstream <%s> from handler <%s> with timeout %d ms, ' - 'arbitrary we spend up to %d ms before the call', - interval_ms, - count, - data.outer_caller, - data.outer_timeout_ms, - 
data.upstream,
-                                          data.handler_name,
-                                          data.request_timeout_ms,
-                                          max_already_spent_ms)
+            timeout_tracking_logger.error(
+                'For last %d ms, got %d requests from <%s> expecting timeout=%d ms, '
+                'but calling upstream <%s> from handler <%s> with timeout %d ms, '
+                'arbitrary we spend up to %d ms before the call',
+                interval_ms,
+                count,
+                data.outer_caller,
+                data.outer_timeout_ms,
+                data.upstream,
+                data.handler_name,
+                data.request_timeout_ms,
+                max_already_spent_ms,
+            )
         self._timeout_counters.clear()
 
 
 _sender = Sender()
 
 
-def get_timeout_checker(outer_caller, outer_timeout_ms, time_since_outer_request_start_ms_supplier, *,
-                        threshold_ms=100):
-    _sender.start_sending_if_needed()
-    return TimeoutChecker(outer_caller, outer_timeout_ms, time_since_outer_request_start_ms_supplier,
-                          threshold_ms=threshold_ms)
-
-
 class TimeoutChecker:
-    def __init__(self, outer_caller, outer_timeout_ms, time_since_outer_request_start_sec_supplier, *,
-                 threshold_ms=100):
+    def __init__(
+        self,
+        outer_caller: str | None,
+        outer_timeout_ms: float,
+        time_since_outer_request_start_sec_supplier: Callable,
+        *,
+        threshold_ms: float = 100,
+    ) -> None:
         self.outer_caller = outer_caller
         self.outer_timeout_ms = outer_timeout_ms
         self.time_since_outer_request_start_sec_supplier = time_since_outer_request_start_sec_supplier
         self.threshold_ms = threshold_ms
 
-    def check(self, request):
+    def check(self, request: RequestBuilder) -> None:
         if self.outer_timeout_ms:
             already_spent_time_ms = self.time_since_outer_request_start_sec_supplier() * 1000
             expected_timeout_ms = self.outer_timeout_ms - already_spent_time_ms
             request_timeout_ms = request.request_time_left * 1000
             diff = request_timeout_ms - expected_timeout_ms
             if diff > self.threshold_ms:
-                data = LoggingData(self.outer_caller, self.outer_timeout_ms,
-                                   request.upstream_name,
-                                   get_handler_name(),
-                                   request_timeout_ms)
+                data = LoggingData(
+                    self.outer_caller,
+                    self.outer_timeout_ms,
+                    request.upstream_name,
+                    get_handler_name(),
+                    request_timeout_ms,
+                )
                 _sender.send_data(data, already_spent_time_ms)
+
+
+def get_timeout_checker(
+    outer_caller: str | None,
+    outer_timeout_ms: float,
+    time_since_outer_request_start_ms_supplier: Callable,
+    *,
+    threshold_ms: float = 100,
+) -> TimeoutChecker:
+    _sender.start_sending_if_needed()
+    return TimeoutChecker(
+        outer_caller,
+        outer_timeout_ms,
+        time_since_outer_request_start_ms_supplier,
+        threshold_ms=threshold_ms,
+    )
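Editor's note: the budget arithmetic in TimeoutChecker.check above is easier to follow with concrete numbers; a minimal sketch (all values below are hypothetical):

# A caller handed us a 500 ms budget, 150 ms of it is already spent,
# and the outgoing request is configured with a 600 ms timeout.
outer_timeout_ms = 500.0
already_spent_time_ms = 150.0
request_timeout_ms = 600.0
threshold_ms = 100

# Budget left from the caller's point of view: 350 ms.
expected_timeout_ms = outer_timeout_ms - already_spent_time_ms

# The outgoing timeout exceeds the remaining budget by 250 ms, which is
# above the 100 ms threshold, so LoggingData for this call would be
# recorded and reported by the periodic stats sender.
diff = request_timeout_ms - expected_timeout_ms
assert diff > threshold_ms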
diff --git a/frontik/util.py b/frontik/util.py
index 1c378b6ad..3a97d8aad 100644
--- a/frontik/util.py
+++ b/frontik/util.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import asyncio
 import datetime
 import logging
@@ -5,16 +7,22 @@
 import random
 import re
 from string import Template
+from typing import TYPE_CHECKING
 from urllib.parse import urlencode
 from uuid import uuid4
 
+from http_client.util import any_to_bytes, any_to_unicode, to_unicode
 from tornado.escape import utf8
-from http_client.util import to_unicode, any_to_unicode, any_to_bytes
+
+if TYPE_CHECKING:
+    from typing import Any
+
+    from frontik.handler import PageHandler
 
 logger = logging.getLogger('util')
 
 
-def safe_template(format_string, **kwargs):
+def safe_template(format_string: str, **kwargs: Any) -> str:
     """Safe templating using PEP-292 template strings (see https://docs.python.org/3/library/string.html#template-strings).
@@ -25,7 +33,7 @@ def safe_template(format_string, **kwargs):
     return Template(to_unicode(format_string)).safe_substitute(**kwargs)
 
 
-def make_qs(query_args):
+def make_qs(query_args: dict) -> str:
     return urlencode([(k, v) for k, v in query_args.items() if v is not None], doseq=True)
 
 
@@ -33,7 +41,7 @@ def make_body(data):
     return make_qs(data) if isinstance(data, dict) else any_to_bytes(data)
 
 
-def make_url(base, **query_args):
+def make_url(base: str, **query_args: Any) -> str:
     """
     Builds URL from base part and query arguments passed as kwargs.
     Returns unicode string
@@ -46,20 +54,21 @@
     return to_unicode(base)
 
 
-def decode_string_from_charset(string, charsets=('cp1251',)):
-    if isinstance(string, str):
-        return string
+def decode_string_from_charset(value: bytes, charsets: tuple = ('cp1251',)) -> str:
+    if isinstance(value, str):
+        return value
 
     decoded_body = None
     for c in charsets:
         try:
-            decoded_body = string.decode(c)
+            decoded_body = value.decode(c)
             break
         except UnicodeError:
             continue
 
     if decoded_body is None:
-        raise UnicodeError('Could not decode string (tried: {})'.format(', '.join(charsets)))
+        msg = 'Could not decode string (tried: {})'.format(', '.join(charsets))
+        raise UnicodeError(msg)
 
     return decoded_body
 
 
@@ -72,13 +81,13 @@ def choose_boundary():
     return utf8(uuid4().hex)
 
 
-def get_cookie_or_url_param_value(handler, param_name):
+def get_cookie_or_url_param_value(handler: PageHandler, param_name: str) -> str | None:
     return handler.get_argument(param_name, handler.get_cookie(param_name, None))
 
 
-def reverse_regex_named_groups(pattern, *args, **kwargs):
+def reverse_regex_named_groups(pattern: str, *args: Any, **kwargs: Any) -> str:
     class GroupReplacer:
-        def __init__(self, args, kwargs):
+        def __init__(self, args: Any, kwargs: Any) -> None:
             self.args, self.kwargs = args, kwargs
             self.current_arg = 0
 
@@ -94,7 +103,8 @@ def __call__(self, match):
                 value = self.args[self.current_arg]
                 self.current_arg += 1
             else:
-                raise ValueError('Cannot reverse regex: required number of arguments not found')
+                msg = 'Cannot reverse regex: required number of arguments not found'
+                raise ValueError(msg)
 
             return any_to_unicode(value)
 
@@ -102,9 +112,9 @@ def __call__(self, match):
     return result.replace('^', '').replace('$', '')
 
 
-def get_abs_path(root_path, relative_path):
+def get_abs_path(root_path: str, relative_path: str | None) -> str:
     if relative_path is None or os.path.isabs(relative_path):
-        return relative_path
+        return relative_path  # type: ignore
 
     return os.path.normpath(os.path.join(root_path, relative_path))
 
 
@@ -120,20 +130,20 @@ def check_request_id(request_id: str) -> bool:
         int(request_id, 16)
         return True
     except ValueError:
-        logger.error(f'request_id = {request_id} is not valid hex-format')
+        logger.error('request_id = %s is not valid hex-format', request_id)
         return False
 
 
-async def gather_list(*coros):
+async def gather_list(*coros: Any) -> tuple:
     """
     Similar to asyncio.gather, but None can be used in coros_or_futures param
     """
     return await asyncio.gather(*[asyncio.sleep(0) if coro is None else coro for coro in coros])
 
 
-async def gather_dict(coro_dict):
+async def gather_dict(coro_dict: dict) -> dict:
     """
     None can be used in coros, see :func:`gather_list`
     """
     results = await gather_list(*coro_dict.values())
-    return dict(zip(coro_dict.keys(), results))
+    return dict(zip(coro_dict.keys(), results, strict=True))
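Editor's note: a minimal usage sketch for the None-tolerant gather helpers above (the dictionary contents are hypothetical; gather_dict is the helper defined in frontik/util.py):

import asyncio

from frontik.util import gather_dict


async def demo() -> None:
    # None values resolve to None via the asyncio.sleep(0) substitution in gather_list
    results = await gather_dict({'a': asyncio.sleep(0, result=1), 'b': None})
    assert results == {'a': 1, 'b': None}


asyncio.run(demo())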
diff --git a/frontik/validator.py b/frontik/validator.py
index f2c8b7691..3ac581aec 100644
--- a/frontik/validator.py
+++ b/frontik/validator.py
@@ -1,5 +1,4 @@
 from enum import Enum
-from typing import List, Optional
 
 from pydantic import BaseModel, validator
 
@@ -15,16 +14,17 @@ class Validators(Enum):
 
 
 class BaseValidationModel(BaseModel):
-    boolean: Optional[bool]
-    string: Optional[str]
-    integer: Optional[int]
-    float: Optional[float]
-    list_int: Optional[List[int]]
-    list_str: Optional[List[str]]
-    path_safe_string: Optional[str]
+    boolean: bool | None
+    string: str | None
+    integer: int | None
+    float: float | None
+    list_int: list[int] | None
+    list_str: list[str] | None
+    path_safe_string: str | None
 
     @validator('path_safe_string', pre=True)
     @classmethod
     def check_path_safe_string(cls, value):
-        assert isinstance(value, str) and '/' not in value
+        assert isinstance(value, str)
+        assert '/' not in value
         return value
diff --git a/frontik/xml_util.py b/frontik/xml_util.py
index 4f0f646fb..9f686f10a 100644
--- a/frontik/xml_util.py
+++ b/frontik/xml_util.py
@@ -1,4 +1,6 @@
+import logging
 import time
+from typing import Any
 
 from lxml import etree
 
@@ -7,10 +9,10 @@
 parser = etree.XMLParser()
 
 
-def xml_from_file(filename, log):
+def xml_from_file(filename: str, log: logging.Logger) -> Any:
     try:
         return etree.parse(filename).getroot()
-    except IOError:
+    except OSError:
         log.error('failed to read xml file %s', filename)
         raise
     except Exception:
@@ -25,7 +27,7 @@ def xsl_from_file(filename, log):
     return result
 
 
-def dict_to_xml(dict_value, element_name):
+def dict_to_xml(dict_value: dict, element_name: str) -> etree.Element:
     element = etree.Element(element_name)
     if not isinstance(dict_value, dict):
         element.text = any_to_unicode(dict_value)
@@ -36,7 +38,7 @@ def dict_to_xml(dict_value, element_name):
     return element
 
 
-def xml_to_dict(xml):
+def xml_to_dict(xml: etree.Element) -> dict:
     if len(xml) == 0:
         return xml.text if xml.text is not None else ''
diff --git a/poetry.lock b/poetry.lock
index a3824ae76..79a8307dc 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1,4 +1,4 @@
-# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand.
+# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand.
 
 [[package]]
 name = "aiohttp"
@@ -222,6 +222,50 @@ files = [
     {file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"},
 ]
 
+[[package]]
+name = "black"
+version = "23.9.1"
+description = "The uncompromising code formatter."
+optional = false +python-versions = ">=3.8" +files = [ + {file = "black-23.9.1-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:d6bc09188020c9ac2555a498949401ab35bb6bf76d4e0f8ee251694664df6301"}, + {file = "black-23.9.1-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:13ef033794029b85dfea8032c9d3b92b42b526f1ff4bf13b2182ce4e917f5100"}, + {file = "black-23.9.1-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:75a2dc41b183d4872d3a500d2b9c9016e67ed95738a3624f4751a0cb4818fe71"}, + {file = "black-23.9.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13a2e4a93bb8ca74a749b6974925c27219bb3df4d42fc45e948a5d9feb5122b7"}, + {file = "black-23.9.1-cp310-cp310-win_amd64.whl", hash = "sha256:adc3e4442eef57f99b5590b245a328aad19c99552e0bdc7f0b04db6656debd80"}, + {file = "black-23.9.1-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:8431445bf62d2a914b541da7ab3e2b4f3bc052d2ccbf157ebad18ea126efb91f"}, + {file = "black-23.9.1-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:8fc1ddcf83f996247505db6b715294eba56ea9372e107fd54963c7553f2b6dfe"}, + {file = "black-23.9.1-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:7d30ec46de88091e4316b17ae58bbbfc12b2de05e069030f6b747dfc649ad186"}, + {file = "black-23.9.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:031e8c69f3d3b09e1aa471a926a1eeb0b9071f80b17689a655f7885ac9325a6f"}, + {file = "black-23.9.1-cp311-cp311-win_amd64.whl", hash = "sha256:538efb451cd50f43aba394e9ec7ad55a37598faae3348d723b59ea8e91616300"}, + {file = "black-23.9.1-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:638619a559280de0c2aa4d76f504891c9860bb8fa214267358f0a20f27c12948"}, + {file = "black-23.9.1-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:a732b82747235e0542c03bf352c126052c0fbc458d8a239a94701175b17d4855"}, + {file = "black-23.9.1-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:cf3a4d00e4cdb6734b64bf23cd4341421e8953615cba6b3670453737a72ec204"}, + {file = "black-23.9.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cf99f3de8b3273a8317681d8194ea222f10e0133a24a7548c73ce44ea1679377"}, + {file = "black-23.9.1-cp38-cp38-win_amd64.whl", hash = "sha256:14f04c990259576acd093871e7e9b14918eb28f1866f91968ff5524293f9c573"}, + {file = "black-23.9.1-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:c619f063c2d68f19b2d7270f4cf3192cb81c9ec5bc5ba02df91471d0b88c4c5c"}, + {file = "black-23.9.1-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:6a3b50e4b93f43b34a9d3ef00d9b6728b4a722c997c99ab09102fd5efdb88325"}, + {file = "black-23.9.1-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:c46767e8df1b7beefb0899c4a95fb43058fa8500b6db144f4ff3ca38eb2f6393"}, + {file = "black-23.9.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50254ebfa56aa46a9fdd5d651f9637485068a1adf42270148cd101cdf56e0ad9"}, + {file = "black-23.9.1-cp39-cp39-win_amd64.whl", hash = "sha256:403397c033adbc45c2bd41747da1f7fc7eaa44efbee256b53842470d4ac5a70f"}, + {file = "black-23.9.1-py3-none-any.whl", hash = "sha256:6ccd59584cc834b6d127628713e4b6b968e5f79572da66284532525a042549f9"}, + {file = "black-23.9.1.tar.gz", hash = "sha256:24b6b3ff5c6d9ea08a8888f6977eae858e1f340d7260cf56d70a49823236b62d"}, +] + +[package.dependencies] +click = ">=8.0.0" +mypy-extensions = ">=0.4.3" +packaging = ">=22.0" +pathspec = ">=0.9.0" +platformdirs = ">=2" + +[package.extras] +colorama = ["colorama (>=0.4.3)"] +d = ["aiohttp (>=3.7.4)"] +jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] +uvloop = ["uvloop 
(>=0.15.2)"] + [[package]] name = "cachetools" version = "5.3.1" @@ -269,6 +313,20 @@ files = [ [package.extras] unicode-backport = ["unicodedata2"] +[[package]] +name = "click" +version = "8.1.7" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.7" +files = [ + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + [[package]] name = "colorama" version = "0.4.6" @@ -807,6 +865,62 @@ files = [ {file = "multidict-6.0.4.tar.gz", hash = "sha256:3666906492efb76453c0e7b97f2cf459b0682e7402c0489a95484965dbc1da49"}, ] +[[package]] +name = "mypy" +version = "1.5.1" +description = "Optional static typing for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "mypy-1.5.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f33592ddf9655a4894aef22d134de7393e95fcbdc2d15c1ab65828eee5c66c70"}, + {file = "mypy-1.5.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:258b22210a4a258ccd077426c7a181d789d1121aca6db73a83f79372f5569ae0"}, + {file = "mypy-1.5.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9ec1f695f0c25986e6f7f8778e5ce61659063268836a38c951200c57479cc12"}, + {file = "mypy-1.5.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:abed92d9c8f08643c7d831300b739562b0a6c9fcb028d211134fc9ab20ccad5d"}, + {file = "mypy-1.5.1-cp310-cp310-win_amd64.whl", hash = "sha256:a156e6390944c265eb56afa67c74c0636f10283429171018446b732f1a05af25"}, + {file = "mypy-1.5.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6ac9c21bfe7bc9f7f1b6fae441746e6a106e48fc9de530dea29e8cd37a2c0cc4"}, + {file = "mypy-1.5.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:51cb1323064b1099e177098cb939eab2da42fea5d818d40113957ec954fc85f4"}, + {file = "mypy-1.5.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:596fae69f2bfcb7305808c75c00f81fe2829b6236eadda536f00610ac5ec2243"}, + {file = "mypy-1.5.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:32cb59609b0534f0bd67faebb6e022fe534bdb0e2ecab4290d683d248be1b275"}, + {file = "mypy-1.5.1-cp311-cp311-win_amd64.whl", hash = "sha256:159aa9acb16086b79bbb0016145034a1a05360626046a929f84579ce1666b315"}, + {file = "mypy-1.5.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f6b0e77db9ff4fda74de7df13f30016a0a663928d669c9f2c057048ba44f09bb"}, + {file = "mypy-1.5.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:26f71b535dfc158a71264e6dc805a9f8d2e60b67215ca0bfa26e2e1aa4d4d373"}, + {file = "mypy-1.5.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fc3a600f749b1008cc75e02b6fb3d4db8dbcca2d733030fe7a3b3502902f161"}, + {file = "mypy-1.5.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:26fb32e4d4afa205b24bf645eddfbb36a1e17e995c5c99d6d00edb24b693406a"}, + {file = "mypy-1.5.1-cp312-cp312-win_amd64.whl", hash = "sha256:82cb6193de9bbb3844bab4c7cf80e6227d5225cc7625b068a06d005d861ad5f1"}, + {file = "mypy-1.5.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4a465ea2ca12804d5b34bb056be3a29dc47aea5973b892d0417c6a10a40b2d65"}, + {file = "mypy-1.5.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9fece120dbb041771a63eb95e4896791386fe287fefb2837258925b8326d6160"}, + {file = "mypy-1.5.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:d28ddc3e3dfeab553e743e532fb95b4e6afad51d4706dd22f28e1e5e664828d2"}, + {file = "mypy-1.5.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:57b10c56016adce71fba6bc6e9fd45d8083f74361f629390c556738565af8eeb"}, + {file = "mypy-1.5.1-cp38-cp38-win_amd64.whl", hash = "sha256:ff0cedc84184115202475bbb46dd99f8dcb87fe24d5d0ddfc0fe6b8575c88d2f"}, + {file = "mypy-1.5.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8f772942d372c8cbac575be99f9cc9d9fb3bd95c8bc2de6c01411e2c84ebca8a"}, + {file = "mypy-1.5.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5d627124700b92b6bbaa99f27cbe615c8ea7b3402960f6372ea7d65faf376c14"}, + {file = "mypy-1.5.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:361da43c4f5a96173220eb53340ace68cda81845cd88218f8862dfb0adc8cddb"}, + {file = "mypy-1.5.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:330857f9507c24de5c5724235e66858f8364a0693894342485e543f5b07c8693"}, + {file = "mypy-1.5.1-cp39-cp39-win_amd64.whl", hash = "sha256:c543214ffdd422623e9fedd0869166c2f16affe4ba37463975043ef7d2ea8770"}, + {file = "mypy-1.5.1-py3-none-any.whl", hash = "sha256:f757063a83970d67c444f6e01d9550a7402322af3557ce7630d3c957386fa8f5"}, + {file = "mypy-1.5.1.tar.gz", hash = "sha256:b031b9601f1060bf1281feab89697324726ba0c0bae9d7cd7ab4b690940f0b92"}, +] + +[package.dependencies] +mypy-extensions = ">=1.0.0" +typing-extensions = ">=4.1.0" + +[package.extras] +dmypy = ["psutil (>=4.0)"] +install-types = ["pip"] +reports = ["lxml"] + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." +optional = false +python-versions = ">=3.5" +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] + [[package]] name = "opentelemetry-api" version = "1.17.0" @@ -968,6 +1082,17 @@ files = [ {file = "packaging-23.1.tar.gz", hash = "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f"}, ] +[[package]] +name = "pathspec" +version = "0.11.2" +description = "Utility library for gitignore style pattern matching of file paths." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "pathspec-0.11.2-py3-none-any.whl", hash = "sha256:1d6ed233af05e679efb96b1851550ea95bbb64b7c490b0f5aa52996c11e92a20"}, + {file = "pathspec-0.11.2.tar.gz", hash = "sha256:e0d8d0ac2f12da61956eb2306b69f9469b42f4deb0f3cb6ed47b9cce9996ced3"}, +] + [[package]] name = "platformdirs" version = "3.10.0" @@ -1033,47 +1158,47 @@ files = [ [[package]] name = "pydantic" -version = "1.10.12" +version = "1.10.13" description = "Data validation and settings management using python type hints" optional = false python-versions = ">=3.7" files = [ - {file = "pydantic-1.10.12-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a1fcb59f2f355ec350073af41d927bf83a63b50e640f4dbaa01053a28b7a7718"}, - {file = "pydantic-1.10.12-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b7ccf02d7eb340b216ec33e53a3a629856afe1c6e0ef91d84a4e6f2fb2ca70fe"}, - {file = "pydantic-1.10.12-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8fb2aa3ab3728d950bcc885a2e9eff6c8fc40bc0b7bb434e555c215491bcf48b"}, - {file = "pydantic-1.10.12-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:771735dc43cf8383959dc9b90aa281f0b6092321ca98677c5fb6125a6f56d58d"}, - {file = "pydantic-1.10.12-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ca48477862372ac3770969b9d75f1bf66131d386dba79506c46d75e6b48c1e09"}, - {file = "pydantic-1.10.12-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a5e7add47a5b5a40c49b3036d464e3c7802f8ae0d1e66035ea16aa5b7a3923ed"}, - {file = "pydantic-1.10.12-cp310-cp310-win_amd64.whl", hash = "sha256:e4129b528c6baa99a429f97ce733fff478ec955513630e61b49804b6cf9b224a"}, - {file = "pydantic-1.10.12-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b0d191db0f92dfcb1dec210ca244fdae5cbe918c6050b342d619c09d31eea0cc"}, - {file = "pydantic-1.10.12-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:795e34e6cc065f8f498c89b894a3c6da294a936ee71e644e4bd44de048af1405"}, - {file = "pydantic-1.10.12-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:69328e15cfda2c392da4e713443c7dbffa1505bc9d566e71e55abe14c97ddc62"}, - {file = "pydantic-1.10.12-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2031de0967c279df0d8a1c72b4ffc411ecd06bac607a212892757db7462fc494"}, - {file = "pydantic-1.10.12-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:ba5b2e6fe6ca2b7e013398bc7d7b170e21cce322d266ffcd57cca313e54fb246"}, - {file = "pydantic-1.10.12-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2a7bac939fa326db1ab741c9d7f44c565a1d1e80908b3797f7f81a4f86bc8d33"}, - {file = "pydantic-1.10.12-cp311-cp311-win_amd64.whl", hash = "sha256:87afda5539d5140cb8ba9e8b8c8865cb5b1463924d38490d73d3ccfd80896b3f"}, - {file = "pydantic-1.10.12-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:549a8e3d81df0a85226963611950b12d2d334f214436a19537b2efed61b7639a"}, - {file = "pydantic-1.10.12-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:598da88dfa127b666852bef6d0d796573a8cf5009ffd62104094a4fe39599565"}, - {file = "pydantic-1.10.12-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba5c4a8552bff16c61882db58544116d021d0b31ee7c66958d14cf386a5b5350"}, - {file = "pydantic-1.10.12-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c79e6a11a07da7374f46970410b41d5e266f7f38f6a17a9c4823db80dadf4303"}, - {file = "pydantic-1.10.12-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = 
"sha256:ab26038b8375581dc832a63c948f261ae0aa21f1d34c1293469f135fa92972a5"}, - {file = "pydantic-1.10.12-cp37-cp37m-win_amd64.whl", hash = "sha256:e0a16d274b588767602b7646fa05af2782576a6cf1022f4ba74cbb4db66f6ca8"}, - {file = "pydantic-1.10.12-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6a9dfa722316f4acf4460afdf5d41d5246a80e249c7ff475c43a3a1e9d75cf62"}, - {file = "pydantic-1.10.12-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a73f489aebd0c2121ed974054cb2759af8a9f747de120acd2c3394cf84176ccb"}, - {file = "pydantic-1.10.12-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b30bcb8cbfccfcf02acb8f1a261143fab622831d9c0989707e0e659f77a18e0"}, - {file = "pydantic-1.10.12-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2fcfb5296d7877af406ba1547dfde9943b1256d8928732267e2653c26938cd9c"}, - {file = "pydantic-1.10.12-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:2f9a6fab5f82ada41d56b0602606a5506aab165ca54e52bc4545028382ef1c5d"}, - {file = "pydantic-1.10.12-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:dea7adcc33d5d105896401a1f37d56b47d443a2b2605ff8a969a0ed5543f7e33"}, - {file = "pydantic-1.10.12-cp38-cp38-win_amd64.whl", hash = "sha256:1eb2085c13bce1612da8537b2d90f549c8cbb05c67e8f22854e201bde5d98a47"}, - {file = "pydantic-1.10.12-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ef6c96b2baa2100ec91a4b428f80d8f28a3c9e53568219b6c298c1125572ebc6"}, - {file = "pydantic-1.10.12-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6c076be61cd0177a8433c0adcb03475baf4ee91edf5a4e550161ad57fc90f523"}, - {file = "pydantic-1.10.12-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d5a58feb9a39f481eda4d5ca220aa8b9d4f21a41274760b9bc66bfd72595b86"}, - {file = "pydantic-1.10.12-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5f805d2d5d0a41633651a73fa4ecdd0b3d7a49de4ec3fadf062fe16501ddbf1"}, - {file = "pydantic-1.10.12-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:1289c180abd4bd4555bb927c42ee42abc3aee02b0fb2d1223fb7c6e5bef87dbe"}, - {file = "pydantic-1.10.12-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5d1197e462e0364906cbc19681605cb7c036f2475c899b6f296104ad42b9f5fb"}, - {file = "pydantic-1.10.12-cp39-cp39-win_amd64.whl", hash = "sha256:fdbdd1d630195689f325c9ef1a12900524dceb503b00a987663ff4f58669b93d"}, - {file = "pydantic-1.10.12-py3-none-any.whl", hash = "sha256:b749a43aa51e32839c9d71dc67eb1e4221bb04af1033a32e3923d46f9effa942"}, - {file = "pydantic-1.10.12.tar.gz", hash = "sha256:0fe8a415cea8f340e7a9af9c54fc71a649b43e8ca3cc732986116b3cb135d303"}, + {file = "pydantic-1.10.13-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:efff03cc7a4f29d9009d1c96ceb1e7a70a65cfe86e89d34e4a5f2ab1e5693737"}, + {file = "pydantic-1.10.13-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3ecea2b9d80e5333303eeb77e180b90e95eea8f765d08c3d278cd56b00345d01"}, + {file = "pydantic-1.10.13-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1740068fd8e2ef6eb27a20e5651df000978edce6da6803c2bef0bc74540f9548"}, + {file = "pydantic-1.10.13-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84bafe2e60b5e78bc64a2941b4c071a4b7404c5c907f5f5a99b0139781e69ed8"}, + {file = "pydantic-1.10.13-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:bc0898c12f8e9c97f6cd44c0ed70d55749eaf783716896960b4ecce2edfd2d69"}, + {file = "pydantic-1.10.13-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:654db58ae399fe6434e55325a2c3e959836bd17a6f6a0b6ca8107ea0571d2e17"}, + {file = "pydantic-1.10.13-cp310-cp310-win_amd64.whl", hash = "sha256:75ac15385a3534d887a99c713aa3da88a30fbd6204a5cd0dc4dab3d770b9bd2f"}, + {file = "pydantic-1.10.13-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c553f6a156deb868ba38a23cf0df886c63492e9257f60a79c0fd8e7173537653"}, + {file = "pydantic-1.10.13-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5e08865bc6464df8c7d61439ef4439829e3ab62ab1669cddea8dd00cd74b9ffe"}, + {file = "pydantic-1.10.13-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e31647d85a2013d926ce60b84f9dd5300d44535a9941fe825dc349ae1f760df9"}, + {file = "pydantic-1.10.13-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:210ce042e8f6f7c01168b2d84d4c9eb2b009fe7bf572c2266e235edf14bacd80"}, + {file = "pydantic-1.10.13-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:8ae5dd6b721459bfa30805f4c25880e0dd78fc5b5879f9f7a692196ddcb5a580"}, + {file = "pydantic-1.10.13-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f8e81fc5fb17dae698f52bdd1c4f18b6ca674d7068242b2aff075f588301bbb0"}, + {file = "pydantic-1.10.13-cp311-cp311-win_amd64.whl", hash = "sha256:61d9dce220447fb74f45e73d7ff3b530e25db30192ad8d425166d43c5deb6df0"}, + {file = "pydantic-1.10.13-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4b03e42ec20286f052490423682016fd80fda830d8e4119f8ab13ec7464c0132"}, + {file = "pydantic-1.10.13-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f59ef915cac80275245824e9d771ee939133be38215555e9dc90c6cb148aaeb5"}, + {file = "pydantic-1.10.13-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5a1f9f747851338933942db7af7b6ee8268568ef2ed86c4185c6ef4402e80ba8"}, + {file = "pydantic-1.10.13-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:97cce3ae7341f7620a0ba5ef6cf043975cd9d2b81f3aa5f4ea37928269bc1b87"}, + {file = "pydantic-1.10.13-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:854223752ba81e3abf663d685f105c64150873cc6f5d0c01d3e3220bcff7d36f"}, + {file = "pydantic-1.10.13-cp37-cp37m-win_amd64.whl", hash = "sha256:b97c1fac8c49be29486df85968682b0afa77e1b809aff74b83081cc115e52f33"}, + {file = "pydantic-1.10.13-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c958d053453a1c4b1c2062b05cd42d9d5c8eb67537b8d5a7e3c3032943ecd261"}, + {file = "pydantic-1.10.13-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4c5370a7edaac06daee3af1c8b1192e305bc102abcbf2a92374b5bc793818599"}, + {file = "pydantic-1.10.13-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d6f6e7305244bddb4414ba7094ce910560c907bdfa3501e9db1a7fd7eaea127"}, + {file = "pydantic-1.10.13-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d3a3c792a58e1622667a2837512099eac62490cdfd63bd407993aaf200a4cf1f"}, + {file = "pydantic-1.10.13-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c636925f38b8db208e09d344c7aa4f29a86bb9947495dd6b6d376ad10334fb78"}, + {file = "pydantic-1.10.13-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:678bcf5591b63cc917100dc50ab6caebe597ac67e8c9ccb75e698f66038ea953"}, + {file = "pydantic-1.10.13-cp38-cp38-win_amd64.whl", hash = "sha256:6cf25c1a65c27923a17b3da28a0bdb99f62ee04230c931d83e888012851f4e7f"}, + {file = "pydantic-1.10.13-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8ef467901d7a41fa0ca6db9ae3ec0021e3f657ce2c208e98cd511f3161c762c6"}, + {file = 
"pydantic-1.10.13-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:968ac42970f57b8344ee08837b62f6ee6f53c33f603547a55571c954a4225691"}, + {file = "pydantic-1.10.13-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9849f031cf8a2f0a928fe885e5a04b08006d6d41876b8bbd2fc68a18f9f2e3fd"}, + {file = "pydantic-1.10.13-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:56e3ff861c3b9c6857579de282ce8baabf443f42ffba355bf070770ed63e11e1"}, + {file = "pydantic-1.10.13-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f00790179497767aae6bcdc36355792c79e7bbb20b145ff449700eb076c5f96"}, + {file = "pydantic-1.10.13-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:75b297827b59bc229cac1a23a2f7a4ac0031068e5be0ce385be1462e7e17a35d"}, + {file = "pydantic-1.10.13-cp39-cp39-win_amd64.whl", hash = "sha256:e70ca129d2053fb8b728ee7d1af8e553a928d7e301a311094b8a0501adc8763d"}, + {file = "pydantic-1.10.13-py3-none-any.whl", hash = "sha256:b87326822e71bd5f313e7d3bfdc77ac3247035ac10b0c0618bd99dcf95b1e687"}, + {file = "pydantic-1.10.13.tar.gz", hash = "sha256:32c8b48dcd3b2ac4e78b0ba4af3a2c2eb6048cb75202f0ea7b34feb740efc340"}, ] [package.dependencies] @@ -1185,6 +1310,32 @@ urllib3 = ">=1.21.1,<3" socks = ["PySocks (>=1.5.6,!=1.5.7)"] use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] +[[package]] +name = "ruff" +version = "0.0.291" +description = "An extremely fast Python linter, written in Rust." +optional = false +python-versions = ">=3.7" +files = [ + {file = "ruff-0.0.291-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:b97d0d7c136a85badbc7fd8397fdbb336e9409b01c07027622f28dcd7db366f2"}, + {file = "ruff-0.0.291-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:6ab44ea607967171e18aa5c80335237be12f3a1523375fa0cede83c5cf77feb4"}, + {file = "ruff-0.0.291-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a04b384f2d36f00d5fb55313d52a7d66236531195ef08157a09c4728090f2ef0"}, + {file = "ruff-0.0.291-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b727c219b43f903875b7503a76c86237a00d1a39579bb3e21ce027eec9534051"}, + {file = "ruff-0.0.291-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:87671e33175ae949702774071b35ed4937da06f11851af75cd087e1b5a488ac4"}, + {file = "ruff-0.0.291-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:b75f5801547f79b7541d72a211949754c21dc0705c70eddf7f21c88a64de8b97"}, + {file = "ruff-0.0.291-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b09b94efdcd162fe32b472b2dd5bf1c969fcc15b8ff52f478b048f41d4590e09"}, + {file = "ruff-0.0.291-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d5b56bc3a2f83a7a1d7f4447c54d8d3db52021f726fdd55d549ca87bca5d747"}, + {file = "ruff-0.0.291-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13f0d88e5f367b2dc8c7d90a8afdcfff9dd7d174e324fd3ed8e0b5cb5dc9b7f6"}, + {file = "ruff-0.0.291-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:b3eeee1b1a45a247758ecdc3ab26c307336d157aafc61edb98b825cadb153df3"}, + {file = "ruff-0.0.291-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:6c06006350c3bb689765d71f810128c9cdf4a1121fd01afc655c87bab4fb4f83"}, + {file = "ruff-0.0.291-py3-none-musllinux_1_2_i686.whl", hash = "sha256:fd17220611047de247b635596e3174f3d7f2becf63bd56301fc758778df9b629"}, + {file = "ruff-0.0.291-py3-none-musllinux_1_2_x86_64.whl", hash = 
"sha256:5383ba67ad360caf6060d09012f1fb2ab8bd605ab766d10ca4427a28ab106e0b"}, + {file = "ruff-0.0.291-py3-none-win32.whl", hash = "sha256:1d5f0616ae4cdc7a938b493b6a1a71c8a47d0300c0d65f6e41c281c2f7490ad3"}, + {file = "ruff-0.0.291-py3-none-win_amd64.whl", hash = "sha256:8a69bfbde72db8ca1c43ee3570f59daad155196c3fbe357047cd9b77de65f15b"}, + {file = "ruff-0.0.291-py3-none-win_arm64.whl", hash = "sha256:d867384a4615b7f30b223a849b52104214442b5ba79b473d7edd18da3cde22d6"}, + {file = "ruff-0.0.291.tar.gz", hash = "sha256:c61109661dde9db73469d14a82b42a88c7164f731e6a3b0042e71394c1c7ceed"}, +] + [[package]] name = "sentry-sdk" version = "1.26.0" @@ -1327,13 +1478,13 @@ files = [ [[package]] name = "urllib3" -version = "2.0.4" +version = "2.0.5" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.7" files = [ - {file = "urllib3-2.0.4-py3-none-any.whl", hash = "sha256:de7df1803967d2c2a98e4b11bb7d6bd9210474c46e8a0401514e3a42a75ebde4"}, - {file = "urllib3-2.0.4.tar.gz", hash = "sha256:8d22f86aae8ef5e410d4f539fde9ce6b2113a001bb4d189e0aed70642d602b11"}, + {file = "urllib3-2.0.5-py3-none-any.whl", hash = "sha256:ef16afa8ba34a1f989db38e1dbbe0c302e4289a47856990d0682e374563ce35e"}, + {file = "urllib3-2.0.5.tar.gz", hash = "sha256:13abf37382ea2ce6fb744d4dad67838eec857c9f4f57009891805e0b5e123594"}, ] [package.extras] @@ -1551,4 +1702,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = "3.11.*" -content-hash = "39a0305fb619e7bd2dd40069ad096186ffce27138479344c980aa762326bf5cb" +content-hash = "ac2edd2d14fb6f056bfc7db069dd8270c58718e9019a95753c8bdf63055e49e3" diff --git a/pyproject.toml b/pyproject.toml index 9da23df9e..766707f4a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -40,6 +40,9 @@ tornado-httpclient-mock = '0.2.3' sentry-sdk = '1.26.0' aiokafka = '0.8.1' aioresponses = '0.7.4' +mypy = '1.5.1' +black = '23.9.1' +ruff = '0.0.291' [tool.pytest.ini_options] addopts = ''' @@ -47,3 +50,83 @@ addopts = ''' --asyncio-mode=auto -p no:warnings ''' + +[tool.black] +line-length = 120 +skip-string-normalization = true + +[tool.mypy] +ignore_missing_imports = true +disallow_untyped_calls = true +disallow_incomplete_defs = true +check_untyped_defs = true +strict = false +exclude = [ + '.*/api/2.*', +] + + +[tool.ruff] +line-length = 120 +target-version = 'py311' +select = [ + 'F', + 'E', + 'W', +# 'C90', # прикольная хрень но мы не готовы к такому + 'I', + 'N', + 'UP', +# 'ANN', # ебать, неужели это мупи заменяет, с этой хуйни ошибок столько же сколько со всего вместе взятого + 'ASYNC', +# 'S', # говорит lxml нельзя, так что хз как его врубать +# 'BLE', # не готов все голые except Exception рефакторить +# 'FBT', # это рефакторить тоже писос полный + 'B', +# 'A', # в остальных репах я бы врубил но тут чето хз как жить с этим либо noqa + 'COM', + 'C4', +# 'DTZ', # хз вот + 'T10', + 'EM', + 'FA', + 'ISC', + 'ICN', + 'G', + 'T20', + 'PT', +# 'Q', # ну у нас не в почете дабл квоты + 'RSE', +# 'RET', # не, думаю это нахер, иногда читаемость пиздаче +# 'SLF', # в сервисах надо но тут нет + 'SLOT', + 'SIM', + 'TID', + 'TCH', + 'INT', +# 'ARG', # ваще охуенная тема, но тут большинство неправильных +# 'PTH', # нахер, через ос по старинке нормально + 'TD', + 'FIX', +# 'ERA', # врубим но позднее когда дев закончится +# 'PGH', # хорошая тема, но чето не варик тут врубать +# 'PL', # хорошая тема, но чето не варик тут врубать +# 'TRY', # хорошая тема, но чето не варик тут 
diff --git a/tests/conftest.py b/tests/conftest.py
index 3576ee35d..899d9b8e8 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -1,15 +1,21 @@
+from collections.abc import Iterator
+
 import pytest
 
 
 @pytest.fixture(scope='session', autouse=True)
-def teardown_module():
+def _teardown_module() -> Iterator[None]:
     yield
 
     from tests.instances import (
+        frontik_balancer_app,
+        frontik_broken_balancer_app,
+        frontik_broken_config_app,
+        frontik_broken_init_async_app,
         frontik_consul_mock_app,
-        frontik_broken_config_app, frontik_broken_init_async_app,
-        frontik_no_debug_app, frontik_re_app, frontik_test_app,
-        frontik_balancer_app, frontik_broken_balancer_app,
+        frontik_no_debug_app,
+        frontik_re_app,
+        frontik_test_app,
     )
 
     frontik_broken_config_app.stop()
@@ -20,3 +26,12 @@ def teardown_module():
     frontik_balancer_app.stop()
     frontik_broken_balancer_app.stop()
     frontik_consul_mock_app.stop()
+
+
+def pytest_addoption(parser):
+    parser.addoption('--files_for_lint', action='store', default='')
+
+
+@pytest.fixture(scope='session')
+def files_for_lint(pytestconfig):
+    return pytestconfig.getoption('files_for_lint')
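Editor's note: no consumer of the new `--files_for_lint` option appears in this diff; a plausible (purely hypothetical) consumer is a lint test that feeds the supplied paths to the newly added ruff dependency:

import subprocess
import sys


def test_ruff(files_for_lint: str) -> None:
    # Hypothetical usage sketch: lint only the files passed via --files_for_lint,
    # falling back to the whole project when the option is empty.
    paths = files_for_lint.split() if files_for_lint else ['frontik', 'tests']
    result = subprocess.run([sys.executable, '-m', 'ruff', 'check', *paths], capture_output=True, text=True)
    assert result.returncode == 0, result.stdout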
diff --git a/tests/instances.py b/tests/instances.py index 917ac3040..0811d9aba 100644 --- a/tests/instances.py +++ b/tests/instances.py @@ -1,10 +1,12 @@ +from __future__ import annotations + import base64 import json import socket import subprocess import sys import time -from distutils.spawn import find_executable +from typing import TYPE_CHECKING import requests from lxml import etree @@ -13,30 +15,25 @@ from frontik import options from tests import FRONTIK_ROOT -try: - import coverage - USE_COVERAGE = '--with-coverage' in sys.argv -except ImportError: - USE_COVERAGE = False +if TYPE_CHECKING: + from collections.abc import Callable + from types import FunctionType + from typing import Any + + from requests.models import Response FRONTIK_RUN = f'{FRONTIK_ROOT}/frontik-test' TEST_PROJECTS = f'{FRONTIK_ROOT}/tests/projects' -def _run_command(command, port): +def _run_command(command: str, port: int) -> subprocess.Popen: python = sys.executable - - if USE_COVERAGE: - coverage = find_executable('coverage') - executable = f'{python} {coverage} run {command} --port={port}' - else: - executable = f'{python} {command} --port={port}' - + executable = f'{python} {command} --port={port}' return subprocess.Popen(executable.split()) -def find_free_port(from_port=9000, to_port=10000): +def find_free_port(from_port: int = 9000, to_port: int = 10000) -> int: for port in range(from_port, to_port): s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) try: @@ -47,30 +44,32 @@ def find_free_port(from_port=9000, to_port=10000): finally: s.close() else: - raise AssertionError(f'No empty port in range {from_port}..{to_port} for frontik test instance') + msg = f'No empty port in range {from_port}..{to_port} for frontik test instance' + raise AssertionError(msg) return port -def create_basic_auth_header(credentials): - return 'Basic {}'.format(to_unicode(base64.b64encode(utf8(credentials)))) +def create_basic_auth_header(credentials: str) -> str: + return f'Basic {to_unicode(base64.b64encode(utf8(credentials)))}' class FrontikTestInstance: - def __init__(self, command: str, *, allow_to_create_log_files: bool = False): + def __init__(self, command: str, *, allow_to_create_log_files: bool = False) -> None: if not allow_to_create_log_files and options.LOG_DIR_OPTION_NAME in command: - raise Exception('Log to file is prohibited it tests by default. use allow_to_create_log_files if needed') + msg = 'Logging to a file is prohibited in tests by default; use allow_to_create_log_files if needed' + raise Exception(msg) self.command = command - self.popen = None - self.port = None + self.popen: subprocess.Popen = None # type: ignore + self.port: int = None # type: ignore - def start(self): + def start(self) -> None: if self.port: return self.port = find_free_port() self.popen = _run_command(self.command, self.port) - for i in range(50): + for _i in range(50): try: time.sleep(0.1) response = self.get_page('status') @@ -79,61 +78,77 @@ def start(self): except Exception: pass - assert False, 'Failed to start Frontik instance' + msg = 'Failed to start Frontik instance' + raise AssertionError(msg) - def start_with_check(self, check_function): + def start_with_check(self, check_function: Callable) -> None: self.port = find_free_port() self.popen = _run_command(self.command, self.port) check_function(self) - def stop(self): + def stop(self) -> None: if not self.port: return self.popen.terminate() self.popen.wait(300) - self.port = None + self.port = None # type: ignore - def is_alive(self): + def is_alive(self) -> bool: return self.popen.poll() is None - def get_page(self, page, notpl=False, method=requests.get, **kwargs): + def get_page( + self, + page: str, + notpl: bool = False, + method: Callable | FunctionType = requests.get, + **kwargs: Any, + ) -> Response: if not self.port: self.start() url = 'http://127.0.0.1:{port}/{page}{notpl}'.format( port=self.port, page=page.format(port=self.port), notpl=('?' if '?' 
not in page else '&') + 'notpl' if notpl else '', ) # workaround for different versions of requests library if 'auth' in kwargs and requests.__version__ > '1.0': from requests.auth import HTTPBasicAuth + auth = kwargs['auth'] kwargs['auth'] = HTTPBasicAuth(auth[1], auth[2]) kwargs['timeout'] = 4 - return method(url, **kwargs) + return method(url, **kwargs) # type: ignore - def get_page_xml(self, page, notpl=False, method=requests.get, **kwargs): + def get_page_xml( + self, + page: str, + notpl: bool = False, + method: Callable = requests.get, + **kwargs: Any, + ) -> etree.Element: content = utf8(self.get_page(page, notpl=notpl, method=method, **kwargs).content) try: return etree.fromstring(content) except Exception as e: - raise Exception(f'failed to parse xml ({e}): "{content}"') + msg = f'failed to parse xml ({e}): "{content!s}"' + raise Exception(msg) - def get_page_json(self, page, notpl=False, method=requests.get, **kwargs): + def get_page_json(self, page: str, notpl: bool = False, method: Callable = requests.get, **kwargs: Any) -> Any: content = self.get_page_text(page, notpl=notpl, method=method, **kwargs) try: return json.loads(content) except Exception as e: - raise Exception(f'failed to parse json ({e}): "{content}"') + msg = f'failed to parse json ({e}): "{content}"' + raise Exception(msg) - def get_page_text(self, page, notpl=False, method=requests.get, **kwargs): + def get_page_text(self, page: str, notpl: bool = False, method: Callable = requests.get, **kwargs: Any) -> str: return to_unicode(self.get_page(page, notpl=notpl, method=method, **kwargs).content) @@ -141,47 +156,47 @@ def get_page_text(self, page, notpl=False, method=requests.get, **kwargs): frontik_consul_mock_app = FrontikTestInstance( f'{FRONTIK_RUN} --app=tests.projects.consul_mock_app ' - f' --config={TEST_PROJECTS}/frontik_consul_mock.cfg {common_frontik_start_options}' + f' --config={TEST_PROJECTS}/frontik_consul_mock.cfg {common_frontik_start_options}', ) frontik_consul_mock_app.start() frontik_test_app = FrontikTestInstance( f'{FRONTIK_RUN} --app=tests.projects.test_app ' f' --config={TEST_PROJECTS}/frontik_debug.cfg {common_frontik_start_options} ' - f' --consul_port={frontik_consul_mock_app.port}' + f' --consul_port={frontik_consul_mock_app.port}', ) frontik_re_app = FrontikTestInstance( f'{FRONTIK_RUN} --app=tests.projects.re_app ' f' --config={TEST_PROJECTS}/frontik_debug.cfg {common_frontik_start_options} ' - f' --consul_port={frontik_consul_mock_app.port}' + f' --consul_port={frontik_consul_mock_app.port}', ) frontik_no_debug_app = FrontikTestInstance( f'{FRONTIK_RUN} --app=tests.projects.no_debug_app ' f' --config={TEST_PROJECTS}/frontik_no_debug.cfg {common_frontik_start_options} ' - f' --consul_port={frontik_consul_mock_app.port} ' + f' --consul_port={frontik_consul_mock_app.port} ', ) frontik_broken_config_app = FrontikTestInstance( f'{FRONTIK_RUN} --app=tests.projects.broken_config_app ' f' --config={TEST_PROJECTS}/frontik_debug.cfg {common_frontik_start_options} ' - f' --consul_port={frontik_consul_mock_app.port}' + f' --consul_port={frontik_consul_mock_app.port}', ) frontik_broken_init_async_app = FrontikTestInstance( f'{FRONTIK_RUN} --app=tests.projects.broken_async_init_app ' f' --config={TEST_PROJECTS}/frontik_debug.cfg {common_frontik_start_options} ' - f' --consul_port={frontik_consul_mock_app.port}' + f' --consul_port={frontik_consul_mock_app.port}', ) frontik_balancer_app = FrontikTestInstance( f'{FRONTIK_RUN} --app=tests.projects.balancer_app ' f' --config={TEST_PROJECTS}/frontik_no_debug.cfg {common_frontik_start_options} ' - f' --consul_port={frontik_consul_mock_app.port}' + f' --consul_port={frontik_consul_mock_app.port}', ) frontik_broken_balancer_app = FrontikTestInstance( f'{FRONTIK_RUN} --app=tests.projects.broken_balancer_app ' f' --config={TEST_PROJECTS}/frontik_debug.cfg {common_frontik_start_options} ' - f' --consul_port={frontik_consul_mock_app.port}' + f' --consul_port={frontik_consul_mock_app.port}', )
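tests/instances.py is the integration-test harness: importing the module starts the consul mock once and wires its port into every other instance's command line, while the remaining apps boot lazily on first request. A minimal usage sketch built only from the API above (the asserted status code is an assumption):

    from tests.instances import frontik_test_app

    def test_status_page():
        # get_page() calls start() on first use, which polls /status until the app responds
        response = frontik_test_app.get_page('status')
        assert response.status_code == 200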
diff --git a/tests/projects/balancer_app/__init__.py b/tests/projects/balancer_app/__init__.py index a6b74f88f..fa1bcbca5 100644 --- a/tests/projects/balancer_app/__init__.py +++ b/tests/projects/balancer_app/__init__.py @@ -1,17 +1,24 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING + from http_client.balancing import Server from frontik.app import FrontikApplication +if TYPE_CHECKING: + from frontik.handler import PageHandler + -def get_server(handler, type): +def get_server(handler: PageHandler, type: str) -> Server: return Server(f'127.0.0.1:{handler.get_argument(type)}', dc='Test') -def get_server_with_port(port): +def get_server_with_port(port: int) -> Server: return Server(f'127.0.0.1:{port}', dc='Test') -def get_non_listening_server(): +def get_non_listening_server() -> Server: return Server('http://10.0.0.0:12345')
diff --git a/tests/projects/balancer_app/pages/__init__.py b/tests/projects/balancer_app/pages/__init__.py index c95b5e723..b41b9df6d 100644 --- a/tests/projects/balancer_app/pages/__init__.py +++ b/tests/projects/balancer_app/pages/__init__.py @@ -1,13 +1,15 @@ from tornado.web import HTTPError +from frontik.handler import PageHandler -def check_all_servers_occupied(handler, name): + +def check_all_servers_occupied(handler: PageHandler, name: str) -> None: servers = handler.application.upstream_manager.upstreams.get(name).servers if any(server.current_requests == 0 for server in servers): raise HTTPError(500, 'some servers are ignored') -def check_all_requests_done(handler, name): +def check_all_requests_done(handler: PageHandler, name: str) -> None: servers = handler.application.upstream_manager.upstreams.get(name).servers if any(server.current_requests != 0 for server in servers): raise HTTPError(500, 'some servers have unfinished requests')
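The two helpers above encode the invariants every balancer test page below asserts: check_all_servers_occupied while requests are in flight, check_all_requests_done afterwards. The pages all follow one pattern, condensed here as a sketch using only names from this diff (the upstream name and port are illustrative):

    # inside an async get_page() of a test PageHandler -- illustrative names
    upstream = Upstream('example_upstream', {}, [get_server_with_port(12345)])
    self.application.upstream_manager.update_upstream(upstream)

    result = await self.post_url('example_upstream', self.request.path)

    # every registered server must have finished its requests by now
    check_all_requests_done(self, 'example_upstream')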
diff --git a/tests/projects/balancer_app/pages/different_datacenter.py b/tests/projects/balancer_app/pages/different_datacenter.py index ccec71875..2e4a4522a 100644 --- a/tests/projects/balancer_app/pages/different_datacenter.py +++ b/tests/projects/balancer_app/pages/different_datacenter.py @@ -1,12 +1,18 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING + from http_client.balancing import Upstream from http_client.request_response import NoAvailableServerException from tornado.web import HTTPError from frontik import media_types from frontik.handler import PageHandler - from tests.projects.balancer_app import get_server +if TYPE_CHECKING: + from frontik.app import FrontikApplication + class Page(PageHandler): async def get_page(self): @@ -15,8 +21,10 @@ async def get_page(self): normal_server = get_server(self, 'normal') normal_server.datacenter = 'dc2' + self.application: FrontikApplication self.application.upstream_manager.update_upstream( - Upstream('different_datacenter', {}, [free_server, normal_server])) + Upstream('different_datacenter', {}, [free_server, normal_server]), + ) result = await self.post_url('different_datacenter', self.request.path) for server in self.application.upstream_manager.upstreams.get('different_datacenter').servers:
diff --git a/tests/projects/balancer_app/pages/different_datacenter_async.py b/tests/projects/balancer_app/pages/different_datacenter_async.py index bd9e72097..0ce370535 100644 --- a/tests/projects/balancer_app/pages/different_datacenter_async.py +++ b/tests/projects/balancer_app/pages/different_datacenter_async.py @@ -4,7 +4,6 @@ from frontik import media_types from frontik.handler import PageHandler - from tests.projects.balancer_app import get_server @@ -16,7 +15,8 @@ async def get_page(self): normal_server.datacenter = 'dc2' self.application.upstream_manager.update_upstream( - Upstream('different_datacenter', {}, [free_server, normal_server])) + Upstream('different_datacenter', {}, [free_server, normal_server]), + ) result = await self.post_url('different_datacenter', self.request.path)
diff --git a/tests/projects/balancer_app/pages/no_available_backend.py b/tests/projects/balancer_app/pages/no_available_backend.py index 9d50c38f1..cb508d91e 100644 --- a/tests/projects/balancer_app/pages/no_available_backend.py +++ b/tests/projects/balancer_app/pages/no_available_backend.py @@ -2,7 +2,6 @@ from http_client.request_response import NoAvailableServerException from frontik import handler, media_types - from tests.projects.balancer_app.pages import check_all_requests_done @@ -10,7 +9,7 @@ class Page(handler.PageHandler): async def get_page(self): self.application.upstream_manager.update_upstream(Upstream('no_available_backend', {}, [])) - async def request_with_processing(): + async def request_with_processing() -> None: result = await self.post_url('no_available_backend', self.request.path) if result.exc is not None and isinstance(result.exc, NoAvailableServerException): self.text = 'no backend available'
diff --git a/tests/projects/balancer_app/pages/no_available_backend_async.py b/tests/projects/balancer_app/pages/no_available_backend_async.py index 02a46d6f1..df72c5849 100644 --- a/tests/projects/balancer_app/pages/no_available_backend_async.py +++ b/tests/projects/balancer_app/pages/no_available_backend_async.py @@ -2,7 +2,6 @@ from http_client.request_response import NoAvailableServerException from frontik import handler, media_types - from tests.projects.balancer_app.pages import check_all_requests_done
diff --git a/tests/projects/balancer_app/pages/no_retry_error.py b/tests/projects/balancer_app/pages/no_retry_error.py index 6274dfceb..254048f2f 100644 --- a/tests/projects/balancer_app/pages/no_retry_error.py +++ b/tests/projects/balancer_app/pages/no_retry_error.py @@ -1,15 +1,13 @@ from http_client.balancing import Upstream from frontik import handler, media_types - from tests.projects.balancer_app import get_server from tests.projects.balancer_app.pages import check_all_requests_done class Page(handler.PageHandler): async def get_page(self): - self.application.upstream_manager.update_upstream( - Upstream('no_retry_error', {}, [get_server(self, 'broken')])) + self.application.upstream_manager.update_upstream(Upstream('no_retry_error', {}, [get_server(self, 'broken')])) result = await self.post_url('no_retry_error', self.request.path) if result.error and result.status_code == 500:
diff --git a/tests/projects/balancer_app/pages/no_retry_error_async.py b/tests/projects/balancer_app/pages/no_retry_error_async.py index f045e5b16..f3cbf2113 100644 --- a/tests/projects/balancer_app/pages/no_retry_error_async.py +++ b/tests/projects/balancer_app/pages/no_retry_error_async.py @@ -1,7 +1,6 @@ from 
http_client.balancing import Upstream from frontik import handler, media_types - from tests.projects.balancer_app import get_server from tests.projects.balancer_app.pages import check_all_requests_done @@ -9,7 +8,8 @@ class Page(handler.PageHandler): async def get_page(self): self.application.upstream_manager.update_upstream( - Upstream('no_retry_error_async', {}, [get_server(self, 'broken')])) + Upstream('no_retry_error_async', {}, [get_server(self, 'broken')]), + ) result = await self.post_url('no_retry_error_async', self.request.path) if result.failed and result.status_code == 500: diff --git a/tests/projects/balancer_app/pages/no_retry_timeout.py b/tests/projects/balancer_app/pages/no_retry_timeout.py index 992cdb6bd..e90329627 100644 --- a/tests/projects/balancer_app/pages/no_retry_timeout.py +++ b/tests/projects/balancer_app/pages/no_retry_timeout.py @@ -1,8 +1,8 @@ -from http_client.balancing import Upstream from asyncio import TimeoutError -from frontik import handler, media_types +from http_client.balancing import Upstream +from frontik import handler, media_types from tests.projects.balancer_app import get_server from tests.projects.balancer_app.pages import check_all_requests_done @@ -10,7 +10,8 @@ class Page(handler.PageHandler): async def get_page(self): self.application.upstream_manager.update_upstream( - Upstream('no_retry_timeout', {}, [get_server(self, 'broken')])) + Upstream('no_retry_timeout', {}, [get_server(self, 'broken')]), + ) result = await self.post_url('no_retry_timeout', self.request.path, request_timeout=0.2) if result.failed and isinstance(result.exc, TimeoutError): diff --git a/tests/projects/balancer_app/pages/no_retry_timeout_async.py b/tests/projects/balancer_app/pages/no_retry_timeout_async.py index a6fbb115c..2e658e6c2 100644 --- a/tests/projects/balancer_app/pages/no_retry_timeout_async.py +++ b/tests/projects/balancer_app/pages/no_retry_timeout_async.py @@ -1,20 +1,27 @@ -from http_client.balancing import Upstream, UpstreamConfig from asyncio import TimeoutError -from frontik import handler, media_types +from http_client.balancing import Upstream, UpstreamConfig +from frontik import handler, media_types from tests.projects.balancer_app import get_server from tests.projects.balancer_app.pages import check_all_requests_done class Page(handler.PageHandler): async def get_page(self): - self.application.upstream_manager.upstreams['no_retry_timeout_async'] = Upstream('no_retry_timeout_async', - {}, []) + self.application.upstream_manager.upstreams['no_retry_timeout_async'] = Upstream( + 'no_retry_timeout_async', + {}, + [], + ) upstream_config = {Upstream.DEFAULT_PROFILE: UpstreamConfig(max_timeout_tries=2)} self.application.upstream_manager.update_upstream( - Upstream('no_retry_timeout_async', upstream_config, - [get_server(self, 'broken'), get_server(self, 'normal')])) + Upstream( + 'no_retry_timeout_async', + upstream_config, + [get_server(self, 'broken'), get_server(self, 'normal')], + ), + ) result = await self.post_url('no_retry_timeout_async', self.request.path, request_timeout=0.2) diff --git a/tests/projects/balancer_app/pages/profile_with_retry.py b/tests/projects/balancer_app/pages/profile_with_retry.py index 28dfc8cff..a459d9ae5 100644 --- a/tests/projects/balancer_app/pages/profile_with_retry.py +++ b/tests/projects/balancer_app/pages/profile_with_retry.py @@ -12,10 +12,13 @@ async def get_page(self): upstream_config = { Upstream.DEFAULT_PROFILE: UpstreamConfig(slow_start_interval=0), "profile_without_retry": UpstreamConfig(max_tries=1), - 
"profile_with_retry": UpstreamConfig(max_tries=2) + "profile_with_retry": UpstreamConfig(max_tries=2), } - self.application.upstream_manager.upstreams['profile_with_retry'] = Upstream('profile_with_retry', - upstream_config, servers) + self.application.upstream_manager.upstreams['profile_with_retry'] = Upstream( + 'profile_with_retry', + upstream_config, + servers, + ) result = await self.put_url('profile_with_retry', self.request.path, profile="profile_with_retry") diff --git a/tests/projects/balancer_app/pages/profile_without_retry.py b/tests/projects/balancer_app/pages/profile_without_retry.py index 475477b26..28aa1cf49 100644 --- a/tests/projects/balancer_app/pages/profile_without_retry.py +++ b/tests/projects/balancer_app/pages/profile_without_retry.py @@ -11,10 +11,13 @@ async def get_page(self): upstream_config = { Upstream.DEFAULT_PROFILE: UpstreamConfig(slow_start_interval=0), "profile_without_retry": UpstreamConfig(max_tries=1), - "profile_with_retry": UpstreamConfig(max_tries=2) + "profile_with_retry": UpstreamConfig(max_tries=2), } - self.application.upstream_manager.upstreams['profile_without_retry'] = Upstream('profile_without_retry', - upstream_config, servers) + self.application.upstream_manager.upstreams['profile_without_retry'] = Upstream( + 'profile_without_retry', + upstream_config, + servers, + ) result = await self.put_url('profile_without_retry', self.request.path, profile="profile_without_retry") if result.failed or result.response.code == 500: diff --git a/tests/projects/balancer_app/pages/requests_count.py b/tests/projects/balancer_app/pages/requests_count.py index bd1beef58..b83354b9c 100644 --- a/tests/projects/balancer_app/pages/requests_count.py +++ b/tests/projects/balancer_app/pages/requests_count.py @@ -2,26 +2,23 @@ from frontik import media_types from frontik.handler import PageHandler - from tests.projects.balancer_app import get_server from tests.projects.balancer_app.pages import check_all_requests_done, check_all_servers_occupied class Page(PageHandler): async def get_page(self): - self.application.upstream_manager.update_upstream( - Upstream('requests_count', {}, [get_server(self, 'normal')])) + self.application.upstream_manager.update_upstream(Upstream('requests_count', {}, [get_server(self, 'normal')])) self.text = '' - async def request_with_processing(): + async def request_with_processing() -> None: result = await self.post_url('requests_count', self.request.path) self.text = result.data check_all_requests_done(self, 'requests_count') - self.run_task(self.post_url('requests_count', self.request.path)) - self.run_task(self.post_url('requests_count', self.request.path)) - self.application.upstream_manager.update_upstream( - Upstream('requests_count', {}, [get_server(self, 'normal')])) + self.run_task(self.post_url('requests_count', self.request.path)) # type: ignore + self.run_task(self.post_url('requests_count', self.request.path)) # type: ignore + self.application.upstream_manager.update_upstream(Upstream('requests_count', {}, [get_server(self, 'normal')])) self.run_task(request_with_processing()) check_all_servers_occupied(self, 'requests_count') diff --git a/tests/projects/balancer_app/pages/requests_count_async.py b/tests/projects/balancer_app/pages/requests_count_async.py index 7526b72ae..8d40b0c69 100644 --- a/tests/projects/balancer_app/pages/requests_count_async.py +++ b/tests/projects/balancer_app/pages/requests_count_async.py @@ -4,21 +4,22 @@ from frontik import media_types from frontik.handler import PageHandler - from 
tests.projects.balancer_app import get_server from tests.projects.balancer_app.pages import check_all_requests_done, check_all_servers_occupied class Page(PageHandler): async def get_page(self): - self.application.upstream_manager.update_upstream(Upstream('requests_count_async', {}, - [get_server(self, 'normal')])) + self.application.upstream_manager.update_upstream( + Upstream('requests_count_async', {}, [get_server(self, 'normal')]), + ) self.text = '' result1 = self.post_url('requests_count_async', self.request.path) result2 = self.post_url('requests_count_async', self.request.path) - self.application.upstream_manager.update_upstream(Upstream('requests_count_async', {}, - [get_server(self, 'normal')])) + self.application.upstream_manager.update_upstream( + Upstream('requests_count_async', {}, [get_server(self, 'normal')]), + ) result3 = self.post_url('requests_count_async', self.request.path) await asyncio.sleep(0) diff --git a/tests/projects/balancer_app/pages/retry_connect.py b/tests/projects/balancer_app/pages/retry_connect.py index 0fd026b4e..fc9c59d22 100644 --- a/tests/projects/balancer_app/pages/retry_connect.py +++ b/tests/projects/balancer_app/pages/retry_connect.py @@ -4,7 +4,6 @@ from frontik import media_types from frontik.handler import PageHandler from frontik.util import gather_list - from tests.projects.balancer_app import get_server from tests.projects.balancer_app.pages import check_all_requests_done, check_all_servers_occupied @@ -12,13 +11,14 @@ class Page(PageHandler): async def get_page(self): self.application.upstream_manager.update_upstream( - Upstream('retry_connect', {}, [get_server(self, 'free'), get_server(self, 'normal')])) + Upstream('retry_connect', {}, [get_server(self, 'free'), get_server(self, 'normal')]), + ) self.text = '' requests = [ self.post_url('retry_connect', self.request.path), self.post_url('retry_connect', self.request.path), - self.post_url('retry_connect', self.request.path) + self.post_url('retry_connect', self.request.path), ] check_all_servers_occupied(self, 'retry_connect') diff --git a/tests/projects/balancer_app/pages/retry_connect_async.py b/tests/projects/balancer_app/pages/retry_connect_async.py index f0b0a4326..bf1f808f0 100644 --- a/tests/projects/balancer_app/pages/retry_connect_async.py +++ b/tests/projects/balancer_app/pages/retry_connect_async.py @@ -1,22 +1,22 @@ import asyncio + from http_client.balancing import Upstream from tornado.web import HTTPError from frontik import media_types from frontik.handler import PageHandler - from tests.projects.balancer_app import get_server from tests.projects.balancer_app.pages import check_all_requests_done, check_all_servers_occupied class Page(PageHandler): async def get_page(self): - self.application.upstream_manager.update_upstream(Upstream('retry_connect_async', {}, - [get_server(self, 'free'), - get_server(self, 'normal')])) + self.application.upstream_manager.update_upstream( + Upstream('retry_connect_async', {}, [get_server(self, 'free'), get_server(self, 'normal')]), + ) self.text = '' - async def make_request(): + async def make_request() -> None: result = await self.post_url('retry_connect_async', self.request.path) if result.failed or result.data is None: diff --git a/tests/projects/balancer_app/pages/retry_connect_timeout.py b/tests/projects/balancer_app/pages/retry_connect_timeout.py index 9e4be51b3..fe4ca4f6d 100644 --- a/tests/projects/balancer_app/pages/retry_connect_timeout.py +++ b/tests/projects/balancer_app/pages/retry_connect_timeout.py @@ -4,7 +4,6 @@ from 
frontik import media_types from frontik.handler import PageHandler from frontik.util import gather_list - from tests.projects.balancer_app import get_server from tests.projects.balancer_app.pages import check_all_requests_done, check_all_servers_occupied @@ -12,13 +11,14 @@ class Page(PageHandler): async def get_page(self): self.application.upstream_manager.update_upstream( - Upstream('retry_connect_timeout', {}, [get_server(self, 'normal')])) + Upstream('retry_connect_timeout', {}, [get_server(self, 'normal')]), + ) self.text = '' requests = [ self.post_url('retry_connect_timeout', self.request.path), self.post_url('retry_connect_timeout', self.request.path), - self.post_url('retry_connect_timeout', self.request.path) + self.post_url('retry_connect_timeout', self.request.path), ] check_all_servers_occupied(self, 'retry_connect_timeout') diff --git a/tests/projects/balancer_app/pages/retry_connect_timeout_async.py b/tests/projects/balancer_app/pages/retry_connect_timeout_async.py index 345006566..05a62980a 100644 --- a/tests/projects/balancer_app/pages/retry_connect_timeout_async.py +++ b/tests/projects/balancer_app/pages/retry_connect_timeout_async.py @@ -5,18 +5,18 @@ from frontik import media_types from frontik.handler import PageHandler - from tests.projects.balancer_app import get_server from tests.projects.balancer_app.pages import check_all_requests_done, check_all_servers_occupied class Page(PageHandler): async def get_page(self): - self.application.upstream_manager.update_upstream(Upstream('retry_connect_timeout', {}, - [get_server(self, 'normal')])) + self.application.upstream_manager.update_upstream( + Upstream('retry_connect_timeout', {}, [get_server(self, 'normal')]), + ) self.text = '' - async def make_request(): + async def make_request() -> None: result = await self.post_url('retry_connect_timeout', self.request.path) if result.failed or result.data is None: diff --git a/tests/projects/balancer_app/pages/retry_count_limit.py b/tests/projects/balancer_app/pages/retry_count_limit.py index 99d511f00..c3eb9384e 100644 --- a/tests/projects/balancer_app/pages/retry_count_limit.py +++ b/tests/projects/balancer_app/pages/retry_count_limit.py @@ -2,7 +2,6 @@ from frontik.handler import PageHandler from tests.instances import find_free_port - from tests.projects.balancer_app import get_server_with_port @@ -11,8 +10,12 @@ async def get_page(self): upstream = Upstream( 'retry_count_limit', {Upstream.DEFAULT_PROFILE: UpstreamConfig(max_tries=3)}, - [get_server_with_port(find_free_port(11000, 20000)), get_server_with_port(find_free_port(12000, 20000)), - get_server_with_port(find_free_port(13000, 20000)), get_server_with_port(find_free_port(14000, 20000))] + [ + get_server_with_port(find_free_port(11000, 20000)), + get_server_with_port(find_free_port(12000, 20000)), + get_server_with_port(find_free_port(13000, 20000)), + get_server_with_port(find_free_port(14000, 20000)), + ], ) self.application.upstream_manager.update_upstream(upstream) diff --git a/tests/projects/balancer_app/pages/retry_count_limit_async.py b/tests/projects/balancer_app/pages/retry_count_limit_async.py index 20e65bf5b..d1936a685 100644 --- a/tests/projects/balancer_app/pages/retry_count_limit_async.py +++ b/tests/projects/balancer_app/pages/retry_count_limit_async.py @@ -2,7 +2,6 @@ from frontik.handler import PageHandler from tests.instances import find_free_port - from tests.projects.balancer_app import get_server_with_port @@ -11,8 +10,12 @@ async def get_page(self): upstream = Upstream( 'retry_count_limit_async', 
{Upstream.DEFAULT_PROFILE: UpstreamConfig(max_tries=3)}, - [get_server_with_port(find_free_port(11000, 20000)), get_server_with_port(find_free_port(12000, 20000)), - get_server_with_port(find_free_port(13000, 20000)), get_server_with_port(find_free_port(14000, 20000))] + [ + get_server_with_port(find_free_port(11000, 20000)), + get_server_with_port(find_free_port(12000, 20000)), + get_server_with_port(find_free_port(13000, 20000)), + get_server_with_port(find_free_port(14000, 20000)), + ], ) self.application.upstream_manager.update_upstream(upstream) diff --git a/tests/projects/balancer_app/pages/retry_error.py b/tests/projects/balancer_app/pages/retry_error.py index ff1611b47..d0f1b9b3f 100644 --- a/tests/projects/balancer_app/pages/retry_error.py +++ b/tests/projects/balancer_app/pages/retry_error.py @@ -4,7 +4,6 @@ from frontik import media_types from frontik.handler import PageHandler from frontik.util import gather_list - from tests.projects.balancer_app import get_server from tests.projects.balancer_app.pages import check_all_requests_done, check_all_servers_occupied @@ -12,13 +11,14 @@ class Page(PageHandler): async def get_page(self): self.application.upstream_manager.update_upstream( - Upstream('retry_error', {}, [get_server(self, 'broken'), get_server(self, 'normal')])) + Upstream('retry_error', {}, [get_server(self, 'broken'), get_server(self, 'normal')]), + ) self.text = '' requests = [ self.put_url('retry_error', self.request.path), self.put_url('retry_error', self.request.path), - self.put_url('retry_error', self.request.path) + self.put_url('retry_error', self.request.path), ] check_all_servers_occupied(self, 'retry_error') diff --git a/tests/projects/balancer_app/pages/retry_error_async.py b/tests/projects/balancer_app/pages/retry_error_async.py index 3210e36c7..e016ccdc3 100644 --- a/tests/projects/balancer_app/pages/retry_error_async.py +++ b/tests/projects/balancer_app/pages/retry_error_async.py @@ -5,19 +5,18 @@ from frontik import media_types from frontik.handler import PageHandler - from tests.projects.balancer_app import get_server from tests.projects.balancer_app.pages import check_all_requests_done, check_all_servers_occupied class Page(PageHandler): async def get_page(self): - self.application.upstream_manager.update_upstream(Upstream('retry_error_async', {}, - [get_server(self, 'broken'), - get_server(self, 'normal')])) + self.application.upstream_manager.update_upstream( + Upstream('retry_error_async', {}, [get_server(self, 'broken'), get_server(self, 'normal')]), + ) self.text = '' - async def make_request(): + async def make_request() -> None: result = await self.put_url('retry_error_async', self.request.path) if result.failed or result.data is None: diff --git a/tests/projects/balancer_app/pages/retry_non_idempotent_503.py b/tests/projects/balancer_app/pages/retry_non_idempotent_503.py index 6c1d08433..1ef96d054 100644 --- a/tests/projects/balancer_app/pages/retry_non_idempotent_503.py +++ b/tests/projects/balancer_app/pages/retry_non_idempotent_503.py @@ -4,26 +4,23 @@ from frontik import media_types from frontik.handler import PageHandler from frontik.util import gather_list - from tests.projects.balancer_app import get_server from tests.projects.balancer_app.pages import check_all_requests_done class Page(PageHandler): async def get_page(self): - upstream_config = {Upstream.DEFAULT_PROFILE: UpstreamConfig(retry_policy={ - 503: { - "idempotent": "true" - } - })} + upstream_config = {Upstream.DEFAULT_PROFILE: UpstreamConfig(retry_policy={503: {"idempotent": 
"true"}})} self.application.upstream_manager.update_upstream( - Upstream('retry_non_idempotent_503', upstream_config, [get_server(self, 'normal')])) + Upstream('retry_non_idempotent_503', upstream_config, [get_server(self, 'normal')]), + ) self.application.upstream_manager.update_upstream( - Upstream('do_not_retry_non_idempotent_503', {}, [get_server(self, 'broken')])) + Upstream('do_not_retry_non_idempotent_503', {}, [get_server(self, 'broken')]), + ) res1, res2 = await gather_list( self.post_url('retry_non_idempotent_503', self.request.path), - self.post_url('do_not_retry_non_idempotent_503', self.request.path) + self.post_url('do_not_retry_non_idempotent_503', self.request.path), ) if res1.response.error or res1.data is None: diff --git a/tests/projects/balancer_app/pages/retry_non_idempotent_503_async.py b/tests/projects/balancer_app/pages/retry_non_idempotent_503_async.py index 60a28cc25..e89c0c0c2 100644 --- a/tests/projects/balancer_app/pages/retry_non_idempotent_503_async.py +++ b/tests/projects/balancer_app/pages/retry_non_idempotent_503_async.py @@ -5,27 +5,22 @@ from frontik import media_types from frontik.handler import PageHandler - from tests.projects.balancer_app import get_server from tests.projects.balancer_app.pages import check_all_requests_done class Page(PageHandler): async def get_page(self): - upstream_config = {Upstream.DEFAULT_PROFILE: UpstreamConfig(retry_policy={ - 503: { - "idempotent": "true" - } - })} - self.application.upstream_manager.update_upstream(Upstream('retry_non_idempotent_503_async', - upstream_config, - [get_server(self, 'normal')])) - - self.application.upstream_manager.update_upstream(Upstream('do_not_retry_non_idempotent_503_async', - {}, - [get_server(self, 'broken')])) - - async def post_with_retry(): + upstream_config = {Upstream.DEFAULT_PROFILE: UpstreamConfig(retry_policy={503: {"idempotent": "true"}})} + self.application.upstream_manager.update_upstream( + Upstream('retry_non_idempotent_503_async', upstream_config, [get_server(self, 'normal')]), + ) + + self.application.upstream_manager.update_upstream( + Upstream('do_not_retry_non_idempotent_503_async', {}, [get_server(self, 'broken')]), + ) + + async def post_with_retry() -> None: result = await self.post_url('retry_non_idempotent_503_async', self.request.path) if result.failed or result.data is None: @@ -33,7 +28,7 @@ async def post_with_retry(): self.text = result.data - async def post_without_retry(): + async def post_without_retry() -> None: result = await self.post_url('do_not_retry_non_idempotent_503_async', self.request.path) if result.response.code != 503: diff --git a/tests/projects/balancer_app/pages/retry_on_timeout.py b/tests/projects/balancer_app/pages/retry_on_timeout.py index 8a50846dc..accf0d0a9 100644 --- a/tests/projects/balancer_app/pages/retry_on_timeout.py +++ b/tests/projects/balancer_app/pages/retry_on_timeout.py @@ -3,7 +3,6 @@ from frontik import media_types from frontik.handler import PageHandler - from tests.projects.balancer_app import get_server from tests.projects.balancer_app.pages import check_all_requests_done @@ -11,10 +10,16 @@ class Page(PageHandler): async def get_page(self): self.application.upstream_manager.update_upstream( - Upstream('retry_on_timeout', {}, [get_server(self, 'broken'), get_server(self, 'normal')])) - - result = await self.delete_url('retry_on_timeout', self.request.path, connect_timeout=0.1, request_timeout=0.3, - max_timeout_tries=2) + Upstream('retry_on_timeout', {}, [get_server(self, 'broken'), get_server(self, 'normal')]), + ) 
+ + result = await self.delete_url( + 'retry_on_timeout', + self.request.path, + connect_timeout=0.1, + request_timeout=0.3, + max_timeout_tries=2, + ) if result.response.error or result.data is None: raise HTTPError(500) diff --git a/tests/projects/balancer_app/pages/retry_on_timeout_async.py b/tests/projects/balancer_app/pages/retry_on_timeout_async.py index 924104224..fdca6769b 100644 --- a/tests/projects/balancer_app/pages/retry_on_timeout_async.py +++ b/tests/projects/balancer_app/pages/retry_on_timeout_async.py @@ -3,19 +3,23 @@ from frontik import media_types from frontik.handler import PageHandler - from tests.projects.balancer_app import get_server from tests.projects.balancer_app.pages import check_all_requests_done class Page(PageHandler): async def get_page(self): - self.application.upstream_manager.update_upstream(Upstream('retry_on_timeout_async', {}, - [get_server(self, 'broken'), - get_server(self, 'normal')])) - - result = await self.delete_url('retry_on_timeout_async', self.request.path, - connect_timeout=0.1, request_timeout=0.3, max_timeout_tries=2) + self.application.upstream_manager.update_upstream( + Upstream('retry_on_timeout_async', {}, [get_server(self, 'broken'), get_server(self, 'normal')]), + ) + + result = await self.delete_url( + 'retry_on_timeout_async', + self.request.path, + connect_timeout=0.1, + request_timeout=0.3, + max_timeout_tries=2, + ) if result.failed or result.data is None: raise HTTPError(500) diff --git a/tests/projects/balancer_app/pages/slow_start.py b/tests/projects/balancer_app/pages/slow_start.py index ceddc9c85..ecfb6ed43 100644 --- a/tests/projects/balancer_app/pages/slow_start.py +++ b/tests/projects/balancer_app/pages/slow_start.py @@ -1,11 +1,10 @@ import asyncio import time -from http_client.balancing import Upstream, Server, UpstreamConfig +from http_client.balancing import Server, Upstream, UpstreamConfig from frontik import media_types from frontik.handler import PageHandler - from tests.projects.balancer_app import get_server from tests.projects.balancer_app.pages import check_all_requests_done, check_all_servers_occupied @@ -21,21 +20,22 @@ async def get_page(self): server_slow_start = Server('127.0.0.1:12345', weight=5, dc='Test') upstream_config = {Upstream.DEFAULT_PROFILE: UpstreamConfig(slow_start_interval=0.1)} - self.application.upstream_manager.update_upstream( - Upstream('slow_start', upstream_config, [server])) + self.application.upstream_manager.update_upstream(Upstream('slow_start', upstream_config, [server])) self.text = '' requests = [] requests.append(self.post_url('slow_start', self.request.path)) - time.sleep(0.2) + time.sleep(0.2) # noqa: ASYNC101 upstream_config = {Upstream.DEFAULT_PROFILE: UpstreamConfig(slow_start_interval=1)} self.application.upstream_manager.update_upstream( - Upstream('slow_start', upstream_config, [same_server, server_slow_start])) - requests.append(self.post_url('slow_start', self.request.path)) - time.sleep(1) - requests.append(self.post_url('slow_start', self.request.path)) + Upstream('slow_start', upstream_config, [same_server, server_slow_start]), + ) requests.append(self.post_url('slow_start', self.request.path)) + time.sleep(1) # noqa: ASYNC101 + requests.extend( + (self.post_url('slow_start', self.request.path), self.post_url('slow_start', self.request.path)), + ) check_all_servers_occupied(self, 'slow_start') diff --git a/tests/projects/balancer_app/pages/slow_start_async.py b/tests/projects/balancer_app/pages/slow_start_async.py index b245d14a4..baec2a090 100644 --- 
a/tests/projects/balancer_app/pages/slow_start_async.py +++ b/tests/projects/balancer_app/pages/slow_start_async.py @@ -1,10 +1,9 @@ import asyncio -from http_client.balancing import Upstream, Server, UpstreamConfig +from http_client.balancing import Server, Upstream, UpstreamConfig from frontik import media_types from frontik.handler import PageHandler - from tests.projects.balancer_app import get_server from tests.projects.balancer_app.pages import check_all_requests_done @@ -17,11 +16,10 @@ async def get_page(self): server_slow_start = Server('127.0.0.1:12345', weight=5, dc='Test') upstream_config = {Upstream.DEFAULT_PROFILE: UpstreamConfig(slow_start_interval=0.1)} - self.application.upstream_manager.update_upstream( - Upstream('slow_start_async', upstream_config, [server])) + self.application.upstream_manager.update_upstream(Upstream('slow_start_async', upstream_config, [server])) self.text = '' - async def make_request(delay: float = 0): + async def make_request(delay: float = 0) -> None: await asyncio.sleep(delay) result = await self.post_url('slow_start_async', self.request.path) self.text = result.data @@ -32,7 +30,8 @@ async def make_request(delay: float = 0): upstream_config = {Upstream.DEFAULT_PROFILE: UpstreamConfig(slow_start_interval=1)} self.application.upstream_manager.update_upstream( - Upstream('slow_start_async', upstream_config, [server, server_slow_start])) + Upstream('slow_start_async', upstream_config, [server, server_slow_start]), + ) request2 = self.run_task(make_request()) request3 = self.run_task(make_request()) diff --git a/tests/projects/balancer_app/pages/speculative_no_retry.py b/tests/projects/balancer_app/pages/speculative_no_retry.py index 661f730aa..5c74d45b5 100644 --- a/tests/projects/balancer_app/pages/speculative_no_retry.py +++ b/tests/projects/balancer_app/pages/speculative_no_retry.py @@ -7,13 +7,19 @@ class Page(PageHandler): async def get_page(self): - self.application.upstream_manager.upstreams['speculative_no_retry'] = Upstream('speculative_no_retry', - {}, []) + self.application.upstream_manager.upstreams['speculative_no_retry'] = Upstream('speculative_no_retry', {}, []) self.application.upstream_manager.update_upstream( - Upstream('speculative_no_retry', {}, [get_server(self, 'broken'), get_server(self, 'normal')])) + Upstream('speculative_no_retry', {}, [get_server(self, 'broken'), get_server(self, 'normal')]), + ) - result = await self.post_url('speculative_no_retry', self.request.path, connect_timeout=0.1, - request_timeout=0.5, max_timeout_tries=1, speculative_timeout_pct=0.10) + result = await self.post_url( + 'speculative_no_retry', + self.request.path, + connect_timeout=0.1, + request_timeout=0.5, + max_timeout_tries=1, + speculative_timeout_pct=0.10, + ) if result.failed or result.status_code == 500: self.text = 'no retry' diff --git a/tests/projects/balancer_app/pages/speculative_retry.py b/tests/projects/balancer_app/pages/speculative_retry.py index 134fbb67f..b45473653 100644 --- a/tests/projects/balancer_app/pages/speculative_retry.py +++ b/tests/projects/balancer_app/pages/speculative_retry.py @@ -10,10 +10,17 @@ class Page(PageHandler): async def get_page(self): self.application.upstream_manager.upstreams['speculative_retry'] = Upstream('speculative_no_retry', {}, []) self.application.upstream_manager.update_upstream( - Upstream('speculative_retry', {}, [get_server(self, 'broken'), get_server(self, 'normal')])) + Upstream('speculative_retry', {}, [get_server(self, 'broken'), get_server(self, 'normal')]), + ) - result = await 
self.put_url('speculative_retry', self.request.path, connect_timeout=0.1, - request_timeout=0.5, max_timeout_tries=1, speculative_timeout_pct=0.1) + result = await self.put_url( + 'speculative_retry', + self.request.path, + connect_timeout=0.1, + request_timeout=0.5, + max_timeout_tries=1, + speculative_timeout_pct=0.1, + ) if result.failed or result.data is None: raise HTTPError(500) diff --git a/tests/projects/broken_async_init_app/__init__.py b/tests/projects/broken_async_init_app/__init__.py index 3624e71eb..779d91481 100644 --- a/tests/projects/broken_async_init_app/__init__.py +++ b/tests/projects/broken_async_init_app/__init__.py @@ -3,4 +3,5 @@ class TestApplication(FrontikApplication): def init(self): - raise Exception('broken async init') + msg = 'broken async init' + raise Exception(msg) diff --git a/tests/projects/broken_balancer_app/pages/no_retry_timeout.py b/tests/projects/broken_balancer_app/pages/no_retry_timeout.py index 97ec96cbf..db5ceb704 100644 --- a/tests/projects/broken_balancer_app/pages/no_retry_timeout.py +++ b/tests/projects/broken_balancer_app/pages/no_retry_timeout.py @@ -5,9 +5,7 @@ class Page(frontik.handler.PageHandler): async def post_page(self): - self.add_timeout( - time.time() + 2, self.finish_group.add(self.check_finished(self.timeout_callback)) - ) + self.add_timeout(time.time() + 2, self.finish_group.add(self.check_finished(self.timeout_callback))) def timeout_callback(self): self.text = 'result' diff --git a/tests/projects/broken_balancer_app/pages/retry_on_timeout.py b/tests/projects/broken_balancer_app/pages/retry_on_timeout.py index b02a68cff..1d11fe73c 100644 --- a/tests/projects/broken_balancer_app/pages/retry_on_timeout.py +++ b/tests/projects/broken_balancer_app/pages/retry_on_timeout.py @@ -5,9 +5,7 @@ class Page(handler.PageHandler): async def delete_page(self): - self.add_timeout( - time.time() + 2, self.finish_group.add(self.check_finished(self.timeout_callback)) - ) + self.add_timeout(time.time() + 2, self.finish_group.add(self.check_finished(self.timeout_callback))) def timeout_callback(self): self.add_header('Content-Type', media_types.TEXT_PLAIN) diff --git a/tests/projects/broken_config_app/__init__.py b/tests/projects/broken_config_app/__init__.py index 71db6b551..ea33fe163 100644 --- a/tests/projects/broken_config_app/__init__.py +++ b/tests/projects/broken_config_app/__init__.py @@ -4,4 +4,5 @@ class TestApplication(FrontikApplication): def application_config(self): from tests.projects.broken_config_app import config + return config diff --git a/tests/projects/broken_config_app/config.py b/tests/projects/broken_config_app/config.py index 24ceb78be..6ce17db3b 100644 --- a/tests/projects/broken_config_app/config.py +++ b/tests/projects/broken_config_app/config.py @@ -1 +1,2 @@ -raise Exception('broken config') +msg = 'broken config' +raise Exception(msg) diff --git a/tests/projects/broken_integration/target_app/__init__.py b/tests/projects/broken_integration/target_app/__init__.py index bdc69b244..81871635c 100644 --- a/tests/projects/broken_integration/target_app/__init__.py +++ b/tests/projects/broken_integration/target_app/__init__.py @@ -4,11 +4,10 @@ class TestApplication(FrontikApplication): - async def init(self): await super().init() await self.broken_future() - async def broken_future(self): + async def broken_future(self) -> None: await asyncio.sleep(1) raise Exception diff --git a/tests/projects/consul_mock_app/__init__.py b/tests/projects/consul_mock_app/__init__.py index 7286bdaf1..49b345358 100644 --- 
a/tests/projects/consul_mock_app/__init__.py +++ b/tests/projects/consul_mock_app/__init__.py @@ -7,14 +7,15 @@ class TestApplication(FrontikApplication): - def __init__(self, **settings): super().__init__(**settings) - self.registration_call_counter = Counter() - self.deregistration_call_counter = Counter() + self.registration_call_counter: Counter = Counter() + self.deregistration_call_counter: Counter = Counter() def application_urls(self): - return [(r'^/v1/agent/service/deregister/([a-zA-Z\-_0-9\.:\-]+)', deregister.Page), - (r'^/v1/kv/host/([a-zA-Z\-_0-9\.:\-]+)/weight', weight.Page), - (r'^/v1/kv/upstream', upstream.Page), - *super().application_urls(), ] + return [ + (r'^/v1/agent/service/deregister/([a-zA-Z\-_0-9\.:\-]+)', deregister.Page), + (r'^/v1/kv/host/([a-zA-Z\-_0-9\.:\-]+)/weight', weight.Page), + (r'^/v1/kv/upstream', upstream.Page), + *super().application_urls(), + ] diff --git a/tests/projects/consul_mock_app/pages/call_deregistration_stat.py b/tests/projects/consul_mock_app/pages/call_deregistration_stat.py index 9d445171d..c04588c15 100644 --- a/tests/projects/consul_mock_app/pages/call_deregistration_stat.py +++ b/tests/projects/consul_mock_app/pages/call_deregistration_stat.py @@ -1,9 +1,16 @@ +from __future__ import annotations + import json +from typing import TYPE_CHECKING from frontik.handler import PageHandler +if TYPE_CHECKING: + from tests.projects.consul_mock_app import TestApplication + class Page(PageHandler): async def get_page(self): self.set_status(200) + self.application: TestApplication self.text = json.dumps(self.application.deregistration_call_counter) diff --git a/tests/projects/consul_mock_app/pages/call_deregistration_stat_async.py b/tests/projects/consul_mock_app/pages/call_deregistration_stat_async.py index 9d445171d..c04588c15 100644 --- a/tests/projects/consul_mock_app/pages/call_deregistration_stat_async.py +++ b/tests/projects/consul_mock_app/pages/call_deregistration_stat_async.py @@ -1,9 +1,16 @@ +from __future__ import annotations + import json +from typing import TYPE_CHECKING from frontik.handler import PageHandler +if TYPE_CHECKING: + from tests.projects.consul_mock_app import TestApplication + class Page(PageHandler): async def get_page(self): self.set_status(200) + self.application: TestApplication self.text = json.dumps(self.application.deregistration_call_counter) diff --git a/tests/projects/consul_mock_app/pages/call_registration_stat.py b/tests/projects/consul_mock_app/pages/call_registration_stat.py index fa99a6c91..5f12903c1 100644 --- a/tests/projects/consul_mock_app/pages/call_registration_stat.py +++ b/tests/projects/consul_mock_app/pages/call_registration_stat.py @@ -1,9 +1,16 @@ +from __future__ import annotations + import json +from typing import TYPE_CHECKING from frontik.handler import PageHandler +if TYPE_CHECKING: + from tests.projects.consul_mock_app import TestApplication + class Page(PageHandler): async def get_page(self): self.set_status(200) + self.application: TestApplication self.text = json.dumps(self.application.registration_call_counter) diff --git a/tests/projects/consul_mock_app/pages/call_registration_stat_async.py b/tests/projects/consul_mock_app/pages/call_registration_stat_async.py index fa99a6c91..5f12903c1 100644 --- a/tests/projects/consul_mock_app/pages/call_registration_stat_async.py +++ b/tests/projects/consul_mock_app/pages/call_registration_stat_async.py @@ -1,9 +1,16 @@ +from __future__ import annotations + import json +from typing import TYPE_CHECKING from frontik.handler import PageHandler 
+if TYPE_CHECKING: + from tests.projects.consul_mock_app import TestApplication + class Page(PageHandler): async def get_page(self): self.set_status(200) + self.application: TestApplication self.text = json.dumps(self.application.registration_call_counter)
diff --git a/tests/projects/consul_mock_app/pages/deregister.py b/tests/projects/consul_mock_app/pages/deregister.py index 6039dde56..86a8f36bb 100644 --- a/tests/projects/consul_mock_app/pages/deregister.py +++ b/tests/projects/consul_mock_app/pages/deregister.py @@ -1,7 +1,18 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING + from frontik.handler import PageHandler +if TYPE_CHECKING: + from tests.projects.consul_mock_app import TestApplication + class Page(PageHandler): + def __init__(self, *args, **kwargs): + self.application: TestApplication + super().__init__(*args, **kwargs) + async def get_page(self): self.set_status(200) self.application.deregistration_call_counter['get_page'] += 1
diff --git a/tests/projects/consul_mock_app/pages/v1/agent/service/register.py b/tests/projects/consul_mock_app/pages/v1/agent/service/register.py index f0ab8ac5a..4d2bcaf02 100644 --- a/tests/projects/consul_mock_app/pages/v1/agent/service/register.py +++ b/tests/projects/consul_mock_app/pages/v1/agent/service/register.py @@ -1,7 +1,18 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING + from frontik.handler import PageHandler +if TYPE_CHECKING: + from tests.projects.consul_mock_app import TestApplication + class Page(PageHandler): + def __init__(self, *args, **kwargs): + self.application: TestApplication + super().__init__(*args, **kwargs) + async def get_page(self): self.set_status(200) self.application.registration_call_counter['get_page'] += 1
diff --git a/tests/projects/re_app/__init__.py b/tests/projects/re_app/__init__.py index fef76d40e..7f3150ac1 100644 --- a/tests/projects/re_app/__init__.py +++ b/tests/projects/re_app/__init__.py @@ -1,9 +1,8 @@ import jinja2 from frontik.app import FrontikApplication -from frontik.util import get_abs_path from frontik.options import options - +from frontik.util import get_abs_path from tests.projects.re_app import config from tests.projects.re_app.pages import handler_404
diff --git a/tests/projects/re_app/config.py b/tests/projects/re_app/config.py index 61d94ef9a..d0f065136 100644 --- a/tests/projects/re_app/config.py +++ b/tests/projects/re_app/config.py @@ -1,13 +1,10 @@ from frontik.routing import FileMappingRouter - from tests.projects.re_app import pages -from tests.projects.re_app.pages import handler_404 -from tests.projects.re_app.pages import id_param -from tests.projects.re_app.pages import simple +from tests.projects.re_app.pages import handler_404, id_param, simple urls = [ ('/id/(?P<id>[^/]+)', id_param.Page), ('/id/(?P<id1>[^/]+)/(?P<id2>[^/]+)', handler_404.Page, 'two_ids'), ('/not_simple', simple.Page), - ('(?!/not_matching_regex)', FileMappingRouter(pages)) + ('(?!/not_matching_regex)', FileMappingRouter(pages)), ]
diff --git a/tests/projects/re_app/pages/reverse_url.py b/tests/projects/re_app/pages/reverse_url.py index 0fd8cca0a..a4a0fc170 100644 --- a/tests/projects/re_app/pages/reverse_url.py +++ b/tests/projects/re_app/pages/reverse_url.py @@ -9,8 +9,10 @@ async def get_page(self): if self.get_argument('fail_missing', 'false') != 'false': self.text = self.reverse_url('missing', 1) - self.json.put({ - 'args': self.reverse_url('two_ids', 1, 2), - 'args_and_kwargs': self.reverse_url('two_ids', 2, id1=1), - 'kwargs': self.reverse_url('two_ids', id1=1, 
id2=2), - }) + self.json.put( + { + 'args': self.reverse_url('two_ids', 1, 2), + 'args_and_kwargs': self.reverse_url('two_ids', 2, id1=1), + 'kwargs': self.reverse_url('two_ids', id1=1, id2=2), + }, + ) diff --git a/tests/projects/test_app/__init__.py b/tests/projects/test_app/__init__.py index df3346374..433e4924b 100644 --- a/tests/projects/test_app/__init__.py +++ b/tests/projects/test_app/__init__.py @@ -1,14 +1,14 @@ import json import logging import re + import tornado.routing from frontik.app import FrontikApplication -from frontik.loggers import bootstrap_logger from frontik.handler import RedirectHandler +from frontik.loggers import bootstrap_logger from frontik.options import options from frontik.routing import _get_application_404_handler_delegate - from tests.projects.test_app import config @@ -26,9 +26,7 @@ async def init(self): self.http_client_factory.request_engine_builder.kafka_producer = TestKafkaProducer() def application_urls(self): - return [ - (r'^/redirect', RedirectRouter()), - ] + super().application_urls() + return [('^/redirect', RedirectRouter()), *super().application_urls()] def application_config(self): return config @@ -38,17 +36,15 @@ def application_version_xml(self): class TestKafkaProducer: - def __init__(self): - self.data = [] + def __init__(self) -> None: + self.data: list[dict] = [] self.request_id = None async def send(self, topic, value=None): json_data = json.loads(value) if json_data['requestId'] == self.request_id: - self.data.append({ - topic: json_data - }) + self.data.append({topic: json_data}) def enable_for_request_id(self, request_id): self.request_id = request_id @@ -70,5 +66,5 @@ def find_handler(self, request, **kwargs): permanent = False else: return _get_application_404_handler_delegate(application, request) - redirect_arguments = dict(url='/finish?foo=bar', permanent=permanent) + redirect_arguments = {'url': '/finish?foo=bar', 'permanent': permanent} return application.get_handler_delegate(request, RedirectHandler, redirect_arguments) diff --git a/tests/projects/test_app/pages/api/2/store.py b/tests/projects/test_app/pages/api/2/store.py index 7aa3e2d7b..533ffcf85 100644 --- a/tests/projects/test_app/pages/api/2/store.py +++ b/tests/projects/test_app/pages/api/2/store.py @@ -13,9 +13,7 @@ async def post_page(self): Page.exceptions.append(sentry_event) async def get_page(self): - self.json.put({ - 'exceptions': Page.exceptions - }) + self.json.put({'exceptions': Page.exceptions}) async def delete_page(self): Page.exceptions = [] diff --git a/tests/projects/test_app/pages/arguments.py b/tests/projects/test_app/pages/arguments.py index fb6ca52e6..ef90e2660 100644 --- a/tests/projects/test_app/pages/arguments.py +++ b/tests/projects/test_app/pages/arguments.py @@ -3,6 +3,4 @@ class Page(frontik.handler.PageHandler): async def get_page(self): - self.json.put({ - 'тест': self.get_argument('param') - }) + self.json.put({'тест': self.get_argument('param')}) diff --git a/tests/projects/test_app/pages/arguments_async.py b/tests/projects/test_app/pages/arguments_async.py index fb6ca52e6..ef90e2660 100644 --- a/tests/projects/test_app/pages/arguments_async.py +++ b/tests/projects/test_app/pages/arguments_async.py @@ -3,6 +3,4 @@ class Page(frontik.handler.PageHandler): async def get_page(self): - self.json.put({ - 'тест': self.get_argument('param') - }) + self.json.put({'тест': self.get_argument('param')}) diff --git a/tests/projects/test_app/pages/async_group/group.py b/tests/projects/test_app/pages/async_group/group.py index 
5324088e2..1bf791c8a 100644 --- a/tests/projects/test_app/pages/async_group/group.py +++ b/tests/projects/test_app/pages/async_group/group.py @@ -8,19 +8,26 @@ async def get_page(self): fail_callback = self.get_argument('fail_callback', 'false') == 'true' fail_request = self.get_argument('fail_request', 'false') == 'true' - async def _async_callback(): + async def _async_callback() -> None: """Assert that callback is executed asynchronously""" assert ensure_callback_is_async - async def put_json_data(): - result = await gather_dict({ - '1': self.post_url(self.request.host, self.request.path + '?data=1'), - '2': self.post_url(self.request.host, self.request.path + '?data=2'), - '3': self.post_url(self.request.host, self.request.path, - data={'data': '3' if not fail_request else None}, parse_on_error=False) - }) + async def put_json_data() -> None: + result = await gather_dict( + { + '1': self.post_url(self.request.host, self.request.path + '?data=1'), + '2': self.post_url(self.request.host, self.request.path + '?data=2'), + '3': self.post_url( + self.request.host, + self.request.path, + data={'data': '3' if not fail_request else None}, + parse_on_error=False, + ), + }, + ) if fail_callback: - raise Exception("I'm dying!") + msg = "I'm dying!" + raise Exception(msg) self.json.put({'final_callback_called': True}) self.json.put(result) @@ -30,15 +37,11 @@ async def put_json_data(): self.run_task(_async_callback()) ensure_callback_is_async = True - async def group_task(): - result = await self.group({ - '4': self.post_url(self.request.host, self.request.path + '?data=4') - }) + async def group_task() -> None: + result = await self.group({'4': self.post_url(self.request.host, self.request.path + '?data=4')}) self.json.put({'future_callback_result': result['4'].data['4']}) self.run_task(group_task()) async def post_page(self): - self.json.put({ - self.get_argument('data'): 'yay' - }) + self.json.put({self.get_argument('data'): 'yay'}) diff --git a/tests/projects/test_app/pages/async_group/group_async.py b/tests/projects/test_app/pages/async_group/group_async.py index ee3524383..04295a44b 100644 --- a/tests/projects/test_app/pages/async_group/group_async.py +++ b/tests/projects/test_app/pages/async_group/group_async.py @@ -1,3 +1,5 @@ +from typing import Any + import frontik.handler @@ -6,10 +8,11 @@ async def get_page(self): fail_callback = self.get_argument('fail_callback', 'false') == 'true' fail_request = self.get_argument('fail_request', 'false') == 'true' - async def task(): + async def task() -> Any: request_result = await self.post_url(self.request.host, self.request.path + '?data=2') if fail_callback: - raise Exception("I'm dying!") + msg = "I'm dying!" 
+ raise Exception(msg) return request_result.data self.json.put( @@ -17,20 +20,20 @@ async def task(): { '1': self.post_url(self.request.host, self.request.path + '?data=1'), '2': task(), - '3': self.post_url(self.request.host, self.request.path, - data={'data': '3' if not fail_request else None}, parse_on_error=False) - } - ) + '3': self.post_url( + self.request.host, + self.request.path, + data={'data': '3' if not fail_request else None}, + parse_on_error=False, + ), + }, + ), ) - result = await self.group({ - '4': self.post_url(self.request.host, self.request.path + '?data=4') - }) + result = await self.group({'4': self.post_url(self.request.host, self.request.path + '?data=4')}) self.json.put({'future_callback_result': result['4'].data['4']}) self.json.put({'final_callback_called': True}) async def post_page(self): - self.json.put({ - self.get_argument('data'): 'yay' - }) + self.json.put({self.get_argument('data'): 'yay'}) diff --git a/tests/projects/test_app/pages/async_group/group_with_futures.py b/tests/projects/test_app/pages/async_group/group_with_futures.py index 0af103913..845db9278 100644 --- a/tests/projects/test_app/pages/async_group/group_with_futures.py +++ b/tests/projects/test_app/pages/async_group/group_with_futures.py @@ -6,19 +6,21 @@ class Page(frontik.handler.PageHandler): async def get_page(self): - future = Future() + future: Future = Future() if self.get_argument('failed_future', 'false') == 'true': future.set_exception(Exception('failed future exception')) else: future.set_result({'1': 'yay'}) - another_future = Future() + another_future: Future = Future() another_future.set_result({'2': 'yay'}) - result = await gather_dict({ - '1': future, - '2': another_future, - }) + result = await gather_dict( + { + '1': future, + '2': another_future, + }, + ) self.json.put({'final_callback_called': True}) self.json.put(result) diff --git a/tests/projects/test_app/pages/async_group/group_with_futures_async.py b/tests/projects/test_app/pages/async_group/group_with_futures_async.py index 2b3e33d60..8817c8e22 100644 --- a/tests/projects/test_app/pages/async_group/group_with_futures_async.py +++ b/tests/projects/test_app/pages/async_group/group_with_futures_async.py @@ -5,14 +5,14 @@ class Page(frontik.handler.PageHandler): async def get_page(self): - future = Future() + future: Future = Future() if self.get_argument('failed_future', 'false') == 'true': future.set_exception(Exception('failed future exception')) else: future.set_result({'1': 'yay'}) - another_future = Future() + another_future: Future = Future() another_future.set_result({'2': 'yay'}) self.json.put( @@ -20,6 +20,6 @@ async def get_page(self): { '1': future, '2': another_future, - } - ) + }, + ), ) diff --git a/tests/projects/test_app/pages/async_group/not_waited_failed_requests.py b/tests/projects/test_app/pages/async_group/not_waited_failed_requests.py index 0961f5f56..47322d784 100644 --- a/tests/projects/test_app/pages/async_group/not_waited_failed_requests.py +++ b/tests/projects/test_app/pages/async_group/not_waited_failed_requests.py @@ -2,7 +2,7 @@ class Page(PageHandler): - data = {} + data: dict = {} async def get_page(self): if self.request.method == 'HEAD': @@ -21,7 +21,7 @@ async def get_page(self): self.json.put(self.data) self.data = {} - async def head_page(self): + async def head_page(self) -> None: self._record_failed_request({'head_failed': True}) async def post_page(self): @@ -33,6 +33,7 @@ async def put_page(self): async def delete_page(self): self._record_failed_request({'delete_failed': 
True}) - def _record_failed_request(self, data): + def _record_failed_request(self, data: dict) -> None: Page.data.update(data) - raise ValueError('Some error') + msg = 'Some error' + raise ValueError(msg) diff --git a/tests/projects/test_app/pages/async_group/not_waited_failed_requests_async.py b/tests/projects/test_app/pages/async_group/not_waited_failed_requests_async.py index 0961f5f56..47322d784 100644 --- a/tests/projects/test_app/pages/async_group/not_waited_failed_requests_async.py +++ b/tests/projects/test_app/pages/async_group/not_waited_failed_requests_async.py @@ -2,7 +2,7 @@ class Page(PageHandler): - data = {} + data: dict = {} async def get_page(self): if self.request.method == 'HEAD': @@ -21,7 +21,7 @@ async def get_page(self): self.json.put(self.data) self.data = {} - async def head_page(self): + async def head_page(self) -> None: self._record_failed_request({'head_failed': True}) async def post_page(self): @@ -33,6 +33,7 @@ async def put_page(self): async def delete_page(self): self._record_failed_request({'delete_failed': True}) - def _record_failed_request(self, data): + def _record_failed_request(self, data: dict) -> None: Page.data.update(data) - raise ValueError('Some error') + msg = 'Some error' + raise ValueError(msg) diff --git a/tests/projects/test_app/pages/async_group/not_waited_requests.py b/tests/projects/test_app/pages/async_group/not_waited_requests.py index 77bf7a923..27b0d5cd5 100644 --- a/tests/projects/test_app/pages/async_group/not_waited_requests.py +++ b/tests/projects/test_app/pages/async_group/not_waited_requests.py @@ -4,7 +4,7 @@ class Page(PageHandler): - data = {} + data: dict = {} async def get_page(self): if not self.data: @@ -18,12 +18,12 @@ async def get_page(self): self.data = {} def finish(self, chunk=None): - super(Page, self).finish(chunk) + super().finish(chunk) if self.request.method == 'GET': # HTTP requests with waited=False can be made after handler is finished - asyncio.create_task(self.put_url(self.request.host, self.request.path, waited=False)) + asyncio.create_task(self.put_url(self.request.host, self.request.path, waited=False)) # type: ignore - async def coro(self): + async def coro(self) -> None: await self.post_url(self.request.host, self.request.path, waited=False) # HTTP requests with waited=True are aborted after handler is finished @@ -41,6 +41,6 @@ async def put_page(self): async def delete_page(self): self.record_request({'delete_made': True}) - def record_request(self, data): + def record_request(self, data: dict) -> None: self.json.put(data) Page.data.update(data) diff --git a/tests/projects/test_app/pages/async_group/not_waited_requests_async.py b/tests/projects/test_app/pages/async_group/not_waited_requests_async.py index 00a90df40..7dd408e1d 100644 --- a/tests/projects/test_app/pages/async_group/not_waited_requests_async.py +++ b/tests/projects/test_app/pages/async_group/not_waited_requests_async.py @@ -4,7 +4,7 @@ class Page(PageHandler): - data = {} + data: dict = {} async def get_page(self): if not self.data: @@ -18,12 +18,12 @@ async def get_page(self): self.data = {} def finish(self, chunk=None): - super(Page, self).finish(chunk) + super().finish(chunk) if self.request.method == 'GET': # HTTP requests with waited=False can be made after handler is finished - asyncio.create_task(self.put_url(self.request.host, self.request.path, waited=False)) + asyncio.create_task(self.put_url(self.request.host, self.request.path, waited=False)) # type: ignore - async def coro(self): + async def coro(self) -> None: await 
self.post_url(self.request.host, self.request.path, waited=False) # HTTP requests with waited=True are aborted after handler is finished @@ -41,6 +41,6 @@ async def put_page(self): async def delete_page(self): self.record_request({'delete_made': True}) - def record_request(self, data): + def record_request(self, data: dict) -> None: self.json.put(data) Page.data.update(data) diff --git a/tests/projects/test_app/pages/broken_workflow.py b/tests/projects/test_app/pages/broken_workflow.py index ee0569b97..31d4aff2b 100644 --- a/tests/projects/test_app/pages/broken_workflow.py +++ b/tests/projects/test_app/pages/broken_workflow.py @@ -15,7 +15,7 @@ def cb(*args, **kw): results = await gather_list( self.get_url(f'http://localhost:{port}', '/page/simple/'), self.get_url(f'http://localhost:{port}', '/page/simple/'), - self.get_url(f'http://localhost:{port}', '/page/simple/') + self.get_url(f'http://localhost:{port}', '/page/simple/'), ) for res in results: cb(res) diff --git a/tests/projects/test_app/pages/debug.py b/tests/projects/test_app/pages/debug.py index 09398c52a..3f2227bbd 100644 --- a/tests/projects/test_app/pages/debug.py +++ b/tests/projects/test_app/pages/debug.py @@ -7,9 +7,11 @@ class Page(handler.PageHandler): async def get_page(self): self.log.debug('debug: starting debug page') - def _exception_trace(): - def _inner(): - raise ValueError('Testing an exception юникод') + def _exception_trace() -> None: + def _inner() -> None: + msg = 'Testing an exception юникод' + raise ValueError(msg) + _inner() try: @@ -44,11 +46,7 @@ def _inner(): async def post_page(self): self.log.debug('this page returns json') - self.json.put({ - 'param1': 'value', - 'param2': 'тест', - 'тест': 'value' - }) + self.json.put({'param1': 'value', 'param2': 'тест', 'тест': 'value'}) async def put_page(self): content_type = self.get_argument('type') @@ -66,4 +64,4 @@ async def put_page(self): self.text = 'document.body.write("Привет")' elif content_type == 'text': self.set_header('Content-Type', media_types.TEXT_PLAIN) - self.text = 'привет charset'.encode('utf-8') + self.text = 'привет charset'.encode() diff --git a/tests/projects/test_app/pages/error_on_import.py b/tests/projects/test_app/pages/error_on_import.py index f53efa3f4..876fc1320 100644 --- a/tests/projects/test_app/pages/error_on_import.py +++ b/tests/projects/test_app/pages/error_on_import.py @@ -1 +1,2 @@ -raise ValueError('error on import') +msg = 'error on import' +raise ValueError(msg) diff --git a/tests/projects/test_app/pages/error_yield.py b/tests/projects/test_app/pages/error_yield.py index 3f921c50f..61a4ec8ed 100644 --- a/tests/projects/test_app/pages/error_yield.py +++ b/tests/projects/test_app/pages/error_yield.py @@ -1,14 +1,14 @@ import frontik.handler -async def some_async_function(handler): +async def some_async_function(handler: frontik.handler.PageHandler) -> float: await handler.post_url(handler.request.host, handler.request.path) return 1 / 0 class Page(frontik.handler.PageHandler): async def get_page(self): - self.finish_group.add_future(some_async_function(self)) + self.finish_group.add_future(some_async_function(self)) # type: ignore async def post_page(self): self.text = 'result' diff --git a/tests/projects/test_app/pages/fail_fast/__init__.py b/tests/projects/test_app/pages/fail_fast/__init__.py index f570a47bc..3afeba919 100644 --- a/tests/projects/test_app/pages/fail_fast/__init__.py +++ b/tests/projects/test_app/pages/fail_fast/__init__.py @@ -5,9 +5,7 @@ @preprocessor def get_page_preprocessor(handler): - 
handler.json.put({ - 'preprocessor': True - }) + handler.json.put({'preprocessor': True}) class Page(PageHandler): @@ -18,14 +16,19 @@ async def get_page(self): if self.get_argument('return_none', 'false') == 'true': return - results = await gather_dict({ - 'get': self.get_url(self.request.host, self.request.path, data={'return_none': 'true'}, fail_fast=True), - 'post': self.post_url(self.request.host, self.request.path, data={'param': 'post'}), - 'put': self.put_url( - self.request.host, self.request.path + '?code=401', fail_fast=fail_fast, parse_on_error=True - ), - 'delete': self.delete_url(self.request.host, self.request.path, data={'invalid_dict_value': 'true'}), - }) + results = await gather_dict( + { + 'get': self.get_url(self.request.host, self.request.path, data={'return_none': 'true'}, fail_fast=True), + 'post': self.post_url(self.request.host, self.request.path, data={'param': 'post'}), + 'put': self.put_url( + self.request.host, + self.request.path + '?code=401', + fail_fast=fail_fast, + parse_on_error=True, + ), + 'delete': self.delete_url(self.request.host, self.request.path, data={'invalid_dict_value': 'true'}), + }, + ) assert results['post'].status_code == 200 assert results['put'].status_code == 401 @@ -35,7 +38,8 @@ async def get_page(self): def get_page_fail_fast(self, failed_future): if self.get_argument('exception_in_fail_fast', 'false') == 'true': - raise Exception('Exception in fail_fast') + msg = 'Exception in fail_fast' + raise Exception(msg) self.json.replace({'fail_fast': True}) self.set_status(403) @@ -43,18 +47,19 @@ def get_page_fail_fast(self, failed_future): async def post_page(self): if self.get_argument('fail_fast_default', 'false') == 'true': - results = await gather_dict({ - 'e': self.put_url( - self.request.host, '{}?code={}'.format(self.request.path, self.get_argument('code')), - fail_fast=True - ) - }) + results = await gather_dict( + { + 'e': self.put_url( + self.request.host, + '{}?code={}'.format(self.request.path, self.get_argument('code')), + fail_fast=True, + ), + }, + ) self.json.put(results) else: - self.json.put({ - 'POST': self.get_argument('param') - }) + self.json.put({'POST': self.get_argument('param')}) async def put_page(self): # Testing parse_on_error=True diff --git a/tests/projects/test_app/pages/fail_fast/future.py b/tests/projects/test_app/pages/fail_fast/future.py index e6980ab63..96ef60f62 100644 --- a/tests/projects/test_app/pages/fail_fast/future.py +++ b/tests/projects/test_app/pages/fail_fast/future.py @@ -9,14 +9,12 @@ class Page(PageHandler): async def get_page(self): fail_future = self.get_argument('fail_future', 'false') == 'true' - results = await gather_dict({ - 'future': self.get_future('future_result', exception=fail_future) - }) + results = await gather_dict({'future': self.get_future('future_result', exception=fail_future)}) self.json.put(results) - def get_future(self, result, exception=False): - future = Future() + def get_future(self, result: str, exception: bool = False) -> Future: + future: Future = Future() def _finish_future(): if exception: diff --git a/tests/projects/test_app/pages/fail_fast/with_postprocessors.py b/tests/projects/test_app/pages/fail_fast/with_postprocessors.py index c54d0aed0..5b5d63fb2 100644 --- a/tests/projects/test_app/pages/fail_fast/with_postprocessors.py +++ b/tests/projects/test_app/pages/fail_fast/with_postprocessors.py @@ -1,6 +1,6 @@ from tornado.web import HTTPError -from frontik.handler import PageHandler, HTTPErrorWithPostprocessors +from frontik.handler import 
HTTPErrorWithPostprocessors, PageHandler class Page(PageHandler): @@ -10,7 +10,7 @@ async def get_page(self): def get_page_fail_fast(self, failed_future): self.json.put({'error': 'some_error'}) - raise HTTPErrorWithPostprocessors() + raise HTTPErrorWithPostprocessors async def post_page(self): raise HTTPError(403) diff --git a/tests/projects/test_app/pages/finish.py b/tests/projects/test_app/pages/finish.py index 45d6e42a5..ccd823dab 100644 --- a/tests/projects/test_app/pages/finish.py +++ b/tests/projects/test_app/pages/finish.py @@ -12,6 +12,7 @@ async def get_page(self): self.set_status(code) if throw: - raise Finish('success') + msg = 'success' + raise Finish(msg) else: self.finish('success') diff --git a/tests/projects/test_app/pages/finish_with_postprocessors.py b/tests/projects/test_app/pages/finish_with_postprocessors.py index b33282ac0..2eddd2fc2 100644 --- a/tests/projects/test_app/pages/finish_with_postprocessors.py +++ b/tests/projects/test_app/pages/finish_with_postprocessors.py @@ -16,7 +16,7 @@ def pp(handler): async def get_page(self): content_type = self.get_argument('type') - async def fail_request(): + async def fail_request() -> None: await self.post_url(self.request.host, self.request.path) raise HTTPError(500) @@ -32,7 +32,7 @@ async def fail_request(): self.doc.put(etree.Element('ok')) self.set_xsl('simple.xsl') - raise FinishWithPostprocessors() + raise FinishWithPostprocessors async def post_page(self): pass diff --git a/tests/projects/test_app/pages/handler/delete.py b/tests/projects/test_app/pages/handler/delete.py index 7d44e2d80..24bdd7601 100644 --- a/tests/projects/test_app/pages/handler/delete.py +++ b/tests/projects/test_app/pages/handler/delete.py @@ -13,6 +13,4 @@ async def post_page(self): self.json.put(result.data) async def delete_page(self): - self.json.put({ - 'delete': self.get_argument('data') - }) + self.json.put({'delete': self.get_argument('data')}) diff --git a/tests/projects/test_app/pages/handler/json.py b/tests/projects/test_app/pages/handler/json.py index f3fb7ade0..c96045732 100644 --- a/tests/projects/test_app/pages/handler/json.py +++ b/tests/projects/test_app/pages/handler/json.py @@ -2,7 +2,7 @@ class Page(frontik.handler.PageHandler): - def _page_handler(self): + def _page_handler(self) -> None: self.text = self.get_body_argument('foo') async def post_page(self): diff --git a/tests/projects/test_app/pages/handler/json_optional_args.py b/tests/projects/test_app/pages/handler/json_optional_args.py index 3fbeb6fea..a0c154610 100644 --- a/tests/projects/test_app/pages/handler/json_optional_args.py +++ b/tests/projects/test_app/pages/handler/json_optional_args.py @@ -2,7 +2,7 @@ class Page(frontik.handler.PageHandler): - def _page_handler(self): + def _page_handler(self) -> None: self.text = self.get_body_argument('foo', 'baz') async def post_page(self): diff --git a/tests/projects/test_app/pages/http_client/long_page_request.py b/tests/projects/test_app/pages/http_client/long_page_request.py index 94c7edb62..3fa158f5b 100644 --- a/tests/projects/test_app/pages/http_client/long_page_request.py +++ b/tests/projects/test_app/pages/http_client/long_page_request.py @@ -8,7 +8,7 @@ async def get_page(self): result = await self.post_url(self.request.host, self.request.path, request_timeout=0.5) self.request_callback(result.data, result.failed) - def request_callback(self, xml, error): + def request_callback(self, xml: str, error: bool) -> None: self.json.put({'error_received': bool(error)}) async def post_page(self): diff --git 
a/tests/projects/test_app/pages/http_client/post_url.py b/tests/projects/test_app/pages/http_client/post_url.py index 087210e09..678cfb950 100644 --- a/tests/projects/test_app/pages/http_client/post_url.py +++ b/tests/projects/test_app/pages/http_client/post_url.py @@ -2,8 +2,9 @@ import frontik.handler from frontik.util import any_to_bytes, any_to_unicode +from typing import Any -FIELDS = { +FIELDS: dict[str, Any] = { 'fielda': 'hello', 'fieldb': '', 'field3': 'None', @@ -13,7 +14,7 @@ 'field7': ['1', '3', 'jiji', bytes([1, 2, 3])] } -FILES = { +FILES: dict[str, list] = { 'field9': [{'filename': 'file0', 'body': b'\x10\x20\x30'}], 'field10': [ {'filename': 'file1', 'body': b'\x01\x02\x03'}, diff --git a/tests/projects/test_app/pages/json_page.py b/tests/projects/test_app/pages/json_page.py index f3d392816..f73fec83f 100644 --- a/tests/projects/test_app/pages/json_page.py +++ b/tests/projects/test_app/pages/json_page.py @@ -5,6 +5,7 @@ class Page(handler.PageHandler): def prepare(self): if self.get_argument('custom_render', 'false') == 'true': + def jinja_context_provider(handler): return { 'req1': {'result': 'custom1'}, @@ -20,7 +21,7 @@ async def get_page(self): requests = { 'req1': self.post_url(self.request.host, self.request.path, data={'param': 1}), - 'req2': self.post_url(self.request.host, self.request.path, data={'param': 2, 'invalid': invalid_json}) + 'req2': self.post_url(self.request.host, self.request.path, data={'param': 2, 'invalid': invalid_json}), } data = await gather_dict(requests) @@ -34,9 +35,7 @@ async def post_page(self): invalid_json = self.get_argument('invalid', 'false') == 'true' if not invalid_json: - self.json.put({ - 'result': self.get_argument('param') - }) + self.json.put({'result': self.get_argument('param')}) else: self.set_header('Content-Type', media_types.APPLICATION_JSON) self.text = '{"result": FAIL}' diff --git a/tests/projects/test_app/pages/kafka.py b/tests/projects/test_app/pages/kafka.py index af0322f4f..ab5e57953 100644 --- a/tests/projects/test_app/pages/kafka.py +++ b/tests/projects/test_app/pages/kafka.py @@ -8,7 +8,7 @@ async def get_page(self): request_engine_builder = self.application.http_client_factory.request_engine_builder request_engine_builder.kafka_producer.enable_for_request_id(self.request_id) - await self.post_url(self.request.host, self.request.uri) + await self.post_url(self.request.host, self.request.uri) # type: ignore await asyncio.sleep(0.1) self.json.put(*request_engine_builder.kafka_producer.disable_and_get_data()) diff --git a/tests/projects/test_app/pages/log.py b/tests/projects/test_app/pages/log.py index ed741bf12..5d54d071a 100644 --- a/tests/projects/test_app/pages/log.py +++ b/tests/projects/test_app/pages/log.py @@ -11,7 +11,8 @@ async def get_page(self): self.log.info('info') try: - raise Exception('test') + msg = 'test' + raise Exception(msg) except Exception: self.log.exception('exception') self.log.error('error', stack_info=True) diff --git a/tests/projects/test_app/pages/mandatory_headers.py b/tests/projects/test_app/pages/mandatory_headers.py index 775ae79a1..fed9a3177 100644 --- a/tests/projects/test_app/pages/mandatory_headers.py +++ b/tests/projects/test_app/pages/mandatory_headers.py @@ -5,7 +5,6 @@ class Page(PageHandler): async def get_page(self): - if self.get_argument('test_mandatory_headers', None) is not None: self.set_mandatory_header('TEST_HEADER', 'TEST_HEADER_VALUE') self.set_mandatory_cookie('TEST_COOKIE', 'TEST_HEADER_COOKIE') diff --git 
a/tests/projects/test_app/pages/module_not_found_error_on_import.py b/tests/projects/test_app/pages/module_not_found_error_on_import.py index 194822ae4..e69de29bb 100644 --- a/tests/projects/test_app/pages/module_not_found_error_on_import.py +++ b/tests/projects/test_app/pages/module_not_found_error_on_import.py @@ -1 +0,0 @@ -import tests.non.existing.module diff --git a/tests/projects/test_app/pages/module_starting_same_as_page_not_found_error_on_import.py b/tests/projects/test_app/pages/module_starting_same_as_page_not_found_error_on_import.py index 273c5a99e..e69de29bb 100644 --- a/tests/projects/test_app/pages/module_starting_same_as_page_not_found_error_on_import.py +++ b/tests/projects/test_app/pages/module_starting_same_as_page_not_found_error_on_import.py @@ -1 +0,0 @@ -import non.existing.module diff --git a/tests/projects/test_app/pages/preprocessors/__init__.py b/tests/projects/test_app/pages/preprocessors/__init__.py index d5a3186df..ac2ed7fee 100644 --- a/tests/projects/test_app/pages/preprocessors/__init__.py +++ b/tests/projects/test_app/pages/preprocessors/__init__.py @@ -1,5 +1,6 @@ import asyncio import time +from collections.abc import Callable from tornado.concurrent import Future @@ -7,7 +8,7 @@ from frontik.preprocessors import preprocessor -def pp0(name): +def pp0(name: str) -> Callable: @preprocessor def pp(handler): handler.run.append(name) @@ -19,13 +20,13 @@ def pp(handler): async def pp1(handler): handler.run.append('pp1-before') - ready_future = Future() + ready_future: Future = Future() ready_future.set_result('pp1-between') result = await ready_future handler.run.append(result) - wait_future = Future() + wait_future: Future = Future() handler.add_timeout(time.time() + 0.1, lambda: wait_future.set_result('pp1-after')) result = await wait_future @@ -34,9 +35,9 @@ async def pp1(handler): @preprocessor async def pp2(handler): - future = Future() + future: Future = Future() - async def put_request(): + async def put_request() -> None: res = await handler.put_url(handler.request.host, handler.request.path) handler.json.put({'put_request_finished': True}) future.set_result(res.data) @@ -61,10 +62,8 @@ class Page(PageHandler): def prepare(self): super().prepare() - self.run = [] - self.json.put({ - 'run': self.run - }) + self.run: list[str] = [] + self.json.put({'run': self.run}) self.add_postprocessor(self.postprocessor) diff --git a/tests/projects/test_app/pages/preprocessors/aborted.py b/tests/projects/test_app/pages/preprocessors/aborted.py index da1db1f98..d8d6abdf1 100644 --- a/tests/projects/test_app/pages/preprocessors/aborted.py +++ b/tests/projects/test_app/pages/preprocessors/aborted.py @@ -11,7 +11,7 @@ def pp_before(handler): @preprocessor async def pp(handler): - async def post_request(): + async def post_request() -> None: await handler.put_url(handler.request.host, handler.request.path) handler.json.put({'put_request_finished': True}) @@ -44,10 +44,8 @@ class Page(PageHandler): def prepare(self): super().prepare() - self.run = [] - self.json.put({ - 'run': self.run - }) + self.run: list = [] + self.json.put({'run': self.run}) self.add_postprocessor(lambda handler: handler.json.put({'postprocessor': True})) diff --git a/tests/projects/test_app/pages/preprocessors/aborted_nonblocking_group.py b/tests/projects/test_app/pages/preprocessors/aborted_nonblocking_group.py index d2becee73..37592760e 100644 --- a/tests/projects/test_app/pages/preprocessors/aborted_nonblocking_group.py +++ 
b/tests/projects/test_app/pages/preprocessors/aborted_nonblocking_group.py @@ -9,7 +9,7 @@ def pp1(handler): @preprocessor def pp2(handler): - async def pp2_coro(): + async def pp2_coro() -> None: await handler.post_url(handler.request.host, handler.request.uri + '&from=pp') if handler.get_argument('finish', None): @@ -17,7 +17,7 @@ async def pp2_coro(): handler.finish('DONE_IN_PP') elif handler.get_argument('abort', None): - raise FinishWithPostprocessors() + raise FinishWithPostprocessors handler.add_preprocessor_future(pp2_coro()) diff --git a/tests/projects/test_app/pages/preprocessors/preprocessor_future_return.py b/tests/projects/test_app/pages/preprocessors/preprocessor_future_return.py index 9539b9af8..57cdb0f81 100644 --- a/tests/projects/test_app/pages/preprocessors/preprocessor_future_return.py +++ b/tests/projects/test_app/pages/preprocessors/preprocessor_future_return.py @@ -1,8 +1,8 @@ +from tornado.concurrent import Future + from frontik.handler import PageHandler from frontik.preprocessors import preprocessor -from tornado.concurrent import Future - @preprocessor async def pp1(handler): @@ -15,9 +15,7 @@ async def pp1(handler): @preprocessor async def pp2(handler): await handler.future - handler.json.put({ - 'test': handler.future_result - }) + handler.json.put({'test': handler.future_result}) class Page(PageHandler): diff --git a/tests/projects/test_app/pages/preprocessors/preprocessor_futures.py b/tests/projects/test_app/pages/preprocessors/preprocessor_futures.py index dd5a47506..d317598f2 100644 --- a/tests/projects/test_app/pages/preprocessors/preprocessor_futures.py +++ b/tests/projects/test_app/pages/preprocessors/preprocessor_futures.py @@ -1,4 +1,5 @@ import time +from collections.abc import Callable from tornado.concurrent import Future @@ -6,7 +7,7 @@ from frontik.preprocessors import preprocessor -def waiting_preprocessor(sleep_time_sec, preprocessor_name, add_preprocessor_future): +def waiting_preprocessor(sleep_time_sec: float, preprocessor_name: str, add_preprocessor_future: bool) -> Callable: @preprocessor def pp(handler): def _put_to_completed(): @@ -14,7 +15,7 @@ def _put_to_completed(): handler.completed_preprocessors.append(preprocessor_name) wait_future.set_result(preprocessor_name) - wait_future = Future() + wait_future: Future = Future() handler.add_timeout(time.time() + sleep_time_sec, _put_to_completed) if add_preprocessor_future: @@ -30,17 +31,17 @@ def add_preprocessor(): def _done(_): handler.add_timeout( - time.time() + 0.2, handler.finish_group.add(add_preprocessor, handler._handle_request_exception) + time.time() + 0.2, + handler.finish_group.add(add_preprocessor, handler._handle_request_exception), ) - future = Future() + future: Future = Future() handler.add_future(future, handler.finish_group.add(_done)) future.set_result(None) await future class Page(PageHandler): - @waiting_preprocessor(0.7, "should_finish_after_page_finish", False) @waiting_preprocessor(0.5, "should_finish_third", True) @waiting_preprocessor(0.1, "should_finish_first", False) diff --git a/tests/projects/test_app/pages/preprocessors/priority_preprocessors.py b/tests/projects/test_app/pages/preprocessors/priority_preprocessors.py index 69f44735b..9214500df 100644 --- a/tests/projects/test_app/pages/preprocessors/priority_preprocessors.py +++ b/tests/projects/test_app/pages/preprocessors/priority_preprocessors.py @@ -1,5 +1,5 @@ from frontik.handler import PageHandler -from frontik.preprocessors import preprocessor, make_preprocessors_names_list +from frontik.preprocessors 
import make_preprocessors_names_list, preprocessor @preprocessor @@ -24,14 +24,14 @@ def pp3(handler): class Page(PageHandler): preprocessors = [pp0] - _priority_preprocessor_names = make_preprocessors_names_list([ - pp2, pp1 - ]) + _priority_preprocessor_names = make_preprocessors_names_list([pp2, pp1]) @pp1 @pp3 @pp2 async def get_page(self): - self.json.put({ - 'order': self.called_preprocessors, - }) + self.json.put( + { + 'order': self.called_preprocessors, # type: ignore + }, + ) diff --git a/tests/projects/test_app/pages/preprocessors/was_async_preprocessor_called.py b/tests/projects/test_app/pages/preprocessors/was_async_preprocessor_called.py index f648367e3..cb1c1bc6f 100644 --- a/tests/projects/test_app/pages/preprocessors/was_async_preprocessor_called.py +++ b/tests/projects/test_app/pages/preprocessors/was_async_preprocessor_called.py @@ -28,9 +28,11 @@ class Page(PageHandler): @pp1 @pp2 async def get_page(self): - self.json.put({ - 'pp0': self.was_preprocessor_called(pp0), - 'pp1': self.was_preprocessor_called(pp1), - 'pp2': self.was_preprocessor_called(pp2), - 'pp3': self.was_preprocessor_called(pp3), - }) + self.json.put( + { + 'pp0': self.was_preprocessor_called(pp0), + 'pp1': self.was_preprocessor_called(pp1), + 'pp2': self.was_preprocessor_called(pp2), + 'pp3': self.was_preprocessor_called(pp3), + }, + ) diff --git a/tests/projects/test_app/pages/preprocessors/was_preprocessor_called.py b/tests/projects/test_app/pages/preprocessors/was_preprocessor_called.py index f8910f01a..2f3e7d1c9 100644 --- a/tests/projects/test_app/pages/preprocessors/was_preprocessor_called.py +++ b/tests/projects/test_app/pages/preprocessors/was_preprocessor_called.py @@ -28,9 +28,11 @@ class Page(PageHandler): @pp1 @pp2 async def get_page(self): - self.json.put({ - 'pp0': self.was_preprocessor_called(pp0), - 'pp1': self.was_preprocessor_called(pp1), - 'pp2': self.was_preprocessor_called(pp2), - 'pp3': self.was_preprocessor_called(pp3), - }) + self.json.put( + { + 'pp0': self.was_preprocessor_called(pp0), + 'pp1': self.was_preprocessor_called(pp1), + 'pp2': self.was_preprocessor_called(pp2), + 'pp3': self.was_preprocessor_called(pp3), + }, + ) diff --git a/tests/projects/test_app/pages/request_context.py b/tests/projects/test_app/pages/request_context.py index 3eeaebf91..a3101d8a0 100644 --- a/tests/projects/test_app/pages/request_context.py +++ b/tests/projects/test_app/pages/request_context.py @@ -1,3 +1,4 @@ +from collections.abc import Callable from concurrent.futures import ThreadPoolExecutor from functools import partial @@ -11,7 +12,7 @@ def _callback(name, handler, *args): class Page(PageHandler): async def get_page(self): - def _waited_callback(name): + def _waited_callback(name: str) -> Callable: return self.finish_group.add(partial(_callback, name, self)) self.json.put({'page': request_context.get_handler_name()}) @@ -22,13 +23,13 @@ def _waited_callback(name): self.run_task(self.run_coroutine()) - future = self.post_url(self.request.host, self.request.uri) + future = self.post_url(self.request.host, self.request.uri) # type: ignore self.add_future(future, _waited_callback('future')) - async def run_coroutine(self): + async def run_coroutine(self) -> None: self.json.put({'coroutine_before_yield': request_context.get_handler_name()}) - await self.post_url(self.request.host, self.request.uri) + await self.post_url(self.request.host, self.request.uri) # type: ignore self.json.put({'coroutine_after_yield': request_context.get_handler_name()}) diff --git 
a/tests/projects/test_app/pages/sentry_error.py b/tests/projects/test_app/pages/sentry_error.py index 1e6dcd173..f3208ebcb 100644 --- a/tests/projects/test_app/pages/sentry_error.py +++ b/tests/projects/test_app/pages/sentry_error.py @@ -15,7 +15,8 @@ async def get_page(self): sentry_sdk.set_user({'real_ip': ip}) sentry_sdk.set_extra('extra_key', extra) - raise Exception('My_sentry_exception') + msg = 'My_sentry_exception' + raise Exception(msg) async def post_page(self): raise HTTPError(500, 'my_HTTPError') diff --git a/tests/projects/test_app/pages/test_exception_text.py b/tests/projects/test_app/pages/test_exception_text.py index e054bae5e..da1cf55c6 100644 --- a/tests/projects/test_app/pages/test_exception_text.py +++ b/tests/projects/test_app/pages/test_exception_text.py @@ -4,15 +4,15 @@ class Page(PageHandler): async def get_page(self): - async def bad_post_requests(): + async def bad_post_requests() -> None: results = await gather_list( self.post_url(self.request.host, self.request.path), self.post_url(self.request.host, self.request.path), self.post_url(self.request.host, self.request.path), - self.post_url(self.request.host, self.request.path) + self.post_url(self.request.host, self.request.path), ) for _ in results: - assert False + raise AssertionError self.run_task(bad_post_requests()) diff --git a/tests/projects/test_app/pages/validate_arguments.py b/tests/projects/test_app/pages/validate_arguments.py index 93d634091..fc7fce795 100644 --- a/tests/projects/test_app/pages/validate_arguments.py +++ b/tests/projects/test_app/pages/validate_arguments.py @@ -1,12 +1,11 @@ -from typing import Optional +from pydantic import validator from frontik.handler import PageHandler from frontik.validator import BaseValidationModel, Validators -from pydantic import validator class CustomModel(BaseValidationModel): - string: Optional[str] + string: str | None @validator('string') @classmethod @@ -30,23 +29,27 @@ async def get_page(self): list_int = self.get_validated_argument('list', Validators.LIST_INT, array=True) string = self.get_str_argument('string', path_safe=not is_custom_model) - self.json.put({ - 'list': list_int, - 'string': string, - 'str_arg_with_default': empty_default_str, - 'int_arg_with_default': empty_default_int, - 'str_arg': empty_str, - 'none_float': none_float is None - }) + self.json.put( + { + 'list': list_int, + 'string': string, + 'str_arg_with_default': empty_default_str, + 'int_arg_with_default': empty_default_int, + 'str_arg': empty_str, + 'none_float': none_float is None, + }, + ) async def post_page(self): str_body_arg = self.get_str_argument('str_argument', 'default', from_body=True) int_body_arg = self.get_int_argument('int_argument', 0, from_body=True) - self.json.put({ - 'str_body_arg': str_body_arg, - 'int_body_arg': int_body_arg, - }) + self.json.put( + { + 'str_body_arg': str_body_arg, + 'int_body_arg': int_body_arg, + }, + ) async def put_page(self): - self.get_str_argument('str_arg', 3) + self.get_str_argument('str_arg', 3) # type: ignore diff --git a/tests/projects/test_app/pages/write_after_finish.py b/tests/projects/test_app/pages/write_after_finish.py index 3f1d797d8..50b2d0138 100644 --- a/tests/projects/test_app/pages/write_after_finish.py +++ b/tests/projects/test_app/pages/write_after_finish.py @@ -21,16 +21,12 @@ async def _pp(cls, handler): # create race condition between postprocessors if handler.counter == 1: await asyncio.sleep(0.1) - handler.json.put({ - 'postprocessor_completed': True - }) + handler.json.put({'postprocessor_completed': 
True}) async def get_page(self): - await self.post_url(self.request.host, self.request.uri) + await self.post_url(self.request.host, self.request.uri) # type: ignore # test that postprocessors are scheduled only once self.finish_with_postprocessors() async def post_page(self): - self.json.put({ - 'counter': self.counter_static - }) + self.json.put({'counter': self.counter_static}) diff --git a/tests/projects/test_app/pages/write_error.py b/tests/projects/test_app/pages/write_error.py index 1e94cfe00..98eda0305 100644 --- a/tests/projects/test_app/pages/write_error.py +++ b/tests/projects/test_app/pages/write_error.py @@ -3,12 +3,14 @@ class Page(frontik.handler.PageHandler): async def get_page(self): - raise Exception('exception in handler') + msg = 'exception in handler' + raise Exception(msg) def write_error(self, status_code=500, **kwargs): self.json.put({'write_error': True}) if self.get_argument('fail_write_error', 'false') == 'true': - raise Exception('exception in write_error') + msg = 'exception in write_error' + raise Exception(msg) self.finish_with_postprocessors() diff --git a/tests/test_arguments.py b/tests/test_arguments.py index adb209a88..23a9bc8f5 100644 --- a/tests/test_arguments.py +++ b/tests/test_arguments.py @@ -1,3 +1,4 @@ +from typing import Any from urllib.parse import urlencode import requests @@ -6,9 +7,8 @@ class TestJsonResponse: - - def setup_method(self): - self.query_args = { + def setup_method(self) -> None: + self.query_args: dict[str, Any] = { 'list': [1, 2], 'string': 'safestring', 'str_arg': '', @@ -17,11 +17,10 @@ def setup_method(self): } def test_validation(self): - self.query_args.update(int_arg=0) get_data = frontik_test_app.get_page_json( f'validate_arguments?{urlencode(self.query_args, doseq=True)}', - notpl=True + notpl=True, ) assert get_data['list'] == [1, 2] @@ -51,7 +50,7 @@ def test_arg_validation_raises_for_empty_value_with_no_default(self): assert response.status_code == 400 - def test_arg_validation_raises_for_default_of_incorrect_type(self): + def test_arg_validation_raises_for_default_of_incorrect_type(self) -> None: response = frontik_test_app.get_page('validate_arguments?str_arg=test', method=requests.put, notpl=True) assert response.status_code == 500 @@ -62,7 +61,7 @@ def test_validation_model(self): data = frontik_test_app.get_page_json( f'validate_arguments?{urlencode(self.query_args, doseq=True)}', - notpl=True + notpl=True, ) assert data['list'] == [1, 2] diff --git a/tests/test_async.py b/tests/test_async.py index a1c28ea5e..1bea2b6d3 100644 --- a/tests/test_async.py +++ b/tests/test_async.py @@ -1,11 +1,18 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING + from tornado.concurrent import Future from tornado.testing import AsyncTestCase from frontik.futures import future_fold +if TYPE_CHECKING: + from collections.abc import Callable + class MyException(Exception): - def __init__(self, result_was=None): + def __init__(self, result_was: object | None = None) -> None: self.result_was = result_was @@ -13,47 +20,12 @@ class MyOtherException(MyException): pass -class FutureProbe: - _DEFAULT = object - - def __init__(self, future_to_check, stop_cb=None): - self._calls = [] - self._stop_cb = stop_cb - future_to_check.add_done_callback(self.build_callback()) - - def build_callback(self): - def _cb(future): - exception = future.exception() - result = None - if exception is None: - result = future.result() - self._calls.append((result, exception)) - if callable(self._stop_cb): - self._stop_cb() - return _cb - - 
def assert_single_result_call(self, test, expected_result): - test.assertEqual(len(self._calls), 1, msg='should be only one future resolve') - test.assertEqual(self._calls[0][0], expected_result, msg='expected future result not matched') - - def assert_single_exception_call(self, test, expected_exception_class, result_was=_DEFAULT): - assert issubclass(expected_exception_class, MyException) - - test.assertEqual(len(self._calls), 1, msg='should be only one future resolve with exception') - exception = self._calls[0][1] - test.assertIsInstance(exception, expected_exception_class, - msg='exception should have expected type') - if result_was is not self._DEFAULT: - test.assertEqual(exception.result_was, result_was) - - class TestFutureFold(AsyncTestCase): - def test_value_to_value(self): marker = object() result_marker = object() - future = Future() + future: Future = Future() future_probe = FutureProbe(future) def _mapper(result): @@ -70,7 +42,7 @@ def _mapper(result): def test_value_to_exception(self): result_marker = object() - future = Future() + future: Future = Future() future_probe = FutureProbe(future) def _mapper(result): @@ -88,7 +60,7 @@ def _mapper(result): def test_exception_to_value(self): marker = object() - future = Future() + future: Future = Future() future_probe = FutureProbe(future) def _exception_mapper(exception): @@ -109,12 +81,12 @@ def _exception_mapper(exception): res_future_probe.assert_single_result_call(self, marker) def test_exception_to_exception(self): - future = Future() + future: Future = Future() future_probe = FutureProbe(future) def _exception_mapper(exception): if isinstance(exception, MyException): - raise MyOtherException() + raise MyOtherException else: return None @@ -138,11 +110,11 @@ def _mapper(_): def _exception_mapper(_): return second_marker - first_future = Future() + first_future: Future = Future() folded_future = future_fold(first_future, result_mapper=_mapper, exception_mapper=_exception_mapper) folded_future_probe = FutureProbe(folded_future) - second_future = Future() + second_future: Future = Future() second_folded_future = future_fold(second_future, result_mapper=_mapper, exception_mapper=_exception_mapper) second_folded_future_probe = FutureProbe(second_folded_future, stop_cb=self.stop) @@ -152,3 +124,46 @@ def _exception_mapper(_): folded_future_probe.assert_single_result_call(self, marker) second_folded_future_probe.assert_single_result_call(self, second_marker) + + +class FutureProbe: + _DEFAULT = object + + def __init__(self, future_to_check: Future, stop_cb: Callable | None = None) -> None: + self._calls: list = [] + self._stop_cb = stop_cb + future_to_check.add_done_callback(self.build_callback()) + + def build_callback(self) -> Callable: + def _cb(future): + exception = future.exception() + result = None + if exception is None: + result = future.result() + self._calls.append((result, exception)) + if callable(self._stop_cb): + self._stop_cb() + + return _cb + + def assert_single_result_call( + self, + test: TestFutureFold, + expected_result: tuple[object, object] | object, + ) -> None: + test.assertEqual(len(self._calls), 1, msg='should be only one future resolve') + test.assertEqual(self._calls[0][0], expected_result, msg='expected future result not matched') + + def assert_single_exception_call( + self, + test: TestFutureFold, + expected_exception_class: type[MyException] | type[MyOtherException], + result_was: type[object] | object = _DEFAULT, + ) -> None: + assert issubclass(expected_exception_class, MyException) + + 
test.assertEqual(len(self._calls), 1, msg='should be only one future resolve with exception') + exception = self._calls[0][1] + test.assertIsInstance(exception, expected_exception_class, msg='exception should have expected type') + if result_was is not self._DEFAULT: + test.assertEqual(exception.result_was, result_was) diff --git a/tests/test_asyncgroup.py b/tests/test_asyncgroup.py index ad521fd04..b1cd0f107 100644 --- a/tests/test_asyncgroup.py +++ b/tests/test_asyncgroup.py @@ -5,8 +5,7 @@ from tornado.concurrent import Future from tornado.testing import ExpectLog -from frontik.futures import async_logger, AsyncGroup - +from frontik.futures import AsyncGroup, async_logger logging.root.setLevel(logging.NOTSET) @@ -41,8 +40,8 @@ def finish_callback(): self.assertEqual(ag._finished, True) self.assertEqual(data, [1, 2, 3]) - def test_notifications(self): - f = Future() + def test_notifications(self) -> None: + f: Future = Future() ag = AsyncGroup(partial(f.set_result, True)) not1 = ag.add_notification() not2 = ag.add_notification() @@ -61,8 +60,8 @@ def test_notifications(self): with ExpectLog(async_logger, r'.*trying to finish already finished AsyncGroup\(name=None, finished=True\)'): ag.finish() - def test_finish(self): - f = Future() + def test_finish(self) -> None: + f: Future = Future() ag = AsyncGroup(partial(f.set_result, True)) self.assertEqual(ag._finished, False) @@ -73,9 +72,10 @@ def test_finish(self): self.assertEqual(ag._finished, True) self.assertEqual(f.result(), True) - def test_exception_in_first(self): + def test_exception_in_first(self) -> None: def callback1(): - raise Exception('callback1 error') + msg = 'callback1 error' + raise Exception(msg) def callback2(): self.fail('callback2 should not be called') @@ -95,9 +95,10 @@ def finish_callback(): self.assertEqual(ag._finished, True) - def test_exception_in_last(self): + def test_exception_in_last(self) -> None: def callback2(): - raise Exception('callback1 error') + msg = 'callback1 error' + raise Exception(msg) def finish_callback(): self.fail('finish_callback should not be called') @@ -113,9 +114,10 @@ def finish_callback(): self.assertEqual(ag._finished, True) - def test_exception_in_final(self): + def test_exception_in_final(self) -> None: def finish_callback(): - raise Exception('callback1 error') + msg = 'callback1 error' + raise Exception(msg) ag = AsyncGroup(finish_callback) diff --git a/tests/test_asyncgroup_handler.py b/tests/test_asyncgroup_handler.py index a5618bc2c..4325e1ef3 100644 --- a/tests/test_asyncgroup_handler.py +++ b/tests/test_asyncgroup_handler.py @@ -14,8 +14,8 @@ def test_group(self): '2': {'2': 'yay'}, '3': {'3': 'yay'}, 'final_callback_called': True, - 'future_callback_result': 'yay' - } + 'future_callback_result': 'yay', + }, ) def test_group_async(self): @@ -27,8 +27,8 @@ def test_group_async(self): '2': {'2': 'yay'}, '3': {'3': 'yay'}, 'final_callback_called': True, - 'future_callback_result': 'yay' - } + 'future_callback_result': 'yay', + }, ) def test_group_request_fail(self): @@ -40,8 +40,8 @@ def test_group_request_fail(self): '2': {'2': 'yay'}, '3': {'error': {'reason': 'Bad Request', 'code': 400}}, 'final_callback_called': True, - 'future_callback_result': 'yay' - } + 'future_callback_result': 'yay', + }, ) def test_group_request_fail_async(self): @@ -53,15 +53,15 @@ def test_group_request_fail_async(self): '2': {'2': 'yay'}, '3': {'error': {'reason': 'Bad Request', 'code': 400}}, 'final_callback_called': True, - 'future_callback_result': 'yay' - } + 'future_callback_result': 'yay', 
+ }, ) - def test_group_callback_fail(self): + def test_group_callback_fail(self) -> None: response = frontik_test_app.get_page('async_group/group?fail_callback=true') self.assertEqual(response.status_code, 500) - def test_group_callback_fail_async(self): + def test_group_callback_fail_async(self) -> None: response = frontik_test_app.get_page('async_group/group_async?fail_callback=true') self.assertEqual(response.status_code, 500) @@ -73,11 +73,11 @@ def test_group_with_only_resolved_futures_async(self): json = frontik_test_app.get_page_json('async_group/group_with_futures_async') self.assertEqual(json, {'1': {'1': 'yay'}, '2': {'2': 'yay'}}) - def test_group_with_failing_future(self): + def test_group_with_failing_future(self) -> None: response = frontik_test_app.get_page('async_group/group_with_futures?failed_future=true') self.assertEqual(response.status_code, 500) - def test_group_with_failing_future_async(self): + def test_group_with_failing_future_async(self) -> None: response = frontik_test_app.get_page('async_group/group_with_futures_async?failed_future=true') self.assertEqual(response.status_code, 500) diff --git a/tests/test_balancer.py b/tests/test_balancer.py index d295f4d07..85e059fc4 100644 --- a/tests/test_balancer.py +++ b/tests/test_balancer.py @@ -1,18 +1,20 @@ import unittest -from tests.instances import find_free_port, frontik_balancer_app, frontik_broken_balancer_app import pytest +from tests.instances import find_free_port, frontik_balancer_app, frontik_broken_balancer_app + -# TODO unmark skipped class TestHttpError(unittest.TestCase): + free_port = None + @classmethod - def setUpClass(cls): + def setUpClass(cls) -> None: frontik_balancer_app.start() frontik_broken_balancer_app.start() cls.free_port = find_free_port(from_port=10000, to_port=20000) - def make_url(self, url): + def make_url(self, url: str) -> str: return ( f'{url}?normal={frontik_balancer_app.port}&broken={frontik_broken_balancer_app.port}&free={self.free_port}' ) diff --git a/tests/test_broken_app.py b/tests/test_broken_app.py index 0156c2398..9a9afd9be 100644 --- a/tests/test_broken_app.py +++ b/tests/test_broken_app.py @@ -4,8 +4,8 @@ class TestBrokenApp(unittest.TestCase): - def test_broken_config(self): + def test_broken_config(self) -> None: self.assertRaises(AssertionError, frontik_broken_config_app.start) - def test_broken_init_async(self): + def test_broken_init_async(self) -> None: self.assertRaises(AssertionError, frontik_broken_init_async_app.start) diff --git a/tests/test_codestyle.py b/tests/test_codestyle.py new file mode 100644 index 000000000..1ac70ae58 --- /dev/null +++ b/tests/test_codestyle.py @@ -0,0 +1,65 @@ +import os +import subprocess + +import mypy.api + +ROOT = os.path.dirname(os.path.dirname(__file__)) +MODULES = ['frontik', 'tests'] + + +# def test_simple_error(files_for_lint): +# print('--------------') +# print(f'------{files_for_lint}--------') +# # completed_proc = subprocess.run( +# # f'cd {ROOT}; git diff --diff-filter=d --name-only master -- "***.py"', capture_output=True, shell=True +# # ) +# # code = completed_proc.returncode +# # out = completed_proc.stdout.decode('utf-8').splitlines() +# # print(out) +# assert 1 == 0, f'mega error message, files for lint {files_for_lint}' + + +def test_mypy(): + code_paths = [f'{ROOT}/{m}' for m in MODULES] + # opts = [ + # '--ignore-missing-imports', + # '--disallow-untyped-calls', + # '--disallow-incomplete-defs', + # '--check-untyped-defs', + # ] + # out, err, exit_code = mypy.api.run(opts + code_paths) + out, err, exit_code = 
mypy.api.run(['--config-file', f'{ROOT}/pyproject.toml', *code_paths]) + assert exit_code == 0, out + + +def test_ruff(): + modules = ' '.join(MODULES) + opts = '' + # opts = ' '.join( + # [ + # '--line-length 120', + # # '--ignore F541,D300', + # '--select E,F,W,I', + # ] + # ) + completed_proc = subprocess.run(f'cd {ROOT}; ruff {opts} {modules}', capture_output=True, shell=True) + code = completed_proc.returncode + out = completed_proc.stdout.decode('utf-8') + assert code == 0, out + + +def test_black(): + modules = ' '.join(MODULES) + opts = '' + # opts = ' '.join( + # [ + # '--check', + # '--diff', + # '-l 120', + # '-S', + # ] + # ) + completed_proc = subprocess.run(f'cd {ROOT}; black {opts} {modules}', capture_output=True, shell=True) + code = completed_proc.returncode + out = completed_proc.stdout.decode('utf-8') + assert code == 0, out diff --git a/tests/test_consul_registration.py b/tests/test_consul_registration.py index 8f19bf924..5f430ae6c 100644 --- a/tests/test_consul_registration.py +++ b/tests/test_consul_registration.py @@ -1,40 +1,44 @@ +import sys import time import unittest -from tests.instances import FrontikTestInstance, common_frontik_start_options -from tests import FRONTIK_ROOT import pytest -import sys + +from tests import FRONTIK_ROOT +from tests.instances import FrontikTestInstance, common_frontik_start_options FRONTIK_RUN = f'{FRONTIK_ROOT}/frontik-test' TEST_PROJECTS = f'{FRONTIK_ROOT}/tests/projects' class TestConsulRegistration(unittest.TestCase): - - def setUp(self): + def setUp(self) -> None: self.consul_mock = FrontikTestInstance( f'{FRONTIK_RUN} --app=tests.projects.consul_mock_app {common_frontik_start_options} ' - f' --config={TEST_PROJECTS}/frontik_consul_mock.cfg') + f' --config={TEST_PROJECTS}/frontik_consul_mock.cfg', + ) self.consul_mock.start() self.frontik_single_worker_app = FrontikTestInstance( f'{FRONTIK_RUN} --app=tests.projects.no_debug_app {common_frontik_start_options} ' f' --config={TEST_PROJECTS}/frontik_no_debug.cfg --consul_port={self.consul_mock.port} ' f' --consul_enabled=True' - f' --fail_start_on_empty_upstream=False') + f' --fail_start_on_empty_upstream=False', + ) self.frontik_multiple_worker_app = FrontikTestInstance( f'{FRONTIK_RUN} --app=tests.projects.no_debug_app {common_frontik_start_options} ' f' --config={TEST_PROJECTS}/frontik_no_debug.cfg --consul_port={self.consul_mock.port} --workers=3' f' --consul_enabled=True' - f' --fail_start_on_empty_upstream=False') + f' --fail_start_on_empty_upstream=False', + ) self.frontik_multiple_worker_app_timeout_barrier = FrontikTestInstance( f'{FRONTIK_RUN} --app=tests.projects.no_debug_app {common_frontik_start_options} ' f' --config={TEST_PROJECTS}/frontik_no_debug.cfg --consul_port={self.consul_mock.port} --workers=3' f' --init_workers_timeout_sec=0' f' --consul_enabled=True' - f' --fail_start_on_empty_upstream=False') + f' --fail_start_on_empty_upstream=False', + ) - def tearDown(self): + def tearDown(self) -> None: self.frontik_single_worker_app.stop() self.frontik_multiple_worker_app.stop() self.frontik_multiple_worker_app_timeout_barrier.stop() @@ -56,12 +60,13 @@ def test_multiple_worker_registration(self): def test_multiple_worker_not_registration(self): self.frontik_multiple_worker_app_timeout_barrier.start_with_check(lambda _: None) - for i in range(50): + for _i in range(50): time.sleep(0.1) if not self.frontik_multiple_worker_app_timeout_barrier.is_alive(): break else: - raise Exception('application didn\'t stop') + msg = "application didn't stop" + raise Exception(msg) 
registration_call_count = self.consul_mock.get_page_json('call_registration_stat') self.assertEqual(registration_call_count, {}, 'Application should not register') diff --git a/tests/test_debug.py b/tests/test_debug.py index bffd923fa..7e641f839 100644 --- a/tests/test_debug.py +++ b/tests/test_debug.py @@ -1,10 +1,16 @@ +from __future__ import annotations + import base64 import http.client import unittest +from typing import TYPE_CHECKING from tornado.escape import to_unicode -from tests.instances import create_basic_auth_header, frontik_test_app, frontik_no_debug_app +from tests.instances import create_basic_auth_header, frontik_no_debug_app, frontik_test_app + +if TYPE_CHECKING: + from requests.models import Response class TestDebug(unittest.TestCase): @@ -77,7 +83,12 @@ def test_complex_debug_page(self): for msg in assert_not_found: self.assertNotIn(msg, response_content) - def assertDebugResponseCode(self, page, expected_code, headers=None): + def assert_debug_response_code( + self, + page: str, + expected_code: int, + headers: dict[str, str] | None = None, + ) -> Response: response = frontik_no_debug_app.get_page(page, headers=headers) self.assertEqual(response.status_code, expected_code) return response @@ -85,15 +96,17 @@ def assertDebugResponseCode(self, page, expected_code, headers=None): def test_debug_by_basic_auth(self): for url in ('simple', 'simple_async'): for param in ('debug', 'noxsl', 'notpl'): - response = self.assertDebugResponseCode(f'simple?{param}', http.client.UNAUTHORIZED) + response = self.assert_debug_response_code(f'simple?{param}', http.client.UNAUTHORIZED) self.assertIn('Www-Authenticate', response.headers) self.assertRegex(response.headers['Www-Authenticate'], 'Basic realm="[^"]+"') - self.assertDebugResponseCode( - f'{url}?{param}', http.client.OK, headers={'Authorization': self.DEBUG_BASIC_AUTH} + self.assert_debug_response_code( + f'{url}?{param}', + http.client.OK, + headers={'Authorization': self.DEBUG_BASIC_AUTH}, ) - def test_debug_by_basic_auth_with_invalid_header(self): + def test_debug_by_basic_auth_with_invalid_header(self) -> None: for url in ('simple', 'simple_async'): invalid_headers = ( 'Token user:god', @@ -104,34 +117,39 @@ def test_debug_by_basic_auth_with_invalid_header(self): create_basic_auth_header(':'), create_basic_auth_header(''), create_basic_auth_header('not:pass'), - 'BASIC {}'.format(to_unicode(base64.b64encode(b'user:god'))) + 'BASIC {}'.format(to_unicode(base64.b64encode(b'user:god'))), ) for h in invalid_headers: - self.assertDebugResponseCode(f'{url}?debug', http.client.UNAUTHORIZED, headers={'Authorization': h}) + self.assert_debug_response_code(f'{url}?debug', http.client.UNAUTHORIZED, headers={'Authorization': h}) def test_debug_by_header(self): for url in ('simple', 'simple_async'): for param in ('debug', 'noxsl', 'notpl'): - response = self.assertDebugResponseCode(f'simple?{param}', http.client.UNAUTHORIZED) + response = self.assert_debug_response_code(f'simple?{param}', http.client.UNAUTHORIZED) self.assertIn('Www-Authenticate', response.headers) self.assertEqual('Basic realm="Secure Area"', response.headers['Www-Authenticate']) - self.assertDebugResponseCode( - f'simple?{param}', http.client.OK, headers={'Frontik-Debug-Auth': 'user:god'} + self.assert_debug_response_code( + f'simple?{param}', + http.client.OK, + headers={'Frontik-Debug-Auth': 'user:god'}, ) - self.assertDebugResponseCode( - f'{url}?{param}', http.client.OK, - headers={'Frontik-Debug-Auth': 'user:god', 'Authorization': 'Basic bad'} + 
self.assert_debug_response_code( + f'{url}?{param}', + http.client.OK, + headers={'Frontik-Debug-Auth': 'user:god', 'Authorization': 'Basic bad'}, ) - def test_debug_by_header_with_wrong_header(self): + def test_debug_by_header_with_wrong_header(self) -> None: for url in ('simple', 'simple_async'): for value in ('', 'not:pass', 'user: god', self.DEBUG_BASIC_AUTH): - response = self.assertDebugResponseCode( - f'{url}?debug', http.client.UNAUTHORIZED, headers={'Frontik-Debug-Auth': value} + response = self.assert_debug_response_code( + f'{url}?debug', + http.client.UNAUTHORIZED, + headers={'Frontik-Debug-Auth': value}, ) self.assertIn('Www-Authenticate', response.headers) @@ -140,11 +158,10 @@ def test_debug_by_header_with_wrong_header(self): def test_debug_by_cookie(self): for url in ('simple', 'simple_async'): for param in ('debug', 'noxsl', 'notpl'): - self.assertDebugResponseCode( - url, http.client.UNAUTHORIZED, headers={'Cookie': f'{param}=true'} - ) + self.assert_debug_response_code(url, http.client.UNAUTHORIZED, headers={'Cookie': f'{param}=true'}) - self.assertDebugResponseCode( - url, http.client.OK, - headers={'Cookie': f'{param}=true;', 'Authorization': self.DEBUG_BASIC_AUTH} + self.assert_debug_response_code( + url, + http.client.OK, + headers={'Cookie': f'{param}=true;', 'Authorization': self.DEBUG_BASIC_AUTH}, ) diff --git a/tests/test_debug_curl_string.py b/tests/test_debug_curl_string.py index 69f4be8e8..83c5b38ab 100644 --- a/tests/test_debug_curl_string.py +++ b/tests/test_debug_curl_string.py @@ -1,6 +1,6 @@ import unittest -from http_client.request_response import RequestBuilder, USER_AGENT_HEADER +from http_client.request_response import USER_AGENT_HEADER, RequestBuilder from frontik import media_types from frontik.debug import request_to_curl_string @@ -8,49 +8,62 @@ class TestCurlString(unittest.TestCase): def test_curl_string_get(self): - request = RequestBuilder('http://test.com', 'test', '/path', 'test', - data={'param': 'value'}, - headers={'Accept': media_types.APPLICATION_JSON}) + request = RequestBuilder( + 'http://test.com', + 'test', + '/path', + 'test', + data={'param': 'value'}, + headers={'Accept': media_types.APPLICATION_JSON}, + ) self.assertEqual( request_to_curl_string(request), "curl -X GET 'http://test.com/path?param=value' " - "-H 'Accept: application/json' -H '{}: test'".format(USER_AGENT_HEADER) + f"-H 'Accept: application/json' -H '{USER_AGENT_HEADER}: test'", ) def test_curl_string_post(self): - request = RequestBuilder('http://test.com', 'test', '/path', 'test', - data={'param': 'value'}, - method='POST') + request = RequestBuilder('http://test.com', 'test', '/path', 'test', data={'param': 'value'}, method='POST') self.assertEqual( request_to_curl_string(request), "curl -X POST 'http://test.com/path' -H 'Content-Length: 11' " "-H 'Content-Type: application/x-www-form-urlencoded' " - "-H '{}: test' --data 'param=value'".format(USER_AGENT_HEADER) + f"-H '{USER_AGENT_HEADER}: test' --data 'param=value'", ) def test_curl_string_put(self): - request = RequestBuilder('http://test.com', 'test', '/path', 'test', - data='DATA', - method='PUT', - content_type=media_types.TEXT_PLAIN) + request = RequestBuilder( + 'http://test.com', + 'test', + '/path', + 'test', + data='DATA', + method='PUT', + content_type=media_types.TEXT_PLAIN, + ) self.assertEqual( request_to_curl_string(request), "curl -X PUT 'http://test.com/path' -H 'Content-Length: 4' -H 'Content-Type: text/plain' " - "-H '{}: test' --data 'DATA'".format(USER_AGENT_HEADER) + f"-H 
'{USER_AGENT_HEADER}: test' --data 'DATA'", ) def test_curl_string_binary(self): - request = RequestBuilder('http://test.com', 'test', '/path', 'test', - data='тест', - method='POST', - content_type=media_types.TEXT_PLAIN) + request = RequestBuilder( + 'http://test.com', + 'test', + '/path', + 'test', + data='тест', + method='POST', + content_type=media_types.TEXT_PLAIN, + ) self.assertEqual( request_to_curl_string(request), "echo -e '\\xd1\\x82\\xd0\\xb5\\xd1\\x81\\xd1\\x82' | " "curl -X POST 'http://test.com/path' -H 'Content-Length: 8' -H 'Content-Type: text/plain' " - "-H '{}: test' --data-binary @-".format(USER_AGENT_HEADER) + f"-H '{USER_AGENT_HEADER}: test' --data-binary @-", ) diff --git a/tests/test_default_urls.py b/tests/test_default_urls.py index 69f3d2b7e..fa047bad1 100644 --- a/tests/test_default_urls.py +++ b/tests/test_default_urls.py @@ -5,27 +5,27 @@ class TestDefaultUrls(unittest.TestCase): - def test_version(self): + def test_version(self) -> None: xml = frontik_test_app.get_page_xml('version') test_app_version = xml.xpath('application[@name="tests.projects.test_app"]/app-version/@number')[0] self.assertEqual(xml.tag, 'versions') self.assertEqual('last version', test_app_version) - def test_unknown_version(self): + def test_unknown_version(self) -> None: xml = frontik_re_app.get_page_xml('version') re_app_version = xml.findtext('application[@name="tests.projects.re_app"]/version') self.assertEqual('unknown', re_app_version) - def test_no_version(self): + def test_no_version(self) -> None: xml = frontik_re_app.get_page_xml('version') re_app_version = xml.findtext('application[@name="tests.projects.re_app"]/version') self.assertEqual(xml.tag, 'versions') self.assertEqual(re_app_version, 'unknown') - def test_status(self): + def test_status(self) -> None: response = frontik_test_app.get_page('status') self.assertTrue(response.headers['Content-Type'].startswith('application/json')) diff --git a/tests/test_doc.py b/tests/test_doc.py index 2f1fab4fb..5ce077b7d 100644 --- a/tests/test_doc.py +++ b/tests/test_doc.py @@ -1,15 +1,15 @@ import unittest +from http_client.request_response import DataParseError, RequestResult from lxml import etree from lxml_asserts.testcase import LxmlTestCaseMixin from tornado.concurrent import Future -from http_client.request_response import DataParseError, RequestResult from frontik.doc import Doc class TestDoc(unittest.TestCase, LxmlTestCaseMixin): - def test_simple(self): + def test_simple(self) -> None: d = Doc('a') self.assertTrue(d.is_empty()) @@ -25,12 +25,12 @@ def test_simple(self): self.assertFalse(d.is_empty()) self.assertXmlEqual( d.to_etree_element(), - b"""\n\xd1\x82\xd0\xb5\xd1\x81\xd1\x82""" + b"""\n\xd1\x82\xd0\xb5\xd1\x81\xd1\x82""", ) async def test_future_simple(self): d = Doc('a') - f = Future() + f: Future = Future() d.put(f) self.assertXmlEqual(d.to_etree_element(), b"""\n""") @@ -41,7 +41,7 @@ async def test_future_simple(self): async def test_future_etree_element(self): d = Doc('a') - f = Future() + f: Future = Future() f.set_result(etree.Element('b')) d.put(f) @@ -49,7 +49,7 @@ async def test_future_etree_element(self): async def test_future_list(self): d = Doc('a') - f = Future() + f: Future = Future() f.set_result([etree.Comment('ccc'), etree.Element('bbb')]) d.put(f) @@ -57,41 +57,38 @@ async def test_future_list(self): async def test_failed_future(self): d = Doc('a') - f = Future() + f: Future = Future() result = self.get_test_request_result() result._data_parse_error = DataParseError(reason='error', code='code') 
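# --- Editor's note (usage sketch, not part of the diff): the tests/test_doc.py
# --- changes above exercise frontik.doc.Doc, an incremental XML builder whose
# --- put() accepts etree elements, strings, Futures and nested Doc instances.
# --- A minimal standalone example, assuming frontik and lxml are installed:
from lxml import etree

from frontik.doc import Doc

doc = Doc('a')               # <a> becomes the root element
doc.put(etree.Element('b'))  # append a child element
print(doc.to_string())       # bytes, serialized with an XML declaration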
f.set_result(result) d.put(f) self.assertXmlEqual( - d.to_etree_element(), """\n""" + d.to_etree_element(), + """\n""", ) async def test_future_string_value(self): d = Doc('a') - f = Future() + f: Future = Future() result = self.get_test_request_result() result._content_type = 'text' result._data = 'plain_text' f.set_result(result) d.put(f) - self.assertXmlEqual( - d.to_etree_element(), """\n""" - ) + self.assertXmlEqual(d.to_etree_element(), """\n""") - def test_doc_nested(self): + def test_doc_nested(self) -> None: a = Doc('a') b = Doc('b') a.put(b) - self.assertXmlEqual( - a.to_etree_element(), b"""\n""" - ) + self.assertXmlEqual(a.to_etree_element(), b"""\n""") - def test_serializable(self): + def test_serializable(self) -> None: class Serializable: - def __init__(self, tag, value): + def __init__(self, tag: str, value: str) -> None: self.tag = tag self.value = value @@ -104,24 +101,23 @@ def to_etree_element(self): a.put(Serializable('testNode', 'vally')) self.assertEqual( - a.to_string(), b"""\nvally""" + a.to_string(), + b"""\nvally""", ) - def test_other_types(self): + def test_other_types(self) -> None: a = Doc('a') a.put('1') self.assertRaises(ValueError, a.to_string) - def test_root_node(self): + def test_root_node(self) -> None: d = Doc(root_node=etree.Element('doc')) d.put(etree.Element('test1')) - self.assertXmlEqual( - d.to_etree_element(), b"""\n""" - ) + self.assertXmlEqual(d.to_etree_element(), b"""\n""") - def test_root_node_doc(self): + def test_root_node_doc(self) -> None: d1 = Doc('a') d1.put(etree.Comment('1')) @@ -129,18 +125,19 @@ def test_root_node_doc(self): d2.put(etree.Comment('2')) self.assertXmlEqual( - d2.to_etree_element(), b"""\n""" + d2.to_etree_element(), + b"""\n""", ) - def test_string_as_root_node(self): + def test_string_as_root_node(self) -> None: d = Doc(root_node='a') self.assertXmlEqual(d.to_etree_element(), b"""\n""") - def test_root_node_invalid(self): + def test_root_node_invalid(self) -> None: self.assertRaises(TypeError, Doc, root_node=etree.Comment('invalid root doc')) @staticmethod - def get_test_request_result(): + def get_test_request_result() -> RequestResult: class FakeRequest: name = 'name' diff --git a/tests/test_errors.py b/tests/test_errors.py index 10480ecb3..2b45d610c 100644 --- a/tests/test_errors.py +++ b/tests/test_errors.py @@ -2,14 +2,12 @@ import requests -from tests.instances import frontik_test_app from frontik import media_types +from tests.instances import frontik_test_app class TestHttpError(unittest.TestCase): - _CODES_MAPPING = { - 200: 200, 401: 401, 599: 503 - } + _CODES_MAPPING = {200: 200, 401: 401, 599: 503} def test_raise_200(self): response = frontik_test_app.get_page('http_error?code=200') @@ -23,7 +21,7 @@ def test_raise_401(self): self.assertEqual(response.headers['content-type'], media_types.TEXT_HTML) self.assertEqual( response.content, - b'401: Unauthorized401: Unauthorized' + b'401: Unauthorized401: Unauthorized', ) def test_raise_unknown_code(self): @@ -64,12 +62,12 @@ def test_http_error_json(self): self.assertEqual(response.status_code, 400) self.assertEqual(response.content, b'{"reason": "bad argument"}') - def test_write_error(self): + def test_write_error(self) -> None: response = frontik_test_app.get_page('write_error') self.assertEqual(response.status_code, 500) self.assertEqual(response.content, b'{"write_error": true}') - def test_write_error_exception(self): + def test_write_error_exception(self) -> None: response = frontik_test_app.get_page('write_error?fail_write_error=true') 
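# --- Editor's note (sketch, not part of the diff): _CODES_MAPPING in
# --- tests/test_errors.py above pairs each code raised through HTTPError with the
# --- status the service is expected to return (599 maps to 503). A minimal
# --- parametrized version of those checks, assuming the same test instance:
import unittest

from tests.instances import frontik_test_app


class TestCodesMappingSketch(unittest.TestCase):
    _CODES_MAPPING = {200: 200, 401: 401, 599: 503}

    def test_mapping(self) -> None:
        for raised, returned in self._CODES_MAPPING.items():
            response = frontik_test_app.get_page(f'http_error?code={raised}')
            self.assertEqual(response.status_code, returned)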
self.assertEqual(response.status_code, 500) self.assertEqual(response.content, b'') diff --git a/tests/test_fail_fast.py b/tests/test_fail_fast.py index b9e2ab9a7..861291d7f 100644 --- a/tests/test_fail_fast.py +++ b/tests/test_fail_fast.py @@ -41,11 +41,11 @@ def test_future(self): future_result = json['future'] self.assertEqual(future_result, 'future_result') - def test_future_fail(self): + def test_future_fail(self) -> None: response = frontik_test_app.get_page('fail_fast/future?fail_future=true') self.assertEqual(response.status_code, 500) - def test_exception_in_fail_fast(self): + def test_exception_in_fail_fast(self) -> None: response = frontik_test_app.get_page('fail_fast?fail_fast=true&exception_in_fail_fast=true') self.assertEqual(response.status_code, 500) diff --git a/tests/test_file_cache.py b/tests/test_file_cache.py index 806c014b8..2d0a92133 100644 --- a/tests/test_file_cache.py +++ b/tests/test_file_cache.py @@ -1,3 +1,4 @@ +import logging import os import unittest from functools import partial @@ -54,9 +55,12 @@ def test_unlimited_dict(self): CACHE_DIR = os.path.join(os.path.dirname(__file__), 'projects', 'test_app', 'xsl') - class MockLog: - def __init__(self): - self.message = None + class MockLog(logging.Logger): + def __init__(self) -> None: + self.message: str = None # type: ignore + self.level = 0 + self.parent = None + self.name = 'mock_logger' def debug(self, message, *args): self.message = message % args diff --git a/tests/test_file_logging.py b/tests/test_file_logging.py index d7fe71438..4aa08199f 100644 --- a/tests/test_file_logging.py +++ b/tests/test_file_logging.py @@ -3,23 +3,27 @@ import tempfile import unittest +from tests import FRONTIK_ROOT from tests.instances import FrontikTestInstance, common_frontik_start_options +FRONTIK_RUN = f'{FRONTIK_ROOT}/frontik-test' +TEST_PROJECTS = f'{FRONTIK_ROOT}/tests/projects' -class TestLogToFile(unittest.TestCase): - def setUp(self): +class TestLogToFile(unittest.TestCase): + def setUp(self) -> None: self.tmp_log_dir = tempfile.mkdtemp() self.service = FrontikTestInstance( - f'./frontik-test --app=tests.projects.consul_mock_app {common_frontik_start_options} ' - f' --config=tests/projects/frontik_consul_mock.cfg --log_dir={self.tmp_log_dir} --log_level=debug', - allow_to_create_log_files=True) + f'{FRONTIK_RUN} --app=tests.projects.consul_mock_app {common_frontik_start_options} ' + f' --config={TEST_PROJECTS}/frontik_consul_mock.cfg --log_dir={self.tmp_log_dir} --log_level=debug', + allow_to_create_log_files=True, + ) - def tearDown(self): + def tearDown(self) -> None: self.service.stop() shutil.rmtree(self.tmp_log_dir, ignore_errors=True) - def test_log_dir_is_not_empty(self): + def test_log_dir_is_not_empty(self) -> None: self.service.start() self.service.stop() dir_contents = os.listdir(self.tmp_log_dir) @@ -27,4 +31,4 @@ def test_log_dir_is_not_empty(self): self.fail('No log files') empty_files = [f for f in dir_contents if os.stat(os.path.join(self.tmp_log_dir, f)).st_size == 0] if empty_files: - self.fail('Empty log files: {}'.format(empty_files)) + self.fail(f'Empty log files: {empty_files}') diff --git a/tests/test_frontik_testing.py b/tests/test_frontik_testing.py index 9e54c87fd..fcf96f5b7 100644 --- a/tests/test_frontik_testing.py +++ b/tests/test_frontik_testing.py @@ -1,24 +1,24 @@ -from lxml import etree +from __future__ import annotations + import pytest +from lxml import etree from tornado.ioloop import IOLoop from frontik.app import FrontikApplication from frontik.handler import PageHandler from 
frontik.options import options -from frontik.testing import FrontikTestCase, FrontikTestBase +from frontik.testing import FrontikTestBase, FrontikTestCase from frontik.util import gather_list -from tests.projects.test_app.pages.handler import delete from tests import FRONTIK_ROOT +from tests.projects.test_app.pages.handler import delete class AsyncHandler(PageHandler): async def get_page(self): self.result = 0 + service_host = self.config.serviceHost # type: ignore - res1, res2 = await gather_list( - self.get_url(self.config.serviceHost, '/val1/1'), - self.get_url(self.config.serviceHost, '/val2/2') - ) + res1, res2 = await gather_list(self.get_url(service_host, '/val1/1'), self.get_url(service_host, '/val2/2')) self.result += int(res1.data.findtext('val')) self.result += int(res2.data.findtext('val')) @@ -30,11 +30,11 @@ async def get_page(self): class CheckConfigHandler(PageHandler): async def get_page(self): - self.text = self.config.config_param + self.text = self.config.config_param # type: ignore class TestApplication(FrontikApplication): - def application_urls(self): + def application_urls(self) -> list[tuple]: return [ ('/config', CheckConfigHandler), ('/sum_values', AsyncHandler), @@ -43,12 +43,12 @@ def application_urls(self): class TestFrontikTestingOld(FrontikTestCase): - def setUp(self): + def setUp(self) -> None: options.consul_enabled = False super().setUp() self.configure_app(serviceHost='http://service.host') - def get_app(self): + def get_app(self) -> TestApplication: app = TestApplication(app='test_app') IOLoop.current().run_sync(app.init) @@ -74,8 +74,10 @@ def test_xml_stub(self): def test_json_stub(self): self.set_stub( - f'http://127.0.0.1:{self.get_http_port()}/delete', request_method='DELETE', - response_file=f'{FRONTIK_ROOT}/tests/stub.json', param='param' + f'http://127.0.0.1:{self.get_http_port()}/delete', + request_method='DELETE', + response_file=f'{FRONTIK_ROOT}/tests/stub.json', + param='param', ) json = self.fetch_json('/delete') @@ -84,8 +86,8 @@ def test_json_stub(self): class TestFrontikTesting(FrontikTestBase): @pytest.fixture(scope='class') - def test_app(self): - yield TestApplication(app='test_app') + def test_app(self) -> TestApplication: + return TestApplication(app='test_app') async def test_config(self): self.configure_app(config_param='param_value') @@ -97,8 +99,10 @@ async def test_config(self): async def test_json_stub(self): self.configure_app(serviceHost='http://service.host') self.set_stub( - f'http://backend/delete', request_method='DELETE', - response_file=f'{FRONTIK_ROOT}/tests/stub.json', param='param' + 'http://backend/delete', + request_method='DELETE', + response_file=f'{FRONTIK_ROOT}/tests/stub.json', + param='param', ) json = await self.fetch_json('/delete', method='POST') diff --git a/tests/test_handler.py b/tests/test_handler.py index c0eef7613..7f9929e93 100644 --- a/tests/test_handler.py +++ b/tests/test_handler.py @@ -1,6 +1,7 @@ -import requests import unittest +import requests + from frontik import media_types from tests.instances import frontik_no_debug_app, frontik_test_app @@ -60,7 +61,7 @@ def test_json_decode_exception(self): 'handler/json', method=method, headers={'Content-Type': media_types.APPLICATION_JSON}, - data=b'' + data=b'', ) self.assertEqual(response.status_code, 400) diff --git a/tests/test_http_client.py b/tests/test_http_client.py index d0f4a7264..918b96f2b 100644 --- a/tests/test_http_client.py +++ b/tests/test_http_client.py @@ -34,15 +34,13 @@ def test_timeout(self): self.assertEqual(json, 
{'error_received': True}) def test_parse_error(self): - """ If json or xml parsing error occurs, we must send None into callback. """ + """If json or xml parsing error occurs, we must send None into callback.""" text = frontik_test_app.get_page_text('http_client/parse_error') self.assertEqual(text, 'Parse error occured') def test_parse_response(self): json = frontik_test_app.get_page_json('http_client/parse_response') - self.assertEqual( - json, {'post': True, 'delete': 'deleted', 'error': {'reason': 'Bad Request', 'code': 400}} - ) + self.assertEqual(json, {'post': True, 'delete': 'deleted', 'error': {'reason': 'Bad Request', 'code': 400}}) def test_custom_headers(self): json = frontik_test_app.get_page_json('http_client/custom_headers') diff --git a/tests/test_http_client_keep_alive.py b/tests/test_http_client_keep_alive.py index 4bf4bebe6..08677ed67 100644 --- a/tests/test_http_client_keep_alive.py +++ b/tests/test_http_client_keep_alive.py @@ -1,17 +1,19 @@ import socket import unittest from contextlib import closing +from typing import Any -from tests.instances import find_free_port, frontik_test_app import pytest +from tests.instances import find_free_port, frontik_test_app + @pytest.mark.skip(reason="doesn't work with native coroutines") class TestHTTPClientKeepAlive(unittest.TestCase): - """ Tests use frontik_client to send http request to frontik_keep_alive_app. - Frontik_keep_alive_app proxies the request to backend. - Backend is just a simple server socket. - We write http response to accepted socket and check whether it is closed or not. + """Tests use frontik_client to send http request to frontik_keep_alive_app. + Frontik_keep_alive_app proxies the request to backend. + Backend is just a simple server socket. + We write http response to accepted socket and check whether it is closed or not. 
""" def setUp(self): @@ -65,42 +67,42 @@ def test_http_client_closes_connection_if_read_timeout_after_partial_response(se class Client: - def __init__(self, port): + def __init__(self, port: int) -> None: self.port = port self.socket = socket.socket() self.socket.connect(('127.0.0.1', port)) self.socket.settimeout(5) - def send_request(self, backend_port, request_id=None): + def send_request(self, backend_port: int, request_id: str | None = None) -> None: self.socket.send(b'GET /http_client/proxy_code?port=' + str(backend_port).encode() + b' HTTP/1.1\r\n') self.socket.send(b'Host: 127.0.0.1:' + str(self.port).encode() + b'\r\n') if request_id: self.socket.send(b'X-Request-Id: ' + request_id.encode() + b'\r\n') self.socket.send(b'\r\n') - def get_response(self): + def get_response(self) -> Any: return self.socket.recv(1024).decode() - def close(self): + def close(self) -> None: self.socket.close() class Backend: - def __init__(self): + def __init__(self) -> None: self.port = find_free_port() self.socket = socket.socket() self.socket.bind(('127.0.0.1', self.port)) self.socket.listen(1) - def accept(self): + def accept(self) -> closing: socket, _ = self.socket.accept() return closing(socket) - def close(self): + def close(self) -> None: self.socket.close() -def handle_request_to_backend(backend_socket, code, reason, headers=None): +def handle_request_to_backend(backend_socket: socket.socket, code: str, reason: str, headers: Any = None) -> None: backend_socket.recv(1024) backend_socket.send(b'HTTP/1.1 ' + code.encode() + b' ' + reason.encode() + b'\r\n') if headers is not None: diff --git a/tests/test_integrations.py b/tests/test_integrations.py index f34a01706..0c2d88e46 100644 --- a/tests/test_integrations.py +++ b/tests/test_integrations.py @@ -1,23 +1,23 @@ +import sys import time import unittest -import requests import pytest -import sys +import requests -from tests.instances import FrontikTestInstance, common_frontik_start_options from tests import FRONTIK_ROOT +from tests.instances import FrontikTestInstance, common_frontik_start_options FRONTIK_RUN = f'{FRONTIK_ROOT}/frontik-test' TEST_PROJECTS = f'{FRONTIK_ROOT}/tests/projects' class TestIntegrations(unittest.TestCase): - def setUp(self): self.frontik_multiple_worker_app = FrontikTestInstance( f'{FRONTIK_RUN} --app=tests.projects.broken_integration.target_app {common_frontik_start_options} ' - f' --config={TEST_PROJECTS}/frontik_consul_mock.cfg --workers=3') + f' --config={TEST_PROJECTS}/frontik_consul_mock.cfg --workers=3', + ) def tearDown(self): self.frontik_multiple_worker_app.stop() @@ -26,11 +26,12 @@ def tearDown(self): def test_server_not_bound_before_integrations(self): def assert_app_start(instance): # keep in relevance to tests.projects.broken_integration.target_app - for i in range(11): + for _i in range(11): try: time.sleep(0.1) response = instance.get_page('status') self.assertNotEqual(response.status_code, 200) except requests.RequestException: pass + self.frontik_multiple_worker_app.start_with_check(assert_app_start) diff --git a/tests/test_jinja.py b/tests/test_jinja.py index 862a42ce7..484f2e4e4 100644 --- a/tests/test_jinja.py +++ b/tests/test_jinja.py @@ -18,11 +18,11 @@ def test_jinja_custom_environment(self): response = frontik_re_app.get_page('jinja_custom_environment') self.assertEqual(response.content, b'custom_env_function_value') - def test_jinja_no_environment(self): + def test_jinja_no_environment(self) -> None: response = frontik_no_debug_app.get_page('jinja_no_environment') 
self.assertEqual(response.status_code, 500) - def test_jinja_no_environment_async(self): + def test_jinja_no_environment_async(self) -> None: response = frontik_no_debug_app.get_page('jinja_no_environment_async') self.assertEqual(response.status_code, 500) diff --git a/tests/test_json_builder.py b/tests/test_json_builder.py index 60cf40f28..6715b622b 100644 --- a/tests/test_json_builder.py +++ b/tests/test_json_builder.py @@ -1,15 +1,15 @@ import json import unittest -from tornado.concurrent import Future from http_client.request_response import DataParseError +from tornado.concurrent import Future from frontik.json_builder import JsonBuilder from tests.test_doc import TestDoc class TestJsonBuilder(unittest.TestCase): - def test_simple(self): + def test_simple(self) -> None: j = JsonBuilder() self.assertTrue(j.is_empty()) @@ -24,7 +24,7 @@ def test_simple(self): self.assertFalse(j.is_empty()) self.assertEqual(j.to_string(), """{"a": "b"}""") - def test_clear(self): + def test_clear(self) -> None: j = JsonBuilder() j.put({'a': 'b'}) j.clear() @@ -32,41 +32,41 @@ def test_clear(self): self.assertTrue(j.is_empty()) self.assertEqual(j.to_string(), '{}') - def test_replace(self): + def test_replace(self) -> None: j = JsonBuilder() j.put({'a': 'b'}) j.replace({'c': 'd'}) self.assertEqual(j.to_string(), '{"c": "d"}') - def test_root_node_name(self): + def test_root_node_name(self) -> None: j = JsonBuilder(root_node='root') j.put({'a': 'b'}) self.assertEqual(j.to_string(), """{"root": {"a": "b"}}""") - def test_invalid_root_node_name(self): + def test_invalid_root_node_name(self) -> None: self.assertRaises(TypeError, JsonBuilder, root_node=10) - def test_list(self): + def test_list(self) -> None: j = JsonBuilder() j.put({'a': {'b': [1, 2, 3]}}) self.assertEqual(j.to_string(), """{"a": {"b": [1, 2, 3]}}""") - def test_set(self): + def test_set(self) -> None: j = JsonBuilder() j.put({'a': {'b': {1, 2, 3}}}) self.assertSetEqual(set(j.to_dict()['a']['b']), {1, 2, 3}) - def test_frozenset(self): + def test_frozenset(self) -> None: j = JsonBuilder() j.put({'a': {'b': frozenset([1, 2, 3])}}) self.assertSetEqual(set(j.to_dict()['a']['b']), {1, 2, 3}) - def test_encoder(self): + def test_encoder(self) -> None: class CustomValue: def __iter__(self): return iter((1, 2, 3)) @@ -85,7 +85,7 @@ def default(self, obj): self.assertEqual(j.to_string(), """{"a": "1.2.3"}""") - def test_multiple_items(self): + def test_multiple_items(self) -> None: j = JsonBuilder() j.put({'a': 'b'}) j.put({'c': 'd'}) @@ -100,9 +100,9 @@ def test_multiple_items(self): self.assertEqual(j.to_dict(), {'a': 'x', 'c': 'd', 'e': 'x'}) - def test_future(self): + def test_future(self) -> None: j = JsonBuilder() - f = Future() + f: Future = Future() j.put(f) self.assertFalse(j.is_empty()) @@ -115,7 +115,7 @@ def test_future(self): async def test_future_string_value(self): j = JsonBuilder() - f = Future() + f: Future = Future() result = TestDoc.get_test_request_result() result._content_type = 'xml' result._data = 'test' @@ -126,7 +126,7 @@ async def test_future_string_value(self): async def test_failed_future(self): j = JsonBuilder() - f = Future() + f: Future = Future() result = TestDoc.get_test_request_result() result._data_parse_error = DataParseError(reason='error', code='code') f.set_result(result) @@ -134,11 +134,11 @@ async def test_failed_future(self): self.assertEqual(j.to_dict(), {'error': {'reason': 'error', 'code': 'code'}}) - def test_nested_future(self): + def test_nested_future(self) -> None: j = JsonBuilder() - f1 = Future() - 
f2 = Future() - f3 = Future() + f1: Future = Future() + f2: Future = Future() + f3: Future = Future() f1.set_result({'nested': f2}) j.put(f1) @@ -152,8 +152,8 @@ def test_nested_future(self): async def test_nested_future_error_node(self): j = JsonBuilder() - f1 = Future() - f2 = Future() + f1: Future = Future() + f2: Future = Future() f1.set_result({'nested': f2}) j.put(f1) @@ -162,15 +162,11 @@ async def test_nested_future_error_node(self): result = TestDoc.get_test_request_result() result._data_parse_error = DataParseError(reason='error', code='code') - f2.set_result( - {'a': result} - ) + f2.set_result({'a': result}) - self.assertEqual( - j.to_dict(), {'nested': {'a': {'error': {'reason': 'error', 'code': 'code'}}}} - ) + self.assertEqual(j.to_dict(), {'nested': {'a': {'error': {'reason': 'error', 'code': 'code'}}}}) - def test_nested_json_builder(self): + def test_nested_json_builder(self) -> None: j1 = JsonBuilder() j1.put(k1='v1') @@ -179,20 +175,18 @@ def test_nested_json_builder(self): j1.put(j2) - self.assertEqual( - j1.to_dict(), {'k2': 'v2', 'k1': 'v1'} - ) + self.assertEqual(j1.to_dict(), {'k2': 'v2', 'k1': 'v1'}) - def test_dict_put_invalid(self): + def test_dict_put_invalid(self) -> None: j = JsonBuilder() j.put({'a': 'b'}) j.put(['c']) self.assertRaises(ValueError, j.to_dict) - def test_to_dict(self): + def test_to_dict(self) -> None: class Serializable: - def __init__(self, name, values): + def __init__(self, name: str, values: list[str]) -> None: self.name = name self.values = values @@ -202,6 +196,4 @@ def to_dict(self): j = JsonBuilder() j.put(Serializable('some', ['test1', 'test2', 'test3'])) - self.assertEqual( - j.to_dict(), {'some': ['test1', 'test2', 'test3']} - ) + self.assertEqual(j.to_dict(), {'some': ['test1', 'test2', 'test3']}) diff --git a/tests/test_kafka_integration.py b/tests/test_kafka_integration.py index aeed9d869..9adf8c1ec 100644 --- a/tests/test_kafka_integration.py +++ b/tests/test_kafka_integration.py @@ -1,7 +1,6 @@ import unittest try: - import aiokafka has_kafka = True except Exception: has_kafka = False diff --git a/tests/test_logging.py b/tests/test_logging.py index c83031878..6f0334e3f 100644 --- a/tests/test_logging.py +++ b/tests/test_logging.py @@ -6,11 +6,16 @@ from tornado.escape import to_unicode +from tests import FRONTIK_ROOT from tests.instances import FrontikTestInstance +FRONTIK_RUN = f'{FRONTIK_ROOT}/frontik-test' +TEST_PROJECTS = f'{FRONTIK_ROOT}/tests/projects' + class TestSyslog(unittest.TestCase): - test_app = None + test_app: FrontikTestInstance = None # type: ignore + s: socket.socket = None # type: ignore @classmethod def setUpClass(cls): @@ -21,9 +26,9 @@ def setUpClass(cls): port = cls.s.getsockname()[1] cls.test_app = FrontikTestInstance( - './frontik-test --app=tests.projects.test_app --config=tests/projects/frontik_debug.cfg ' + f'{FRONTIK_RUN} --app=tests.projects.test_app --config={TEST_PROJECTS}/frontik_debug.cfg ' f'--syslog=true --consul_enabled=False --syslog_host=127.0.0.1 --syslog_tag=test' - f' --log_level=debug --syslog_port={port}' + f' --log_level=debug --syslog_port={port}', ) @classmethod @@ -51,57 +56,41 @@ def test_send_to_syslog(self): self.assertRegex(log, syslog_line_regexp) match = re.match(syslog_line_regexp, log) - priority, tag, message = match.groups() - - parsed_logs[tag].append({ - 'priority': priority, - 'message': message - }) + if match is not None: + priority, tag, message = match.groups() + parsed_logs[tag].append({'priority': priority, 'message': message}) expected_service_logs = [ { 
'priority': '14', 'message': { - 'lvl': 'INFO', 'logger': r'server', 'msg': r'starting application tests\.projects\.test_app' - } - }, - { - 'priority': '14', - 'message': { - 'lvl': 'INFO', 'logger': r'frontik\.routing', 'msg': 'requested url: /log' - } - }, - { - 'priority': '15', - 'message': { - 'lvl': 'DEBUG', 'logger': r'handler', 'msg': 'debug' - } - }, - { - 'priority': '14', - 'message': { - 'lvl': 'INFO', 'logger': r'handler', 'msg': 'info' - } + 'lvl': 'INFO', + 'logger': r'server', + 'msg': r'starting application tests\.projects\.test_app', + }, }, + {'priority': '14', 'message': {'lvl': 'INFO', 'logger': r'frontik\.routing', 'msg': 'requested url: /log'}}, + {'priority': '15', 'message': {'lvl': 'DEBUG', 'logger': r'handler', 'msg': 'debug'}}, + {'priority': '14', 'message': {'lvl': 'INFO', 'logger': r'handler', 'msg': 'info'}}, { 'priority': '11', 'message': { - 'lvl': 'ERROR', 'logger': r'handler', 'msg': 'exception', 'exception': '.*raise Exception.*' - } + 'lvl': 'ERROR', + 'logger': r'handler', + 'msg': 'exception', + 'exception': '.*raise Exception.*', + }, }, { 'priority': '11', 'message': { - 'lvl': 'ERROR', 'logger': r'handler', 'msg': 'error', - 'exception': r".*self\.log\.error\('error', stack_info=True\)" - } - }, - { - 'priority': '10', - 'message': { - 'lvl': 'CRITICAL', 'logger': r'handler', 'msg': 'critical' - } + 'lvl': 'ERROR', + 'logger': r'handler', + 'msg': 'error', + 'exception': r".*self\.log\.error\('error', stack_info=True\)", + }, }, + {'priority': '10', 'message': {'lvl': 'CRITICAL', 'logger': r'handler', 'msg': 'critical'}}, ] self.assert_json_logs_match(expected_service_logs, parsed_logs['test/service.slog/']) @@ -109,9 +98,7 @@ def test_send_to_syslog(self): expected_requests_logs = [ { 'priority': '14', - 'message': { - 'ip': '.+', 'rid': '.+', 'status': '200', 'time': '.+', 'method': 'GET', 'uri': '/log' - } + 'message': {'ip': '.+', 'rid': '.+', 'status': '200', 'time': '.+', 'method': 'GET', 'uri': '/log'}, }, ] @@ -121,27 +108,26 @@ def test_send_to_syslog(self): { 'priority': '10', 'message': r'\[\d+\] [\d-]+ [\d:,]+ CRITICAL ' - r'custom_logger\.tests\.projects\.test_app\.pages\.log\.Page\.\w+: fatal' + r'custom_logger\.tests\.projects\.test_app\.pages\.log\.Page\.\w+: fatal', }, ] self.assert_text_logs_match(expected_custom_logs, parsed_logs['test/custom_logger.log/']) - def assert_json_logs_match(self, expected_logs, parsed_logs): + def assert_json_logs_match(self, expected_logs: list, parsed_logs: list) -> None: for expected_log in expected_logs: for actual_log in parsed_logs: priority = actual_log['priority'] message = json.loads(actual_log['message']) - if ( - priority == expected_log['priority'] and - all(re.match(v, str(message[k]), re.DOTALL) for k, v in expected_log['message'].items()) + if priority == expected_log['priority'] and all( + re.match(v, str(message[k]), re.DOTALL) for k, v in expected_log['message'].items() ): break else: self.fail(f'Log message not found: {expected_log}') - def assert_text_logs_match(self, expected_logs, parsed_logs): + def assert_text_logs_match(self, expected_logs: list, parsed_logs: list) -> None: for expected_log in expected_logs: for actual_log in parsed_logs: priority = actual_log['priority'] diff --git a/tests/test_logging_configurator_client.py b/tests/test_logging_configurator_client.py index 7b7563119..a42fa19de 100644 --- a/tests/test_logging_configurator_client.py +++ b/tests/test_logging_configurator_client.py @@ -1,10 +1,9 @@ import logging import unittest -from typing import List import 
pytest as pytest -from frontik.loggers.logleveloverride.log_level_override_extension import LogLevelOverrideExtension, LogLevelOverride +from frontik.loggers.logleveloverride.log_level_override_extension import LogLevelOverride, LogLevelOverrideExtension from frontik.loggers.logleveloverride.logging_configurator_client import LoggingConfiguratorClient MOCK_LOG_OVERRIDE_DTO = [ @@ -15,13 +14,11 @@ class TestLogLevelOverrideExtension(LogLevelOverrideExtension): - - async def load_log_level_overrides(self) -> List[LogLevelOverride]: + async def load_log_level_overrides(self) -> list[LogLevelOverride]: return MOCK_LOG_OVERRIDE_DTO class TestLoggingConfiguratorClient(unittest.TestCase): - def setUp(self) -> None: self.logging_configurator_client = LoggingConfiguratorClient(TestLogLevelOverrideExtension()) self.logging_configurator_client.stop_logging_configurator() @@ -35,12 +32,12 @@ def tearDown(self) -> None: MOCK_LOG_OVERRIDE_DTO.append(LogLevelOverride('b', 'INFO')) MOCK_LOG_OVERRIDE_DTO.append(LogLevelOverride('c', 'WARN')) - @pytest.mark.asyncio + @pytest.mark.asyncio() async def test_simple_override(self): await self.logging_configurator_client._update_log_level() self.assertEqual(len(self.logging_configurator_client._loggers_store), 3) - @pytest.mark.asyncio + @pytest.mark.asyncio() async def test_override_and_remove(self): await self.logging_configurator_client._update_log_level() self.assertEqual(len(self.logging_configurator_client._loggers_store), 3) @@ -50,7 +47,7 @@ async def test_override_and_remove(self): await self.logging_configurator_client._update_log_level() self.assertEqual(len(self.logging_configurator_client._loggers_store), 0) - @pytest.mark.asyncio + @pytest.mark.asyncio() async def test_override_and_after_change_level(self): await self.logging_configurator_client._update_log_level() self.assertEqual(logging.getLogger('a').level, logging.DEBUG) @@ -61,14 +58,14 @@ async def test_override_and_after_change_level(self): await self.logging_configurator_client._update_log_level() self.assertEqual(logging.getLogger('a').level, logging.INFO) - @pytest.mark.asyncio + @pytest.mark.asyncio() async def test_level_with_handlers(self): logging.getLogger().handlers.append(logging.Handler()) await self.logging_configurator_client._update_log_level() self.assertEqual(logging.getLogger('a').level, logging.DEBUG) self.assertEqual(logging.getLogger('a').handlers[0].level, logging.DEBUG) self.assertEqual(logging.getLogger('b').handlers[0].level, logging.INFO) - self.assertEqual(logging.getLogger('c').handlers[0].level, logging.WARN) + self.assertEqual(logging.getLogger('c').handlers[0].level, logging.WARNING) MOCK_LOG_OVERRIDE_DTO.clear() @@ -80,7 +77,7 @@ async def test_level_with_handlers(self): self.assertEqual(logging.getLogger('b').handlers[0].level, logging.INFO) self.assertEqual(logging.getLogger('c').handlers[0].level, logging.INFO) - @pytest.mark.asyncio + @pytest.mark.asyncio() async def test_not_add_root_handlers_if_exist_on_specific_logger(self): logging.getLogger().handlers.append(logging.Handler()) logging.getLogger().handlers.append(logging.Handler()) diff --git a/tests/test_mandatory_headers.py b/tests/test_mandatory_headers.py index 41757d72c..ad82da58e 100644 --- a/tests/test_mandatory_headers.py +++ b/tests/test_mandatory_headers.py @@ -8,21 +8,21 @@ def test_set_mandatory_headers(self): response = frontik_test_app.get_page('mandatory_headers?test_mandatory_headers') self.assertEqual(response.status_code, 500) self.assertEqual(response.headers.get('TEST_HEADER'), 
'TEST_HEADER_VALUE')
-        self.assertEqual(response.cookies.get('TEST_COOKIE'), 'TEST_HEADER_COOKIE')
+        self.assertEqual(response.cookies.get('TEST_COOKIE'), 'TEST_HEADER_COOKIE')  # type: ignore

-    def test_mandatory_headers_are_lost(self):
+    def test_mandatory_headers_are_lost(self) -> None:
         response = frontik_test_app.get_page('mandatory_headers?test_without_mandatory_headers')
         self.assertEqual(response.status_code, 500)
         self.assertIsNone(response.headers.get('TEST_HEADER'))
         self.assertIsNone(response.headers.get('TEST_COOKIE'))

-    def test_mandatory_headers_are_cleared(self):
+    def test_mandatory_headers_are_cleared(self) -> None:
         response = frontik_test_app.get_page('mandatory_headers?test_clear_set_mandatory_headers')
         self.assertEqual(response.status_code, 500)
         self.assertIsNone(response.headers.get('TEST_HEADER'))
         self.assertIsNone(response.headers.get('TEST_COOKIE'))

-    def test_clear_not_set_headers_does_not_faile(self):
+    def test_clear_not_set_headers_does_not_fail(self) -> None:
         response = frontik_test_app.get_page('mandatory_headers?test_clear_not_set_headers')
         self.assertEqual(response.status_code, 500)
         self.assertIsNone(response.headers.get('TEST_HEADER'))
diff --git a/tests/test_no_debug_mode.py b/tests/test_no_debug_mode.py
index e9c10fa41..23ff13686 100644
--- a/tests/test_no_debug_mode.py
+++ b/tests/test_no_debug_mode.py
@@ -22,21 +22,24 @@ def test_basic_auth_fail_async(self):

     def test_basic_auth_fail_on_wrong_pass(self):
         response = frontik_no_debug_app.get_page(
-            'basic_auth', headers={'Authorization': create_basic_auth_header('user:bad')}
+            'basic_auth',
+            headers={'Authorization': create_basic_auth_header('user:bad')},
         )

         self.assertEqual(response.status_code, 401)

     def test_basic_auth_fail_on_wrong_pass_async(self):
         response = frontik_no_debug_app.get_page(
-            'basic_auth_async', headers={'Authorization': create_basic_auth_header('user:bad')}
+            'basic_auth_async',
+            headers={'Authorization': create_basic_auth_header('user:bad')},
         )

         self.assertEqual(response.status_code, 401)

     def test_basic_auth_pass(self):
         response = frontik_no_debug_app.get_page(
-            'basic_auth', headers={'Authorization': create_basic_auth_header('user:god')}
+            'basic_auth',
+            headers={'Authorization': create_basic_auth_header('user:god')},
         )

         self.assertEqual(response.status_code, 200)
@@ -44,7 +47,8 @@ def test_basic_auth_pass(self):

     def test_basic_auth_pass_async(self):
         response = frontik_no_debug_app.get_page(
-            'basic_auth_async', headers={'Authorization': create_basic_auth_header('user:god')}
+            'basic_auth_async',
+            headers={'Authorization': create_basic_auth_header('user:god')},
         )

         self.assertEqual(response.status_code, 200)
diff --git a/tests/test_preprocessors.py b/tests/test_preprocessors.py
index 371741060..2ae4ce1e9 100644
--- a/tests/test_preprocessors.py
+++ b/tests/test_preprocessors.py
@@ -12,26 +12,18 @@ def test_preprocessors(self):
         self.assertEqual(
             response_json,
             {
-                'run': [
-                    'pp01', 'pp02', 'pp1-before', 'pp1-between', 'pp1-after', 'pp2', 'pp3', 'get_page'
-                ],
+                'run': ['pp01', 'pp02', 'pp1-before', 'pp1-between', 'pp1-after', 'pp2', 'pp3', 'get_page'],
                 'put_request_finished': True,
                 'put_request_preprocessors': ['pp01', 'pp02'],
-                'postprocessor': True
-            }
+                'postprocessor': True,
+            },
         )

     def test_preprocessor_futures(self):
         response_json = frontik_test_app.get_page_json('preprocessors/preprocessor_futures')
         self.assertEqual(
             response_json,
-            {
-                'preprocessors': [
-                    'should_finish_first',
-                    'should_finish_second',
-                    'should_finish_third'
-                ]
-            }
+            {'preprocessors': 
['should_finish_first', 'should_finish_second', 'should_finish_third']}, ) def test_was_preprocessor_called(self): @@ -43,7 +35,7 @@ def test_was_preprocessor_called(self): 'pp1': True, 'pp2': True, 'pp3': False, - } + }, ) def test_was_async_preprocessor_called(self): @@ -55,19 +47,14 @@ def test_was_async_preprocessor_called(self): 'pp1': True, 'pp2': True, 'pp3': False, - } + }, ) def test_priority_preprocessors(self): response_json = frontik_test_app.get_page_json('preprocessors/priority_preprocessors') - self.assertEqual( - response_json, - { - 'order': ['pp0', 'pp2', 'pp1', 'pp3'] - } - ) + self.assertEqual(response_json, {'order': ['pp0', 'pp2', 'pp1', 'pp3']}) - def test_add_preprocessor_future_after_preprocessors(self): + def test_add_preprocessor_future_after_preprocessors(self) -> None: response = frontik_test_app.get_page('preprocessors/preprocessor_futures', method=requests.post) self.assertEqual(response.status_code, 500) @@ -77,15 +64,11 @@ def test_add_preprocessor_future_return_value(self): def test_preprocessors_abort(self): response_json = frontik_test_app.get_page_json('preprocessors/aborted?abort_preprocessors=true') - self.assertEqual( - response_json, {'run': ['before', 'pp'], 'put_request_finished': True, 'postprocessor': True} - ) + self.assertEqual(response_json, {'run': ['before', 'pp'], 'put_request_finished': True, 'postprocessor': True}) def test_preprocessors_abort_nowait(self): response_json = frontik_test_app.get_page_json('preprocessors/aborted?abort_preprocessors_nowait=true') - self.assertEqual( - response_json, {'run': ['before', 'pp'], 'postprocessor': True} - ) + self.assertEqual(response_json, {'run': ['before', 'pp'], 'postprocessor': True}) def test_preprocessors_raise_error(self): response = frontik_test_app.get_page('preprocessors/aborted?raise_error=true') @@ -104,7 +87,10 @@ def test_preprocessors_finish(self): def test_preprocessors_redirect(self): response = frontik_test_app.get_page('preprocessors/aborted?redirect=true', allow_redirects=False) self.assertEqual(response.status_code, 302) - self.assertIn('redirected', response.headers.get('Location')) + location = response.headers.get('Location') + self.assertTrue(isinstance(location, str)) + if isinstance(location, str): + self.assertIn('redirected', location) def test_finish_in_nonblocking_group_preprocessor(self): response = frontik_test_app.get_page('preprocessors/aborted_nonblocking_group?finish=true') diff --git a/tests/test_pycodestyle.py b/tests/test_pycodestyle.py deleted file mode 100644 index 6767172c4..000000000 --- a/tests/test_pycodestyle.py +++ /dev/null @@ -1,21 +0,0 @@ -from functools import partial -import os.path -import unittest - -import pycodestyle - -from tests import FRONTIK_ROOT - - -class TestPycodestyle(unittest.TestCase): - CHECKED_PATHS = ('frontik', 'tests', 'examples', 'frontik-test') - - def test_pycodestyle(self): - style_guide = pycodestyle.StyleGuide( - show_pep8=False, - show_source=True, - max_line_length=120, - ignore=['E731', 'W504'] - ) - result = style_guide.check_files(map(partial(os.path.join, FRONTIK_ROOT), TestPycodestyle.CHECKED_PATHS)) - self.assertEqual(result.total_errors, 0, 'Pycodestyle found code style errors or warnings') diff --git a/tests/test_request_context.py b/tests/test_request_context.py index b81d25b71..87c55efa3 100644 --- a/tests/test_request_context.py +++ b/tests/test_request_context.py @@ -7,11 +7,14 @@ class TestRequestContext(unittest.TestCase): def test_request_context(self): json = 
frontik_test_app.get_page_json('request_context') - self.assertEqual(json, { - 'page': 'request_context', - 'callback': 'request_context', - 'executor': None, - 'future': 'request_context', - 'coroutine_before_yield': 'request_context', - 'coroutine_after_yield': 'request_context' - }) + self.assertEqual( + json, + { + 'page': 'request_context', + 'callback': 'request_context', + 'executor': None, + 'future': 'request_context', + 'coroutine_before_yield': 'request_context', + 'coroutine_after_yield': 'request_context', + }, + ) diff --git a/tests/test_routing.py b/tests/test_routing.py index 563b0ed61..cbf2a3738 100644 --- a/tests/test_routing.py +++ b/tests/test_routing.py @@ -1,7 +1,6 @@ import unittest from frontik.routing import MAX_MODULE_NAME_LENGTH - from tests.instances import frontik_re_app, frontik_test_app @@ -34,16 +33,16 @@ def test_rewrite_single(self): html = frontik_re_app.get_page_text('id/some') self.assertIn('some', html) - def test_rewrite_multiple(self): + def test_rewrite_multiple(self) -> None: values = ('some', 'another') html = frontik_re_app.get_page_text('id/{}'.format(','.join(values))) self.assertTrue(all(map(html.find, values))) - def test_error_on_import(self): + def test_error_on_import(self) -> None: response = frontik_test_app.get_page('error_on_import') self.assertEqual(response.status_code, 500) - def test_error_on_import_of_module_having_module_not_found_error(self): + def test_error_on_import_of_module_having_module_not_found_error(self) -> None: response = frontik_test_app.get_page('module_not_found_error_on_import') self.assertEqual(response.status_code, 500) @@ -75,13 +74,16 @@ def test_filemapping_custom_404_for_complex_path(self): def test_reverse_url(self): json = frontik_re_app.get_page_json('reverse_url') - self.assertEqual(json, { - 'args': '/id/1/2', - 'args_and_kwargs': '/id/1/2', - 'kwargs': '/id/1/2', - }) - - def test_reverse_url_fail(self): + self.assertEqual( + json, + { + 'args': '/id/1/2', + 'args_and_kwargs': '/id/1/2', + 'kwargs': '/id/1/2', + }, + ) + + def test_reverse_url_fail(self) -> None: response = frontik_re_app.get_page('reverse_url?fail_args=true') self.assertEqual(response.status_code, 500) diff --git a/tests/test_sentry_integration.py b/tests/test_sentry_integration.py index 8b2d53826..70e4f7f4a 100644 --- a/tests/test_sentry_integration.py +++ b/tests/test_sentry_integration.py @@ -57,11 +57,11 @@ def test_sentry_not_configured(self): self.assertEqual(200, frontik_re_app.get_page('sentry_not_configured').status_code) @staticmethod - def _get_sentry_messages(): + def _get_sentry_messages() -> list: sentry_json = frontik_test_app.get_page_json('api/2/store') return sentry_json['exceptions'] @staticmethod - def _get_sentry_exceptions(name): + def _get_sentry_exceptions(name: str) -> list: sentry_json = frontik_test_app.get_page_json('api/2/store') return list(filter(lambda e: e['exception']['values'][0]['value'] == name, sentry_json['exceptions'])) diff --git a/tests/test_service_discovery.py b/tests/test_service_discovery.py index c54e9feee..dceda09bb 100644 --- a/tests/test_service_discovery.py +++ b/tests/test_service_discovery.py @@ -1,5 +1,5 @@ -import unittest import sys +import unittest import pytest @@ -11,24 +11,26 @@ class TestServiceDiscovery(unittest.TestCase): - - def setUp(self): + def setUp(self) -> None: self.consul_mock = FrontikTestInstance( f'{FRONTIK_RUN} --app=tests.projects.consul_mock_app {common_frontik_start_options} ' - f' --config={TEST_PROJECTS}/frontik_consul_mock.cfg') + f' 
--config={TEST_PROJECTS}/frontik_consul_mock.cfg', + ) self.consul_mock.start() self.frontik_single_worker_app = FrontikTestInstance( f'{FRONTIK_RUN} --app=tests.projects.no_debug_app {common_frontik_start_options} ' f' --config={TEST_PROJECTS}/frontik_no_debug.cfg --consul_port={self.consul_mock.port} ' f' --consul_enabled=True' - f' --fail_start_on_empty_upstream=False') + f' --fail_start_on_empty_upstream=False', + ) self.frontik_multiple_worker_app = FrontikTestInstance( f'{FRONTIK_RUN} --app=tests.projects.no_debug_app {common_frontik_start_options} ' f' --config={TEST_PROJECTS}/frontik_no_debug.cfg --consul_port={self.consul_mock.port} --workers=3' f' --consul_enabled=True' - f' --fail_start_on_empty_upstream=False') + f' --fail_start_on_empty_upstream=False', + ) - def tearDown(self): + def tearDown(self) -> None: self.frontik_single_worker_app.stop() self.frontik_multiple_worker_app.stop() self.consul_mock.stop() diff --git a/tests/test_service_start.py b/tests/test_service_start.py index c2f2f10ab..acb811cb3 100644 --- a/tests/test_service_start.py +++ b/tests/test_service_start.py @@ -1,20 +1,24 @@ import unittest +from tests import FRONTIK_ROOT from tests.instances import FrontikTestInstance +FRONTIK_RUN = f'{FRONTIK_ROOT}/frontik-test' + class TestServiceStart(unittest.TestCase): - def app_run(self, parameters): + def app_run(self, parameters: str) -> None: service = FrontikTestInstance(parameters) service.start() response = service.get_page('status') self.assertEqual(response.status_code, 200) service.stop() - def test_with_only_app(self): - self.app_run('./frontik-test --app=tests.projects.test_app' - f' --syslog=false --consul_enabled=False') + def test_with_only_app(self) -> None: + self.app_run(f'{FRONTIK_RUN} --app=tests.projects.test_app --syslog=false --consul_enabled=False') - def test_with_app_class(self): - self.app_run(f'./frontik-test --app=test-app --app_class=tests.projects.test_app.TestApplication' - f' --syslog=false --consul_enabled=False') + def test_with_app_class(self) -> None: + self.app_run( + f'{FRONTIK_RUN} --app=test-app --app_class=tests.projects.test_app.TestApplication' + f' --syslog=false --consul_enabled=False', + ) diff --git a/tests/test_statsd_integration.py b/tests/test_statsd_integration.py index 0728ea582..6d7c60d1a 100644 --- a/tests/test_statsd_integration.py +++ b/tests/test_statsd_integration.py @@ -3,8 +3,12 @@ from tornado.escape import to_unicode +from tests import FRONTIK_ROOT from tests.instances import FrontikTestInstance +FRONTIK_RUN = f'{FRONTIK_ROOT}/frontik-test' +TEST_PROJECTS = f'{FRONTIK_ROOT}/tests/projects' + class TestStatsdIntegration(unittest.TestCase): def test_send_to_statsd(self): @@ -15,8 +19,8 @@ def test_send_to_statsd(self): port = statsd_socket.getsockname()[1] test_app = FrontikTestInstance( - './frontik-test --app=tests.projects.test_app --config=tests/projects/frontik_debug.cfg ' - f'--statsd_host=127.0.0.1 --consul_enabled=False --statsd_port={port}' + f'{FRONTIK_RUN} --app=tests.projects.test_app --config={TEST_PROJECTS}/frontik_debug.cfg ' + f'--statsd_host=127.0.0.1 --consul_enabled=False --statsd_port={port}', ) test_app.get_page('statsd') diff --git a/tests/test_telemetry.py b/tests/test_telemetry.py index bd9812a55..2a778d200 100644 --- a/tests/test_telemetry.py +++ b/tests/test_telemetry.py @@ -1,12 +1,14 @@ import unittest +from collections.abc import Sequence +from typing import Any from opentelemetry import trace from opentelemetry.sdk.resources import Resource from opentelemetry.sdk.trace import 
TracerProvider -from opentelemetry.sdk.trace.export import (BatchSpanProcessor, SpanExporter, - SpanExportResult) +from opentelemetry.sdk.trace.export import BatchSpanProcessor, ReadableSpan, SpanExporter, SpanExportResult from opentelemetry.sdk.trace.sampling import ParentBased, TraceIdRatioBased from opentelemetry.semconv.resource import ResourceAttributes +from tornado.httputil import HTTPServerRequest from tornado.ioloop import IOLoop from tornado.testing import gen_test @@ -19,46 +21,46 @@ class TestTelemetry(unittest.TestCase): - def setUp(self): + def setUp(self) -> None: self.trace_id_generator = FrontikIdGenerator() - def test_generate_trace_id_with_none_request_id(self): + def test_generate_trace_id_with_none_request_id(self) -> None: trace_id = self.trace_id_generator.generate_trace_id() self.assertIsNotNone(trace_id) - def test_generate_trace_id_with_hex_request_id(self): - request_context.initialize(None, '163897206709842601f90a070699ac44') + def test_generate_trace_id_with_hex_request_id(self) -> None: + request_context.initialize(HTTPServerRequest(), '163897206709842601f90a070699ac44') trace_id = self.trace_id_generator.generate_trace_id() self.assertEqual('0x163897206709842601f90a070699ac44', hex(trace_id)) - def test_generate_trace_id_with_no_hex_request_id(self): - request_context.initialize(None, 'non-hex-string-1234') + def test_generate_trace_id_with_no_hex_request_id(self) -> None: + request_context.initialize(HTTPServerRequest(), 'non-hex-string-1234') trace_id = self.trace_id_generator.generate_trace_id() self.assertIsNotNone(trace_id) - def test_generate_trace_id_with_no_str_request_id(self): - request_context.initialize(None, 12345678910) + def test_generate_trace_id_with_no_str_request_id(self) -> None: + request_context.initialize(HTTPServerRequest(), 12345678910) # type: ignore trace_id = self.trace_id_generator.generate_trace_id() self.assertIsNotNone(trace_id) - def test_generate_trace_id_with_hex_request_id_and_postfix(self): - request_context.initialize(None, '163897206709842601f90a070699ac44_some_postfix_string') + def test_generate_trace_id_with_hex_request_id_and_postfix(self) -> None: + request_context.initialize(HTTPServerRequest(), '163897206709842601f90a070699ac44_some_postfix_string') trace_id = self.trace_id_generator.generate_trace_id() self.assertEqual('0x163897206709842601f90a070699ac44', hex(trace_id)) - def test_generate_trace_id_with_no_hex_request_id_in_first_32_characters(self): - request_context.initialize(None, '16389720670_NOT_HEX_9842601f90a070699ac44_some_postfix_string') + def test_generate_trace_id_with_no_hex_request_id_in_first_32_characters(self) -> None: + request_context.initialize(HTTPServerRequest(), '16389720670_NOT_HEX_9842601f90a070699ac44_some_postfix_string') trace_id = self.trace_id_generator.generate_trace_id() self.assertIsNotNone(trace_id) self.assertNotEqual('0x16389720670_NOT_HEX_9842601f90a0', hex(trace_id)) - def test_generate_trace_id_with_request_id_len_less_32_characters(self): - request_context.initialize(None, '163897206') + def test_generate_trace_id_with_request_id_len_less_32_characters(self) -> None: + request_context.initialize(HTTPServerRequest(), '163897206') trace_id = self.trace_id_generator.generate_trace_id() self.assertIsNotNone(trace_id) self.assertNotEqual('0x163897206', hex(trace_id)) - def test_get_netloc(self): + def test_get_netloc(self) -> None: self.assertEqual('balancer:7000', get_netloc('balancer:7000/xml/get-article/')) self.assertEqual('balancer:7000', 
get_netloc('//balancer:7000/xml/get-article/')) self.assertEqual('balancer:7000', get_netloc('https://balancer:7000/xml/get-article/')) @@ -77,28 +79,32 @@ async def get_page(self): self.json.put({}) -def make_otel_provider(): - resource = Resource(attributes={ - ResourceAttributes.SERVICE_NAME: options.app, - ResourceAttributes.SERVICE_VERSION: '1.2.3', - ResourceAttributes.HOST_NAME: options.node_name, - ResourceAttributes.CLOUD_REGION: 'test', - }) - provider = TracerProvider(resource=resource, - id_generator=FrontikIdGenerator(), - sampler=ParentBased(TraceIdRatioBased(options.opentelemetry_sampler_ratio))) +def make_otel_provider() -> TracerProvider: + resource = Resource( + attributes={ + ResourceAttributes.SERVICE_NAME: options.app, # type: ignore + ResourceAttributes.SERVICE_VERSION: '1.2.3', + ResourceAttributes.HOST_NAME: options.node_name, + ResourceAttributes.CLOUD_REGION: 'test', + }, + ) + provider = TracerProvider( + resource=resource, + id_generator=FrontikIdGenerator(), + sampler=ParentBased(TraceIdRatioBased(options.opentelemetry_sampler_ratio)), + ) return provider -SPAN_STORAGE = [] +SPAN_STORAGE: list[ReadableSpan] = [] -def find_span(attr, value): - return next(filter(lambda item: item.attributes.get(attr, None) == value, SPAN_STORAGE), None) +def find_span(attr: str, value: Any) -> ReadableSpan | None: + return next(filter(lambda item: item.attributes.get(attr, None) == value, SPAN_STORAGE), None) # type: ignore class TestExporter(SpanExporter): - def export(self, spans) -> SpanExportResult: + def export(self, spans: Sequence[ReadableSpan]) -> SpanExportResult: SPAN_STORAGE.extend(spans) return SpanExportResult.SUCCESS @@ -110,7 +116,7 @@ def force_flush(self, *args, **kwargs): class TestFrontikTesting(FrontikTestCase): - def setUp(self): + def setUp(self) -> None: options.consul_enabled = False options.opentelemetry_enabled = True options.opentelemetry_sampler_ratio = 1 @@ -136,7 +142,7 @@ def application_urls(self): return app @gen_test - async def test_parent_span(self): + async def test_parent_span(self) -> None: url = self.get_url('/page_a') await self.http_client.fetch(url) self.batch_span_processor.force_flush() @@ -145,5 +151,7 @@ async def test_parent_span(self): server_b_span = find_span('http.target', '/page_b') SPAN_STORAGE.clear() + assert client_a_span is not None assert client_a_span.parent is not None + assert server_b_span is not None assert server_b_span.parent is not None diff --git a/tests/test_unicode.py b/tests/test_unicode.py index 07fd99811..d28b4188e 100644 --- a/tests/test_unicode.py +++ b/tests/test_unicode.py @@ -3,7 +3,6 @@ from tornado.escape import to_unicode from frontik.util import make_url - from tests.instances import frontik_test_app @@ -33,14 +32,14 @@ def test_cp1251_argument_async(self): self.assertEqual(to_unicode(response.content), '{"тест": "тест"}') def test_argument_with_invalid_chars(self): - arg_with_invalid_chars = '≤'.encode('koi8_r') + 'тест'.encode('utf-8') + arg_with_invalid_chars = '≤'.encode('koi8_r') + 'тест'.encode() response = frontik_test_app.get_page(make_url('arguments', param=arg_with_invalid_chars)) self.assertEqual(response.status_code, 200) self.assertEqual(to_unicode(response.content), '{"тест": "тест"}') def test_argument_with_invalid_chars_async(self): - arg_with_invalid_chars = '≤'.encode('koi8_r') + 'тест'.encode('utf-8') + arg_with_invalid_chars = '≤'.encode('koi8_r') + 'тест'.encode() response = frontik_test_app.get_page(make_url('arguments_async', param=arg_with_invalid_chars)) 
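# --- Editor's note (sketch, not part of the diff): frontik.util.make_url, used by
# --- tests/test_unicode.py above, appends query parameters to a path and accepts
# --- bytes values, so arguments in mixed encodings still produce a usable URL.
# --- The exact percent-encoded output shown below is an assumption:
from frontik.util import make_url

url = make_url('arguments', param='тест'.encode())
print(url)  # e.g. 'arguments?param=%D1%82%D0%B5%D1%81%D1%82'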
         self.assertEqual(response.status_code, 200)
diff --git a/tests/test_upstream_caches.py b/tests/test_upstream_caches.py
index 1b707658f..99ce60cfe 100644
--- a/tests/test_upstream_caches.py
+++ b/tests/test_upstream_caches.py
@@ -1,21 +1,21 @@
 import asyncio
 import os
+import sys
 import time
 import unittest
 from queue import Queue
 from threading import Thread

+import pytest
+from http_client import options as http_client_options
+from http_client.balancing import Server, Upstream, UpstreamConfig
+
 from frontik.options import options
 from frontik.service_discovery import UpstreamCaches, UpstreamUpdateListener
-from http_client import options as http_client_options
-from http_client.balancing import Upstream, Server, UpstreamConfig
-import pytest
-import sys


 class TestUpstreamCaches(unittest.TestCase):
-
-    def test_update_upstreams_servers_different_dc(self):
+    def test_update_upstreams_servers_different_dc(self) -> None:
         options.upstreams = ['app']
         http_client_options.datacenters = ['Test', 'AnoTher']
         value_one_dc = [
@@ -31,12 +31,9 @@ def test_update_upstreams_servers_different_dc(self):
                     'Service': 'app',
                     'Address': '',
                     'Port': 9999,
-                    'Weights': {
-                        'Passing': 100,
-                        'Warning': 0
-                    }
-                }
-            }
+                    'Weights': {'Passing': 100, 'Warning': 0},
+                },
+            },
         ]

         value_another_dc = [
@@ -52,12 +49,9 @@ def test_update_upstreams_servers_different_dc(self):
                     'Service': 'app',
                     'Address': '',
                     'Port': 9999,
-                    'Weights': {
-                        'Passing': 100,
-                        'Warning': 0
-                    }
-                }
-            }
+                    'Weights': {'Passing': 100, 'Warning': 0},
+                },
+            },
         ]

         upstream_cache = UpstreamCaches({}, {})
@@ -67,7 +61,7 @@ def test_update_upstreams_servers_different_dc(self):
         self.assertEqual(len(upstream_cache._upstreams_servers), 2)
         self.assertEqual(len(upstream_cache._upstreams['app'].servers), 2)

-    def test_update_upstreams_servers_same_dc(self):
+    def test_update_upstreams_servers_same_dc(self) -> None:
         options.upstreams = ['app']
         http_client_options.datacenters = ['test', 'another']
         value_one_dc = [
@@ -83,12 +77,9 @@ def test_update_upstreams_servers_same_dc(self):
                     'Service': 'app',
                     'Address': '',
                     'Port': 9999,
-                    'Weights': {
-                        'Passing': 100,
-                        'Warning': 0
-                    }
-                }
-            }
+                    'Weights': {'Passing': 100, 'Warning': 0},
+                },
+            },
         ]

         upstream_cache = UpstreamCaches({}, {})
@@ -98,7 +89,7 @@ def test_update_upstreams_servers_same_dc(self):
         self.assertEqual(len(upstream_cache._upstreams_servers), 1)
         self.assertEqual(len(upstream_cache._upstreams['app'].servers), 1)

-    def test_multiple_update_upstreams_servers_different_dc(self):
+    def test_multiple_update_upstreams_servers_different_dc(self) -> None:
         options.upstreams = ['app']
         http_client_options.datacenters = ['test', 'another']
         value_one_dc = [
@@ -114,12 +105,9 @@ def test_multiple_update_upstreams_servers_different_dc(self):
                     'Service': 'app',
                     'Address': '',
                     'Port': 9999,
-                    'Weights': {
-                        'Passing': 100,
-                        'Warning': 0
-                    }
-                }
-            }
+                    'Weights': {'Passing': 100, 'Warning': 0},
+                },
+            },
         ]

         value_another_dc = [
@@ -135,12 +123,9 @@ def test_multiple_update_upstreams_servers_different_dc(self):
                     'Service': 'app',
                     'Address': '',
                     'Port': 9999,
-                    'Weights': {
-                        'Passing': 100,
-                        'Warning': 0
-                    }
-                }
-            }
+                    'Weights': {'Passing': 100, 'Warning': 0},
+                },
+            },
         ]

         upstream_cache = UpstreamCaches({}, {})
@@ -152,7 +137,7 @@ def test_multiple_update_upstreams_servers_different_dc(self):
         self.assertEqual(len(upstream_cache._upstreams_servers), 2)
         self.assertEqual(len(upstream_cache._upstreams['app'].servers), 2)

-    def test_remove_upstreams_servers_different_dc(self):
+    def test_remove_upstreams_servers_different_dc(self) -> None:
         options.upstreams = ['app']
         http_client_options.datacenters = ['test', 'another']
         value_test_dc = [
@@ -168,12 +153,9 @@ def test_remove_upstreams_servers_different_dc(self):
                     'Service': 'app',
                     'Address': '',
                     'Port': 9999,
-                    'Weights': {
-                        'Passing': 100,
-                        'Warning': 0
-                    }
-                }
-            }
+                    'Weights': {'Passing': 100, 'Warning': 0},
+                },
+            },
         ]

         value_another_dc = [
@@ -189,11 +171,8 @@ def test_remove_upstreams_servers_different_dc(self):
                     'Service': 'app',
                     'Address': '',
                     'Port': 9999,
-                    'Weights': {
-                        'Passing': 100,
-                        'Warning': 0
-                    }
-                }
+                    'Weights': {'Passing': 100, 'Warning': 0},
+                },
             },
             {
                 'Node': {
@@ -207,12 +186,9 @@ def test_remove_upstreams_servers_different_dc(self):
                     'Service': 'app',
                     'Address': '',
                     'Port': 999,
-                    'Weights': {
-                        'Passing': 100,
-                        'Warning': 0
-                    }
-                }
-            }
+                    'Weights': {'Passing': 100, 'Warning': 0},
+                },
+            },
         ]

         value_another_remove_service_dc = [
@@ -228,12 +204,9 @@ def test_remove_upstreams_servers_different_dc(self):
                     'Service': 'app',
                     'Address': '',
                     'Port': 9999,
-                    'Weights': {
-                        'Passing': 100,
-                        'Warning': 0
-                    }
-                }
-            }
+                    'Weights': {'Passing': 100, 'Warning': 0},
+                },
+            },
         ]

         upstream_cache = UpstreamCaches({}, {})
@@ -256,25 +229,23 @@ def test_pipe_buffer_overflow(self):
         options.upstreams = ['app']
         http_client_options.datacenters = ['Test']

-        read_fd, write_fd = os.pipe2(os.O_NONBLOCK)
-        upstream_config = {Upstream.DEFAULT_PROFILE: UpstreamConfig(
-            max_timeout_tries=10,
-            retry_policy={
-                '403': {'idempotent': 'false'},
-                '500': {'idempotent': 'true'}
-            })}
-        upstreams = {
-            'upstream': Upstream('upstream', upstream_config, [Server('12.2.3.5'), Server('12.22.3.5')])
+        read_fd, write_fd = os.pipe2(os.O_NONBLOCK)  # type: ignore
+        upstream_config = {
+            Upstream.DEFAULT_PROFILE: UpstreamConfig(
+                max_timeout_tries=10,
+                retry_policy={'403': {'idempotent': 'false'}, '500': {'idempotent': 'true'}},
+            ),
         }
+        upstreams = {'upstream': Upstream('upstream', upstream_config, [Server('12.2.3.5'), Server('12.22.3.5')])}
         upstream_cache = UpstreamCaches({0: os.fdopen(write_fd, 'wb')}, upstreams)

-        for i in range(200):
+        for _i in range(200):
             upstream_cache.send_updates()

         self.assertTrue(upstream_cache._resend_dict, 'resend dict should not be empty')

         listener_upstreams = {}
-        notification_queue = Queue()
+        notification_queue: Queue = Queue()

         class ListenerCallback:
             def update_upstreams(self, upstreams):
@@ -282,7 +253,7 @@ def update_upstreams(self, upstreams):
                     listener_upstreams[upstream.name] = upstream
                 notification_queue.put(True)

-        async def _listener():
+        async def _listener() -> None:
             UpstreamUpdateListener(ListenerCallback(), read_fd)

         def _run_loop(io_loop):
diff --git a/tests/test_util.py b/tests/test_util.py
index e2ef42e14..603b20db5 100644
--- a/tests/test_util.py
+++ b/tests/test_util.py
@@ -1,89 +1,88 @@
 import unittest
 from collections import OrderedDict

+from http_client.util import make_mfd
 from tornado.escape import to_unicode
 from tornado.httputil import HTTPFile, parse_body_arguments

 from frontik import media_types
-from frontik.util import any_to_bytes, any_to_unicode, make_qs, make_url, reverse_regex_named_groups, \
-    generate_uniq_timestamp_request_id, check_request_id
-
-from http_client.util import make_mfd
+from frontik.util import (
+    any_to_bytes,
+    any_to_unicode,
+    check_request_id,
+    generate_uniq_timestamp_request_id,
+    make_qs,
+    make_url,
+    reverse_regex_named_groups,
+)


 class TestUtil(unittest.TestCase):
-    def test_make_qs_simple(self):
+    def test_make_qs_simple(self) -> None:
         query_args = {'a': '1', 'b': '2'}
-        self.assertQueriesEqual(make_qs(query_args), 'a=1&b=2')
+        self.assert_queries_equal(make_qs(query_args), 'a=1&b=2')

-    def test_make_qs_not_str(self):
+    def test_make_qs_not_str(self) -> None:
         query_args = {'a': 1, 'b': 2.0, 'c': True, 'd': None}
-        self.assertQueriesEqual(make_qs(query_args), 'a=1&b=2.0&c=True')
+        self.assert_queries_equal(make_qs(query_args), 'a=1&b=2.0&c=True')

-    def test_make_qs_iterables(self):
+    def test_make_qs_iterables(self) -> None:
         query_args = {'a': [1, 2], 'b': {1, 2}, 'c': (1, 2), 'd': frozenset((1, 2))}
-        self.assertQueriesEqual(make_qs(query_args), 'a=1&a=2&b=1&b=2&c=1&c=2&d=1&d=2')
+        self.assert_queries_equal(make_qs(query_args), 'a=1&a=2&b=1&b=2&c=1&c=2&d=1&d=2')

-    def test_make_qs_none(self):
+    def test_make_qs_none(self) -> None:
         query_args = {'a': None, 'b': None}
-        self.assertQueriesEqual(make_qs(query_args), '')
+        self.assert_queries_equal(make_qs(query_args), '')

-    def test_make_qs_encode(self):
+    def test_make_qs_encode(self) -> None:
         query_args = {'a': 'тест', 'b': 'тест'}
         qs = make_qs(query_args)
         self.assertIsInstance(qs, str)
-        self.assertQueriesEqual(qs, 'a=%D1%82%D0%B5%D1%81%D1%82&b=%D1%82%D0%B5%D1%81%D1%82')
+        self.assert_queries_equal(qs, 'a=%D1%82%D0%B5%D1%81%D1%82&b=%D1%82%D0%B5%D1%81%D1%82')

-    def test_make_qs_from_ordered_dict(self):
+    def test_make_qs_from_ordered_dict(self) -> None:
         qs = make_qs(OrderedDict([('z', 'я'), ('г', 'd'), ('b', ['2', '1'])]))
         self.assertIsInstance(qs, str)
         self.assertEqual(qs, 'z=%D1%8F&%D0%B3=d&b=2&b=1')

-    def test_make_qs_unicode_params(self):
-        self.assertQueriesEqual(
+    def test_make_qs_unicode_params(self) -> None:
+        self.assert_queries_equal(
             make_qs({'при': 'вет', 'по': 'ка'}),
-            '%D0%BF%D1%80%D0%B8=%D0%B2%D0%B5%D1%82&%D0%BF%D0%BE=%D0%BA%D0%B0'
+            '%D0%BF%D1%80%D0%B8=%D0%B2%D0%B5%D1%82&%D0%BF%D0%BE=%D0%BA%D0%B0',
         )

     def test_make_url(self):
-        self.assertEqual(
-            make_url('http://test.com/path', param='value'),
-            'http://test.com/path?param=value'
-        )
+        self.assertEqual(make_url('http://test.com/path', param='value'), 'http://test.com/path?param=value')

-        self.assertEqual(
-            make_url('http://test.com/path?k=v', param='value'),
-            'http://test.com/path?k=v&param=value'
-        )
+        self.assertEqual(make_url('http://test.com/path?k=v', param='value'), 'http://test.com/path?k=v&param=value')

         self.assertEqual(
             make_url('http://тест.рф/path?k=v', param='тест'),
-            'http://тест.рф/path?k=v&param=%D1%82%D0%B5%D1%81%D1%82'
+            'http://тест.рф/path?k=v&param=%D1%82%D0%B5%D1%81%D1%82',
         )

-    def assertQueriesEqual(self, qs1, qs2):
+    def assert_queries_equal(self, qs1: str, qs2: str) -> None:
         qs1_list = sorted(qs1.split('&'))
         qs2_list = sorted(qs2.split('&'))
         self.assertEqual(qs1_list, qs2_list)

-    def test_any_to_unicode(self):
+    def test_any_to_unicode(self) -> None:
         self.assertEqual(any_to_unicode(5), '5')
         self.assertEqual(any_to_unicode(None), 'None')
         self.assertEqual(any_to_unicode('тест'), 'тест')
-        self.assertEqual(any_to_unicode('тест'.encode('utf-8')), 'тест')
+        self.assertEqual(any_to_unicode('тест'.encode()), 'тест')

-    def test_any_to_bytes(self):
+    def test_any_to_bytes(self) -> None:
         self.assertEqual(any_to_bytes(5), b'5')
         self.assertEqual(any_to_bytes(None), b'None')
-        self.assertEqual(any_to_bytes('тест'), 'тест'.encode('utf-8'))
-        self.assertEqual(any_to_bytes('тест'.encode('utf-8')), 'тест'.encode('utf-8'))
+        self.assertEqual(any_to_bytes('тест'), 'тест'.encode())
+        self.assertEqual(any_to_bytes('тест'.encode()), 'тест'.encode())

-    def test_make_mfd(self):
-        args, files = {}, {}
+    def test_make_mfd(self) -> None:
+        args: dict = {}
+        files: dict = {}
         body, content_type = make_mfd(
-            {
-                'arg1': 'value1'
-            },
+            {'arg1': 'value1'},
             {
                 'file0': [HTTPFile(filename='file0.rar', body='ARCHIVE', content_type='some/type\r\n\r\nBAD DATA')],
                 'file1': [HTTPFile(filename='file1.png', body='CAT PICTURE', content_type=media_types.IMAGE_PNG)],
@@ -92,7 +91,7 @@ def test_make_mfd(self):
                 HTTPFile(filename=r'file3-"part1".unknown', body='BODY1'),
                 HTTPFile(filename=r'file3-\part2\.unknown', body='BODY2'),
             ],
-            }
+            },
         )

         parse_body_arguments(to_unicode(content_type), body, args, files)
@@ -136,7 +135,7 @@ def test_reverse_regex_named_groups(self):
         self.assertRaises(ValueError, reverse_regex_named_groups, two_ids, 1)
         self.assertRaises(ValueError, reverse_regex_named_groups, two_ids, id1=1)

-    def test_generate_request_id(self):
+    def test_generate_request_id(self) -> None:
         first = generate_uniq_timestamp_request_id()
         second = generate_uniq_timestamp_request_id()
@@ -146,6 +145,6 @@ def test_generate_request_id(self):
         int(first, 16)
         int(second, 16)

-    def test_check_request_id(self):
+    def test_check_request_id(self) -> None:
         self.assertTrue(check_request_id('12345678910abcdef'))
         self.assertFalse(check_request_id('not_hex_format_123'))
diff --git a/tests/test_xml_utils.py b/tests/test_xml_utils.py
index 72b736c98..0ef2eb0da 100644
--- a/tests/test_xml_utils.py
+++ b/tests/test_xml_utils.py
@@ -1,12 +1,15 @@
+import logging
 import os.path
 import unittest

+import pytest
 from lxml import etree
 from lxml_asserts.testcase import LxmlTestCaseMixin

 from frontik.xml_util import dict_to_xml, xml_from_file, xml_to_dict


-XML = etree.XML('''
+XML = etree.XML(
+    '''
 <root>
     <key1>value</key1>
@@ -23,40 +26,21 @@
         <bool>True</bool>
     </complexNested>
 </root>
-''')
+    ''',
+)


 DICT_BEFORE = {
     'key1': 'value',
     'key2': '',
-    'nested': {
-        'key1': 'русский текст в utf-8',
-        'key2': 'русский текст в unicode'
-    },
-    'complexNested': {
-        'nested': {
-            'key': 'value',
-            'otherKey': 'otherValue'
-        },
-        'int': 123,
-        'bool': True
-    }
+    'nested': {'key1': 'русский текст в utf-8', 'key2': 'русский текст в unicode'},
+    'complexNested': {'nested': {'key': 'value', 'otherKey': 'otherValue'}, 'int': 123, 'bool': True},
 }


 DICT_AFTER = {
     'key1': 'value',
     'key2': '',
-    'nested': {
-        'key1': 'русский текст в utf-8',
-        'key2': 'русский текст в unicode'
-    },
-    'complexNested': {
-        'nested': {
-            'key': 'value',
-            'otherKey': 'otherValue'
-        },
-        'int': '123',
-        'bool': 'True'
-    }
+    'nested': {'key1': 'русский текст в utf-8', 'key2': 'русский текст в unicode'},
+    'complexNested': {'nested': {'key': 'value', 'otherKey': 'otherValue'}, 'int': '123', 'bool': 'True'},
 }
@@ -72,9 +56,9 @@ def test_xml_to_dict_and_back_again(self):
     XML_MISSING_FILE = os.path.join(os.path.dirname(__file__), 'bbb.xml')
     XML_SYNTAX_ERROR_FILE = os.path.join(os.path.dirname(__file__), 'projects', 'test_app', 'xsl', 'syntax_error.xsl')

-    class MockLog:
-        def __init__(self):
-            self.message = None
+    class MockLog(logging.Logger):
+        def __init__(self) -> None:
+            self.message: str = None  # type: ignore

         def error(self, message, *args):
             self.message = message % args
@@ -86,7 +70,7 @@ def test_xml_from_file(self):
     def test_xml_from_file_does_not_exist(self):
         log = TestXmlUtils.MockLog()

-        with self.assertRaises(IOError):
+        with pytest.raises(IOError):  # noqa: PT011
            xml_from_file(self.XML_MISSING_FILE, log)

         self.assertIn('failed to read xml file', log.message)
@@ -94,7 +78,7 @@ def test_xml_from_file_does_not_exist(self):
     def test_xml_from_file_syntax_error(self):
         log = TestXmlUtils.MockLog()

-        with self.assertRaises(etree.XMLSyntaxError):
+        with pytest.raises(etree.XMLSyntaxError):
             xml_from_file(self.XML_SYNTAX_ERROR_FILE, log)

         self.assertIn('failed to parse xml file', log.message)
diff --git a/tests/test_yield_errors.py b/tests/test_yield_errors.py
index e536dcaed..3d9249256 100644
--- a/tests/test_yield_errors.py
+++ b/tests/test_yield_errors.py
@@ -4,10 +4,10 @@


 class TestHandler(unittest.TestCase):
-    def test_error_in_yield(self):
+    def test_error_in_yield(self) -> None:
         response = frontik_test_app.get_page('error_yield')
         self.assertEqual(response.status_code, 500)

-    def test_error_in_yield_async(self):
+    def test_error_in_yield_async(self) -> None:
         response = frontik_test_app.get_page('error_yield_async')
         self.assertEqual(response.status_code, 500)