From 117b154b49bb516720e0829c62933be7d987b865 Mon Sep 17 00:00:00 2001
From: Leonid Vinogradov
Date: Thu, 4 Apr 2024 15:38:30 +0300
Subject: [PATCH] HH-212492 use fastapi server/routing

---
 frontik/app.py                     | 264 ++++++++---
 frontik/debug.py                   |  56 +--
 frontik/futures.py                 |  11 +-
 frontik/handler.py                 | 730 ++++++++++++++++------------
 frontik/integrations/sentry.py     |   4 +-
 frontik/integrations/telemetry.py  |  66 +--
 frontik/loggers/stages.py          |   6 +-
 frontik/producers/json_producer.py |   4 +-
 frontik/producers/xml_producer.py  |   4 +-
 frontik/routing.py                 | 217 ++++-----
 frontik/server.py                  | 128 +++--
 frontik/timeout_tracking.py        |   9 +-
 frontik/util.py                    |   2 +-
 poetry.lock                        | 165 ++++---
 pyproject.toml                     |   1 +
 15 files changed, 978 insertions(+), 689 deletions(-)

diff --git a/frontik/app.py b/frontik/app.py
index b0ca803de..b4dded341 100644
--- a/frontik/app.py
+++ b/frontik/app.py
@@ -10,6 +10,9 @@
 from functools import partial
 from threading import Lock
 from typing import Any, Optional, Union
+import json
+import inspect
+import re
 
 from aiokafka import AIOKafkaProducer
 from http_client import AIOHttpClientWrapper, HttpClientFactory
@@ -24,33 +27,118 @@ import frontik.producers.xml_producer
 from frontik import integrations, media_types, request_context
 from frontik.debug import DebugTransform, get_frontik_and_apps_versions
-from frontik.handler import ErrorHandler, PageHandler
+from frontik.handler import PageHandler, FinishPageSignal, RedirectPageSignal, build_error_data
 from frontik.handler_return_values import ReturnedValueHandlers, get_default_returned_value_handlers
 from frontik.integrations.statsd import StatsDClient, StatsDClientStub, create_statsd_client
 from frontik.loggers import CUSTOM_JSON_EXTRA, JSON_REQUESTS_LOGGER
 from frontik.options import options
 from frontik.process import WorkerState
-from frontik.routing import FileMappingRouter, FrontikRouter
+from frontik.routing import routers, normal_routes, regex_mapping, FrontikRouter, FrontikRegexRouter
 from frontik.service_discovery import UpstreamManager
 from frontik.util import check_request_id, generate_uniq_timestamp_request_id
-
-app_logger = logging.getLogger('http_client')
-
-
-class VersionHandler(RequestHandler):
-    def get(self):
-        self.application: FrontikApplication
-        self.set_header('Content-Type', 'text/xml')
-        self.write(
-            etree.tostring(get_frontik_and_apps_versions(self.application), encoding='utf-8', xml_declaration=True),
-        )
-
-
-class StatusHandler(RequestHandler):
-    def get(self):
-        self.application: FrontikApplication
-        self.set_header('Content-Type', media_types.APPLICATION_JSON)
-        self.finish(self.application.get_current_status())
+from fastapi import FastAPI, APIRouter, Request
+from fastapi.routing import APIRoute
+import pkgutil
+from http_client import HttpClient
+from starlette.middleware.base import Response
+from fastapi import Depends
+import os
+from inspect import ismodule
+from starlette.datastructures import MutableHeaders
+from frontik.json_builder import json_decode
+from frontik.handler import get_current_handler
+
+app_logger = logging.getLogger('app_logger')
+
+_core_router = FrontikRouter()
+router = FrontikRouter()
+regex_router = FrontikRegexRouter()
+routers.extend((_core_router, router, regex_router))
+
+
+def setup_page_handler(request: Request, cls: type[PageHandler]):
+    # create a legacy PageHandler and attach it to the request
+    handler = cls(
+        request.app.frontik_app,
+        request.query_params,
+        request.cookies,
+        request.headers,
+        request.state.body_bytes,
+        request.state.start_time,
+        request.url.path,
+        request.state.path_params,
+        request.client.host,
+        request.method,
+    )
+
+    request.state.handler = handler
+    return handler
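Review context: a minimal sketch of how `setup_page_handler` is presumably attached to a route, assuming the `Depends`-based `get_current_handler` added in frontik/handler.py below; the `Page` class and `page_dependency` helper are illustrative, not part of this diff:

    from fastapi import Depends, Request

    class Page(PageHandler):  # illustrative legacy-style page class
        pass

    def page_dependency(request: Request) -> PageHandler:
        # builds the legacy handler before the endpoint body runs
        return setup_page_handler(request, Page)

    # endpoints can then pull the prepared handler back out of
    # request.state via the get_current_handler dependency
    async def get_page(handler: PageHandler = get_current_handler) -> None:
        handler.set_header('Content-Type', 'text/plain')
        handler.text = 'Hello'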
+
+
+def _data_to_chunk(data, headers) -> bytes:
+    if isinstance(data, str):
+        chunk = data.encode("utf-8")
+    elif isinstance(data, dict):
+        chunk = json.dumps(data).replace("</", "<\\/")
+        headers['Content-Type'] = 'application/json; charset=UTF-8'
+        chunk = chunk.encode("utf-8")
+    elif isinstance(data, bytes):
+        chunk = data
+    return chunk
 
     ... ) -> None:
         self.finish(traceback.format_exc())
 
-class FrontikApplication(Application):
+class FrontikApplication:
     request_id = ''
 
     class DefaultConfig:
@@ -107,15 +195,6 @@ def __init__(self, app_root: str, **settings: Any) -> None:
         self.available_integrations: list[integrations.Integration] = []
         self.tornado_http_client: Optional[AIOHttpClientWrapper] = None
         self.http_client_factory: HttpClientFactory
-        self.router = FrontikRouter(self)
-
-        core_handlers: list[Any] = [
-            (r'/version/?', VersionHandler),
-            (r'/status/?', StatusHandler),
-            (r'.*', self.router),
-        ]
-        if options.debug:
-            core_handlers.insert(0, (r'/pydevd/?', PydevdHandler))
 
         self.statsd_client: Union[StatsDClient, StatsDClientStub] = create_statsd_client(options, self)
@@ -126,8 +205,6 @@ def __init__(self, app_root: str, **settings: Any) -> None:
 
         self.returned_value_handlers: ReturnedValueHandlers = get_default_returned_value_handlers()
 
-        super().__init__(core_handlers)
-
     def create_upstream_manager(
         self,
         upstreams: dict[str, Upstream],
@@ -146,8 +223,6 @@ def create_upstream_manager(
         self.upstream_manager.send_updates()  # initial full state sending
 
     async def init(self) -> None:
-        self.transforms.insert(0, partial(DebugTransform, self))  # type: ignore
-
         self.available_integrations, integration_futures = integrations.load_integrations(self)
         await asyncio.gather(*[future for future in integration_futures if future])
@@ -182,36 +257,6 @@ async def init(self) -> None:
         if self.worker_state.single_worker_mode:
             self.worker_state.master_done.value = True
 
-    def find_handler(self, request, **kwargs):
-        request_id = request.headers.get('X-Request-Id')
-        if request_id is None:
-            request_id = FrontikApplication.next_request_id()
-        if options.validate_request_id:
-            check_request_id(request_id)
-
-        def wrapped_in_context(func: Callable) -> Callable:
-            def wrapper(*args, **kwargs):
-                with request_context.request_context(request, request_id):
-                    return func(*args, **kwargs)
-
-            return wrapper
-
-        delegate: httputil.HTTPMessageDelegate = wrapped_in_context(super().find_handler)(request, **kwargs)
-        delegate.headers_received = wrapped_in_context(delegate.headers_received)  # type: ignore
-        delegate.data_received = wrapped_in_context(delegate.data_received)  # type: ignore
-        delegate.finish = wrapped_in_context(delegate.finish)  # type: ignore
-        delegate.on_connection_close = wrapped_in_context(delegate.on_connection_close)  # type: ignore
-
-        return delegate
-
-    def reverse_url(self, name: str, *args: Any, **kwargs: Any) -> str:
-        return self.router.reverse_url(name, *args, **kwargs)
-
-    def application_urls(self) -> list[tuple]:
-        return [('', FileMappingRouter(importlib.import_module(f'{self.app_module}.pages')))]
-
-    def application_404_handler(self, request: HTTPServerRequest) -> tuple[type[PageHandler], dict]:
-        return ErrorHandler, {'status_code': 404}
 
     def application_config(self) -> DefaultConfig:
         return FrontikApplication.DefaultConfig()
@@ -248,19 +293,15 @@ def get_current_status(self) -> dict[str, str]:
 
         return {'uptime': uptime_value, 'datacenter': http_client_options.datacenter}
 
-    def log_request(self, handler):
-        if not options.log_json:
-            super().log_request(handler)
-            return
-
-        request_time = int(1000.0 * handler.request.request_time())
+    def log_request(self, handler, request: Request):
+        request_time = int(1000.0 * (time.time() - handler.request_start_time))
         extra = {
-            'ip': handler.request.remote_ip,
+            'ip': request.client.host,
             'rid': request_context.get_request_id(),
             'status': handler.get_status(),
             'time': request_time,
-            'method': handler.request.method,
-            'uri': handler.request.uri,
+            'method': request.method,
+            'uri': str(request.url),
         }
 
         handler_name = request_context.get_handler_name()
@@ -271,3 +312,76 @@ def log_request(self, handler):
 
     def get_kafka_producer(self, producer_name: str) -> Optional[AIOKafkaProducer]:  # pragma: no cover
         pass
+
+
+async def process_request(request, call_next):
+    handler = request.state.handler
+    status = 200
+    headers = {}
+    content = None
+
+    try:
+        request_context.set_handler(handler)
+
+        handler.prepare()
+        handler.stages_logger.commit_stage('prepare')
+        _response = await call_next(request)
+
+        handler._handler_finished_notification()
+        await handler.finish_group.get_gathering_future()
+        await handler.finish_group.get_finish_future()
+        handler.stages_logger.commit_stage('page')
+
+        render_result = await handler._postprocess()
+        handler.stages_logger.commit_stage('postprocess')
+
+        headers = handler.resp_headers
+        status = handler.get_status()
+
+        debug_transform = DebugTransform(request.app.frontik_app, request)
+        if debug_transform.is_enabled():
+            chunk = _data_to_chunk(render_result, headers)
+            status, headers, render_result = debug_transform.transform_chunk(status, headers, chunk)
+
+        content = render_result
+
+    except FinishPageSignal as finish_ex:
+        handler._handler_finished_notification()
+        headers = handler.resp_headers
+        chunk = _data_to_chunk(finish_ex.data, headers)
+        status = handler.get_status()
+        content = chunk
+
+    except RedirectPageSignal as redirect_ex:
+        handler._handler_finished_notification()
+        headers = handler.resp_headers
+        status = redirect_ex.status
+        headers['Location'] = redirect_ex.url
+
+    except Exception as ex:
+        try:
+            status, headers, content = await handler._handle_request_exception(ex)
+        except Exception:
+            app_logger.exception('request processing has failed')
+            status, headers, content = build_error_data(handler.request_id)
+
+    finally:
+        handler.cleanup()
+
+    if status in (204, 304) or (100 <= status < 200):
+        for h in ('Content-Encoding', 'Content-Language', 'Content-Type'):
+            if h in headers:
+                headers.pop(h)
+        content = None
+
+    response = Response(status_code=status, headers=headers, content=content)
+
+    for key, values in handler.resp_cookies.items():
+        response.set_cookie(key, **values)
+
+    handler.finish_group.abort()
+    request.app.frontik_app.log_request(handler, request)
+    handler.on_finish(status)
+
+    return response
diff --git a/frontik/debug.py b/frontik/debug.py
index 33fdebb1f..a2f783f0b 100644
--- a/frontik/debug.py
+++ b/frontik/debug.py
@@ -34,6 +34,8 @@
 from frontik.options import options
 from frontik.version import version as frontik_version
 from frontik.xml_util import dict_to_xml
+from fastapi import Request
+from starlette.datastructures import Headers
 
 if TYPE_CHECKING:
     from typing import Any
@@ -203,7 +205,7 @@ def _params_to_xml(url: str) -> etree.Element:
     return params
 
-def _headers_to_xml(request_or_response_headers: dict | HTTPHeaders) -> etree.Element:
+def _headers_to_xml(request_or_response_headers: dict | Headers) -> etree.Element:
     headers = etree.Element('headers')
     for name, value in request_or_response_headers.items():
         if name != 'Cookie':
@@ -365,52 +367,42 @@ def
_produce_one(self, record: logging.LogRecord) -> etree.Element: DEBUG_XSL = os.path.join(os.path.dirname(__file__), 'debug/debug.xsl') -class DebugTransform(OutputTransform): - def __init__(self, application: FrontikApplication, request: HTTPServerRequest) -> None: +# class DebugTransform(OutputTransform): +class DebugTransform: + def __init__(self, application: FrontikApplication, request: Request) -> None: self.application = application - self.request = request + self.request: Request = request def is_enabled(self) -> bool: - return getattr(self.request, '_debug_enabled', False) + return getattr(self.request.state.handler, '_debug_enabled', False) def is_inherited(self) -> bool: - return getattr(self.request, '_debug_inherited', False) + return getattr(self.request.state.handler, '_debug_inherited', False) - def transform_first_chunk(self, status_code, headers, chunk, finishing): + def transform_chunk(self, status_code, original_headers, chunk): if not self.is_enabled(): - return status_code, headers, chunk + return status_code, original_headers, chunk self.status_code = status_code - self.headers = headers + self.headers = original_headers self.chunks = [chunk] if not self.is_inherited(): - headers = HTTPHeaders({'Content-Type': media_types.TEXT_HTML}) + wrap_headers = {'Content-Type': media_types.TEXT_HTML} else: - headers = HTTPHeaders({'Content-Type': media_types.APPLICATION_XML, DEBUG_HEADER_NAME: 'true'}) + wrap_headers = {'Content-Type': media_types.APPLICATION_XML, DEBUG_HEADER_NAME: 'true'} - return 200, headers, self.produce_debug_body(finishing) - - def transform_chunk(self, chunk: bytes, finishing: bool) -> bytes: - if not self.is_enabled(): - return chunk - - self.chunks.append(chunk) - - return self.produce_debug_body(finishing) - - def produce_debug_body(self, finishing: bool) -> bytes: - if not finishing: - return b'' + return 200, wrap_headers, self.produce_debug_body() + def produce_debug_body(self) -> bytes: start_time = time.time() debug_log_data = request_context.get_log_handler().produce_all() # type: ignore debug_log_data.set('code', str(int(self.status_code))) debug_log_data.set('handler-name', request_context.get_handler_name()) - debug_log_data.set('started', _format_number(self.request._start_time)) - debug_log_data.set('request-id', str(self.request.request_id)) # type: ignore - debug_log_data.set('stages-total', _format_number((time.time() - self.request._start_time) * 1000)) + debug_log_data.set('started', _format_number(self.request.state.start_time)) + debug_log_data.set('request-id', str(self.request.state.handler.request_id)) # type: ignore + debug_log_data.set('stages-total', _format_number((time.time() - self.request.state.start_time) * 1000)) try: debug_log_data.append(E.versions(_pretty_print_xml(get_frontik_and_apps_versions(self.application)))) @@ -427,7 +419,7 @@ def produce_debug_body(self, finishing: bool) -> bytes: debug_log_data.append( E.request( E.method(self.request.method), - _params_to_xml(self.request.uri), # type: ignore + _params_to_xml(str(self.request.url)), # type: ignore _headers_to_xml(self.request.headers), _cookies_to_xml(self.request.headers), # type: ignore ), @@ -451,7 +443,7 @@ def produce_debug_body(self, finishing: bool) -> bytes: upstream.set('bgcolor', bgcolor) upstream.set('fgcolor', fgcolor) - if not getattr(self.request, '_debug_inherited', False): + if not getattr(self.request.state.handler, '_debug_inherited', False): try: transform = etree.XSLT(etree.parse(DEBUG_XSL)) log_document = 
utf8(str(transform(debug_log_data))) @@ -476,16 +468,16 @@ def __init__(self, handler: PageHandler) -> None: debug_value = frontik.util.get_cookie_or_url_param_value(handler, 'debug') self.mode_values = debug_value.split(',') if debug_value is not None else '' - self.inherited = handler.request.headers.get(DEBUG_HEADER_NAME) + self.inherited = handler.get_header(DEBUG_HEADER_NAME, False) if self.inherited: debug_log.debug('debug mode is inherited due to %s request header', DEBUG_HEADER_NAME) - handler.request._debug_inherited = True # type: ignore + handler._debug_inherited = True # type: ignore if debug_value is not None or self.inherited: handler.require_debug_access() - self.enabled = handler.request._debug_enabled = True # type: ignore + self.enabled = handler._debug_enabled = True # type: ignore self.pass_debug = 'nopass' not in self.mode_values or self.inherited self.profile_xslt = 'xslt' in self.mode_values diff --git a/frontik/futures.py b/frontik/futures.py index cafe7dd66..8558c92df 100644 --- a/frontik/futures.py +++ b/frontik/futures.py @@ -5,6 +5,7 @@ import time from functools import partial, wraps from typing import TYPE_CHECKING, Optional +from asyncio import Task from tornado.concurrent import Future from tornado.ioloop import IOLoop @@ -44,6 +45,9 @@ def is_finished(self) -> bool: return self._finished def abort(self) -> None: + if self._finished: + return + async_logger.info('aborting %s', self) self._finished = True if not self._future.done(): @@ -122,10 +126,9 @@ def _handle_future(callback, future): future.result() callback() - def add_future(self, future: Future) -> Future: - IOLoop.current().add_future(future, partial(self._handle_future, self.add_notification())) - self._futures.append(future) - return future + def add_future(self, task: Task): + task.add_done_callback(partial(self._handle_future, self.add_notification())) + self._futures.append(task) def get_finish_future(self) -> Future: return self._future diff --git a/frontik/handler.py b/frontik/handler.py index f818f33f2..4e06d93bc 100644 --- a/frontik/handler.py +++ b/frontik/handler.py @@ -9,6 +9,7 @@ from asyncio.futures import Future from functools import wraps from typing import TYPE_CHECKING, Any, Optional, Union +import sys import tornado.httputil import tornado.web @@ -18,6 +19,8 @@ from tornado.ioloop import IOLoop from tornado.web import Finish, RequestHandler +import datetime +from tornado.httputil import parse_body_arguments, format_timestamp import frontik.auth import frontik.handler_active_limit import frontik.producers.json_producer @@ -26,7 +29,6 @@ from frontik import media_types, request_context from frontik.auth import DEBUG_AUTH_HEADER_NAME from frontik.debug import DEBUG_HEADER_NAME, DebugMode -from frontik.dependency_manager import APIRouter, execute_page_method_with_dependencies from frontik.futures import AbortAsyncGroup, AsyncGroup from frontik.http_status import ALLOWED_STATUSES, CLIENT_CLOSED_REQUEST from frontik.json_builder import FrontikJsonDecodeError, json_decode @@ -36,6 +38,12 @@ from frontik.util import gather_dict, make_url from frontik.validator import BaseValidationModel, Validators from frontik.version import version as frontik_version +from collections.abc import Callable, Coroutine +from fastapi import Request +from fastapi import HTTPException +from starlette.datastructures import QueryParams, Headers +from fastapi import Depends + if TYPE_CHECKING: from collections.abc import Callable, Coroutine @@ -66,17 +74,31 @@ def __init__(self) -> None: super().__init__(400, 
'Failed to parse json in request body') -class DefaultValueError(Exception): - def __init__(self, *args: object) -> None: +class DefaultValueError(tornado.web.HTTPError): + def __init__(self, arg_name: str) -> None: + super().__init__(400, "Missing argument %s" % arg_name) + self.arg_name = arg_name + + +class FinishPageSignal(Exception): + def __init__(self, data: None, *args: object) -> None: + super().__init__(*args) + self.data = data + + +class RedirectPageSignal(Exception): + def __init__(self, url: str, status: int, *args: object) -> None: super().__init__(*args) + self.url = url + self.status = status _ARG_DEFAULT = object() MEDIA_TYPE_PARAMETERS_SEPARATOR_RE = r' *; *' OUTER_TIMEOUT_MS_HEADER = 'X-Outer-Timeout-Ms' +_remove_control_chars_regex = re.compile(r"[\x00-\x08\x0e-\x1f]") handler_logger = logging.getLogger('handler') -router = APIRouter() def _fail_fast_policy(fail_fast: bool, waited: bool, host: str, path: str) -> bool: @@ -91,18 +113,44 @@ def _fail_fast_policy(fail_fast: bool, waited: bool, host: str, path: str) -> bo return fail_fast -class PageHandler(RequestHandler): +class PageHandler: returned_value_handlers: ReturnedValueHandlers = [] - def __init__(self, application: FrontikApplication, request: HTTPServerRequest, **kwargs: Any) -> None: - self.name = self.__class__.__name__ - self.request_id: str = request_context.get_request_id() # type: ignore - request.request_id = self.request_id # type: ignore + def __init__( + self, + application: FrontikApplication, + q_params: QueryParams = None, + c_params: dict[str, str] = None, + h_params: Headers = None, + body_bytes: bytes = None, + request_start_time: float = None, + path: str = None, + path_params: dict = None, + remote_ip: str = None, + method: str = None, + ) -> None: # request: Request + self.q_params = q_params + self.c_params = c_params or {} + self.h_params: Headers = h_params + self.body_bytes = body_bytes + self._json_body = None + self.body_arguments = {} + self.files = {} + self.parse_body_bytes() + self.path = path + self.path_params = path_params + self.request_start_time = request_start_time + self.remote_ip = h_params.get('x-real-ip', None) + self.method = method + + self.resp_cookies: dict[str, dict] = {} + self.config = application.config self.log = handler_logger self.text: Any = None - super().__init__(application, request, **kwargs) + self.application = application + self._finished = False self.statsd_client: StatsDClient | StatsDClientStub @@ -112,32 +160,34 @@ def __init__(self, application: FrontikApplication, request: HTTPServerRequest, if not self.returned_value_handlers: self.returned_value_handlers = list(application.returned_value_handlers) - self.stages_logger = StagesLogger(request, self.statsd_client) + self.stages_logger = StagesLogger(request_start_time, self.statsd_client) self._debug_access: Optional[bool] = None self._render_postprocessors: list = [] self._postprocessors: list = [] - self._mandatory_cookies: dict = {} - self._mandatory_headers = tornado.httputil.HTTPHeaders() - self._validation_model: type[BaseValidationModel | BaseModel] = BaseValidationModel self.timeout_checker = None self.use_adaptive_strategy = False - outer_timeout = request.headers.get(OUTER_TIMEOUT_MS_HEADER) + outer_timeout = h_params.get(OUTER_TIMEOUT_MS_HEADER) if outer_timeout: self.timeout_checker = get_timeout_checker( - request.headers.get(USER_AGENT_HEADER), + h_params.get(USER_AGENT_HEADER), float(outer_timeout), - request.request_time, + request_start_time, ) + self._status = 200 + self._reason 
= '' + def __repr__(self): return f'{self.__module__}.{self.__class__.__name__}' - def prepare(self) -> None: - self.application: FrontikApplication + def prepare(self): + self.request_id: str = request_context.get_request_id() # type: ignore + self.resp_headers = set_default_headers(self.request_id) + self.active_limit = frontik.handler_active_limit.ActiveHandlersLimit(self.statsd_client) self.debug_mode = DebugMode(self) self.finish_group = AsyncGroup(lambda: None, name='finish') @@ -156,60 +206,99 @@ def prepare(self) -> None: self._handler_finished_notification = self.finish_group.add_notification() - super().prepare() + # Simple getters and setters - def require_debug_access(self, login: Optional[str] = None, passwd: Optional[str] = None) -> None: - if self._debug_access is None: - if options.debug: - debug_access = True - else: - check_login = login if login is not None else options.debug_login - check_passwd = passwd if passwd is not None else options.debug_password - frontik.auth.check_debug_auth(self, check_login, check_passwd) - debug_access = True - - self._debug_access = debug_access - - def set_default_headers(self): - self._headers = tornado.httputil.HTTPHeaders({ - 'Server': f'Frontik/{frontik_version}', - 'X-Request-Id': self.request_id, - }) + def get_request_headers(self) -> Headers: + return self.h_params - def decode_argument(self, value: bytes, name: Optional[str] = None) -> str: - try: - return super().decode_argument(value, name) - except (UnicodeError, tornado.web.HTTPError): - self.log.warning('cannot decode utf-8 query parameter, trying other charsets') - - try: - return frontik.util.decode_string_from_charset(value) - except UnicodeError: - self.log.exception('cannot decode argument, ignoring invalid chars') - return value.decode('utf-8', 'ignore') - - def get_body_argument(self, name: str, default: Any = _ARG_DEFAULT, strip: bool = True) -> Optional[str]: - if self._get_request_mime_type(self.request) == media_types.APPLICATION_JSON: - if name not in self.json_body and default == _ARG_DEFAULT: - raise tornado.web.MissingArgumentError(name) + def get_path_argument(self, name, default=_ARG_DEFAULT): + value = self.path_params.get(name, None) + if value is None: + if default == _ARG_DEFAULT: + raise DefaultValueError(name) + return default + value = _remove_control_chars_regex.sub(" ", value) + return value - result = self.json_body.get(name, default) + def get_query_argument( + self, + name: str, + default: Any = _ARG_DEFAULT, + strip: bool = True, + ) -> Optional[str]: + args = self._get_arguments(name, strip=strip) + if not args: + if default == _ARG_DEFAULT: + raise DefaultValueError(name) + return default + return args[-1] + + def get_query_arguments(self, name: Optional[str] = None, strip: bool = True) -> Union[list[str], dict[str, str]]: + if name is None: + return self._get_all_arguments(strip) + return self._get_arguments(name, strip) + + def _get_all_arguments(self, strip: bool = True) -> dict[str, str]: + qargs_list = self.q_params.multi_items() + values = {} + for qarg_k, qarg_v in qargs_list: + v = _remove_control_chars_regex.sub(" ", qarg_v) + if strip: + v = v.strip() + values[qarg_k] = v + + return values + + def _get_arguments(self, name: str, strip: bool = True) -> list[str]: + qargs_list = self.q_params.multi_items() + values = [] + for qarg_k, qarg_v in qargs_list: + if qarg_k != name: + continue + + # Get rid of any weird control chars (unless decoding gave + # us bytes, in which case leave it alone) + v = _remove_control_chars_regex.sub(" ", 
qarg_v)
+            if strip:
+                v = v.strip()
+            values.append(v)
+
+        return values
 
-        if strip and isinstance(result, str):
-            return result.strip()
+    def get_str_argument(
+        self,
+        name: str,
+        default: Any = _ARG_DEFAULT,
+        path_safe: bool = True,
+        **kwargs: Any,
+    ) -> Optional[Union[str, list[str]]]:
+        if path_safe:
+            return self.get_validated_argument(name, Validators.PATH_SAFE_STRING, default=default, **kwargs)
+        return self.get_validated_argument(name, Validators.STRING, default=default, **kwargs)
 
-        return result
+    def get_int_argument(
+        self,
+        name: str,
+        default: Any = _ARG_DEFAULT,
+        **kwargs: Any,
+    ) -> Optional[Union[int, list[int]]]:
+        return self.get_validated_argument(name, Validators.INTEGER, default=default, **kwargs)
 
-        if default == _ARG_DEFAULT:
-            return super().get_body_argument(name, strip=strip)
-        return super().get_body_argument(name, default, strip)
+    def get_bool_argument(
+        self,
+        name: str,
+        default: Any = _ARG_DEFAULT,
+        **kwargs: Any,
+    ) -> Optional[Union[bool, list[bool]]]:
+        return self.get_validated_argument(name, Validators.BOOLEAN, default=default, **kwargs)
 
-    def set_validation_model(self, model: type[Union[BaseValidationModel, BaseModel]]) -> None:
-        if issubclass(model, BaseModel):
-            self._validation_model = model
-        else:
-            msg = 'model is not subclass of BaseClass'
-            raise TypeError(msg)
+    def get_float_argument(
+        self,
+        name: str,
+        default: Any = _ARG_DEFAULT,
+        **kwargs: Any,
+    ) -> Optional[Union[float, list[float]]]:
+        return self.get_validated_argument(name, Validators.FLOAT, default=default, **kwargs)
 
     def get_validated_argument(
         self,
@@ -226,7 +315,7 @@ def get_validated_argument(
                 params = {validator: default}
                 validated_default = self._validation_model(**params).model_dump().get(validator)
             except ValidationError:
-                raise DefaultValueError()
+                raise DefaultValueError(name)
         else:
             validated_default = default
@@ -236,9 +325,9 @@ def get_validated_argument(
         elif from_body:
             value = self.get_body_argument(name, validated_default, strip)
         elif array:
-            value = self.get_arguments(name, strip)
+            value = self.get_query_arguments(name, strip)
         else:
-            value = self.get_argument(name, validated_default, strip)
+            value = self.get_query_argument(name, validated_default, strip)
 
         try:
             params = {validator: value}
@@ -250,199 +339,230 @@ def get_validated_argument(
 
         return validated_value
 
-    def get_str_argument(
-        self,
-        name: str,
-        default: Any = _ARG_DEFAULT,
-        path_safe: bool = True,
-        **kwargs: Any,
-    ) -> Optional[Union[str, list[str]]]:
-        if path_safe:
-            return self.get_validated_argument(name, Validators.PATH_SAFE_STRING, default=default, **kwargs)
-        return self.get_validated_argument(name, Validators.STRING, default=default, **kwargs)
+    def get_body_arguments(self, name: str = None, strip: bool = True) -> Union[list[str], dict[str, list[str]]]:
+        # only for non-JSON bodies
+        if name is None:
+            return self._get_all_body_arguments(strip)
+        return self._get_body_arguments(name, strip)
+
+    def _get_all_body_arguments(self, strip) -> dict[str, list[str]]:
+        result = {}
+        for key, values in self.body_arguments.items():
+            result[key] = []
+            for v in values:
+                s = self.decode_argument(v)
+                if isinstance(s, str):
+                    s = _remove_control_chars_regex.sub(" ", s)
+                    if strip:
+                        s = s.strip()
+                result[key].append(s)
+        return result
 
-    def get_int_argument(
-        self,
-        name: str,
-        default: Any = _ARG_DEFAULT,
-        **kwargs: Any,
-    ) -> Optional[Union[int, list[int]]]:
-        return self.get_validated_argument(name, Validators.INTEGER, default=default, **kwargs)
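+    # Illustrative behavior note (not in the original patch): with
+    # Content-Type: application/json and body b'{"limit": "10"}',
+    # get_body_argument('limit') below reads from the lazily parsed
+    # json_body; for form-encoded bodies it falls back to the
+    # body_arguments dict filled by parse_body_bytes().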
+    def get_body_argument(self, name: str, default: Any = _ARG_DEFAULT, strip: bool = True) -> Optional[str]:
+        if self._get_request_mime_type() == media_types.APPLICATION_JSON:
+            if name not in self.json_body and default == _ARG_DEFAULT:
+                raise DefaultValueError(name)
 
-    def get_bool_argument(
-        self,
-        name: str,
-        default: Any = _ARG_DEFAULT,
-        **kwargs: Any,
-    ) -> Optional[Union[bool, list[bool]]]:
-        return self.get_validated_argument(name, Validators.BOOLEAN, default=default, **kwargs)
+            result = self.json_body.get(name, default)
 
-    def get_float_argument(
-        self,
-        name: str,
-        default: Any = _ARG_DEFAULT,
-        **kwargs: Any,
-    ) -> Optional[Union[float, list[float]]]:
-        return self.get_validated_argument(name, Validators.FLOAT, default=default, **kwargs)
+            if strip and isinstance(result, str):
+                return result.strip()
 
-    def _get_request_mime_type(self, request: HTTPServerRequest) -> str:
-        content_type = request.headers.get('Content-Type', '')
-        return re.split(MEDIA_TYPE_PARAMETERS_SEPARATOR_RE, content_type)[0]
+            return result
 
-    def set_status(self, status_code: int, reason: Optional[str] = None) -> None:
-        status_code = status_code if status_code in ALLOWED_STATUSES else http.client.SERVICE_UNAVAILABLE
-        super().set_status(status_code, reason=reason)
-
-    def redirect(self, url, *args, allow_protocol_relative=False, **kwargs):
-        if not allow_protocol_relative and url.startswith('//'):
-            # A redirect with two initial slashes is a "protocol-relative" URL.
-            # This means the next path segment is treated as a hostname instead
-            # of a part of the path, making this effectively an open redirect.
-            # Reject paths starting with two slashes to prevent this.
-            # This is only reachable under certain configurations.
-            raise tornado.web.HTTPError(403, 'cannot redirect path with two initial slashes')
-        self.log.info('redirecting to: %s', url)
-        return super().redirect(url, *args, **kwargs)
+        if default == _ARG_DEFAULT:
+            return self._get_body_argument(name, strip=strip)
+        return self._get_body_argument(name, default, strip)
 
-    def reverse_url(self, name: str, *args: Any, **kwargs: Any) -> str:
-        return self.application.reverse_url(name, *args, **kwargs)
+    def _get_body_argument(
+        self,
+        name: str,
+        default: Any = _ARG_DEFAULT,
+        strip: bool = True,
+    ) -> Optional[str]:
+        args = self._get_body_arguments(name, strip=strip)
+        if not args:
+            if default == _ARG_DEFAULT:
+                raise DefaultValueError(name)
+            return default
+        return args[-1]
+
+    def _get_body_arguments(self, name: str, strip: bool = True) -> list[str]:
+        values = []
+        for v in self.body_arguments.get(name, []):
+            s = self.decode_argument(v, name=name)
+            if isinstance(s, str):
+                s = _remove_control_chars_regex.sub(" ", s)
+                if strip:
+                    s = s.strip()
+            values.append(s)
+        return values
+
+    def parse_body_bytes(self):
+        if self._get_request_mime_type() == media_types.APPLICATION_JSON:  # JSON is parsed by the handler itself
+            # _ = self.json_body
+            return  # parsed lazily, on demand
+        else:
+            parse_body_arguments(
+                self.get_header('Content-Type', ''),
+                self.body_bytes,
+                self.body_arguments,
+                self.files,
+                self.h_params,
+            )
 
     @property
     def json_body(self):
-        if not hasattr(self, '_json_body'):
+        if self._json_body is None:
             self._json_body = self._get_json_body()
         return self._json_body
 
     def _get_json_body(self) -> Any:
         try:
-            return json_decode(self.request.body)
+            return json_decode(self.body_bytes)
         except FrontikJsonDecodeError as _:
             raise JSONBodyParseError()
 
-    @classmethod
-    def add_callback(cls, callback: Callable, *args: Any, **kwargs: Any) -> None:
-        
IOLoop.current().add_callback(callback, *args, **kwargs) - - @classmethod - def add_timeout(cls, deadline: float, callback: Callable, *args: Any, **kwargs: Any) -> Any: - return IOLoop.current().add_timeout(deadline, callback, *args, **kwargs) - - @staticmethod - def remove_timeout(timeout): - IOLoop.current().remove_timeout(timeout) - - @classmethod - def add_future(cls, future: Future, callback: Callable) -> None: - IOLoop.current().add_future(future, callback) - - # Requests handling + def decode_argument(self, value: bytes, name: Optional[str] = None) -> str: + try: + return value.decode("utf-8") + except UnicodeError: + self.log.warning(f'cannot decode utf-8 body parameter {name}, trying other charsets') - async def _execute(self, transforms, *args, **kwargs): - request_context.set_handler(self) try: - return await super()._execute(transforms, *args, **kwargs) - except Exception as ex: - self._handle_request_exception(ex) - return True + return frontik.util.decode_string_from_charset(value) + except UnicodeError: + self.log.exception(f'cannot decode body parameter {name}, ignoring invalid chars') + return value.decode('utf-8', 'ignore') - async def get(self, *args, **kwargs): - await self._execute_page(self.get_page) + def get_header(self, param_name, default=None): + return self.h_params.get(param_name.lower(), default) - async def post(self, *args, **kwargs): - await self._execute_page(self.post_page) + def set_header(self, k, v): + self.resp_headers[k] = v - async def put(self, *args, **kwargs): - await self._execute_page(self.put_page) + def _get_request_mime_type(self) -> str: + content_type = self.get_header('Content-Type', '') + return re.split(MEDIA_TYPE_PARAMETERS_SEPARATOR_RE, content_type)[0] - async def delete(self, *args, **kwargs): - await self._execute_page(self.delete_page) + def clear_header(self, name: str) -> None: + if name in self.resp_headers: + del self.resp_headers[name] - async def head(self, *args, **kwargs): - await self._execute_page(self.get_page) + def clear_cookie(self, name: str, path: str = '/', domain: Optional[str] = None) -> None: # type: ignore + expires = datetime.datetime.utcnow() - datetime.timedelta(days=365) + self.set_cookie(name, value="", expires=expires, path=path, domain=domain) - def options(self, *args, **kwargs): - self.return_405() + def get_cookie(self, param_name, default): + return self.c_params.get(param_name, default) - async def _execute_page(self, page_handler_method: Callable[[], Coroutine[Any, Any, None]]) -> None: - self.stages_logger.commit_stage('prepare') + def set_cookie( + self, + name: str, + value: Union[str, bytes], + domain: Optional[str] = None, + expires: Optional[Union[float, tuple, datetime.datetime]] = None, + path: str = "/", + expires_days: Optional[float] = None, + # Keyword-only args start here for historical reasons. 
+        *,
+        max_age: Optional[int] = None,
+        httponly: bool = False,
+        secure: bool = False,
+        samesite: Optional[str] = None,
+    ) -> None:
+        name = str(name)
+        value = str(value)
+        if re.search(r"[\x00-\x20]", name + value):
+            # Don't let us accidentally inject bad stuff
+            raise ValueError("Invalid cookie %r: %r" % (name, value))
+
+        if name in self.resp_cookies:
+            del self.resp_cookies[name]
+        self.resp_cookies[name] = {'value': value}
+        morsel = self.resp_cookies[name]
+        if domain:
+            morsel["domain"] = domain
+        if expires_days is not None and not expires:
+            expires = datetime.datetime.utcnow() + datetime.timedelta(days=expires_days)
+        if expires:
+            morsel["expires"] = format_timestamp(expires)
+        if path:
+            morsel["path"] = path
+        if max_age:
+            # keep the python-style max_age key: these entries are passed
+            # through as keyword arguments to Response.set_cookie
+            morsel["max_age"] = str(max_age)
+        if httponly:
+            # Note that SimpleCookie ignores the value here. The presence of an
+            # httponly (or secure) key is treated as true.
+            morsel["httponly"] = True
+        if secure:
+            morsel["secure"] = True
+        if samesite:
+            morsel["samesite"] = samesite
 
     # Requests handling
 
+    def require_debug_access(self, login: Optional[str] = None, passwd: Optional[str] = None) -> None:
+        if self._debug_access is None:
+            if options.debug:
+                debug_access = True
+            else:
+                check_login = login if login is not None else options.debug_login
+                check_passwd = passwd if passwd is not None else options.debug_password
+                frontik.auth.check_debug_auth(self, check_login, check_passwd)
+                debug_access = True
-        returned_value: ReturnedValue = await execute_page_method_with_dependencies(self, page_handler_method)
-        for returned_value_handler in self.returned_value_handlers:
-            returned_value_handler(self, returned_value)
+        self._debug_access = debug_access
 
-        self._handler_finished_notification()
-        await self.finish_group.get_gathering_future()
-        await self.finish_group.get_finish_future()
+    def set_status(self, status_code: int, reason: Optional[str] = None) -> None:
+        status_code = status_code if status_code in ALLOWED_STATUSES else http.client.SERVICE_UNAVAILABLE
 
-        render_result = await self._postprocess()
-        if render_result is not None:
-            self.write(render_result)
+        self._status = status_code
+        self._reason = reason
 
-    @router.get()
-    async def get_page(self):
-        """This method can be implemented in the subclass"""
-        self.return_405()
+    def get_status(self) -> int:
+        return self._status
 
-    @router.post()
-    async def post_page(self):
-        """This method can be implemented in the subclass"""
-        self.return_405()
+    def redirect(self, url: str, permanent: bool = False, status: Optional[int] = None):
+        if url.startswith('//'):
+            raise RuntimeError('403 cannot redirect path with two initial slashes')
+        self.log.info('redirecting to: %s', url)
+        if status is None:
+            status = 301 if permanent else 302
+        else:
+            assert isinstance(status, int) and 300 <= status <= 399
+        raise RedirectPageSignal(url, status)
 
-    @router.put()
-    async def put_page(self):
-        """This method can be implemented in the subclass"""
-        self.return_405()
+    def finish(self, data: Optional[Union[str, bytes, dict]] = None) -> Future[None]:
+        raise FinishPageSignal(data)
 
-    @router.delete()
-    async def delete_page(self):
-        """This method can be implemented in the subclass"""
-        self.return_405()
 
-    def return_405(self) -> None:
-        allowed_methods = [name for name in ('get', 'post', 'put', 'delete') if f'{name}_page' in vars(self.__class__)]
-        self.set_header('Allow', ', '.join(allowed_methods))
-        self.set_status(405)
-        self.finish()
 
-    def get_page_fail_fast(self, request_result: 
RequestResult) -> None: - self.__return_error(request_result.status_code, error_info={'is_fail_fast': True}) + async def get_page_fail_fast(self, request_result: RequestResult): + return await self.__return_error(request_result.status_code, error_info={'is_fail_fast': True}) - def post_page_fail_fast(self, request_result: RequestResult) -> None: - self.__return_error(request_result.status_code, error_info={'is_fail_fast': True}) + async def post_page_fail_fast(self, request_result: RequestResult): + return await self.__return_error(request_result.status_code, error_info={'is_fail_fast': True}) - def put_page_fail_fast(self, request_result: RequestResult) -> None: - self.__return_error(request_result.status_code, error_info={'is_fail_fast': True}) + async def put_page_fail_fast(self, request_result: RequestResult): + return await self.__return_error(request_result.status_code, error_info={'is_fail_fast': True}) - def delete_page_fail_fast(self, request_result: RequestResult) -> None: - self.__return_error(request_result.status_code, error_info={'is_fail_fast': True}) + async def delete_page_fail_fast(self, request_result: RequestResult): + return await self.__return_error(request_result.status_code, error_info={'is_fail_fast': True}) - def __return_error(self, response_code: int, **kwargs: Any) -> None: - self.send_error(response_code if 300 <= response_code < 500 else 502, **kwargs) + async def __return_error(self, response_code: int, **kwargs: Any) -> tuple[int, dict, Any]: + return await self.send_error(response_code if 300 <= response_code < 500 else 502, **kwargs) # Finish page def is_finished(self) -> bool: return self._finished - def check_finished(self, callback: Callable) -> Callable: - @wraps(callback) - def wrapper(*args, **kwargs): - if self.is_finished(): - self.log.warning('page was already finished, %s ignored', callback) - else: - return callback(*args, **kwargs) - - return wrapper - - def finish_with_postprocessors(self) -> None: + async def finish_with_postprocessors(self) -> tuple[int, dict, Any]: if not self.finish_group.get_finish_future().done(): self.finish_group.abort() - def _cb(future): - if future.result() is not None: - self.finish(future.result()) - - asyncio.create_task(self._postprocess()).add_done_callback(_cb) + content = await self._postprocess() + return self.get_status(), self.resp_headers, content def run_task(self: PageHandler, coro: Coroutine) -> Task: task = asyncio.create_task(coro) @@ -479,39 +599,33 @@ async def _postprocess(self) -> Any: ) return postprocessed_result - def on_connection_close(self): - with request_context.request_context(self.request, self.request_id): - super().on_connection_close() - - self.finish_group.abort() - self.set_status(CLIENT_CLOSED_REQUEST, 'Client closed the connection: aborting request') - - self.stages_logger.commit_stage('page') - self.stages_logger.flush_stages(self.get_status()) - - self.finish() - - def on_finish(self): + def on_finish(self, status: int): self.stages_logger.commit_stage('flush') - self.stages_logger.flush_stages(self.get_status()) + self.stages_logger.flush_stages(status) - def _handle_request_exception(self, e: BaseException) -> None: + async def _handle_request_exception(self, e: BaseException) -> tuple[int, dict, Any]: if isinstance(e, AbortAsyncGroup): self.log.info('page was aborted, skipping postprocessing') - return + raise e if isinstance(e, FinishWithPostprocessors): if e.wait_finish_group: self._handler_finished_notification() - self.add_future(self.finish_group.get_finish_future(), 
lambda _: self.finish_with_postprocessors()) - else: - self.finish_with_postprocessors() - return + await self.finish_group.get_finish_future() + return await self.finish_with_postprocessors() + + if isinstance(e, HTTPErrorWithPostprocessors): + self.set_status(e.status_code) + return await self.finish_with_postprocessors() + + if isinstance(e, tornado.web.HTTPError): + self.set_status(e.status_code) + return await self.write_error(e.status_code, exc_info=sys.exc_info()) if self._finished and not isinstance(e, Finish): # tornado will handle Finish by itself # any other errors can't complete after handler is finished - return + raise e if isinstance(e, FailFastError): request = e.failed_result.request @@ -532,51 +646,42 @@ def _handle_request_exception(self, e: BaseException) -> None: ) try: - error_method_name = f'{self.request.method.lower()}_page_fail_fast' # type: ignore + error_method_name = f'{self.method.lower()}_page_fail_fast' # type: ignore method = getattr(self, error_method_name, None) if callable(method): - method(e.failed_result) + return await method(e.failed_result) else: - self.__return_error(e.failed_result.status_code, error_info={'is_fail_fast': True}) + return await self.__return_error(e.failed_result.status_code, error_info={'is_fail_fast': True}) except Exception as exc: - super()._handle_request_exception(exc) + raise exc else: - super()._handle_request_exception(e) + raise e - def send_error(self, status_code: int = 500, **kwargs: Any) -> None: + async def send_error(self, status_code: int = 500, **kwargs: Any) -> tuple[int, dict, Any]: """`send_error` is adapted to support `write_error` that can call `finish` asynchronously. """ self.stages_logger.commit_stage('page') - if self._headers_written: - super().send_error(status_code, **kwargs) - return - - reason = kwargs.get('reason') + self._reason = kwargs.get('reason') if 'exc_info' in kwargs: exception = kwargs['exc_info'][1] if isinstance(exception, tornado.web.HTTPError) and exception.reason: - reason = exception.reason + self._reason = exception.reason else: exception = None if not isinstance(exception, HTTPErrorWithPostprocessors): - self.clear() + set_default_headers(self.request_id) - self.set_status(status_code, reason=reason) + self.set_status(status_code, reason=self._reason) - try: - self.write_error(status_code, **kwargs) - except Exception: - self.log.exception('Uncaught exception in write_error') - if not self._finished: - self.finish() + return await self.write_error(status_code, **kwargs) - def write_error(self, status_code: int = 500, **kwargs: Any) -> None: + async def write_error(self, status_code: int = 500, **kwargs: Any) -> tuple[int, dict, Any]: """ `write_error` can call `finish` asynchronously if HTTPErrorWithPostprocessors is raised. 
""" @@ -584,62 +689,17 @@ def write_error(self, status_code: int = 500, **kwargs: Any) -> None: exception = kwargs['exc_info'][1] if 'exc_info' in kwargs else None if isinstance(exception, HTTPErrorWithPostprocessors): - self.finish_with_postprocessors() - return + return await self.finish_with_postprocessors() - self.set_header('Content-Type', media_types.TEXT_HTML) - super().write_error(status_code, **kwargs) + return build_error_data(self.request_id, status_code, self._reason) def cleanup(self) -> None: + self._finished = True if hasattr(self, 'active_limit'): self.active_limit.release() - def finish(self, chunk: Optional[Union[str, bytes, dict]] = None) -> Future[None]: - self.stages_logger.commit_stage('postprocess') - for name, value in self._mandatory_headers.items(): - self.set_header(name, value) - - for args, kwargs in self._mandatory_cookies.values(): - try: - self.set_cookie(*args, **kwargs) - except ValueError: - self.set_status(http.client.BAD_REQUEST) - - if self._status_code in (204, 304) or (100 <= self._status_code < 200): - self._write_buffer = [] - chunk = None - - finish_future = super().finish(chunk) - self.cleanup() - return finish_future - # postprocessors - def set_mandatory_header(self, name: str, value: str) -> None: - self._mandatory_headers[name] = value - - def set_mandatory_cookie( - self, - name: str, - value: str, - domain: Optional[str] = None, - expires: Optional[str] = None, - path: str = '/', - expires_days: Optional[int] = None, - **kwargs: Any, - ) -> None: - self._mandatory_cookies[name] = ((name, value, domain, expires, path, expires_days), kwargs) - - def clear_header(self, name: str) -> None: - if name in self._mandatory_headers: - del self._mandatory_headers[name] - super().clear_header(name) - - def clear_cookie(self, name: str, path: str = '/', domain: Optional[str] = None) -> None: # type: ignore - if name in self._mandatory_cookies: - del self._mandatory_cookies[name] - super().clear_cookie(name, path=path, domain=domain) - async def _run_postprocessors(self, postprocessors: list) -> bool: for p in postprocessors: if asyncio.iscoroutinefunction(p): @@ -677,7 +737,7 @@ def add_postprocessor(self, postprocessor: Any) -> None: async def _generic_producer(self): self.log.debug('finishing plaintext') - if self._headers.get('Content-Type') is None: + if self.resp_headers.get('Content-Type') is None: self.set_header('Content-Type', media_types.TEXT_HTML) return self.text, None @@ -708,13 +768,10 @@ def modify_http_client_request(self, balanced_request: RequestBuilder) -> None: balanced_request.path = make_url(balanced_request.path, debug_timestamp=int(time.time())) for header_name in ('Authorization', DEBUG_AUTH_HEADER_NAME): - authorization = self.request.headers.get(header_name) + authorization = self.get_header(header_name) if authorization is not None: balanced_request.headers[header_name] = authorization - def group(self, futures: dict) -> Task: - return self.run_task(gather_dict(coro_dict=futures)) - def get_url( self, host: str, @@ -952,15 +1009,54 @@ def _execute_http_client_method( return future -class ErrorHandler(PageHandler, tornado.web.ErrorHandler): - pass +@Depends +async def get_current_handler(request: Request) -> PageHandler: + return request.state.handler -class RedirectHandler(PageHandler, tornado.web.RedirectHandler): - @router.get() - def get_page(self): - tornado.web.RedirectHandler.get(self) +class RequestCancelledMiddleware: + # https://github.com/tiangolo/fastapi/discussions/11360 + def __init__(self, app): + self.app = app 
+    async def __call__(self, scope, receive, send):
+        if scope["type"] != "http":
+            await self.app(scope, receive, send)
+            return
 
-async def get_current_handler(request: Request) -> PageHandler:
-    return request['handler']
+        queue = asyncio.Queue()
+
+        async def message_poller(sentinel, handler_task):
+            nonlocal queue
+            while True:
+                message = await receive()
+                if message["type"] == "http.disconnect":
+                    handler_task.cancel()
+                    return sentinel
+
+                await queue.put(message)
+
+        sentinel = object()
+        handler_task = asyncio.create_task(self.app(scope, queue.get, send))
+        poller_task = asyncio.create_task(message_poller(sentinel, handler_task))
+
+        try:
+            return await handler_task
+        except asyncio.CancelledError:
+            pass
+            # handler_logger.info('Cancelling request: client has disconnected')
+
+
+def set_default_headers(request_id):
+    return {
+        'Server': f'Frontik/{frontik_version}',
+        'X-Request-Id': request_id,
+    }
+
+
+def build_error_data(request_id, status_code: int = 500, message='Internal Server Error') -> tuple[int, dict, Any]:
+    headers = set_default_headers(request_id)
+    headers['Content-Type'] = media_types.TEXT_HTML
+    content = f'<html><title>{status_code}: {message}</title><body>{status_code}: {message}</body></html>'
+    return status_code, headers, content
diff --git a/frontik/integrations/sentry.py b/frontik/integrations/sentry.py
index 9ecb237d4..0e0840b5f 100644
--- a/frontik/integrations/sentry.py
+++ b/frontik/integrations/sentry.py
@@ -4,7 +4,7 @@
 import sentry_sdk
 from http_client.request_response import FailFastError
-from sentry_sdk.integrations.tornado import TornadoIntegration
+from sentry_sdk.integrations.fastapi import FastApiIntegration
 from tornado.web import HTTPError
 
 from frontik.integrations import Integration, integrations_logger
@@ -27,7 +27,7 @@ def initialize_app(self, app: FrontikApplication) -> Optional[Future]:
             max_breadcrumbs=options.sentry_max_breadcrumbs,
             default_integrations=False,
             auto_enabling_integrations=False,
-            integrations=[TornadoIntegration()],
+            integrations=[FastApiIntegration()],
             ignore_errors=[HTTPError, FailFastError],
         )
 
diff --git a/frontik/integrations/telemetry.py b/frontik/integrations/telemetry.py
index 164124119..e8b5cf0d5 100644
--- a/frontik/integrations/telemetry.py
+++ b/frontik/integrations/telemetry.py
@@ -61,43 +61,43 @@ def patched_set_attribute(self: SpanImpl, key: str, value: types.AttributeValue)
         SpanImpl.set_attribute = patched_set_attribute  # type: ignore
 
     def initialize_app(self, app: FrontikApplication) -> Optional[Future]:
-        if not options.opentelemetry_enabled:
-            return None
-
-        integrations_logger.info('start telemetry')
-
-        resource = Resource(
-            attributes={
-                ResourceAttributes.SERVICE_NAME: options.app,  # type: ignore
-                ResourceAttributes.SERVICE_VERSION: app.application_version(),  # type: ignore
-                ResourceAttributes.HOST_NAME: options.node_name,
-                ResourceAttributes.CLOUD_REGION: http_client_options.datacenter,
-            },
-        )
-        otlp_exporter = OTLPSpanExporter(endpoint=options.opentelemetry_collector_url, insecure=True)
-
-        provider = TracerProvider(
-            resource=resource,
-            id_generator=FrontikIdGenerator(),
-            sampler=ParentBased(TraceIdRatioBased(options.opentelemetry_sampler_ratio)),
-        )
-
-        provider.add_span_processor(BatchSpanProcessor(otlp_exporter))
-        trace.set_tracer_provider(provider)
-
-        self.aiohttp_instrumentor.instrument(request_hook=_client_request_hook, response_hook=_client_response_hook)
-
-        self.tornado_instrumentor.instrument(server_request_hook=_server_request_hook)
+        # if not options.opentelemetry_enabled:
+        # 
return None + # + # integrations_logger.info('start telemetry') + # + # resource = Resource( + # attributes={ + # ResourceAttributes.SERVICE_NAME: options.app, # type: ignore + # ResourceAttributes.SERVICE_VERSION: app.application_version(), # type: ignore + # ResourceAttributes.HOST_NAME: options.node_name, + # ResourceAttributes.CLOUD_REGION: http_client_options.datacenter, + # }, + # ) + # otlp_exporter = OTLPSpanExporter(endpoint=options.opentelemetry_collector_url, insecure=True) + # + # provider = TracerProvider( + # resource=resource, + # id_generator=FrontikIdGenerator(), + # sampler=ParentBased(TraceIdRatioBased(options.opentelemetry_sampler_ratio)), + # ) + # + # provider.add_span_processor(BatchSpanProcessor(otlp_exporter)) + # trace.set_tracer_provider(provider) + # + # self.aiohttp_instrumentor.instrument(request_hook=_client_request_hook, response_hook=_client_response_hook) + # + # self.tornado_instrumentor.instrument(server_request_hook=_server_request_hook) return None def deinitialize_app(self, app: FrontikApplication) -> Optional[Future]: - if not options.opentelemetry_enabled: - return None - - integrations_logger.info('stop telemetry') - self.aiohttp_instrumentor.uninstrument() - self.tornado_instrumentor.uninstrument() + # if not options.opentelemetry_enabled: + # return None + # + # integrations_logger.info('stop telemetry') + # self.aiohttp_instrumentor.uninstrument() + # self.tornado_instrumentor.uninstrument() return None def initialize_handler(self, handler): diff --git a/frontik/loggers/stages.py b/frontik/loggers/stages.py index 17629831a..155a46e0e 100644 --- a/frontik/loggers/stages.py +++ b/frontik/loggers/stages.py @@ -8,8 +8,6 @@ from frontik import request_context if TYPE_CHECKING: - from tornado.httputil import HTTPServerRequest - from frontik.integrations.statsd import StatsDClient, StatsDClientStub stages_logger = logging.getLogger('stages') @@ -18,8 +16,8 @@ class StagesLogger: Stage = namedtuple('Stage', ('name', 'delta', 'start_delta')) - def __init__(self, request: HTTPServerRequest, statsd_client: StatsDClient | StatsDClientStub) -> None: - self._last_stage_time = self._start_time = request._start_time + def __init__(self, request_start_time, statsd_client: StatsDClient | StatsDClientStub) -> None: + self._last_stage_time = self._start_time = request_start_time self._stages: list[StagesLogger.Stage] = [] self._statsd_client = statsd_client diff --git a/frontik/producers/json_producer.py b/frontik/producers/json_producer.py index 373c74db0..4881e0772 100644 --- a/frontik/producers/json_producer.py +++ b/frontik/producers/json_producer.py @@ -108,7 +108,7 @@ async def _finish_with_template(self) -> tuple[Optional[str], None]: msg = 'Cannot apply template, no Jinja2 environment configured' raise Exception(msg) - if self.handler._headers.get('Content-Type') is None: + if self.handler.resp_headers.get('Content-Type', None) is None: self.handler.set_header('Content-Type', media_types.TEXT_HTML) try: @@ -141,7 +141,7 @@ async def _finish_with_template(self) -> tuple[Optional[str], None]: async def _finish_with_json(self) -> tuple[str, None]: self.log.debug('finishing without templating') - if self.handler._headers.get('Content-Type') is None: + if self.handler.resp_headers.get('Content-Type', None) is None: self.handler.set_header('Content-Type', media_types.APPLICATION_JSON) return self.json.to_string(), None diff --git a/frontik/producers/xml_producer.py b/frontik/producers/xml_producer.py index 91c0d0aae..86144943d 100644 --- 
a/frontik/producers/xml_producer.py
+++ b/frontik/producers/xml_producer.py
@@ -76,7 +76,7 @@ def set_xsl(self, filename: str) -> None:
     async def _finish_with_xslt(self) -> tuple[Optional[str], Optional[list[Any]]]:
         self.log.debug('finishing with XSLT')
 
-        if self.handler._headers.get('Content-Type') is None:
+        if self.handler.resp_headers.get('Content-Type', None) is None:
             self.handler.set_header('Content-Type', media_types.TEXT_HTML)
 
         def job():
@@ -127,7 +127,7 @@ def get_xsl_log() -> str:
     async def _finish_with_xml(self, escape_xmlns: bool = False) -> tuple[bytes, None]:
         self.log.debug('finishing without XSLT')
 
-        if self.handler._headers.get('Content-Type') is None:
+        if self.handler.resp_headers.get('Content-Type', None) is None:
             self.handler.set_header('Content-Type', media_types.APPLICATION_XML)
 
         if escape_xmlns:
diff --git a/frontik/routing.py b/frontik/routing.py
index b02e4b3e0..c3d8e9e27 100644
--- a/frontik/routing.py
+++ b/frontik/routing.py
@@ -1,141 +1,112 @@
-from __future__ import annotations
-
-import importlib
 import logging
-import os
+from fastapi.routing import APIRouter
+from typing import Callable
 import re
-from inspect import isclass
-from typing import TYPE_CHECKING, Optional
+import inspect
+import importlib
+import pkgutil
 
-from tornado.routing import ReversibleRouter, Router
-from tornado.web import RequestHandler
 
-from frontik.handler import ErrorHandler
-from frontik.util import reverse_regex_named_groups
 
+routing_logger = logging.getLogger('frontik.routing')
 
-if TYPE_CHECKING:
-    from typing import Any
 
-    from tornado.httputil import HTTPMessageDelegate, HTTPServerRequest
 
-    from frontik.app import FrontikApplication
 
+routers = []
+normal_routes = {}
+regex_mapping = []
 
 
+class FrontikRouter(APIRouter):
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        routers.append(self)
+        self._cls = None
+        self._path = None
 
-routing_logger = logging.getLogger('frontik.routing')
 
+    def get(self, path: str, cls, **kwargs) -> Callable:
+        self._path, self._cls = path, cls
+        return super().get(path, **kwargs)
+
+    def post(self, path: str, cls, **kwargs) -> Callable:
+        self._path, self._cls = path, cls
+        return super().post(path, **kwargs)
+
+    def put(self, path: str, cls, **kwargs) -> Callable:
+        self._path, self._cls = path, cls
+        return super().put(path, **kwargs)
+
+    def delete(self, path: str, cls, **kwargs) -> Callable:
+        self._path, self._cls = path, cls
+        return super().delete(path, **kwargs)
+
+    def add_api_route(self, *args, **kwargs):
+        super().add_api_route(*args, **kwargs)
+        route = self.routes[-1]
+        m = next(iter(route.methods), None)
+        normal_routes[(self._path, m)] = (route, self._cls)  # we need a dict of routes to know the handler class
+        self._cls, self._path = None, None
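Review context: a sketch of how a page module is presumably expected to register routes with these routers (the `Page` class and paths are illustrative; `cls` tells `setup_page_handler` which legacy handler class to instantiate, and `router`/`regex_router` are the instances created in frontik/app.py above):

    from frontik.app import regex_router, router
    from frontik.handler import PageHandler, get_current_handler

    class Page(PageHandler):  # illustrative
        pass

    @router.get('/simple_page', Page)
    async def simple_page(handler: PageHandler = get_current_handler) -> None:
        handler.text = 'ok'

    @regex_router.get(r'/users/(?P<user_id>\d+)', Page)
    async def user_page(handler: PageHandler = get_current_handler) -> None:
        handler.text = handler.get_path_argument('user_id')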
+    def add_api_route(self, *args, **kwargs):
+        super().add_api_route(*args, **kwargs)

-class FileMappingRouter(Router):
-    def __init__(self, module: Any) -> None:
-        self.name = module.__name__
+        regex_mapping.append((
+            re.compile(self._path),
+            self.routes[-1],
+            self._cls
+        ))

-    def find_handler(self, request: HTTPServerRequest, **kwargs: Any) -> Optional[HTTPMessageDelegate]:
-        url_parts = request.path.strip('/').split('/')
-        application = kwargs['application']
+        self._cls, self._path = None, None

-        if any('.' in part for part in url_parts):
-            routing_logger.info('url contains "." character, using 404 page')
-            return get_application_404_handler_delegate(application, request)

-        page_name = '.'.join(filter(None, url_parts))
-        page_module_name = '.'.join(filter(None, (self.name, page_name)))
-        routing_logger.debug('page module: %s', page_module_name)
+def build_path() -> str:
+    curframe = inspect.currentframe()
+    calframe = inspect.getouterframes(curframe, 2)
+    page_file_path = calframe[1].filename
+    idx = page_file_path.find('/pages')
+    if idx == -1:
+        raise RuntimeError("can't generate url path")

-        if len(page_module_name) > MAX_MODULE_NAME_LENGTH:
-            routing_logger.info('page module name exceeds PATH_MAX (%s), using 404 page', MAX_MODULE_NAME_LENGTH)
-            return get_application_404_handler_delegate(application, request)
+    # cut everything up to and including '/pages' and strip the '.py' extension
+    return page_file_path[idx + 6:-3]

-        def _handle_general_module_import_exception() -> HTTPMessageDelegate:
-            routing_logger.exception('error while importing %s module', page_module_name)
-            return _get_application_500_handler_delegate(application, request)
+def _import_submodules(package: str) -> None:
+    package = importlib.import_module(package)
+    for loader, name, is_pkg in pkgutil.walk_packages(package.__path__):
+        full_name = package.__name__ + '.' + name
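+        # importing a page module triggers its @router.<method> decorators,
+        # which is what actually registers the route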
+        try:
-            page_module = importlib.import_module(page_module_name)
-            routing_logger.debug('using %s from %s', page_module_name, page_module.__file__)
-        except ModuleNotFoundError as module_not_found_error:
-            if not (
-                page_module_name == module_not_found_error.name
-                or page_module_name.startswith(module_not_found_error.name + '.')  # type: ignore
-            ):
-                return _handle_general_module_import_exception()
-            routing_logger.warning('%s module not found', (self.name, page_module_name))
-            return get_application_404_handler_delegate(application, request)
-        except Exception:
-            return _handle_general_module_import_exception()
-
-        if not hasattr(page_module, 'Page'):
-            routing_logger.error('%s.Page class not found', page_module_name)
-            return get_application_404_handler_delegate(application, request)
-
-        return application.get_handler_delegate(request, page_module.Page)
-
-
-class FrontikRouter(ReversibleRouter):
-    def __init__(self, application: FrontikApplication) -> None:
-        self.application = application
-        self.handlers = []
-        self.handler_names: dict[str, Any] = {}
-
-        for handler_spec in application.application_urls():
-            if len(handler_spec) > 2:
-                pattern, handler, handler_name = handler_spec
-            else:
-                handler_name = None
-                pattern, handler = handler_spec
-
-            self.handlers.append((re.compile(pattern), handler))
-
-            if handler_name is not None:
-                self.handler_names[handler_name] = pattern
-
-    def find_handler(self, request: HTTPServerRequest, **kwargs: Any) -> HTTPMessageDelegate:
-        routing_logger.info('requested url: %s', request.uri)
-
-        for pattern, handler in self.handlers:
-            match = pattern.match(request.uri)
-            if match:
-                routing_logger.debug('using %r', handler)
-
-                if isclass(handler) and issubclass(handler, RequestHandler):
-                    _add_request_arguments_from_path(request, match)
-                    return self.application.get_handler_delegate(request, handler)
-
-                elif isinstance(handler, Router):
-                    delegate = handler.find_handler(request, application=self.application)
-                    if delegate is not None:
-                        return delegate
-
-                else:
-                    routing_logger.error('handler %r is of unknown type', handler)
-                    return _get_application_500_handler_delegate(self.application, request)
-
-        routing_logger.error('match for request url "%s" not found', request.uri)
-        return get_application_404_handler_delegate(self.application, request)
-
-    def reverse_url(self, name: str, *args: Any, **kwargs: Any) -> str:
-        if name not in self.handler_names:
-            raise KeyError('%s not found in named urls' % name)
-
-        return reverse_regex_named_groups(self.handler_names[name], *args, **kwargs)
-
-
-def get_application_404_handler_delegate(
-    application: FrontikApplication,
-    request: HTTPServerRequest,
-) -> HTTPMessageDelegate:
-    handler_class, handler_kwargs = application.application_404_handler(request)
-    return application.get_handler_delegate(request, handler_class, handler_kwargs)
-
-
-def _get_application_500_handler_delegate(
-    application: FrontikApplication,
-    request: HTTPServerRequest,
-) -> HTTPMessageDelegate:
-    return application.get_handler_delegate(request, ErrorHandler, {'status_code': 500})
-
-
-def _add_request_arguments_from_path(request: HTTPServerRequest, match: re.Match) -> None:
-    arguments = match.groupdict()
-    for name, value in arguments.items():
-        if value:
-            request.arguments.setdefault(name, []).append(value)
+            importlib.import_module(full_name)
+        except ModuleNotFoundError:
+            continue
+        except Exception as ex:
+            routing_logger.error('failed to import page module %s: %s', full_name, ex)
+            continue
+        if is_pkg:
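+            # recurse into nested packages so their page modules register too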
+            _import_submodules(full_name)
+
+
+def fill_router(app_module: str) -> None:
+    # import all pages on startup
+    package_name = f'{app_module}.pages'
+    _import_submodules(package_name)
diff --git a/frontik/server.py b/frontik/server.py
index 0aa4336e2..0fcd4e049 100644
--- a/frontik/server.py
+++ b/frontik/server.py
@@ -14,17 +14,23 @@
 from functools import partial
 from threading import Lock
 from typing import Callable, Optional, Union
+import socket

 import tornado.autoreload
 from http_client.balancing import Upstream
 from http_client.options import options as http_client_options
 from tornado.httpserver import HTTPServer

-from frontik.app import FrontikApplication
+from frontik.app import FrontikApplication, core_middleware, regex_router_fallback
+from frontik.handler import RequestCancelledMiddleware
+from frontik.routing import routers, fill_router
 from frontik.config_parser import parse_configs
 from frontik.loggers import MDC
 from frontik.options import options
 from frontik.process import fork_workers
+from fastapi import FastAPI, APIRouter
+import uvicorn
+

 log = logging.getLogger('server')
@@ -113,6 +119,8 @@ def _run_worker(app: FrontikApplication) -> None:
     loop = asyncio.get_event_loop()
     executor = ThreadPoolExecutor(options.common_executor_pool_size)
     loop.set_default_executor(executor)
+
+    # asyncio.run(_init_app(app))
     initialize_application_task = loop.create_task(_init_app(app))

     def initialize_application_task_result_handler(future):
@@ -125,16 +133,68 @@ def initialize_application_task_result_handler(future):
     initialize_application_task.result()


-def run_server(app: FrontikApplication) -> None:
+async def periodic_task(callback: Callable, check_time_ms: int) -> None:
+    while True:
+        await asyncio.sleep(check_time_ms / 1000)
+        callback()
+
+
+def bind_socket(host: str, port: int) -> socket.socket:
+    sock = socket.socket(family=socket.AF_INET)
+    sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
+    sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
+
+    try:
+        sock.bind((host, port))
+    except OSError as exc:
+        log.error(exc)
+        sys.exit(1)
+
+    log.info("Uvicorn running on %s://%s:%d (Press CTRL+C to quit)", "http", host, sock.getsockname()[1])
+    sock.set_inheritable(True)
+    return sock
+
+
+def run_server(frontik_app: FrontikApplication):
     """Starts Frontik server for an application"""
     loop = asyncio.get_event_loop()

     log.info('starting server on %s:%s', options.host, options.port)
-    http_server = HTTPServer(app, xheaders=options.xheaders)
-    http_server.bind(options.port, options.host, reuse_port=options.reuse_port)
-    http_server.start()
+
+    fill_router(frontik_app.app_module)
+    fastapi_app = FastAPI()
+    fastapi_app.frontik_app = frontik_app
+    for router in routers:
+        fastapi_app.include_router(router)
+    fastapi_app.middleware('http')(core_middleware)
+    fastapi_app.add_middleware(RequestCancelledMiddleware)
+
+    fastapi_app.add_exception_handler(404, regex_router_fallback)
+
+    config = uvicorn.Config(
+        fastapi_app,
+        host=options.host,
+        port=options.port,
+        log_level='trace',  # uvicorn log level, from 'trace' (most verbose) to 'critical'
+        loop='none',
+        log_config=None,
+        access_log=False,
+        server_header=False,
+    )
+    server = uvicorn.Server(config)

     if options.autoreload:
-        tornado.autoreload.start(1000)
+        check_time: int = 500
+        modify_times: dict[str, float] = {}
+        reload = partial(tornado.autoreload._reload_on_update, modify_times)
+
+        server_task = asyncio.gather(server._serve(), periodic_task(reload, check_time))
+    else:
+        sock = bind_socket(options.host, options.port)
+        server_task = asyncio.create_task(server._serve([sock]))

     def worker_sigterm_handler(_signum, _frame):
         log.info('requested shutdown, shutting down server on %s:%d', options.host, options.port)
@@ -142,47 +202,55 @@ def worker_sigterm_handler(_signum, _frame):
         loop.call_soon_threadsafe(server_stop)

     def server_stop():
-        deinit_task = loop.create_task(_deinit_app(app))
-        http_server.stop()
-
-        if loop.is_running():
-            log.info('going down in %s seconds', options.stop_timeout)
+        log.info('going down in %s seconds', options.stop_timeout)

-            def ioloop_stop(_deinit_task):
-                if loop.is_running():
-                    log.info('stopping IOLoop')
-                    loop.stop()
-                    log.info('stopped')
+        def ioloop_stop(_deinit_task):
+            if loop.is_running():
+                log.info('stopping IOLoop')
+                loop.stop()
+                log.info('stopped')

-            deinit_task.add_done_callback(ioloop_stop)
+        deinit_task = loop.create_task(_deinit_app(frontik_app, server))
+        deinit_task.add_done_callback(ioloop_stop)

     signal.signal(signal.SIGTERM, worker_sigterm_handler)
     signal.signal(signal.SIGINT, worker_sigterm_handler)

+    def sig_handler(signum, _frame):
+        if signum == signal.SIGUSR1:
+            message = f'{signum} received. Current tasks:\n' + '\n'.join(map(str, asyncio.all_tasks()))
+            log.info(message)
+
+    signal.signal(signal.SIGUSR1, sig_handler)
+
+    return server_task
+

 async def _init_app(app: FrontikApplication) -> None:
     await app.init()
-    run_server(app)
+    server_task = run_server(app)
     log.info('Successfully inited application %s', app.app)
     with app.worker_state.count_down_lock:
         app.worker_state.init_workers_count_down.value -= 1
         log.info('worker is up, remaining workers = %s', app.worker_state.init_workers_count_down.value)
+    await server_task


-async def _deinit_app(app: FrontikApplication) -> None:
-    deinit_futures: list[Optional[Union[Future, Coroutine]]] = []
+async def kill(app, server):
+    await asyncio.sleep(options.stop_timeout)
+    if app.tornado_http_client is not None:
+        await app.tornado_http_client.client_session.close()
+    server.should_exit = True

-    app.upstream_manager.deregister_service_and_close()
+async def _deinit_app(app: FrontikApplication, server) -> None:
+    deinit_futures: list[Optional[Union[Future, Coroutine]]] = [kill(app, server)]
     deinit_futures.extend([integration.deinitialize_app(app) for integration in app.available_integrations])

-    if deinit_futures:
-        try:
-            await asyncio.gather(*[future for future in deinit_futures if future])
-            log.info('Successfully deinited application')
-        except Exception as e:
-            log.exception('failed to deinit, deinit returned: %s', e)
+    app.upstream_manager.deregister_service_and_close()

-    await asyncio.sleep(options.stop_timeout)
-    if app.tornado_http_client is not None:
-        await app.tornado_http_client.client_session.close()
+    try:
+        await asyncio.gather(*[future for future in deinit_futures if future])
+        log.info('Successfully deinited application')
+    except Exception as e:
+        log.exception('failed to deinit application: %s', e)
diff --git a/frontik/timeout_tracking.py b/frontik/timeout_tracking.py
index ff14d80f1..cecb16771 100644
--- a/frontik/timeout_tracking.py
+++ b/frontik/timeout_tracking.py
@@ -1,6 +1,7 @@
 from __future__ import annotations

 import logging
+import time
 from collections import namedtuple
 from functools import partial
 from typing import TYPE_CHECKING, Optional
@@ -84,18 +85,18 @@ def __init__(
         self,
         outer_caller: Optional[str],
         outer_timeout_ms: float,
-        time_since_outer_request_start_sec_supplier: Callable,
+        request_start_time: float,
         *,
         threshold_ms: float = 100,
     ) -> None:
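+        # timings are now derived from an absolute request start timestamp
+        # instead of a supplier callable (see check() below)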
self.outer_caller = outer_caller self.outer_timeout_ms = outer_timeout_ms - self.time_since_outer_request_start_sec_supplier = time_since_outer_request_start_sec_supplier + self.request_start_time = request_start_time self.threshold_ms = threshold_ms def check(self, request: RequestBuilder) -> None: if self.outer_timeout_ms: - already_spent_time_ms = self.time_since_outer_request_start_sec_supplier() * 1000 + already_spent_time_ms = (time.time() - self.request_start_time) * 1000 expected_timeout_ms = self.outer_timeout_ms - already_spent_time_ms request_timeout_ms = request.request_time_left * 1000 diff = request_timeout_ms - expected_timeout_ms @@ -113,7 +114,7 @@ def check(self, request: RequestBuilder) -> None: def get_timeout_checker( outer_caller: Optional[str], outer_timeout_ms: float, - time_since_outer_request_start_ms_supplier: Callable, + time_since_outer_request_start_ms_supplier: float, *, threshold_ms: float = 100, ) -> TimeoutChecker: diff --git a/frontik/util.py b/frontik/util.py index 945b30b80..80a5ffca1 100644 --- a/frontik/util.py +++ b/frontik/util.py @@ -82,7 +82,7 @@ def choose_boundary(): def get_cookie_or_url_param_value(handler: PageHandler, param_name: str) -> Optional[str]: - return handler.get_argument(param_name, handler.get_cookie(param_name, None)) + return handler.get_query_argument(param_name, handler.get_cookie(param_name, None)) def reverse_regex_named_groups(pattern: str, *args: Any, **kwargs: Any) -> str: diff --git a/poetry.lock b/poetry.lock index 6e37ade99..feb026c68 100644 --- a/poetry.lock +++ b/poetry.lock @@ -302,6 +302,20 @@ files = [ [package.extras] unicode-backport = ["unicodedata2"] +[[package]] +name = "click" +version = "8.1.7" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.7" +files = [ + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + [[package]] name = "colorama" version = "0.4.6" @@ -377,18 +391,18 @@ all = ["email-validator (>=2.0.0)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)" [[package]] name = "filelock" -version = "3.13.1" +version = "3.13.4" description = "A platform independent file lock." 
optional = false python-versions = ">=3.8" files = [ - {file = "filelock-3.13.1-py3-none-any.whl", hash = "sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c"}, - {file = "filelock-3.13.1.tar.gz", hash = "sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e"}, + {file = "filelock-3.13.4-py3-none-any.whl", hash = "sha256:404e5e9253aa60ad457cae1be07c0f0ca90a63931200a47d9b6a6af84fd7b45f"}, + {file = "filelock-3.13.4.tar.gz", hash = "sha256:d13f466618bfde72bd2c18255e269f72542c6e70e7bac83a0232d6b1cc5c8cf4"}, ] [package.extras] -docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.24)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"] +docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"] typing = ["typing-extensions (>=4.8)"] [[package]] @@ -560,6 +574,17 @@ files = [ [package.extras] protobuf = ["grpcio-tools (>=1.62.1)"] +[[package]] +name = "h11" +version = "0.14.0" +description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +optional = false +python-versions = ">=3.7" +files = [ + {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, + {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, +] + [[package]] name = "http-client" version = "2.1.10" @@ -1136,61 +1161,62 @@ files = [ [[package]] name = "orjson" -version = "3.9.15" +version = "3.10.0" description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" optional = false python-versions = ">=3.8" files = [ - {file = "orjson-3.9.15-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:d61f7ce4727a9fa7680cd6f3986b0e2c732639f46a5e0156e550e35258aa313a"}, - {file = "orjson-3.9.15-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4feeb41882e8aa17634b589533baafdceb387e01e117b1ec65534ec724023d04"}, - {file = "orjson-3.9.15-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fbbeb3c9b2edb5fd044b2a070f127a0ac456ffd079cb82746fc84af01ef021a4"}, - {file = "orjson-3.9.15-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b66bcc5670e8a6b78f0313bcb74774c8291f6f8aeef10fe70e910b8040f3ab75"}, - {file = "orjson-3.9.15-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2973474811db7b35c30248d1129c64fd2bdf40d57d84beed2a9a379a6f57d0ab"}, - {file = "orjson-3.9.15-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fe41b6f72f52d3da4db524c8653e46243c8c92df826ab5ffaece2dba9cccd58"}, - {file = "orjson-3.9.15-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:4228aace81781cc9d05a3ec3a6d2673a1ad0d8725b4e915f1089803e9efd2b99"}, - {file = "orjson-3.9.15-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:6f7b65bfaf69493c73423ce9db66cfe9138b2f9ef62897486417a8fcb0a92bfe"}, - {file = "orjson-3.9.15-cp310-none-win32.whl", hash = "sha256:2d99e3c4c13a7b0fb3792cc04c2829c9db07838fb6973e578b85c1745e7d0ce7"}, - {file = "orjson-3.9.15-cp310-none-win_amd64.whl", hash = "sha256:b725da33e6e58e4a5d27958568484aa766e825e93aa20c26c91168be58e08cbb"}, - 
{file = "orjson-3.9.15-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:c8e8fe01e435005d4421f183038fc70ca85d2c1e490f51fb972db92af6e047c2"}, - {file = "orjson-3.9.15-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87f1097acb569dde17f246faa268759a71a2cb8c96dd392cd25c668b104cad2f"}, - {file = "orjson-3.9.15-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ff0f9913d82e1d1fadbd976424c316fbc4d9c525c81d047bbdd16bd27dd98cfc"}, - {file = "orjson-3.9.15-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8055ec598605b0077e29652ccfe9372247474375e0e3f5775c91d9434e12d6b1"}, - {file = "orjson-3.9.15-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d6768a327ea1ba44c9114dba5fdda4a214bdb70129065cd0807eb5f010bfcbb5"}, - {file = "orjson-3.9.15-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:12365576039b1a5a47df01aadb353b68223da413e2e7f98c02403061aad34bde"}, - {file = "orjson-3.9.15-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:71c6b009d431b3839d7c14c3af86788b3cfac41e969e3e1c22f8a6ea13139404"}, - {file = "orjson-3.9.15-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e18668f1bd39e69b7fed19fa7cd1cd110a121ec25439328b5c89934e6d30d357"}, - {file = "orjson-3.9.15-cp311-none-win32.whl", hash = "sha256:62482873e0289cf7313461009bf62ac8b2e54bc6f00c6fabcde785709231a5d7"}, - {file = "orjson-3.9.15-cp311-none-win_amd64.whl", hash = "sha256:b3d336ed75d17c7b1af233a6561cf421dee41d9204aa3cfcc6c9c65cd5bb69a8"}, - {file = "orjson-3.9.15-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:82425dd5c7bd3adfe4e94c78e27e2fa02971750c2b7ffba648b0f5d5cc016a73"}, - {file = "orjson-3.9.15-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c51378d4a8255b2e7c1e5cc430644f0939539deddfa77f6fac7b56a9784160a"}, - {file = "orjson-3.9.15-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6ae4e06be04dc00618247c4ae3f7c3e561d5bc19ab6941427f6d3722a0875ef7"}, - {file = "orjson-3.9.15-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bcef128f970bb63ecf9a65f7beafd9b55e3aaf0efc271a4154050fc15cdb386e"}, - {file = "orjson-3.9.15-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b72758f3ffc36ca566ba98a8e7f4f373b6c17c646ff8ad9b21ad10c29186f00d"}, - {file = "orjson-3.9.15-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10c57bc7b946cf2efa67ac55766e41764b66d40cbd9489041e637c1304400494"}, - {file = "orjson-3.9.15-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:946c3a1ef25338e78107fba746f299f926db408d34553b4754e90a7de1d44068"}, - {file = "orjson-3.9.15-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2f256d03957075fcb5923410058982aea85455d035607486ccb847f095442bda"}, - {file = "orjson-3.9.15-cp312-none-win_amd64.whl", hash = "sha256:5bb399e1b49db120653a31463b4a7b27cf2fbfe60469546baf681d1b39f4edf2"}, - {file = "orjson-3.9.15-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:b17f0f14a9c0ba55ff6279a922d1932e24b13fc218a3e968ecdbf791b3682b25"}, - {file = "orjson-3.9.15-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f6cbd8e6e446fb7e4ed5bac4661a29e43f38aeecbf60c4b900b825a353276a1"}, - {file = "orjson-3.9.15-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:76bc6356d07c1d9f4b782813094d0caf1703b729d876ab6a676f3aaa9a47e37c"}, - {file = "orjson-3.9.15-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fdfa97090e2d6f73dced247a2f2d8004ac6449df6568f30e7fa1a045767c69a6"}, - {file = "orjson-3.9.15-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7413070a3e927e4207d00bd65f42d1b780fb0d32d7b1d951f6dc6ade318e1b5a"}, - {file = "orjson-3.9.15-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9cf1596680ac1f01839dba32d496136bdd5d8ffb858c280fa82bbfeb173bdd40"}, - {file = "orjson-3.9.15-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:809d653c155e2cc4fd39ad69c08fdff7f4016c355ae4b88905219d3579e31eb7"}, - {file = "orjson-3.9.15-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:920fa5a0c5175ab14b9c78f6f820b75804fb4984423ee4c4f1e6d748f8b22bc1"}, - {file = "orjson-3.9.15-cp38-none-win32.whl", hash = "sha256:2b5c0f532905e60cf22a511120e3719b85d9c25d0e1c2a8abb20c4dede3b05a5"}, - {file = "orjson-3.9.15-cp38-none-win_amd64.whl", hash = "sha256:67384f588f7f8daf040114337d34a5188346e3fae6c38b6a19a2fe8c663a2f9b"}, - {file = "orjson-3.9.15-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:6fc2fe4647927070df3d93f561d7e588a38865ea0040027662e3e541d592811e"}, - {file = "orjson-3.9.15-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34cbcd216e7af5270f2ffa63a963346845eb71e174ea530867b7443892d77180"}, - {file = "orjson-3.9.15-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f541587f5c558abd93cb0de491ce99a9ef8d1ae29dd6ab4dbb5a13281ae04cbd"}, - {file = "orjson-3.9.15-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:92255879280ef9c3c0bcb327c5a1b8ed694c290d61a6a532458264f887f052cb"}, - {file = "orjson-3.9.15-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:05a1f57fb601c426635fcae9ddbe90dfc1ed42245eb4c75e4960440cac667262"}, - {file = "orjson-3.9.15-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ede0bde16cc6e9b96633df1631fbcd66491d1063667f260a4f2386a098393790"}, - {file = "orjson-3.9.15-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:e88b97ef13910e5f87bcbc4dd7979a7de9ba8702b54d3204ac587e83639c0c2b"}, - {file = "orjson-3.9.15-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:57d5d8cf9c27f7ef6bc56a5925c7fbc76b61288ab674eb352c26ac780caa5b10"}, - {file = "orjson-3.9.15-cp39-none-win32.whl", hash = "sha256:001f4eb0ecd8e9ebd295722d0cbedf0748680fb9998d3993abaed2f40587257a"}, - {file = "orjson-3.9.15-cp39-none-win_amd64.whl", hash = "sha256:ea0b183a5fe6b2b45f3b854b0d19c4e932d6f5934ae1f723b07cf9560edd4ec7"}, - {file = "orjson-3.9.15.tar.gz", hash = "sha256:95cae920959d772f30ab36d3b25f83bb0f3be671e986c72ce22f8fa700dae061"}, + {file = "orjson-3.10.0-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:47af5d4b850a2d1328660661f0881b67fdbe712aea905dadd413bdea6f792c33"}, + {file = "orjson-3.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c90681333619d78360d13840c7235fdaf01b2b129cb3a4f1647783b1971542b6"}, + {file = "orjson-3.10.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:400c5b7c4222cb27b5059adf1fb12302eebcabf1978f33d0824aa5277ca899bd"}, + {file = "orjson-3.10.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5dcb32e949eae80fb335e63b90e5808b4b0f64e31476b3777707416b41682db5"}, + {file = 
"orjson-3.10.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aa7d507c7493252c0a0264b5cc7e20fa2f8622b8a83b04d819b5ce32c97cf57b"}, + {file = "orjson-3.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e286a51def6626f1e0cc134ba2067dcf14f7f4b9550f6dd4535fd9d79000040b"}, + {file = "orjson-3.10.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:8acd4b82a5f3a3ec8b1dc83452941d22b4711964c34727eb1e65449eead353ca"}, + {file = "orjson-3.10.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:30707e646080dd3c791f22ce7e4a2fc2438765408547c10510f1f690bd336217"}, + {file = "orjson-3.10.0-cp310-none-win32.whl", hash = "sha256:115498c4ad34188dcb73464e8dc80e490a3e5e88a925907b6fedcf20e545001a"}, + {file = "orjson-3.10.0-cp310-none-win_amd64.whl", hash = "sha256:6735dd4a5a7b6df00a87d1d7a02b84b54d215fb7adac50dd24da5997ffb4798d"}, + {file = "orjson-3.10.0-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:9587053e0cefc284e4d1cd113c34468b7d3f17666d22b185ea654f0775316a26"}, + {file = "orjson-3.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bef1050b1bdc9ea6c0d08468e3e61c9386723633b397e50b82fda37b3563d72"}, + {file = "orjson-3.10.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d16c6963ddf3b28c0d461641517cd312ad6b3cf303d8b87d5ef3fa59d6844337"}, + {file = "orjson-3.10.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4251964db47ef090c462a2d909f16c7c7d5fe68e341dabce6702879ec26d1134"}, + {file = "orjson-3.10.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:73bbbdc43d520204d9ef0817ac03fa49c103c7f9ea94f410d2950755be2c349c"}, + {file = "orjson-3.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:414e5293b82373606acf0d66313aecb52d9c8c2404b1900683eb32c3d042dbd7"}, + {file = "orjson-3.10.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:feaed5bb09877dc27ed0d37f037ddef6cb76d19aa34b108db270d27d3d2ef747"}, + {file = "orjson-3.10.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:5127478260db640323cea131ee88541cb1a9fbce051f0b22fa2f0892f44da302"}, + {file = "orjson-3.10.0-cp311-none-win32.whl", hash = "sha256:b98345529bafe3c06c09996b303fc0a21961820d634409b8639bc16bd4f21b63"}, + {file = "orjson-3.10.0-cp311-none-win_amd64.whl", hash = "sha256:658ca5cee3379dd3d37dbacd43d42c1b4feee99a29d847ef27a1cb18abdfb23f"}, + {file = "orjson-3.10.0-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:4329c1d24fd130ee377e32a72dc54a3c251e6706fccd9a2ecb91b3606fddd998"}, + {file = "orjson-3.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef0f19fdfb6553342b1882f438afd53c7cb7aea57894c4490c43e4431739c700"}, + {file = "orjson-3.10.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c4f60db24161534764277f798ef53b9d3063092f6d23f8f962b4a97edfa997a0"}, + {file = "orjson-3.10.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1de3fd5c7b208d836f8ecb4526995f0d5877153a4f6f12f3e9bf11e49357de98"}, + {file = "orjson-3.10.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f93e33f67729d460a177ba285002035d3f11425ed3cebac5f6ded4ef36b28344"}, + {file = "orjson-3.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:237ba922aef472761acd697eef77fef4831ab769a42e83c04ac91e9f9e08fa0e"}, + {file = 
"orjson-3.10.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:98c1bfc6a9bec52bc8f0ab9b86cc0874b0299fccef3562b793c1576cf3abb570"}, + {file = "orjson-3.10.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:30d795a24be16c03dca0c35ca8f9c8eaaa51e3342f2c162d327bd0225118794a"}, + {file = "orjson-3.10.0-cp312-none-win32.whl", hash = "sha256:6a3f53dc650bc860eb26ec293dfb489b2f6ae1cbfc409a127b01229980e372f7"}, + {file = "orjson-3.10.0-cp312-none-win_amd64.whl", hash = "sha256:983db1f87c371dc6ffc52931eb75f9fe17dc621273e43ce67bee407d3e5476e9"}, + {file = "orjson-3.10.0-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:9a667769a96a72ca67237224a36faf57db0c82ab07d09c3aafc6f956196cfa1b"}, + {file = "orjson-3.10.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ade1e21dfde1d37feee8cf6464c20a2f41fa46c8bcd5251e761903e46102dc6b"}, + {file = "orjson-3.10.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:23c12bb4ced1c3308eff7ba5c63ef8f0edb3e4c43c026440247dd6c1c61cea4b"}, + {file = "orjson-3.10.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b2d014cf8d4dc9f03fc9f870de191a49a03b1bcda51f2a957943fb9fafe55aac"}, + {file = "orjson-3.10.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eadecaa16d9783affca33597781328e4981b048615c2ddc31c47a51b833d6319"}, + {file = "orjson-3.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd583341218826f48bd7c6ebf3310b4126216920853cbc471e8dbeaf07b0b80e"}, + {file = "orjson-3.10.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:90bfc137c75c31d32308fd61951d424424426ddc39a40e367704661a9ee97095"}, + {file = "orjson-3.10.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:13b5d3c795b09a466ec9fcf0bd3ad7b85467d91a60113885df7b8d639a9d374b"}, + {file = "orjson-3.10.0-cp38-none-win32.whl", hash = "sha256:5d42768db6f2ce0162544845facb7c081e9364a5eb6d2ef06cd17f6050b048d8"}, + {file = "orjson-3.10.0-cp38-none-win_amd64.whl", hash = "sha256:33e6655a2542195d6fd9f850b428926559dee382f7a862dae92ca97fea03a5ad"}, + {file = "orjson-3.10.0-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:4050920e831a49d8782a1720d3ca2f1c49b150953667eed6e5d63a62e80f46a2"}, + {file = "orjson-3.10.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1897aa25a944cec774ce4a0e1c8e98fb50523e97366c637b7d0cddabc42e6643"}, + {file = "orjson-3.10.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9bf565a69e0082ea348c5657401acec3cbbb31564d89afebaee884614fba36b4"}, + {file = "orjson-3.10.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b6ebc17cfbbf741f5c1a888d1854354536f63d84bee537c9a7c0335791bb9009"}, + {file = "orjson-3.10.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d2817877d0b69f78f146ab305c5975d0618df41acf8811249ee64231f5953fee"}, + {file = "orjson-3.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:57d017863ec8aa4589be30a328dacd13c2dc49de1c170bc8d8c8a98ece0f2925"}, + {file = "orjson-3.10.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:22c2f7e377ac757bd3476ecb7480c8ed79d98ef89648f0176deb1da5cd014eb7"}, + {file = "orjson-3.10.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:e62ba42bfe64c60c1bc84799944f80704e996592c6b9e14789c8e2a303279912"}, + {file = "orjson-3.10.0-cp39-none-win32.whl", hash = 
"sha256:60c0b1bdbccd959ebd1575bd0147bd5e10fc76f26216188be4a36b691c937077"}, + {file = "orjson-3.10.0-cp39-none-win_amd64.whl", hash = "sha256:175a41500ebb2fdf320bf78e8b9a75a1279525b62ba400b2b2444e274c2c8bee"}, + {file = "orjson-3.10.0.tar.gz", hash = "sha256:ba4d8cac5f2e2cff36bea6b6481cdb92b38c202bcec603d6f5ff91960595a1ed"}, ] [[package]] @@ -1692,13 +1718,13 @@ urllib3 = ">=2" [[package]] name = "typing-extensions" -version = "4.10.0" +version = "4.11.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, - {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, + {file = "typing_extensions-4.11.0-py3-none-any.whl", hash = "sha256:c1f94d72897edaf4ce775bb7558d5b79d8126906a14ea5ed1635921406c0387a"}, + {file = "typing_extensions-4.11.0.tar.gz", hash = "sha256:83f085bd5ca59c80295fc2a82ab5dac679cbe02b9f33f7d83af68e241bea51b0"}, ] [[package]] @@ -1718,6 +1744,25 @@ h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] +[[package]] +name = "uvicorn" +version = "0.29.0" +description = "The lightning-fast ASGI server." +optional = false +python-versions = ">=3.8" +files = [ + {file = "uvicorn-0.29.0-py3-none-any.whl", hash = "sha256:2c2aac7ff4f4365c206fd773a39bf4ebd1047c238f8b8268ad996829323473de"}, + {file = "uvicorn-0.29.0.tar.gz", hash = "sha256:6a69214c0b6a087462412670b3ef21224fa48cae0e452b5883e8e8bdfdd11dd0"}, +] + +[package.dependencies] +click = ">=7.0" +h11 = ">=0.8" +typing-extensions = {version = ">=4.0", markers = "python_version < \"3.11\""} + +[package.extras] +standard = ["colorama (>=0.4)", "httptools (>=0.5.0)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1)", "watchfiles (>=0.13)", "websockets (>=10.4)"] + [[package]] name = "virtualenv" version = "20.25.1" @@ -1926,4 +1971,4 @@ testing = ["aioresponses", "tornado-httpclient-mock"] [metadata] lock-version = "2.0" python-versions = "~=3.9" -content-hash = "3971c56a87814b276190342cf60ec3f5dac0fe78531bad88e0de215a1a7ef364" +content-hash = "f07d38533e1c0a1adde160c041b6a3697cd3100333a8e1294fc25715733d5783" diff --git a/pyproject.toml b/pyproject.toml index 36659caab..4acab791c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -34,6 +34,7 @@ aiokafka = '0.8.1' sentry-sdk = '1.26.0' aioresponses = '0.7.4' tornado-httpclient-mock = '0.2.3' +uvicorn = '0.29.0' [tool.poetry.group.test.dependencies] pytest = '>=7.2.0'