diff --git a/frontik/app.py b/frontik/app.py
index 63cb91dd8..f633b345b 100644
--- a/frontik/app.py
+++ b/frontik/app.py
@@ -7,7 +7,7 @@
from collections.abc import Callable
from ctypes import c_bool, c_int
from threading import Lock
-from typing import Optional, Union
+from typing import Awaitable, Optional, Union
from aiokafka import AIOKafkaProducer
from fastapi import FastAPI, HTTPException
@@ -15,21 +15,44 @@
from http_client import options as http_client_options
from http_client.balancing import RequestBalancerBuilder, Upstream
from lxml import etree
+from tornado import httputil
import frontik.producers.json_producer
import frontik.producers.xml_producer
from frontik import integrations, media_types
from frontik.debug import get_frontik_and_apps_versions
from frontik.handler import PageHandler, get_current_handler
+from frontik.handler_asgi import execute_page
from frontik.integrations.statsd import StatsDClient, StatsDClientStub, create_statsd_client
from frontik.options import options
from frontik.process import WorkerState
-from frontik.routing import router
+from frontik.routing import import_all_pages, router
from frontik.service_discovery import UpstreamManager
+from frontik.util import check_request_id, generate_uniq_timestamp_request_id
app_logger = logging.getLogger('app_logger')
+class AsgiRouter:
+ async def __call__(self, scope, receive, send):
+ assert scope['type'] == 'http'
+
+ if 'router' not in scope:
+ scope['router'] = self
+
+ route = scope['route']
+ scope['endpoint'] = route.endpoint
+
+ await route.handle(scope, receive, send)
+
+
+class FrontikAsgiApp(FastAPI):
+ def __init__(self) -> None:
+ super().__init__()
+ self.router = AsgiRouter() # type: ignore
+ self.http_client = None
+
+
@router.get('/version', cls=PageHandler)
async def get_version(handler: PageHandler = get_current_handler()) -> None:
handler.set_header('Content-Type', 'text/xml')
@@ -53,10 +76,8 @@ class DefaultConfig:
def __init__(self, app_module_name: Optional[str] = None) -> None:
self.start_time = time.time()
- self.fastapi_app = FastAPI()
-
self.app_module_name: Optional[str] = app_module_name
- if app_module_name is None:
+ if app_module_name is None: # for tests
app_module = importlib.import_module(self.__class__.__module__)
else:
app_module = importlib.import_module(app_module_name)
@@ -79,6 +100,38 @@ def __init__(self, app_module_name: Optional[str] = None) -> None:
count_down_lock = multiprocessing.Lock()
self.worker_state = WorkerState(init_workers_count_down, master_done, count_down_lock) # type: ignore
+ import_all_pages(app_module_name)
+
+ self.ui_methods: dict = {}
+ self.ui_modules: dict = {}
+ self.settings: dict = {}
+
+ self.app = FrontikAsgiApp()
+
+ def __call__(self, tornado_request: httputil.HTTPServerRequest) -> Optional[Awaitable[None]]:
+ # for making more asgi, reimplement tornado.http1connection._server_request_loop and ._read_message
+ request_id = tornado_request.headers.get('X-Request-Id') or generate_uniq_timestamp_request_id()
+ if options.validate_request_id:
+ check_request_id(request_id)
+
+ async def _serve_tornado_request(
+ frontik_app: FrontikApplication,
+ _tornado_request: httputil.HTTPServerRequest,
+ _request_id: str,
+ app: FrontikAsgiApp,
+ ) -> None:
+ status, reason, headers, data = await execute_page(frontik_app, _tornado_request, _request_id, app)
+
+ assert _tornado_request.connection is not None
+ _tornado_request.connection.set_close_callback(None) # type: ignore
+
+ start_line = httputil.ResponseStartLine('', status, reason)
+ future = _tornado_request.connection.write_headers(start_line, headers, data)
+ _tornado_request.connection.finish()
+ return await future
+
+ return asyncio.create_task(_serve_tornado_request(self, tornado_request, request_id, self.app))
+
def create_upstream_manager(
self,
upstreams: dict[str, Upstream],
@@ -164,3 +217,6 @@ def get_current_status(self) -> dict[str, str]:
def get_kafka_producer(self, producer_name: str) -> Optional[AIOKafkaProducer]: # pragma: no cover
pass
+
+ def log_request(self, tornado_handler: PageHandler) -> None:
+ pass
diff --git a/frontik/auth.py b/frontik/auth.py
index 671c8dc67..2b965197a 100644
--- a/frontik/auth.py
+++ b/frontik/auth.py
@@ -7,7 +7,11 @@
from tornado.escape import to_unicode
from tornado.web import Finish
+from frontik.options import options
+
if TYPE_CHECKING:
+ from tornado import httputil
+
from frontik.handler import PageHandler
DEBUG_AUTH_HEADER_NAME = 'Frontik-Debug-Auth'
@@ -17,8 +21,8 @@ class DebugUnauthorizedError(Finish):
pass
-def passed_basic_auth(handler: PageHandler, login: Optional[str], passwd: Optional[str]) -> bool:
- auth_header = handler.get_header('Authorization')
+def passed_basic_auth(tornado_request: httputil.HTTPServerRequest, login: Optional[str], passwd: Optional[str]) -> bool:
+ auth_header = tornado_request.headers.get('Authorization')
if auth_header and auth_header.startswith('Basic '):
method, auth_b64 = auth_header.split(' ')
try:
@@ -30,21 +34,30 @@ def passed_basic_auth(handler: PageHandler, login: Optional[str], passwd: Option
return False
-def check_debug_auth(handler: PageHandler, login: Optional[str], password: Optional[str]) -> None:
- """
- :type handler: tornado.web.RequestHandler
- :return: None or tuple(http_code, headers)
- """
- debug_auth_header = handler.get_header(DEBUG_AUTH_HEADER_NAME)
+def check_debug_auth(
+ tornado_request: httputil.HTTPServerRequest, login: Optional[str], password: Optional[str]
+) -> Optional[str]:
+ debug_auth_header = tornado_request.headers.get(DEBUG_AUTH_HEADER_NAME)
if debug_auth_header is not None:
debug_access = debug_auth_header == f'{login}:{password}'
if not debug_access:
- handler.set_header('WWW-Authenticate', f'{DEBUG_AUTH_HEADER_NAME}-Header realm="Secure Area"')
- handler.set_status(http.client.UNAUTHORIZED)
- handler.finish()
+ return f'{DEBUG_AUTH_HEADER_NAME}-Header realm="Secure Area"'
else:
- debug_access = passed_basic_auth(handler, login, password)
+ debug_access = passed_basic_auth(tornado_request, login, password)
if not debug_access:
- handler.set_header('WWW-Authenticate', 'Basic realm="Secure Area"')
- handler.set_status(http.client.UNAUTHORIZED)
- handler.finish()
+ return 'Basic realm="Secure Area"'
+ return None
+
+
+def check_debug_auth_or_finish(
+ handler: PageHandler, login: Optional[str] = None, password: Optional[str] = None
+) -> None:
+ if options.debug:
+ return
+ login = login or options.debug_login
+ password = password or options.debug_password
+ fail_header = check_debug_auth(handler.request, login, password)
+ if fail_header:
+ handler.set_header('WWW-Authenticate', fail_header)
+ handler.set_status(http.client.UNAUTHORIZED)
+ handler.finish()
diff --git a/frontik/balancing_client.py b/frontik/balancing_client.py
new file mode 100644
index 000000000..c5a830077
--- /dev/null
+++ b/frontik/balancing_client.py
@@ -0,0 +1,57 @@
+import time
+from functools import partial
+from typing import Annotated
+
+from fastapi import Depends, Request
+from http_client import HttpClient, RequestBuilder
+from http_client.request_response import USER_AGENT_HEADER
+
+from frontik import request_context
+from frontik.auth import DEBUG_AUTH_HEADER_NAME
+from frontik.debug import DEBUG_HEADER_NAME
+from frontik.timeout_tracking import get_timeout_checker
+from frontik.util import make_url
+
+OUTER_TIMEOUT_MS_HEADER = 'X-Outer-Timeout-Ms'
+
+
+def modify_http_client_request(request: Request, balanced_request: RequestBuilder) -> None:
+ balanced_request.headers['x-request-id'] = request_context.get_request_id()
+ balanced_request.headers[OUTER_TIMEOUT_MS_HEADER] = f'{balanced_request.request_timeout * 1000:.0f}'
+
+ outer_timeout = request.headers.get(OUTER_TIMEOUT_MS_HEADER.lower())
+ if outer_timeout:
+ timeout_checker = get_timeout_checker(
+ request.headers.get(USER_AGENT_HEADER.lower()),
+ float(outer_timeout),
+ request['start_time'],
+ )
+ timeout_checker.check(balanced_request)
+
+ if request['pass_debug']:
+ balanced_request.headers[DEBUG_HEADER_NAME] = 'true'
+
+ # debug_timestamp is added to avoid caching of debug responses
+ balanced_request.path = make_url(balanced_request.path, debug_timestamp=int(time.time()))
+
+ for header_name in ('Authorization', DEBUG_AUTH_HEADER_NAME):
+ authorization = request.headers.get(header_name.lower())
+ if authorization is not None:
+ balanced_request.headers[header_name] = authorization
+
+
+def get_http_client(modify_request_hook=None):
+ def _get_http_client(request: Request) -> HttpClient:
+ hook = modify_request_hook or partial(modify_http_client_request, request)
+
+ http_client = request['http_client_factory'].get_http_client(
+ modify_http_request_hook=hook,
+ debug_enabled=request['debug_enabled'],
+ )
+
+ return http_client
+
+ return _get_http_client
+
+
+HttpClientT = Annotated[HttpClient, Depends(get_http_client())]
diff --git a/frontik/debug.py b/frontik/debug.py
index 03d28e767..e32c019e7 100644
--- a/frontik/debug.py
+++ b/frontik/debug.py
@@ -21,17 +21,19 @@
import aiohttp
import tornado
-from fastapi import Request
from lxml import etree
from lxml.builder import E
-from starlette.datastructures import Headers
+from tornado import httputil
from tornado.escape import to_unicode, utf8
from tornado.httputil import HTTPHeaders
import frontik.util
import frontik.xml_util
from frontik import media_types, request_context
+from frontik.auth import check_debug_auth
from frontik.loggers import BufferedHandler
+from frontik.options import options
+from frontik.util import get_cookie_or_param_from_request
from frontik.version import version as frontik_version
from frontik.xml_util import dict_to_xml
@@ -41,7 +43,6 @@
from http_client.request_response import RequestBuilder, RequestResult
from frontik.app import FrontikApplication
- from frontik.handler import PageHandler
debug_log = logging.getLogger('frontik.debug')
@@ -203,7 +204,7 @@ def _params_to_xml(url: str) -> etree.Element:
return params
-def _headers_to_xml(request_or_response_headers: dict | Headers) -> etree.Element:
+def _headers_to_xml(request_or_response_headers: HTTPHeaders) -> etree.Element:
headers = etree.Element('headers')
for name, value in request_or_response_headers.items():
if name != 'Cookie':
@@ -212,7 +213,7 @@ def _headers_to_xml(request_or_response_headers: dict | Headers) -> etree.Elemen
return headers
-def _cookies_to_xml(request_or_response_headers: dict) -> etree.Element:
+def _cookies_to_xml(request_or_response_headers: HTTPHeaders) -> etree.Element:
cookies = etree.Element('cookies')
if 'Cookie' in request_or_response_headers:
_cookies: SimpleCookie = SimpleCookie(request_or_response_headers['Cookie'])
@@ -365,18 +366,39 @@ def _produce_one(self, record: logging.LogRecord) -> etree.Element:
DEBUG_XSL = os.path.join(os.path.dirname(__file__), 'debug/debug.xsl')
+def _data_to_chunk(data: Any, headers: HTTPHeaders) -> bytes:
+ result: bytes = b''
+ if data is None:
+ return result
+ if isinstance(data, str):
+ result = data.encode('utf-8')
+ elif isinstance(data, dict):
+ chunk = json.dumps(data).replace('</', '<\\/')
+ result = chunk.encode('utf-8')
+ headers['Content-Type'] = 'application/json; charset=UTF-8'
+ elif isinstance(data, bytes):
+ result = data
+ else:
+ raise TypeError(f'unexpected type of chunk - {type(data)}')
+ return result
+
+
class DebugTransform:
- def __init__(self, application: FrontikApplication, request: Request) -> None:
+ def __init__(self, application: FrontikApplication, debug_mode: DebugMode) -> None:
self.application = application
- self.request: Request = request
+ self.debug_mode = debug_mode
def is_enabled(self) -> bool:
- return getattr(self.request.state.handler, '_debug_enabled', False)
+ return self.debug_mode.enabled
def is_inherited(self) -> bool:
- return getattr(self.request.state.handler, '_debug_inherited', False)
+ return self.debug_mode.inherited
+
+ def transform_chunk(
+ self, tornado_request: httputil.HTTPServerRequest, status_code: int, original_headers: HTTPHeaders, data: bytes
+ ) -> tuple[int, HTTPHeaders, bytes]:
+ chunk = _data_to_chunk(data, original_headers)
- def transform_chunk(self, status_code: int, original_headers: dict, chunk: bytes) -> tuple[int, dict, bytes]:
if not self.is_enabled():
return status_code, original_headers, chunk
@@ -390,9 +412,9 @@ def transform_chunk(self, status_code: int, original_headers: dict, chunk: bytes
debug_log_data = request_context.get_log_handler().produce_all() # type: ignore
debug_log_data.set('code', str(int(status_code)))
debug_log_data.set('handler-name', request_context.get_handler_name())
- debug_log_data.set('started', _format_number(self.request.state.start_time))
- debug_log_data.set('request-id', str(self.request.state.handler.request_id))
- debug_log_data.set('stages-total', _format_number((time.time() - self.request.state.start_time) * 1000))
+ debug_log_data.set('started', _format_number(tornado_request._start_time))
+ debug_log_data.set('request-id', str(tornado_request.request_id)) # type: ignore
+ debug_log_data.set('stages-total', _format_number((time.time() - tornado_request._start_time) * 1000))
try:
debug_log_data.append(E.versions(_pretty_print_xml(get_frontik_and_apps_versions(self.application))))
@@ -408,10 +430,10 @@ def transform_chunk(self, status_code: int, original_headers: dict, chunk: bytes
debug_log_data.append(
E.request(
- E.method(self.request.method),
- _params_to_xml(str(self.request.url)),
- _headers_to_xml(self.request.headers),
- _cookies_to_xml(self.request.headers), # type: ignore
+ E.method(tornado_request.method),
+ _params_to_xml(str(tornado_request.uri)),
+ _headers_to_xml(tornado_request.headers),
+ _cookies_to_xml(tornado_request.headers),
),
)
@@ -432,7 +454,7 @@ def transform_chunk(self, status_code: int, original_headers: dict, chunk: bytes
upstream.set('bgcolor', bgcolor)
upstream.set('fgcolor', fgcolor)
- if not getattr(self.request.state.handler, '_debug_inherited', False):
+ if not self.debug_mode.inherited:
try:
transform = etree.XSLT(etree.parse(DEBUG_XSL))
log_document = utf8(str(transform(debug_log_data)))
@@ -449,35 +471,43 @@ def transform_chunk(self, status_code: int, original_headers: dict, chunk: bytes
else:
log_document = etree.tostring(debug_log_data, encoding='UTF-8', xml_declaration=True)
- return 200, wrap_headers, log_document
+ return 200, HTTPHeaders(wrap_headers), log_document
class DebugMode:
- def __init__(self, handler: PageHandler) -> None:
- debug_value = frontik.util.get_cookie_or_url_param_value(handler, 'debug')
-
- self.mode_values = debug_value.split(',') if debug_value is not None else ''
- self.inherited = handler.get_header(DEBUG_HEADER_NAME, None)
- self.pass_debug: bool = False
+ def __init__(self, tornado_request: httputil.HTTPServerRequest) -> None:
+ self.debug_value = get_cookie_or_param_from_request(tornado_request, 'debug')
+ self.mode_values = self.debug_value.split(',') if self.debug_value is not None else ''
+ self.inherited = tornado_request.headers.get(DEBUG_HEADER_NAME, None)
+ self.pass_debug = False
+ self.enabled = False
+ self.profile_xslt = False
+ self.failed_auth_header = None
if self.inherited:
debug_log.debug('debug mode is inherited due to %s request header', DEBUG_HEADER_NAME)
- handler._debug_inherited = True # type: ignore
- if debug_value is not None or self.inherited:
- handler.require_debug_access()
+ if self.debug_value is not None or self.inherited:
+ if options.debug:
+ self.on_auth_ok()
+ return
- self.enabled = handler._debug_enabled = True # type: ignore
- self.pass_debug = 'nopass' not in self.mode_values or bool(self.inherited)
- self.profile_xslt = 'xslt' in self.mode_values
+ self.failed_auth_header = check_debug_auth(tornado_request, options.debug_login, options.debug_password)
+ if not self.failed_auth_header:
+ self.on_auth_ok()
- request_context.set_log_handler(DebugBufferedHandler())
+ def on_auth_ok(self) -> None:
+ self.enabled = True
+ self.pass_debug = 'nopass' not in self.mode_values or bool(self.inherited)
+ self.profile_xslt = 'xslt' in self.mode_values
- if self.pass_debug:
- debug_log.debug('%s header will be passed to all requests', DEBUG_HEADER_NAME)
- else:
- self.enabled = False
- self.profile_xslt = False
+ request_context.set_log_handler(DebugBufferedHandler())
+
+ if self.pass_debug:
+ debug_log.debug('%s header will be passed to all requests', DEBUG_HEADER_NAME)
+
+ def auth_failed(self) -> bool:
+ return self.failed_auth_header is not None
def get_frontik_and_apps_versions(application: FrontikApplication) -> etree.Element:
diff --git a/frontik/futures.py b/frontik/futures.py
index d7151d1e9..cafe7dd66 100644
--- a/frontik/futures.py
+++ b/frontik/futures.py
@@ -1,8 +1,17 @@
+from __future__ import annotations
+
import asyncio
import logging
-from typing import Optional
+import time
+from functools import partial, wraps
+from typing import TYPE_CHECKING, Optional
from tornado.concurrent import Future
+from tornado.ioloop import IOLoop
+
+if TYPE_CHECKING:
+ from collections.abc import Callable
+ from typing import Any
async_logger = logging.getLogger('frontik.futures')
@@ -14,33 +23,163 @@ class AbortAsyncGroup(Exception):
# AsyncGroup will become legacy in future releases
# It will be replaced with FutureGroup
class AsyncGroup:
- def __init__(self, name: Optional[str] = None) -> None:
+ """
+ Grouping of several async requests and final callback in such way that final callback is invoked
+ after the last request is finished.
+
+ If any callback throws an exception, all pending callbacks would be aborted and finish_cb
+ would not be automatically called.
+ """
+
+ def __init__(self, finish_cb: Callable, name: Optional[str] = None) -> None:
+ self._counter = 0
+ self._finish_cb: Optional[Callable] = finish_cb
self._finished = False
self._name = name
+ self._future: Future = Future()
+ self._start_time = time.time()
self._futures: list[Future] = []
- def add_future(self, future: Future) -> None:
+ def is_finished(self) -> bool:
+ return self._finished
+
+ def abort(self) -> None:
+ async_logger.info('aborting %s', self)
+ self._finished = True
+ if not self._future.done():
+ self._future.set_exception(AbortAsyncGroup())
+
+ def finish(self) -> None:
if self._finished:
- raise RuntimeError('finish group is finished')
- self._futures.append(future)
+ async_logger.warning('trying to finish already finished %s', self)
+ return None
+
+ self._finished = True
+ self._future.set_result(None)
- async def finish(self) -> None:
try:
- await asyncio.gather(*self._futures)
+ if self._finish_cb is not None:
+ self._finish_cb()
finally:
- self._finished = True
+ # prevent possible cycle references
+ self._finish_cb = None
- def done(self) -> bool:
- return self._finished
+ return None
- def pending(self) -> bool:
- return not self._finished and len(self._futures) != 0
+ def try_finish(self) -> None:
+ if self._counter == 0:
+ self.finish()
- def abort(self) -> None:
- for future in self._futures:
- if not future.done():
- future.cancel()
- self._finished = True
+ def try_finish_async(self):
+ """Executes finish_cb in next IOLoop iteration"""
+ if self._counter == 0:
+ IOLoop.current().add_callback(self.finish)
+
+ def _inc(self) -> None:
+ if self._finished:
+ async_logger.info('ignoring adding callback in %s', self)
+ raise AbortAsyncGroup()
+
+ self._counter += 1
+
+ def _dec(self) -> None:
+ self._counter -= 1
+
+ def add(self, intermediate_cb: Callable, exception_handler: Optional[Callable] = None) -> Callable:
+ self._inc()
+
+ @wraps(intermediate_cb)
+ def new_cb(*args, **kwargs):
+ if self._finished:
+ async_logger.info('ignoring executing callback in %s', self)
+ return
+
+ try:
+ self._dec()
+ intermediate_cb(*args, **kwargs)
+ except Exception as ex:
+ self.abort()
+ if exception_handler is not None:
+ exception_handler(ex)
+ else:
+ raise
+
+ self.try_finish()
+
+ return new_cb
+
+ def add_notification(self) -> Callable:
+ self._inc()
+
+ def new_cb(*args, **kwargs):
+ self._dec()
+ self.try_finish()
+
+ return new_cb
+
+ @staticmethod
+ def _handle_future(callback, future):
+ future.result()
+ callback()
+
+ def add_future(self, future: Future) -> Future:
+ IOLoop.current().add_future(future, partial(self._handle_future, self.add_notification()))
+ self._futures.append(future)
+ return future
+
+ def get_finish_future(self) -> Future:
+ return self._future
+
+ def get_gathering_future(self) -> Future:
+ return asyncio.gather(*self._futures)
def __str__(self):
return f'AsyncGroup(name={self._name}, finished={self._finished})'
+
+
+def future_fold(
+ future: Future,
+ result_mapper: Optional[Callable] = None,
+ exception_mapper: Optional[Callable] = None,
+) -> Future:
+ """
+ Creates a new future with result or exception processed by result_mapper and exception_mapper.
+
+ If result_mapper or exception_mapper raises an exception, it will be set as an exception for the resulting future.
+ Any of the mappers can be None — then the result or exception is left as is.
+ """
+
+ res_future: Future = Future()
+
+ def _process(func: Optional[Callable], value: Any) -> None:
+ try:
+ processed = func(value) if func is not None else value
+ except Exception as e:
+ res_future.set_exception(e)
+ return
+ res_future.set_result(processed)
+
+ def _on_ready(wrapped_future):
+ exception = wrapped_future.exception()
+ if exception is not None:
+ if not callable(exception_mapper):
+
+ def default_exception_func(error):
+ raise error
+
+ _process(default_exception_func, exception)
+ else:
+ _process(exception_mapper, exception)
+ else:
+ _process(result_mapper, future.result())
+
+ IOLoop.current().add_future(future, callback=_on_ready)
+ return res_future
+
+
+def future_map(future, func):
+ return future_fold(future, result_mapper=func)
+
+
+def future_map_exception(future, func):
+ return future_fold(future, exception_mapper=func)
diff --git a/frontik/handler.py b/frontik/handler.py
index 25f02fe78..05d91dbf2 100644
--- a/frontik/handler.py
+++ b/frontik/handler.py
@@ -1,40 +1,40 @@
from __future__ import annotations
import asyncio
-import datetime
import http.client
-import json
import logging
import re
-import sys
import time
from asyncio import Task
from asyncio.futures import Future
+from functools import wraps
+from http import HTTPStatus
from typing import TYPE_CHECKING, Any, Optional, Type, TypeVar, Union, overload
-from fastapi import Depends, HTTPException, Request, Response
+import tornado.web
+from fastapi import Depends, Request
+from fastapi.dependencies.utils import solve_dependencies
from fastapi.routing import APIRoute
from http_client.request_response import USER_AGENT_HEADER, FailFastError, RequestBuilder, RequestResult
from pydantic import BaseModel, ValidationError
-from starlette.datastructures import Headers, QueryParams
-from tornado.httputil import format_timestamp, parse_body_arguments
+from tornado import httputil
+from tornado.ioloop import IOLoop
+from tornado.web import Finish, RequestHandler
import frontik.auth
-import frontik.handler_active_limit
import frontik.producers.json_producer
import frontik.producers.xml_producer
import frontik.util
from frontik import media_types, request_context
from frontik.auth import DEBUG_AUTH_HEADER_NAME
-from frontik.debug import DEBUG_HEADER_NAME, DebugMode, DebugTransform
+from frontik.debug import DEBUG_HEADER_NAME, DebugMode
from frontik.futures import AbortAsyncGroup, AsyncGroup
-from frontik.http_status import ALLOWED_STATUSES
+from frontik.http_status import ALLOWED_STATUSES, CLIENT_CLOSED_REQUEST, NON_CRITICAL_BAD_GATEWAY
from frontik.json_builder import FrontikJsonDecodeError, json_decode
from frontik.loggers import CUSTOM_JSON_EXTRA, JSON_REQUESTS_LOGGER
from frontik.loggers.stages import StagesLogger
-from frontik.options import options
from frontik.timeout_tracking import get_timeout_checker
-from frontik.util import make_url
+from frontik.util import gather_dict, make_url
from frontik.validator import BaseValidationModel, Validators
from frontik.version import version as frontik_version
@@ -42,6 +42,7 @@
from collections.abc import Callable, Coroutine
from http_client import HttpClient
+ from tornado.httputil import HTTPHeaders, HTTPServerRequest
from frontik.app import FrontikApplication
from frontik.integrations.statsd import StatsDClient, StatsDClientStub
@@ -52,43 +53,28 @@ def __init__(self, wait_finish_group: bool = False) -> None:
self.wait_finish_group = wait_finish_group
-class HTTPErrorWithPostprocessors(HTTPException):
+class HTTPErrorWithPostprocessors(tornado.web.HTTPError):
pass
-class TypedArgumentError(HTTPException):
+class TypedArgumentError(tornado.web.HTTPError):
pass
-class JSONBodyParseError(HTTPException):
+class JSONBodyParseError(tornado.web.HTTPError):
def __init__(self) -> None:
super().__init__(400, 'Failed to parse json in request body')
-class DefaultValueError(HTTPException):
- def __init__(self, arg_name: str) -> None:
- super().__init__(400, 'Missing argument %s' % arg_name)
- self.arg_name = arg_name
-
-
-class FinishPageSignal(Exception):
- def __init__(self, data: Any = None, *args: object) -> None:
- super().__init__(*args)
- self.data = data
-
-
-class RedirectPageSignal(Exception):
- def __init__(self, url: str, status: int, *args: object) -> None:
+class DefaultValueError(Exception):
+ def __init__(self, *args: object) -> None:
super().__init__(*args)
- self.url = url
- self.status = status
_ARG_DEFAULT = object()
MEDIA_TYPE_PARAMETERS_SEPARATOR_RE = r' *; *'
OUTER_TIMEOUT_MS_HEADER = 'X-Outer-Timeout-Ms'
_remove_control_chars_regex = re.compile(r'[\x00-\x08\x0e-\x1f]')
-_T = TypeVar('_T')
handler_logger = logging.getLogger('handler')
@@ -105,81 +91,59 @@ def _fail_fast_policy(fail_fast: bool, waited: bool, host: str, path: str) -> bo
return fail_fast
-class PageHandler:
+class PageHandler(RequestHandler):
def __init__(
self,
application: FrontikApplication,
- query_params: QueryParams,
- cookie_params: dict[str, str],
- header_params: Headers,
- body_bytes: bytes,
- request_start_time: float,
- path: str,
- path_params: dict,
- remote_ip: str,
- method: str,
- ) -> None: # request: Request
- self.application = application
- self.query_params = query_params
- self.cookie_params = cookie_params or {}
- self.header_params: Headers = header_params
- self.body_bytes = body_bytes
- self.request_start_time = request_start_time
- self.path = path
- self.path_params = path_params
- self.remote_ip = remote_ip
- self.method = method
-
- self._json_body = None
- self.body_arguments: dict[str, Any] = {}
- self.files: dict = {}
- self.parse_body_bytes()
-
+ request: HTTPServerRequest,
+ route: APIRoute,
+ debug_mode: DebugMode,
+ path_params: dict[str, str],
+ ) -> None:
+ self.name = self.__class__.__name__
self.request_id: str = request_context.get_request_id() # type: ignore
self.config = application.config
self.log = handler_logger
self.text: Any = None
- self._finished = False
+ self.route = route
+ self.debug_mode = debug_mode
+ self.path_params = path_params
+
+ super().__init__(application, request) # type: ignore
- self.statsd_client: StatsDClient | StatsDClientStub
+ self.statsd_client: StatsDClient | StatsDClientStub = application.statsd_client
for integration in application.available_integrations:
integration.initialize_handler(self)
- self.stages_logger = StagesLogger(request_start_time, self.statsd_client)
+ self.stages_logger = StagesLogger(request._start_time, self.statsd_client)
- self._debug_access: Optional[bool] = None
self._render_postprocessors: list = []
self._postprocessors: list = []
+ self._mandatory_cookies: dict = {}
+ self._mandatory_headers = httputil.HTTPHeaders()
+
self._validation_model: type[BaseValidationModel | BaseModel] = BaseValidationModel
self.timeout_checker = None
- self.use_adaptive_strategy = False
- outer_timeout = header_params.get(OUTER_TIMEOUT_MS_HEADER)
+ outer_timeout = request.headers.get(OUTER_TIMEOUT_MS_HEADER)
if outer_timeout:
self.timeout_checker = get_timeout_checker(
- header_params.get(USER_AGENT_HEADER),
+ request.headers.get(USER_AGENT_HEADER),
float(outer_timeout),
- request_start_time,
+ request._start_time,
)
- self._status = 200
- self._reason: Optional[str] = None
+ self.handler_result_future: Future[tuple[int, str, HTTPHeaders, bytes]] = Future()
def __repr__(self):
return f'{self.__module__}.{self.__class__.__name__}'
def prepare(self) -> None:
- self.resp_headers = get_default_headers()
- self.resp_cookies: dict[str, dict] = {}
-
- self.finish_group = AsyncGroup(name='finish')
-
- self.active_limit = frontik.handler_active_limit.ActiveHandlersLimit(self.statsd_client)
-
- self.debug_mode = DebugMode(self)
+ self.application: FrontikApplication # type: ignore
+ self.finish_group = AsyncGroup(lambda: None, name='finish')
self.json_producer = self.application.json.get_producer(self)
self.json = self.json_producer.json
@@ -190,15 +154,23 @@ def prepare(self) -> None:
self._http_client: HttpClient = self.application.http_client_factory.get_http_client(
self.modify_http_client_request,
self.debug_mode.enabled,
- self.use_adaptive_strategy,
)
- # Simple getters and setters
+ self._handler_finished_notification = self.finish_group.add_notification()
+
+ super().prepare()
- def get_request_headers(self) -> Headers:
- return self.header_params
+ def set_default_headers(self):
+ self._headers = httputil.HTTPHeaders({
+ 'Server': f'Frontik/{frontik_version}',
+ 'X-Request-Id': self.request_id,
+ })
+
+ @property
+ def path(self) -> str:
+ return self.request.path
- def get_path_argument(self, name, default=_ARG_DEFAULT):
+ def get_path_argument(self, name: str, default: Any = _ARG_DEFAULT) -> str:
value = self.path_params.get(name, None)
if value is None:
if default is _ARG_DEFAULT:
@@ -207,91 +179,49 @@ def get_path_argument(self, name, default=_ARG_DEFAULT):
value = _remove_control_chars_regex.sub(' ', value)
return value
- def get_query_argument(
- self,
- name: str,
- default: Union[str, _T] = _ARG_DEFAULT, # type: ignore
- strip: bool = True,
- ) -> Union[str, _T]:
- args = self._get_arguments(name, strip=strip)
- if not args:
- if default is _ARG_DEFAULT:
- raise DefaultValueError(name)
- return default
- return args[-1]
-
- def get_query_arguments(self, name: Optional[str] = None, strip: bool = True) -> Union[list[str], dict[str, str]]:
- if name is None:
- return self._get_all_query_arguments(strip)
- return self._get_arguments(name, strip)
-
- def _get_all_query_arguments(self, strip: bool = True) -> dict[str, str]:
- qargs_list = self.query_params.multi_items()
- values = {}
- for qarg_k, qarg_v in qargs_list:
- v = _remove_control_chars_regex.sub(' ', qarg_v)
- if strip:
- v = v.strip()
- values[qarg_k] = v
-
- return values
-
- def _get_arguments(self, name: str, strip: bool = True) -> list[str]:
- qargs_list = self.query_params.multi_items()
- values = []
- for qarg_k, qarg_v in qargs_list:
- if qarg_k != name:
- continue
-
- # Get rid of any weird control chars (unless decoding gave
- # us bytes, in which case leave it alone)
- v = _remove_control_chars_regex.sub(' ', qarg_v)
- if strip:
- v = v.strip()
- values.append(v)
-
- return values
+ @overload
+ def get_header(self, param_name: str, default: None = None) -> Optional[str]: ...
- def get_str_argument(
- self,
- name: str,
- default: Any = _ARG_DEFAULT,
- path_safe: bool = True,
- **kwargs: Any,
- ) -> Optional[Union[str, list[str]]]:
- if path_safe:
- return self.get_validated_argument(name, Validators.PATH_SAFE_STRING, default=default, **kwargs)
- return self.get_validated_argument(name, Validators.STRING, default=default, **kwargs)
+ @overload
+ def get_header(self, param_name: str, default: str) -> str: ...
- def get_int_argument(
- self,
- name: str,
- default: Any = _ARG_DEFAULT,
- **kwargs: Any,
- ) -> Optional[Union[int, list[int]]]:
- return self.get_validated_argument(name, Validators.INTEGER, default=default, **kwargs)
+ def get_header(self, param_name: str, default: Optional[str] = None) -> Optional[str]:
+ return self.request.headers.get(param_name.lower(), default)
- def get_bool_argument(
- self,
- name: str,
- default: Any = _ARG_DEFAULT,
- **kwargs: Any,
- ) -> Optional[Union[bool, list[bool]]]:
- return self.get_validated_argument(name, Validators.BOOLEAN, default=default, **kwargs)
+ def decode_argument(self, value: bytes, name: Optional[str] = None) -> str:
+ try:
+ return super().decode_argument(value, name)
+ except (UnicodeError, tornado.web.HTTPError):
+ self.log.warning('cannot decode utf-8 query parameter, trying other charsets')
- def get_float_argument(
- self,
- name: str,
- default: Any = _ARG_DEFAULT,
- **kwargs: Any,
- ) -> Optional[Union[float, list[float]]]:
- return self.get_validated_argument(name, Validators.FLOAT, default=default, **kwargs)
+ try:
+ return frontik.util.decode_string_from_charset(value)
+ except UnicodeError:
+ self.log.exception('cannot decode argument, ignoring invalid chars')
+ return value.decode('utf-8', 'ignore')
+
+ def get_body_argument(self, name: str, default: Any = _ARG_DEFAULT, strip: bool = True) -> Optional[str]:
+ if self._get_request_mime_type(self.request) == media_types.APPLICATION_JSON:
+ if name not in self.json_body and default == _ARG_DEFAULT:
+ raise tornado.web.MissingArgumentError(name)
+
+ result = self.json_body.get(name, default)
+
+ if strip and isinstance(result, str):
+ return result.strip()
+
+ return result
+
+ if default == _ARG_DEFAULT:
+ return super().get_body_argument(name, strip=strip)
+ return super().get_body_argument(name, default, strip)
def set_validation_model(self, model: type[Union[BaseValidationModel, BaseModel]]) -> None:
if issubclass(model, BaseModel):
self._validation_model = model
else:
- raise TypeError('model is not subclass of BaseClass')
+ msg = 'model is not subclass of BaseClass'
+ raise TypeError(msg)
def get_validated_argument(
self,
@@ -318,9 +248,9 @@ def get_validated_argument(
elif from_body:
value = self.get_body_argument(name, validated_default, strip)
elif array:
- value = self.get_query_arguments(name, strip)
+ value = self.get_arguments(name, strip)
else:
- value = self.get_query_argument(name, validated_default, strip)
+ value = self.get_argument(name, validated_default, strip)
try:
params = {validator: value}
@@ -332,238 +262,193 @@ def get_validated_argument(
return validated_value
- def get_body_arguments(
- self, name: Optional[str] = None, strip: bool = True
- ) -> Union[list[str], dict[str, list[str]]]:
- if name is None:
- return self._get_all_body_arguments(strip)
- return self._get_body_arguments(name, strip)
-
- def _get_all_body_arguments(self, strip: bool) -> dict[str, list[str]]:
- result: dict[str, list[str]] = {}
- for key, values in self.body_arguments.items():
- result[key] = []
- for v in values:
- s = self.decode_argument(v)
- if isinstance(s, str):
- s = _remove_control_chars_regex.sub(' ', s)
- if strip:
- s = s.strip()
- result[key].append(s)
- return result
-
- def get_body_argument(self, name: str, default: Any = _ARG_DEFAULT, strip: bool = True) -> Optional[str]:
- if self._get_request_mime_type() == media_types.APPLICATION_JSON:
- if name not in self.json_body and default is _ARG_DEFAULT:
- raise DefaultValueError(name)
-
- result = self.json_body.get(name, default)
-
- if strip and isinstance(result, str):
- return result.strip()
+ def get_str_argument(
+ self,
+ name: str,
+ default: Any = _ARG_DEFAULT,
+ path_safe: bool = True,
+ **kwargs: Any,
+ ) -> Optional[Union[str, list[str]]]:
+ if path_safe:
+ return self.get_validated_argument(name, Validators.PATH_SAFE_STRING, default=default, **kwargs)
+ return self.get_validated_argument(name, Validators.STRING, default=default, **kwargs)
- return result
+ def get_int_argument(
+ self,
+ name: str,
+ default: Any = _ARG_DEFAULT,
+ **kwargs: Any,
+ ) -> Optional[Union[int, list[int]]]:
+ return self.get_validated_argument(name, Validators.INTEGER, default=default, **kwargs)
- if default is _ARG_DEFAULT:
- return self._get_body_argument(name, strip=strip)
- return self._get_body_argument(name, default, strip)
+ def get_bool_argument(
+ self,
+ name: str,
+ default: Any = _ARG_DEFAULT,
+ **kwargs: Any,
+ ) -> Optional[Union[bool, list[bool]]]:
+ return self.get_validated_argument(name, Validators.BOOLEAN, default=default, **kwargs)
- def _get_body_argument(
+ def get_float_argument(
self,
name: str,
default: Any = _ARG_DEFAULT,
- strip: bool = True,
- ) -> Optional[str]:
- args = self._get_body_arguments(name, strip=strip)
- if not args:
- if default is _ARG_DEFAULT:
- raise DefaultValueError(name)
- return default
- return args[-1]
-
- def _get_body_arguments(self, name: str, strip: bool = True) -> list[str]:
- values = []
- for v in self.body_arguments.get(name, []):
- s = self.decode_argument(v, name=name)
- if isinstance(s, str):
- s = _remove_control_chars_regex.sub(' ', s)
- if strip:
- s = s.strip()
- values.append(s)
- return values
-
- def parse_body_bytes(self) -> None:
- if self._get_request_mime_type() == media_types.APPLICATION_JSON:
- return
- else:
- parse_body_arguments(
- self.get_header('Content-Type', ''),
- self.body_bytes,
- self.body_arguments,
- self.files,
- self.header_params, # type: ignore
- )
+ **kwargs: Any,
+ ) -> Optional[Union[float, list[float]]]:
+ return self.get_validated_argument(name, Validators.FLOAT, default=default, **kwargs)
+
+ def _get_request_mime_type(self, request: HTTPServerRequest) -> str:
+ content_type = request.headers.get('Content-Type', '')
+ return re.split(MEDIA_TYPE_PARAMETERS_SEPARATOR_RE, content_type)[0]
+
+ def set_status(self, status_code: int, reason: Optional[str] = None) -> None:
+ status_code = status_code if status_code in ALLOWED_STATUSES else http.client.SERVICE_UNAVAILABLE
+ super().set_status(status_code, reason=reason)
+
+ def redirect(self, url: str, *args: Any, allow_protocol_relative: bool = False, **kwargs: Any) -> None:
+ if not allow_protocol_relative and url.startswith('//'):
+ # A redirect with two initial slashes is a "protocol-relative" URL.
+ # This means the next path segment is treated as a hostname instead
+ # of a part of the path, making this effectively an open redirect.
+ # Reject paths starting with two slashes to prevent this.
+ # This is only reachable under certain configurations.
+ raise tornado.web.HTTPError(403, 'cannot redirect path with two initial slashes')
+ self.log.info('redirecting to: %s', url)
+ return super().redirect(url, *args, **kwargs)
@property
def json_body(self):
- if self._json_body is None:
+ if not hasattr(self, '_json_body'):
self._json_body = self._get_json_body()
return self._json_body
def _get_json_body(self) -> Any:
try:
- return json_decode(self.body_bytes)
+ return json_decode(self.request.body)
except FrontikJsonDecodeError as _:
raise JSONBodyParseError()
- def decode_argument(self, value: bytes, name: Optional[str] = None) -> str:
- try:
- return value.decode('utf-8')
- except UnicodeError:
- self.log.warning('cannot decode utf-8 body parameter %s, trying other charsets', name)
+ @classmethod
+ def add_callback(cls, callback: Callable, *args: Any, **kwargs: Any) -> None:
+ IOLoop.current().add_callback(callback, *args, **kwargs)
- try:
- return frontik.util.decode_string_from_charset(value)
- except UnicodeError:
- self.log.exception('cannot decode body parameter %s, ignoring invalid chars', name)
- return value.decode('utf-8', 'ignore')
+ @classmethod
+ def add_timeout(cls, deadline: float, callback: Callable, *args: Any, **kwargs: Any) -> Any:
+ return IOLoop.current().add_timeout(deadline, callback, *args, **kwargs)
- @overload
- def get_header(self, param_name: str, default: None = None) -> Optional[str]: ...
+ @staticmethod
+ def remove_timeout(timeout):
+ IOLoop.current().remove_timeout(timeout)
- @overload
- def get_header(self, param_name: str, default: str) -> str: ...
+ @classmethod
+ def add_future(cls, future: Future, callback: Callable) -> None:
+ IOLoop.current().add_future(future, callback)
- def get_header(self, param_name: str, default: Optional[str] = None) -> Optional[str]:
- return self.header_params.get(param_name.lower(), default)
+ # Requests handling
- def set_header(self, k: str, v: str) -> None:
- self.resp_headers[k] = v
+ async def my_execute(self) -> tuple[int, str, HTTPHeaders, bytes]:
+ try:
+ await super()._execute([], b'', b'')
+ except Exception as ex:
+ self._handle_request_exception(ex)
+ return await self.handler_result_future # status, reason, headers, chunk
- def _get_request_mime_type(self) -> str:
- content_type = self.get_header('Content-Type', '')
- return re.split(MEDIA_TYPE_PARAMETERS_SEPARATOR_RE, content_type)[0]
+ async def get(self, *args, **kwargs):
+ await self._execute_page()
- def clear_header(self, name: str) -> None:
- if name in self.resp_headers:
- del self.resp_headers[name]
+ async def post(self, *args, **kwargs):
+ await self._execute_page()
- def clear_cookie(self, name: str, path: str = '/', domain: Optional[str] = None) -> None:
- expires = datetime.datetime.now() - datetime.timedelta(days=365)
- self.set_cookie(name, value='', expires=expires, path=path, domain=domain)
+ async def put(self, *args, **kwargs):
+ await self._execute_page()
- def get_cookie(self, param_name: str, default: Optional[str]) -> Optional[str]:
- return self.cookie_params.get(param_name, default)
+ async def delete(self, *args, **kwargs):
+ await self._execute_page()
- def set_cookie(
- self,
- name: str,
- value: Union[str, bytes],
- domain: Optional[str] = None,
- expires: Optional[Union[float, tuple, datetime.datetime]] = None,
- path: str = '/',
- expires_days: Optional[float] = None,
- # Keyword-only args start here for historical reasons.
- *,
- max_age: Optional[int] = None,
- httponly: bool = False,
- secure: bool = False,
- samesite: Optional[str] = None,
- ) -> None:
- name = str(name)
- value = str(value)
- if re.search(r'[\x00-\x20]', name + value):
- # Don't let us accidentally inject bad stuff
- raise ValueError('Invalid cookie %s: %s', name, value)
-
- if name in self.resp_cookies:
- del self.resp_cookies[name]
- self.resp_cookies[name] = {'value': value}
- morsel = self.resp_cookies[name]
- if domain:
- morsel['domain'] = domain
- if expires_days is not None and not expires:
- expires = datetime.datetime.now() + datetime.timedelta(days=expires_days)
- if expires:
- morsel['expires'] = format_timestamp(expires)
- if path:
- morsel['path'] = path
- if max_age:
- # Note change from _ to -.
- morsel['max_age'] = str(max_age)
- if httponly:
- # Note that SimpleCookie ignores the value here. The presense of an
- # httponly (or secure) key is treated as true.
- morsel['httponly'] = True
- if secure:
- morsel['secure'] = True
- if samesite:
- morsel['samesite'] = samesite
+ async def head(self, *args, **kwargs):
+ await self._execute_page()
- # Requests handling
+ def options(self, *args, **kwargs):
+ self.return_405()
- def require_debug_access(self, login: Optional[str] = None, passwd: Optional[str] = None) -> None:
- if self._debug_access is None:
- if options.debug:
- debug_access = True
- else:
- check_login = login if login is not None else options.debug_login
- check_passwd = passwd if passwd is not None else options.debug_password
- frontik.auth.check_debug_auth(self, check_login, check_passwd)
- debug_access = True
+ async def _execute_page(self) -> None:
+ self.stages_logger.commit_stage('prepare')
- self._debug_access = debug_access
+ f_request = Request({
+ 'type': 'http',
+ 'query_string': '',
+ 'headers': '',
+ 'handler': self,
+ })
- def set_status(self, status_code: int, reason: Optional[str] = None) -> None:
- status_code = status_code if status_code in ALLOWED_STATUSES else http.client.SERVICE_UNAVAILABLE
+ values, errors, _, _, _ = await solve_dependencies(
+ request=f_request, dependant=self.route.dependant, body=None, dependency_overrides_provider=None
+ )
+ if errors:
+ raise RuntimeError(f'dependency solving failed: {errors}')
- self._status = status_code
- self._reason = reason
+ assert self.route.dependant.call is not None
+ await self.route.dependant.call(**values)
- def get_status(self) -> int:
- return self._status
+ self._handler_finished_notification()
+ await self.finish_group.get_gathering_future()
+ await self.finish_group.get_finish_future()
- def redirect(self, url: str, permanent: bool = False, status: Optional[int] = None) -> None:
- if url.startswith('//'):
- raise RuntimeError('403 cannot redirect path with two initial slashes')
- self.log.info('redirecting to: %s', url)
- if status is None:
- status = 301 if permanent else 302
- else:
- assert isinstance(status, int)
- assert 300 <= status <= 399
- raise RedirectPageSignal(url, status)
+ render_result = await self._postprocess()
+ if render_result is not None:
+ self.write(render_result)
- def finish(self, data: Optional[Union[str, bytes, dict]] = None) -> None:
- raise FinishPageSignal(data)
+ def return_405(self) -> None:
+ allowed_methods = [name for name in ('get', 'post', 'put', 'delete') if f'{name}_page' in vars(self.__class__)]
+ self.set_header('Allow', ', '.join(allowed_methods))
+ self.set_status(405)
+ self.finish()
- async def get_page_fail_fast(self, request_result: RequestResult) -> tuple[int, dict, Any]:
- return await self.__return_error(request_result.status_code, error_info={'is_fail_fast': True})
+ def get_page_fail_fast(self, request_result: RequestResult) -> None:
+ self.__return_error(request_result.status_code, error_info={'is_fail_fast': True})
- async def post_page_fail_fast(self, request_result: RequestResult) -> tuple[int, dict, Any]:
- return await self.__return_error(request_result.status_code, error_info={'is_fail_fast': True})
+ def post_page_fail_fast(self, request_result: RequestResult) -> None:
+ self.__return_error(request_result.status_code, error_info={'is_fail_fast': True})
- async def put_page_fail_fast(self, request_result: RequestResult) -> tuple[int, dict, Any]:
- return await self.__return_error(request_result.status_code, error_info={'is_fail_fast': True})
+ def put_page_fail_fast(self, request_result: RequestResult) -> None:
+ self.__return_error(request_result.status_code, error_info={'is_fail_fast': True})
- async def delete_page_fail_fast(self, request_result: RequestResult) -> tuple[int, dict, Any]:
- return await self.__return_error(request_result.status_code, error_info={'is_fail_fast': True})
+ def delete_page_fail_fast(self, request_result: RequestResult) -> None:
+ self.__return_error(request_result.status_code, error_info={'is_fail_fast': True})
- async def __return_error(self, response_code: int, **kwargs: Any) -> tuple[int, dict, Any]:
- return await self.send_error(response_code if 300 <= response_code < 500 else 502, **kwargs)
+ def __return_error(self, response_code: int, **kwargs: Any) -> None:
+ if not (300 <= response_code < 500 or response_code == NON_CRITICAL_BAD_GATEWAY):
+ response_code = HTTPStatus.BAD_GATEWAY
+ self.send_error(response_code, **kwargs)
# Finish page
def is_finished(self) -> bool:
return self._finished
- async def finish_with_postprocessors(self) -> tuple[int, dict, Any]:
- if self.finish_group.pending():
- self.log.error('finish_with_postprocessors before finish group done')
+ def check_finished(self, callback: Callable) -> Callable:
+ @wraps(callback)
+ def wrapper(*args, **kwargs):
+ if self.is_finished():
+ self.log.warning('page was already finished, %s ignored', callback)
+ else:
+ return callback(*args, **kwargs)
+
+ return wrapper
+
+ def finish_with_postprocessors(self) -> None:
+ if not self.finish_group.get_finish_future().done():
self.finish_group.abort()
- content = await self._postprocess()
- return self.get_status(), self.resp_headers, content
+ def _cb(future: Future) -> None:
+ if (ex := future.exception()) is not None:
+ self.log.error('postprocess failed %s', ex)
+ self.set_status(500)
+ self.finish()
+ if future.result() is not None:
+ self.finish(future.result())
+
+ asyncio.create_task(self._postprocess()).add_done_callback(_cb)
def run_task(self: PageHandler, coro: Coroutine) -> Task:
task = asyncio.create_task(coro)
@@ -600,42 +485,40 @@ async def _postprocess(self) -> Any:
)
return postprocessed_result
- def on_finish(self, status: int) -> None:
- self.stages_logger.commit_stage('flush')
- self.stages_logger.flush_stages(status)
+ def on_connection_close(self):
+ with request_context.request_context(self.request_id):
+ super().on_connection_close()
- async def handle_request_exception(self, ex: BaseException) -> tuple[int, dict, Any]:
- if isinstance(ex, FinishPageSignal):
- chunk = _data_to_chunk(ex.data, self.resp_headers)
- return self.get_status(), self.resp_headers, chunk
-
- if isinstance(ex, RedirectPageSignal):
- self.set_header('Location', ex.url)
- return ex.status, self.resp_headers, None
+ self.finish_group.abort()
+ self.set_status(CLIENT_CLOSED_REQUEST, 'Client closed the connection: aborting request')
- if isinstance(ex, FinishWithPostprocessors):
- if ex.wait_finish_group:
- await self.finish_group.finish()
- return await self.finish_with_postprocessors()
+ self.stages_logger.commit_stage('page')
+ self.stages_logger.flush_stages(self.get_status())
- if isinstance(ex, HTTPErrorWithPostprocessors):
- self.set_status(ex.status_code)
- return await self.finish_with_postprocessors()
+ self.finish()
- if isinstance(ex, HTTPException):
- self.resp_cookies = {}
- if ex.headers is None:
- ex.headers = {'Content-Type': media_types.TEXT_PLAIN}
+ def on_finish(self) -> None:
+ self.stages_logger.commit_stage('flush')
+ self.stages_logger.flush_stages(self.get_status())
- self.log.error('HTTPException with code: %s, reason: %s', ex.status_code, ex.detail)
+ def _handle_request_exception(self, e: BaseException) -> None:
+ if isinstance(e, AbortAsyncGroup):
+ self.log.info('page was aborted, skipping postprocessing')
+ return
- if hasattr(self, 'write_error'):
- return await self.write_error(ex.status_code, exc_info=sys.exc_info())
+ if isinstance(e, FinishWithPostprocessors):
+ if e.wait_finish_group:
+ self._handler_finished_notification()
+ self.add_future(self.finish_group.get_finish_future(), lambda _: self.finish_with_postprocessors())
+ else:
+ self.finish_with_postprocessors()
+ return
- return build_error_data(ex.status_code, ex.detail)
+ if self._finished and not isinstance(e, Finish):
+ return
- if isinstance(ex, FailFastError):
- request = ex.failed_result.request
+ if isinstance(e, FailFastError):
+ request = e.failed_result.request
if self.log.isEnabledFor(logging.WARNING):
_max_uri_length = 24
@@ -646,37 +529,154 @@ async def handle_request_exception(self, ex: BaseException) -> tuple[int, dict,
if request.name:
request_name = f'{request_name} ({request.name})'
- self.log.error(
+ self.log.warning(
'FailFastError: request %s failed with %s code',
request_name,
- ex.failed_result.status_code,
+ e.failed_result.status_code,
)
- error_method_name = f'{self.method.lower()}_page_fail_fast'
- method = getattr(self, error_method_name, None)
- if callable(method):
- return await method(ex.failed_result)
- else:
- return await self.__return_error(ex.failed_result.status_code, error_info={'is_fail_fast': True})
+ try:
+ error_method_name = f'{self.request.method.lower()}_page_fail_fast' # type: ignore
+ method = getattr(self, error_method_name, None)
+ if callable(method):
+ method(e.failed_result)
+ else:
+ self.__return_error(e.failed_result.status_code, error_info={'is_fail_fast': True})
+
+ except Exception as exc:
+ super()._handle_request_exception(exc)
else:
- raise ex
+ super()._handle_request_exception(e)
- async def send_error(self, status_code: int = 500, **kwargs: Any) -> tuple[int, dict, Any]:
+ def send_error(self, status_code: int = 500, **kwargs: Any) -> None:
+ """`send_error` is adapted to support `write_error` that can call
+ `finish` asynchronously.
+ """
self.stages_logger.commit_stage('page')
- self._reason = kwargs.get('reason')
+ if self._headers_written:
+ super().send_error(status_code, **kwargs)
+ return
+
+ reason = kwargs.get('reason')
+ if 'exc_info' in kwargs:
+ exception = kwargs['exc_info'][1]
+ if isinstance(exception, tornado.web.HTTPError) and exception.reason:
+ reason = exception.reason
+ else:
+ exception = None
+
+ if not isinstance(exception, HTTPErrorWithPostprocessors):
+ self.clear()
+
+ self.set_status(status_code, reason=reason)
+
+ try:
+ self.write_error(status_code, **kwargs)
+ except Exception:
+ self.log.exception('Uncaught exception in write_error')
+ if not self._finished:
+ self.finish()
+
+ def write_error(self, status_code: int = 500, **kwargs: Any) -> None:
+ """
+ `write_error` can call `finish` asynchronously if HTTPErrorWithPostprocessors is raised.
+ """
+ exception = kwargs['exc_info'][1] if 'exc_info' in kwargs else None
+
+ if isinstance(exception, HTTPErrorWithPostprocessors):
+ self.finish_with_postprocessors()
+ return
+
+ self.set_header('Content-Type', media_types.TEXT_HTML)
+ super().write_error(status_code, **kwargs)
- self.set_status(status_code, reason=self._reason)
- return build_error_data(status_code, self._reason)
+ def finish(self, chunk: Optional[Union[str, bytes, dict]] = None) -> Future[None]:
+ self.stages_logger.commit_stage('postprocess')
+ for name, value in self._mandatory_headers.items():
+ self.set_header(name, value)
+
+ for args, kwargs in self._mandatory_cookies.values():
+ try:
+ self.set_cookie(*args, **kwargs)
+ except ValueError:
+ self.set_status(http.client.BAD_REQUEST)
- def cleanup(self) -> None:
+ if self._status_code in (204, 304) or (100 <= self._status_code < 200):
+ self._write_buffer = []
+ chunk = None
+
+ if self._finished:
+ raise RuntimeError('finish() called twice')
+
+ if chunk is not None:
+ self.write(chunk)
+
+ if not self._headers_written:
+ if self._status_code == 200 and self.request.method in ('GET', 'HEAD') and 'Etag' not in self._headers:
+ self.set_etag_header()
+ if self.check_etag_header():
+ self._write_buffer = []
+ self.set_status(304)
+ if self._status_code in (204, 304) or (100 <= self._status_code < 200):
+ assert not self._write_buffer, 'Cannot send body with %s' % self._status_code
+ self._clear_representation_headers()
+ elif 'Content-Length' not in self._headers:
+ content_length = sum(len(part) for part in self._write_buffer)
+ self.set_header('Content-Length', content_length)
+
+ future = self.flush(include_footers=True)
self._finished = True
- if hasattr(self, 'active_limit'):
- self.active_limit.release()
+ self.on_finish()
+ return future
+
+ def flush(self, include_footers: bool = False) -> Future[None]:
+ assert self.request.connection is not None
+ chunk = b''.join(self._write_buffer)
+ self._write_buffer = []
+ self._headers_written = True
+
+ if self.request.method == 'HEAD':
+ chunk = b''
+
+ if hasattr(self, '_new_cookie'):
+ for cookie in self._new_cookie.values():
+ self.add_header('Set-Cookie', cookie.OutputString(None))
+
+ self.handler_result_future.set_result((self._status_code, self._reason, self._headers, chunk))
+
+ future = Future() # type: Future[None]
+ future.set_result(None)
+ return future
# postprocessors
+ def set_mandatory_header(self, name: str, value: str) -> None:
+ self._mandatory_headers[name] = value
+
+ def set_mandatory_cookie(
+ self,
+ name: str,
+ value: str,
+ domain: Optional[str] = None,
+ expires: Optional[str] = None,
+ path: str = '/',
+ expires_days: Optional[int] = None,
+ **kwargs: Any,
+ ) -> None:
+ self._mandatory_cookies[name] = ((name, value, domain, expires, path, expires_days), kwargs)
+
+ def clear_header(self, name: str) -> None:
+ if name in self._mandatory_headers:
+ del self._mandatory_headers[name]
+ super().clear_header(name)
+
+ def clear_cookie(self, name: str, path: str = '/', domain: Optional[str] = None) -> None: # type: ignore
+ if name in self._mandatory_cookies:
+ del self._mandatory_cookies[name]
+ super().clear_cookie(name, path=path, domain=domain)
+
async def _run_postprocessors(self, postprocessors: list) -> bool:
for p in postprocessors:
if asyncio.iscoroutinefunction(p):
@@ -714,7 +714,7 @@ def add_postprocessor(self, postprocessor: Any) -> None:
async def _generic_producer(self):
self.log.debug('finishing plaintext')
- if self.resp_headers.get('Content-Type') is None:
+ if self._headers.get('Content-Type') is None:
self.set_header('Content-Type', media_types.TEXT_HTML)
return self.text, None
@@ -732,7 +732,6 @@ def set_template(self, filename: str) -> None:
def modify_http_client_request(self, balanced_request: RequestBuilder) -> None:
balanced_request.headers['x-request-id'] = request_context.get_request_id()
-
balanced_request.headers[OUTER_TIMEOUT_MS_HEADER] = f'{balanced_request.request_timeout * 1000:.0f}'
if self.timeout_checker is not None:
@@ -745,10 +744,13 @@ def modify_http_client_request(self, balanced_request: RequestBuilder) -> None:
balanced_request.path = make_url(balanced_request.path, debug_timestamp=int(time.time()))
for header_name in ('Authorization', DEBUG_AUTH_HEADER_NAME):
- authorization = self.get_header(header_name)
+ authorization = self.request.headers.get(header_name)
if authorization is not None:
balanced_request.headers[header_name] = authorization
+ def group(self, futures: dict) -> Task:
+ return self.run_task(gather_dict(coro_dict=futures))
+
def get_url(
self,
host: str,
@@ -967,7 +969,7 @@ def _execute_http_client_method(
client_method: Callable,
waited: bool,
) -> Future[RequestResult]:
- if waited and (self.is_finished() or self.finish_group.done()):
+ if waited and (self.is_finished() or self.finish_group.is_finished()):
handler_logger.info(
'attempted to make waited http request to %s %s in finished handler, ignoring',
host,
@@ -975,7 +977,7 @@ def _execute_http_client_method(
)
future: Future = Future()
- future.set_exception(AbortAsyncGroup('attempted to make waited http request is finished handler'))
+ future.set_exception(AbortAsyncGroup())
return future
future = client_method()
@@ -985,22 +987,23 @@ def _execute_http_client_method(
return future
- def log_request(self, request: Request) -> None:
- request_time = int(1000.0 * (time.time() - self.request_start_time))
- extra = {
- 'ip': request.client.host if request.client else None,
- 'rid': request_context.get_request_id(),
- 'status': self.get_status(),
- 'time': request_time,
- 'method': request.method,
- 'uri': request.url.path,
- }
- handler_name = request_context.get_handler_name()
- if handler_name:
- extra['controller'] = handler_name
+def log_request(tornado_request: httputil.HTTPServerRequest, status_code: int) -> None:
+ request_time = int(1000.0 * tornado_request.request_time())
+ extra = {
+ 'ip': tornado_request.remote_ip,
+ 'rid': request_context.get_request_id(),
+ 'status': status_code,
+ 'time': request_time,
+ 'method': tornado_request.method,
+ 'uri': tornado_request.uri,
+ }
+
+ handler_name = request_context.get_handler_name()
+ if handler_name:
+ extra['controller'] = handler_name
- JSON_REQUESTS_LOGGER.info('', extra={CUSTOM_JSON_EXTRA: extra})
+ JSON_REQUESTS_LOGGER.info('', extra={CUSTOM_JSON_EXTRA: extra})
PageHandlerT = TypeVar('PageHandlerT', bound=PageHandler)
@@ -1008,7 +1011,7 @@ def log_request(self, request: Request) -> None:
def get_current_handler(_: Union[PageHandlerT, Type[PageHandler]] = PageHandler) -> PageHandlerT:
async def handler_getter(request: Request) -> PageHandlerT:
- return request.state.handler
+ return request['handler']
return Depends(handler_getter)
@@ -1019,84 +1022,3 @@ def get_default_headers() -> dict[str, str]:
'Server': f'Frontik/{frontik_version}',
'X-Request-Id': request_id,
}
-
-
-def build_error_data(status_code: int = 500, message: Optional[str] = 'Internal Server Error') -> tuple[int, dict, Any]:
- headers = get_default_headers()
- headers['Content-Type'] = media_types.TEXT_HTML
- content = f'<html><title>{status_code}: {message}</title><body>{status_code}: {message}</body></html>'
- return status_code, headers, content
-
-
-def _data_to_chunk(data: Any, headers: dict) -> bytes:
- result: bytes = b''
- if data is None:
- return result
- if isinstance(data, str):
- result = data.encode('utf-8')
- elif isinstance(data, dict):
- chunk = json.dumps(data).replace('</', '<\\/')
- result = chunk.encode('utf-8')
- headers['Content-Type'] = 'application/json; charset=UTF-8'
- elif isinstance(data, bytes):
- result = data
- else:
- raise TypeError(f'unexpected type of chunk - {type(data)}')
- return result
-
-
-async def process_request(request: Request, call_next: Callable, route: APIRoute) -> Response:
- handler: PageHandler = request.state.handler
-
- try:
- request_context.set_handler_name(f'{route.endpoint.__module__}.{route.endpoint.__name__}')
-
- handler.prepare()
- handler.stages_logger.commit_stage('prepare')
- _response = await call_next(request)
-
- await handler.finish_group.finish()
- handler.stages_logger.commit_stage('page')
-
- content = await handler._postprocess()
- headers = handler.resp_headers
- status = handler.get_status()
-
- handler.stages_logger.commit_stage('postprocess')
-
- except Exception as ex:
- try:
- status, headers, content = await handler.handle_request_exception(ex)
- except Exception as exc:
- handler_logger.error('request processing has failed: %s', exc)
- if getattr(handler, '_debug_enabled', False):
- status, headers, content = build_error_data()
- elif hasattr(handler, 'write_error'):
- status, headers, content = await handler.write_error(exc_info=sys.exc_info())
- else:
- raise
-
- finally:
- handler.cleanup()
-
- if status in (204, 304) or (100 <= status < 200):
- for h in ('Content-Encoding', 'Content-Language', 'Content-Type'):
- if h in headers:
- headers.pop(h)
- content = None
-
- if getattr(handler, '_debug_enabled', False):
- chunk = _data_to_chunk(content, headers)
- debug_transform = DebugTransform(request.app.frontik_app, request)
- status, headers, content = debug_transform.transform_chunk(status, headers, chunk)
-
- response = Response(status_code=status, headers=headers, content=content)
-
- for key, values in handler.resp_cookies.items():
- response.set_cookie(key, **values)
-
- handler.finish_group.abort()
- handler.log_request(request)
- handler.on_finish(status)
-
- return response
diff --git a/frontik/handler_active_limit.py b/frontik/handler_active_limit.py
index 0f45e1561..889346bb7 100644
--- a/frontik/handler_active_limit.py
+++ b/frontik/handler_active_limit.py
@@ -1,9 +1,8 @@
from __future__ import annotations
import logging
-from typing import TYPE_CHECKING
-
-from fastapi import HTTPException
+from contextlib import contextmanager
+from typing import TYPE_CHECKING, Iterator, Union
from frontik.options import options
@@ -18,14 +17,13 @@ class ActiveHandlersLimit:
high_watermark_ratio = 0.75
def __init__(self, statsd_client: StatsDClient | StatsDClientStub) -> None:
- self._acquired = False
+ self.acquired = False
self._statsd_client = statsd_client
self._high_watermark = int(options.max_active_handlers * self.high_watermark_ratio)
if ActiveHandlersLimit.count > options.max_active_handlers:
handlers_count_logger.warning('dropping request: too many active handlers (%s)', ActiveHandlersLimit.count)
-
- raise HTTPException(503)
+ return
elif ActiveHandlersLimit.count > self._high_watermark:
handlers_count_logger.warning(
@@ -38,13 +36,22 @@ def __init__(self, statsd_client: StatsDClient | StatsDClientStub) -> None:
self.acquire()
def acquire(self) -> None:
- if not self._acquired:
+ if not self.acquired:
ActiveHandlersLimit.count += 1
- self._acquired = True
+ self.acquired = True
self._statsd_client.gauge('handler.active_count', ActiveHandlersLimit.count)
def release(self) -> None:
- if self._acquired:
+ if self.acquired:
ActiveHandlersLimit.count -= 1
- self._acquired = False
+ self.acquired = False
self._statsd_client.gauge('handler.active_count', ActiveHandlersLimit.count)
+
+
+@contextmanager
+def request_limiter(statsd_client: Union[StatsDClient, StatsDClientStub]) -> Iterator:
+ active_limit = ActiveHandlersLimit(statsd_client)
+ try:
+ yield active_limit.acquired
+ finally:
+ active_limit.release()
diff --git a/frontik/handler_asgi.py b/frontik/handler_asgi.py
new file mode 100644
index 000000000..9694145da
--- /dev/null
+++ b/frontik/handler_asgi.py
@@ -0,0 +1,187 @@
+from __future__ import annotations
+
+import http.client
+import logging
+from typing import TYPE_CHECKING, Any, Callable, Optional
+
+from fastapi.routing import APIRoute
+from tornado import httputil
+from tornado.httputil import HTTPHeaders
+
+from frontik import media_types, request_context
+from frontik.debug import DebugMode, DebugTransform
+from frontik.handler import PageHandler, get_default_headers, log_request
+from frontik.handler_active_limit import request_limiter
+from frontik.json_builder import JsonBuilder
+from frontik.routing import find_route, get_allowed_methods
+
+if TYPE_CHECKING:
+ from frontik.app import FrontikApplication, FrontikAsgiApp
+
+CHARSET = 'utf-8'
+log = logging.getLogger('handler')
+
+
+async def execute_page(
+ frontik_app: FrontikApplication, tornado_request: httputil.HTTPServerRequest, request_id: str, app: FrontikAsgiApp
+) -> tuple[int, str, HTTPHeaders, bytes]:
+ with request_context.request_context(request_id), request_limiter(frontik_app.statsd_client) as accepted:
+ log.info('requested url: %s', tornado_request.uri)
+ tornado_request.request_id = request_id # type: ignore
+ assert tornado_request.method is not None
+ route, page_cls, path_params = find_route(tornado_request.path, tornado_request.method)
+
+ debug_mode = DebugMode(tornado_request)
+ data: bytes
+
+ if not accepted:
+ status, reason, headers, data = make_not_accepted_response()
+ elif debug_mode.auth_failed():
+ assert debug_mode.failed_auth_header is not None
+ status, reason, headers, data = make_debug_auth_failed_response(debug_mode.failed_auth_header)
+ elif route is None:
+ status, reason, headers, data = make_not_found_response(frontik_app, tornado_request.path)
+ else:
+ request_context.set_handler_name(f'{route.endpoint.__module__}.{route.endpoint.__name__}')
+
+ if page_cls is not None:
+ status, reason, headers, data = await legacy_process_request(
+ frontik_app, tornado_request, route, page_cls, path_params, debug_mode
+ )
+ else:
+ result = {'headers': get_default_headers()}
+ scope, receive, send = convert_tornado_request_to_asgi(
+ frontik_app, tornado_request, route, path_params, debug_mode, result
+ )
+ await app(scope, receive, send)
+
+ status = result['status']
+ reason = httputil.responses.get(status, 'Unknown')
+ headers = HTTPHeaders(result['headers'])
+ data = result['data']
+
+ if not scope['json_builder'].is_empty():
+ if data != b'null':
+ raise RuntimeError("Can't have return and json.put at the same time")
+
+ headers['Content-Type'] = media_types.APPLICATION_JSON
+ data = scope['json_builder'].to_bytes()
+ headers['Content-Length'] = str(len(data))
+
+ if debug_mode.enabled:
+ debug_transform = DebugTransform(frontik_app, debug_mode)
+ status, headers, data = debug_transform.transform_chunk(tornado_request, status, headers, data)
+ reason = httputil.responses.get(status, 'Unknown')
+
+ log_request(tornado_request, status)
+
+ return status, reason, headers, data
+
+
+def make_not_found_response(frontik_app: FrontikApplication, path: str) -> tuple[int, str, HTTPHeaders, bytes]:
+ allowed_methods = get_allowed_methods(path)
+
+ if allowed_methods:
+ status = 405
+ headers = get_default_headers()
+ headers['Allow'] = ', '.join(allowed_methods)
+ data = b''
+ elif hasattr(frontik_app, 'application_404_handler'):
+ status, headers, data = frontik_app.application_404_handler()
+ else:
+ status, headers, data = build_error_data(404, 'Not Found')
+
+ reason = httputil.responses.get(status, 'Unknown')
+ return status, reason, HTTPHeaders(headers), data
+
+
+def make_debug_auth_failed_response(auth_header: str) -> tuple[int, str, HTTPHeaders, bytes]:
+ status = http.client.UNAUTHORIZED
+ reason = httputil.responses.get(status, 'Unknown')
+ headers = get_default_headers()
+ headers['WWW-Authenticate'] = auth_header
+
+ return status, reason, HTTPHeaders(headers), b''
+
+
+def make_not_accepted_response() -> tuple[int, str, HTTPHeaders, bytes]:
+ status = http.client.SERVICE_UNAVAILABLE
+ reason = httputil.responses.get(status, 'Unknown')
+ headers = get_default_headers()
+ return status, reason, HTTPHeaders(headers), b''
+
+
+def build_error_data(
+ status_code: int = 500, message: Optional[str] = 'Internal Server Error'
+) -> tuple[int, dict, bytes]:
+ headers = get_default_headers()
+ headers['Content-Type'] = media_types.TEXT_HTML
+ data = f'<html><title>{status_code}: {message}</title><body>{status_code}: {message}</body></html>'.encode()
+ return status_code, headers, data
+
+
+async def legacy_process_request(
+ frontik_app: FrontikApplication,
+ tornado_request: httputil.HTTPServerRequest,
+ route: APIRoute,
+ page_cls: type[PageHandler],
+ path_params: dict[str, str],
+ debug_mode: DebugMode,
+) -> tuple[int, str, HTTPHeaders, bytes]:
+ handler: PageHandler = page_cls(frontik_app, tornado_request, route, debug_mode, path_params)
+ return await handler.my_execute()
+
+
+def convert_tornado_request_to_asgi(
+ frontik_app: FrontikApplication,
+ tornado_request: httputil.HTTPServerRequest,
+ route: APIRoute,
+ path_params: dict[str, str],
+ debug_mode: DebugMode,
+ result: dict[str, Any],
+) -> tuple[dict, Callable, Callable]:
+ headers = [
+ (header.encode(CHARSET).lower(), value.encode(CHARSET))
+ for header in tornado_request.headers
+ for value in tornado_request.headers.get_list(header)
+ ]
+
+ json_builder = JsonBuilder()
+
+ scope = {
+ 'type': tornado_request.protocol,
+ 'http_version': tornado_request.version,
+ 'path': tornado_request.path,
+ 'method': tornado_request.method,
+ 'query_string': tornado_request.query.encode(CHARSET),
+ 'headers': headers,
+ 'client': (tornado_request.remote_ip, 0),
+ 'route': route,
+ 'path_params': path_params,
+ 'http_client_factory': frontik_app.http_client_factory,
+ 'debug_enabled': debug_mode.enabled,
+ 'pass_debug': debug_mode.pass_debug,
+ 'start_time': tornado_request._start_time,
+ 'json_builder': json_builder,
+ }
+
+ async def receive():
+ return {
+ 'body': tornado_request.body,
+ 'type': 'http.request',
+ 'more_body': False,
+ }
+
+ async def send(data):
+ if data['type'] == 'http.response.start':
+ result['status'] = data['status']
+ for h in data['headers']:
+ if len(h) == 2:
+ result['headers'][h[0].decode(CHARSET)] = h[1].decode(CHARSET)
+ elif data['type'] == 'http.response.body':
+ assert isinstance(data['body'], bytes)
+ result['data'] = data['body']
+ else:
+ raise RuntimeError(f'Unsupported response type "{data["type"]}" for asgi app')
+
+ return scope, receive, send
diff --git a/frontik/integrations/sentry.py b/frontik/integrations/sentry.py
index cb29341a6..7887c946c 100644
--- a/frontik/integrations/sentry.py
+++ b/frontik/integrations/sentry.py
@@ -3,6 +3,7 @@
from typing import TYPE_CHECKING, Optional
import sentry_sdk
+from http_client.request_response import FailFastError
from sentry_sdk.integrations.aiohttp import AioHttpIntegration
from sentry_sdk.integrations.atexit import AtexitIntegration
from sentry_sdk.integrations.dedupe import DedupeIntegration
@@ -11,6 +12,8 @@
from sentry_sdk.integrations.logging import LoggingIntegration
from sentry_sdk.integrations.modules import ModulesIntegration
from sentry_sdk.integrations.stdlib import StdlibIntegration
+from sentry_sdk.integrations.tornado import TornadoIntegration
+from tornado.web import HTTPError
from frontik.integrations import Integration, integrations_logger
from frontik.options import options
@@ -35,6 +38,7 @@ def initialize_app(self, app: FrontikApplication) -> Optional[Future]:
DedupeIntegration(),
ModulesIntegration(),
StdlibIntegration(),
+ TornadoIntegration(),
]
if options.sentry_exception_integration:
@@ -54,6 +58,7 @@ def initialize_app(self, app: FrontikApplication) -> Optional[Future]:
traces_sample_rate=options.sentry_traces_sample_rate,
in_app_include=list(filter(None, options.sentry_in_app_include.split(','))),
profiles_sample_rate=options.sentry_profiles_sample_rate,
+ ignore_errors=[HTTPError, FailFastError],
)
return None
diff --git a/frontik/integrations/statsd.py b/frontik/integrations/statsd.py
index 9631958a6..01c4be340 100644
--- a/frontik/integrations/statsd.py
+++ b/frontik/integrations/statsd.py
@@ -27,7 +27,7 @@ def initialize_app(self, app: FrontikApplication) -> Optional[Future]:
return None
def initialize_handler(self, handler):
- handler.statsd_client = self.statsd_client
+ pass
def _convert_tag(name: str, value: Any) -> str:
diff --git a/frontik/integrations/telemetry.py b/frontik/integrations/telemetry.py
index e5165414e..d77471169 100644
--- a/frontik/integrations/telemetry.py
+++ b/frontik/integrations/telemetry.py
@@ -5,22 +5,21 @@
from typing import TYPE_CHECKING, Optional
from urllib.parse import urlparse
-import opentelemetry.instrumentation.fastapi
from http_client import client_request_context, response_status_code_context
from http_client.options import options as http_client_options
from opentelemetry import trace
from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import OTLPSpanExporter
-from opentelemetry.instrumentation import aiohttp_client, fastapi
+from opentelemetry.instrumentation import aiohttp_client, tornado
from opentelemetry.propagate import set_global_textmap
from opentelemetry.sdk.resources import Resource
-from opentelemetry.sdk.trace import IdGenerator, ReadableSpan, TracerProvider
+from opentelemetry.sdk.trace import IdGenerator, TracerProvider
+from opentelemetry.sdk.trace import Span as SpanImpl
from opentelemetry.sdk.trace.export import BatchSpanProcessor
from opentelemetry.sdk.trace.sampling import ParentBased, TraceIdRatioBased
from opentelemetry.semconv.resource import ResourceAttributes
from opentelemetry.semconv.trace import SpanAttributes
-from opentelemetry.trace import SpanKind
from opentelemetry.trace.propagation.tracecontext import TraceContextTextMapPropagator
-from starlette.types import Scope
+from opentelemetry.util.http import ExcludeList
from frontik import request_context
from frontik.integrations import Integration, integrations_logger
@@ -32,46 +31,39 @@
import aiohttp
from http_client.request_response import RequestBuilder
from opentelemetry.trace import Span
+ from opentelemetry.util import types
from frontik.app import FrontikApplication
log = logging.getLogger('telemetry')
+# Lower the log level: context detach mainly raises exceptions on Tornado 5. Remove this once Tornado is upgraded to 6.
+logging.getLogger('opentelemetry.context').setLevel(logging.CRITICAL)
set_global_textmap(TraceContextTextMapPropagator())
-
-class FrontikSpanProcessor(BatchSpanProcessor):
- def on_end(self, span: ReadableSpan) -> None:
- if (
- span.kind == SpanKind.INTERNAL
- and span.attributes
- and (
- span.attributes.get('type', None)
- in ('http.request', 'http.response.start', 'http.disconnect', 'http.response.body')
- )
- ):
- return
- super().on_end(span=span)
-
-
-def monkey_patch_route_details(scope: Scope) -> tuple:
- route = scope['path']
- span_name = route or scope.get('method', '')
- attributes = {}
- if route:
- attributes[SpanAttributes.HTTP_ROUTE] = route
- return span_name, attributes
+tornado._excluded_urls = ExcludeList([*list(tornado._excluded_urls._excluded_urls), '/status'])
+excluded_span_attributes = ['tornado.handler']
class TelemetryIntegration(Integration):
def __init__(self):
self.aiohttp_instrumentor = aiohttp_client.AioHttpClientInstrumentor()
+ self.tornado_instrumentor = tornado.TornadoInstrumentor()
+ TelemetryIntegration.patch_span_impl()
+
+ @staticmethod
+ def patch_span_impl() -> None:
+ set_attribute = SpanImpl.set_attribute
+
+ def patched_set_attribute(self: SpanImpl, key: str, value: types.AttributeValue) -> None:
+ if key not in excluded_span_attributes:
+ return set_attribute(self, key, value)
+
+ SpanImpl.set_attribute = patched_set_attribute # type: ignore
def initialize_app(self, app: FrontikApplication) -> Optional[Future]:
if not options.opentelemetry_enabled:
return None
- opentelemetry.instrumentation.fastapi._get_route_details = monkey_patch_route_details
-
integrations_logger.info('start telemetry')
resource = Resource(
@@ -90,15 +82,11 @@ def initialize_app(self, app: FrontikApplication) -> Optional[Future]:
sampler=ParentBased(TraceIdRatioBased(options.opentelemetry_sampler_ratio)),
)
- provider.add_span_processor(FrontikSpanProcessor(otlp_exporter))
+ provider.add_span_processor(BatchSpanProcessor(otlp_exporter))
trace.set_tracer_provider(provider)
self.aiohttp_instrumentor.instrument(request_hook=_client_request_hook, response_hook=_client_response_hook)
-
- fastapi.FastAPIInstrumentor.instrument_app(
- app.fastapi_app, server_request_hook=_server_request_hook, excluded_urls='/status'
- )
-
+ self.tornado_instrumentor.instrument(server_request_hook=_server_request_hook)
return None
def deinitialize_app(self, app: FrontikApplication) -> Optional[Future]:
@@ -107,15 +95,21 @@ def deinitialize_app(self, app: FrontikApplication) -> Optional[Future]:
integrations_logger.info('stop telemetry')
self.aiohttp_instrumentor.uninstrument()
- fastapi.FastAPIInstrumentor.uninstrument_app(app.fastapi_app)
+ self.tornado_instrumentor.uninstrument()
return None
def initialize_handler(self, handler):
pass
-def _server_request_hook(span: Span, scope: dict) -> None:
- span.set_attribute(SpanAttributes.HTTP_TARGET, scope['path'])
+def _server_request_hook(span, handler):
+ if (handler_name := request_context.get_handler_name()) is not None:
+ method_path, method_name = handler_name.rsplit('.', 1)
+ span.update_name(f'{method_path}.{method_name}')
+ span.set_attribute(SpanAttributes.CODE_FUNCTION, method_name)
+ span.set_attribute(SpanAttributes.CODE_NAMESPACE, method_path)
+
+ span.set_attribute(SpanAttributes.HTTP_TARGET, handler.request.uri)
def _client_request_hook(span: Span, params: aiohttp.TraceRequestStartParams) -> None:
diff --git a/frontik/json_builder.py b/frontik/json_builder.py
index bdf940cea..9c98054e4 100644
--- a/frontik/json_builder.py
+++ b/frontik/json_builder.py
@@ -1,9 +1,10 @@
from __future__ import annotations
import logging
-from typing import Any, Callable, Optional, Union
+from typing import Annotated, Any, Callable, Optional, Union
import orjson
+from fastapi import Depends, Request
from pydantic import BaseModel
from tornado.concurrent import Future
@@ -60,8 +61,12 @@ def _encode_value(value: Any) -> Any:
raise TypeError
+def json_encode_bytes(obj: Any, default: Callable = _encode_value) -> bytes:
+ return orjson.dumps(obj, default=default, option=orjson.OPT_NON_STR_KEYS)
+
+
def json_encode(obj: Any, default: Callable = _encode_value) -> str:
- return orjson.dumps(obj, default=default, option=orjson.OPT_NON_STR_KEYS).decode('utf-8')
+ return json_encode_bytes(obj, default).decode('utf-8')
def json_decode(value: Union[str, bytes]) -> Any:
@@ -120,3 +125,13 @@ def to_string(self) -> str:
return json_encode(self._concat_chunks())
return json_encode(self._concat_chunks(), default=self._encoder)
+
+ def to_bytes(self) -> bytes:
+ return json_encode_bytes(self._concat_chunks())
+
+
+def get_json_builder(request: Request) -> JsonBuilder:
+ return request['json_builder']
+
+
+JsonBuilderT = Annotated[JsonBuilder, Depends(get_json_builder)]
diff --git a/frontik/producers/json_producer.py b/frontik/producers/json_producer.py
index 4881e0772..96f24c253 100644
--- a/frontik/producers/json_producer.py
+++ b/frontik/producers/json_producer.py
@@ -10,6 +10,7 @@
from tornado.escape import to_unicode
from frontik import json_builder, media_types
+from frontik.auth import check_debug_auth_or_finish
from frontik.options import options
from frontik.producers import ProducerFactory
from frontik.util import get_abs_path, get_cookie_or_url_param_value
@@ -39,7 +40,7 @@ def __init__(
def __call__(self):
if get_cookie_or_url_param_value(self.handler, 'notpl') is not None:
- self.handler.require_debug_access()
+ check_debug_auth_or_finish(self.handler)
self.log.debug('ignoring templating because notpl parameter is passed')
return self._finish_with_json()
@@ -108,7 +109,7 @@ async def _finish_with_template(self) -> tuple[Optional[str], None]:
msg = 'Cannot apply template, no Jinja2 environment configured'
raise Exception(msg)
- if self.handler.resp_headers.get('Content-Type', None) is None:
+ if self.handler._headers.get('Content-Type') is None:
self.handler.set_header('Content-Type', media_types.TEXT_HTML)
try:
@@ -141,7 +142,7 @@ async def _finish_with_template(self) -> tuple[Optional[str], None]:
async def _finish_with_json(self) -> tuple[str, None]:
self.log.debug('finishing without templating')
- if self.handler.resp_headers.get('Content-Type', None) is None:
+ if self.handler._headers.get('Content-Type') is None:
self.handler.set_header('Content-Type', media_types.APPLICATION_JSON)
return self.json.to_string(), None
diff --git a/frontik/producers/xml_producer.py b/frontik/producers/xml_producer.py
index 86144943d..dbf18782e 100644
--- a/frontik/producers/xml_producer.py
+++ b/frontik/producers/xml_producer.py
@@ -14,6 +14,7 @@
import frontik.doc
import frontik.util
from frontik import file_cache, media_types
+from frontik.auth import check_debug_auth_or_finish
from frontik.options import options
from frontik.producers import ProducerFactory
from frontik.util import get_abs_path
@@ -49,7 +50,7 @@ def __init__(
def __call__(self):
if any(frontik.util.get_cookie_or_url_param_value(self.handler, p) is not None for p in ('noxsl', 'notpl')):
- self.handler.require_debug_access()
+ check_debug_auth_or_finish(self.handler)
self.log.debug('ignoring XSLT because noxsl/notpl parameter is passed')
return self._finish_with_xml(escape_xmlns=True)
@@ -76,7 +77,7 @@ def set_xsl(self, filename: str) -> None:
async def _finish_with_xslt(self) -> tuple[Optional[str], Optional[list[Any]]]:
self.log.debug('finishing with XSLT')
- if self.handler.resp_headers.get('Content-Type', None) is None:
+ if self.handler._headers.get('Content-Type') is None:
self.handler.set_header('Content-Type', media_types.TEXT_HTML)
def job():
@@ -127,7 +128,7 @@ def get_xsl_log() -> str:
async def _finish_with_xml(self, escape_xmlns: bool = False) -> tuple[bytes, None]:
self.log.debug('finishing without XSLT')
- if self.handler.resp_headers.get('Content-Type', None) is None:
+ if self.handler._headers.get('Content-Type') is None:
self.handler.set_header('Content-Type', media_types.APPLICATION_XML)
if escape_xmlns:
diff --git a/frontik/routing.py b/frontik/routing.py
index 7e605b870..2a4efce41 100644
--- a/frontik/routing.py
+++ b/frontik/routing.py
@@ -1,20 +1,18 @@
+from __future__ import annotations
+
import importlib
import logging
import pkgutil
import re
-import time
from collections.abc import Generator
from pathlib import Path
-from typing import Any, Callable, MutableSequence, Optional, Type, Union
+from typing import TYPE_CHECKING, Any, Callable, MutableSequence, Optional, Type, Union
-from fastapi import APIRouter, Request, Response
+from fastapi import APIRouter
from fastapi.routing import APIRoute
-from starlette.middleware.base import BaseHTTPMiddleware
-from frontik import request_context
-from frontik.handler import PageHandler, build_error_data, get_default_headers, process_request
-from frontik.options import options
-from frontik.util import check_request_id, generate_uniq_timestamp_request_id
+if TYPE_CHECKING:
+ from frontik.handler import PageHandler
routing_logger = logging.getLogger('frontik.routing')
@@ -30,23 +28,23 @@ def __init__(self, *args: Any, **kwargs: Any) -> None:
self._cls: Optional[Type[PageHandler]] = None
self._path: Optional[str] = None
- def get(self, path: str, cls: Type[PageHandler] = PageHandler, **kwargs: Any) -> Callable:
+ def get(self, path: str, cls: Optional[Type[PageHandler]] = None, **kwargs: Any) -> Callable:
self._path, self._cls = path, cls
return super().get(path, **kwargs)
- def post(self, path: str, cls: Type[PageHandler] = PageHandler, **kwargs: Any) -> Callable:
+ def post(self, path: str, cls: Optional[Type[PageHandler]] = None, **kwargs: Any) -> Callable:
self._path, self._cls = path, cls
return super().post(path, **kwargs)
- def put(self, path: str, cls: Type[PageHandler] = PageHandler, **kwargs: Any) -> Callable:
+ def put(self, path: str, cls: Optional[Type[PageHandler]] = None, **kwargs: Any) -> Callable:
self._path, self._cls = path, cls
return super().put(path, **kwargs)
- def delete(self, path: str, cls: Type[PageHandler] = PageHandler, **kwargs: Any) -> Callable:
+ def delete(self, path: str, cls: Optional[Type[PageHandler]] = None, **kwargs: Any) -> Callable:
self._path, self._cls = path, cls
return super().delete(path, **kwargs)
- def head(self, path: str, cls: Type[PageHandler] = PageHandler, **kwargs: Any) -> Callable:
+ def head(self, path: str, cls: Optional[Type[PageHandler]] = None, **kwargs: Any) -> Callable:
self._path, self._cls = path, cls
return super().head(path, **kwargs)
@@ -69,23 +67,23 @@ def __init__(self, *args: Any, **kwargs: Any) -> None:
self._cls: Optional[Type[PageHandler]] = None
self._path: Optional[str] = None
- def get(self, path: str, cls: Type[PageHandler] = PageHandler, **kwargs: Any) -> Callable:
+ def get(self, path: str, cls: Optional[Type[PageHandler]] = None, **kwargs: Any) -> Callable:
self._path, self._cls = path, cls
return super().get(path, **kwargs)
- def post(self, path: str, cls: Type[PageHandler] = PageHandler, **kwargs: Any) -> Callable:
+ def post(self, path: str, cls: Optional[Type[PageHandler]] = None, **kwargs: Any) -> Callable:
self._path, self._cls = path, cls
return super().post(path, **kwargs)
- def put(self, path: str, cls: Type[PageHandler] = PageHandler, **kwargs: Any) -> Callable:
+ def put(self, path: str, cls: Optional[Type[PageHandler]] = None, **kwargs: Any) -> Callable:
self._path, self._cls = path, cls
return super().put(path, **kwargs)
- def delete(self, path: str, cls: Type[PageHandler] = PageHandler, **kwargs: Any) -> Callable:
+ def delete(self, path: str, cls: Optional[Type[PageHandler]] = None, **kwargs: Any) -> Callable:
self._path, self._cls = path, cls
return super().delete(path, **kwargs)
- def head(self, path: str, cls: Type[PageHandler] = PageHandler, **kwargs: Any) -> Callable:
+ def head(self, path: str, cls: Optional[Type[PageHandler]] = None, **kwargs: Any) -> Callable:
self._path, self._cls = path, cls
return super().head(path, **kwargs)
@@ -136,85 +134,37 @@ def import_all_pages(app_module: Optional[str]) -> None:
routers.extend((router, regex_router))
-def _get_remote_ip(request: Request) -> str:
- ip = request.headers.get('X-Real-Ip', None) or request.headers.get('X-Forwarded-For', None)
- if ip is None and request.client:
- ip = request.client.host
- return ip or ''
+def _find_regex_route(
+ path: str, method: str
+) -> Union[tuple[APIRoute, Type[PageHandler], dict], tuple[None, None, None]]:
+ for pattern, route, cls in _regex_mapping:
+ match = pattern.match(path)
+ if match and next(iter(route.methods), None) == method:
+ return route, cls, match.groupdict()
+ return None, None, None
-def _setup_page_handler(request: Request, cls: Type[PageHandler]) -> None:
- # create legacy PageHandler and put to request
- handler = cls(
- request.app.frontik_app,
- request.query_params,
- request.cookies,
- request.headers,
- request.state.body_bytes,
- request.state.start_time,
- request.url.path,
- request.state.path_params,
- _get_remote_ip(request),
- request.method,
- )
- request.state.handler = handler
+def find_route(path: str, method: str) -> tuple[APIRoute, type, dict]:
+ route: APIRoute
+ route, page_cls = _plain_routes.get((path, method), (None, None))
+ path_params: dict = {}
+ if route is None:
+ route, page_cls, path_params = _find_regex_route(path, method)
-def _find_regex_route(request: Request) -> Union[tuple[APIRoute, Type[PageHandler], dict], tuple[None, None, None]]:
- for pattern, route, cls in _regex_mapping:
- match = pattern.match(request.url.path)
- if match and next(iter(route.methods), None) == request.method:
- return route, cls, match.groupdict()
+ if route is None:
+ routing_logger.error('match for request url %s "%s" not found', method, path)
+ return None, None, None
- return None, None, None
+ return route, page_cls, path_params
-def make_not_found_response(frontik_app, path):
+def get_allowed_methods(path: str) -> list[str]:
allowed_methods = []
for method in ('GET', 'POST', 'PUT', 'DELETE', 'HEAD'):
route, page_cls = _plain_routes.get((path, method), (None, None))
if route is not None:
allowed_methods.append(method)
- if allowed_methods:
- status = 405
- headers = get_default_headers()
- headers['Allow'] = ', '.join(allowed_methods)
- content = b''
- elif hasattr(frontik_app, 'application_404_handler'):
- status, headers, content = frontik_app.application_404_handler()
- else:
- status, headers, content = build_error_data(404, 'Not Found')
-
- return Response(status_code=status, headers=headers, content=content)
-
-
-class RoutingMiddleware(BaseHTTPMiddleware):
- async def dispatch(self, request: Request, _ignored_call_next: Callable) -> Response:
- request.state.start_time = time.time()
-
- routing_logger.info('requested url: %s', request.url.path)
-
- request_id = request.headers.get('X-Request-Id') or generate_uniq_timestamp_request_id()
- if options.validate_request_id:
- check_request_id(request_id)
-
- with request_context.request_context(request_id):
- route: APIRoute
- route, page_cls = _plain_routes.get((request.url.path, request.method), (None, None))
- request.state.path_params = {}
-
- if route is None:
- route, page_cls, path_params = _find_regex_route(request)
- request.state.path_params = path_params
-
- if route is None:
- routing_logger.error('match for request url %s "%s" not found', request.method, request.url.path)
- return make_not_found_response(request.app.frontik_app, request.url.path)
-
- request.state.body_bytes = await request.body()
- _setup_page_handler(request, page_cls)
-
- response = await process_request(request, route.get_route_handler(), route)
- return response
+ return allowed_methods
diff --git a/frontik/server.py b/frontik/server.py
index 808b9ae84..3657eba07 100644
--- a/frontik/server.py
+++ b/frontik/server.py
@@ -4,28 +4,23 @@
import importlib
import logging
import signal
-import socket
import sys
from asyncio import Future
-from collections.abc import Awaitable, Coroutine
+from collections.abc import Coroutine
from concurrent.futures import ThreadPoolExecutor
-from datetime import timedelta
from functools import partial
from threading import Lock
from typing import Any, Callable, Optional, Union
-import anyio
import tornado.autoreload
-import uvicorn
from http_client.balancing import Upstream
-from starlette.middleware import Middleware
+from tornado.httpserver import HTTPServer
from frontik.app import FrontikApplication
from frontik.config_parser import parse_configs
from frontik.loggers import MDC
from frontik.options import options
from frontik.process import fork_workers
-from frontik.routing import RoutingMiddleware, import_all_pages, routers
log = logging.getLogger('server')
@@ -109,79 +104,28 @@ def _run_worker(app: FrontikApplication) -> None:
loop = asyncio.get_event_loop()
executor = ThreadPoolExecutor(options.common_executor_pool_size)
loop.set_default_executor(executor)
- init_task = loop.create_task(_init_app(app))
+ initialize_application_task = loop.create_task(_init_app(app))
- def initialize_application_task_result_handler(task):
- if task.exception():
+ def initialize_application_task_result_handler(future):
+ if future.exception():
loop.stop()
- init_task.add_done_callback(initialize_application_task_result_handler)
+ initialize_application_task.add_done_callback(initialize_application_task_result_handler)
loop.run_forever()
+ # to raise init exception if any
+ initialize_application_task.result()
- if init_task.done() and init_task.exception():
- raise RuntimeError('worker failed') from init_task.exception()
-
-async def periodic_task(callback: Callable, check_timedelta: timedelta) -> None:
- while True:
- await asyncio.sleep(check_timedelta.total_seconds())
- callback()
-
-
-def bind_socket(host: str, port: int) -> socket.socket:
- sock = socket.socket(family=socket.AF_INET)
- sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
- sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
-
- try:
- sock.bind((host, port))
- except OSError as exc:
- log.error(exc)
- sys.exit(1)
-
- sock.set_inheritable(True)
- return sock
-
-
-def run_server(frontik_app: FrontikApplication, sock: Optional[socket.socket] = None) -> Awaitable:
+def run_server(app: FrontikApplication) -> None:
"""Starts Frontik server for an application"""
loop = asyncio.get_event_loop()
log.info('starting server on %s:%s', options.host, options.port)
-
- anyio.to_thread.run_sync = anyio_noop
- import_all_pages(frontik_app.app_module_name)
- fastapi_app = frontik_app.fastapi_app
- setattr(fastapi_app, 'frontik_app', frontik_app)
- for router in routers:
- fastapi_app.include_router(router)
-
- # because on idx=0 we have OpenTelemetryMiddleware
- fastapi_app.user_middleware.insert(1, Middleware(RequestCancelledMiddleware))
- fastapi_app.user_middleware.insert(1, Middleware(RoutingMiddleware)) # should be last, because it ignores call_next
-
- config = uvicorn.Config(
- fastapi_app,
- host=options.host,
- port=options.port,
- log_level='critical',
- loop='none',
- log_config=None,
- access_log=False,
- server_header=False,
- lifespan='off',
- )
- server = uvicorn.Server(config)
+ http_server = HTTPServer(app, xheaders=options.xheaders)
+ http_server.bind(options.port, options.host, reuse_port=options.reuse_port)
+ http_server.start()
if options.autoreload:
- check_timedelta = timedelta(milliseconds=500)
- modify_times: dict[str, float] = {}
- reload = partial(tornado.autoreload._reload_on_update, modify_times)
-
- server_task = asyncio.gather(server._serve(), periodic_task(reload, check_timedelta))
- else:
- if sock is None:
- sock = bind_socket(options.host, options.port)
- server_task = loop.create_task(server._serve([sock])) # type: ignore
+ tornado.autoreload.start(1000)
def worker_sigterm_handler(_signum, _frame):
log.info('requested shutdown, shutting down server on %s:%d', options.host, options.port)
@@ -189,42 +133,35 @@ def worker_sigterm_handler(_signum, _frame):
loop.call_soon_threadsafe(server_stop)
def server_stop():
- log.info('going down in %s seconds', options.stop_timeout)
+ deinit_task = loop.create_task(_deinit_app(app))
+ http_server.stop()
- def ioloop_stop(_deinit_task):
- if loop.is_running():
- log.info('stopping IOLoop')
- loop.stop()
- log.info('stopped')
+ if loop.is_running():
+ log.info('going down in %s seconds', options.stop_timeout)
- deinit_task = loop.create_task(_deinit_app(frontik_app, server))
- deinit_task.add_done_callback(ioloop_stop)
+ def ioloop_stop(_deinit_task):
+ if loop.is_running():
+ log.info('stopping IOLoop')
+ loop.stop()
+ log.info('stopped')
+
+ deinit_task.add_done_callback(ioloop_stop)
signal.signal(signal.SIGTERM, worker_sigterm_handler)
signal.signal(signal.SIGINT, worker_sigterm_handler)
- return server_task
-
async def _init_app(frontik_app: FrontikApplication) -> None:
await frontik_app.init()
- server_task = run_server(frontik_app)
+ run_server(frontik_app)
log.info('Successfully inited application %s', frontik_app.app_name)
with frontik_app.worker_state.count_down_lock:
frontik_app.worker_state.init_workers_count_down.value -= 1
log.info('worker is up, remaining workers = %s', frontik_app.worker_state.init_workers_count_down.value)
- await server_task
-
-
-async def kill_server(app: FrontikApplication, server: uvicorn.Server) -> None:
- await asyncio.sleep(options.stop_timeout)
- if app.http_client is not None:
- await app.http_client.client_session.close()
- server.should_exit = True
-async def _deinit_app(app: FrontikApplication, server: uvicorn.Server) -> None:
- deinit_futures: list[Optional[Union[Future, Coroutine]]] = [kill_server(app, server)]
+async def _deinit_app(app: FrontikApplication) -> None:
+ deinit_futures: list[Optional[Union[Future, Coroutine]]] = []
deinit_futures.extend([integration.deinitialize_app(app) for integration in app.available_integrations])
app.upstream_manager.deregister_service_and_close()
diff --git a/frontik/testing.py b/frontik/testing.py
index 221bf28cb..3d1245aa2 100644
--- a/frontik/testing.py
+++ b/frontik/testing.py
@@ -10,6 +10,7 @@
from http_client.request_response import RequestBuilder, RequestResult
from lxml import etree
from tornado.escape import utf8
+from tornado.httpserver import HTTPServer
from tornado_mock.httpclient import patch_http_client, set_stub
from yarl import URL
@@ -17,8 +18,7 @@
from frontik.loggers import bootstrap_core_logging
from frontik.media_types import APPLICATION_JSON, APPLICATION_PROTOBUF, APPLICATION_XML, TEXT_PLAIN
from frontik.options import options
-from frontik.server import bind_socket, run_server
-from frontik.util import make_url, safe_template
+from frontik.util import bind_socket, make_url, safe_template
class FrontikTestBase:
@@ -40,13 +40,13 @@ async def _run_server(self, frontik_app):
await frontik_app.init()
- async def _server_coro() -> None:
- await run_server(frontik_app, sock)
+ http_server = HTTPServer(frontik_app)
+ http_server.add_sockets([sock])
- server_task = asyncio.create_task(_server_coro())
yield
- server_task.cancel()
- await asyncio.wait_for(frontik_app.http_client.client_session.close(), timeout=5)
+
+ http_server.stop()
+ await asyncio.wait_for(http_server.close_all_connections(), timeout=5)
@pytest.fixture(scope='class')
def with_tornado_mocks(self):
diff --git a/frontik/timeout_tracking.py b/frontik/timeout_tracking.py
index 9dbf52edf..8272354e5 100644
--- a/frontik/timeout_tracking.py
+++ b/frontik/timeout_tracking.py
@@ -112,7 +112,7 @@ def check(self, request: RequestBuilder) -> None:
def get_timeout_checker(
outer_caller: Optional[str],
outer_timeout_ms: float,
- time_since_outer_request_start_ms_supplier: float,
+ request_start_time: float,
*,
threshold_ms: float = 100,
) -> TimeoutChecker:
@@ -120,6 +120,6 @@ def get_timeout_checker(
return TimeoutChecker(
outer_caller,
outer_timeout_ms,
- time_since_outer_request_start_ms_supplier,
+ request_start_time,
threshold_ms=threshold_ms,
)
diff --git a/frontik/util.py b/frontik/util.py
index 400088c7e..b1df2d79e 100644
--- a/frontik/util.py
+++ b/frontik/util.py
@@ -6,9 +6,11 @@
import os.path
import random
import re
+import socket
+import sys
from string import Template
from typing import TYPE_CHECKING, Optional
-from urllib.parse import parse_qs, urlencode
+from urllib.parse import urlencode
from uuid import uuid4
from http_client.util import any_to_bytes, any_to_unicode, to_unicode
@@ -17,6 +19,8 @@
if TYPE_CHECKING:
from typing import Any
+ from tornado.web import httputil
+
from frontik.handler import PageHandler
logger = logging.getLogger('util')
@@ -82,7 +86,19 @@ def choose_boundary():
def get_cookie_or_url_param_value(handler: PageHandler, param_name: str) -> Optional[str]:
- return handler.get_query_argument(param_name, handler.get_cookie(param_name, None))
+ return handler.get_argument(param_name, handler.get_cookie(param_name, None))
+
+
+def get_cookie_or_param_from_request(tornado_request: httputil.HTTPServerRequest, param_name: str) -> Optional[str]:
+ query = tornado_request.query_arguments.get(param_name)
+ if query:
+ return query[-1].decode()
+
+ cookie = tornado_request.cookies.get('debug', None)
+ if cookie:
+ return cookie.value
+
+ return None
def reverse_regex_named_groups(pattern: str, *args: Any, **kwargs: Any) -> str:
@@ -149,11 +165,18 @@ async def gather_dict(coro_dict: dict) -> dict:
return dict(zip(coro_dict.keys(), results))
-def tornado_parse_qs_bytes(
- qs: bytes, keep_blank_values: bool = False, strict_parsing: bool = False
-) -> dict[str, list[bytes]]:
- result = parse_qs(qs.decode('latin1'), keep_blank_values, strict_parsing, encoding='latin1', errors='strict')
- encoded = {}
- for key, values in result.items():
- encoded[key] = [item.encode('latin1') for item in values]
- return encoded
+def bind_socket(host: str, port: int) -> socket.socket:
+ sock = socket.socket(family=socket.AF_INET)
+ sock.setblocking(False)
+ sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
+ sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
+
+ try:
+ sock.bind((host, port))
+ except OSError as exc:
+ logger.error(exc)
+ sys.exit(1)
+
+ sock.set_inheritable(True)
+ sock.listen()
+ return sock
diff --git a/poetry.lock b/poetry.lock
index 00abaa333..af39c14b2 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -156,17 +156,17 @@ zstd = ["zstandard"]
[[package]]
name = "aioresponses"
-version = "0.7.4"
+version = "0.7.6"
description = "Mock out requests made by ClientSession from aiohttp package"
optional = false
python-versions = "*"
files = [
- {file = "aioresponses-0.7.4-py2.py3-none-any.whl", hash = "sha256:1160486b5ea96fcae6170cf2bdef029b9d3a283b7dbeabb3d7f1182769bfb6b7"},
- {file = "aioresponses-0.7.4.tar.gz", hash = "sha256:9b8c108b36354c04633bad0ea752b55d956a7602fe3e3234b939fc44af96f1d8"},
+ {file = "aioresponses-0.7.6-py2.py3-none-any.whl", hash = "sha256:d2c26defbb9b440ea2685ec132e90700907fd10bcca3e85ec2f157219f0d26f7"},
+ {file = "aioresponses-0.7.6.tar.gz", hash = "sha256:f795d9dbda2d61774840e7e32f5366f45752d1adc1b74c9362afd017296c7ee1"},
]
[package.dependencies]
-aiohttp = ">=2.0.0,<4.0.0"
+aiohttp = ">=3.3.0,<4.0.0"
[[package]]
name = "aiosignal"
@@ -261,17 +261,6 @@ tests = ["attrs[tests-no-zope]", "zope-interface"]
tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"]
tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"]
-[[package]]
-name = "backoff"
-version = "2.2.1"
-description = "Function decoration for backoff and retry"
-optional = false
-python-versions = ">=3.7,<4.0"
-files = [
- {file = "backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8"},
- {file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"},
-]
-
[[package]]
name = "cachetools"
version = "5.3.3"
@@ -319,20 +308,6 @@ files = [
[package.extras]
unicode-backport = ["unicodedata2"]
-[[package]]
-name = "click"
-version = "8.1.7"
-description = "Composable command line interface toolkit"
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"},
- {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"},
-]
-
-[package.dependencies]
-colorama = {version = "*", markers = "platform_system == \"Windows\""}
-
[[package]]
name = "colorama"
version = "0.4.6"
@@ -408,18 +383,18 @@ all = ["email-validator (>=2.0.0)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)"
[[package]]
name = "filelock"
-version = "3.15.1"
+version = "3.15.4"
description = "A platform independent file lock."
optional = false
python-versions = ">=3.8"
files = [
- {file = "filelock-3.15.1-py3-none-any.whl", hash = "sha256:71b3102950e91dfc1bb4209b64be4dc8854f40e5f534428d8684f953ac847fac"},
- {file = "filelock-3.15.1.tar.gz", hash = "sha256:58a2549afdf9e02e10720eaa4d4470f56386d7a6f72edd7d0596337af8ed7ad8"},
+ {file = "filelock-3.15.4-py3-none-any.whl", hash = "sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7"},
+ {file = "filelock-3.15.4.tar.gz", hash = "sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb"},
]
[package.extras]
docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"]
-testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-asyncio (>=0.21)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"]
+testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-asyncio (>=0.21)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)", "virtualenv (>=20.26.2)"]
typing = ["typing-extensions (>=4.8)"]
[[package]]
@@ -510,17 +485,17 @@ files = [
[[package]]
name = "googleapis-common-protos"
-version = "1.63.1"
+version = "1.63.2"
description = "Common protobufs used in Google APIs"
optional = false
python-versions = ">=3.7"
files = [
- {file = "googleapis-common-protos-1.63.1.tar.gz", hash = "sha256:c6442f7a0a6b2a80369457d79e6672bb7dcbaab88e0848302497e3ec80780a6a"},
- {file = "googleapis_common_protos-1.63.1-py2.py3-none-any.whl", hash = "sha256:0e1c2cdfcbc354b76e4a211a35ea35d6926a835cba1377073c4861db904a1877"},
+ {file = "googleapis-common-protos-1.63.2.tar.gz", hash = "sha256:27c5abdffc4911f28101e635de1533fb4cfd2c37fbaa9174587c799fac90aa87"},
+ {file = "googleapis_common_protos-1.63.2-py2.py3-none-any.whl", hash = "sha256:27a2499c7e8aff199665b22741997e485eccc8645aa9176c7c988e6fae507945"},
]
[package.dependencies]
-protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0.dev0"
+protobuf = ">=3.20.2,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0.dev0"
[package.extras]
grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"]
@@ -583,20 +558,9 @@ files = [
[package.extras]
protobuf = ["grpcio-tools (>=1.64.1)"]
-[[package]]
-name = "h11"
-version = "0.14.0"
-description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1"
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"},
- {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"},
-]
-
[[package]]
name = "http-client"
-version = "2.1.13"
+version = "2.1.14"
description = "Balancing http client around aiohttp"
optional = false
python-versions = "~=3.9"
@@ -612,8 +576,8 @@ yarl = "1.9.2"
[package.source]
type = "git"
url = "https://github.com/hhru/balancing-http-client.git"
-reference = "2.1.13"
-resolved_reference = "9f503bf815262df536ebe63bf60396045e6d6271"
+reference = "2.1.14"
+resolved_reference = "a64a31f40bf632feec2da6d210d1123006738dff"
[[package]]
name = "idna"
@@ -628,22 +592,22 @@ files = [
[[package]]
name = "importlib-metadata"
-version = "6.0.1"
+version = "7.1.0"
description = "Read metadata from Python packages"
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
files = [
- {file = "importlib_metadata-6.0.1-py3-none-any.whl", hash = "sha256:1543daade821c89b1c4a55986c326f36e54f2e6ca3bad96be4563d0acb74dcd4"},
- {file = "importlib_metadata-6.0.1.tar.gz", hash = "sha256:950127d57e35a806d520817d3e92eec3f19fdae9f0cd99da77a407c5aabefba3"},
+ {file = "importlib_metadata-7.1.0-py3-none-any.whl", hash = "sha256:30962b96c0c223483ed6cc7280e7f0199feb01a0e40cfae4d4450fc6fab1f570"},
+ {file = "importlib_metadata-7.1.0.tar.gz", hash = "sha256:b78938b926ee8d5f020fc4772d487045805a55ddbad2ecf21c6d60938dc7fcd2"},
]
[package.dependencies]
zipp = ">=0.5"
[package.extras]
-docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
+docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
perf = ["ipython"]
-testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)"]
+testing = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"]
[[package]]
name = "iniconfig"
@@ -1020,51 +984,62 @@ files = [
[[package]]
name = "opentelemetry-api"
-version = "1.17.0"
+version = "1.25.0"
description = "OpenTelemetry Python API"
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
files = [
- {file = "opentelemetry_api-1.17.0-py3-none-any.whl", hash = "sha256:b41d9b2a979607b75d2683b9bbf97062a683d190bc696969fb2122fa60aeaabc"},
- {file = "opentelemetry_api-1.17.0.tar.gz", hash = "sha256:3480fcf6b783be5d440a226a51db979ccd7c49a2e98d1c747c991031348dcf04"},
+ {file = "opentelemetry_api-1.25.0-py3-none-any.whl", hash = "sha256:757fa1aa020a0f8fa139f8959e53dec2051cc26b832e76fa839a6d76ecefd737"},
+ {file = "opentelemetry_api-1.25.0.tar.gz", hash = "sha256:77c4985f62f2614e42ce77ee4c9da5fa5f0bc1e1821085e9a47533a9323ae869"},
]
[package.dependencies]
deprecated = ">=1.2.6"
-importlib-metadata = ">=6.0.0,<6.1.0"
-setuptools = ">=16.0"
+importlib-metadata = ">=6.0,<=7.1"
+
+[[package]]
+name = "opentelemetry-exporter-otlp-proto-common"
+version = "1.25.0"
+description = "OpenTelemetry Protobuf encoding"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "opentelemetry_exporter_otlp_proto_common-1.25.0-py3-none-any.whl", hash = "sha256:15637b7d580c2675f70246563363775b4e6de947871e01d0f4e3881d1848d693"},
+ {file = "opentelemetry_exporter_otlp_proto_common-1.25.0.tar.gz", hash = "sha256:c93f4e30da4eee02bacd1e004eb82ce4da143a2f8e15b987a9f603e0a85407d3"},
+]
+
+[package.dependencies]
+opentelemetry-proto = "1.25.0"
[[package]]
name = "opentelemetry-exporter-otlp-proto-grpc"
-version = "1.17.0"
+version = "1.25.0"
description = "OpenTelemetry Collector Protobuf over gRPC Exporter"
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
files = [
- {file = "opentelemetry_exporter_otlp_proto_grpc-1.17.0-py3-none-any.whl", hash = "sha256:192d781b668a74edb49152b8b5f4f7e25bcb4307a9cf4b2dfcf87e68feac98bd"},
- {file = "opentelemetry_exporter_otlp_proto_grpc-1.17.0.tar.gz", hash = "sha256:f01476ae89484bc6210e50d7a4d93c293b3a12aff562253b94f588a85af13f70"},
+ {file = "opentelemetry_exporter_otlp_proto_grpc-1.25.0-py3-none-any.whl", hash = "sha256:3131028f0c0a155a64c430ca600fd658e8e37043cb13209f0109db5c1a3e4eb4"},
+ {file = "opentelemetry_exporter_otlp_proto_grpc-1.25.0.tar.gz", hash = "sha256:c0b1661415acec5af87625587efa1ccab68b873745ca0ee96b69bb1042087eac"},
]
[package.dependencies]
-backoff = {version = ">=1.10.0,<3.0.0", markers = "python_version >= \"3.7\""}
+deprecated = ">=1.2.6"
googleapis-common-protos = ">=1.52,<2.0"
grpcio = ">=1.0.0,<2.0.0"
opentelemetry-api = ">=1.15,<2.0"
-opentelemetry-proto = "1.17.0"
-opentelemetry-sdk = ">=1.17.0,<1.18.0"
-
-[package.extras]
-test = ["pytest-grpc"]
+opentelemetry-exporter-otlp-proto-common = "1.25.0"
+opentelemetry-proto = "1.25.0"
+opentelemetry-sdk = ">=1.25.0,<1.26.0"
[[package]]
name = "opentelemetry-instrumentation"
-version = "0.38b0"
+version = "0.46b0"
description = "Instrumentation Tools & Auto Instrumentation for OpenTelemetry Python"
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
files = [
- {file = "opentelemetry_instrumentation-0.38b0-py3-none-any.whl", hash = "sha256:48eed87e5db9d2cddd57a8ea359bd15318560c0ffdd80d90a5fc65816e15b7f4"},
- {file = "opentelemetry_instrumentation-0.38b0.tar.gz", hash = "sha256:3dbe93248eec7652d5725d3c6d2f9dd048bb8fda6b0505aadbc99e51638d833c"},
+ {file = "opentelemetry_instrumentation-0.46b0-py3-none-any.whl", hash = "sha256:89cd721b9c18c014ca848ccd11181e6b3fd3f6c7669e35d59c48dc527408c18b"},
+ {file = "opentelemetry_instrumentation-0.46b0.tar.gz", hash = "sha256:974e0888fb2a1e01c38fbacc9483d024bb1132aad92d6d24e2e5543887a7adda"},
]
[package.dependencies]
@@ -1074,79 +1049,96 @@ wrapt = ">=1.0.0,<2.0.0"
[[package]]
name = "opentelemetry-instrumentation-aiohttp-client"
-version = "0.38b0"
+version = "0.46b0"
description = "OpenTelemetry aiohttp client instrumentation"
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
files = [
- {file = "opentelemetry_instrumentation_aiohttp_client-0.38b0-py3-none-any.whl", hash = "sha256:093987f5c96518ac6999eb7480af168655bc3538752ae67d4d9a5807eaad1ee0"},
- {file = "opentelemetry_instrumentation_aiohttp_client-0.38b0.tar.gz", hash = "sha256:9c3e637e742b5d8e5c8a76fae4f3812dde5e58f85598d119abd0149cb1c82ec0"},
+ {file = "opentelemetry_instrumentation_aiohttp_client-0.46b0-py3-none-any.whl", hash = "sha256:e0562fbabaf5cf6dd39a391827386f33d0b12edb4c8a6b6f0c361cbc2fa0b6b8"},
+ {file = "opentelemetry_instrumentation_aiohttp_client-0.46b0.tar.gz", hash = "sha256:18c9cf8631cd6fe75376a84c6a1190f87085d184e92d4bbbdcd64a535e3a7e22"},
]
[package.dependencies]
opentelemetry-api = ">=1.12,<2.0"
-opentelemetry-instrumentation = "0.38b0"
-opentelemetry-semantic-conventions = "0.38b0"
-opentelemetry-util-http = "0.38b0"
+opentelemetry-instrumentation = "0.46b0"
+opentelemetry-semantic-conventions = "0.46b0"
+opentelemetry-util-http = "0.46b0"
wrapt = ">=1.0.0,<2.0.0"
[package.extras]
instruments = ["aiohttp (>=3.0,<4.0)"]
-test = ["opentelemetry-instrumentation-aiohttp-client[instruments]"]
[[package]]
name = "opentelemetry-instrumentation-asgi"
-version = "0.38b0"
+version = "0.46b0"
description = "ASGI instrumentation for OpenTelemetry"
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
files = [
- {file = "opentelemetry_instrumentation_asgi-0.38b0-py3-none-any.whl", hash = "sha256:c5bba11505008a3cd1b2c42b72f85f3f4f5af50ab931eddd0b01bde376dc5971"},
- {file = "opentelemetry_instrumentation_asgi-0.38b0.tar.gz", hash = "sha256:32d1034c253de6048d0d0166b304f9125267ca9329e374202ebe011a206eba53"},
+ {file = "opentelemetry_instrumentation_asgi-0.46b0-py3-none-any.whl", hash = "sha256:f13c55c852689573057837a9500aeeffc010c4ba59933c322e8f866573374759"},
+ {file = "opentelemetry_instrumentation_asgi-0.46b0.tar.gz", hash = "sha256:02559f30cf4b7e2a737ab17eb52aa0779bcf4cc06573064f3e2cb4dcc7d3040a"},
]
[package.dependencies]
asgiref = ">=3.0,<4.0"
opentelemetry-api = ">=1.12,<2.0"
-opentelemetry-instrumentation = "0.38b0"
-opentelemetry-semantic-conventions = "0.38b0"
-opentelemetry-util-http = "0.38b0"
+opentelemetry-instrumentation = "0.46b0"
+opentelemetry-semantic-conventions = "0.46b0"
+opentelemetry-util-http = "0.46b0"
[package.extras]
instruments = ["asgiref (>=3.0,<4.0)"]
-test = ["opentelemetry-instrumentation-asgi[instruments]", "opentelemetry-test-utils (==0.38b0)"]
[[package]]
name = "opentelemetry-instrumentation-fastapi"
-version = "0.38b0"
+version = "0.46b0"
description = "OpenTelemetry FastAPI Instrumentation"
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
files = [
- {file = "opentelemetry_instrumentation_fastapi-0.38b0-py3-none-any.whl", hash = "sha256:91139586732e437b1c3d5cf838dc5be910bce27b4b679612112be03fcc4fa2aa"},
- {file = "opentelemetry_instrumentation_fastapi-0.38b0.tar.gz", hash = "sha256:8946fd414084b305ad67556a1907e2d4a497924d023effc5ea3b4b1b0c55b256"},
+ {file = "opentelemetry_instrumentation_fastapi-0.46b0-py3-none-any.whl", hash = "sha256:e0f5d150c6c36833dd011f0e6ef5ede6d7406c1aed0c7c98b2d3b38a018d1b33"},
+ {file = "opentelemetry_instrumentation_fastapi-0.46b0.tar.gz", hash = "sha256:928a883a36fc89f9702f15edce43d1a7104da93d740281e32d50ffd03dbb4365"},
]
[package.dependencies]
opentelemetry-api = ">=1.12,<2.0"
-opentelemetry-instrumentation = "0.38b0"
-opentelemetry-instrumentation-asgi = "0.38b0"
-opentelemetry-semantic-conventions = "0.38b0"
-opentelemetry-util-http = "0.38b0"
+opentelemetry-instrumentation = "0.46b0"
+opentelemetry-instrumentation-asgi = "0.46b0"
+opentelemetry-semantic-conventions = "0.46b0"
+opentelemetry-util-http = "0.46b0"
[package.extras]
instruments = ["fastapi (>=0.58,<1.0)"]
-test = ["httpx (>=0.22,<1.0)", "opentelemetry-instrumentation-fastapi[instruments]", "opentelemetry-test-utils (==0.38b0)", "requests (>=2.23,<3.0)"]
+
+[[package]]
+name = "opentelemetry-instrumentation-tornado"
+version = "0.46b0"
+description = "Tornado instrumentation for OpenTelemetry"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "opentelemetry_instrumentation_tornado-0.46b0-py3-none-any.whl", hash = "sha256:e0c933087a9fa74c1918a3a971ba09903762e6d30a3a0e9998c261cc11a96fa9"},
+ {file = "opentelemetry_instrumentation_tornado-0.46b0.tar.gz", hash = "sha256:3369a20c57eb9ee6846de11b192403a2c25a1a56c5ab66e03178d2b6c10bddc2"},
+]
+
+[package.dependencies]
+opentelemetry-api = ">=1.12,<2.0"
+opentelemetry-instrumentation = "0.46b0"
+opentelemetry-semantic-conventions = "0.46b0"
+opentelemetry-util-http = "0.46b0"
+
+[package.extras]
+instruments = ["tornado (>=5.1.1)"]
[[package]]
name = "opentelemetry-proto"
-version = "1.17.0"
+version = "1.25.0"
description = "OpenTelemetry Python Proto"
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
files = [
- {file = "opentelemetry_proto-1.17.0-py3-none-any.whl", hash = "sha256:c7c0f748668102598e84ca4d51975f87ebf66865aa7469fc2c5e8bdaab813e93"},
- {file = "opentelemetry_proto-1.17.0.tar.gz", hash = "sha256:8501fdc3bc76c03a2ed11603a4d9fce6e5a97eeaebd7a20ad84bba7bd79cc9f8"},
+ {file = "opentelemetry_proto-1.25.0-py3-none-any.whl", hash = "sha256:f07e3341c78d835d9b86665903b199893befa5e98866f63d22b00d0b7ca4972f"},
+ {file = "opentelemetry_proto-1.25.0.tar.gz", hash = "sha256:35b6ef9dc4a9f7853ecc5006738ad40443701e52c26099e197895cbda8b815a3"},
]
[package.dependencies]
@@ -1154,41 +1146,43 @@ protobuf = ">=3.19,<5.0"
[[package]]
name = "opentelemetry-sdk"
-version = "1.17.0"
+version = "1.25.0"
description = "OpenTelemetry Python SDK"
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
files = [
- {file = "opentelemetry_sdk-1.17.0-py3-none-any.whl", hash = "sha256:07424cbcc8c012bc120ed573d5443e7322f3fb393512e72866c30111070a8c37"},
- {file = "opentelemetry_sdk-1.17.0.tar.gz", hash = "sha256:99bb9a787006774f865a4b24f8179900347d03a214c362a6cb70191f77dd6132"},
+ {file = "opentelemetry_sdk-1.25.0-py3-none-any.whl", hash = "sha256:d97ff7ec4b351692e9d5a15af570c693b8715ad78b8aafbec5c7100fe966b4c9"},
+ {file = "opentelemetry_sdk-1.25.0.tar.gz", hash = "sha256:ce7fc319c57707ef5bf8b74fb9f8ebdb8bfafbe11898410e0d2a761d08a98ec7"},
]
[package.dependencies]
-opentelemetry-api = "1.17.0"
-opentelemetry-semantic-conventions = "0.38b0"
-setuptools = ">=16.0"
+opentelemetry-api = "1.25.0"
+opentelemetry-semantic-conventions = "0.46b0"
typing-extensions = ">=3.7.4"
[[package]]
name = "opentelemetry-semantic-conventions"
-version = "0.38b0"
+version = "0.46b0"
description = "OpenTelemetry Semantic Conventions"
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
files = [
- {file = "opentelemetry_semantic_conventions-0.38b0-py3-none-any.whl", hash = "sha256:b0ba36e8b70bfaab16ee5a553d809309cc11ff58aec3d2550d451e79d45243a7"},
- {file = "opentelemetry_semantic_conventions-0.38b0.tar.gz", hash = "sha256:37f09e47dd5fc316658bf9ee9f37f9389b21e708faffa4a65d6a3de484d22309"},
+ {file = "opentelemetry_semantic_conventions-0.46b0-py3-none-any.whl", hash = "sha256:6daef4ef9fa51d51855d9f8e0ccd3a1bd59e0e545abe99ac6203804e36ab3e07"},
+ {file = "opentelemetry_semantic_conventions-0.46b0.tar.gz", hash = "sha256:fbc982ecbb6a6e90869b15c1673be90bd18c8a56ff1cffc0864e38e2edffaefa"},
]
+[package.dependencies]
+opentelemetry-api = "1.25.0"
+
[[package]]
name = "opentelemetry-util-http"
-version = "0.38b0"
+version = "0.46b0"
description = "Web util for OpenTelemetry"
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
files = [
- {file = "opentelemetry_util_http-0.38b0-py3-none-any.whl", hash = "sha256:8e5f0451eeb5307b2c628dd799886adc5e113fb13a7207c29c672e8d168eabd8"},
- {file = "opentelemetry_util_http-0.38b0.tar.gz", hash = "sha256:85eb032b6129c4d7620583acf574e99fe2e73c33d60e256b54af436f76ceb5ae"},
+ {file = "opentelemetry_util_http-0.46b0-py3-none-any.whl", hash = "sha256:8dc1949ce63caef08db84ae977fdc1848fe6dc38e6bbaad0ae3e6ecd0d451629"},
+ {file = "opentelemetry_util_http-0.46b0.tar.gz", hash = "sha256:03b6e222642f9c7eae58d9132343e045b50aca9761fcb53709bd2b663571fdf6"},
]
[[package]]
@@ -1431,22 +1425,22 @@ typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0"
[[package]]
name = "pyproject-api"
-version = "1.6.1"
+version = "1.7.1"
description = "API to interact with the python pyproject.toml based projects"
optional = false
python-versions = ">=3.8"
files = [
- {file = "pyproject_api-1.6.1-py3-none-any.whl", hash = "sha256:4c0116d60476b0786c88692cf4e325a9814965e2469c5998b830bba16b183675"},
- {file = "pyproject_api-1.6.1.tar.gz", hash = "sha256:1817dc018adc0d1ff9ca1ed8c60e1623d5aaca40814b953af14a9cf9a5cae538"},
+ {file = "pyproject_api-1.7.1-py3-none-any.whl", hash = "sha256:2dc1654062c2b27733d8fd4cdda672b22fe8741ef1dde8e3a998a9547b071eeb"},
+ {file = "pyproject_api-1.7.1.tar.gz", hash = "sha256:7ebc6cd10710f89f4cf2a2731710a98abce37ebff19427116ff2174c9236a827"},
]
[package.dependencies]
-packaging = ">=23.1"
+packaging = ">=24.1"
tomli = {version = ">=2.0.1", markers = "python_version < \"3.11\""}
[package.extras]
-docs = ["furo (>=2023.8.19)", "sphinx (<7.2)", "sphinx-autodoc-typehints (>=1.24)"]
-testing = ["covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)", "setuptools (>=68.1.2)", "wheel (>=0.41.2)"]
+docs = ["furo (>=2024.5.6)", "sphinx-autodoc-typehints (>=2.2.1)"]
+testing = ["covdefaults (>=2.3)", "pytest (>=8.2.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "setuptools (>=70.1)"]
[[package]]
name = "pytest"
@@ -1562,13 +1556,13 @@ files = [
[[package]]
name = "sentry-sdk"
-version = "2.1.1"
+version = "2.7.0"
description = "Python client for Sentry (https://sentry.io)"
optional = false
python-versions = ">=3.6"
files = [
- {file = "sentry_sdk-2.1.1-py2.py3-none-any.whl", hash = "sha256:99aeb78fb76771513bd3b2829d12613130152620768d00cd3e45ac00cb17950f"},
- {file = "sentry_sdk-2.1.1.tar.gz", hash = "sha256:95d8c0bb41c8b0bc37ab202c2c4a295bb84398ee05f4cdce55051cd75b926ec1"},
+ {file = "sentry_sdk-2.7.0-py2.py3-none-any.whl", hash = "sha256:db9594c27a4d21c1ebad09908b1f0dc808ef65c2b89c1c8e7e455143262e37c1"},
+ {file = "sentry_sdk-2.7.0.tar.gz", hash = "sha256:d846a211d4a0378b289ced3c434480945f110d0ede00450ba631fc2852e7a0d4"},
]
[package.dependencies]
@@ -1590,7 +1584,7 @@ django = ["django (>=1.8)"]
falcon = ["falcon (>=1.4)"]
fastapi = ["fastapi (>=0.79.0)"]
flask = ["blinker (>=1.1)", "flask (>=0.11)", "markupsafe"]
-grpcio = ["grpcio (>=1.21.1)"]
+grpcio = ["grpcio (>=1.21.1)", "protobuf (>=3.8.0)"]
httpx = ["httpx (>=0.16.0)"]
huey = ["huey (>=2)"]
huggingface-hub = ["huggingface-hub (>=0.22)"]
@@ -1598,7 +1592,7 @@ langchain = ["langchain (>=0.0.210)"]
loguru = ["loguru (>=0.5)"]
openai = ["openai (>=1.0.0)", "tiktoken (>=0.3.0)"]
opentelemetry = ["opentelemetry-distro (>=0.35b0)"]
-opentelemetry-experimental = ["opentelemetry-distro (>=0.40b0,<1.0)", "opentelemetry-instrumentation-aiohttp-client (>=0.40b0,<1.0)", "opentelemetry-instrumentation-django (>=0.40b0,<1.0)", "opentelemetry-instrumentation-fastapi (>=0.40b0,<1.0)", "opentelemetry-instrumentation-flask (>=0.40b0,<1.0)", "opentelemetry-instrumentation-requests (>=0.40b0,<1.0)", "opentelemetry-instrumentation-sqlite3 (>=0.40b0,<1.0)", "opentelemetry-instrumentation-urllib (>=0.40b0,<1.0)"]
+opentelemetry-experimental = ["opentelemetry-instrumentation-aio-pika (==0.46b0)", "opentelemetry-instrumentation-aiohttp-client (==0.46b0)", "opentelemetry-instrumentation-aiopg (==0.46b0)", "opentelemetry-instrumentation-asgi (==0.46b0)", "opentelemetry-instrumentation-asyncio (==0.46b0)", "opentelemetry-instrumentation-asyncpg (==0.46b0)", "opentelemetry-instrumentation-aws-lambda (==0.46b0)", "opentelemetry-instrumentation-boto (==0.46b0)", "opentelemetry-instrumentation-boto3sqs (==0.46b0)", "opentelemetry-instrumentation-botocore (==0.46b0)", "opentelemetry-instrumentation-cassandra (==0.46b0)", "opentelemetry-instrumentation-celery (==0.46b0)", "opentelemetry-instrumentation-confluent-kafka (==0.46b0)", "opentelemetry-instrumentation-dbapi (==0.46b0)", "opentelemetry-instrumentation-django (==0.46b0)", "opentelemetry-instrumentation-elasticsearch (==0.46b0)", "opentelemetry-instrumentation-falcon (==0.46b0)", "opentelemetry-instrumentation-fastapi (==0.46b0)", "opentelemetry-instrumentation-flask (==0.46b0)", "opentelemetry-instrumentation-grpc (==0.46b0)", "opentelemetry-instrumentation-httpx (==0.46b0)", "opentelemetry-instrumentation-jinja2 (==0.46b0)", "opentelemetry-instrumentation-kafka-python (==0.46b0)", "opentelemetry-instrumentation-logging (==0.46b0)", "opentelemetry-instrumentation-mysql (==0.46b0)", "opentelemetry-instrumentation-mysqlclient (==0.46b0)", "opentelemetry-instrumentation-pika (==0.46b0)", "opentelemetry-instrumentation-psycopg (==0.46b0)", "opentelemetry-instrumentation-psycopg2 (==0.46b0)", "opentelemetry-instrumentation-pymemcache (==0.46b0)", "opentelemetry-instrumentation-pymongo (==0.46b0)", "opentelemetry-instrumentation-pymysql (==0.46b0)", "opentelemetry-instrumentation-pyramid (==0.46b0)", "opentelemetry-instrumentation-redis (==0.46b0)", "opentelemetry-instrumentation-remoulade (==0.46b0)", "opentelemetry-instrumentation-requests (==0.46b0)", "opentelemetry-instrumentation-sklearn (==0.46b0)", 
"opentelemetry-instrumentation-sqlalchemy (==0.46b0)", "opentelemetry-instrumentation-sqlite3 (==0.46b0)", "opentelemetry-instrumentation-starlette (==0.46b0)", "opentelemetry-instrumentation-system-metrics (==0.46b0)", "opentelemetry-instrumentation-threading (==0.46b0)", "opentelemetry-instrumentation-tornado (==0.46b0)", "opentelemetry-instrumentation-tortoiseorm (==0.46b0)", "opentelemetry-instrumentation-urllib (==0.46b0)", "opentelemetry-instrumentation-urllib3 (==0.46b0)", "opentelemetry-instrumentation-wsgi (==0.46b0)"]
pure-eval = ["asttokens", "executing", "pure-eval"]
pymongo = ["pymongo (>=3.1)"]
pyspark = ["pyspark (>=2.4.4)"]
@@ -1612,18 +1606,18 @@ tornado = ["tornado (>=5)"]
[[package]]
name = "setuptools"
-version = "70.0.0"
+version = "70.1.1"
description = "Easily download, build, install, upgrade, and uninstall Python packages"
optional = false
python-versions = ">=3.8"
files = [
- {file = "setuptools-70.0.0-py3-none-any.whl", hash = "sha256:54faa7f2e8d2d11bcd2c07bed282eef1046b5c080d1c32add737d7b5817b1ad4"},
- {file = "setuptools-70.0.0.tar.gz", hash = "sha256:f211a66637b8fa059bb28183da127d4e86396c991a942b028c6650d4319c3fd0"},
+ {file = "setuptools-70.1.1-py3-none-any.whl", hash = "sha256:a58a8fde0541dab0419750bcc521fbdf8585f6e5cb41909df3a472ef7b81ca95"},
+ {file = "setuptools-70.1.1.tar.gz", hash = "sha256:937a48c7cdb7a21eb53cd7f9b59e525503aa8abaf3584c730dc5f7a5bec3a650"},
]
[package.extras]
docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"]
-testing = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"]
+testing = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "mypy (==1.10.0)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.3.2)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"]
[[package]]
name = "six"
@@ -1678,22 +1672,22 @@ files = [
[[package]]
name = "tornado"
-version = "6.3.2"
+version = "6.3.3"
description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed."
optional = false
python-versions = ">= 3.8"
files = [
- {file = "tornado-6.3.2-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:c367ab6c0393d71171123ca5515c61ff62fe09024fa6bf299cd1339dc9456829"},
- {file = "tornado-6.3.2-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:b46a6ab20f5c7c1cb949c72c1994a4585d2eaa0be4853f50a03b5031e964fc7c"},
- {file = "tornado-6.3.2-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c2de14066c4a38b4ecbbcd55c5cc4b5340eb04f1c5e81da7451ef555859c833f"},
- {file = "tornado-6.3.2-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:05615096845cf50a895026f749195bf0b10b8909f9be672f50b0fe69cba368e4"},
- {file = "tornado-6.3.2-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5b17b1cf5f8354efa3d37c6e28fdfd9c1c1e5122f2cb56dac121ac61baa47cbe"},
- {file = "tornado-6.3.2-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:29e71c847a35f6e10ca3b5c2990a52ce38b233019d8e858b755ea6ce4dcdd19d"},
- {file = "tornado-6.3.2-cp38-abi3-musllinux_1_1_i686.whl", hash = "sha256:834ae7540ad3a83199a8da8f9f2d383e3c3d5130a328889e4cc991acc81e87a0"},
- {file = "tornado-6.3.2-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:6a0848f1aea0d196a7c4f6772197cbe2abc4266f836b0aac76947872cd29b411"},
- {file = "tornado-6.3.2-cp38-abi3-win32.whl", hash = "sha256:7efcbcc30b7c654eb6a8c9c9da787a851c18f8ccd4a5a3a95b05c7accfa068d2"},
- {file = "tornado-6.3.2-cp38-abi3-win_amd64.whl", hash = "sha256:0c325e66c8123c606eea33084976c832aa4e766b7dff8aedd7587ea44a604cdf"},
- {file = "tornado-6.3.2.tar.gz", hash = "sha256:4b927c4f19b71e627b13f3db2324e4ae660527143f9e1f2e2fb404f3a187e2ba"},
+ {file = "tornado-6.3.3-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:502fba735c84450974fec147340016ad928d29f1e91f49be168c0a4c18181e1d"},
+ {file = "tornado-6.3.3-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:805d507b1f588320c26f7f097108eb4023bbaa984d63176d1652e184ba24270a"},
+ {file = "tornado-6.3.3-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bd19ca6c16882e4d37368e0152f99c099bad93e0950ce55e71daed74045908f"},
+ {file = "tornado-6.3.3-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ac51f42808cca9b3613f51ffe2a965c8525cb1b00b7b2d56828b8045354f76a"},
+ {file = "tornado-6.3.3-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:71a8db65160a3c55d61839b7302a9a400074c9c753040455494e2af74e2501f2"},
+ {file = "tornado-6.3.3-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:ceb917a50cd35882b57600709dd5421a418c29ddc852da8bcdab1f0db33406b0"},
+ {file = "tornado-6.3.3-cp38-abi3-musllinux_1_1_i686.whl", hash = "sha256:7d01abc57ea0dbb51ddfed477dfe22719d376119844e33c661d873bf9c0e4a16"},
+ {file = "tornado-6.3.3-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:9dc4444c0defcd3929d5c1eb5706cbe1b116e762ff3e0deca8b715d14bf6ec17"},
+ {file = "tornado-6.3.3-cp38-abi3-win32.whl", hash = "sha256:65ceca9500383fbdf33a98c0087cb975b2ef3bfb874cb35b8de8740cf7f41bd3"},
+ {file = "tornado-6.3.3-cp38-abi3-win_amd64.whl", hash = "sha256:22d3c2fa10b5793da13c807e6fc38ff49a4f6e1e3868b0a6f4164768bb8e20f5"},
+ {file = "tornado-6.3.3.tar.gz", hash = "sha256:e7d8db41c0181c80d76c982aacc442c0783a2c54d6400fe028954201a2e032fe"},
]
[[package]]
@@ -1779,25 +1773,6 @@ h2 = ["h2 (>=4,<5)"]
socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"]
zstd = ["zstandard (>=0.18.0)"]
-[[package]]
-name = "uvicorn"
-version = "0.29.0"
-description = "The lightning-fast ASGI server."
-optional = false
-python-versions = ">=3.8"
-files = [
- {file = "uvicorn-0.29.0-py3-none-any.whl", hash = "sha256:2c2aac7ff4f4365c206fd773a39bf4ebd1047c238f8b8268ad996829323473de"},
- {file = "uvicorn-0.29.0.tar.gz", hash = "sha256:6a69214c0b6a087462412670b3ef21224fa48cae0e452b5883e8e8bdfdd11dd0"},
-]
-
-[package.dependencies]
-click = ">=7.0"
-h11 = ">=0.8"
-typing-extensions = {version = ">=4.0", markers = "python_version < \"3.11\""}
-
-[package.extras]
-standard = ["colorama (>=0.4)", "httptools (>=0.5.0)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1)", "watchfiles (>=0.13)", "websockets (>=10.4)"]
-
[[package]]
name = "virtualenv"
version = "20.26.0"
@@ -2006,4 +1981,4 @@ testing = ["aioresponses", "tornado-httpclient-mock"]
[metadata]
lock-version = "2.0"
python-versions = "~=3.9"
-content-hash = "20a463510ece2dcca3a46f3ff16a08e8b41035ad75ecb22571bab9ccedeeb5a1"
+content-hash = "76a5450bc58bbbd4cc77a3f432306993e2713174498a5ea473427ead08555593"
diff --git a/pyproject.toml b/pyproject.toml
index 7ef840b3d..eb9a1d9a5 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -20,21 +20,21 @@ aiohttp = '3.8.3'
jinja2 = '3.1.2'
lxml = '4.9.2'
pydantic = '^2.3.0'
-tornado = '6.3.2'
+tornado = '6.3.3'
orjson = '*'
-http-client = {git = 'https://github.com/hhru/balancing-http-client.git', tag = '2.1.13'}
+http-client = {git = 'https://github.com/hhru/balancing-http-client.git', tag = '2.1.14'}
python-consul2-hh = {git = 'https://github.com/hhru/python-consul2', tag = 'v0.2.10'}
-opentelemetry-sdk = '1.17.0'
-opentelemetry-api = '1.17.0'
-opentelemetry-exporter-otlp-proto-grpc = '1.17.0'
-opentelemetry-instrumentation-fastapi = '0.38b0' # check monkey patches on update
-opentelemetry-instrumentation-aiohttp-client = '0.38b0'
+opentelemetry-sdk = '1.25.0'
+opentelemetry-api = '1.25.0'
+opentelemetry-exporter-otlp-proto-grpc = '1.25.0'
+opentelemetry-instrumentation-fastapi = '0.46b0'
+opentelemetry-instrumentation-aiohttp-client = '0.46b0'
+opentelemetry-instrumentation-tornado = '0.46b0'
fastapi = '0.105.0'
aiokafka = '0.8.1'
-sentry-sdk = '2.1.1'
-aioresponses = '0.7.4'
+sentry-sdk = '2.7.0'
+aioresponses = '0.7.6'
tornado-httpclient-mock = '0.2.3'
-uvicorn = '0.29.0' # check server_run on update
[tool.poetry.group.test.dependencies]
pytest = '8.1.1'
diff --git a/tests/projects/balancer_app/pages/__init__.py b/tests/projects/balancer_app/pages/__init__.py
index d95c7b5bb..22796e835 100644
--- a/tests/projects/balancer_app/pages/__init__.py
+++ b/tests/projects/balancer_app/pages/__init__.py
@@ -1,5 +1,5 @@
-from fastapi import HTTPException
from http_client.balancing import Upstream
+from tornado.web import HTTPError
from frontik.handler import PageHandler
@@ -9,18 +9,18 @@
def check_all_servers_occupied(handler: PageHandler, name: str) -> None:
servers = handler.application.upstream_manager.get_upstreams().get(name, noop_upstream).servers
if any(server.current_requests == 0 for server in servers):
- raise HTTPException(500, 'some servers are ignored')
+ raise HTTPError(500, 'some servers are ignored')
def check_all_requests_done(handler: PageHandler, name: str) -> None:
servers = handler.application.upstream_manager.get_upstreams().get(name, noop_upstream).servers
if any(server.current_requests != 0 for server in servers):
- raise HTTPException(500, 'some servers have unfinished requests')
+ raise HTTPError(500, 'some servers have unfinished requests')
def check_all_servers_were_occupied(handler: PageHandler, name: str) -> None:
servers = handler.application.upstream_manager.get_upstreams().get(name, noop_upstream).servers
if any(server.current_requests != 0 for server in servers):
- raise HTTPException(500, 'some servers are ignored')
+ raise HTTPError(500, 'some servers are ignored')
if any(server.stat_requests == 0 for server in servers):
- raise HTTPException(500, 'some servers are ignored')
+ raise HTTPError(500, 'some servers are ignored')
diff --git a/tests/projects/balancer_app/pages/different_datacenter.py b/tests/projects/balancer_app/pages/different_datacenter.py
index 9189e4d5a..4df600090 100644
--- a/tests/projects/balancer_app/pages/different_datacenter.py
+++ b/tests/projects/balancer_app/pages/different_datacenter.py
@@ -1,6 +1,6 @@
-from fastapi import HTTPException
from http_client.balancing import Upstream
from http_client.request_response import NoAvailableServerException
+from tornado.web import HTTPError
from frontik import media_types
from frontik.handler import PageHandler, get_current_handler
@@ -21,7 +21,7 @@ async def get_page(handler=get_current_handler()):
result = await handler.post_url('different_datacenter', handler.path)
for server in upstream.servers:
if server.stat_requests != 0:
- raise HTTPException(500)
+ raise HTTPError(500)
if result.exc is not None and isinstance(result.exc, NoAvailableServerException):
handler.text = 'no backend available'
diff --git a/tests/projects/balancer_app/pages/profile_with_retry.py b/tests/projects/balancer_app/pages/profile_with_retry.py
index 2ba559a78..414758955 100644
--- a/tests/projects/balancer_app/pages/profile_with_retry.py
+++ b/tests/projects/balancer_app/pages/profile_with_retry.py
@@ -1,5 +1,5 @@
-from fastapi import HTTPException
from http_client.balancing import Upstream, UpstreamConfig
+from tornado.web import HTTPError
from frontik import media_types
from frontik.handler import PageHandler, get_current_handler
@@ -25,7 +25,7 @@ async def get_page(handler=get_current_handler()):
result = await handler.put_url('profile_with_retry', handler.path, profile='profile_with_retry')
if result.failed or result.data is None:
- raise HTTPException(500)
+ raise HTTPError(500)
handler.text = result.data
diff --git a/tests/projects/balancer_app/pages/retry_connect.py b/tests/projects/balancer_app/pages/retry_connect.py
index c5d87a79b..14947108e 100644
--- a/tests/projects/balancer_app/pages/retry_connect.py
+++ b/tests/projects/balancer_app/pages/retry_connect.py
@@ -1,5 +1,5 @@
-from fastapi import HTTPException
from http_client.balancing import Upstream
+from tornado.web import HTTPError
from frontik import media_types
from frontik.handler import PageHandler, get_current_handler
@@ -30,7 +30,7 @@ async def get_page(handler: PageHandler = get_current_handler()) -> None:
for result in results:
if result.failed or result.data is None:
- raise HTTPException(500)
+ raise HTTPError(500)
handler.text = handler.text + result.data
diff --git a/tests/projects/balancer_app/pages/retry_connect_timeout.py b/tests/projects/balancer_app/pages/retry_connect_timeout.py
index 7c69a6f5f..82806e56f 100644
--- a/tests/projects/balancer_app/pages/retry_connect_timeout.py
+++ b/tests/projects/balancer_app/pages/retry_connect_timeout.py
@@ -1,5 +1,5 @@
-from fastapi import HTTPException
from http_client.balancing import Upstream
+from tornado.web import HTTPError
from frontik import media_types
from frontik.handler import PageHandler, get_current_handler
@@ -26,7 +26,7 @@ async def get_page(handler: PageHandler = get_current_handler()) -> None:
for result in results:
if result.error or result.data is None:
- raise HTTPException(500)
+ raise HTTPError(500)
handler.text = handler.text + result.data
diff --git a/tests/projects/balancer_app/pages/retry_error.py b/tests/projects/balancer_app/pages/retry_error.py
index f215a6a05..a25b0e011 100644
--- a/tests/projects/balancer_app/pages/retry_error.py
+++ b/tests/projects/balancer_app/pages/retry_error.py
@@ -1,5 +1,5 @@
-from fastapi import HTTPException
from http_client.balancing import Upstream
+from tornado.web import HTTPError
from frontik import media_types
from frontik.handler import PageHandler, get_current_handler
@@ -29,7 +29,7 @@ async def get_page(handler=get_current_handler()):
for result in results:
if result.error or result.data is None:
- raise HTTPException(500)
+ raise HTTPError(500)
handler.text = handler.text + result.data
diff --git a/tests/projects/balancer_app/pages/retry_non_idempotent_503.py b/tests/projects/balancer_app/pages/retry_non_idempotent_503.py
index 3a5e66046..3426492c2 100644
--- a/tests/projects/balancer_app/pages/retry_non_idempotent_503.py
+++ b/tests/projects/balancer_app/pages/retry_non_idempotent_503.py
@@ -1,5 +1,5 @@
-from fastapi import HTTPException
from http_client.balancing import Upstream, UpstreamConfig
+from tornado.web import HTTPError
from frontik import media_types
from frontik.handler import PageHandler, get_current_handler
@@ -30,11 +30,11 @@ async def get_page(handler=get_current_handler()):
)
if res1.error or res1.data is None:
- raise HTTPException(500)
+ raise HTTPError(500)
handler.text = res1.data
if res2.status_code != 503:
- raise HTTPException(500)
+ raise HTTPError(500)
check_all_requests_done(handler, 'retry_non_idempotent_503')
check_all_requests_done(handler, 'do_not_retry_non_idempotent_503')
diff --git a/tests/projects/balancer_app/pages/retry_on_timeout.py b/tests/projects/balancer_app/pages/retry_on_timeout.py
index e9a46310c..517d921ed 100644
--- a/tests/projects/balancer_app/pages/retry_on_timeout.py
+++ b/tests/projects/balancer_app/pages/retry_on_timeout.py
@@ -1,5 +1,5 @@
-from fastapi import HTTPException
from http_client.balancing import Upstream
+from tornado.web import HTTPError
from frontik import media_types
from frontik.handler import PageHandler, get_current_handler
@@ -26,7 +26,7 @@ async def get_page(handler=get_current_handler()):
)
if result.error or result.data is None:
- raise HTTPException(500)
+ raise HTTPError(500)
handler.text = result.data
diff --git a/tests/projects/balancer_app/pages/speculative_retry.py b/tests/projects/balancer_app/pages/speculative_retry.py
index e37f3dd18..e71b623fb 100644
--- a/tests/projects/balancer_app/pages/speculative_retry.py
+++ b/tests/projects/balancer_app/pages/speculative_retry.py
@@ -1,5 +1,5 @@
-from fastapi import HTTPException
from http_client.balancing import Upstream
+from tornado.web import HTTPError
from frontik import media_types
from frontik.handler import PageHandler, get_current_handler
@@ -26,7 +26,7 @@ async def get_page(handler=get_current_handler()):
)
if result.failed or result.data is None:
- raise HTTPException(500)
+ raise HTTPError(500)
handler.text = result.data
diff --git a/tests/projects/broken_balancer_app/pages/no_retry_error.py b/tests/projects/broken_balancer_app/pages/no_retry_error.py
index 9d61f30d1..ddec3d87a 100644
--- a/tests/projects/broken_balancer_app/pages/no_retry_error.py
+++ b/tests/projects/broken_balancer_app/pages/no_retry_error.py
@@ -1,4 +1,4 @@
-from fastapi import HTTPException
+from tornado.web import HTTPError
from frontik.handler import PageHandler
from frontik.routing import router
@@ -6,4 +6,4 @@
@router.post('/no_retry_error', cls=PageHandler)
async def post_page():
- raise HTTPException(500, 'something went wrong, no retry')
+ raise HTTPError(500, 'something went wrong, no retry')
diff --git a/tests/projects/broken_balancer_app/pages/profile_with_retry.py b/tests/projects/broken_balancer_app/pages/profile_with_retry.py
index f142f28fb..2d826e4b5 100644
--- a/tests/projects/broken_balancer_app/pages/profile_with_retry.py
+++ b/tests/projects/broken_balancer_app/pages/profile_with_retry.py
@@ -1,4 +1,4 @@
-from fastapi import HTTPException
+from tornado.web import HTTPError
from frontik.handler import PageHandler
from frontik.routing import router
@@ -6,4 +6,4 @@
@router.put('/profile_with_retry', cls=PageHandler)
async def put_page():
- raise HTTPException(503, 'broken')
+ raise HTTPError(503, 'broken')
diff --git a/tests/projects/broken_balancer_app/pages/profile_without_retry.py b/tests/projects/broken_balancer_app/pages/profile_without_retry.py
index 0fc5d8aa6..e44051a15 100644
--- a/tests/projects/broken_balancer_app/pages/profile_without_retry.py
+++ b/tests/projects/broken_balancer_app/pages/profile_without_retry.py
@@ -1,4 +1,4 @@
-from fastapi import HTTPException
+from tornado.web import HTTPError
from frontik.handler import PageHandler
from frontik.routing import router
@@ -6,4 +6,4 @@
@router.put('/profile_without_retry', cls=PageHandler)
async def put_page():
- raise HTTPException(503, 'broken')
+ raise HTTPError(503, 'broken')
diff --git a/tests/projects/broken_balancer_app/pages/retry_connect.py b/tests/projects/broken_balancer_app/pages/retry_connect.py
index a2fd8194c..a54fb1b6f 100644
--- a/tests/projects/broken_balancer_app/pages/retry_connect.py
+++ b/tests/projects/broken_balancer_app/pages/retry_connect.py
@@ -1,4 +1,4 @@
-from fastapi import HTTPException
+from tornado.web import HTTPError
from frontik.handler import PageHandler
from frontik.routing import router
@@ -6,4 +6,4 @@
@router.post('/retry_connect', cls=PageHandler)
async def post_page():
- raise HTTPException(503, 'broken, retry')
+ raise HTTPError(503, 'broken, retry')
diff --git a/tests/projects/broken_balancer_app/pages/retry_error.py b/tests/projects/broken_balancer_app/pages/retry_error.py
index d56b7cfdd..7d4c3c964 100644
--- a/tests/projects/broken_balancer_app/pages/retry_error.py
+++ b/tests/projects/broken_balancer_app/pages/retry_error.py
@@ -1,4 +1,4 @@
-from fastapi import HTTPException
+from tornado.web import HTTPError
from frontik.handler import PageHandler
from frontik.routing import router
@@ -6,4 +6,4 @@
@router.put('/retry_error', cls=PageHandler)
async def put_page():
- raise HTTPException(503, 'broken, retry')
+ raise HTTPError(503, 'broken, retry')
diff --git a/tests/projects/broken_balancer_app/pages/retry_non_idempotent_503.py b/tests/projects/broken_balancer_app/pages/retry_non_idempotent_503.py
index db8271f54..8866443f9 100644
--- a/tests/projects/broken_balancer_app/pages/retry_non_idempotent_503.py
+++ b/tests/projects/broken_balancer_app/pages/retry_non_idempotent_503.py
@@ -1,4 +1,4 @@
-from fastapi import HTTPException
+from tornado.web import HTTPError
from frontik.handler import PageHandler
from frontik.routing import router
@@ -6,4 +6,4 @@
@router.post('/retry_non_idempotent_503', cls=PageHandler)
async def post_page():
- raise HTTPException(503, 'broken, retry')
+ raise HTTPError(503, 'broken, retry')
diff --git a/tests/projects/broken_balancer_app/pages/speculative_no_retry.py b/tests/projects/broken_balancer_app/pages/speculative_no_retry.py
index d1cdc766f..e4bd29b05 100644
--- a/tests/projects/broken_balancer_app/pages/speculative_no_retry.py
+++ b/tests/projects/broken_balancer_app/pages/speculative_no_retry.py
@@ -1,6 +1,6 @@
import asyncio
-from fastapi import HTTPException
+from tornado.web import HTTPError
from frontik.handler import PageHandler
from frontik.routing import router
@@ -9,4 +9,4 @@
@router.post('/speculative_no_retry', cls=PageHandler)
async def post_page():
await asyncio.sleep(0.8)
- raise HTTPException(500, 'broken')
+ raise HTTPError(500, 'broken')
diff --git a/tests/projects/broken_balancer_app/pages/speculative_retry.py b/tests/projects/broken_balancer_app/pages/speculative_retry.py
index 21e13e257..6ba5d6d5e 100644
--- a/tests/projects/broken_balancer_app/pages/speculative_retry.py
+++ b/tests/projects/broken_balancer_app/pages/speculative_retry.py
@@ -1,6 +1,6 @@
import asyncio
-from fastapi import HTTPException
+from tornado.web import HTTPError
from frontik.handler import PageHandler
from frontik.routing import router
@@ -9,4 +9,4 @@
@router.put('/speculative_retry', cls=PageHandler)
async def put_page():
await asyncio.sleep(0.8)
- raise HTTPException(503, 'broken, retry')
+ raise HTTPError(503, 'broken, retry')
diff --git a/tests/projects/no_debug_app/pages/basic_auth.py b/tests/projects/no_debug_app/pages/basic_auth.py
index 431a893f8..d0c652d9e 100644
--- a/tests/projects/no_debug_app/pages/basic_auth.py
+++ b/tests/projects/no_debug_app/pages/basic_auth.py
@@ -1,8 +1,9 @@
+from frontik.auth import check_debug_auth_or_finish
from frontik.handler import PageHandler, get_current_handler
from frontik.routing import router
@router.get('/basic_auth', cls=PageHandler)
-async def get_page(handler=get_current_handler()):
- handler.require_debug_access('user', 'god')
+async def get_page(handler: PageHandler = get_current_handler()) -> None:
+ check_debug_auth_or_finish(handler, 'user', 'god')
handler.json.put({'authenticated': True})
diff --git a/tests/projects/re_app/pages/id_param.py b/tests/projects/re_app/pages/id_param.py
index 899563c0c..adbbe5018 100644
--- a/tests/projects/re_app/pages/id_param.py
+++ b/tests/projects/re_app/pages/id_param.py
@@ -5,6 +5,6 @@
@regex_router.get('/id/(?P[^/]+)', cls=PageHandler)
-async def get_page(handler=get_current_handler()):
+async def get_page(handler: PageHandler = get_current_handler()) -> None:
handler.set_xsl('id_param.xsl')
handler.doc.put(etree.Element('id', value=handler.get_path_argument('id', 'wrong')))
diff --git a/tests/projects/test_app/pages/api/2/store.py b/tests/projects/test_app/pages/api/2/store.py
index d60695bd8..561bfd144 100644
--- a/tests/projects/test_app/pages/api/2/store.py
+++ b/tests/projects/test_app/pages/api/2/store.py
@@ -11,7 +11,7 @@ class Page(PageHandler):
@router.post('/api/2/envelope/', cls=Page)
async def post_page(handler: Page = get_current_handler()):
- messages = gzip.decompress(handler.body_bytes).decode('utf8')
+ messages = gzip.decompress(handler.request.body).decode('utf8')
for message in messages.split('\n'):
if message == '':
diff --git a/tests/projects/test_app/pages/arguments.py b/tests/projects/test_app/pages/arguments.py
index 0418534e2..9c7b45e45 100644
--- a/tests/projects/test_app/pages/arguments.py
+++ b/tests/projects/test_app/pages/arguments.py
@@ -1,15 +1,7 @@
-from fastapi import Request
-
from frontik.handler import PageHandler, get_current_handler
from frontik.routing import router
-from frontik.util import tornado_parse_qs_bytes
@router.get('/arguments', cls=PageHandler)
-async def get_page(request: Request, handler: PageHandler = get_current_handler()) -> None:
- if handler.get_bool_argument('enc', False):
- qs = tornado_parse_qs_bytes(request.scope['query_string'])
- param = qs.get('param', [])[0]
- handler.json.put({'тест': handler.decode_argument(param)})
- else:
- handler.json.put({'тест': handler.get_query_argument('param')})
+async def get_page(handler: PageHandler = get_current_handler()) -> None:
+ handler.json.put({'тест': handler.get_argument('param')})
diff --git a/tests/projects/test_app/pages/async_group/group.py b/tests/projects/test_app/pages/async_group/group.py
index 7f7e01ef8..abc3577f7 100644
--- a/tests/projects/test_app/pages/async_group/group.py
+++ b/tests/projects/test_app/pages/async_group/group.py
@@ -1,29 +1,27 @@
from typing import Any
-from fastapi import Request
-
from frontik.handler import PageHandler, get_current_handler
from frontik.routing import router
from frontik.util import gather_dict
@router.get('/async_group/group', cls=PageHandler)
-async def get_page(request: Request, handler: PageHandler = get_current_handler()) -> None:
+async def get_page(handler: PageHandler = get_current_handler()) -> None:
fail_callback = handler.get_query_argument('fail_callback', 'false') == 'true'
fail_request = handler.get_query_argument('fail_request', 'false') == 'true'
async def task() -> Any:
- request_result = await handler.post_url(request.headers.get('host', ''), handler.path + '?data=2')
+ request_result = await handler.post_url(handler.request.headers.get('host', ''), handler.path + '?data=2')
if fail_callback:
msg = "I'm dying!"
raise Exception(msg)
return request_result.data
data = await gather_dict({
- '1': handler.post_url(request.headers.get('host', ''), handler.path + '?data=1'),
+ '1': handler.post_url(handler.request.headers.get('host', ''), handler.path + '?data=1'),
'2': task(),
'3': handler.post_url(
- request.headers.get('host', ''),
+ handler.request.headers.get('host', ''),
handler.path,
data={'data': '3' if not fail_request else None},
parse_on_error=False,
@@ -31,7 +29,9 @@ async def task() -> Any:
})
handler.json.put(data)
- result = await gather_dict({'4': handler.post_url(request.headers.get('host', ''), handler.path + '?data=4')})
+ result = await gather_dict({
+ '4': handler.post_url(handler.request.headers.get('host', ''), handler.path + '?data=4')
+ })
handler.json.put({'future_callback_result': result['4'].data['4']})
handler.json.put({'final_callback_called': True})
diff --git a/tests/projects/test_app/pages/async_group/not_waited_failed_requests.py b/tests/projects/test_app/pages/async_group/not_waited_failed_requests.py
index 2ccf68bd1..719d46f38 100644
--- a/tests/projects/test_app/pages/async_group/not_waited_failed_requests.py
+++ b/tests/projects/test_app/pages/async_group/not_waited_failed_requests.py
@@ -1,5 +1,3 @@
-from fastapi import Request
-
from frontik.handler import PageHandler, get_current_handler
from frontik.routing import router
@@ -14,13 +12,14 @@ def _record_failed_request(self, data: dict) -> None:
@router.get('/async_group/not_waited_failed_requests', cls=Page)
-async def get_page(request: Request, handler: Page = get_current_handler()) -> None:
+async def get_page(handler: Page = get_current_handler()) -> None:
if not handler.data:
+ host = handler.request.headers.get('host', '')
# HTTP request with waited=False and fail_fast=True should not influence responses to client
- await handler.head_url(request.headers.get('host', ''), handler.path, waited=False, fail_fast=True)
- await handler.post_url(request.headers.get('host', ''), handler.path, waited=False, fail_fast=True)
- await handler.put_url(request.headers.get('host', ''), handler.path, waited=False, fail_fast=True)
- await handler.delete_url(request.headers.get('host', ''), handler.path, waited=False, fail_fast=True)
+ await handler.head_url(host, handler.path, waited=False, fail_fast=True)
+ await handler.post_url(host, handler.path, waited=False, fail_fast=True)
+ await handler.put_url(host, handler.path, waited=False, fail_fast=True)
+ await handler.delete_url(host, handler.path, waited=False, fail_fast=True)
handler.json.put({'get': True})
else:
diff --git a/tests/projects/test_app/pages/async_group/not_waited_requests.py b/tests/projects/test_app/pages/async_group/not_waited_requests.py
index 8421c4d73..1991d78d2 100644
--- a/tests/projects/test_app/pages/async_group/not_waited_requests.py
+++ b/tests/projects/test_app/pages/async_group/not_waited_requests.py
@@ -1,7 +1,5 @@
import asyncio
-from fastapi import Request
-
from frontik.handler import AbortAsyncGroup, PageHandler, get_current_handler
from frontik.routing import router
@@ -24,10 +22,10 @@ def record_request(self, data: dict) -> None:
@router.get('/async_group/not_waited_requests', cls=Page)
-async def get_page(request: Request, handler: Page = get_current_handler()) -> None:
+async def get_page(handler: Page = get_current_handler()) -> None:
if not handler.data:
handler.json.put({'get': True})
- asyncio.create_task(handler.coro(request.headers.get('host', '')))
+ asyncio.create_task(handler.coro(handler.request.headers.get('host', '')))
else:
while not all(x in handler.data for x in ('post_made', 'delete_cancelled')):
await asyncio.sleep(0.05)
diff --git a/tests/projects/test_app/pages/broken_workflow.py b/tests/projects/test_app/pages/broken_workflow.py
index 6670d15a1..27352ef2e 100644
--- a/tests/projects/test_app/pages/broken_workflow.py
+++ b/tests/projects/test_app/pages/broken_workflow.py
@@ -1,4 +1,4 @@
-from fastapi import HTTPException
+from tornado.web import HTTPError
from frontik.handler import PageHandler, get_current_handler
from frontik.routing import router
@@ -11,7 +11,7 @@ async def get_page(handler=get_current_handler()):
@handler.check_finished
def cb(*args, **kw):
- raise HTTPException(400)
+ raise HTTPError(400)
results = await gather_list(
handler.get_url(f'http://localhost:{port}', '/page/simple/'),
diff --git a/tests/projects/test_app/pages/fail_fast/__init__.py b/tests/projects/test_app/pages/fail_fast/__init__.py
index 24f253032..069f98113 100644
--- a/tests/projects/test_app/pages/fail_fast/__init__.py
+++ b/tests/projects/test_app/pages/fail_fast/__init__.py
@@ -10,13 +10,14 @@ async def get_page_preprocessor(handler: PageHandler = get_current_handler()) ->
class Page(PageHandler):
- async def get_page_fail_fast(self, failed_future):
- if self.get_query_argument('exception_in_fail_fast', 'false') == 'true':
- raise Exception('Exception in fail_fast')
+ def get_page_fail_fast(self, failed_future):
+ if self.get_argument('exception_in_fail_fast', 'false') == 'true':
+ msg = 'Exception in fail_fast'
+ raise Exception(msg)
self.json.replace({'fail_fast': True})
self.set_status(403)
- return await self.finish_with_postprocessors()
+ self.finish_with_postprocessors()
@router.get('/fail_fast', cls=Page, dependencies=[Depends(get_page_preprocessor)])
diff --git a/tests/projects/test_app/pages/fail_fast/fail_fast_without_done.py b/tests/projects/test_app/pages/fail_fast/fail_fast_without_done.py
index ea917c5a0..aef672cf0 100644
--- a/tests/projects/test_app/pages/fail_fast/fail_fast_without_done.py
+++ b/tests/projects/test_app/pages/fail_fast/fail_fast_without_done.py
@@ -1,12 +1,12 @@
-from fastapi import HTTPException
+from tornado.web import HTTPError
from frontik.handler import PageHandler, get_current_handler
from frontik.routing import router
class Page(PageHandler):
- async def get_page_fail_fast(self, failed_future):
- raise HTTPException(401)
+ def get_page_fail_fast(self, failed_future):
+ raise HTTPError(401)
@router.get('/fail_fast/fail_fast_without_done', cls=Page)
@@ -16,4 +16,4 @@ async def get_page(handler=get_current_handler()):
@router.post('/fail_fast/fail_fast_without_done', cls=Page)
async def post_page():
- raise HTTPException(403)
+ raise HTTPError(403)
diff --git a/tests/projects/test_app/pages/fail_fast/with_postprocessors.py b/tests/projects/test_app/pages/fail_fast/with_postprocessors.py
index 9de9b4e3b..fc3e0136e 100644
--- a/tests/projects/test_app/pages/fail_fast/with_postprocessors.py
+++ b/tests/projects/test_app/pages/fail_fast/with_postprocessors.py
@@ -1,4 +1,4 @@
-from fastapi import HTTPException
+from tornado.web import HTTPError
from frontik.handler import HTTPErrorWithPostprocessors, PageHandler, get_current_handler
from frontik.routing import router
@@ -18,4 +18,4 @@ async def get_page(handler=get_current_handler()):
@router.post('/fail_fast/with_postprocessors', cls=Page)
async def post_page():
- raise HTTPException(403)
+ raise HTTPError(403)
diff --git a/tests/projects/test_app/pages/finish_with_postprocessors.py b/tests/projects/test_app/pages/finish_with_postprocessors.py
index 70e979664..8c3c193d6 100644
--- a/tests/projects/test_app/pages/finish_with_postprocessors.py
+++ b/tests/projects/test_app/pages/finish_with_postprocessors.py
@@ -1,5 +1,5 @@
-from fastapi import HTTPException
from lxml import etree
+from tornado.web import HTTPError
from frontik.handler import FinishWithPostprocessors, PageHandler, get_current_handler
from frontik.routing import router
@@ -21,7 +21,7 @@ async def get_page(handler=get_current_handler()):
async def fail_request() -> None:
await handler.post_url(handler.get_header('host'), handler.path)
- raise HTTPException(500)
+ raise HTTPError(500)
handler.run_task(fail_request())
diff --git a/tests/projects/test_app/pages/handler/delete.py b/tests/projects/test_app/pages/handler/delete.py
index 69f4c9ae0..c6c940c98 100644
--- a/tests/projects/test_app/pages/handler/delete.py
+++ b/tests/projects/test_app/pages/handler/delete.py
@@ -1,23 +1,26 @@
from fastapi import Request
-from frontik.handler import PageHandler, get_current_handler
+from frontik.balancing_client import HttpClientT
+from frontik.json_builder import JsonBuilderT
from frontik.routing import router
-@router.get('/handler/delete', cls=PageHandler)
-async def get_page(request: Request, handler: PageHandler = get_current_handler()) -> None:
- result = await handler.delete_url('http://' + request.headers.get('host', ''), handler.path, data={'data': 'true'})
+@router.get('/handler/delete')
+async def get_page(request: Request, http_client: HttpClientT, json_builder: JsonBuilderT) -> None:
+ result = await http_client.delete_url(
+ 'http://' + request.headers.get('host', ''), request.url.path, data={'data': 'true'}
+ )
if not result.failed:
- handler.json.put(result.data)
+ json_builder.put(result.data)
-@router.post('/handler/delete', cls=PageHandler)
-async def post_page(handler: PageHandler = get_current_handler()) -> None:
- result = await handler.delete_url('http://backend', handler.path, fail_fast=True)
+@router.post('/handler/delete')
+async def post_page(request: Request, http_client: HttpClientT, json_builder: JsonBuilderT) -> None:
+ result = await http_client.delete_url('http://backend', request.url.path, fail_fast=True)
if not result.failed:
- handler.json.put(result.data)
+ json_builder.put(result.data)
-@router.delete('/handler/delete', cls=PageHandler)
-async def delete_page(handler: PageHandler = get_current_handler()) -> None:
- handler.json.put({'delete': handler.get_query_argument('data')})
+@router.delete('/handler/delete')
+async def delete_page(data: str, json_builder: JsonBuilderT) -> None:
+ json_builder.put({'delete': data})
diff --git a/tests/projects/test_app/pages/handler/head_url.py b/tests/projects/test_app/pages/handler/head_url.py
index 839c518ba..a63dc240b 100644
--- a/tests/projects/test_app/pages/handler/head_url.py
+++ b/tests/projects/test_app/pages/handler/head_url.py
@@ -1,14 +1,12 @@
import http.client
-from fastapi import Request
-
from frontik.handler import PageHandler, get_current_handler
from frontik.routing import router
@router.get('/handler/head_url', cls=PageHandler)
-async def get_page(request: Request, handler: PageHandler = get_current_handler()) -> None:
- head_result = await handler.head_url(request.headers.get('host', ''), '/handler/head', name='head')
+async def get_page(handler: PageHandler = get_current_handler()) -> None:
+ head_result = await handler.head_url(handler.request.headers.get('host', ''), '/handler/head', name='head')
if head_result.raw_body == b'' and head_result.status_code == http.client.OK:
handler.text = 'OK'
diff --git a/tests/projects/test_app/pages/http_client/custom_headers.py b/tests/projects/test_app/pages/http_client/custom_headers.py
index a4ccbb527..d82d86a57 100644
--- a/tests/projects/test_app/pages/http_client/custom_headers.py
+++ b/tests/projects/test_app/pages/http_client/custom_headers.py
@@ -16,4 +16,4 @@ async def get_page(handler=get_current_handler()):
@router.post('/http_client/custom_headers', cls=Page)
async def post_page(handler: Page = get_current_handler()):
- handler.json.put(handler.get_request_headers())
+ handler.json.put(handler.request.headers)
diff --git a/tests/projects/test_app/pages/http_client/post_url.py b/tests/projects/test_app/pages/http_client/post_url.py
index 8bee3efc3..422cec1b5 100644
--- a/tests/projects/test_app/pages/http_client/post_url.py
+++ b/tests/projects/test_app/pages/http_client/post_url.py
@@ -34,7 +34,7 @@ async def get_page(handler=get_current_handler()):
@router.post('/http_client/post_url', cls=PageHandler)
async def post_page(handler: PageHandler = get_current_handler()):
errors_count = 0
- body_parts = handler.body_bytes.split(b'\r\n--')
+ body_parts = handler.request.body.split(b'\r\n--')
for part in body_parts:
 field_part = re.search(rb'name="(?P<name>.+)"\r\n\r\n(?P<value>.*)', part)
diff --git a/tests/projects/test_app/pages/http_error.py b/tests/projects/test_app/pages/http_error.py
index aee79b90c..5d37d9cac 100644
--- a/tests/projects/test_app/pages/http_error.py
+++ b/tests/projects/test_app/pages/http_error.py
@@ -1,4 +1,4 @@
-from fastapi import HTTPException
+from tornado.web import HTTPError
from frontik.handler import PageHandler, get_current_handler
from frontik.routing import router
@@ -7,4 +7,4 @@
@router.get('/http_error', cls=PageHandler)
async def get_page(handler=get_current_handler()):
code = int(handler.get_query_argument('code', '200'))
- raise HTTPException(code)
+ raise HTTPError(code)
diff --git a/tests/projects/test_app/pages/json_page.py b/tests/projects/test_app/pages/json_page.py
index 65e61a2f2..388e5f0fc 100644
--- a/tests/projects/test_app/pages/json_page.py
+++ b/tests/projects/test_app/pages/json_page.py
@@ -1,5 +1,3 @@
-from fastapi import Request
-
from frontik import media_types
from frontik.handler import PageHandler, get_current_handler
from frontik.routing import router
@@ -19,13 +17,13 @@ def jinja_context_provider(handler):
@router.get('/json_page', cls=Page)
-async def get_page(request: Request, handler: Page = get_current_handler()) -> None:
+async def get_page(handler: Page = get_current_handler()) -> None:
invalid_json = handler.get_query_argument('invalid', 'false')
requests = {
- 'req1': handler.post_url(request.headers.get('host', ''), handler.path, data={'param': 1}),
+ 'req1': handler.post_url(handler.request.headers.get('host', ''), handler.path, data={'param': 1}),
'req2': handler.post_url(
- request.headers.get('host', ''), handler.path, data={'param': 2, 'invalid': invalid_json}
+ handler.request.headers.get('host', ''), handler.path, data={'param': 2, 'invalid': invalid_json}
),
}
data = await gather_dict(requests)
@@ -33,7 +31,7 @@ async def get_page(request: Request, handler: Page = get_current_handler()) -> N
if handler.get_query_argument('template_error', 'false') == 'true':
del data['req1']
- handler.set_template(handler.get_query_argument('template', 'jinja.html'))
+ handler.set_template(handler.get_query_argument('template', 'jinja.html')) # type: ignore
handler.json.put(data)
diff --git a/tests/projects/test_app/pages/log.py b/tests/projects/test_app/pages/log.py
index 44499e810..a5d2682fe 100644
--- a/tests/projects/test_app/pages/log.py
+++ b/tests/projects/test_app/pages/log.py
@@ -12,8 +12,7 @@ async def get_page(handler=get_current_handler()):
handler.log.info('info')
try:
- msg = 'test'
- raise Exception(msg)
+ raise Exception('test')
except Exception:
handler.log.exception('exception')
handler.log.error('error', stack_info=True)
diff --git a/tests/projects/test_app/pages/mandatory_headers.py b/tests/projects/test_app/pages/mandatory_headers.py
new file mode 100644
index 000000000..aa5547bfd
--- /dev/null
+++ b/tests/projects/test_app/pages/mandatory_headers.py
@@ -0,0 +1,33 @@
+from tornado.web import HTTPError
+
+from frontik.handler import PageHandler, get_current_handler
+from frontik.routing import router
+
+
+@router.get('/mandatory_headers', cls=PageHandler)
+async def get_page(handler=get_current_handler()):
+ if handler.get_argument('test_mandatory_headers', None) is not None:
+ handler.set_mandatory_header('TEST_HEADER', 'TEST_HEADER_VALUE')
+ handler.set_mandatory_cookie('TEST_COOKIE', 'TEST_HEADER_COOKIE')
+ raise HTTPError(500)
+
+ elif handler.get_argument('test_without_mandatory_headers', None) is not None:
+ handler.add_header('TEST_HEADER', 'TEST_HEADER_VALUE')
+ handler.set_cookie('TEST_COOKIE', 'TEST_HEADER_COOKIE')
+ raise HTTPError(500)
+
+ elif handler.get_argument('test_clear_set_mandatory_headers', None) is not None:
+ handler.set_mandatory_header('TEST_HEADER', 'TEST_HEADER_VALUE')
+ handler.set_mandatory_cookie('TEST_COOKIE', 'TEST_HEADER_COOKIE')
+ handler.clear_header('TEST_HEADER')
+ handler.clear_cookie('TEST_COOKIE')
+ raise HTTPError(500)
+
+ elif handler.get_argument('test_clear_not_set_headers', None) is not None:
+ handler.clear_header('TEST_HEADER')
+ handler.clear_cookie('TEST_COOKIE')
+ raise HTTPError(500)
+
+ elif handler.get_argument('test_invalid_mandatory_cookie') is not None:
+ handler.set_mandatory_cookie('TEST_COOKIE', '')
+ raise HTTPError(500)
diff --git a/tests/projects/test_app/pages/postprocess.py b/tests/projects/test_app/pages/postprocess.py
index cf1696ce0..2ffe65719 100644
--- a/tests/projects/test_app/pages/postprocess.py
+++ b/tests/projects/test_app/pages/postprocess.py
@@ -1,4 +1,4 @@
-from fastapi import HTTPException
+from tornado.web import HTTPError
from frontik.handler import PageHandler, get_current_handler
from frontik.routing import router
@@ -12,7 +12,7 @@ def postprocessor(self, handler, tpl, meta_info):
class Page(PageHandler):
@staticmethod
def _pp_1(handler):
- raise HTTPException(400)
+ raise HTTPError(400)
@staticmethod
def _pp_2(handler):
diff --git a/tests/projects/test_app/pages/redirect.py b/tests/projects/test_app/pages/redirect.py
index a5a967012..20f87649e 100644
--- a/tests/projects/test_app/pages/redirect.py
+++ b/tests/projects/test_app/pages/redirect.py
@@ -1,7 +1,5 @@
import re
-from fastapi import Request
-
from frontik.handler import PageHandler, get_current_handler
from frontik.routing import regex_router
@@ -10,15 +8,15 @@
@regex_router.get('^/redirect', cls=PageHandler)
-async def get_page(request: Request, handler: PageHandler = get_current_handler()) -> None:
- if PERMANENT_REDIRECT_PATTERN.match(request.url.path):
+async def get_page(handler: PageHandler = get_current_handler()) -> None:
+ if PERMANENT_REDIRECT_PATTERN.match(handler.path):
permanent = True
- elif TEMPORARY_REDIRECT_PATTERN.match(request.url.path):
+ elif TEMPORARY_REDIRECT_PATTERN.match(handler.path):
permanent = False
else:
raise RuntimeError('123')
to_url = '/finish?foo=bar'
- if request.url.query:
- to_url = to_url + f'&{request.url.query}'
+ if handler.request.query:
+ to_url = to_url + f'&{handler.request.query}'
handler.redirect(to_url, permanent)
diff --git a/tests/projects/test_app/pages/write_after_finish.py b/tests/projects/test_app/pages/write_after_finish.py
index de3a59ee9..14dc31975 100644
--- a/tests/projects/test_app/pages/write_after_finish.py
+++ b/tests/projects/test_app/pages/write_after_finish.py
@@ -15,7 +15,7 @@ def prepare(self):
@classmethod
async def _pp(cls, handler):
- if handler.method != 'POST':
+ if handler.request.method != 'POST':
handler.counter += 1
cls.counter_static = handler.counter
diff --git a/tests/projects/test_app/pages/write_error.py b/tests/projects/test_app/pages/write_error.py
index 9c8f700ca..6615fb964 100644
--- a/tests/projects/test_app/pages/write_error.py
+++ b/tests/projects/test_app/pages/write_error.py
@@ -3,14 +3,13 @@
class Page(PageHandler):
- async def write_error(self, status_code=500, **kwargs):
- self.set_status(status_code)
+ def write_error(self, status_code=500, **kwargs):
self.json.put({'write_error': True})
- if self.get_query_argument('fail_write_error', 'false') == 'true':
+ if self.get_argument('fail_write_error', 'false') == 'true':
raise Exception('exception in write_error')
- return await self.finish_with_postprocessors()
+ self.finish_with_postprocessors()
@router.get('/write_error', cls=Page)
diff --git a/tests/test_arguments.py b/tests/test_arguments.py
index cc365015d..23a9bc8f5 100644
--- a/tests/test_arguments.py
+++ b/tests/test_arguments.py
@@ -53,7 +53,7 @@ def test_arg_validation_raises_for_empty_value_with_no_default(self):
def test_arg_validation_raises_for_default_of_incorrect_type(self) -> None:
response = frontik_test_app.get_page('validate_arguments?str_arg=test', method=requests.put, notpl=True)
- assert response.status_code == 400
+ assert response.status_code == 500
def test_validation_model(self):
self.query_args.update(int_arg=0)
diff --git a/tests/test_asyncgroup.py b/tests/test_asyncgroup.py
index 42773c4f7..b1cd0f107 100644
--- a/tests/test_asyncgroup.py
+++ b/tests/test_asyncgroup.py
@@ -1,42 +1,125 @@
-import asyncio
import logging
+import unittest
+from functools import partial
-import pytest
+from tornado.concurrent import Future
+from tornado.testing import ExpectLog
-from frontik.futures import AsyncGroup
+from frontik.futures import AsyncGroup, async_logger
logging.root.setLevel(logging.NOTSET)
-class TestAsyncGroup:
- async def test_exception_in_first(self) -> None:
- async def callback1() -> None:
- raise Exception('callback1 error')
+class TestAsyncGroup(unittest.TestCase):
+ async def test_callbacks(self):
+ data = []
- async def callback2() -> None:
- await asyncio.sleep(0)
+ def callback2():
+ data.append(2)
- ag = AsyncGroup(name='test_group')
- ag.add_future(asyncio.create_task(callback1()))
- ag.add_future(asyncio.create_task(callback2()))
+ def finish_callback():
+ self.assertEqual(data, [1, 2])
+ data.append(3)
- with pytest.raises(Exception, match='callback1 error'):
- await ag.finish()
+ ag = AsyncGroup(finish_callback)
+ cb1 = ag.add(partial(data.append, 1))
+ cb2 = ag.add(callback2)
- assert ag.done() is True
+ self.assertEqual(ag._finished, False)
- async def test_exception_in_last(self) -> None:
- async def callback1() -> None:
- await asyncio.sleep(0)
+ ag.try_finish()
- async def callback2() -> None:
- raise Exception('callback2 error')
+ self.assertEqual(ag._finished, False)
- ag = AsyncGroup(name='test_group')
- ag.add_future(asyncio.create_task(callback1()))
- ag.add_future(asyncio.create_task(callback2()))
+ cb1()
- with pytest.raises(Exception, match='callback2 error'):
- await ag.finish()
+ self.assertEqual(ag._finished, False)
- assert ag.done() is True
+ cb2()
+
+ self.assertEqual(ag._finished, True)
+ self.assertEqual(data, [1, 2, 3])
+
+ def test_notifications(self) -> None:
+ f: Future = Future()
+ ag = AsyncGroup(partial(f.set_result, True))
+ not1 = ag.add_notification()
+ not2 = ag.add_notification()
+
+ self.assertEqual(ag._finished, False)
+
+ not1()
+
+ self.assertEqual(ag._finished, False)
+
+ not2('params', are='ignored')
+
+ self.assertEqual(ag._finished, True)
+ self.assertEqual(f.result(), True)
+
+ with ExpectLog(async_logger, r'.*trying to finish already finished AsyncGroup\(name=None, finished=True\)'):
+ ag.finish()
+
+ def test_finish(self) -> None:
+ f: Future = Future()
+ ag = AsyncGroup(partial(f.set_result, True))
+
+ self.assertEqual(ag._finished, False)
+
+ ag.add_notification()
+ ag.finish()
+
+ self.assertEqual(ag._finished, True)
+ self.assertEqual(f.result(), True)
+
+ def test_exception_in_first(self) -> None:
+ def callback1():
+ msg = 'callback1 error'
+ raise Exception(msg)
+
+ def callback2():
+ self.fail('callback2 should not be called')
+
+ def finish_callback():
+ self.fail('finish_callback should not be called')
+
+ ag = AsyncGroup(finish_callback, name='test_group')
+ cb1 = ag.add(callback1)
+ cb2 = ag.add(callback2)
+
+ self.assertRaises(Exception, cb1)
+ self.assertEqual(ag._finished, True)
+
+ with ExpectLog(async_logger, r'.*ignoring executing callback in AsyncGroup\(name=test_group, finished=True\)'):
+ cb2()
+
+ self.assertEqual(ag._finished, True)
+
+ def test_exception_in_last(self) -> None:
+ def callback2():
+ msg = 'callback1 error'
+ raise Exception(msg)
+
+ def finish_callback():
+ self.fail('finish_callback should not be called')
+
+ ag = AsyncGroup(finish_callback, name='test_group')
+ cb1 = ag.add(lambda: None)
+ cb2 = ag.add(callback2)
+
+ cb1()
+
+ with ExpectLog(async_logger, r'.*aborting AsyncGroup\(name=test_group, finished=False\)'):
+ self.assertRaises(Exception, cb2)
+
+ self.assertEqual(ag._finished, True)
+
+ def test_exception_in_final(self) -> None:
+ def finish_callback():
+ msg = 'callback1 error'
+ raise Exception(msg)
+
+ ag = AsyncGroup(finish_callback)
+
+ self.assertRaises(Exception, ag.try_finish)
+ self.assertEqual(ag._finished, True)
diff --git a/tests/test_errors.py b/tests/test_errors.py
index 7e45a3161..505beb3fd 100644
--- a/tests/test_errors.py
+++ b/tests/test_errors.py
@@ -55,7 +55,7 @@ def test_write_error(self) -> None:
def test_write_error_exception(self) -> None:
response = frontik_test_app.get_page('write_error?fail_write_error=true')
assert response.status_code == 500
- assert response.content == b'Internal Server Error'
+ assert response.content == b''
def test_write_error_405(self):
response = frontik_test_app.get_page('write_error', method=requests.put)
diff --git a/tests/test_fail_fast.py b/tests/test_fail_fast.py
index 5fb0127d9..c543dbf9f 100644
--- a/tests/test_fail_fast.py
+++ b/tests/test_fail_fast.py
@@ -24,7 +24,7 @@ def test_fail_fast_unknown_method(self):
def test_fail_fast_without_done(self):
response = frontik_test_app.get_page('fail_fast/fail_fast_without_done')
- assert response.status_code == 500
+ assert response.status_code == 401
def test_fail_fast_default(self):
response = frontik_test_app.get_page('fail_fast?fail_fast_default=true&code=400', method=requests.post)
@@ -48,5 +48,5 @@ def test_exception_in_fail_fast(self) -> None:
assert response.status_code == 500
def test_fail_fast_with_producer(self):
- response = frontik_test_app.get_page('fail_fast/with_postprocessors')
- assert response.status_code == 500
+ response = frontik_test_app.get_page_json('fail_fast/with_postprocessors')
+ assert response['error'] == 'some_error'
diff --git a/tests/test_handler.py b/tests/test_handler.py
index 0d7642c64..8ca6ecc53 100644
--- a/tests/test_handler.py
+++ b/tests/test_handler.py
@@ -25,7 +25,7 @@ def test_no_method(self):
def test_delete_post_arguments(self):
response = frontik_test_app.get_page('handler/delete', method=requests.delete)
- assert response.status_code == 400
+ assert response.status_code == 422
def test_204(self):
response = frontik_test_app.get_page('finish_204')
diff --git a/tests/test_http_client.py b/tests/test_http_client.py
index 3a74034cf..6643e200a 100644
--- a/tests/test_http_client.py
+++ b/tests/test_http_client.py
@@ -42,7 +42,7 @@ def test_parse_response(self):
def test_custom_headers(self):
json = frontik_test_app.get_page_json('http_client/custom_headers')
- assert json['x-foo'] == 'Bar'
+ assert json['X-Foo'] == 'Bar'
def test_http_client_method_future(self):
json = frontik_test_app.get_page_json('http_client/future')
diff --git a/tests/test_logging.py b/tests/test_logging.py
index 0a587699c..04f019900 100644
--- a/tests/test_logging.py
+++ b/tests/test_logging.py
@@ -61,7 +61,7 @@ def test_send_to_syslog(self):
parsed_logs[tag].append({'priority': priority, 'message': message})
expected_service_logs = [
- {'priority': '14', 'message': {'lvl': 'INFO', 'logger': r'frontik\.routing', 'msg': 'requested url: /log'}},
+ {'priority': '14', 'message': {'lvl': 'INFO', 'logger': r'handler', 'msg': 'requested url: /log'}},
{'priority': '15', 'message': {'lvl': 'DEBUG', 'logger': r'handler', 'msg': 'debug'}},
{'priority': '14', 'message': {'lvl': 'INFO', 'logger': r'handler', 'msg': 'info'}},
{
diff --git a/tests/test_mandatory_headers.py b/tests/test_mandatory_headers.py
new file mode 100644
index 000000000..1bea41316
--- /dev/null
+++ b/tests/test_mandatory_headers.py
@@ -0,0 +1,32 @@
+from tests.instances import frontik_test_app
+
+
+class TestPostprocessors:
+ def test_set_mandatory_headers(self):
+ response = frontik_test_app.get_page('mandatory_headers?test_mandatory_headers')
+ assert response.status_code == 500
+ assert response.headers.get('TEST_HEADER') == 'TEST_HEADER_VALUE'
+ assert response.cookies.get('TEST_COOKIE') == 'TEST_HEADER_COOKIE' # type: ignore
+
+ def test_mandatory_headers_are_lost(self) -> None:
+ response = frontik_test_app.get_page('mandatory_headers?test_without_mandatory_headers')
+ assert response.status_code == 500
+ assert response.headers.get('TEST_HEADER') is None
+ assert response.headers.get('TEST_COOKIE') is None
+
+ def test_mandatory_headers_are_cleared(self) -> None:
+ response = frontik_test_app.get_page('mandatory_headers?test_clear_set_mandatory_headers')
+ assert response.status_code == 500
+ assert response.headers.get('TEST_HEADER') is None
+ assert response.headers.get('TEST_COOKIE') is None
+
+    def test_clear_not_set_headers_does_not_fail(self) -> None:
+ response = frontik_test_app.get_page('mandatory_headers?test_clear_not_set_headers')
+ assert response.status_code == 500
+ assert response.headers.get('TEST_HEADER') is None
+ assert response.headers.get('TEST_COOKIE') is None
+
+ def test_invalid_mandatory_cookie(self):
+ response = frontik_test_app.get_page('mandatory_headers?test_invalid_mandatory_cookie')
+ assert response.status_code == 400
+ assert response.headers.get('TEST_COOKIE') is None
diff --git a/tests/test_sentry_integration.py b/tests/test_sentry_integration.py
index 410d2bda9..8398e858f 100644
--- a/tests/test_sentry_integration.py
+++ b/tests/test_sentry_integration.py
@@ -4,7 +4,7 @@
import pytest
import requests
import sentry_sdk
-from fastapi import HTTPException
+from tornado.web import HTTPError
from frontik.app import FrontikApplication
from frontik.handler import PageHandler, get_current_handler
@@ -33,7 +33,7 @@ async def get_page(handler: Page = get_current_handler()) -> None:
@router.post('/sentry_error', cls=Page)
async def post_page():
- raise HTTPException(500, 'my_HTTPError')
+ raise HTTPError(500, 'my_HTTPError')
@router.put('/sentry_error', cls=Page)
@@ -88,7 +88,7 @@ async def test_sentry_message(self):
assert event.get('modules') is not None
assert event['request']['url'].endswith('/sentry_error') is True
assert event['request']['method'] == 'PUT'
- assert event['request']['headers']['maheaderkey'] == 'MaHeaderValue'
+ assert event['request']['headers']['Maheaderkey'] == 'MaHeaderValue'
assert event['extra']['extra_key'] == 'extra_value'
assert event['user']['id'] == '123456'
diff --git a/tests/test_telemetry.py b/tests/test_telemetry.py
index 2150b0e1e..6964de345 100644
--- a/tests/test_telemetry.py
+++ b/tests/test_telemetry.py
@@ -6,14 +6,14 @@
from opentelemetry import trace
from opentelemetry.sdk.resources import Resource
from opentelemetry.sdk.trace import TracerProvider
-from opentelemetry.sdk.trace.export import ReadableSpan, SpanExporter, SpanExportResult
+from opentelemetry.sdk.trace.export import BatchSpanProcessor, ReadableSpan, SpanExporter, SpanExportResult
from opentelemetry.sdk.trace.sampling import ParentBased, TraceIdRatioBased
from opentelemetry.semconv.resource import ResourceAttributes
from frontik import request_context
from frontik.app import FrontikApplication
from frontik.handler import PageHandler, get_current_handler
-from frontik.integrations.telemetry import FrontikIdGenerator, FrontikSpanProcessor, get_netloc
+from frontik.integrations.telemetry import FrontikIdGenerator, get_netloc
from frontik.options import options
from frontik.routing import router
from frontik.testing import FrontikTestBase
@@ -98,7 +98,7 @@ def make_otel_provider() -> TracerProvider:
SPAN_STORAGE: list[ReadableSpan] = []
-BATCH_SPAN_PROCESSOR: list[FrontikSpanProcessor] = []
+BATCH_SPAN_PROCESSOR: list[BatchSpanProcessor] = []
def find_span(attr: str, value: Any) -> Optional[ReadableSpan]:
@@ -126,7 +126,7 @@ def frontik_app(self) -> FrontikApplication:
test_exporter = TestExporter()
provider = make_otel_provider()
- batch_span_processor = FrontikSpanProcessor(test_exporter)
+ batch_span_processor = BatchSpanProcessor(test_exporter)
provider.add_span_processor(batch_span_processor)
trace.set_tracer_provider(provider)