
Commit

test koechego
712u3 committed Jul 25, 2024
1 parent c21effb commit 977b3d0
Showing 1 changed file with 30 additions and 22 deletions.
52 changes: 30 additions & 22 deletions frontik/handler.py
@@ -31,7 +31,7 @@
 from frontik.futures import AbortAsyncGroup, AsyncGroup
 from frontik.http_status import ALLOWED_STATUSES, CLIENT_CLOSED_REQUEST, NON_CRITICAL_BAD_GATEWAY
 from frontik.json_builder import FrontikJsonDecodeError, json_decode
-from frontik.loggers import CUSTOM_JSON_EXTRA, JSON_REQUESTS_LOGGER
+from frontik.loggers import CUSTOM_JSON_EXTRA, JSON_REQUESTS_LOGGER, bootstrap_logger
 from frontik.loggers.stages import StagesLogger
 from frontik.options import options
 from frontik.timeout_tracking import get_timeout_checker
@@ -59,6 +59,10 @@ class RedirectSignal(Exception):
     pass
 
 
+class FinishSignal(Exception):
+    pass
+
+
 class HTTPErrorWithPostprocessors(tornado.web.HTTPError):
     pass
 
@@ -83,6 +87,7 @@ def __init__(self, *args: object) -> None:
 _remove_control_chars_regex = re.compile(r'[\x00-\x08\x0e-\x1f]')
 
 handler_logger = logging.getLogger('handler')
+handler_logger2 = bootstrap_logger('xsl_handler', logging.DEBUG)
 
 
 def _fail_fast_policy(fail_fast: bool, waited: bool, host: str, path: str) -> bool:
@@ -362,20 +367,17 @@ def add_future(cls, future: Future, callback: Callable) -> None:
     # Requests handling
 
     async def execute(self) -> tuple[int, str, HTTPHeaders, bytes]:
-        try:
-            if (
-                self.request.method
-                not in (
-                    'GET',
-                    'HEAD',
-                    'OPTIONS',
-                )
-                and options.xsrf_cookies
-            ):
-                self.check_xsrf_cookie()
-            await super()._execute([], b'', b'')
-        except Exception as ex:
-            self._handle_request_exception(ex)
+        if (
+            self.request.method
+            not in (
+                'GET',
+                'HEAD',
+                'OPTIONS',
+            )
+            and options.xsrf_cookies
+        ):
+            self.check_xsrf_cookie()
+        await super()._execute([], b'', b'')
 
         try:
             return await asyncio.wait_for(self.handler_result_future, timeout=5.0)
@@ -492,13 +494,19 @@ async def _postprocess(self) -> Any:
             self.log.info('page was already finished, skipping page producer')
             return
 
+        flag = False
+
         renderer: Any
         if self.text is not None:
             renderer = self._generic_producer
         elif not self.json.is_empty():
             renderer = self.json_producer
         else:
             renderer = self.xml_producer
+            flag = True
+
+        if flag:
+            handler_logger2.error(f'---{self.name}--{renderer.transform_filename}---')
 
         self.log.debug('using %s renderer', renderer)
         rendered_result, meta_info = await renderer()
@@ -542,6 +550,10 @@ def _handle_request_exception(self, e: BaseException) -> None:
         if self._finished and not isinstance(e, Finish):
             return
 
+        if isinstance(e, FinishSignal):
+            # Not an error; request was finished explicitly
+            return
+
         if isinstance(e, FailFastError):
             request = e.failed_result.request
 
@@ -651,12 +663,12 @@ def finish(self, chunk: Optional[Union[str, bytes, dict]] = None) -> Future[None
             content_length = sum(len(part) for part in self._write_buffer)
             self.set_header('Content-Length', content_length)
 
-        future = self.flush(include_footers=True)
+        self._flush()
         self._finished = True
         self.on_finish()
-        return future
+        raise FinishSignal()
 
-    def flush(self, include_footers: bool = False) -> Future[None]:
+    def _flush(self) -> None:
         assert self.request.connection is not None
         chunk = b''.join(self._write_buffer)
         self._write_buffer = []
@@ -671,10 +683,6 @@ def flush(self, include_footers: bool = False) -> Future[None]:
 
         self.handler_result_future.set_result((self._status_code, self._reason, self._headers, chunk))
 
-        future = Future()  # type: Future[None]
-        future.set_result(None)
-        return future
-
     # postprocessors
 
     def set_mandatory_header(self, name: str, value: str) -> None:
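
Taken together, the commit replaces the future-based finish path with an exception-driven one: finish() calls _flush(), which resolves handler_result_future, and then raises FinishSignal; _handle_request_exception treats that signal as normal completion, and execute() awaits handler_result_future with a timeout. Below is a minimal, self-contained sketch of that control flow, under stated assumptions: MiniHandler, _run_page, and the simplified execute() are hypothetical stand-ins rather than frontik APIs, and the FinishSignal catch is collapsed into execute(), whereas frontik swallows it inside Tornado's _execute via _handle_request_exception.

import asyncio


class FinishSignal(Exception):
    """Not an error: raised by finish() to unwind page code once the response is ready."""


class MiniHandler:
    # Hypothetical stand-in for PageHandler; only the new finish()/_flush() flow is kept.
    def __init__(self) -> None:
        self._write_buffer: list[bytes] = []
        self._finished = False

    async def execute(self) -> tuple[int, bytes]:
        self.handler_result_future: asyncio.Future = asyncio.get_running_loop().create_future()
        try:
            await self._run_page()  # stands in for super()._execute(...)
        except FinishSignal:
            pass  # in frontik this is swallowed by _handle_request_exception
        return await asyncio.wait_for(self.handler_result_future, timeout=5.0)

    async def _run_page(self) -> None:
        self._write_buffer.append(b'hello')
        self.finish()  # raises FinishSignal, so nothing after this line runs
        raise AssertionError('unreachable')

    def finish(self) -> None:
        self._flush()
        self._finished = True
        raise FinishSignal()

    def _flush(self) -> None:
        chunk = b''.join(self._write_buffer)
        self._write_buffer = []
        self.handler_result_future.set_result((200, chunk))


print(asyncio.run(MiniHandler().execute()))  # -> (200, b'hello')

Raising from finish() guarantees that no page code runs after it; the response is delivered solely through handler_result_future, which is why flush() could be simplified into a _flush() that returns nothing.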