diff --git a/src/DB_Type.py b/src/DB_Type.py index 9845642..9b5045e 100644 --- a/src/DB_Type.py +++ b/src/DB_Type.py @@ -1,4 +1,4 @@ -#Copyright (c) 2023 구FS, all rights reserved. Subject to the CC BY-NC-SA 4.0 licence in `licence.md`. +# Copyright (c) 2023 구FS, all rights reserved. Subject to the CC BY-NC-SA 4.0 licence in `licence.md`. import enum @@ -7,8 +7,8 @@ class DB_Type(enum.Enum): Which database on \"https://ourairports.com/data/\"? """ - aerodrome="https://ourairports.com/data/airports.csv" #aerodrome database - country ="https://ourairports.com/data/countries.csv" #country database for country names - frequency="https://ourairports.com/data/airport-frequencies.csv" #frequency database for information command - navaid ="https://ourairports.com/data/navaids.csv" #navaid database for information command - runway ="https://ourairports.com/data/runways.csv" #runway database for cross wind components and information command \ No newline at end of file + aerodrome="https://ourairports.com/data/airports.csv" # aerodrome database + country ="https://ourairports.com/data/countries.csv" # country database for country names + frequency="https://ourairports.com/data/airport-frequencies.csv" # frequency database for information command + navaid ="https://ourairports.com/data/navaids.csv" # navaid database for information command + runway ="https://ourairports.com/data/runways.csv" # runway database for cross wind components and information command \ No newline at end of file diff --git a/src/Doc_Type.py b/src/Doc_Type.py index a3a54c2..2dcccc8 100644 --- a/src/Doc_Type.py +++ b/src/Doc_Type.py @@ -1,4 +1,4 @@ -#Copyright (c) 2023 구FS, all rights reserved. Subject to the CC BY-NC-SA 4.0 licence in `licence.md`. +# Copyright (c) 2023 구FS, all rights reserved. Subject to the CC BY-NC-SA 4.0 licence in `licence.md`. import enum diff --git a/src/Server.py b/src/Server.py index e01bbfe..2df2c3e 100644 --- a/src/Server.py +++ b/src/Server.py @@ -1,5 +1,4 @@ import dataclasses -import discord @dataclasses.dataclass @@ -8,13 +7,13 @@ class Server: all variables for 1 server instance """ - id: int #discord server id - name: str #discord server name + id: int # discord server id + name: str # discord server name - channel_id: int|None=None #active channel id, needed later for subscription - command: str|None=None #active command, needed later for subscription - force_print: bool=True #force sending to discord (after user input request) or not (subscription) - METAR_o_previous: str|None=None #METAR previous for subscription - METAR_update_finished: bool=False #has program in subscription mode waited 1 round until source website refreshed METAR completely? - TAF_o_previous: str|None=None #TAF previous for subscription - TAF_update_finished: bool=False #has program in subscription mode waited 1 round until source website refreshed TAF completely? \ No newline at end of file + channel_id: int|None=None # active channel id, needed later for subscription + command: str|None=None # active command, needed later for subscription + force_print: bool=True # force sending to discord (after user input request) or not (subscription) + METAR_o_previous: str|None=None # METAR previous for subscription + METAR_update_finished: bool=False # has program in subscription mode waited 1 round until source website refreshed METAR completely? 
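For context on the DB_Type enum shown above: each member's value is an OurAirports CSV URL. A minimal sketch of how one member could be fetched into a pandas DataFrame, mirroring what init_DB does later in this diff (the fetch_db helper name and the 50 s default timeout are illustrative, not part of the patch):

import io
import pandas
import requests
from DB_Type import DB_Type

def fetch_db(db_type: DB_Type, timeout: int=50) -> pandas.DataFrame:
    # download the CSV text from ourairports.com and parse it into a DataFrame
    csv_text=requests.get(db_type.value, timeout=timeout).text
    return pandas.read_csv(io.StringIO(csv_text))

# example: load the runway database used for crosswind components
# RWY_DB=fetch_db(DB_Type.runway)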
+ TAF_o_previous: str|None=None # TAF previous for subscription + TAF_update_finished: bool=False # has program in subscription mode waited 1 round until source website refreshed TAF completely? \ No newline at end of file diff --git a/src/Station.py b/src/Station.py index 8d07c3d..37ae058 100644 --- a/src/Station.py +++ b/src/Station.py @@ -7,6 +7,6 @@ class Station: represents 1 aeronautical station (aerodrome) """ - ICAO: str #station ICAO code - name: str|None=None #station name - elev: float|None=None #station elevation [m] \ No newline at end of file + ICAO: str # station ICAO code + name: str|None=None # station name + elev: float|None=None # station elevation [m] \ No newline at end of file diff --git a/src/change_format.py b/src/change_format.py index 11b9def..4cf9502 100644 --- a/src/change_format.py +++ b/src/change_format.py @@ -1,4 +1,4 @@ -#Copyright (c) 2023 구FS, all rights reserved. Subject to the CC BY-NC-SA 4.0 licence in `licence.md`. +# Copyright (c) 2023 구FS, all rights reserved. Subject to the CC BY-NC-SA 4.0 licence in `licence.md`. import datetime as dt import pandas from change_format_.change import change_format_change @@ -20,62 +20,62 @@ def change_format(info_list: list[str], i: int, station: Station, met_report_DT: dt.datetime, now_DT: dt.datetime, RWY_DB: pandas.DataFrame, server: Server) -> str: - #just forward station ICAO + # just forward station ICAO - info_new=change_format_met_report_DT(info_list[i], met_report_DT, now_DT, server) #met report time + info_new=change_format_met_report_DT(info_list[i], met_report_DT, now_DT, server) # met report time if info_new!=None: return info_new - info_new=change_format_wind (info_list[i], station, RWY_DB) #wind + info_new=change_format_wind (info_list[i], station, RWY_DB) # wind if info_new!=None: return info_new - info_new=change_format_vis (info_list, i) #visibility + info_new=change_format_vis (info_list, i) # visibility if info_new!=None: return info_new - info_new=change_format_RVR (info_list[i]) #RVR + info_new=change_format_RVR (info_list[i]) # RVR if info_new!=None: return info_new - info_new=change_format_weather (info_list, i) #weather, only mark weather dangerous + info_new=change_format_weather (info_list, i) # weather, only mark weather dangerous if info_new!=None: return info_new - info_new=change_format_clouds (info_list[i], station) #clouds + info_new=change_format_clouds (info_list[i], station) # clouds if info_new!=None: return info_new - info_new=change_format_VV (info_list[i]) #visibility vertical + info_new=change_format_VV (info_list[i]) # visibility vertical if info_new!=None: return info_new - info_new=change_format_temp_dew (info_list[i]) #temperature and dewpoint + info_new=change_format_temp_dew (info_list[i]) # temperature and dewpoint if info_new!=None: return info_new - info_new=change_format_QNH (info_list[i]) #QNH, altimeter setting + info_new=change_format_QNH (info_list[i]) # QNH, altimeter setting if info_new!=None: return info_new - info_new=change_format_RSM (info_list[i]) #runway state message + info_new=change_format_RSM (info_list[i]) # runway state message if info_new!=None: return info_new - info_new=change_format_change (info_list, i, met_report_DT) #trend and TAF: changes in weather + info_new=change_format_change (info_list, i, met_report_DT) # trend and TAF: changes in weather if info_new!=None: return info_new - info_new=change_format_validity (info_list[i], met_report_DT) #TAF: validity timespan + info_new=change_format_validity (info_list[i], met_report_DT) # TAF: 
validity timespan if info_new!=None: return info_new - info_new=change_format_TXTN (info_list[i], met_report_DT) #TAF: daily temperature max and min + info_new=change_format_TXTN (info_list[i], met_report_DT) # TAF: daily temperature max and min if info_new!=None: return info_new - info_new=change_format_USA_codes (info_list[i], met_report_DT, station, RWY_DB) #USA weather station machine codes + info_new=change_format_USA_codes (info_list[i], met_report_DT, station, RWY_DB) # USA weather station machine codes if info_new!=None: return info_new - return f" {info_list[i]}" #if format not found: just forward it \ No newline at end of file + return f" {info_list[i]}" # if format not found: just forward it \ No newline at end of file diff --git a/src/change_format_/HGT.py b/src/change_format_/HGT.py index eb2b05f..5aa5416 100644 --- a/src/change_format_/HGT.py +++ b/src/change_format_/HGT.py @@ -1,4 +1,4 @@ -#Copyright (c) 2023 구FS, all rights reserved. Subject to the CC BY-NC-SA 4.0 licence in `licence.md`. +# Copyright (c) 2023 구FS, all rights reserved. Subject to the CC BY-NC-SA 4.0 licence in `licence.md`. from KFSconvert_to_SI import KFSconvert_to_SI from KFSfstr import KFSfstr import re @@ -9,8 +9,8 @@ def change_format_HGT(info: str, station: Station) -> str|None: re_match: re.Match|None - #RMK height + # RMK height re_match=re.search("^(?P[0-9]{1,4})FT$", info) if re_match!=None: - HGT=int(re_match.groupdict()["HGT"])*KFSconvert_to_SI.LENGTH["ft"] #height [m]; tbh don't really know if it's a height, that's why no elevation used to calculate altitude + HGT=int(re_match.groupdict()["HGT"])*KFSconvert_to_SI.LENGTH["ft"] # height [m]; tbh don't really know if it's a height, that's why no elevation used to calculate altitude return f" {KFSfstr.notation_abs(HGT, 2)}m" \ No newline at end of file diff --git a/src/change_format_/QNH.py b/src/change_format_/QNH.py index c798738..b731849 100644 --- a/src/change_format_/QNH.py +++ b/src/change_format_/QNH.py @@ -1,4 +1,4 @@ -#Copyright (c) 2023 구FS, all rights reserved. Subject to the CC BY-NC-SA 4.0 licence in `licence.md`. +# Copyright (c) 2023 구FS, all rights reserved. Subject to the CC BY-NC-SA 4.0 licence in `licence.md`. 
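The change_format dispatcher above simply tries each parser in turn and returns the first non-None result. A simplified, hypothetical sketch of that pattern (single-argument parsers only; the real parsers also take the station, datetimes, and runway database):

from typing import Callable, Optional

def change_format_sketch(info: str, parsers: list[Callable[[str], Optional[str]]]) -> str:
    # try each format parser in order; the first one recognising the token wins
    for parser in parsers:
        info_new=parser(info)
        if info_new is not None:
            return info_new
    return f" {info}"  # format not found: forward unchanged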
import inspect from KFSconvert_to_SI import KFSconvert_to_SI from KFSfstr import KFSfstr @@ -11,7 +11,7 @@ def change_format_QNH(info: str) -> str|None: re_match: re.Match|None - #QNH [100Pa] + # QNH [100Pa] re_match=re.search("^Q(?P[0-9]{4})$", info) if re_match!=None: info_new: str @@ -20,12 +20,12 @@ def change_format_QNH(info: str) -> str|None: info_new=f"Q{KFSfstr.notation_abs(QNH*1e-3, 1, round_static=True, width=5)}kPa" - if ("QNH_min" in WEATHER_MIN and QNH[0-9]{4})$", info) if re_match!=None: info_new: str @@ -34,12 +34,12 @@ def change_format_QNH(info: str) -> str|None: info_new=f"A{KFSfstr.notation_abs(QNH*1e-3, 1, round_static=True, width=5)}kPa" - if ("QNH_min" in WEATHER_MIN and QNH[0-9]{4})INS$", info) if re_match!=None: info_new: str @@ -48,12 +48,12 @@ def change_format_QNH(info: str) -> str|None: info_new=f"A{KFSfstr.notation_abs(QNH*1e-3, 1, round_static=True, width=5)}kPa" - if ("QNH_min" in WEATHER_MIN and QNH[0-9]{3}([.][0-9])?)$", info) if re_match!=None: info_new: str @@ -65,7 +65,7 @@ def change_format_QNH(info: str) -> str|None: return f" {info_new}" - #russia: QFE [mmHg] and [100Pa] + # russia: QFE [mmHg] and [100Pa] re_match=re.search("^QFE[0-9]{3}/(?P[0-9]{4})$", info) if re_match!=None: info_new: str @@ -77,16 +77,16 @@ def change_format_QNH(info: str) -> str|None: return f" {info_new}" - #USA: SLP [???] + # USA: SLP [???] re_match=re.search("^SLP(?P[0-9]{3})$", info) if re_match!=None: info_new: str - SLP: float #SLP [Pa] + SLP: float # SLP [Pa] SLP=float(re_match.groupdict()['SLP'])*10 - if 0<=SLP and SLP<5e3: #if 0kPa<=info<5kPa: +100kPa + if 0<=SLP and SLP<5e3: # if 0kPa<=info<5kPa: +100kPa SLP+=100e3 - elif SLP and SLP<10e3: #if 5kPa<=info<10kPa: +90kPa + elif SLP and SLP<10e3: # if 5kPa<=info<10kPa: +90kPa SLP+=90e3 else: logging.critical(f"SLP info *10 shoud be in [0; 10e3[, but it is {SLP}.") diff --git a/src/change_format_/RVR.py b/src/change_format_/RVR.py index 2ce2a19..eab465c 100644 --- a/src/change_format_/RVR.py +++ b/src/change_format_/RVR.py @@ -1,4 +1,4 @@ -#Copyright (c) 2023 구FS, all rights reserved. Subject to the CC BY-NC-SA 4.0 licence in `licence.md`. +# Copyright (c) 2023 구FS, all rights reserved. Subject to the CC BY-NC-SA 4.0 licence in `licence.md`. 
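To illustrate the QNH conversions handled above: a Q group is whole hectopascals and an A group is inches of mercury in hundredths, both rendered as kPa. A rough, self-contained sketch using 3386.39 Pa/inHg directly instead of the project's KFSconvert_to_SI table:

def qnh_to_kpa(group: str) -> float:
    # "Q1013" -> 1013 hPa -> 101.3 kPa; "A2992" -> 29.92 inHg -> ~101.3 kPa
    if group.startswith("Q"):
        return int(group[1:])*100/1000          # hPa -> Pa -> kPa
    if group.startswith("A"):
        return int(group[1:])/100*3386.39/1000  # inHg -> Pa -> kPa
    raise ValueError(f"not a QNH group: {group}")

print(qnh_to_kpa("Q1013"))  # 101.3
print(qnh_to_kpa("A2992"))  # ~101.3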
import re from KFSconvert_to_SI import KFSconvert_to_SI from KFSfstr import KFSfstr @@ -24,18 +24,18 @@ def change_format_RVR(info: str) -> str|None: } - #RVR [m] + # RVR [m] re_match=re.search("^R(?P[0-3][0-9]([LCR])?)/(?P[PM]?)(?P[0-9]{4})(V(?P[0-9]{4}))?(?P[UND]?)$", info) if re_match!=None: info_new: str plus_minus: str=PLUS_MINUS[re_match.groupdict()["plus_minus"]] runway: str RVR_1: float=float(re_match.groupdict()["RVR_1"]) - RVR_2: float #RVR 2, if no given equal to RVR 1 - trend: str=TREND[re_match.groupdict()["trend"]] #type:ignore + RVR_2: float # RVR 2, if no given equal to RVR 1 + trend: str=TREND[re_match.groupdict()["trend"]] # type:ignore runway=re_match.groupdict()["runway"] - if runway=="88": #if runway 88: all runways + if runway=="88": # if runway 88: all runways runway=":ALL" if re_match.groupdict()["RVR_2"]==None: RVR_2=RVR_1 @@ -48,23 +48,23 @@ def change_format_RVR(info: str) -> str|None: info_new+=f"V{KFSfstr.notation_tech(float(RVR_2), 2)}m" info_new+=trend - if "RVR" in WEATHER_MIN and (RVR_1[0-3][0-9]([LCR])?)/(?P[PM]?)(?P[0-9]{4})(V(?P[0-9]{4}))?FT(/(?P[UND]))?$", info) if re_match!=None: info_new: str plus_minus: str=PLUS_MINUS[re_match.groupdict()["plus_minus"]] runway: str RVR_1: float=float(re_match.groupdict()["RVR_1"])*KFSconvert_to_SI.LENGTH["ft"] - RVR_2: float #RVR 2, if no given equal to RVR 1 + RVR_2: float # RVR 2, if no given equal to RVR 1 trend: str|None runway=re_match.groupdict()["runway"] - if runway=="88": #if runway 88: all runways + if runway=="88": # if runway 88: all runways runway=":ALL" if re_match.groupdict()["RVR_2"]==None: RVR_2=RVR_1 @@ -83,6 +83,6 @@ def change_format_RVR(info: str) -> str|None: info_new+=f"V{KFSfstr.notation_tech(float(RVR_2), 2)}m" info_new+=trend - if "RVR" in WEATHER_MIN and (RVR_1 str|None: re_match: re.Match|None - #TX TN + # TX TN re_match=re.search("^(?PT[XN])(?P[M]?[0-9]{2})/(?P[0-3][0-9])(?P[0-2][0-9])Z$", info) if re_match!=None: event_DT: dt.datetime info_new: str - temperature: str=re_match.groupdict()["temperature"].replace("M", "-") #replace M with proper minus + temperature: str=re_match.groupdict()["temperature"].replace("M", "-") # replace M with proper minus temperature_type: str=f"{re_match.groupdict()['temperature_type']}" - event_DT=dt.datetime(met_report_DT.year, met_report_DT.month, met_report_DT.day, 0, 0, 0, 0, dt.timezone.utc) #event date, initialised with met report datetime - while event_DT.day!=int(re_match.groupdict()["day"]): #as long as days not matching: - event_DT+=dt.timedelta(days=1) #event must be after met report datetime, increment day until same - event_DT+=dt.timedelta(hours=int(re_match.groupdict()["hour"])) #correct day now, add time + event_DT=dt.datetime(met_report_DT.year, met_report_DT.month, met_report_DT.day, 0, 0, 0, 0, dt.timezone.utc) # event date, initialised with met report datetime + while event_DT.day!=int(re_match.groupdict()["day"]): # as long as days not matching: + event_DT+=dt.timedelta(days=1) # event must be after met report datetime, increment day until same + event_DT+=dt.timedelta(hours=int(re_match.groupdict()["hour"])) # correct day now, add time info_new=temperature_type - if met_report_DT.strftime("%Y-%m")==event_DT.strftime("%Y-%m"): #if year and month still same: - info_new+=f"{event_DT.strftime('%dT%H')}" #day, hour - elif met_report_DT.strftime("%Y")==event_DT.strftime("%Y"): #if year still same: - info_new+=f"{event_DT.strftime('%m-%dT%H')}" #month, day, hour - else: #nothing same: - info_new+=f"{event_DT.strftime('%Y-%m-%dT%H')}" #full 
datetime + if met_report_DT.strftime("%Y-%m")==event_DT.strftime("%Y-%m"): # if year and month still same: + info_new+=f"{event_DT.strftime('%dT%H')}" # day, hour + elif met_report_DT.strftime("%Y")==event_DT.strftime("%Y"): # if year still same: + info_new+=f"{event_DT.strftime('%m-%dT%H')}" # month, day, hour + else: # nothing same: + info_new+=f"{event_DT.strftime('%Y-%m-%dT%H')}" # full datetime info_new+=f"/{temperature}°C" - if ("temp_min" in WEATHER_MIN and int(temperature)[0-3][0-9]{2})(?P[0-9]{2})/(?P[0-2][0-9])(?P[0-5][0-9])$", info) if re_match!=None: bold: bool - CWC: list=[] #across all runways, crosswind component + CWC: list=[] # across all runways, crosswind component event_DT: dt.datetime info_new: str - RWY: pandas.DataFrame=RWY_DB[RWY_DB["airport_ident"]==station.ICAO] #in aerodrome all runways - wind_direction: int=int(re_match.groupdict()["wind_direction"])%360 #keep in [0; 360[ + RWY: pandas.DataFrame=RWY_DB[RWY_DB["airport_ident"]==station.ICAO] # in aerodrome all runways + wind_direction: int=int(re_match.groupdict()["wind_direction"])%360 # keep in [0; 360[ wind_speed: float=float(re_match.groupdict()["wind_speed"])*KFSconvert_to_SI.SPEED["kt"] - event_DT=met_report_DT #event date, initialised with met report datetime - while event_DT.minute!=int(re_match.groupdict()["minute"]): #as long as minutes not matching: - event_DT-=dt.timedelta(minutes=1) #event must be before met report datetime, decrement minute until same - while event_DT.hour!=int(re_match.groupdict()["hour"]): #as long as hours not matching: - event_DT-=dt.timedelta(hours=1) #event must be before met report datetime, decrement hour until same + event_DT=met_report_DT # event date, initialised with met report datetime + while event_DT.minute!=int(re_match.groupdict()["minute"]): # as long as minutes not matching: + event_DT-=dt.timedelta(minutes=1) # event must be before met report datetime, decrement minute until same + while event_DT.hour!=int(re_match.groupdict()["hour"]): # as long as hours not matching: + event_DT-=dt.timedelta(hours=1) # event must be before met report datetime, decrement hour until same - if met_report_DT.strftime("%Y-%m-%d")==event_DT.strftime("%Y-%m-%d"): #if date still same: - info_new=f"{event_DT.strftime('%H:%M')}" #time - elif met_report_DT.strftime("%Y-%m")==event_DT.strftime("%Y-%m"): #if year and month still same: - info_new=f"{event_DT.strftime('%dT%H:%M')}" #day, hour, minute - elif met_report_DT.strftime("%Y")==event_DT.strftime("%Y"): #if year still same: - info_new=f"{event_DT.strftime('%m-%dT%H:%M')}" #month, day, hour - else: #nothing same: - info_new=f"{event_DT.strftime('%Y-%m-%dT%H:%M')}" #full datetime + if met_report_DT.strftime("%Y-%m-%d")==event_DT.strftime("%Y-%m-%d"): # if date still same: + info_new=f"{event_DT.strftime('%H:%M')}" # time + elif met_report_DT.strftime("%Y-%m")==event_DT.strftime("%Y-%m"): # if year and month still same: + info_new=f"{event_DT.strftime('%dT%H:%M')}" # day, hour, minute + elif met_report_DT.strftime("%Y")==event_DT.strftime("%Y"): # if year still same: + info_new=f"{event_DT.strftime('%m-%dT%H:%M')}" # month, day, hour + else: # nothing same: + info_new=f"{event_DT.strftime('%Y-%m-%dT%H:%M')}" # full datetime info_new+=f"/{KFSfstr.notation_abs(wind_direction, 0, round_static=True, width=3)}°{KFSfstr.notation_abs(wind_speed, 0, round_static=True, width=2)}m/s" - if RWY.empty==True: #if no runways found: assume direct crosswind + if RWY.empty==True: # if no runways found: assume direct crosswind CWC.append(wind_speed) - 
else: #if runways found: for each runway calculate crosswind components - CWC+=abs(numpy.sin(numpy.radians(wind_direction-RWY["le_heading_degT"]))*wind_speed).dropna().tolist() #sin(direction difference)*wind speed, abs, remove NaN, convert to list #type:ignore + else: # if runways found: for each runway calculate crosswind components + CWC+=abs(numpy.sin(numpy.radians(wind_direction-RWY["le_heading_degT"]))*wind_speed).dropna().tolist() # sin(direction difference)*wind speed, abs, remove NaN, convert to list # type:ignore for i in range(len(CWC)): - if CWC[i]<=WEATHER_MIN["CWC"]: #if at least 1 CWC below maximum: - bold=False #landable + if CWC[i]<=WEATHER_MIN["CWC"]: # if at least 1 CWC below maximum: + bold=False # landable break - else: #if all CWC above maximum: + else: # if all CWC above maximum: bold=True - if WEATHER_MIN["wind"][+-]?([A-Z][A-Z])+)(?P[BE])(?P([0-9]{2})?)(?P[0-9]{2}))$", info) if re_match!=None: change_type: str event_DT: dt.datetime info_new: str - event_DT=met_report_DT #event date, initialised with met report datetime - while event_DT.strftime("%M")!=re_match.groupdict()["minute"]: #as long as minutes not matching: - event_DT-=dt.timedelta(minutes=1) #event must be before met report datetime, decrement minute until same + event_DT=met_report_DT # event date, initialised with met report datetime + while event_DT.strftime("%M")!=re_match.groupdict()["minute"]: # as long as minutes not matching: + event_DT-=dt.timedelta(minutes=1) # event must be before met report datetime, decrement minute until same if re_match.groupdict()["hour"]!="": - while event_DT.strftime("%H")!=re_match.groupdict()["hour"]: #as long as hours not matching: - event_DT-=dt.timedelta(hours=1) #event must be before met report datetime, decrement hour until same + while event_DT.strftime("%H")!=re_match.groupdict()["hour"]: # as long as hours not matching: + event_DT-=dt.timedelta(hours=1) # event must be before met report datetime, decrement hour until same change_type=re_match.groupdict()["change_type"] @@ -130,28 +130,28 @@ def change_format_USA_codes(info: str, met_report_DT: dt.datetime, station: Stat logging.critical(f"Weather change type is neither \"B\" (began), nor \"E\" (ended), but \"{change_type}\".") raise RuntimeError(f"Error in {change_format_USA_codes.__name__}{inspect.signature(change_format_USA_codes)}: Weather change type is neither \"B\" (began), nor \"E\" (ended), but \"{change_type}\".") - if met_report_DT.strftime("%Y-%m-%d")==event_DT.strftime("%Y-%m-%d"): #if date still same: - info_new+=f"{event_DT.strftime('%H:%M')}" #time - elif met_report_DT.strftime("%Y-%m")==event_DT.strftime("%Y-%m"): #if year and month still same: - info_new+=f"{event_DT.strftime('%dT%H:%M')}" #day, hour, minute - elif met_report_DT.strftime("%Y")==event_DT.strftime("%Y"): #if year still same: - info_new+=f"{event_DT.strftime('%m-%dT%H:%M')}" #month, day, hour - else: #nothing same: - info_new+=f"{event_DT.strftime('%Y-%m-%dT%H:%M')}" #full datetime + if met_report_DT.strftime("%Y-%m-%d")==event_DT.strftime("%Y-%m-%d"): # if date still same: + info_new+=f"{event_DT.strftime('%H:%M')}" # time + elif met_report_DT.strftime("%Y-%m")==event_DT.strftime("%Y-%m"): # if year and month still same: + info_new+=f"{event_DT.strftime('%dT%H:%M')}" # day, hour, minute + elif met_report_DT.strftime("%Y")==event_DT.strftime("%Y"): # if year still same: + info_new+=f"{event_DT.strftime('%m-%dT%H:%M')}" # month, day, hour + else: # nothing same: + info_new+=f"{event_DT.strftime('%Y-%m-%dT%H:%M')}" # full datetime 
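The crosswind-component logic above reduces to |sin(wind direction − runway heading)| · wind speed per runway end, taken across all runways of the aerodrome. A small numeric sketch with hypothetical runway headings (the le_heading_degT column name comes from the OurAirports runways CSV used in this diff):

import numpy
import pandas

wind_direction=240   # [degT]
wind_speed=8.0       # [m/s]
RWY=pandas.DataFrame({"le_heading_degT": [270.0, 90.0]})  # hypothetical runway headings

# |sin(direction difference)| * wind speed, NaN rows dropped, as plain list
CWC=abs(numpy.sin(numpy.radians(wind_direction-RWY["le_heading_degT"]))*wind_speed).dropna().tolist()
print(CWC)  # ~[4.0, 4.0]: 30° off each runway axis -> half the wind speed as crosswind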
info_new+=f"/{re_match.groupdict()['weather']}" return info_new - #USA code PWE: precipitation water equivalent + # USA code PWE: precipitation water equivalent re_match=re.search("^P(?P[0-9]{4})$", info) if re_match!=None: PWE=int(re_match.groupdict()["PWE"])/100*KFSconvert_to_SI.LENGTH["in"] return f" PWE/{KFSfstr.notation_tech(PWE, 2)}m" - #USA code T: temperature and dewpoint + # USA code T: temperature and dewpoint re_match=re.search("^T(?P[0-1][0-9]{3})(?P[0-1][0-9]{3})$", info) if re_match!=None: dewpoint: str @@ -159,17 +159,17 @@ def change_format_USA_codes(info: str, met_report_DT: dt.datetime, station: Stat temperature: str temperature=re_match.groupdict()["temperature"] - temperature=f"{temperature[:3]},{temperature[3:]}" #add decimal separator - if temperature[0]=="0": #if sign positive: cut + temperature=f"{temperature[:3]},{temperature[3:]}" # add decimal separator + if temperature[0]=="0": # if sign positive: cut temperature=temperature[1:] - elif temperature[0]=="1": #if sign negative: replace + elif temperature[0]=="1": # if sign negative: replace temperature=temperature.replace("1", "-", 1) dewpoint=re_match.groupdict()["dewpoint"] dewpoint=f"{dewpoint[:3]},{dewpoint[3:]}" - if dewpoint[0]=="0": #if sign positive: cut + if dewpoint[0]=="0": # if sign positive: cut dewpoint=dewpoint[1:] - elif dewpoint[0]=="1": #if sign negative: replace + elif dewpoint[0]=="1": # if sign negative: replace dewpoint=dewpoint.replace("1", "-", 1) @@ -178,19 +178,19 @@ def change_format_USA_codes(info: str, met_report_DT: dt.datetime, station: Stat else: info_new=f"{temperature}°C/{dewpoint}°C" - if ("temp_min" in WEATHER_MIN and float(temperature.replace(",", "."))[1-2])(?P[0-1][0-9][0-9][0-9])$", info) if re_match!=None: bold: bool info_new: str="" temperature_type: str=re_match.groupdict()["temperature_type"] - if temperature_type=="1": #replace temperature type code with readable abbreviation + if temperature_type=="1": # replace temperature type code with readable abbreviation temperature_type="TX6h" elif temperature_type=="2": temperature_type="TN6h" @@ -199,21 +199,21 @@ def change_format_USA_codes(info: str, met_report_DT: dt.datetime, station: Stat raise RuntimeError(f"Error in {change_format_USA_codes.__name__}{inspect.signature(change_format_USA_codes)}: Temperature type shoud be in [1; 2], but it is {temperature_type}.") temperature=re_match.groupdict()["temperature"] - temperature=f"{temperature[:3]},{temperature[3:]}" #add decimal separator - if temperature[0]=="0": #if sign positive: cut + temperature=f"{temperature[:3]},{temperature[3:]}" # add decimal separator + if temperature[0]=="0": # if sign positive: cut temperature=temperature[1:] - elif temperature[0]=="1": #if sign negative: replace + elif temperature[0]=="1": # if sign negative: replace temperature=temperature.replace("1", "-", 1) info_new=f"{temperature_type}/{temperature}°C" - if ("temp_min" in WEATHER_MIN and float(temperature.replace(",", "."))[0-9]{3})$", info) if re_match!=None: snow_depth: float=int(re_match.groupdict()["snow_depth"])*KFSconvert_to_SI.LENGTH["in"] @@ -227,7 +227,7 @@ def change_format_USA_codes(info: str, met_report_DT: dt.datetime, station: Stat return f" {info_new}" - #USA code 4: 86ks (1d) temperature max. and min. + # USA code 4: 86ks (1d) temperature max. and min. 
re_match=re.search("^4(?P[0-1][0-9]{3})(?P[0-1][0-9]{3})$", info) if re_match!=None: info_new: str="" @@ -235,26 +235,26 @@ def change_format_USA_codes(info: str, met_report_DT: dt.datetime, station: Stat temperature_min: str temperature_max=re_match.groupdict()["temperature_max"] - temperature_max=f"{temperature_max[:3]},{temperature_max[3:]}" #add decimal separator - if temperature_max[0]=="0": #if sign positive: cut + temperature_max=f"{temperature_max[:3]},{temperature_max[3:]}" # add decimal separator + if temperature_max[0]=="0": # if sign positive: cut temperature_max=temperature_max[1:] - elif temperature_max[0]=="1": #if sign negative: replace + elif temperature_max[0]=="1": # if sign negative: replace temperature_max=temperature_max.replace("1", "-", 1) temperature_min=re_match.groupdict()["temperature_min"] temperature_min=f"{temperature_min[:3]},{temperature_min[3:]}" - if temperature_min[0]=="0": #if sign positive: cut + if temperature_min[0]=="0": # if sign positive: cut temperature_min=temperature_min[1:] - elif temperature_min[0]=="1": #if sign negative: replace + elif temperature_min[0]=="1": # if sign negative: replace temperature_min=temperature_min.replace("1", "-", 1) - if ("temp_min" in WEATHER_MIN and int(temperature_max)[0-8])(?P[0-9]{3})$", info) if re_match!=None: info_new: str @@ -270,7 +270,7 @@ def change_format_USA_codes(info: str, met_report_DT: dt.datetime, station: Stat trend_direction: str trend_direction=re_match.groupdict()["trend_direction"] - if 0<=int(trend_direction) and int(trend_direction)<=3: #convert trend direction to sign + if 0<=int(trend_direction) and int(trend_direction)<=3: # convert trend direction to sign trend_direction="+" elif int(trend_direction)==4: trend_direction="" @@ -286,7 +286,7 @@ def change_format_USA_codes(info: str, met_report_DT: dt.datetime, station: Stat return f" {info_new}" - #USA code 6: in 11ks (3h) or 22ks (6h) precipitation amount + # USA code 6: in 11ks (3h) or 22ks (6h) precipitation amount re_match=re.search("^6(?P[0-9]{4})$", info) if re_match!=None: precipitation: float=float(re_match.groupdict()["precipitation"])/100*KFSconvert_to_SI.LENGTH["in"] @@ -295,7 +295,7 @@ def change_format_USA_codes(info: str, met_report_DT: dt.datetime, station: Stat return f" PCPN(3h,6h)/{KFSfstr.notation_tech(precipitation, 2)}m" - #USA code 7: in 86ks (24h) precipitation amount + # USA code 7: in 86ks (24h) precipitation amount re_match=re.search("^7(?P[0-9]{4})$", info) if re_match!=None: precipitation: float=float(re_match.groupdict()["precipitation"])/100*KFSconvert_to_SI.LENGTH["in"] @@ -304,7 +304,7 @@ def change_format_USA_codes(info: str, met_report_DT: dt.datetime, station: Stat return f" PCPN24h/{KFSfstr.notation_tech(precipitation, 2)}m" - #USA code 8/: cloud type + # USA code 8/: cloud type re_match=re.search("^8/(?P[0-9])(?P[0-9/])(?P[0-9/])$", info) if re_match!=None: cloud_type_high: str =WMO_CLOUD_TYPE_HIGH [re_match.groupdict()["cloud_type_high"]] @@ -336,14 +336,14 @@ def change_format_USA_codes(info: str, met_report_DT: dt.datetime, station: Stat return f" {info_new}" - #USA code 98: sunshine duration + # USA code 98: sunshine duration re_match=re.search("^98(?P[0-9]{3})$", info) if re_match!=None: sunshine_duration: int=int(re_match.groupdict()["sunshine_duration"])*KFSconvert_to_SI.TIME["min"] return f" SUN/{KFSfstr.notation_tech(sunshine_duration, 2)}s" - #USA code 931: in 22ks (6h) snowfall + # USA code 931: in 22ks (6h) snowfall re_match=re.search("^931(?P[0-9]{3})$", info) if re_match!=None: snowfall: 
float=int(re_match.groupdict()["snowfall"])/10*KFSconvert_to_SI.LENGTH["in"] @@ -356,7 +356,7 @@ def change_format_USA_codes(info: str, met_report_DT: dt.datetime, station: Stat return f" {info_new}" - #USA code 933: snow liquid water equivalent (SWE) + # USA code 933: snow liquid water equivalent (SWE) re_match=re.search("^933(?P[0-9]{3})$", info) if re_match!=None: SWE: float=int(re_match.groupdict()["SWE"])/10*KFSconvert_to_SI.LENGTH["in"] diff --git a/src/change_format_/change.py b/src/change_format_/change.py index 4f9ebba..3d364da 100644 --- a/src/change_format_/change.py +++ b/src/change_format_/change.py @@ -1,4 +1,4 @@ -#Copyright (c) 2023 구FS, all rights reserved. Subject to the CC BY-NC-SA 4.0 licence in `licence.md`. +# Copyright (c) 2023 구FS, all rights reserved. Subject to the CC BY-NC-SA 4.0 licence in `licence.md`. import datetime as dt from KFSfstr import KFSfstr import re @@ -8,63 +8,63 @@ def change_format_change(info_list: list[str], i: int, met_report_DT: dt.datetim re_match: re.Match|None - #change + # change re_match=re.search("^(NOSIG|BECMG|TEMPO)$", info_list[i]) if re_match!=None: info_new: str - if i==0: #if info[0]: - info_new=f"{info_list[i]}" #just forward - elif re.search("^PROB[0-9]{2}$", info_list[i-1])!=None: #if not info[0] and info previous PROBxx: - info_new=f" {info_list[i]}" #space then forward - else: #else: - info_new=f"\n{info_list[i]}" #change info new, linebreak then forward + if i==0: # if info[0]: + info_new=f"{info_list[i]}" # just forward + elif re.search("^PROB[0-9]{2}$", info_list[i-1])!=None: # if not info[0] and info previous PROBxx: + info_new=f" {info_list[i]}" # space then forward + else: # else: + info_new=f"\n{info_list[i]}" # change info new, linebreak then forward return info_new - #probability + # probability re_match=re.search("^PROB(?P[0-9]{2})$", info_list[i]) if re_match!=None: - info_new=f"PROB{KFSfstr.notation_abs(float(re_match.groupdict()['probability'])/100, 2, round_static=True, trailing_zeros=False)}" #change info new, linebreak then forward + info_new=f"PROB{KFSfstr.notation_abs(float(re_match.groupdict()['probability'])/100, 2, round_static=True, trailing_zeros=False)}" # change info new, linebreak then forward - if i==0: #if info[0]: - info_new=f"{info_new}" #just forward - elif re.search("^PROB[0-9]{2}$", info_list[i-1])!=None: #if not info[0] and info previous PROBxx: - info_new=f" {info_new}" #space then forward - else: #else: - info_new=f"\n{info_new}" #change info new, linebreak then forward + if i==0: # if info[0]: + info_new=f"{info_new}" # just forward + elif re.search("^PROB[0-9]{2}$", info_list[i-1])!=None: # if not info[0] and info previous PROBxx: + info_new=f" {info_new}" # space then forward + else: # else: + info_new=f"\n{info_new}" # change info new, linebreak then forward return info_new - #from, until, at + # from, until, at re_match=re.search("^(?PFM|TL|AT)(?P([0-3][0-9])?)(?P[0-2][0-9])(?P[0-5][0-9])$", info_list[i]) if re_match!=None: event_DT: dt.datetime info_new: str - event_DT=dt.datetime(met_report_DT.year, met_report_DT.month, met_report_DT.day, 0, 0, 0, 0, dt.timezone.utc) #event date, initialised with met report datetime - if re_match.groupdict()["day"]!="": #if event day given: - while event_DT.day!=int(re_match.groupdict()["day"]): #as long as days not matching: - event_DT+=dt.timedelta(days=1) #event must be after met report datetime, increment day until same - event_DT+=dt.timedelta(hours=int(re_match.groupdict()["hour"]), minutes=int(re_match.groupdict()["minute"])) #add time - if 
event_DT str|None: re_match: re.Match|None - #clouds + # clouds re_match=re.search("^(?PFEW|SCT|BKN|OVC)(?P[0-9]{3})(?P([A-Z/])*?)?$", info) if re_match!=None: - cloud_ALT: float #altitude [m] - cloud_coverage: str=re_match.groupdict()["cloud_coverage"] #cloud coverage [1/8] - cloud_HGT: float=int(re_match.groupdict()["cloud_HGT"])*100*KFSconvert_to_SI.LENGTH["ft"] #height [m] - cloud_type: str=re_match.groupdict()["cloud_type"] #append cloud type, usually TCU or CB + cloud_ALT: float # altitude [m] + cloud_coverage: str=re_match.groupdict()["cloud_coverage"] # cloud coverage [1/8] + cloud_HGT: float=int(re_match.groupdict()["cloud_HGT"])*100*KFSconvert_to_SI.LENGTH["ft"] # height [m] + cloud_type: str=re_match.groupdict()["cloud_type"] # append cloud type, usually TCU or CB info_new: str - if station.elev==None: #if aerodrome elevation unknown: - cloud_ALT=cloud_HGT #assume elevation=0m - else: #if aerodrome elevation known: - cloud_ALT=cloud_HGT+station.elev #calculate altitude normally + if station.elev==None: # if aerodrome elevation unknown: + cloud_ALT=cloud_HGT # assume elevation=0m + else: # if aerodrome elevation known: + cloud_ALT=cloud_HGT+station.elev # calculate altitude normally info_new=cloud_coverage - if cloud_HGT==0: #if HGT==0m: - info_new+=f"{KFSfstr.notation_abs(cloud_ALT, 0, round_static=True)}m" #cloud touches ground, ALT==elevation, round altitude and height to 1m - if KFSfstr.notation_abs(cloud_ALT, 0, round_static=True)!=KFSfstr.notation_abs(cloud_HGT, 0, round_static=True): #if height!=altitude: - info_new+=f"|{KFSfstr.notation_abs(cloud_HGT, 0, round_static=True)}m" #append height - else: #if HGT!=0: - info_new+=f"{KFSfstr.notation_abs(cloud_ALT, 2)}m" #round altitude to 2 signifcant digits - if KFSfstr.notation_abs(cloud_ALT, 2)!=KFSfstr.notation_abs(cloud_HGT, 2): #if height!=altitude: - info_new+=f"|{KFSfstr.notation_abs(cloud_HGT, 2)}m" #append height - - if cloud_type!="": #if cloud type given: append + if cloud_HGT==0: # if HGT==0m: + info_new+=f"{KFSfstr.notation_abs(cloud_ALT, 0, round_static=True)}m" # cloud touches ground, ALT==elevation, round altitude and height to 1m + if KFSfstr.notation_abs(cloud_ALT, 0, round_static=True)!=KFSfstr.notation_abs(cloud_HGT, 0, round_static=True): # if height!=altitude: + info_new+=f"|{KFSfstr.notation_abs(cloud_HGT, 0, round_static=True)}m" # append height + else: # if HGT!=0: + info_new+=f"{KFSfstr.notation_abs(cloud_ALT, 2)}m" # round altitude to 2 signifcant digits + if KFSfstr.notation_abs(cloud_ALT, 2)!=KFSfstr.notation_abs(cloud_HGT, 2): # if height!=altitude: + info_new+=f"|{KFSfstr.notation_abs(cloud_HGT, 2)}m" # append height + + if cloud_type!="": # if cloud type given: append info_new+=f"|{cloud_type}" - if cloud_type=="CB": #if CB: mark + if cloud_type=="CB": # if CB: mark info_new=f"**{info_new}**" - elif "ceiling" in WEATHER_MIN and cloud_HGT[0-3][0-9])(?P[0-2][0-9])(?P[0-5][0-9])Z$", info) if re_match!=None: - event_DT: dt.datetime #datetime of THIS info; should be same as met_report_DT because group should only exist once, but you never know. That's why not just using already parsed met_report_DT + event_DT: dt.datetime # datetime of THIS info; should be same as met_report_DT because group should only exist once, but you never know. 
That's why not just using already parsed met_report_DT info_new: str timespan_published: dt.timedelta - event_DT=dt.datetime(met_report_DT.year, met_report_DT.month, met_report_DT.day, 0, 0, 0, 0, dt.timezone.utc) #event date, initialised with met report datetime - while event_DT.day!=int(re_match.groupdict()["day"]): #as long as days not matching: - event_DT+=dt.timedelta(days=1) #event must be after met report datetime, increment day until same - event_DT+=dt.timedelta(hours=int(re_match.groupdict()["hour"]), minutes=int(re_match.groupdict()["minute"])) #correct day now, add time + event_DT=dt.datetime(met_report_DT.year, met_report_DT.month, met_report_DT.day, 0, 0, 0, 0, dt.timezone.utc) # event date, initialised with met report datetime + while event_DT.day!=int(re_match.groupdict()["day"]): # as long as days not matching: + event_DT+=dt.timedelta(days=1) # event must be after met report datetime, increment day until same + event_DT+=dt.timedelta(hours=int(re_match.groupdict()["hour"]), minutes=int(re_match.groupdict()["minute"])) # correct day now, add time info_new=f"{event_DT.strftime('%Y-%m-%dT%H:%M')}" - if server.force_print==True: #if force print because no subscription: - timespan_published=now_DT-event_DT #append timespan published + if server.force_print==True: # if force print because no subscription: + timespan_published=now_DT-event_DT # append timespan published info_new+=f" ({KFSfstr.notation_tech(timespan_published.total_seconds(), 2)}s ago)" return f" {info_new}" \ No newline at end of file diff --git a/src/change_format_/runway_state_message.py b/src/change_format_/runway_state_message.py index ff7b34c..394fbb9 100644 --- a/src/change_format_/runway_state_message.py +++ b/src/change_format_/runway_state_message.py @@ -1,5 +1,5 @@ -#Copyright (c) 2023 구FS, all rights reserved. Subject to the CC BY-NC-SA 4.0 licence in `licence.md`. -import re #Regular Expressions +# Copyright (c) 2023 구FS, all rights reserved. Subject to the CC BY-NC-SA 4.0 licence in `licence.md`. +import re def change_format_RSM(info: str) -> str|None: @@ -60,7 +60,7 @@ def change_format_RSM(info: str) -> str|None: BRAKING[f"{b:02}"]=("GOOD", False) - #runway state message + # runway state message re_match=re.search("^R(?P[0-3][0-9]([LCR])?)/(?P[0-9]|/)(?P[0-9]|/)(?P[0-9][0-9]|//)(?P[0-9][0-9]|//)$", info) if re_match!=None: braking: str @@ -74,7 +74,7 @@ def change_format_RSM(info: str) -> str|None: info_new: str runway=re_match.groupdict()["runway"] - if runway=="88": #if runway 88: all runways + if runway=="88": # if runway 88: all runways runway=":ALL" try: deposit, deposit_bold=DEPOSIT[re_match.groupdict()["deposit"]] @@ -101,7 +101,7 @@ def change_format_RSM(info: str) -> str|None: return f"\n{info_new}" - #runway cleared + # runway cleared re_match=re.search("^R([0-3][0-9]|88)([LCR]|)/CLRD//$", info) if re_match!=None: info_new: str @@ -117,6 +117,6 @@ def change_format_RSM(info: str) -> str|None: return f"\n{info_new}" - #aerodrome snow closed + # aerodrome snow closed if info=="R/SNOCLO": return f"\n**{info}**" \ No newline at end of file diff --git a/src/change_format_/temp_dew.py b/src/change_format_/temp_dew.py index f63553e..db642ec 100644 --- a/src/change_format_/temp_dew.py +++ b/src/change_format_/temp_dew.py @@ -1,4 +1,4 @@ -#Copyright (c) 2023 구FS, all rights reserved. Subject to the CC BY-NC-SA 4.0 licence in `licence.md`. +# Copyright (c) 2023 구FS, all rights reserved. Subject to the CC BY-NC-SA 4.0 licence in `licence.md`. 
import re from weather_minimums import WEATHER_MIN @@ -7,12 +7,12 @@ def change_format_temp_dew(info: str) -> str|None: re_match: re.Match|None - #temperature and dewpoint + # temperature and dewpoint re_match=re.search("^(?P[M]?[0-9]{2})/(?P([M]?[0-9]{2})?)$", info) if re_match!=None: dewpoint: str=re_match.groupdict()["dewpoint"].replace("M", "-") info_new: str - temperature: str=re_match.groupdict()["temperature"].replace("M", "-") #replace M with proper minus + temperature: str=re_match.groupdict()["temperature"].replace("M", "-") # replace M with proper minus if dewpoint=="": @@ -20,6 +20,6 @@ def change_format_temp_dew(info: str) -> str|None: else: info_new=f"{temperature}°C/{dewpoint}°C" - if ("temp_min" in WEATHER_MIN and int(temperature) str|None: re_match: re.Match|None - #TAF validity + # TAF validity re_match=re.search("^(?P[0-3][0-9])(?P[0-2][0-9])/(?P[0-3][0-9])(?P[0-2][0-9])$", info) if re_match!=None: start_DT: dt.datetime end_DT: dt.datetime info_new: str - start_DT=dt.datetime(met_report_DT.year, met_report_DT.month, met_report_DT.day, 0, 0, 0, 0, dt.timezone.utc) #start datetime, initialised with met report date - while start_DT.strftime("%d")!=re_match.groupdict()["start_day"]: #as long as days not matching: - start_DT+=dt.timedelta(days=1) #start must be after met report datetime, increment day until same - start_DT+=dt.timedelta(hours=int(re_match.groupdict()["start_hour"])) #correct day now, add time - - end_DT=dt.datetime(start_DT.year, start_DT.month, start_DT.day, 0, 0, 0, 0, dt.timezone.utc) #end datetime, initialised with start date - while end_DT.strftime("%d")!=re_match.groupdict()["end_day"]: #as long as days not matching: - end_DT+=dt.timedelta(days=1) #end must be after start datetime, increment day until same - end_DT+=dt.timedelta(hours=int(re_match.groupdict()["end_hour"])) #correct day now, add time - - - if met_report_DT.strftime("%Y-%m")==start_DT.strftime("%Y-%m"): #if year and month still same: - info_new=f"{start_DT.strftime('%dT%H')}/" #day, hour - elif met_report_DT.strftime("%Y")==start_DT.strftime("%Y"): #if year still same: - info_new=f"{start_DT.strftime('%m-%dT%H')}/" #month, day, hour - else: #nothing same: - info_new=f"{start_DT.strftime('%Y-%m-%dT%H')}/" #full datetime - - if start_DT.strftime("%Y-%m-%d")==end_DT.strftime("%Y-%m-%d"): #if date still same: - info_new+=f"{end_DT.strftime('%H')}" #hour - elif start_DT.strftime("%Y-%m")==end_DT.strftime("%Y-%m"): #if year and month still same: - info_new+=f"{end_DT.strftime('%dT%H')}" #day, hour - elif start_DT.strftime("%Y")==end_DT.strftime("%Y"): #if year still same: - info_new+=f"{end_DT.strftime('%m-%dT%H')}" #month, day, hour - else: #nothing same - info_new+=f"{end_DT.strftime('%Y-%m-%dT%H')}" #full datetime + start_DT=dt.datetime(met_report_DT.year, met_report_DT.month, met_report_DT.day, 0, 0, 0, 0, dt.timezone.utc) # start datetime, initialised with met report date + while start_DT.strftime("%d")!=re_match.groupdict()["start_day"]: # as long as days not matching: + start_DT+=dt.timedelta(days=1) # start must be after met report datetime, increment day until same + start_DT+=dt.timedelta(hours=int(re_match.groupdict()["start_hour"])) # correct day now, add time + + end_DT=dt.datetime(start_DT.year, start_DT.month, start_DT.day, 0, 0, 0, 0, dt.timezone.utc) # end datetime, initialised with start date + while end_DT.strftime("%d")!=re_match.groupdict()["end_day"]: # as long as days not matching: + end_DT+=dt.timedelta(days=1) # end must be after start datetime, increment day until same 
+ end_DT+=dt.timedelta(hours=int(re_match.groupdict()["end_hour"])) # correct day now, add time + + + if met_report_DT.strftime("%Y-%m")==start_DT.strftime("%Y-%m"): # if year and month still same: + info_new=f"{start_DT.strftime('%dT%H')}/" # day, hour + elif met_report_DT.strftime("%Y")==start_DT.strftime("%Y"): # if year still same: + info_new=f"{start_DT.strftime('%m-%dT%H')}/" # month, day, hour + else: # nothing same: + info_new=f"{start_DT.strftime('%Y-%m-%dT%H')}/" # full datetime + + if start_DT.strftime("%Y-%m-%d")==end_DT.strftime("%Y-%m-%d"): # if date still same: + info_new+=f"{end_DT.strftime('%H')}" # hour + elif start_DT.strftime("%Y-%m")==end_DT.strftime("%Y-%m"): # if year and month still same: + info_new+=f"{end_DT.strftime('%dT%H')}" # day, hour + elif start_DT.strftime("%Y")==end_DT.strftime("%Y"): # if year still same: + info_new+=f"{end_DT.strftime('%m-%dT%H')}" # month, day, hour + else: # nothing same + info_new+=f"{end_DT.strftime('%Y-%m-%dT%H')}" # full datetime return f" {info_new}" \ No newline at end of file diff --git a/src/change_format_/visibility.py b/src/change_format_/visibility.py index 403cac2..f6addd5 100644 --- a/src/change_format_/visibility.py +++ b/src/change_format_/visibility.py @@ -1,4 +1,4 @@ -#Copyright (c) 2023 구FS, all rights reserved. Subject to the CC BY-NC-SA 4.0 licence in `licence.md`. +# Copyright (c) 2023 구FS, all rights reserved. Subject to the CC BY-NC-SA 4.0 licence in `licence.md`. from KFSconvert_to_SI import KFSconvert_to_SI from KFSfstr import KFSfstr import re @@ -18,13 +18,13 @@ def change_format_vis(info_list: list, i: int) -> str|None: } - #visibility 10km+ + # visibility 10km+ re_match=re.search("^9999$", info_list[i]) if re_match!=None: return f" 10km+" - #visibility normal + # visibility normal re_match=re.search("^(?P[PM]?)(?P[0-9]{4})(?P(N|NE|E|SE|S|SW|W|NW)?)$", info_list[i]) if re_match!=None: direction: str=re_match.groupdict()["direction"] @@ -33,22 +33,22 @@ def change_format_vis(info_list: list, i: int) -> str|None: visibility: float=int(re_match.groupdict()["visibility"]) - if visibility<5e3: #if visbility<5km: round to 2 significant digits + if visibility<5e3: # if visbility<5km: round to 2 significant digits info_new=f"{KFSfstr.notation_tech(visibility, 2)}m" - else: #if 5km<=visibility: round to whole km + else: # if 5km<=visibility: round to whole km info_new=f"{KFSfstr.notation_tech(visibility, -3, round_static=True)}m" - info_new+=plus_minus #append plus or minus + info_new+=plus_minus # append plus or minus - if direction!="": #if direction given: append + if direction!="": # if direction given: append info_new+=f"/{direction}" - if "vis" in WEATHER_MIN and visibility[0-9]{1,2})KM$", info_list[i]) if re_match!=None: info_new: str @@ -57,12 +57,12 @@ def change_format_vis(info_list: list, i: int) -> str|None: info_new=f"{KFSfstr.notation_tech(visibility, -3, round_static=True)}m" - if "vis" in WEATHER_MIN and visibility[PM]?)(?P[0-9]{1,2})SM$", info_list[i]) if re_match!=None: info_new: str @@ -72,16 +72,16 @@ def change_format_vis(info_list: list, i: int) -> str|None: info_new=f"{KFSfstr.notation_tech(visibility, 2)}m{plus_minus}" - if "USA_vis" in WEATHER_MIN and visibility[0-9])$", info_list[i]) #single digit a as part of compound fraction a+b/c + # USA visibility a+b/c [SM] + re_match_1=re.search("^(?P[0-9])$", info_list[i]) # single digit a as part of compound fraction a+b/c try: - re_match_2=re.search("^(?P[0-9])/(?P[0-9])SM$", info_list[i+1]) #is element next rest of fraction b/c? 
- except IndexError: #if exception because element next does not exist: default none + re_match_2=re.search("^(?P[0-9])/(?P[0-9])SM$", info_list[i+1]) # is element next rest of fraction b/c? + except IndexError: # if exception because element next does not exist: default none re_match_2=None if re_match_1!=None and re_match_2!=None: vis_A: int=int(re_match_1.groupdict()["vis_A"]) @@ -89,12 +89,12 @@ def change_format_vis(info_list: list, i: int) -> str|None: vis_C: int=int(re_match_2.groupdict()["vis_C"]) - info_list[i+1]=f"{vis_A*vis_C+vis_B}/{vis_C}SM" #convert to single fraction a+b/c=(a*c+b)/c - info_list.pop(i) #remove single digit a - #do not return and convert single fraction (a*c+b)/c in next if statement + info_list[i+1]=f"{vis_A*vis_C+vis_B}/{vis_C}SM" # convert to single fraction a+b/c=(a*c+b)/c + info_list.pop(i) # remove single digit a + # do not return and convert single fraction (a*c+b)/c in next if statement - #USA visibility b/c [SM] + # USA visibility b/c [SM] re_match=re.search("^(?P[PM]?)(?P[0-9]{1,2})/(?P[0-9]{1,2})SM$", info_list[i]) if re_match!=None: info_new: str @@ -104,6 +104,6 @@ def change_format_vis(info_list: list, i: int) -> str|None: info_new=f"{KFSfstr.notation_tech(visibility, 2)}m{plus_minus}" - if "USA_vis" in WEATHER_MIN and visibility str|None: re_match: re.Match|None - #VV [100ft] + # VV [100ft] re_match=re.search("^VV(?P[0-9]{3})$", info) if re_match!=None: info_new: str @@ -18,12 +18,12 @@ def change_format_VV(info: str) -> str|None: info_new=f"VV{KFSfstr.notation_abs(VV, 2)}m" - if "vis" in WEATHER_MIN and VV[0-9]{3})$", info) if re_match!=None: info_new: str @@ -32,6 +32,6 @@ def change_format_VV(info: str) -> str|None: info_new=f"QBB{KFSfstr.notation_abs(QBB, 2)}m" - if "vis" in WEATHER_MIN and QBB str|None: re_match: re.Match|None - #weather + # weather re_match=re.search("^(?P[+-]?)(?P([A-Z]{2})+)$", info_list[i]) if re_match!=None: bold: bool @@ -15,17 +15,17 @@ def change_format_weather(info_list: list, i: int) -> str|None: plus_minus: str=re_match.groupdict()["plus_minus"] weather: str =re_match.groupdict()["weather"] - if i==0: #if aerodrome identifier: just forward, don't mark; for example EDGS would be marked all the time otherwise because of GS + if i==0: # if aerodrome identifier: just forward, don't mark; for example EDGS would be marked all the time otherwise because of GS return f" {info_list[i]}" info_new=f"{plus_minus}{weather}" for j in range(0, len(weather), 2): - if "weather_forbidden" in WEATHER_MIN and re.search(WEATHER_MIN["weather_forbidden"], weather[j:j+2])!=None: #do not fly during this weather; visibility, icing, storms... + if "weather_forbidden" in WEATHER_MIN and re.search(WEATHER_MIN["weather_forbidden"], weather[j:j+2])!=None: # do not fly during this weather; visibility, icing, storms... bold=True break - else: #no problematic weather found + else: # no problematic weather found bold=False if bold==True: diff --git a/src/change_format_/wind.py b/src/change_format_/wind.py index f81a9d7..3ac2e4b 100644 --- a/src/change_format_/wind.py +++ b/src/change_format_/wind.py @@ -1,7 +1,7 @@ -#Copyright (c) 2023 구FS, all rights reserved. Subject to the CC BY-NC-SA 4.0 licence in `licence.md`. +# Copyright (c) 2023 구FS, all rights reserved. Subject to the CC BY-NC-SA 4.0 licence in `licence.md`. 
from KFSconvert_to_SI import KFSconvert_to_SI from KFSfstr import KFSfstr -import numpy #for crosswind component, numpy unctions because of DataFrames input +import numpy # for crosswind component, numpy functions because of DataFrames input import pandas import re from Station import Station @@ -12,21 +12,21 @@ def change_format_wind(info: str, station: Station, RWY_DB: pandas.DataFrame) -> re_match: re.Match|None - #wind VRB + # wind VRB re_match=re.search("^VRB(?P[0-9]{2})(G(?P[0-9]{2}))?(?PMPS|KT)$", info) if re_match!=None: info_new: str wind_speed: float=float(re_match.groupdict()["wind_speed"]) - wind_speed_gust: float #wind speed gust, if no given equal to normal wind speed + wind_speed_gust: float # wind speed gust, if no given equal to normal wind speed wind_unit: str=re_match.groupdict()["wind_unit"] - if re_match.groupdict()["wind_speed_gust"]==None: #if no gust given: - wind_speed_gust=wind_speed #equal to normal wind speed + if re_match.groupdict()["wind_speed_gust"]==None: # if no gust given: + wind_speed_gust=wind_speed # equal to normal wind speed else: wind_speed_gust=float(re_match.groupdict()["wind_speed_gust"]) - if wind_unit=="KT": #if kt: convert + if wind_unit=="KT": # if kt: convert wind_speed *=KFSconvert_to_SI.SPEED["kt"] wind_speed_gust*=KFSconvert_to_SI.SPEED["kt"] @@ -36,30 +36,30 @@ def change_format_wind(info: str, station: Station, RWY_DB: pandas.DataFrame) -> info_new+=f"G{KFSfstr.notation_abs(wind_speed_gust, 0, round_static=True, width=2)}" info_new+="m/s" - if WEATHER_MIN["TWC"][0-3][0-9]{2})(?P[0-9]{2})(G(?P[0-9]{2}))?(?PMPS|KT)$", info) if re_match!=None: bold: bool - CWC: list=[] #across all runways, crosswind component + CWC: list=[] # across all runways, crosswind component info_new: str - RWY: pandas.DataFrame=RWY_DB[RWY_DB["airport_ident"]==station.ICAO] #in aerodrome all runways - wind_direction: int=int(re_match.groupdict()["wind_direction"])%360 #keep in [0; 360[ + RWY: pandas.DataFrame=RWY_DB[RWY_DB["airport_ident"]==station.ICAO] # in aerodrome all runways + wind_direction: int=int(re_match.groupdict()["wind_direction"])%360 # keep in [0; 360[ wind_speed: float=float(re_match.groupdict()["wind_speed"]) - wind_speed_gust: float #wind speed gust, if no given equal to normal wind speed + wind_speed_gust: float # wind speed gust, if no given equal to normal wind speed wind_unit: str=re_match.groupdict()["wind_unit"] - if re_match.groupdict()["wind_speed_gust"]==None: #if no gust given: - wind_speed_gust=wind_speed #equal to normal wind speed + if re_match.groupdict()["wind_speed_gust"]==None: # if no gust given: + wind_speed_gust=wind_speed # equal to normal wind speed else: wind_speed_gust=float(re_match.groupdict()["wind_speed_gust"]) - if wind_unit=="KT": #if kt: convert + if wind_unit=="KT": # if kt: convert wind_speed *=KFSconvert_to_SI.SPEED["kt"] wind_speed_gust*=KFSconvert_to_SI.SPEED["kt"] @@ -69,18 +69,18 @@ def change_format_wind(info: str, station: Station, RWY_DB: pandas.DataFrame) -> info_new+=f"G{KFSfstr.notation_abs(wind_speed_gust, 0, round_static=True, width=2)}" info_new+="m/s" - if RWY.empty==True: #if no runways found: assume direct crosswind + if RWY.empty==True: # if no runways found: assume direct crosswind CWC.append(wind_speed) - else: #if runways found: for each runway calculate crosswind components - CWC+=abs(numpy.sin(numpy.radians(wind_direction-RWY["le_heading_degT"]))*wind_speed_gust).dropna().tolist() #sin(direction difference)*wind speed, abs, remove NaN, convert to list #type:ignore + else: # if runways found: 
for each runway calculate crosswind components + CWC+=abs(numpy.sin(numpy.radians(wind_direction-RWY["le_heading_degT"]))*wind_speed_gust).dropna().tolist() # sin(direction difference)*wind speed, abs, remove NaN, convert to list # type:ignore for i in range(len(CWC)): - if CWC[i]<=WEATHER_MIN["CWC"]: #if at least 1 CWC below maximum: - bold=False #landable + if CWC[i]<=WEATHER_MIN["CWC"]: # if at least 1 CWC below maximum: + bold=False # landable break - else: #if all CWC above maximum: + else: # if all CWC above maximum: bold=True - if WEATHER_MIN["wind"] return f" {info_new}" - #wind too strong + # wind too strong re_match=re.search("^ABV(49MPS|99KT)$", info) if re_match!=None: info_new: str="50m/s+" - if WEATHER_MIN["TWC"]<50: #if TWC max. definitely exceeded: mark + if WEATHER_MIN["TWC"]<50: # if TWC max. definitely exceeded: mark info_new=f"**{info_new}**" return f" {info_new}" - #wind direction variable + # wind direction variable re_match=re.search("^(?P[0-3][0-9]{2})V(?P[0-3][0-9]{2})$", info) if re_match!=None: - wind_direction_1: int=int(re_match.groupdict()["wind_direction_1"])%360 #keep in [0; 360[ - wind_direction_2: int=int(re_match.groupdict()["wind_direction_2"])%360 #keep in [0; 360[ + wind_direction_1: int=int(re_match.groupdict()["wind_direction_1"])%360 # keep in [0; 360[ + wind_direction_2: int=int(re_match.groupdict()["wind_direction_2"])%360 # keep in [0; 360[ return f" {KFSfstr.notation_abs(wind_direction_1, 0, round_static=True, width=3)}°V{KFSfstr.notation_abs(wind_direction_2, 0, round_static=True, width=3)}°" \ No newline at end of file diff --git a/src/change_format_RMK.py b/src/change_format_RMK.py index 3fd470c..97d8f97 100644 --- a/src/change_format_RMK.py +++ b/src/change_format_RMK.py @@ -1,4 +1,4 @@ -#Copyright (c) 2023 구FS, all rights reserved. Subject to the CC BY-NC-SA 4.0 licence in `licence.md`. +# Copyright (c) 2023 구FS, all rights reserved. Subject to the CC BY-NC-SA 4.0 licence in `licence.md`. import datetime as dt import pandas from change_format_.HGT import change_format_HGT @@ -11,30 +11,30 @@ def change_format_RMK(info_list: list[str], i: int, station: Station, met_report_DT: dt.datetime, RWY_DB: pandas.DataFrame) -> str: - info_new=change_format_wind (info_list[i], station, RWY_DB) #wind + info_new=change_format_wind (info_list[i], station, RWY_DB) # wind if info_new!=None: return info_new - info_new=change_format_temp_dew (info_list[i]) #temperature and dewpoint + info_new=change_format_temp_dew (info_list[i]) # temperature and dewpoint if info_new!=None: return info_new - info_new=change_format_QNH (info_list[i]) #QNH in additional unit (ex. RCTP) + info_new=change_format_QNH (info_list[i]) # QNH in additional unit (ex. RCTP) if info_new!=None: return info_new - info_new=change_format_HGT (info_list[i], station) #height, example wind change altitude + info_new=change_format_HGT (info_list[i], station) # height, example wind change altitude if info_new!=None: return info_new - #in RMK 4 digits can mean different things: time, QNH... that's why forward unchanged + # in RMK 4 digits can mean different things: time, QNH... 
that's why forward unchanged - info_new=change_format_VV (info_list[i]) #visibility vertical + info_new=change_format_VV (info_list[i]) # visibility vertical if info_new!=None: return info_new - info_new=change_format_USA_codes(info_list[i], met_report_DT, station, RWY_DB) #USA weather station machine codes + info_new=change_format_USA_codes(info_list[i], met_report_DT, station, RWY_DB) # USA weather station machine codes if info_new!=None: return info_new - return f" {info_list[i]}" #if format not found: just forward it \ No newline at end of file + return f" {info_list[i]}" # if format not found: just forward it \ No newline at end of file diff --git a/src/init_DB.py b/src/init_DB.py index fccc8a0..fedeb54 100644 --- a/src/init_DB.py +++ b/src/init_DB.py @@ -1,30 +1,30 @@ -#Copyright (c) 2023 구FS, all rights reserved. Subject to the CC BY-NC-SA 4.0 licence in `licence.md`. +# Copyright (c) 2023 구FS, all rights reserved. Subject to the CC BY-NC-SA 4.0 licence in `licence.md`. import datetime as dt -import io #CSV string -> pandas.DataFrame +import io # CSV string -> pandas.DataFrame import logging import os -import pandas #Dataframes +import pandas import re import requests from DB_Type import DB_Type def init_DB(DB_type: DB_Type, DB: pandas.DataFrame, now_DT: dt.datetime, DOWNLOAD_TIMEOUT: int) -> pandas.DataFrame: - DB_filenames: list[str] #databases existing filenames - DB_filepath: str #filepath to database, date can be in past - DB_TODAY_FILEPATH: str=f"./database/{now_DT.strftime('%Y-%m-%d')} {DB_type.name} DB.csv" #filepath to database + DB_filenames: list[str] # databases existing filenames + DB_filepath: str # filepath to database, date can be in past + DB_TODAY_FILEPATH: str=f"./database/{now_DT.strftime('%Y-%m-%d')} {DB_type.name} DB.csv" # filepath to database - if DB.empty==False and os.path.isfile(DB_TODAY_FILEPATH): #if database not empty and from today already existing: assume database as already loaded + if DB.empty==False and os.path.isfile(DB_TODAY_FILEPATH): # if database not empty and from today already existing: assume database as already loaded return DB - DB=pandas.DataFrame() #clear database - os.makedirs(os.path.dirname(DB_TODAY_FILEPATH), exist_ok=True) #create database folder + DB=pandas.DataFrame() # clear database + os.makedirs(os.path.dirname(DB_TODAY_FILEPATH), exist_ok=True) # create database folder - #from file today, load database: + # from file today, load database: logging.info(f"Loading {DB_type.name} database from \"{DB_TODAY_FILEPATH}\"...") - try: #load database + try: # load database DB=pandas.read_csv(f"{DB_TODAY_FILEPATH}") except FileNotFoundError: logging.warning(f"Loading {DB_type.name} database failed, because \"{DB_TODAY_FILEPATH}\" does not exist.") @@ -32,16 +32,16 @@ def init_DB(DB_type: DB_Type, DB: pandas.DataFrame, now_DT: dt.datetime, DOWNLOA logging.warning(f"Loaded {DB_type.name} database, but it is empty.") except pandas.errors.ParserError: logging.warning(f"Loaded {DB_type.name} database, but parsing failed.") - else: #if loading database successful: + else: # if loading database successful: logging.info(f"\rLoaded {DB_type.name} database from \"{DB_TODAY_FILEPATH}\".") return DB - #loading unsuccessful, download database + # loading unsuccessful, download database logging.info(f"Downloading {DB_type.name} database...") - try: #download database - DB=requests.get(DB_type.value, timeout=DOWNLOAD_TIMEOUT).text #type:ignore - DB=pandas.read_csv(io.StringIO(DB)) #type:ignore + try: # download database + DB=requests.get(DB_type.value, 
timeout=DOWNLOAD_TIMEOUT).text # type:ignore + DB=pandas.read_csv(io.StringIO(DB)) # type:ignore except requests.ConnectionError: logging.warning(f"Downloading {DB_type.name} database failed with requests.ConnectionError.") except requests.ReadTimeout: @@ -50,23 +50,23 @@ def init_DB(DB_type: DB_Type, DB: pandas.DataFrame, now_DT: dt.datetime, DOWNLOA logging.warning(f"Downloaded {DB_type.name} database is empty.") except pandas.errors.ParserError: logging.warning(f"Downloaded {DB_type.name} database, but parsing failed.") - else: #downloading successful, save + else: # downloading successful, save logging.info(f"\rDownloaded and formatted {DB_type.name} database.") - logging.info(f"Saving {DB_type.name} database in \"{DB_TODAY_FILEPATH}\"...") #save downloaded database - try: #save database + logging.info(f"Saving {DB_type.name} database in \"{DB_TODAY_FILEPATH}\"...") # save downloaded database + try: # save database DB.to_csv(DB_TODAY_FILEPATH, index=False, mode="wt") except OSError: logging.warning(f"Saving {DB_type.name} database in \"{DB_TODAY_FILEPATH}\" failed.") else: logging.info(f"\rSaved {DB_type.name} database in \"{DB_TODAY_FILEPATH}\".") return DB - finally: #finally check number of databases on harddrive, keep maximum 5 + finally: # finally check number of databases on harddrive, keep maximum 5 logging.info("Looking for old databases to remove from archive...") - DB_filenames=[filename #load databases existing + DB_filenames=[filename # load databases existing for filename in sorted(os.listdir(os.path.dirname(DB_TODAY_FILEPATH))) if re.search(f"^[0-9]{4}-[0-1][0-9]-[0-3][0-9] {DB_type.name} DB.csv$", filename)!=None] - for i in range(len(DB_filenames)-5): #delete all databases saved except 5 most current + for i in range(len(DB_filenames)-5): # delete all databases saved except 5 most current DB_filepath=os.path.join(os.path.dirname(DB_TODAY_FILEPATH), DB_filenames[i]) logging.info(f"Removing \"{DB_filepath}\"...") try: @@ -75,20 +75,20 @@ def init_DB(DB_type: DB_Type, DB: pandas.DataFrame, now_DT: dt.datetime, DOWNLOA logging.warning(f"Removing \"{DB_filepath}\" failed with OSError.") logging.info(f"\rRemoved \"{DB_filepath}\".") - #if database empty: downloading unsuccessful, load from archive + # if database empty: downloading unsuccessful, load from archive logging.info(f"Loading {DB_type.name} database from archive...") - DB_filenames=[filename #load databases existing + DB_filenames=[filename # load databases existing for filename in sorted(os.listdir(os.path.dirname(DB_TODAY_FILEPATH))) if re.search(f"^[0-9]{4}-[0-1][0-9]-[0-3][0-9] {DB_type.name} DB.csv$", filename)!=None] - for i in range(len(DB_filenames)-1, -1, -1): #iterate archive from most recent to oldest + for i in range(len(DB_filenames)-1, -1, -1): # iterate archive from most recent to oldest DB_filepath=os.path.join(os.path.dirname(DB_TODAY_FILEPATH), DB_filenames[i]) logging.info(f"Loading {DB_type.name} database from \"{DB_filepath}\"...") try: DB=pandas.read_csv(DB_filepath) except OSError: logging.warning(f"Loading {DB_type.name} database from \"{DB_filepath}\" failed with OSError.") - else: #loading successful + else: # loading successful logging.info(f"\rLoaded {DB_type.name} database from \"{DB_filepath}\".") return DB diff --git a/src/main.py b/src/main.py index d85a1ba..5bd91d7 100644 --- a/src/main.py +++ b/src/main.py @@ -1,4 +1,4 @@ -#Copyright (c) 2023 구FS, all rights reserved. Subject to the CC BY-NC-SA 4.0 licence in `licence.md`. +# Copyright (c) 2023 구FS, all rights reserved. 
Subject to the CC BY-NC-SA 4.0 licence in `licence.md`. import aiohttp.client_exceptions import asyncio import discord, discord.ext.tasks @@ -22,47 +22,47 @@ from weather_minimums import WEATHER_MIN -#keep over runtime whole -aerodrome_DB: pandas.DataFrame=pandas.DataFrame() #aerodrome database -country_DB: pandas.DataFrame =pandas.DataFrame() #country database for country names -frequency_DB: pandas.DataFrame=pandas.DataFrame() #frequency database for information command -navaid_DB: pandas.DataFrame =pandas.DataFrame() #navaid database for information command -RWY_DB: pandas.DataFrame =pandas.DataFrame() #runway database for cross wind components and information command -servers: list[Server] #all variables for 1 server instance -SERVERS_FILENAME: str="servers.json" #save filename for all servers, so subscription is remembered beyond restarts +# keep over runtime whole +aerodrome_DB: pandas.DataFrame=pandas.DataFrame() # aerodrome database +country_DB: pandas.DataFrame =pandas.DataFrame() # country database for country names +frequency_DB: pandas.DataFrame=pandas.DataFrame() # frequency database for information command +navaid_DB: pandas.DataFrame =pandas.DataFrame() # navaid database for information command +RWY_DB: pandas.DataFrame =pandas.DataFrame() # runway database for cross wind components and information command +servers: list[Server] # all variables for 1 server instance +SERVERS_FILENAME: str="servers.json" # save filename for all servers, so subscription is remembered beyond restarts @KFSlog.timeit_async async def main() -> None: global servers - #keep over runtime whole, but read-only in sub-functions - DOWNLOAD_TIMEOUT: int=50 #METAR and TAF download timeouts [s] - COMMANDS_ALLOWED: tuple=( #commands allowed + # keep over runtime whole, but read-only in sub-functions + DOWNLOAD_TIMEOUT: int=50 # METAR and TAF download timeouts [s] + COMMANDS_ALLOWED: tuple=( # commands allowed "^(?P<station_ICAO>[0-9A-Z]{4})$", "^(?P<station_ICAO>[0-9A-Z]{4}) TAF$", - #"^(?P<station_ICAO>[0-9A-Z]{4}) INFO$" #TODO + # "^(?P<station_ICAO>[0-9A-Z]{4}) INFO$" # TODO ) - discord_bot: discord.Client #discord client instance - discord_bot_channel_names: list[str] #bot channel names - discord_bot_token: str #discord bot token - intents: discord.Intents #client permissions - if logging.root.level<=logging.DEBUG: #if level debug or lower: - UPDATE_FREQUENCY: float=200e-3 #update subscription with 200mHz (every 5s) + discord_bot: discord.Client # discord client instance + discord_bot_channel_names: list[str] # bot channel names + discord_bot_token: str # discord bot token + intents: discord.Intents # client permissions + if logging.root.level<=logging.DEBUG: # if level debug or lower: + UPDATE_FREQUENCY: float=200e-3 # update subscription with 200mHz (every 5s) else: - UPDATE_FREQUENCY: float=10e-3 #but usually update subscription with 10mHz (every 100s) + UPDATE_FREQUENCY: float=10e-3 # but usually update subscription with 10mHz (every 100s) - discord_bot_channel_names=[bot_channel_name for bot_channel_name in KFSconfig.load_config("discord_bot_channel_names.config", "bots\nbotspam\nmetar").split("\n") if bot_channel_name!=""] #load bot channel names, remove empty lines - discord_bot_token=KFSconfig.load_config("discord_bot.token") #load discord bot token - intents=discord.Intents.default() #standard permissions - intents.message_content=True #in addition with message contents - discord_bot=discord.Client(intents=intents) #create client instance + discord_bot_channel_names=[bot_channel_name for bot_channel_name in
KFSconfig.load_config("discord_bot_channel_names.config", "bots\nbotspam\nmetar").split("\n") if bot_channel_name!=""] # load bot channel names, remove empty lines + discord_bot_token=KFSconfig.load_config("discord_bot.token") # load discord bot token + intents=discord.Intents.default() # standard permissions + intents.message_content=True # in addition with message contents + discord_bot=discord.Client(intents=intents) # create client instance logging.info(f"Restoring server states from \"{SERVERS_FILENAME}\"...") try: - with open(SERVERS_FILENAME, "rt") as servers_file: #try to restore server states - servers=jsonpickle.decode(servers_file.read()) #type:ignore - except FileNotFoundError: #if file not created yet: no server states available yet + with open(SERVERS_FILENAME, "rt") as servers_file: # try to restore server states + servers=jsonpickle.decode(servers_file.read()) # type:ignore + except FileNotFoundError: # if file not created yet: no server states available yet logging.warning(f"\rRestoring server states from \"{SERVERS_FILENAME}\" failed with FileNotFoundError.") servers=[] else: @@ -75,12 +75,12 @@ async def on_ready(): Executed as soon as bot started up and is ready. Also executes after bot reconnects to the internet and is ready again. Initialised databases. """ logging.info("Started discord client.") - station_subscription.start() #start station subscription task + station_subscription.start() # start station subscription task return @discord_bot.event - async def on_message(message: discord.Message|Server): #either discord.Message if discord triggers or Server instance if subscription task triggers + async def on_message(message: discord.Message|Server): # either discord.Message if discord triggers or Server instance if subscription task triggers """ Executed every time a message is sent on the server. If the message is not from the bot itself and in a bot channel, process it. @@ -94,52 +94,52 @@ async def on_message(message: discord.Message|Server): #either discord.Message i global servers - #keep only for this iteration - append_TAF: bool #append TAF? - #INFO_command: bool #information command? - channel_id: int #current channel id - command: str #current command - message_send: str #message final to discord - METAR_o: str|None #METAR original format - METAR: str|None #METAR my format - server: Server #server current - station: Station #station parsed ICAO: str, and name: str|None, elev: float|None - TAF_o: str|None #TAF original format - TAF: str|None #TAF my format + # keep only for this iteration + append_TAF: bool # append TAF? + # INFO_command: bool # information command? + channel_id: int # current channel id + command: str # current command + message_send: str # message final to discord + METAR_o: str|None # METAR original format + METAR: str|None # METAR my format + server: Server # server current + station: Station # station parsed ICAO: str, and name: str|None, elev: float|None + TAF_o: str|None # TAF original format + TAF: str|None # TAF my format class ContextManager(): - save_server_states: bool=False #save states after exiting? only if command was valid - server: Server #current server + save_server_states: bool=False # save states after exiting? 
only if command was valid + server: Server # current server - def __enter__(self): #get current server, here because need access to server for force print in exit - if isinstance(message, discord.message.Message): #discord triggered - if message.guild!=None and message.guild.id not in [server.id for server in servers]: #if server not yet in server list: append + def __enter__(self): # get current server, here because need access to server for force print in exit + if isinstance(message, discord.message.Message): # discord triggered + if message.guild!=None and message.guild.id not in [server.id for server in servers]: # if server not yet in server list: append servers.append(Server(message.guild.id, message.guild.name)) - self.server=next(server for server in servers if server.id==message.guild.id) #get current server, SHALLOW COPY which is desired #type:ignore - elif isinstance(message, Server): #subscription triggered + self.server=next(server for server in servers if server.id==message.guild.id) # get current server, SHALLOW COPY which is desired # type:ignore + elif isinstance(message, Server): # subscription triggered self.server=message else: logging.critical("message: discord.Message|Server has invalid type \"{type(message)}\".") raise RuntimeError(f"Error in {main.__name__}{inspect.signature(main)}: message: discord.Message|Server has invalid type \"{type(message)}\".") return self - def __exit__(self, exc_type, exc_value, exc_traceback): #upon exit, force print by default + def __exit__(self, exc_type, exc_value, exc_traceback): # upon exit, force print by default self.server.force_print=True - if self.save_server_states==True: #save server states? only if command was valid + if self.save_server_states==True: # save server states? only if command was valid logging.info(f"Saving server states in \"{SERVERS_FILENAME}\"...") - with open(SERVERS_FILENAME, "wt") as servers_file: #save servers state so subscription is remembered beyond restarts - servers_file.write(jsonpickle.encode(servers, indent=4)) #recursively convert everything in the list to a dict so json can save it #type:ignore + with open(SERVERS_FILENAME, "wt") as servers_file: # save servers state so subscription is remembered beyond restarts + servers_file.write(jsonpickle.encode(servers, indent=4)) # recursively convert everything in the list to a dict so json can save it # type:ignore logging.info(f"\rSaved server states in \"{SERVERS_FILENAME}\".") return - with ContextManager() as context: #upon exit, force print by default - server=context.server #get current server from context, SHALLOW COPY which is desired + with ContextManager() as context: # upon exit, force print by default + server=context.server # get current server from context, SHALLOW COPY which is desired if isinstance(message, discord.message.Message): - if message.author==discord_bot.user or message.channel.name not in discord_bot_channel_names: #if message from bot itself or outside dedicated bot channel: do nothing #type:ignore + if message.author==discord_bot.user or message.channel.name not in discord_bot_channel_names: # if message from bot itself or outside dedicated bot channel: do nothing # type:ignore return - channel_id=message.channel.id #save active channel id - command=message.content.upper() #save active command + channel_id=message.channel.id # save active channel id + command=message.content.upper() # save active command elif isinstance(message, Server): - if server.channel_id==None or server.command==None: #if server created but has no 
valid command executed yet (invited to server but no command via dedicated bot channel executed) + if server.channel_id==None or server.command==None: # if server created but has no valid command executed yet (invited to server but no command via dedicated bot channel executed) return channel_id=server.channel_id command=server.command @@ -149,142 +149,142 @@ def __exit__(self, exc_type, exc_value, exc_traceback): #upon exit, force print logging.info("--------------------------------------------------") - logging.info(f"On server: {server.name} ({server.id})") #which server are we on? + logging.info(f"On server: {server.name} ({server.id})") # which server are we on? now_DT=dt.datetime.now(dt.timezone.utc) - #station_ICAO, append_TAF + # station_ICAO, append_TAF logging.info(f"Command: {command}") - for command_allowed in COMMANDS_ALLOWED: #did message match an allowed command? + for command_allowed in COMMANDS_ALLOWED: # did message match an allowed command? re_match=re.search(command_allowed, command) - if re_match==None: #if message did not match this command: + if re_match==None: # if message did not match this command: continue - station=Station(re_match.groupdict()["station_ICAO"]) #parse station ICAO code + station=Station(re_match.groupdict()["station_ICAO"]) # parse station ICAO code logging.info(f"Station: {station.ICAO}") - if command.endswith("TAF")==True: #if message ends with TAF: + if command.endswith("TAF")==True: # if message ends with TAF: logging.info("TAF was requested.") - append_TAF=True #TAF requested, append TAF later + append_TAF=True # TAF requested, append TAF later else: append_TAF=False - # if command.endswith("INFO"): #if matched command ends with INFO: - # logging.info("INFO was requested.") - # INFO_command=True #INFO requested, get information later - # else: - # INFO_command=False - break #command found, exit - else: #if message did not match any command: + # if command.endswith("INFO"): # if matched command ends with INFO: + # logging.info("INFO was requested.") + # INFO_command=True # INFO requested, get information later + # else: + # INFO_command=False + break # command found, exit + else: # if message did not match any command: logging.error(f"Last command \"{command}\" did not match any allowed command.") return - context.save_server_states=True #from here on: command is valid, save server state after exiting + context.save_server_states=True # from here on: command is valid, save server state after exiting - #station_name, station_elev + # station_name, station_elev logging.info(f"Looking for {station.ICAO} in aerodrome database...") - aerodrome=aerodrome_DB[aerodrome_DB["ident"]==station.ICAO] #aerodrome desired + aerodrome=aerodrome_DB[aerodrome_DB["ident"]==station.ICAO] # aerodrome desired aerodrome=aerodrome.reset_index(drop=True) - if aerodrome.empty==True: #if aerodrome not found in database: + if aerodrome.empty==True: # if aerodrome not found in database: logging.warning(f"\rCould not find {station.ICAO} in aerodrome database. 
No title, elevation, and runway directions available.") station.elev=None station.name=None - else: #if aerodrome found in database: + else: # if aerodrome found in database: logging.info(f"\rFound {station.ICAO} in aerodrome database.") - country=country_DB[country_DB["code"]==aerodrome.at[0, "iso_country"]] #country desired + country=country_DB[country_DB["code"]==aerodrome.at[0, "iso_country"]] # country desired country=country.reset_index(drop=True) - if country.empty==True: #if country not found in database: + if country.empty==True: # if country not found in database: logging.warning(f"Could not find country of country code \"{aerodrome.at[0, 'iso_country']}\".") - station.name=f"{aerodrome.at[0, 'iso_country']}, {aerodrome.at[0, 'name']}" #enter country code, aerodrome name - else: #if country found in database: - station.name=f"{country.at[0, 'name']}, {aerodrome.at[0, 'name']}" #enter country, aerodrome name + station.name=f"{aerodrome.at[0, 'iso_country']}, {aerodrome.at[0, 'name']}" # enter country code, aerodrome name + else: # if country found in database: + station.name=f"{country.at[0, 'name']}, {aerodrome.at[0, 'name']}" # enter country, aerodrome name logging.info(f"Name: {station.name}") - if pandas.isna(aerodrome.at[0, "elevation_ft"])==True: #if elevation unavailable: + if pandas.isna(aerodrome.at[0, "elevation_ft"])==True: # if elevation unavailable: logging.warning(f"\r{station.ICAO} has no elevation information in aerodrome database. No elevation available.") station.elev=None - else: #if elevation available: - station.elev=aerodrome.at[0, "elevation_ft"]*KFSconvert_to_SI.LENGTH["ft"] #save elevation [m] + else: # if elevation available: + station.elev=aerodrome.at[0, "elevation_ft"]*KFSconvert_to_SI.LENGTH["ft"] # save elevation [m] logging.info(f"Elevation: {KFSfstr.notation_abs(station.elev, 0, round_static=True)}m") - #information command - # if INFO_command==True: #if information command: execute that, then return without downloading METAR, TAF etc. - # await aerodrome_info(station, frequency_DB, navaid_DB, RWY_DB, command) - # return + # information command + # if INFO_command==True: # if information command: execute that, then return without downloading METAR, TAF etc. + # await aerodrome_info(station, frequency_DB, navaid_DB, RWY_DB, command) + # return - #download and convert METAR and TAF + # download and convert METAR and TAF try: METAR_o, METAR=process_METAR_TAF(Doc_Type.METAR, station, RWY_DB, now_DT, server, DOWNLOAD_TIMEOUT) - except (requests.ConnectTimeout, requests.ConnectionError, ValueError): #if unsuccessful: abort + except (requests.ConnectTimeout, requests.ConnectionError, ValueError): # if unsuccessful: abort return - if append_TAF==True: #if append TAF: download and process TAF + if append_TAF==True: # if append TAF: download and process TAF try: TAF_o, TAF=process_METAR_TAF(Doc_Type.TAF, station, RWY_DB, now_DT, server, DOWNLOAD_TIMEOUT) - except (requests.ConnectTimeout, requests.ConnectionError, ValueError): #if unsuccessful: just no TAF + except (requests.ConnectTimeout, requests.ConnectionError, ValueError): # if unsuccessful: just no TAF logging.warning(f"Continuing without TAF...") append_TAF=False TAF_o=None TAF=None - else: #default TAF + else: # default TAF TAF_o=None TAF=None - #print message? 
subscription logic - if server.force_print==True: #if force printing: no subscription, update server data - server.channel_id=channel_id #save active channel id, at this point known that command valid - server.command=command #save active command, this point known valid - server.METAR_o_previous=METAR_o #refresh METAR original previous - server.TAF_o_previous=TAF_o #refresh TAF orginal previous - server.METAR_update_finished=False #reset already waited for METAR - server.TAF_update_finished=False #reset already waited for TAF + # print message? subscription logic + if server.force_print==True: # if force printing: no subscription, update server data + server.channel_id=channel_id # save active channel id, at this point known that command valid + server.command=command # save active command, this point known valid + server.METAR_o_previous=METAR_o # refresh METAR original previous + server.TAF_o_previous=TAF_o # refresh TAF original previous + server.METAR_update_finished=False # reset already waited for METAR + server.TAF_update_finished=False # reset already waited for TAF - elif append_TAF==False: #subscription; if TAF undesired: disregard TAF - if server.METAR_o_previous==METAR_o: #if METAR original same as previous one: subscription, don't print + elif append_TAF==False: # subscription; if TAF undesired: disregard TAF + if server.METAR_o_previous==METAR_o: # if METAR original same as previous one: subscription, don't print logging.info("Original METAR has not been changed. Not sending anything.") return - elif server.METAR_o_previous!=METAR_o and server.METAR_update_finished==False: #if METAR original new different, but not waited yet: subscription, wait 1 round until source website refreshed METAR completely + elif server.METAR_o_previous!=METAR_o and server.METAR_update_finished==False: # if METAR original new different, but not waited yet: subscription, wait 1 round until source website refreshed METAR completely logging.info("Original METAR has changed, but update process may not have been finished yet. Not sending anything yet.") server.METAR_update_finished=True return - elif server.METAR_o_previous!=METAR_o and server.METAR_update_finished==True: #if METAR original new different and waited already: subscription, source website refreshed METAR completely, now send METAR + elif server.METAR_o_previous!=METAR_o and server.METAR_update_finished==True: # if METAR original new different and waited already: subscription, source website refreshed METAR completely, now send METAR logging.info("Original METAR has changed and update process should have been finished.") - server.METAR_o_previous=METAR_o #refresh METAR original previous - server.METAR_update_finished=False #reset already waited for METAR + server.METAR_o_previous=METAR_o # refresh METAR original previous + server.METAR_update_finished=False # reset already waited for METAR - elif append_TAF==True: #subscription; if TAF desiredt: regard TAF - if server.METAR_o_previous==METAR_o and server.TAF_o_previous==TAF_o: #if METAR original and TAF original same as previous one: subscription, don't print + elif append_TAF==True: # subscription; if TAF desired: regard TAF + if server.METAR_o_previous==METAR_o and server.TAF_o_previous==TAF_o: # if METAR original and TAF original same as previous one: subscription, don't print logging.info("Original METAR and TAF have not been changed.
Not sending anything.") return - elif(server.METAR_o_previous!=METAR_o and server.TAF_o_previous!=TAF_o #if METAR original new and TAF original new different, but not waited yet: subscription, wait 1 round until source website refreshed METAR and TAF completely + elif(server.METAR_o_previous!=METAR_o and server.TAF_o_previous!=TAF_o # if METAR original new and TAF original new different, but not waited yet: subscription, wait 1 round until source website refreshed METAR and TAF completely and server.METAR_update_finished==False and server.TAF_update_finished==False): logging.info("Original METAR and TAF have changed, but update process may not have been finished yet. Not sending anything yet.") server.METAR_update_finished=True server.TAF_update_finished=True return - elif server.METAR_o_previous!=METAR_o and server.METAR_update_finished==False: #if METAR original new different, but not waited yet: subscription, wait 1 round until source website refreshed METAR completely + elif server.METAR_o_previous!=METAR_o and server.METAR_update_finished==False: # if METAR original new different, but not waited yet: subscription, wait 1 round until source website refreshed METAR completely logging.info("Original METAR has changed, but update process may not have been finished yet. Not sending anything yet.") server.METAR_update_finished=True return - elif server.TAF_o_previous!=TAF_o and server.TAF_update_finished==False: #if TAF original new different, but not waited yet: subscription, wait 1 round until source website refreshed TAF completely + elif server.TAF_o_previous!=TAF_o and server.TAF_update_finished==False: # if TAF original new different, but not waited yet: subscription, wait 1 round until source website refreshed TAF completely logging.info("Original TAF has changed, but update process may not have been finished yet. 
Not sending anything yet.") server.TAF_update_finished=True return - elif(server.METAR_o_previous!=METAR_o and server.TAF_o_previous!=TAF_o #if METAR original new and TAF original new different and waited already: subscription, source website refreshed METAR and TAF completely, now send METAR and TAF + elif(server.METAR_o_previous!=METAR_o and server.TAF_o_previous!=TAF_o # if METAR original new and TAF original new different and waited already: subscription, source website refreshed METAR and TAF completely, now send METAR and TAF and server.METAR_update_finished==True and server.TAF_update_finished==True): logging.info("Original METAR and TAF have changed and update process should have been finished.") - server.METAR_o_previous=METAR_o #refresh METAR original previous - server.TAF_o_previous=TAF_o #refresh TAF original previous - server.METAR_update_finished=False #reset already waited for METAR - server.TAF_update_finished=False #reset already waited for TAF - elif server.METAR_o_previous!=METAR_o and server.METAR_update_finished==True: #if METAR original new different and waited already: subscription, source website refreshed METAR completely, now send METAR + server.METAR_o_previous=METAR_o # refresh METAR original previous + server.TAF_o_previous=TAF_o # refresh TAF original previous + server.METAR_update_finished=False # reset already waited for METAR + server.TAF_update_finished=False # reset already waited for TAF + elif server.METAR_o_previous!=METAR_o and server.METAR_update_finished==True: # if METAR original new different and waited already: subscription, source website refreshed METAR completely, now send METAR logging.info("Original METAR has changed and update process should have been finished.") - server.METAR_o_previous=METAR_o #refresh METAR original previous - server.METAR_update_finished=False #reset already waited for METAR - append_TAF=False #TAF did probably not refresh yet, subscription, only send METAR - elif server.TAF_o_previous!=TAF_o and server.TAF_update_finished==True: #if TAF original new different and waited already: subscription, source website refreshed TAF completely, now send METAR and TAF + server.METAR_o_previous=METAR_o # refresh METAR original previous + server.METAR_update_finished=False # reset already waited for METAR + append_TAF=False # TAF did probably not refresh yet, subscription, only send METAR + elif server.TAF_o_previous!=TAF_o and server.TAF_update_finished==True: # if TAF original new different and waited already: subscription, source website refreshed TAF completely, now send METAR and TAF logging.info("Original TAF has changed and update process should have been finished.") - server.TAF_o_previous=TAF_o #refresh TAF original previous - server.TAF_update_finished=False #reset already waited for TAF + server.TAF_o_previous=TAF_o # refresh TAF original previous + server.TAF_update_finished=False # reset already waited for TAF - #send messages + # send messages if append_TAF==False: logging.info("Sending METAR and original METAR...") elif append_TAF==True and TAF_o!=None: @@ -293,43 +293,43 @@ def __exit__(self, exc_type, exc_value, exc_traceback): #upon exit, force print logging.info("Sending METAR, original METAR, and error message...") message_send="" - if station.name==None: #if station name not found: - message_send+=f"Could not find {station.ICAO} in aerodrome database. 
No title, elevation, and runway directions available..\n----------\n" #send error message - else: #if station name found: - message_send+=f"{station.name}\n----------\n" #send station name - if METAR_o==None: #if METAR not found: - message_send+="There is no published METAR.\n----------\n" #send error message - else: #if METAR found: - message_send+=f"```{METAR}```\n----------\n" #send METAR - if append_TAF==True and TAF_o==None: #if TAF desired and not found: - message_send+="There is no published TAF.\n----------\n" #send error message - elif append_TAF==True and TAF_o!=None: #if TAF desired and found: - message_send+=f"```{TAF}```\n----------\n" #send TAF - if METAR_o!=None: #if METAR found: - message_send+=f"```{METAR_o}```\n----------\n" #send METAR original too - if append_TAF==True and TAF_o!=None: #if TAF desired and found: - message_send+=f"```{TAF_o}```\n----------\n" #send TAF original too - if station.elev!=None: #if station elevation found: - message_send+=f"```Elevation = {KFSfstr.notation_abs(station.elev, 0, round_static=True)}m ({KFSfstr.notation_abs(station.elev/KFSconvert_to_SI.LENGTH['ft'], 0, round_static=True)}ft)```\n----------\n".replace(",", ".") #send station elevation + if station.name==None: # if station name not found: + message_send+=f"Could not find {station.ICAO} in aerodrome database. No title, elevation, and runway directions available.\n----------\n" # send error message + else: # if station name found: + message_send+=f"{station.name}\n----------\n" # send station name + if METAR_o==None: # if METAR not found: + message_send+="There is no published METAR.\n----------\n" # send error message + else: # if METAR found: + message_send+=f"```{METAR}```\n----------\n" # send METAR + if append_TAF==True and TAF_o==None: # if TAF desired and not found: + message_send+="There is no published TAF.\n----------\n" # send error message + elif append_TAF==True and TAF_o!=None: # if TAF desired and found: + message_send+=f"```{TAF}```\n----------\n" # send TAF + if METAR_o!=None: # if METAR found: + message_send+=f"```{METAR_o}```\n----------\n" # send METAR original too + if append_TAF==True and TAF_o!=None: # if TAF desired and found: + message_send+=f"```{TAF_o}```\n----------\n" # send TAF original too + if station.elev!=None: # if station elevation found: + message_send+=f"```Elevation = {KFSfstr.notation_abs(station.elev, 0, round_static=True)}m ({KFSfstr.notation_abs(station.elev/KFSconvert_to_SI.LENGTH['ft'], 0, round_static=True)}ft)```\n----------\n".replace(",", ".") # send station elevation - if METAR_o!=None or TAF_o!=None: #if a METAR of TAF sent: warnings + if METAR_o!=None or TAF_o!=None: # if a METAR or TAF sent: warnings message_send+="Only use original METAR and TAF for flight operations!\n" - if station.name==None: #if aerodrome not found in database + if station.name==None: # if aerodrome not found in database message_send+=f"Clouds are given as heights. Winds are assumed direct crosswind and will be marked at {KFSfstr.notation_tech(WEATHER_MIN['CWC'], 2)}m/s or more.
Variable winds are assumed direct tailwind and will be marked at {KFSfstr.notation_tech(WEATHER_MIN['TWC'], 2)}m/s or more.\n" - elif station.elev==None: #if only elevation not found + elif station.elev==None: # if only elevation not found message_send+="Clouds are given as heights.\n" - else: #if everything found: default message + else: # if everything found: default message message_send+="Clouds are given as \"{coverage}{altitude}|{height}\".\n" try: - await discord_bot.get_channel(channel_id).send(message_send) #send message to discord #type:ignore - except AttributeError: #get_channel already returned None, bot has probably been removed from server + await discord_bot.get_channel(channel_id).send(message_send) # send message to discord # type:ignore + except AttributeError: # get_channel already returned None, bot has probably been removed from server logging.error("Sending message to discord failed. Assuming bot has been removed from server.") logging.info(f"Deleting server {server.name} ({server.id}) from servers list...") - servers=[s for s in servers if s.id!=server.id] #delete from list + servers=[s for s in servers if s.id!=server.id] # delete from list logging.info(f"\rDeleted server {server.name} ({server.id}) from servers list.") return - except discord.errors.DiscordServerError: #send failed + except discord.errors.DiscordServerError: # send failed logging.error("Sending message to discord failed.") return if append_TAF==False: @@ -342,7 +342,7 @@ def __exit__(self, exc_type, exc_value, exc_traceback): #upon exit, force print return - @discord.ext.tasks.loop(seconds=1/UPDATE_FREQUENCY) #every 100s look for updates + @discord.ext.tasks.loop(seconds=1/UPDATE_FREQUENCY) # every 100s look for updates async def station_subscription(): """ Executed every 100s for subscription logic. @@ -355,7 +355,7 @@ async def station_subscription(): global servers - #refresh databases + # refresh databases now_DT=dt.datetime.now(dt.timezone.utc) aerodrome_DB=init_DB(DB_Type.aerodrome, aerodrome_DB, now_DT, DOWNLOAD_TIMEOUT) country_DB =init_DB(DB_Type.country, country_DB, now_DT, DOWNLOAD_TIMEOUT) @@ -365,16 +365,16 @@ async def station_subscription(): for server in servers: - server.force_print=False #subscription - await on_message(server) #type:ignore + server.force_print=False # subscription + await on_message(server) # type:ignore return while True: logging.info("Starting discord client...") try: - await discord_bot.start(discord_bot_token) #start discord client now - except aiohttp.client_exceptions.ClientConnectorError: #if temporary internet failure: retry connection + await discord_bot.start(discord_bot_token) # start discord client now + except aiohttp.client_exceptions.ClientConnectorError: # if temporary internet failure: retry connection logging.error("Starting discord client failed, because client could not connect. Retrying in 10s...") await asyncio.sleep(10) @@ -383,9 +383,9 @@ async def station_subscription(): # http://tgftp.nws.noaa.gov/data/observations/metar/stations/.TXT # TAF now # https://tgftp.nws.noaa.gov/data/forecasts/taf/stations/.TXT -# +# # METAR explained # https://mediawiki.ivao.aero/index.php?title=METAR_explanation -# +# # databases # https://ourairports.com/data/ \ No newline at end of file diff --git a/src/process_METAR_TAF.py b/src/process_METAR_TAF.py index e556eeb..aac939a 100644 --- a/src/process_METAR_TAF.py +++ b/src/process_METAR_TAF.py @@ -1,13 +1,13 @@ -#Copyright (c) 2023 구FS, all rights reserved. 
Subject to the CC BY-NC-SA 4.0 licence in `licence.md`. +# Copyright (c) 2023 구FS, all rights reserved. Subject to the CC BY-NC-SA 4.0 licence in `licence.md`. import datetime as dt import inspect import logging import pandas import re import requests -from change_format import change_format #change information format -from change_format_RMK import change_format_RMK #in remark section change information format -from Doc_Type import Doc_Type #processing METAR or TAF? +from change_format import change_format # change information format +from change_format_RMK import change_format_RMK # in remark section change information format +from Doc_Type import Doc_Type # processing METAR or TAF? from Server import Server from Station import Station @@ -18,13 +18,13 @@ def process_METAR_TAF(doc_type: Doc_Type, station: Station, RWY_DB: pandas.DataF """ - met_report_DT: dt.datetime #publishion datetime - METAR_TAF_o: requests.Response|str #METAR or TAF original format - METAR_TAF_o_list: list #METAR or TAF original format as list, separated at blank and newline - METAR_TAF="" #METAR or TAF my format - METAR_TAF_URL: str #URL to download from - RMK_section=False #now in remark section? - timespan_published: dt.timedelta #how long ago published + met_report_DT: dt.datetime # publication datetime + METAR_TAF_o: requests.Response|str # METAR or TAF original format + METAR_TAF_o_list: list # METAR or TAF original format as list, separated at blank and newline + METAR_TAF="" # METAR or TAF my format + METAR_TAF_URL: str # URL to download from + RMK_section=False # now in remark section? + timespan_published: dt.timedelta # how long ago published match doc_type: case Doc_Type.METAR: @@ -36,91 +36,91 @@ def process_METAR_TAF(doc_type: Doc_Type, station: Station, RWY_DB: pandas.DataF raise RuntimeError(f"Error in {process_METAR_TAF.__name__}{inspect.signature(process_METAR_TAF)}: Document type ({doc_type}) is neither {Doc_Type.METAR} nor {Doc_Type.TAF}.") - #download METAR or TAF + # download METAR or TAF logging.info(f"Downloading {doc_type.name}...") try: - METAR_TAF_o=requests.get(METAR_TAF_URL, timeout=DOWNLOAD_TIMEOUT) #download METAR or TAF - except requests.ConnectTimeout: #if unsuccessful: abort + METAR_TAF_o=requests.get(METAR_TAF_URL, timeout=DOWNLOAD_TIMEOUT) # download METAR or TAF + except requests.ConnectTimeout: # if unsuccessful: abort logging.error(f"\rDownloading {doc_type.name} timed out after {DOWNLOAD_TIMEOUT}s.") raise - except requests.ConnectionError: #if unsuccessful: abort + except requests.ConnectionError: # if unsuccessful: abort logging.error(f"\rDownloading {doc_type.name} failed.") raise else: logging.info(f"\rDownloaded {doc_type.name}.") - if METAR_TAF_o.ok==False: #if something went wrong + if METAR_TAF_o.ok==False: # if something went wrong match METAR_TAF_o.status_code: - case 403: #if forbidden: resource temporarily unavailable, probably update + case 403: # if forbidden: resource temporarily unavailable, probably update logging.error(f"{station.ICAO} {doc_type.name} is currently unavailable.") raise ValueError(f"Error in {process_METAR_TAF.__name__}{inspect.signature(process_METAR_TAF)}: {station.ICAO} {doc_type.name} is currently unavailable.") - case 404: #if NOAA has no METAR or TAF page: station has no METAR or TAF + case 404: # if NOAA has no METAR or TAF page: station has no METAR or TAF logging.error(f"{station.ICAO} does not publish any {doc_type.name}.") - return None, None #return None to continue outside to send message that station does not publish METAR or TAF +
return None, None # return None to continue outside to send message that station does not publish METAR or TAF case _: logging.error(f"Downloading {station.ICAO} {doc_type.name} failed with status code {METAR_TAF_o.status_code}.") raise ValueError(f"Error in {process_METAR_TAF.__name__}{inspect.signature(process_METAR_TAF)}: Downloading {station.ICAO} {doc_type.name} failed with status code {METAR_TAF_o.status_code}.") - METAR_TAF_o=METAR_TAF_o.text #requests.Response -> str - METAR_TAF_o="\n".join([info.strip() for info in re.split("\n", METAR_TAF_o) if info!=""]) #METAR or TAF original format, separate at newline, remove empty infos, strip infos, rejoin at newline - METAR_TAF_o_list=[info.strip() for info in re.split("[ \n]", METAR_TAF_o) if info!=""] #METAR or TAF original format as list, separated at blank and newline, remove empty infos, strip infos + METAR_TAF_o=METAR_TAF_o.text # requests.Response -> str + METAR_TAF_o="\n".join([info.strip() for info in re.split("\n", METAR_TAF_o) if info!=""]) # METAR or TAF original format, separate at newline, remove empty infos, strip infos, rejoin at newline + METAR_TAF_o_list=[info.strip() for info in re.split("[ \n]", METAR_TAF_o) if info!=""] # METAR or TAF original format as list, separated at blank and newline, remove empty infos, strip infos logging.info(METAR_TAF_o) - if METAR_TAF_o=="": #if METAR or TAF exists but empty string: - return "", "" #that's the METAR or TAF lol, but don't process + if METAR_TAF_o=="": # if METAR or TAF exists but empty string: + return "", "" # that's the METAR or TAF lol, but don't process - #METAR or TAF expired? - for info in METAR_TAF_o_list: #look for publishion datetime - re_match=re.search("^(?P<day>[0-3][0-9])(?P<hour>[0-2][0-9])(?P<minute>[0-5][0-9])Z$", info) #look for met report time group - if re_match==None: #if not day time info: next + # METAR or TAF expired?
+ for info in METAR_TAF_o_list: # look for publication datetime + re_match=re.search("^(?P<day>[0-3][0-9])(?P<hour>[0-2][0-9])(?P<minute>[0-5][0-9])Z$", info) # look for met report time group + if re_match==None: # if not day time info: next continue - met_report_DT=dt.datetime(int(METAR_TAF_o[0:4]), int(METAR_TAF_o[5:7]), int(METAR_TAF_o[8:10]), 0, 0, 0, 0, dt.timezone.utc) #met report date according header for year and month, next fill out rest with met report time info - while int(met_report_DT.strftime("%d"))!=int(re_match.groupdict()["day"]): #as long as days not matching: met report datetime must be before website refresh, decrement day until same - met_report_DT-=dt.timedelta(days=1) #decrement day - met_report_DT+=dt.timedelta(hours=int(re_match.groupdict()["hour"]), minutes=int(re_match.groupdict()["minute"])) #correct day now, add time according met report time info + met_report_DT=dt.datetime(int(METAR_TAF_o[0:4]), int(METAR_TAF_o[5:7]), int(METAR_TAF_o[8:10]), 0, 0, 0, 0, dt.timezone.utc) # met report date according header for year and month, next fill out rest with met report time info + while int(met_report_DT.strftime("%d"))!=int(re_match.groupdict()["day"]): # as long as days not matching: met report datetime must be before website refresh, decrement day until same + met_report_DT-=dt.timedelta(days=1) # decrement day + met_report_DT+=dt.timedelta(hours=int(re_match.groupdict()["hour"]), minutes=int(re_match.groupdict()["minute"])) # correct day now, add time according met report time info break - else: #if met report time info not found: use website publishion datetime as fallback + else: # if met report time info not found: use website publication datetime as fallback logging.warning(f"{doc_type.name} report time info could not be found.") logging.info("Using NOAA website publishion datetime as fallback...") try: - met_report_DT=dt.datetime(int(METAR_TAF_o[0:4]), int(METAR_TAF_o[5:7]), int(METAR_TAF_o[8:10]), int(METAR_TAF_o[11:13]), int(METAR_TAF_o[14:16]), 0, 0, dt.timezone.utc) #publision datetime according header + met_report_DT=dt.datetime(int(METAR_TAF_o[0:4]), int(METAR_TAF_o[5:7]), int(METAR_TAF_o[8:10]), int(METAR_TAF_o[11:13]), int(METAR_TAF_o[14:16]), 0, 0, dt.timezone.utc) # publication datetime according header except ValueError: logging.error(f"\rUsing NOAA website publishion datetime as fallback failed. The downloaded {doc_type.name} does not seem to be valid.") raise else: logging.info("\rUsed NOAA website publishion datetime as fallback.") - METAR_TAF_o =METAR_TAF_o[17:] #remove website publishion header - METAR_TAF_o_list=METAR_TAF_o_list[2:] #remove website publishion header + METAR_TAF_o =METAR_TAF_o[17:] # remove website publication header + METAR_TAF_o_list=METAR_TAF_o_list[2:] # remove website publication header - timespan_published=now_DT-met_report_DT #timespan published + timespan_published=now_DT-met_report_DT # timespan published if doc_type==Doc_Type.METAR: - if dt.timedelta(seconds=3600)
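The hunk above relies on a named-group regex whose groups are read back via re_match.groupdict()["day"], ["hour"], and ["minute"]. For reference, a minimal standalone sketch of that met-report-time reconstruction follows; it is not part of the patch, and the function name and sample values are illustrative assumptions only.

import datetime as dt
import re

def parse_met_report_time(header_DT: dt.datetime, info: str) -> dt.datetime|None:
    # look for a "ddhhmmZ" met report time group, e.g. "312350Z"
    re_match=re.search("^(?P<day>[0-3][0-9])(?P<hour>[0-2][0-9])(?P<minute>[0-5][0-9])Z$", info)
    if re_match==None:
        return None
    # start at midnight of the website refresh date (header year, month, day) ...
    met_report_DT=dt.datetime(header_DT.year, header_DT.month, header_DT.day, 0, 0, 0, 0, dt.timezone.utc)
    # ... step back until the day of month matches the report's day group ...
    while int(met_report_DT.strftime("%d"))!=int(re_match.groupdict()["day"]):
        met_report_DT-=dt.timedelta(days=1)
    # ... then add the report's time of day
    return met_report_DT+dt.timedelta(hours=int(re_match.groupdict()["hour"]), minutes=int(re_match.groupdict()["minute"]))

# example: header datetime 2023-04-01 00:15Z with report group "312350Z" -> 2023-03-31 23:50Z
print(parse_met_report_time(dt.datetime(2023, 4, 1, 0, 15, 0, 0, dt.timezone.utc), "312350Z"))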