Skip to content

Commit

Permalink
Added the flag_info field to all sensor/data response paths
Browse files Browse the repository at this point in the history
  • Loading branch information
caparker committed Oct 21, 2024
1 parent 7ec6e73 commit 2ce7d1a
Showing 3 changed files with 44 additions and 41 deletions.
47 changes: 21 additions & 26 deletions openaq_api/openaq_api/v3/models/responses.py
Original file line number Diff line number Diff line change
@@ -2,7 +2,7 @@
from typing import Any, List

from humps import camelize
from pydantic import BaseModel, ConfigDict, Field, field_validator
from pydantic import BaseModel, ConfigDict, Field, field_validator, model_validator

from .utils import fix_date

@@ -242,56 +242,51 @@ class Location(JsonBase):
datetime_last: DatetimeObject | None = None


class Flag(JsonBase):
id: int = Field(alias='flags_id')
class FlagType(JsonBase):
flag_types_id: int = Field(alias='id')
label: str
level: str
invalidates: bool

class LocationFlag(JsonBase):
#model_config = ConfigDict(exclude_unset=True)
location_id: int
flag: Flag
flag_type: FlagType
datetime_from: DatetimeObject
datetime_to: DatetimeObject
sensor_ids: list[int] = []
note: str | None = None


class FlagInfo(JsonBase):
    """Flag summary attached to measurement responses.

    Wraps a single boolean so the response shape can later grow richer
    flag metadata without breaking clients.
    """

    has_flags: bool

    @model_validator(mode='before')
    @classmethod
    def check_data_type(cls, data: Any):
        """Coerce a bare boolean into the dict shape pydantic expects.

        The upstream query yields this column as a plain value (presumably
        a boolean from sensor_flags_exist — confirm against the SQL), so a
        bool is promoted to {"has_flags": <bool>}; anything else is passed
        through untouched for normal validation.
        """
        return {"has_flags": data} if isinstance(data, bool) else data


class Measurement(JsonBase):
    """A single measurement value plus its metadata, as serialized in API responses.

    Base model for the rollup variants (HourlyData, DailyData, AnnualData),
    which inherit this exact field set.
    """

    value: float
    # Whether any flags apply to this measurement; see FlagInfo, which
    # accepts either a bare boolean or the full object from the query.
    flag_info: FlagInfo
    parameter: ParameterBase
    # Optional context blocks; default to None when not computed/requested.
    period: Period | None = None
    coordinates: Coordinates | None = None
    summary: Summary | None = None
    coverage: Coverage | None = None


class HourlyData(JsonBase):
class HourlyData(Measurement):
    """Hourly rollup response model.

    Same shape as Measurement, except `value` is widened to allow None so
    that error rows in the hourly data do not fail validation.
    """

    value: float | None = None # Nullable to deal with errors
parameter: ParameterBase
period: Period | None = None
coordinates: Coordinates | None = None
summary: Summary | None = None
coverage: Coverage | None = None


class DailyData(JsonBase):
value: float
parameter: ParameterBase
period: Period | None = None
coordinates: Coordinates | None = None
summary: Summary | None = None
coverage: Coverage | None = None
class DailyData(Measurement):
    """Daily rollup response model; identical field set to Measurement."""
    ...


class AnnualData(JsonBase):
value: float
parameter: ParameterBase
period: Period | None = None
coordinates: Coordinates | None = None
summary: Summary | None = None
coverage: Coverage | None = None
class AnnualData(Measurement):
    """Annual rollup response model; identical field set to Measurement."""
    ...


# Similar to measurement but without timestamps
17 changes: 8 additions & 9 deletions openaq_api/openaq_api/v3/routers/flags.py
Original file line number Diff line number Diff line change
@@ -80,7 +80,7 @@ class SensorFlagQuery(QueryBaseModel):
)

def where(self):
return "ARRAY[:sensor_id::int] @> fm.sensors_ids"
return "ARRAY[:sensor_id::int] @> f.sensors_ids"


class LocationFlagQueries(LocationFlagQuery, DatetimePeriodQuery, Paging):
@@ -128,16 +128,15 @@ async def fetch_flags(q, db):
query.set_column_map({"timezone": "tz.tzid", "datetime": "lower(period)"})

sql = f"""
SELECT fm.sensor_nodes_id as location_id
, json_build_object('id', f.flags_id, 'label', f.label, 'level', fl.label, 'invalidates', fl.invalidates) as flag
SELECT f.sensor_nodes_id as location_id
, json_build_object('id', ft.flag_types_id, 'label', ft.label, 'level', ft.flag_level) as flag_type
, sensors_ids
, get_datetime_object(lower(fm.period), t.tzid) as datetime_from
, get_datetime_object(upper(fm.period), t.tzid) as datetime_to
, get_datetime_object(lower(f.period), t.tzid) as datetime_from
, get_datetime_object(upper(f.period), t.tzid) as datetime_to
, note
FROM flagged_measurements fm
JOIN flags f ON (fm.flags_id = f.flags_id)
JOIN flag_levels fl ON (f.flag_levels_id = fl.flag_levels_id)
JOIN sensor_nodes n ON (fm.sensor_nodes_id = n.sensor_nodes_id)
FROM flags f
JOIN flag_types ft ON (f.flag_types_id = ft.flag_types_id)
JOIN sensor_nodes n ON (f.sensor_nodes_id = n.sensor_nodes_id)
JOIN timezones t ON (n.timezones_id = t.timezones_id)
{query.where()}
"""
21 changes: 15 additions & 6 deletions openaq_api/openaq_api/v3/routers/measurements.py
Original file line number Diff line number Diff line change
@@ -373,6 +373,7 @@ async def fetch_measurements(query, db):
, 'percent_complete', 100
, 'percent_coverage', (s.data_averaging_period_seconds/s.data_logging_period_seconds)*100
) as coverage
, sensor_flags_exist(m.sensors_id, m.datetime, make_interval(secs=>s.data_averaging_period_seconds*-1)) as flag_info
FROM measurements m
JOIN sensors s USING (sensors_id)
JOIN measurands p USING (measurands_id)
@@ -400,7 +401,7 @@ async def fetch_measurements_aggregated(query, aggregate_to, db):
sql = f"""
WITH meas AS (
SELECT
sy.sensor_nodes_id
s.sensors_id
, s.measurands_id
, tz.tzid as timezone
, truncate_timestamp(datetime, :aggregate_to, tz.tzid) as datetime
@@ -428,7 +429,7 @@ async def fetch_measurements_aggregated(query, aggregate_to, db):
JOIN timezones tz ON (sn.timezones_id = tz.timezones_id)
{query.where()}
GROUP BY 1, 2, 3, 4)
SELECT t.sensor_nodes_id
SELECT t.sensors_id
----------
, json_build_object(
'label', '1 {aggregate_to}'
@@ -466,6 +467,7 @@ async def fetch_measurements_aggregated(query, aggregate_to, db):
'datetime_from', get_datetime_object(datetime_first, t.timezone)
, 'datetime_to', get_datetime_object(datetime_last, t.timezone)
) as coverage
, sensor_flags_exist(t.sensors_id, t.datetime, '-{dur}'::interval) as flag_info
{query.total()}
FROM meas t
JOIN measurands m ON (t.measurands_id = m.measurands_id)
@@ -511,6 +513,7 @@ async def fetch_hours(query, db):
'datetime_from', get_datetime_object(h.datetime_first, sn.timezone)
, 'datetime_to', get_datetime_object(h.datetime_last, sn.timezone)
) as coverage
, sensor_flags_exist(h.sensors_id, h.datetime) as flag_info
{query.fields()}
FROM hourly_data h
JOIN sensors s USING (sensors_id)
@@ -539,7 +542,7 @@ async def fetch_hours_aggregated(query, aggregate_to, db):
sql = f"""
WITH meas AS (
SELECT
sy.sensor_nodes_id
s.sensors_id
, s.measurands_id
, tz.tzid as timezone
, truncate_timestamp(datetime, :aggregate_to, tz.tzid) as datetime
@@ -567,7 +570,7 @@ async def fetch_hours_aggregated(query, aggregate_to, db):
JOIN timezones tz ON (sn.timezones_id = tz.timezones_id)
{query.where()}
GROUP BY 1, 2, 3, 4)
SELECT t.sensor_nodes_id
SELECT t.sensors_id
----------
, json_build_object(
'label', '1 {aggregate_to}'
@@ -605,6 +608,7 @@ async def fetch_hours_aggregated(query, aggregate_to, db):
'datetime_from', get_datetime_object(datetime_first, t.timezone)
, 'datetime_to', get_datetime_object(datetime_last, t.timezone)
) as coverage
, sensor_flags_exist(t.sensors_id, t.datetime, '-{dur}'::interval) as flag_info
{query.total()}
FROM meas t
JOIN measurands m ON (t.measurands_id = m.measurands_id)
@@ -751,6 +755,7 @@ async def fetch_days_trends(aggregate_to, query, db):
'datetime_from', get_datetime_object(o.coverage_first::timestamp, o.timezone)
, 'datetime_to', get_datetime_object(o.coverage_last + '1day'::interval, o.timezone)
) as coverage
, sensor_flags_exist(o.sensors_id, e.period_last, '-{dur}'::interval) as flag_info
FROM expected e
JOIN observed o ON (e.factor = o.factor)
ORDER BY e.factor
@@ -901,6 +906,7 @@ async def fetch_hours_trends(aggregate_to, query, db):
'datetime_from', get_datetime_object(o.coverage_first, o.timezone)
, 'datetime_to', get_datetime_object(o.coverage_last, o.timezone)
) as coverage
, sensor_flags_exist(o.sensors_id, o.coverage_first, '{dur}'::interval) as flag_info
FROM expected e
JOIN observed o ON (e.factor = o.factor)
ORDER BY e.factor
@@ -923,7 +929,7 @@ async def fetch_days_aggregated(query, aggregate_to, db):
sql = f"""
WITH meas AS (
SELECT
sy.sensor_nodes_id
s.sensors_id
, s.measurands_id
, sn.timezone
-- days are time begining
@@ -952,7 +958,7 @@ async def fetch_days_aggregated(query, aggregate_to, db):
JOIN locations_view_cached sn ON (sy.sensor_nodes_id = sn.id)
{query.where()}
GROUP BY 1, 2, 3, 4)
SELECT t.sensor_nodes_id
SELECT t.sensors_id
----------
, json_build_object(
'label', '1 {aggregate_to}'
@@ -990,6 +996,7 @@ async def fetch_days_aggregated(query, aggregate_to, db):
'datetime_from', get_datetime_object(datetime_first, t.timezone)
, 'datetime_to', get_datetime_object(datetime_last + '1day'::interval, t.timezone)
) as coverage
, sensor_flags_exist(t.sensors_id, t.datetime, '-{dur}'::interval) as flag_info
{query.total()}
FROM meas t
JOIN measurands m ON (t.measurands_id = m.measurands_id)
@@ -1035,6 +1042,7 @@ async def fetch_days(query, db):
'datetime_from', get_datetime_object(h.datetime_first, sn.timezone)
, 'datetime_to', get_datetime_object(h.datetime_last, sn.timezone)
) as coverage
, sensor_flags_exist(h.sensors_id, h.datetime, '-1day'::interval) as flag_info
{query.fields()}
FROM daily_data h
JOIN sensors s USING (sensors_id)
@@ -1086,6 +1094,7 @@ async def fetch_years(query, db):
'datetime_from', get_datetime_object(h.datetime_first, sn.timezone)
, 'datetime_to', get_datetime_object(h.datetime_last, sn.timezone)
) as coverage
, sensor_flags_exist(h.sensors_id, h.datetime, '-1y'::interval) as flag_info
{query.fields()}
FROM annual_data h
JOIN sensors s USING (sensors_id)

0 comments on commit 2ce7d1a

Please sign in to comment.